home/zuul/zuul-output/logs/controller/post_oc_get_builds.log
*** [INFO] Showing oc get 'builds'
NAME                                  TYPE     FROM             STATUS                       STARTED          DURATION
service-telemetry-framework-index-1   Docker   Binary@3eef2b7   Cancelled (CancelledBuild)   11 minutes ago   7s
service-telemetry-framework-index-2   Docker   Binary@3eef2b7   Cancelled (CancelledBuild)   11 minutes ago   10s
service-telemetry-framework-index-3   Docker   Binary@3eef2b7   Cancelled (CancelledBuild)   10 minutes ago   3s
service-telemetry-framework-index-4   Docker   Binary@3eef2b7   Cancelled (CancelledBuild)   10 minutes ago   2s
[INFO] oc get 'builds' -oyaml
apiVersion: v1
items:
- apiVersion: build.openshift.io/v1
  kind: Build
  metadata:
    annotations:
      openshift.io/build-config.name: service-telemetry-framework-index
      openshift.io/build.number: "1"
      openshift.io/build.pod-name: service-telemetry-framework-index-1-build
    creationTimestamp: "2025-12-12T16:28:18Z"
    generation: 2
    labels:
      build: service-telemetry-framework-index
      buildconfig: service-telemetry-framework-index
      openshift.io/build-config.name: service-telemetry-framework-index
      openshift.io/build.start-policy: Serial
    name: service-telemetry-framework-index-1
    namespace: service-telemetry
    ownerReferences:
    - apiVersion: build.openshift.io/v1
      controller: true
      kind: BuildConfig
      name: service-telemetry-framework-index
      uid: 87e1998d-7949-4a23-8745-bb3f9fafb5be
    resourceVersion: "43693"
    uid: 51809f6b-efe1-4123-9613-d72d50ac8ad2
  spec:
    nodeSelector: null
    output:
      pushSecret:
        name: builder-dockercfg-ff94g
      to:
        kind: ImageStreamTag
        name: service-telemetry-framework-index:latest
    postCommit: {}
    resources: {}
    revision:
      git:
        author:
          email: victoria@redhat.com
          name: Victoria Martinez de la Cruz
        commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14
        committer:
          email: victoria@redhat.com
          name: Victoria Martinez de la Cruz
        message: |-
          Raise the waiting time in validate_deployment.sh

          Wait for 5 minutes instead of 3 seconds to make sure that the environment is stable
      type: Git
    serviceAccount: builder
    source:
      binary: {}
      dockerfile: |
        # The base image is expected to contain
        # /bin/opm (with a serve subcommand) and /bin/grpc_health_probe
        FROM quay.io/openshift/origin-operator-registry:4.13
        COPY --chmod=666 index.yaml /configs/
        RUN mkdir /tmp/auth/
        # we need the contents of the mounted build volume from secret placed into config.json
        RUN cp /opt/app-root/auth/.dockerconfigjson /tmp/auth/config.json
        RUN DOCKER_CONFIG=/tmp/auth /bin/opm --skip-tls-verify render image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-operator-bundle:nightly-head image-registry.openshift-image-registry.svc:5000/service-telemetry/smart-gateway-operator-bundle:nightly-head --output=yaml >> /configs/index.yaml
        ENTRYPOINT ["/bin/opm"]
        CMD ["serve", "/configs"]
        # Set DC-specific label for the location of the DC root directory
        # in the image
        LABEL operators.operatorframework.io.index.configs.v1=/configs
      type: Binary
    strategy:
      dockerStrategy:
        from:
          kind: DockerImage
          name: quay.io/openshift/origin-operator-registry@sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a
        volumes:
        - mounts:
          - destinationPath: /opt/app-root/auth
          name: pull-secret
          source:
            secret:
              defaultMode: 420
              secretName: service-telemetry-framework-index-dockercfg
            type: Secret
      type: Docker
  status:
    cancelled: true
    completionTimestamp: "2025-12-12T16:28:25Z"
    conditions:
    - lastTransitionTime: "2025-12-12T16:28:18Z"
      lastUpdateTime: "2025-12-12T16:28:18Z"
      status: "False"
      type: New
    - lastTransitionTime: "2025-12-12T16:28:24Z"
      lastUpdateTime: "2025-12-12T16:28:24Z"
      status: "False"
      type: Pending
    - lastTransitionTime: "2025-12-12T16:28:25Z"
      lastUpdateTime: "2025-12-12T16:28:25Z"
      status: "False"
      type: Running
    - lastTransitionTime: "2025-12-12T16:28:25Z"
      lastUpdateTime: "2025-12-12T16:28:25Z"
      message: The build was cancelled by the user.
      reason: CancelledBuild
      status: "True"
      type: Cancelled
    config:
      kind: BuildConfig
      name: service-telemetry-framework-index
      namespace: service-telemetry
    duration: 7000000000
    message: The build was cancelled by the user.
    output: {}
    outputDockerImageReference: image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest
    phase: Cancelled
    reason: CancelledBuild
    startTimestamp: "2025-12-12T16:28:18Z"
- apiVersion: build.openshift.io/v1
  kind: Build
  metadata:
    annotations:
      openshift.io/build-config.name: service-telemetry-framework-index
      openshift.io/build.number: "2"
      openshift.io/build.pod-name: service-telemetry-framework-index-2-build
    creationTimestamp: "2025-12-12T16:28:36Z"
    generation: 2
    labels:
      build: service-telemetry-framework-index
      buildconfig: service-telemetry-framework-index
      openshift.io/build-config.name: service-telemetry-framework-index
      openshift.io/build.start-policy: Serial
    name: service-telemetry-framework-index-2
    namespace: service-telemetry
    ownerReferences:
    - apiVersion: build.openshift.io/v1
      controller: true
      kind: BuildConfig
      name: service-telemetry-framework-index
      uid: 87e1998d-7949-4a23-8745-bb3f9fafb5be
    resourceVersion: "43804"
    uid: 663b266a-8bad-4320-aa95-fb3abb991796
  spec:
    nodeSelector: null
    output:
      pushSecret:
        name: builder-dockercfg-ff94g
      to:
        kind: ImageStreamTag
        name: service-telemetry-framework-index:latest
    postCommit: {}
    resources: {}
    revision:
      git:
        author:
          email: victoria@redhat.com
          name: Victoria Martinez de la Cruz
        commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14
        committer:
          email: victoria@redhat.com
          name: Victoria Martinez de la Cruz
        message: |-
          Raise the waiting time in validate_deployment.sh

          Wait for 5 minutes instead of 3 seconds to make sure that the environment is stable
      type: Git
    serviceAccount: builder
    source:
      binary: {}
      dockerfile: |
        # The base image is expected to contain
        # /bin/opm (with a serve subcommand) and /bin/grpc_health_probe
        FROM quay.io/openshift/origin-operator-registry:4.13
        COPY --chmod=666 index.yaml /configs/
        RUN mkdir /tmp/auth/
        # we need the contents of the mounted build volume from secret placed into config.json
        RUN cp /opt/app-root/auth/.dockerconfigjson /tmp/auth/config.json
        RUN DOCKER_CONFIG=/tmp/auth /bin/opm --skip-tls-verify render image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-operator-bundle:nightly-head image-registry.openshift-image-registry.svc:5000/service-telemetry/smart-gateway-operator-bundle:nightly-head --output=yaml >> /configs/index.yaml
        ENTRYPOINT ["/bin/opm"]
        CMD ["serve", "/configs"]
        # Set DC-specific label for the location of the DC root directory
        # in the image
        LABEL operators.operatorframework.io.index.configs.v1=/configs
      type: Binary
    strategy:
      dockerStrategy:
        from:
          kind: DockerImage
          name: quay.io/openshift/origin-operator-registry@sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a
        volumes:
        - mounts:
          - destinationPath: /opt/app-root/auth
          name: pull-secret
          source:
            secret:
              defaultMode: 420
              secretName: service-telemetry-framework-index-dockercfg
            type: Secret
      type: Docker
  status:
    cancelled: true
    completionTimestamp: "2025-12-12T16:28:50Z"
    conditions:
    - lastTransitionTime: "2025-12-12T16:28:36Z"
      lastUpdateTime: "2025-12-12T16:28:36Z"
      status: "False"
      type: New
    - lastTransitionTime: "2025-12-12T16:28:49Z"
      lastUpdateTime: "2025-12-12T16:28:49Z"
      status: "False"
      type: Pending
    - lastTransitionTime: "2025-12-12T16:28:50Z"
      lastUpdateTime: "2025-12-12T16:28:50Z"
      status: "False"
      type: Running
    - lastTransitionTime: "2025-12-12T16:28:50Z"
      lastUpdateTime: "2025-12-12T16:28:50Z"
      message: The build was cancelled by the user.
      reason: CancelledBuild
      status: "True"
      type: Cancelled
    config:
      kind: BuildConfig
      name: service-telemetry-framework-index
      namespace: service-telemetry
    duration: 10000000000
    message: The build was cancelled by the user.
    output: {}
    outputDockerImageReference: image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest
    phase: Cancelled
    reason: CancelledBuild
    startTimestamp: "2025-12-12T16:28:40Z"
- apiVersion: build.openshift.io/v1
  kind: Build
  metadata:
    annotations:
      openshift.io/build-config.name: service-telemetry-framework-index
      openshift.io/build.number: "3"
      openshift.io/build.pod-name: service-telemetry-framework-index-3-build
    creationTimestamp: "2025-12-12T16:29:02Z"
    generation: 2
    labels:
      build: service-telemetry-framework-index
      buildconfig: service-telemetry-framework-index
      openshift.io/build-config.name: service-telemetry-framework-index
      openshift.io/build.start-policy: Serial
    name: service-telemetry-framework-index-3
    namespace: service-telemetry
    ownerReferences:
    - apiVersion: build.openshift.io/v1
      controller: true
      kind: BuildConfig
      name: service-telemetry-framework-index
      uid: 87e1998d-7949-4a23-8745-bb3f9fafb5be
    resourceVersion: "43886"
    uid: 7fa5685f-0d11-4e89-8ed1-eb27c1e5068a
  spec:
    nodeSelector: null
    output:
      pushSecret:
        name: builder-dockercfg-ff94g
      to:
        kind: ImageStreamTag
        name: service-telemetry-framework-index:latest
    postCommit: {}
    resources: {}
    revision:
      git:
        author:
          email: victoria@redhat.com
          name: Victoria Martinez de la Cruz
        commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14
        committer:
          email: victoria@redhat.com
          name: Victoria Martinez de la Cruz
        message: |-
          Raise the waiting time in validate_deployment.sh

          Wait for 5 minutes instead of 3 seconds to make sure that the environment is stable
      type: Git
    serviceAccount: builder
    source:
      binary: {}
      dockerfile: |
        # The base image is expected to contain
        # /bin/opm (with a serve subcommand) and /bin/grpc_health_probe
        FROM quay.io/openshift/origin-operator-registry:4.13
        COPY --chmod=666 index.yaml /configs/
        RUN mkdir /tmp/auth/
        # we need the contents of the mounted build volume from secret placed into config.json
        RUN cp /opt/app-root/auth/.dockerconfigjson /tmp/auth/config.json
        RUN DOCKER_CONFIG=/tmp/auth /bin/opm --skip-tls-verify render image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-operator-bundle:nightly-head image-registry.openshift-image-registry.svc:5000/service-telemetry/smart-gateway-operator-bundle:nightly-head --output=yaml >> /configs/index.yaml
        ENTRYPOINT ["/bin/opm"]
        CMD ["serve", "/configs"]
        # Set DC-specific label for the location of the DC root directory
        # in the image
        LABEL operators.operatorframework.io.index.configs.v1=/configs
      type: Binary
    strategy:
      dockerStrategy:
        from:
          kind: DockerImage
          name: quay.io/openshift/origin-operator-registry@sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a
        volumes:
        - mounts:
          - destinationPath: /opt/app-root/auth
          name: pull-secret
          source:
            secret:
              defaultMode: 420
              secretName: service-telemetry-framework-index-dockercfg
            type: Secret
      type: Docker
  status:
    cancelled: true
    completionTimestamp: "2025-12-12T16:29:06Z"
    conditions:
    - lastTransitionTime: "2025-12-12T16:29:02Z"
      lastUpdateTime: "2025-12-12T16:29:02Z"
      status: "False"
      type: New
    - lastTransitionTime: "2025-12-12T16:29:05Z"
      lastUpdateTime: "2025-12-12T16:29:05Z"
      status: "False"
      type: Pending
    - lastTransitionTime: "2025-12-12T16:29:06Z"
      lastUpdateTime: "2025-12-12T16:29:06Z"
      status: "False"
      type: Running
    - lastTransitionTime: "2025-12-12T16:29:06Z"
      lastUpdateTime: "2025-12-12T16:29:06Z"
      message: The build was cancelled by the user.
      reason: CancelledBuild
      status: "True"
      type: Cancelled
    config:
      kind: BuildConfig
      name: service-telemetry-framework-index
      namespace: service-telemetry
    duration: 3000000000
    message: The build was cancelled by the user.
    output: {}
    outputDockerImageReference: image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest
    phase: Cancelled
    reason: CancelledBuild
    startTimestamp: "2025-12-12T16:29:03Z"
- apiVersion: build.openshift.io/v1
  kind: Build
  metadata:
    annotations:
      openshift.io/build-config.name: service-telemetry-framework-index
      openshift.io/build.number: "4"
      openshift.io/build.pod-name: service-telemetry-framework-index-4-build
    creationTimestamp: "2025-12-12T16:29:18Z"
    generation: 2
    labels:
      build: service-telemetry-framework-index
      buildconfig: service-telemetry-framework-index
      openshift.io/build-config.name: service-telemetry-framework-index
      openshift.io/build.start-policy: Serial
    name: service-telemetry-framework-index-4
    namespace: service-telemetry
    ownerReferences:
    - apiVersion: build.openshift.io/v1
      controller: true
      kind: BuildConfig
      name: service-telemetry-framework-index
      uid: 87e1998d-7949-4a23-8745-bb3f9fafb5be
    resourceVersion: "43943"
    uid: 3a886b3c-dd4c-45e6-945b-b7f7a9d85470
  spec:
    nodeSelector: null
    output:
      pushSecret:
        name: builder-dockercfg-ff94g
      to:
        kind: ImageStreamTag
        name: service-telemetry-framework-index:latest
    postCommit: {}
    resources: {}
    revision:
      git:
        author:
          email: victoria@redhat.com
          name: Victoria Martinez de la Cruz
        commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14
        committer:
          email: victoria@redhat.com
          name: Victoria Martinez de la Cruz
        message: |-
          Raise the waiting time in validate_deployment.sh

          Wait for 5 minutes instead of 3 seconds to make sure that the environment is stable
      type: Git
    serviceAccount: builder
    source:
      binary: {}
      dockerfile: |
        # The base image is expected to contain
        # /bin/opm (with a serve subcommand) and /bin/grpc_health_probe
        FROM quay.io/openshift/origin-operator-registry:4.13
        COPY --chmod=666 index.yaml /configs/
        RUN mkdir /tmp/auth/
        # we need the contents of the mounted build volume from secret placed into config.json
        RUN cp /opt/app-root/auth/.dockerconfigjson /tmp/auth/config.json
        RUN DOCKER_CONFIG=/tmp/auth /bin/opm --skip-tls-verify render image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-operator-bundle:nightly-head image-registry.openshift-image-registry.svc:5000/service-telemetry/smart-gateway-operator-bundle:nightly-head --output=yaml >> /configs/index.yaml
        ENTRYPOINT ["/bin/opm"]
        CMD ["serve", "/configs"]
        # Set DC-specific label for the location of the DC root directory
        # in the image
        LABEL operators.operatorframework.io.index.configs.v1=/configs
      type: Binary
    strategy:
      dockerStrategy:
        from:
          kind: DockerImage
          name: quay.io/openshift/origin-operator-registry@sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a
        volumes:
        - mounts:
          - destinationPath: /opt/app-root/auth
          name: pull-secret
          source:
            secret:
              defaultMode: 420
              secretName: service-telemetry-framework-index-dockercfg
            type: Secret
      type: Docker
  status:
    cancelled: true
    completionTimestamp: "2025-12-12T16:29:21Z"
    conditions:
    - lastTransitionTime: "2025-12-12T16:29:18Z"
      lastUpdateTime: "2025-12-12T16:29:18Z"
      status: "False"
      type: New
    - lastTransitionTime: "2025-12-12T16:29:20Z"
      lastUpdateTime: "2025-12-12T16:29:20Z"
      status: "False"
      type: Pending
    - lastTransitionTime: "2025-12-12T16:29:22Z"
      lastUpdateTime: "2025-12-12T16:29:22Z"
      status: "False"
      type: Running
    - lastTransitionTime: "2025-12-12T16:29:22Z"
      lastUpdateTime: "2025-12-12T16:29:22Z"
      message: The build was cancelled by the user.
      reason: CancelledBuild
      status: "True"
      type: Cancelled
    config:
      kind: BuildConfig
      name: service-telemetry-framework-index
      namespace: service-telemetry
    duration: 2000000000
    message: The build was cancelled by the user.
    output: {}
    outputDockerImageReference: image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest
    phase: Cancelled
    reason: CancelledBuild
    startTimestamp: "2025-12-12T16:29:19Z"
kind: List
metadata:
  resourceVersion: ""
home/zuul/zuul-output/logs/controller/post_oc_get_subscriptions.log
*** [INFO] Showing oc get 'subscriptions'
NAME                                   PACKAGE                                SOURCE                 CHANNEL
elasticsearch-eck-operator-certified   elasticsearch-eck-operator-certified   certified-operators    stable
service-telemetry-operator             service-telemetry-operator             infrawatch-operators   unstable
[INFO] oc get 'subscriptions' -oyaml
apiVersion: v1
items:
- apiVersion: operators.coreos.com/v1alpha1
  kind: Subscription
  metadata:
    creationTimestamp: "2025-12-12T16:26:44Z"
    generation: 1
    labels:
      operators.coreos.com/elasticsearch-eck-operator-certified.service-telemetry: ""
    name: elasticsearch-eck-operator-certified
    namespace: service-telemetry
    resourceVersion: "44085"
    uid: ac9bb60f-fdf6-49a6-b289-d0315fb3fbd7
  spec:
    channel: stable
    installPlanApproval: Automatic
    name: elasticsearch-eck-operator-certified
    source: certified-operators
    sourceNamespace: openshift-marketplace
  status:
    catalogHealth:
    - catalogSourceRef:
        apiVersion: operators.coreos.com/v1alpha1
        kind: CatalogSource
        name: certified-operators
        namespace: openshift-marketplace
        resourceVersion: "41251"
        uid: 04c2c69e-a9e9-447b-aff2-c2db7de0ee83
      healthy: true
      lastUpdated: "2025-12-12T16:29:39Z"
    - catalogSourceRef:
        apiVersion: operators.coreos.com/v1alpha1
        kind: CatalogSource
        name: community-operators
        namespace: openshift-marketplace
        resourceVersion: "42447"
        uid: 88a656bd-c52a-4813-892e-7e3363ba9ac0
      healthy: true
      lastUpdated: "2025-12-12T16:29:39Z"
    - catalogSourceRef:
        apiVersion: operators.coreos.com/v1alpha1
        kind: CatalogSource
        name: redhat-operators
        namespace: openshift-marketplace
        resourceVersion: "41489"
        uid: ca744265-3ae3-4482-8c3d-b10e28fe1042
      healthy: true
      lastUpdated: "2025-12-12T16:29:39Z"
    - catalogSourceRef:
        apiVersion: operators.coreos.com/v1alpha1
        kind: CatalogSource
        name: infrawatch-operators
        namespace: service-telemetry
        resourceVersion: "44059"
        uid: 844125b7-bcbe-4137-9cf0-b045a652f768
      healthy: true
      lastUpdated: "2025-12-12T16:29:39Z"
    conditions:
    - lastTransitionTime: "2025-12-12T16:29:44Z"
      message: all available catalogsources are healthy
      reason: AllCatalogSourcesHealthy
      status: "False"
      type: CatalogSourcesUnhealthy
    - message: 'error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused"'
      reason: ErrorPreventedResolution
      status: "True"
      type: ResolutionFailed
    currentCSV: elasticsearch-eck-operator-certified.v3.2.0
    installPlanGeneration: 1
    installPlanRef:
      apiVersion: operators.coreos.com/v1alpha1
      kind: InstallPlan
      name: install-t6x4f
      namespace: service-telemetry
      resourceVersion: "41771"
      uid: ff0c53e3-1a6a-45d6-a8b8-40f1826013fe
    installedCSV: elasticsearch-eck-operator-certified.v3.2.0
    installplan:
      apiVersion: operators.coreos.com/v1alpha1
      kind: InstallPlan
      name: install-t6x4f
      uuid: ff0c53e3-1a6a-45d6-a8b8-40f1826013fe
    lastUpdated: "2025-12-12T16:29:44Z"
    state: AtLatestKnown
- apiVersion: operators.coreos.com/v1alpha1
  kind: Subscription
  metadata:
    creationTimestamp: "2025-12-12T16:29:24Z"
    generation: 1
    labels:
      operators.coreos.com/service-telemetry-operator.service-telemetry: ""
    name: service-telemetry-operator
    namespace: service-telemetry
    resourceVersion: "44089"
    uid: d58b8d17-7fad-4b3a-b89e-4b5e8d6c5fdd
  spec:
    channel: unstable
    installPlanApproval: Automatic
    name: service-telemetry-operator
    source: infrawatch-operators
    sourceNamespace: service-telemetry
  status:
    catalogHealth:
    - catalogSourceRef:
        apiVersion: operators.coreos.com/v1alpha1
        kind: CatalogSource
        name: certified-operators
        namespace: openshift-marketplace
        resourceVersion: "41251"
        uid: 04c2c69e-a9e9-447b-aff2-c2db7de0ee83
      healthy: true
      lastUpdated: "2025-12-12T16:29:40Z"
    - catalogSourceRef:
        apiVersion: operators.coreos.com/v1alpha1
        kind: CatalogSource
        name: community-operators
        namespace: openshift-marketplace
        resourceVersion: "42447"
        uid: 88a656bd-c52a-4813-892e-7e3363ba9ac0
      healthy: true
      lastUpdated: "2025-12-12T16:29:40Z"
    - catalogSourceRef:
        apiVersion: operators.coreos.com/v1alpha1
        kind: CatalogSource
        name: redhat-operators
        namespace: openshift-marketplace
        resourceVersion: "41489"
        uid: ca744265-3ae3-4482-8c3d-b10e28fe1042
      healthy: true
      lastUpdated: "2025-12-12T16:29:40Z"
    - catalogSourceRef:
        apiVersion: operators.coreos.com/v1alpha1
        kind: CatalogSource
        name: infrawatch-operators
        namespace: service-telemetry
        resourceVersion: "44059"
        uid: 844125b7-bcbe-4137-9cf0-b045a652f768
      healthy: true
      lastUpdated: "2025-12-12T16:29:40Z"
    conditions:
    - message: 'error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused"'
      reason: ErrorPreventedResolution
      status: "True"
      type: ResolutionFailed
    - lastTransitionTime: "2025-12-12T16:29:45Z"
      message: all available catalogsources are healthy
      reason: AllCatalogSourcesHealthy
      status: "False"
      type: CatalogSourcesUnhealthy
    lastUpdated: "2025-12-12T16:29:45Z"
kind: List
metadata:
  resourceVersion: ""
home/zuul/zuul-output/logs/controller/post_oc_get_images.log
*** [INFO] Showing oc get 'images'
NAME                                                                      IMAGE REFERENCE
sha256:00cf28cf9a6c427962f922855a6cc32692c760764ce2ce7411cf605dd510367f   registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:00cf28cf9a6c427962f922855a6cc32692c760764ce2ce7411cf605dd510367f
sha256:01920073bbd480bd34e2d8e17dced64d342257fa9a263d1843edf1cc45a50a7c
registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:01920073bbd480bd34e2d8e17dced64d342257fa9a263d1843edf1cc45a50a7c sha256:0194269c95cbc1aeb509611e283fd404791a0bc79911532bd9835bb51a19aff6 registry.redhat.io/ubi8/php-80@sha256:0194269c95cbc1aeb509611e283fd404791a0bc79911532bd9835bb51a19aff6 sha256:022488b1bf697b7dd8c393171a3247bef4ea545a9ab828501e72168f2aac9415 registry.access.redhat.com/ubi8/openjdk-8@sha256:022488b1bf697b7dd8c393171a3247bef4ea545a9ab828501e72168f2aac9415 sha256:02c1739b727e3b15a76cdd44f92cf91336d7dd34d5e830b2fe4ab3f2af48fe60 registry.redhat.io/ubi9/php-82@sha256:02c1739b727e3b15a76cdd44f92cf91336d7dd34d5e830b2fe4ab3f2af48fe60 sha256:04b6d6c9d05857ca39c360caf8698f3081360eeffe85da2eaab3e7e91040030b registry.redhat.io/ubi9/python-311@sha256:04b6d6c9d05857ca39c360caf8698f3081360eeffe85da2eaab3e7e91040030b sha256:0669a28577b41bb05c67492ef18a1d48a299ac54d1500df8f9f8f760ce4be24b registry.access.redhat.com/ubi8/openjdk-11@sha256:0669a28577b41bb05c67492ef18a1d48a299ac54d1500df8f9f8f760ce4be24b sha256:06bbbf9272d5c5161f444388593e9bd8db793d8a2d95a50b429b3c0301fafcdd registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:06bbbf9272d5c5161f444388593e9bd8db793d8a2d95a50b429b3c0301fafcdd sha256:07addbabcfd72212a82efce053a70362a06925ee1522c4dd783be878ffad46cb registry.redhat.io/ubi7/go-toolset@sha256:07addbabcfd72212a82efce053a70362a06925ee1522c4dd783be878ffad46cb sha256:0a1a889dcfb66dfe73d30f6a7a18dace8796e66e9f2203de97955500ad76f4aa quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:0a1a889dcfb66dfe73d30f6a7a18dace8796e66e9f2203de97955500ad76f4aa sha256:0a3a55052f6e8df1ea48de0c429c39e072b6aa8818250ccee634f96acacfd7c7 registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:0a3a55052f6e8df1ea48de0c429c39e072b6aa8818250ccee634f96acacfd7c7 sha256:0ab9d7fe68c5aef87b591e40bd6be7479e913798348f86ac43d7c0357794bb3f registry.redhat.io/ubi8/nodejs-20@sha256:0ab9d7fe68c5aef87b591e40bd6be7479e913798348f86ac43d7c0357794bb3f sha256:0d5551af0b9188a88eb39bf9745f992b1ab2ce4839d4a0555b59c58b5f3f6412 registry.redhat.io/ubi9/nodejs-20@sha256:0d5551af0b9188a88eb39bf9745f992b1ab2ce4839d4a0555b59c58b5f3f6412 sha256:0eea1d20aaa26041edf26b925fb204d839e5b93122190191893a0299b2e1b589 registry.access.redhat.com/ubi8/openjdk-17@sha256:0eea1d20aaa26041edf26b925fb204d839e5b93122190191893a0299b2e1b589 sha256:13577236b039ed11e9f1070f884e9836e731944575de2ee59b290b05e08ad5f8 registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:13577236b039ed11e9f1070f884e9836e731944575de2ee59b290b05e08ad5f8 sha256:144941f6745ed291d11d94c66037cfbb1d7cd9cf28db4f2234d9265efe767eff registry.redhat.io/fuse7/fuse-karaf-openshift-jdk11-rhel8@sha256:144941f6745ed291d11d94c66037cfbb1d7cd9cf28db4f2234d9265efe767eff sha256:146789aaa36b11c2194c3a1cd1bbc9d56016a67cf2791401b59c339983dd2a5e registry.redhat.io/rhscl/mariadb-105-rhel7@sha256:146789aaa36b11c2194c3a1cd1bbc9d56016a67cf2791401b59c339983dd2a5e sha256:14de89e89efc97aee3b50141108b7833708c3a93ad90bf89940025ab5267ba86 registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:14de89e89efc97aee3b50141108b7833708c3a93ad90bf89940025ab5267ba86 sha256:15ca7a76fdcca80b800ea420857782badd4960d99d120322c255462c098ed641 registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:15ca7a76fdcca80b800ea420857782badd4960d99d120322c255462c098ed641 sha256:16aacc57365922ce9329f1306153e444a02c4883b5b6ea648b0e47ef286df396 registry.access.redhat.com/ubi8/dotnet-80-runtime@sha256:16aacc57365922ce9329f1306153e444a02c4883b5b6ea648b0e47ef286df396 
sha256:1b7ca459c309d850b031f63618176b460fa348899201a6a82a5a42c75d09563d registry.redhat.io/fuse7/fuse-java-openshift-rhel8@sha256:1b7ca459c309d850b031f63618176b460fa348899201a6a82a5a42c75d09563d sha256:1ce541f489f72d4b354b96e9ad8b8f4e27099534d1cf5cebdfd505a1825f6545 registry.redhat.io/ubi10/python-312-minimal@sha256:1ce541f489f72d4b354b96e9ad8b8f4e27099534d1cf5cebdfd505a1825f6545 sha256:1d526f710f53ea2805441bbd04057242715d6fe2c91257b1ccd53c7a72c499be registry.redhat.io/fuse7/fuse-java-openshift@sha256:1d526f710f53ea2805441bbd04057242715d6fe2c91257b1ccd53c7a72c499be sha256:1d68b58a73f4cf15fcd886ab39fddf18be923b52b24cb8ec3ab1da2d3e9bd5f6 registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:1d68b58a73f4cf15fcd886ab39fddf18be923b52b24cb8ec3ab1da2d3e9bd5f6 sha256:1ef62ca43bc7b5be382470644797926cecd419f1496a471cc230d71147b8f878 registry.redhat.io/ubi8/python-39@sha256:1ef62ca43bc7b5be382470644797926cecd419f1496a471cc230d71147b8f878 sha256:1fe8afafb4a8cfe086dd3c3f59fc717ccc8924e570deaed38f4751962aed4211 registry.redhat.io/fuse7/fuse-java-openshift@sha256:1fe8afafb4a8cfe086dd3c3f59fc717ccc8924e570deaed38f4751962aed4211 sha256:2254dc2f421f496b504aafbbd8ea37e660652c4b6b4f9a0681664b10873be7fe registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:2254dc2f421f496b504aafbbd8ea37e660652c4b6b4f9a0681664b10873be7fe sha256:229ee7b88c5f700c95d557d0b37b8f78dbb6b125b188c3bf050cfdb32aec7962 registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:229ee7b88c5f700c95d557d0b37b8f78dbb6b125b188c3bf050cfdb32aec7962 sha256:230cd475733320b85bef99f7634b0f73ba82e323865d1e46be6d76fe03c337e8 registry.redhat.io/fuse7/fuse-eap-openshift@sha256:230cd475733320b85bef99f7634b0f73ba82e323865d1e46be6d76fe03c337e8 sha256:2406759804b3f8c4ea6a6ba582a1ab82be48362a8a815a82bb4aa04bf81e86e3 registry.redhat.io/jboss-eap-7/eap-xp4-openjdk11-runtime-openshift-rhel8@sha256:2406759804b3f8c4ea6a6ba582a1ab82be48362a8a815a82bb4aa04bf81e86e3 sha256:25f6a298e4505c38e7220e8a654852de3822d40a99b5f47da657251f31c3ffc3 registry.redhat.io/jboss-webserver-5/jws57-openjdk11-openshift-rhel8@sha256:25f6a298e4505c38e7220e8a654852de3822d40a99b5f47da657251f31c3ffc3 sha256:26c16d866c58f4d47901f8a009d39b5f2ddc76a5d7b2cb5b0065ed8cd8eea1b6 registry.access.redhat.com/ubi8/dotnet-80@sha256:26c16d866c58f4d47901f8a009d39b5f2ddc76a5d7b2cb5b0065ed8cd8eea1b6 sha256:2788719b2da9f43a904d670b43cf29445a687a1ad6eb96e4a052e15cd3188a0f registry.redhat.io/ubi8/php-82@sha256:2788719b2da9f43a904d670b43cf29445a687a1ad6eb96e4a052e15cd3188a0f sha256:27901936ab8866d1d1293479bb3448cb3ff98cdcaa8242926a9c49896a864c2f registry.redhat.io/ubi9/nginx-124@sha256:27901936ab8866d1d1293479bb3448cb3ff98cdcaa8242926a9c49896a864c2f sha256:2861514e125903261aa0004883a7f7aeeb4c189b2d0d175372d1edc111942eda registry.redhat.io/ubi9/go-toolset@sha256:2861514e125903261aa0004883a7f7aeeb4c189b2d0d175372d1edc111942eda sha256:29d831cb1f5fe839ab6e8c59d57f695094938a9f97e071bdbc854e54a90ecb94 registry.redhat.io/fuse7/fuse-java-openshift@sha256:29d831cb1f5fe839ab6e8c59d57f695094938a9f97e071bdbc854e54a90ecb94 sha256:2ae058ee7239213fb495491112be8cc7e6d6661864fd399deb27f23f50f05eb4 registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:2ae058ee7239213fb495491112be8cc7e6d6661864fd399deb27f23f50f05eb4 sha256:2c7e510949ed2c19504d0aaed1ad891f8aa03cd649d04359cc6a2cdffc40b594 registry.redhat.io/ubi9/nginx-122@sha256:2c7e510949ed2c19504d0aaed1ad891f8aa03cd649d04359cc6a2cdffc40b594 sha256:2cee344e4cfcfdc9a117fd82baa6f2d5daa7eeed450e02cd5d5554b424410439 
registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:2cee344e4cfcfdc9a117fd82baa6f2d5daa7eeed450e02cd5d5554b424410439 sha256:2ec1207cc75b74c26998f7c3386199a30213b37a05fab6e41a713d4a74bc4d5a registry.redhat.io/rhel9/mysql-80@sha256:2ec1207cc75b74c26998f7c3386199a30213b37a05fab6e41a713d4a74bc4d5a sha256:2f59ad75b66a3169b0b03032afb09aa3cfa531dbd844e3d3a562246e7d09c282 registry.access.redhat.com/ubi8/openjdk-8@sha256:2f59ad75b66a3169b0b03032afb09aa3cfa531dbd844e3d3a562246e7d09c282 sha256:301b093ae4fbc18f7d9d11b803d9da4220dad4556202a8de2f04377ff87c2f4d registry.redhat.io/jboss-webserver-5/jws57-openjdk8-openshift-rhel8@sha256:301b093ae4fbc18f7d9d11b803d9da4220dad4556202a8de2f04377ff87c2f4d sha256:306ce79b5647eb627aebbd6a9c956f9cd09c24ef7b12e42c1de6ba89f8fd8121 registry.redhat.io/rhscl/postgresql-12-rhel7@sha256:306ce79b5647eb627aebbd6a9c956f9cd09c24ef7b12e42c1de6ba89f8fd8121 sha256:32a5e806bd88b40568d46864fd313541498e38fabfc5afb5f3bdfe052c4b4c5f registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:32a5e806bd88b40568d46864fd313541498e38fabfc5afb5f3bdfe052c4b4c5f sha256:3375ec169d274278da56d1401c5c1285f7d2812ea0bde2ac9ad9652b69f80893 registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:3375ec169d274278da56d1401c5c1285f7d2812ea0bde2ac9ad9652b69f80893 sha256:33d4dff40514e91d86b42e90b24b09a5ca770d9f67657c936363d348cd33d188 registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:33d4dff40514e91d86b42e90b24b09a5ca770d9f67657c936363d348cd33d188 sha256:38427fd30565b66ec512fb8d86bf442a7ac4a100d44332e8c42b472fdf821db0 registry.redhat.io/ubi8/python-36@sha256:38427fd30565b66ec512fb8d86bf442a7ac4a100d44332e8c42b472fdf821db0 sha256:3843651d85087f9f19c0047f3b0c09e41f241946867d4a78acfda37ca0a405e2 quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3843651d85087f9f19c0047f3b0c09e41f241946867d4a78acfda37ca0a405e2 sha256:38c7e4f7dea04bb536f05d78e0107ebc2a3607cf030db7f5c249f13ce1f52d59 registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:38c7e4f7dea04bb536f05d78e0107ebc2a3607cf030db7f5c249f13ce1f52d59 sha256:3b94ccfa422b8ba0014302a3cfc6916b69f0f5a9dfd757b6704049834d4ff0ae registry.access.redhat.com/ubi8/openjdk-17@sha256:3b94ccfa422b8ba0014302a3cfc6916b69f0f5a9dfd757b6704049834d4ff0ae sha256:3bc55cb1bafdd281a784ec7950c8e95914079522f152f642e8172869e83b4585 registry.redhat.io/ocp-tools-4/jenkins-rhel8@sha256:3bc55cb1bafdd281a784ec7950c8e95914079522f152f642e8172869e83b4585 sha256:3f00540ce2a3a01d2a147a7d73825fe78697be213a050bd09edae36266d6bc40 registry.access.redhat.com/ubi8/openjdk-11@sha256:3f00540ce2a3a01d2a147a7d73825fe78697be213a050bd09edae36266d6bc40 sha256:3f01daca201d91f4989f8ffe80625d2e08fc0e69f241a7359d30c15cc7c9a419 registry.redhat.io/ubi8/openjdk-17@sha256:3f01daca201d91f4989f8ffe80625d2e08fc0e69f241a7359d30c15cc7c9a419 sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a quay.io/openshift/origin-operator-registry@sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a sha256:3fdd215cd56a7bfe500482c73271dc1e79bf90457a0f245e6f50d3d63ca4da7c registry.redhat.io/ubi9/nodejs-20-minimal@sha256:3fdd215cd56a7bfe500482c73271dc1e79bf90457a0f245e6f50d3d63ca4da7c sha256:403c4481d100edee6355e3defc17cec304db6f510b0c953c298e171b16dbd8bf registry.redhat.io/jboss-eap-7/eap74-openjdk8-openshift-rhel7@sha256:403c4481d100edee6355e3defc17cec304db6f510b0c953c298e171b16dbd8bf sha256:4100f9633dd7533a6ab847ea2b666de21bbb84baf070945a3c142cb019cd9a5f registry.redhat.io/ubi9/python-312@sha256:4100f9633dd7533a6ab847ea2b666de21bbb84baf070945a3c142cb019cd9a5f 
sha256:421d1f6a10e263677b7687ccea8e4a59058e2e3c80585505eec9a9c2e6f9f40e registry.redhat.io/redhat-openjdk-18/openjdk18-openshift@sha256:421d1f6a10e263677b7687ccea8e4a59058e2e3c80585505eec9a9c2e6f9f40e sha256:425e2c7c355bea32be238aa2c7bdd363b6ab3709412bdf095efe28a8f6c07d84 registry.access.redhat.com/ubi8/openjdk-11@sha256:425e2c7c355bea32be238aa2c7bdd363b6ab3709412bdf095efe28a8f6c07d84 sha256:431753c8a6a8541fdc0edd3385b2c765925d244fdd2347d2baa61303789696be registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:431753c8a6a8541fdc0edd3385b2c765925d244fdd2347d2baa61303789696be sha256:4361e4a098acb1d1cbd79b6fb1e67a891949e65cdc40e286a8e5da9bfb7fa332 registry.redhat.io/fuse7/fuse-java-openshift@sha256:4361e4a098acb1d1cbd79b6fb1e67a891949e65cdc40e286a8e5da9bfb7fa332 sha256:43920d10408205a379519e16530d5181db65a79df3c2725b3cbad26798d09037 registry.redhat.io/ubi8/dotnet-90-runtime@sha256:43920d10408205a379519e16530d5181db65a79df3c2725b3cbad26798d09037 sha256:4396f6b4629ba45fe23c13c91aaa64427e957b15841bc65c84537763f00bcbe0 quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:4396f6b4629ba45fe23c13c91aaa64427e957b15841bc65c84537763f00bcbe0 sha256:44f6258460b45f78d229e9a3865e7481285d268460760710f69f951ed2b41aa5 registry.redhat.io/ubi9/perl-532@sha256:44f6258460b45f78d229e9a3865e7481285d268460760710f69f951ed2b41aa5 sha256:46a4e73ddb085d1f36b39903ea13ba307bb958789707e9afde048764b3e3cae2 registry.access.redhat.com/ubi8/openjdk-17@sha256:46a4e73ddb085d1f36b39903ea13ba307bb958789707e9afde048764b3e3cae2 sha256:496e23be70520863bce6f7cdc54d280aca2c133d06e992795c4dcbde1a9dd1ab registry.access.redhat.com/ubi8/openjdk-8@sha256:496e23be70520863bce6f7cdc54d280aca2c133d06e992795c4dcbde1a9dd1ab sha256:4a69ef73f4b6afb2819630baf5d169a038ed4def330a44534926aa933cbbd7e5 registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:4a69ef73f4b6afb2819630baf5d169a038ed4def330a44534926aa933cbbd7e5 sha256:4b4e59365194340a2a68d6720b35f4f3434c75c7d504a9e785fb056f7e4212e7 registry.redhat.io/rhel9/postgresql-13@sha256:4b4e59365194340a2a68d6720b35f4f3434c75c7d504a9e785fb056f7e4212e7 sha256:4f35566977c35306a8f2102841ceb7fa10a6d9ac47c079131caed5655140f9b2 registry.access.redhat.com/ubi8/openjdk-21@sha256:4f35566977c35306a8f2102841ceb7fa10a6d9ac47c079131caed5655140f9b2 sha256:4ffd7ccbe8ff0aa2e09e1c8a72410aadc721ed3ed227890f03ce3f8aa2b33700 registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:4ffd7ccbe8ff0aa2e09e1c8a72410aadc721ed3ed227890f03ce3f8aa2b33700 sha256:50729a37cfd9eeb05865038eef01b6a2baa92e2f12fe429de3f43d85ef8824b5 registry.redhat.io/ocp-tools-4/jenkins-agent-base-rhel8@sha256:50729a37cfd9eeb05865038eef01b6a2baa92e2f12fe429de3f43d85ef8824b5 sha256:50752ad94cb38e775d2a65fdc43c3249d6d24d87d15411af7b1525aa8a934277 registry.redhat.io/ubi8/python-312@sha256:50752ad94cb38e775d2a65fdc43c3249d6d24d87d15411af7b1525aa8a934277 sha256:510b3f197ad39f82b828cc8be16e12a193d4dcd6ea27af01ce10c71b87c5cbfc registry.redhat.io/rhel8/postgresql-15@sha256:510b3f197ad39f82b828cc8be16e12a193d4dcd6ea27af01ce10c71b87c5cbfc sha256:52f9b4df3f7833876ee502a6bff2539491db07e060b213b6a0a8fda0c4a881c1 registry.redhat.io/ocp-tools-4/jenkins-agent-base-rhel8@sha256:52f9b4df3f7833876ee502a6bff2539491db07e060b213b6a0a8fda0c4a881c1 sha256:5443b2d3e19c8f540cbe133113a7a4479f3ad98caa1a2a5e6ac48acbe4914b39 registry.redhat.io/fuse7/fuse-eap-openshift@sha256:5443b2d3e19c8f540cbe133113a7a4479f3ad98caa1a2a5e6ac48acbe4914b39 sha256:547a068fddf44318e62b26caa267375dc77acc10ae5dcdd0869a1d60f8b93d5d 
registry.redhat.io/ubi9/python-312-minimal@sha256:547a068fddf44318e62b26caa267375dc77acc10ae5dcdd0869a1d60f8b93d5d sha256:5522021e9081fa0f0163f75afedb9efaaad25c2a1dde6ce0fab3142ddcc7dd60 registry.redhat.io/rh-sso-7/sso75-openshift-rhel8@sha256:5522021e9081fa0f0163f75afedb9efaaad25c2a1dde6ce0fab3142ddcc7dd60 sha256:5555a6031cbb8eb09cfeb73cacaabefd5a5824637b047af69b981bc66bdd8b3c registry.redhat.io/fuse7/fuse-java-openshift@sha256:5555a6031cbb8eb09cfeb73cacaabefd5a5824637b047af69b981bc66bdd8b3c sha256:55a832a2dd32c4ab288b2c76e1c531bd6df07651010f7b9f8f983dff5ee584ab registry.redhat.io/ubi8/dotnet-80-runtime@sha256:55a832a2dd32c4ab288b2c76e1c531bd6df07651010f7b9f8f983dff5ee584ab sha256:55dc61c31ea50a8f7a45e993a9b3220097974948b5cd1ab3f317e7702e8cb6fc registry.access.redhat.com/ubi8/openjdk-21-runtime@sha256:55dc61c31ea50a8f7a45e993a9b3220097974948b5cd1ab3f317e7702e8cb6fc sha256:561ade81cb6455e0de35573d7ca68ac3f043c86385aac9e274dd53a7a75d3c16 registry.redhat.io/rh-sso-7/sso76-openshift-rhel8@sha256:561ade81cb6455e0de35573d7ca68ac3f043c86385aac9e274dd53a7a75d3c16 sha256:57ab1f0ad24e02143978fc79c5219a02c4d6a5a27225ee5454c85a47839b6ddc registry.redhat.io/ubi8/openjdk-8-runtime@sha256:57ab1f0ad24e02143978fc79c5219a02c4d6a5a27225ee5454c85a47839b6ddc sha256:57fa0cb158aa31193908df27fc707afcfdd4bdaf93b3286f5602d5f804e1927f registry.redhat.io/fuse7/fuse-java-openshift@sha256:57fa0cb158aa31193908df27fc707afcfdd4bdaf93b3286f5602d5f804e1927f sha256:59b88fb0c467ca43bf3c1af6bfd8777577638dd8079f995cdb20b6f4e20ce0b6 registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:59b88fb0c467ca43bf3c1af6bfd8777577638dd8079f995cdb20b6f4e20ce0b6 sha256:5bb11da5abfe6a1bc937f0439f08fa27efc96c438106b6a545be62672a39fc26 registry.redhat.io/fuse7/fuse-eap-openshift@sha256:5bb11da5abfe6a1bc937f0439f08fa27efc96c438106b6a545be62672a39fc26 sha256:5c16087bf81b2285e8815970b44385f7c98612fd6eb1af23b6d89db86004efa3 registry.redhat.io/rhel8/mariadb-103@sha256:5c16087bf81b2285e8815970b44385f7c98612fd6eb1af23b6d89db86004efa3 sha256:5c5d7468f6838b6a714482e62ea956659212f3415ec8f69989f75eb6d8744a6e quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5c5d7468f6838b6a714482e62ea956659212f3415ec8f69989f75eb6d8744a6e sha256:5dfcc5b000a1fab4be66bbd43e4db44b61176e2bcba9c24f6fe887dea9b7fd49 registry.access.redhat.com/ubi8/dotnet-60-runtime@sha256:5dfcc5b000a1fab4be66bbd43e4db44b61176e2bcba9c24f6fe887dea9b7fd49 sha256:5e4e0fd08883744f35560eac43b8120f6324d9b488eb7a7716955fb98ddbace5 quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5e4e0fd08883744f35560eac43b8120f6324d9b488eb7a7716955fb98ddbace5 sha256:5f474ef095d7b7aabc5b1c60818201aca66343856fb67eb93751f3b4a82d391b registry.redhat.io/rhscl/postgresql-13-rhel7@sha256:5f474ef095d7b7aabc5b1c60818201aca66343856fb67eb93751f3b4a82d391b sha256:5fb3543c0d42146f0506c1ea4d09575131da6a2f27885729b7cfce13a0fa90e3 registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:5fb3543c0d42146f0506c1ea4d09575131da6a2f27885729b7cfce13a0fa90e3 sha256:603d10af5e3476add5b5726fdef893033869ae89824ee43949a46c9f004ef65d registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:603d10af5e3476add5b5726fdef893033869ae89824ee43949a46c9f004ef65d sha256:61555b923dabe4ff734279ed1bdb9eb6d450c760e1cc04463cf88608ac8d1338 registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:61555b923dabe4ff734279ed1bdb9eb6d450c760e1cc04463cf88608ac8d1338 sha256:64acf3403b5c2c85f7a28f326c63f1312b568db059c66d90b34e3c59fde3a74b 
registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:64acf3403b5c2c85f7a28f326c63f1312b568db059c66d90b34e3c59fde3a74b sha256:663eb81388ae8f824e7920c272f6d2e2274cf6c140d61416607261cdce9d50e2 registry.access.redhat.com/ubi8/openjdk-11@sha256:663eb81388ae8f824e7920c272f6d2e2274cf6c140d61416607261cdce9d50e2 sha256:6740d72db4de99ecb4652cff89a239242afd150d6ccf6ed0ebff89ffcbbc649e registry.redhat.io/ubi8/go-toolset@sha256:6740d72db4de99ecb4652cff89a239242afd150d6ccf6ed0ebff89ffcbbc649e sha256:67a2ae44e1bd87166e5c70f8147ccc9064ddfc8f43170bc92db9b12568cc7f73 registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:67a2ae44e1bd87166e5c70f8147ccc9064ddfc8f43170bc92db9b12568cc7f73 sha256:67fee4b64b269f5666a1051d806635b675903ef56d07b7cc019d3d59ff1aa97c registry.access.redhat.com/ubi8/openjdk-11@sha256:67fee4b64b269f5666a1051d806635b675903ef56d07b7cc019d3d59ff1aa97c sha256:6a9e81b2eea2f32f2750909b6aa037c2c2e68be3bc9daf3c7a3163c9e1df379f registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:6a9e81b2eea2f32f2750909b6aa037c2c2e68be3bc9daf3c7a3163c9e1df379f sha256:6c009f430da02bdcff618a7dcd085d7d22547263eeebfb8d6377a4cf6f58769d registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:6c009f430da02bdcff618a7dcd085d7d22547263eeebfb8d6377a4cf6f58769d sha256:6cb572c7356c9d9ae6d2760491095de4b1797471ea97c50b330bc2ce1168da56 registry.access.redhat.com/ubi8/dotnet-90@sha256:6cb572c7356c9d9ae6d2760491095de4b1797471ea97c50b330bc2ce1168da56 sha256:6d07cbaef7869b2e0d878740ad685b150f3d8ab960544c881916a01f25f9b6ef registry.redhat.io/jboss-webserver-5/jws57-openjdk8-openshift-rhel8@sha256:6d07cbaef7869b2e0d878740ad685b150f3d8ab960544c881916a01f25f9b6ef sha256:6d8afb6d1fced4deee8de43b935e2bf5164c81bc26bee01da0fce69b74b63f83 registry.redhat.io/fuse7/fuse-eap-openshift-jdk11-rhel8@sha256:6d8afb6d1fced4deee8de43b935e2bf5164c81bc26bee01da0fce69b74b63f83 sha256:70a21b3f93c05843ce9d07f125b1464436caf01680bb733754a2a5df5bc3b11b registry.access.redhat.com/ubi8/dotnet-60@sha256:70a21b3f93c05843ce9d07f125b1464436caf01680bb733754a2a5df5bc3b11b sha256:7164a06e9ba98a3ce9991bd7019512488efe30895175bb463e255f00eb9421fd registry.access.redhat.com/ubi8/openjdk-8@sha256:7164a06e9ba98a3ce9991bd7019512488efe30895175bb463e255f00eb9421fd sha256:7201e059b92acc55fe9fe1cc390d44e92f0e2af297fbe52b3f1bb56327f59624 registry.redhat.io/ubi8/dotnet-80@sha256:7201e059b92acc55fe9fe1cc390d44e92f0e2af297fbe52b3f1bb56327f59624 sha256:739fac452e78a21a16b66e0451b85590b9e48ec7a1ed3887fbb9ed85cf564275 registry.access.redhat.com/ubi8/openjdk-17@sha256:739fac452e78a21a16b66e0451b85590b9e48ec7a1ed3887fbb9ed85cf564275 sha256:74051f86b00fb102e34276f03a310c16bc57b9c2a001a56ba66359e15ee48ba6 registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:74051f86b00fb102e34276f03a310c16bc57b9c2a001a56ba66359e15ee48ba6 sha256:74cc70f8d3698d41793d19b0e23b0a79448d02ada93f402a1714d26d164e4c1d registry.redhat.io/jboss-eap-7/eap74-openjdk11-openshift-rhel8@sha256:74cc70f8d3698d41793d19b0e23b0a79448d02ada93f402a1714d26d164e4c1d sha256:74efcab05b844a1c226bb18221a5309e7364b48d52757a809787e9b58e235ed9 registry.redhat.io/rhel8/postgresql-13@sha256:74efcab05b844a1c226bb18221a5309e7364b48d52757a809787e9b58e235ed9 sha256:75fbf4aa5c14bba44b5dfbf6673dc80ce35376f626df3a102a5a2edf8141cd34 registry.redhat.io/jboss-webserver-5/jws57-openjdk11-openshift-rhel8@sha256:75fbf4aa5c14bba44b5dfbf6673dc80ce35376f626df3a102a5a2edf8141cd34 sha256:7711108ef60ef6f0536bfa26914af2afaf6455ce6e4c4abd391e31a2d95d0178 
registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:7711108ef60ef6f0536bfa26914af2afaf6455ce6e4c4abd391e31a2d95d0178 sha256:77e5675e066943925eb228f51434080f10bb0be323c9c55ac62d223a0dd1b250 registry.redhat.io/ubi9/ruby-33@sha256:77e5675e066943925eb228f51434080f10bb0be323c9c55ac62d223a0dd1b250 sha256:789f5edf1369a40bf56ca698eafee86b74a8d53b39d100bbb91279aaebceb6d5 registry.redhat.io/jboss-webserver-5/jws57-openjdk8-openshift-rhel8@sha256:789f5edf1369a40bf56ca698eafee86b74a8d53b39d100bbb91279aaebceb6d5 sha256:78af15475eac13d2ff439b33a9c3bdd39147858a824c420e8042fd5f35adce15 registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:78af15475eac13d2ff439b33a9c3bdd39147858a824c420e8042fd5f35adce15 sha256:78bf175cecb15524b2ef81bff8cc11acdf7c0f74c08417f0e443483912e4878a registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:78bf175cecb15524b2ef81bff8cc11acdf7c0f74c08417f0e443483912e4878a sha256:7bcc365e0ba823ed020ee6e6c3e0c23be5871c8dea3f7f1a65029002c83f9e55 registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:7bcc365e0ba823ed020ee6e6c3e0c23be5871c8dea3f7f1a65029002c83f9e55 sha256:7c8a088031661d94022418e93fb63744c38e1c4cff93ea3b95c096a290c2b7a3 quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7c8a088031661d94022418e93fb63744c38e1c4cff93ea3b95c096a290c2b7a3 sha256:7d0f3dfa01d60f720089fa3a7adf5ce417c12be54872c5973a267e029ba3561b quay.io/infrawatch-operators/service-telemetry-operator-bundle@sha256:7d0f3dfa01d60f720089fa3a7adf5ce417c12be54872c5973a267e029ba3561b sha256:7de877b0e748cdb47cb702400f3ddaa3c3744a022887e2213c2bb27775ab4b25 registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:7de877b0e748cdb47cb702400f3ddaa3c3744a022887e2213c2bb27775ab4b25 sha256:7ef75cdbc399425105060771cb8e700198cc0bddcfb60bf4311bf87ea62fd440 registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:7ef75cdbc399425105060771cb8e700198cc0bddcfb60bf4311bf87ea62fd440 sha256:7ff7af22f08d9dc8043a73013d629ee03277ff18ad94001092de70fd5917e9e8 registry.redhat.io/ubi10/nodejs-22@sha256:7ff7af22f08d9dc8043a73013d629ee03277ff18ad94001092de70fd5917e9e8 sha256:8027301bb8716941e2a15b7d31b055ec7eba327ad3b7c72fb5accfa077a32521 registry.redhat.io/rhscl/postgresql-10-rhel7@sha256:8027301bb8716941e2a15b7d31b055ec7eba327ad3b7c72fb5accfa077a32521 sha256:81684e422367a075ac113e69ea11d8721416ce4bedea035e25313c5e726fd7d1 registry.access.redhat.com/ubi8/openjdk-8@sha256:81684e422367a075ac113e69ea11d8721416ce4bedea035e25313c5e726fd7d1 sha256:85093d0f55d06662420925f64e914ff05499c79c2ede3ef80085a44d40f16a80 registry.redhat.io/jboss-webserver-5/jws57-openjdk8-openshift-rhel8@sha256:85093d0f55d06662420925f64e914ff05499c79c2ede3ef80085a44d40f16a80 sha256:852d8ea448cfb93036fc5f1b69f58249bc2e4454d326bd927839c5de6ce50a7b registry.redhat.io/fuse7/fuse-eap-openshift@sha256:852d8ea448cfb93036fc5f1b69f58249bc2e4454d326bd927839c5de6ce50a7b sha256:865ce4a073a00133133ba2d375cb9529dab8d10cf2aebd5537e9028f21aa261b registry.redhat.io/ubi10/nginx-126@sha256:865ce4a073a00133133ba2d375cb9529dab8d10cf2aebd5537e9028f21aa261b sha256:868224c3b7c309b9e04003af70a5563af8e4c662f0c53f2a7606e0573c9fad85 registry.access.redhat.com/ubi8/openjdk-11@sha256:868224c3b7c309b9e04003af70a5563af8e4c662f0c53f2a7606e0573c9fad85 sha256:88751105dd023552164e3c312742986b011078becb28f1464f1524d134925d73 registry.access.redhat.com/ubi8/dotnet-90-runtime@sha256:88751105dd023552164e3c312742986b011078becb28f1464f1524d134925d73 sha256:887cc9e7fd3ce89adf6233aaf52b0243930e1a958190a09bf37c10f069890ee7 
registry.redhat.io/ubi8/nodejs-22@sha256:887cc9e7fd3ce89adf6233aaf52b0243930e1a958190a09bf37c10f069890ee7 sha256:8968c86f5e2831796cf5f464c87a911b5513fd543b7f3485ccc497fe05ad6bca registry.redhat.io/fuse7/fuse-eap-openshift@sha256:8968c86f5e2831796cf5f464c87a911b5513fd543b7f3485ccc497fe05ad6bca sha256:8a5b580b76c2fc2dfe55d13bb0dd53e8c71d718fc1a3773264b1710f49060222 registry.access.redhat.com/ubi8/openjdk-8@sha256:8a5b580b76c2fc2dfe55d13bb0dd53e8c71d718fc1a3773264b1710f49060222 sha256:8a7d4c245418f8099293270f8bbcf7a4207839c4c4ef9974c2e16e303329edf3 registry.redhat.io/jboss-webserver-5/jws57-openjdk11-openshift-rhel8@sha256:8a7d4c245418f8099293270f8bbcf7a4207839c4c4ef9974c2e16e303329edf3 sha256:8abcc25fba7202b68818271353a9203677ac3c2d638dafc84e6b45e68d913f59 registry.redhat.io/ubi8/openjdk-8@sha256:8abcc25fba7202b68818271353a9203677ac3c2d638dafc84e6b45e68d913f59 sha256:8be99c30a4e5b021129310847bddca64a93fb38b0c8dfeac482b4c28de062e5a registry.redhat.io/ubi8/openjdk-11@sha256:8be99c30a4e5b021129310847bddca64a93fb38b0c8dfeac482b4c28de062e5a sha256:8c148fc54caeb860262c3710675c97c58aba79af2d3c76de795b97143aae0e3f registry.redhat.io/fuse7/fuse-java-openshift@sha256:8c148fc54caeb860262c3710675c97c58aba79af2d3c76de795b97143aae0e3f sha256:8c1d5419ebfe1eb1a888c32a456f48ea01e1a6a33c7db59acf689ede1d944516 registry.redhat.io/ubi9/python-39@sha256:8c1d5419ebfe1eb1a888c32a456f48ea01e1a6a33c7db59acf689ede1d944516 sha256:8d57f273f8521c9b2d55756dbff05559184d1aeec46517e46c71de97cd72c12b registry.redhat.io/rhel8/postgresql-12@sha256:8d57f273f8521c9b2d55756dbff05559184d1aeec46517e46c71de97cd72c12b sha256:8e83f3ef3b5ad4bd7c4002d0201e4d5dd26a158c0be3ad29405ff4800d5661b8 registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:8e83f3ef3b5ad4bd7c4002d0201e4d5dd26a158c0be3ad29405ff4800d5661b8 sha256:9036a59a8275f9c205ef5fc674f38c0495275a1a7912029f9a784406bb00b1f5 registry.access.redhat.com/ubi8/openjdk-11@sha256:9036a59a8275f9c205ef5fc674f38c0495275a1a7912029f9a784406bb00b1f5 sha256:919dbd673de74ca9084a6e2a8e3d3bc6e703857821cde1c1849f1863b2f11fdc quay.io/infrawatch-operators/smart-gateway-operator-bundle@sha256:919dbd673de74ca9084a6e2a8e3d3bc6e703857821cde1c1849f1863b2f11fdc sha256:920ff7e5efc777cb523669c425fd7b553176c9f4b34a85ceddcb548c2ac5f78a registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:920ff7e5efc777cb523669c425fd7b553176c9f4b34a85ceddcb548c2ac5f78a sha256:936daac34be9105cb05ca1eb7bcf89b280df38060709cf581df03fb69362e4df registry.redhat.io/ubi8/nodejs-20-minimal@sha256:936daac34be9105cb05ca1eb7bcf89b280df38060709cf581df03fb69362e4df sha256:953aeb7c686ebe9359eb9e020aabaa011e47de9a0c38a3e97f85ff038abef5e6 registry.redhat.io/jboss-eap-7/eap-xp3-openjdk11-openshift-rhel8@sha256:953aeb7c686ebe9359eb9e020aabaa011e47de9a0c38a3e97f85ff038abef5e6 sha256:956ad570b06da524a856c6c2b421c8b4aab160fc4565cde798c72fa050c2dedf registry.redhat.io/fuse7/fuse-eap-openshift@sha256:956ad570b06da524a856c6c2b421c8b4aab160fc4565cde798c72fa050c2dedf sha256:963cb8b96a26857034d16753409e0b48eb8c7e2702ad97ea53136a705946e535 registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:963cb8b96a26857034d16753409e0b48eb8c7e2702ad97ea53136a705946e535 sha256:9a1ff2292e9e3aa41290373a931e9b52de2b206e4da35dc12dc553f7b0e58146 quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9a1ff2292e9e3aa41290373a931e9b52de2b206e4da35dc12dc553f7b0e58146 sha256:9ab26cb4005e9b60fd6349950957bbd0120efba216036da53c547c6f1c9e5e7f registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:9ab26cb4005e9b60fd6349950957bbd0120efba216036da53c547c6f1c9e5e7f 
sha256:9b5d2fc574a13613f18fa983ac2901593c1e812836e918095bc3d15b6cc4ba57 registry.redhat.io/ubi8/openjdk-17-runtime@sha256:9b5d2fc574a13613f18fa983ac2901593c1e812836e918095bc3d15b6cc4ba57 sha256:9c10ee657f8d4fc4cee2d6f3fca56a8ded4354b90beea00e8274d1927e0fe8c7 registry.redhat.io/jboss-webserver-5/jws57-openjdk8-openshift-rhel8@sha256:9c10ee657f8d4fc4cee2d6f3fca56a8ded4354b90beea00e8274d1927e0fe8c7 sha256:9c8685e9b35e0262af34b42288252f421e0791efd835b5673cf9d10c90863a36 registry.redhat.io/ubi9/httpd-24@sha256:9c8685e9b35e0262af34b42288252f421e0791efd835b5673cf9d10c90863a36 sha256:9d0ed8688df061f7555046a61cec60f909b325e77dee6aec9d2350f81efa0b46 registry.redhat.io/fuse7/fuse-eap-openshift@sha256:9d0ed8688df061f7555046a61cec60f909b325e77dee6aec9d2350f81efa0b46 sha256:9d759db3bb650e5367216ce261779c5a58693fc7ae10f21cd264011562bd746d registry.access.redhat.com/ubi8/openjdk-8@sha256:9d759db3bb650e5367216ce261779c5a58693fc7ae10f21cd264011562bd746d sha256:9d895b1202aaf35bbe7b432736307c965d9d84fd91c06e41f8ac21c7ea0590a0 registry.redhat.io/ubi8/ruby-33@sha256:9d895b1202aaf35bbe7b432736307c965d9d84fd91c06e41f8ac21c7ea0590a0 sha256:9dea3590288a2f7e58af534af889123e044b0e4e03d179664bf5ac2206e9e91d registry.redhat.io/rhel8/mariadb-105@sha256:9dea3590288a2f7e58af534af889123e044b0e4e03d179664bf5ac2206e9e91d sha256:a0a6db2dcdb3d49e36bd0665e3e00f242a690391700e42cab14e86b154152bfd registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:a0a6db2dcdb3d49e36bd0665e3e00f242a690391700e42cab14e86b154152bfd sha256:a1f3f9545a7657a88c4bddbc00f4df862b7658f247e195704b6d9f8a0249c9fa registry.redhat.io/ubi10/httpd-24@sha256:a1f3f9545a7657a88c4bddbc00f4df862b7658f247e195704b6d9f8a0249c9fa sha256:a3ca570ae1293a2d88bd995b972f67a784a416cd9916a3d2bef7b38e23b88df3 registry.redhat.io/ubi8/php-74@sha256:a3ca570ae1293a2d88bd995b972f67a784a416cd9916a3d2bef7b38e23b88df3 sha256:a4c6773a5eb5183f6dad3b029aa2cc4b6715797985dc53a3faf972007e7ad4d3 registry.redhat.io/rhel9/postgresql-15@sha256:a4c6773a5eb5183f6dad3b029aa2cc4b6715797985dc53a3faf972007e7ad4d3 sha256:a4dcf2213376727c3da1d10efbea52f20c74674bfb06643723f195a849171c10 registry.redhat.io/fuse7/fuse-karaf-openshift-rhel8@sha256:a4dcf2213376727c3da1d10efbea52f20c74674bfb06643723f195a849171c10 sha256:a4e8d81c9e54234f84072295425120e358752d58af8297ecfbae5d4ad01e6a2e registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:a4e8d81c9e54234f84072295425120e358752d58af8297ecfbae5d4ad01e6a2e sha256:a8e4081414cfa644e212ded354dfee12706e63afb19a27c0c0ae2c8c64e56ca6 registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:a8e4081414cfa644e212ded354dfee12706e63afb19a27c0c0ae2c8c64e56ca6 sha256:aa02a20c2edf83a009746b45a0fd2e0b4a2b224fdef1581046f6afef38c0bee2 registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:aa02a20c2edf83a009746b45a0fd2e0b4a2b224fdef1581046f6afef38c0bee2 sha256:aabd8f354cad901a56c029dbd62483262f4c435a329882ba5c3f7041c8cc04f8 registry.redhat.io/ubi9/php-83@sha256:aabd8f354cad901a56c029dbd62483262f4c435a329882ba5c3f7041c8cc04f8 sha256:abf2ce8a3db39549eb583fd38851e51a1d34c3448225d0cf1aede35e5e0cccfd registry.redhat.io/ubi8/perl-526@sha256:abf2ce8a3db39549eb583fd38851e51a1d34c3448225d0cf1aede35e5e0cccfd sha256:acc20d8b3eb50bef50abda0e44d46d353ba28be950b1685cc8053c977b31eaf6 registry.redhat.io/rhel9/mariadb-105@sha256:acc20d8b3eb50bef50abda0e44d46d353ba28be950b1685cc8053c977b31eaf6 sha256:adc5b484c12f915309a95acb71890e4a1a8148d5dadd6cc22d0794cdab81557b 
registry.redhat.io/fuse7/fuse-java-openshift@sha256:adc5b484c12f915309a95acb71890e4a1a8148d5dadd6cc22d0794cdab81557b sha256:ade3b81041a336ef1a37f3c52f85fc0b92bd62b76f304a86d262770110e3fbab registry.redhat.io/ubi8/nginx-122@sha256:ade3b81041a336ef1a37f3c52f85fc0b92bd62b76f304a86d262770110e3fbab sha256:ae7c07fccaaec3ad4a83a2309893b03e94010b2d046de8c38e3d5af45366f84c registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:ae7c07fccaaec3ad4a83a2309893b03e94010b2d046de8c38e3d5af45366f84c sha256:af6079ab6f381f2f8eb7175c9bacb93d0c72ff022e97f28520c97b1633b109e2 registry.redhat.io/ubi10/perl-540@sha256:af6079ab6f381f2f8eb7175c9bacb93d0c72ff022e97f28520c97b1633b109e2 sha256:af676d250bb4ced265fece19dbb847133717e18341777df3a57550f53f6207cb registry.redhat.io/rhel8/redis-6@sha256:af676d250bb4ced265fece19dbb847133717e18341777df3a57550f53f6207cb sha256:af9c08644ca057d83ef4b7d8de1489f01c5a52ff8670133b8a09162831b7fb34 registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:af9c08644ca057d83ef4b7d8de1489f01c5a52ff8670133b8a09162831b7fb34 sha256:b053401886c06581d3c296855525cc13e0613100a596ed007bb69d5f8e972346 registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:b053401886c06581d3c296855525cc13e0613100a596ed007bb69d5f8e972346 sha256:b163564be6ed5b80816e61a4ee31e42f42dbbf345253daac10ecc9fadf31baa3 registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:b163564be6ed5b80816e61a4ee31e42f42dbbf345253daac10ecc9fadf31baa3 sha256:b25e2af772c13ae5ff3339bc4bbdf52c49011e750f40b37a0b736cb82768a349 registry.redhat.io/rhel8/httpd-24@sha256:b25e2af772c13ae5ff3339bc4bbdf52c49011e750f40b37a0b736cb82768a349 sha256:b4cb02a4e7cb915b6890d592ed5b4ab67bcef19bf855029c95231f51dd071352 registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:b4cb02a4e7cb915b6890d592ed5b4ab67bcef19bf855029c95231f51dd071352 sha256:b52d47d62be6b57d4af722f98b3434016c99c54e04574236b017924046d323c0 registry.redhat.io/jboss-eap-7/eap74-openjdk11-runtime-openshift-rhel8@sha256:b52d47d62be6b57d4af722f98b3434016c99c54e04574236b017924046d323c0 sha256:b77dec59a72e9b6323e6fa2617f588f07518f44d2e9f6aa8f2ccd83d90e40203 quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b77dec59a72e9b6323e6fa2617f588f07518f44d2e9f6aa8f2ccd83d90e40203 sha256:b80a514f136f738736d6bf654dc3258c13b04a819e001dd8a39ef2f7475fd9d9 registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:b80a514f136f738736d6bf654dc3258c13b04a819e001dd8a39ef2f7475fd9d9 sha256:b838fa18dab68d43a19f0c329c3643850691b8f9915823c4f8d25685eb293a11 registry.access.redhat.com/ubi8/openjdk-8@sha256:b838fa18dab68d43a19f0c329c3643850691b8f9915823c4f8d25685eb293a11 sha256:b85cbdbc289752c91ac7f468cffef916fe9ab01865f3e32cfcc44ccdd633b168 registry.access.redhat.com/ubi8/openjdk-11@sha256:b85cbdbc289752c91ac7f468cffef916fe9ab01865f3e32cfcc44ccdd633b168 sha256:ba09ffc90313f71c748e38a6b7c68b20f4e42945eb88349a6596dea61c517637 registry.redhat.io/fuse7/fuse-eap-openshift-jdk8-rhel7@sha256:ba09ffc90313f71c748e38a6b7c68b20f4e42945eb88349a6596dea61c517637 sha256:ba0bb0b1b9bed00d24bd73b59b6a3f7a46714ba1c0e1b900572dc580eedde68c registry.redhat.io/fuse7/fuse-java-openshift@sha256:ba0bb0b1b9bed00d24bd73b59b6a3f7a46714ba1c0e1b900572dc580eedde68c sha256:babbc5613f8ad380b2b85564d74d1edfbb345a9481fa3c1891980bb75169c079 registry.redhat.io/jboss-eap-7/eap-xp4-openjdk11-openshift-rhel8@sha256:babbc5613f8ad380b2b85564d74d1edfbb345a9481fa3c1891980bb75169c079 sha256:bae3cc0e61e2d73c8dd1384cfa97c6e875ff4a5da7f6332075eb5cd95bc90a7c 
registry.redhat.io/ubi8/nodejs-22-minimal@sha256:bae3cc0e61e2d73c8dd1384cfa97c6e875ff4a5da7f6332075eb5cd95bc90a7c sha256:bc7fd6202c086ba5dec4dc27c888b96505ffbd37d1e124cd4abb72bd13a8f237 registry.redhat.io/fuse7/fuse-java-openshift-jdk11-rhel8@sha256:bc7fd6202c086ba5dec4dc27c888b96505ffbd37d1e124cd4abb72bd13a8f237 sha256:bcb0e15cc9d2d3449f0b1acac7b0275035a80e1b3b835391b5464f7bf4553b89 registry.access.redhat.com/ubi8/openjdk-17@sha256:bcb0e15cc9d2d3449f0b1acac7b0275035a80e1b3b835391b5464f7bf4553b89 sha256:be2c749e20118e80b11526eb52993e2055e035b211b2621dea9094dd4dcd9446 registry.redhat.io/rhel8/mysql-80@sha256:be2c749e20118e80b11526eb52993e2055e035b211b2621dea9094dd4dcd9446 sha256:be51ee43b1596078a17756f38a0017e9338c902f9094f1ad677844d165a02d43 registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:be51ee43b1596078a17756f38a0017e9338c902f9094f1ad677844d165a02d43 sha256:bf5e518dba2aa935829d9db88d933a264e54ffbfa80041b41287fd70c1c35ba5 registry.access.redhat.com/ubi8/openjdk-8@sha256:bf5e518dba2aa935829d9db88d933a264e54ffbfa80041b41287fd70c1c35ba5 sha256:c1360b136eae3f3e6eab6e446d6d7b78c53e829a61e2a0b802d55c7ba134ca0c registry.redhat.io/ubi10/nodejs-22-minimal@sha256:c1360b136eae3f3e6eab6e446d6d7b78c53e829a61e2a0b802d55c7ba134ca0c sha256:c5f7af4c8188bf4619eeaa0f20094bbf5fbfbd824c973ee7da722a48d67300a9 registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:c5f7af4c8188bf4619eeaa0f20094bbf5fbfbd824c973ee7da722a48d67300a9 sha256:c6edc9920c6038890d77d1daa135b7ae4a5a7fe2213b168dbc12311de0445791 registry.redhat.io/rhscl/mysql-80-rhel7@sha256:c6edc9920c6038890d77d1daa135b7ae4a5a7fe2213b168dbc12311de0445791 sha256:c9134d992b1ee68be3450debd70f96b9b353fec15fca331afcf7adb26a7f2f09 registry.redhat.io/rhel9/redis-7@sha256:c9134d992b1ee68be3450debd70f96b9b353fec15fca331afcf7adb26a7f2f09 sha256:c93f8f7e2d9522be4952071044457efde0a6c6fd59bd9adcd01e07d164d8235c registry.redhat.io/fuse7/fuse-karaf-openshift-rhel8@sha256:c93f8f7e2d9522be4952071044457efde0a6c6fd59bd9adcd01e07d164d8235c sha256:caba895933209aa9a4f3121f9ec8e5e8013398ab4f72bd3ff255227aad8d2c3e registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:caba895933209aa9a4f3121f9ec8e5e8013398ab4f72bd3ff255227aad8d2c3e sha256:cbc31c1b0625cb01f2b67d83e7b375d08d6aa756f24cf32fc3e82f0b76a4c976 registry.redhat.io/rhscl/redis-6-rhel7@sha256:cbc31c1b0625cb01f2b67d83e7b375d08d6aa756f24cf32fc3e82f0b76a4c976 sha256:cc6290111b86982db2789ed392d004c2c48ff7e5407a82bbeaa6d9ceb8a7963f registry.redhat.io/fuse7/fuse-java-openshift-rhel8@sha256:cc6290111b86982db2789ed392d004c2c48ff7e5407a82bbeaa6d9ceb8a7963f sha256:ce5c0becf829aca80734b4caf3ab6b76cb00f7d78f4e39fb136636a764dea7f6 registry.access.redhat.com/ubi8/openjdk-11@sha256:ce5c0becf829aca80734b4caf3ab6b76cb00f7d78f4e39fb136636a764dea7f6 sha256:cf965175e27f89fecae4982ff88f15b5711b60af5215fd9ef5ff1b6f6ddf9bcd registry.redhat.io/ubi10/ruby-33@sha256:cf965175e27f89fecae4982ff88f15b5711b60af5215fd9ef5ff1b6f6ddf9bcd sha256:cfd8c4ac1c495b766dd3ff1a85c35afe092858f8f65b52a5b044811719482236 registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:cfd8c4ac1c495b766dd3ff1a85c35afe092858f8f65b52a5b044811719482236 sha256:d09ebdfdd48b8c132bc13df5e20ac3ca00a2a0a25df0b552a6ce575383d71165 registry.redhat.io/ubi8/python-311@sha256:d09ebdfdd48b8c132bc13df5e20ac3ca00a2a0a25df0b552a6ce575383d71165 sha256:d0b4c0b12d8dd76a31a67bd429ce78d327d1c4fcd4896dd77557ee484379defa registry.redhat.io/ubi9/php-80@sha256:d0b4c0b12d8dd76a31a67bd429ce78d327d1c4fcd4896dd77557ee484379defa 
sha256:d138037e44e951accd8941222908b6d2291c8b2dfd4fad49195f83d0c5e6e77f registry.redhat.io/rhel9/redis-6@sha256:d138037e44e951accd8941222908b6d2291c8b2dfd4fad49195f83d0c5e6e77f sha256:d186c94f8843f854d77b2b05d10efb0d272f88a4bf4f1d8ebe304428b9396392 registry.access.redhat.com/ubi8/openjdk-17@sha256:d186c94f8843f854d77b2b05d10efb0d272f88a4bf4f1d8ebe304428b9396392 sha256:d19dcf07e61e96eaff277f3f1b41a802aee2031c561d63012d99b1f2ed51467e registry.redhat.io/ubi9/nginx-120@sha256:d19dcf07e61e96eaff277f3f1b41a802aee2031c561d63012d99b1f2ed51467e sha256:d212c0ad7ec7b340beff1776c0216ea8c0aa66423538a84910c36e00db2366b5 registry.redhat.io/fuse7/fuse-karaf-openshift-rhel8@sha256:d212c0ad7ec7b340beff1776c0216ea8c0aa66423538a84910c36e00db2366b5 sha256:d2838047b28f1385d57abc68a907830d48df647d3a06bb6ab155567097d940c7 registry.redhat.io/ubi9/nginx-126@sha256:d2838047b28f1385d57abc68a907830d48df647d3a06bb6ab155567097d940c7 sha256:d2e75dac02681ed5aded89115b736ba5e83011294686cd6d04780aebffc0ff5d registry.redhat.io/fuse7/fuse-eap-openshift@sha256:d2e75dac02681ed5aded89115b736ba5e83011294686cd6d04780aebffc0ff5d sha256:d2f17aaf2f871fda5620466d69ac67b9c355c0bae5912a1dbef9a51ca8813e50 registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:d2f17aaf2f871fda5620466d69ac67b9c355c0bae5912a1dbef9a51ca8813e50 sha256:d4bd4b422ffcfd52334f42f372d511d68496991b47a24f7dacfb032edb250475 registry.redhat.io/ubi9/nodejs-22@sha256:d4bd4b422ffcfd52334f42f372d511d68496991b47a24f7dacfb032edb250475 sha256:d64489de1b4cb65c6d6de5add7ad6e9f4a6817c8e62987ad5859814085beac06 registry.redhat.io/fuse7/fuse-java-openshift-jdk11-rhel8@sha256:d64489de1b4cb65c6d6de5add7ad6e9f4a6817c8e62987ad5859814085beac06 sha256:d88de3935a4ad6a2a8a04a49733d1e7cba14688cd5c8081b78538a86fc499d5a registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:d88de3935a4ad6a2a8a04a49733d1e7cba14688cd5c8081b78538a86fc499d5a sha256:d8d642e25fc863ed0ee603addee008db975b1c6b73a02a1e5b4bc446fbf5ec76 registry.redhat.io/fuse7/fuse-eap-openshift-jdk8-rhel7@sha256:d8d642e25fc863ed0ee603addee008db975b1c6b73a02a1e5b4bc446fbf5ec76 sha256:da558fa9ad7c357ed794eb549ac12a799eab97951f2e3cbc9501e413a348119a registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:da558fa9ad7c357ed794eb549ac12a799eab97951f2e3cbc9501e413a348119a sha256:db3f5192237bfdab2355304f17916e09bc29d6d529fdec48b09a08290ae35905 registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:db3f5192237bfdab2355304f17916e09bc29d6d529fdec48b09a08290ae35905 sha256:dbe9905fe2b20ed30b0e2d64543016fa9c145eeb5a678f720ba9d2055f0c9f88 registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:dbe9905fe2b20ed30b0e2d64543016fa9c145eeb5a678f720ba9d2055f0c9f88 sha256:dc84fed0f6f40975a2277c126438c8aa15c70eeac75981dbaa4b6b853eff61a6 registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:dc84fed0f6f40975a2277c126438c8aa15c70eeac75981dbaa4b6b853eff61a6 sha256:de3e8e869ab92ab5ad19919034b845b3fb01c7e57d49caf5899b68c0a4461c1b registry.redhat.io/ubi8/nginx-124@sha256:de3e8e869ab92ab5ad19919034b845b3fb01c7e57d49caf5899b68c0a4461c1b sha256:deaaa8255efc84a6a7fd4d1b6e5593eaab6c2753e1f2f84a5b83d4e047f03f3f registry.redhat.io/jboss-eap-7/eap-xp3-openjdk11-runtime-openshift-rhel8@sha256:deaaa8255efc84a6a7fd4d1b6e5593eaab6c2753e1f2f84a5b83d4e047f03f3f sha256:df2b4b8e8f4b7b183c443653b6f11ac529d7c6421972078a464b765ab31b075d registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:df2b4b8e8f4b7b183c443653b6f11ac529d7c6421972078a464b765ab31b075d sha256:df8858f0c01ae1657a14234a94f6785cbb2fba7f12c9d0325f427a3f1284481b 
registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:df8858f0c01ae1657a14234a94f6785cbb2fba7f12c9d0325f427a3f1284481b sha256:e12d4107cfe12b7b9f3817bde90dcb07ff3ee7e3b6482120fec6d37583df727f registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:e12d4107cfe12b7b9f3817bde90dcb07ff3ee7e3b6482120fec6d37583df727f sha256:e241dd0049956f75f989687e80fafe7da2f2f25ef4ea762bdaabfe2161d20f64 registry.redhat.io/jboss-webserver-5/jws57-openjdk11-openshift-rhel8@sha256:e241dd0049956f75f989687e80fafe7da2f2f25ef4ea762bdaabfe2161d20f64 sha256:e3727c305a54ac48c07688089e18e9abbe4ce98712a7ce59aa4b9ad7b7bc6514 registry.redhat.io/ubi9/ruby-30@sha256:e3727c305a54ac48c07688089e18e9abbe4ce98712a7ce59aa4b9ad7b7bc6514 sha256:e379727c63710610a1d6843aac4fe3c9e5d81fc0e58c171e88c55da4be1499f0 registry.redhat.io/fuse7/fuse-java-openshift-rhel8@sha256:e379727c63710610a1d6843aac4fe3c9e5d81fc0e58c171e88c55da4be1499f0 sha256:e37aeaeb0159194a9855350e13e399470f39ce340d6381069933742990741fb8 registry.access.redhat.com/ubi8/openjdk-17@sha256:e37aeaeb0159194a9855350e13e399470f39ce340d6381069933742990741fb8 sha256:e4223a60b887ec24cad7dd70fdb6c3f2c107fb7118331be6f45d626219cfe7f3 registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:e4223a60b887ec24cad7dd70fdb6c3f2c107fb7118331be6f45d626219cfe7f3 sha256:e4b1599ba6e88f6df7c4e67d6397371d61b6829d926411184e9855e71e840b8c registry.redhat.io/openjdk/openjdk-11-rhel7@sha256:e4b1599ba6e88f6df7c4e67d6397371d61b6829d926411184e9855e71e840b8c sha256:e4be2fb7216f432632819b2441df42a5a0063f7f473c2923ca6912b2d64b7494 registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:e4be2fb7216f432632819b2441df42a5a0063f7f473c2923ca6912b2d64b7494 sha256:e61420525f02c4e85ba9b79b9702880a11907d8ab79b9b0a36dbf559ce0f5234 registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:e61420525f02c4e85ba9b79b9702880a11907d8ab79b9b0a36dbf559ce0f5234 sha256:e6fc695cfd77ccff83ef22148c54f45ba8af41e2b69f6146d7ad588cb2aed780 registry.redhat.io/jboss-eap-7/eap74-openjdk8-runtime-openshift-rhel7@sha256:e6fc695cfd77ccff83ef22148c54f45ba8af41e2b69f6146d7ad588cb2aed780 sha256:e7713979a921ec8d2506fcb3fb3ee960fc757262f4567319ee5aa2b351d4f778 quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e7713979a921ec8d2506fcb3fb3ee960fc757262f4567319ee5aa2b351d4f778 sha256:e851770fd181ef49193111f7afcdbf872ad23f3a8234e0e07a742c4ca2882c3d registry.access.redhat.com/ubi8/openjdk-11@sha256:e851770fd181ef49193111f7afcdbf872ad23f3a8234e0e07a742c4ca2882c3d sha256:e90172ca0f09acf5db1721bd7df304dffd184e00145072132cb71c7f0797adf6 registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:e90172ca0f09acf5db1721bd7df304dffd184e00145072132cb71c7f0797adf6 sha256:ea065c1e423af8de839c84e1520131fe4ed4a1bda0ae8eeb078fc8a8368e9f0c registry.redhat.io/ubi9/nodejs-22-minimal@sha256:ea065c1e423af8de839c84e1520131fe4ed4a1bda0ae8eeb078fc8a8368e9f0c sha256:eaaf8ab6d318d72cc4e465609b213f4d9d9171f222f59ae012fa5b96fb3e4ea9 registry.redhat.io/rhscl/mariadb-103-rhel7@sha256:eaaf8ab6d318d72cc4e465609b213f4d9d9171f222f59ae012fa5b96fb3e4ea9 sha256:eab456afb39ed4607b2ee61c8c7635ab1c5ff8f8bddf7640c557e792504d545f registry.redhat.io/ocp-tools-4/jenkins-rhel8@sha256:eab456afb39ed4607b2ee61c8c7635ab1c5ff8f8bddf7640c557e792504d545f sha256:ebfca7a4e3506ee7f317acc7503ad46f2e1cf5605347a1b75fdd02bc77c7de02 registry.redhat.io/fuse7/fuse-eap-openshift-jdk11-rhel8@sha256:ebfca7a4e3506ee7f317acc7503ad46f2e1cf5605347a1b75fdd02bc77c7de02 sha256:ec1985bf5fca4d79054e4beadf7617c3b6400bad2325e47b00a90f7fe07540de 
registry.redhat.io/ubi8/ruby-25@sha256:ec1985bf5fca4d79054e4beadf7617c3b6400bad2325e47b00a90f7fe07540de sha256:ec784f172735873a5893504e07c57dcbd56b7b049a395c5629c6058dbfac21a3 registry.redhat.io/ubi8/dotnet-90@sha256:ec784f172735873a5893504e07c57dcbd56b7b049a395c5629c6058dbfac21a3 sha256:ed13779a6051e3b9588f5ebea6b66c0a2979512fdcc99bca1f910a577fb4c34a quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ed13779a6051e3b9588f5ebea6b66c0a2979512fdcc99bca1f910a577fb4c34a sha256:ed2da0eed3f495f5455f490cdf7f7943420f64b0cf541271a2d315a3f9e9744c registry.redhat.io/rhel8/postgresql-10@sha256:ed2da0eed3f495f5455f490cdf7f7943420f64b0cf541271a2d315a3f9e9744c sha256:eeb0c539ee7ffbd2f1e6eb326204c6f69c554ac5acf0454e9d68d75ffe954f7c registry.redhat.io/jboss-webserver-5/jws57-openjdk11-openshift-rhel8@sha256:eeb0c539ee7ffbd2f1e6eb326204c6f69c554ac5acf0454e9d68d75ffe954f7c sha256:eed7e29bf583e4f01e170bb9f22f2a78098bf15243269b670c307caa6813b783 registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:eed7e29bf583e4f01e170bb9f22f2a78098bf15243269b670c307caa6813b783 sha256:f438230ed2c2e609d0d7dbc430ccf1e9bad2660e6410187fd6e9b14a2952e70b registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:f438230ed2c2e609d0d7dbc430ccf1e9bad2660e6410187fd6e9b14a2952e70b sha256:f4fecc3946301cef5130f3eaaa33d9c15426437d46efe70b507ac90f03364307 registry.redhat.io/ubi10/php-83@sha256:f4fecc3946301cef5130f3eaaa33d9c15426437d46efe70b507ac90f03364307 sha256:f74e72bc7bb13dec9f38ef9e00a8665a7c08a386176ca4b3c41075491e8d07e7 registry.redhat.io/fuse7/fuse-eap-openshift@sha256:f74e72bc7bb13dec9f38ef9e00a8665a7c08a386176ca4b3c41075491e8d07e7 sha256:f7ca08a8dda3610fcc10cc1fe5f5d0b9f8fc7a283b01975d0fe2c1e77ae06193 registry.access.redhat.com/ubi8/openjdk-8@sha256:f7ca08a8dda3610fcc10cc1fe5f5d0b9f8fc7a283b01975d0fe2c1e77ae06193 sha256:f7d4386680e3a44e3bf8bacc3ebfe3224232e44e4d1e2e7167aa1b4970f2866c registry.redhat.io/fuse7/fuse-eap-openshift-jdk8-rhel7@sha256:f7d4386680e3a44e3bf8bacc3ebfe3224232e44e4d1e2e7167aa1b4970f2866c sha256:f89a54e6d1340be8ddd84a602cb4f1f27c1983417f655941645bf11809d49f18 registry.access.redhat.com/ubi8/openjdk-17@sha256:f89a54e6d1340be8ddd84a602cb4f1f27c1983417f655941645bf11809d49f18 sha256:f953734d89252219c3dcd8f703ba8b58c9c8a0f5dfa9425c9e56ec0834f7d288 registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:f953734d89252219c3dcd8f703ba8b58c9c8a0f5dfa9425c9e56ec0834f7d288 sha256:fa9556628c15b8eb22cafccb737b3fbcecfd681a5c2cfea3302dd771c644a7db registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:fa9556628c15b8eb22cafccb737b3fbcecfd681a5c2cfea3302dd771c644a7db [INFO] oc get 'images' -oyaml apiVersion: v1 items: - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:54e56e6f85721741ee7bf0336de8ad3bf138a56769a6d0097b600a0e361be58d size: 39618910 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4f8ddd7f5a755f537dd9d5f553c8c78171dcf3018c5fc96676a07380d3e14e20 size: 1745 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:fa8e7ec856879c5c416f30aeaca26473064c725889c4a66f9f664909c5657afb size: 72401376 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - 
JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 65ec992ef2e6 Image: 696547b1166cc2d13341e0f5b0c183f4736e71d62b55b12046160d5877f17c09 Labels: architecture: x86_64 build-date: 2022-04-29T13:49:43.606616 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1651233090" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.12-1.1651233090 vcs-ref: d68a2b1f3342c920689ae6f7c0a9614570f9b9a0 vcs-type: git vendor: Red Hat, Inc. version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 65ec992ef2e6 Image: sha256:1e817c050a87c981c80f34e27d52976449c423544defb888938b6d71aafd4fe4 Labels: architecture: x86_64 build-date: 2022-04-29T13:49:43.606616 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1651233090" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.12-1.1651233090 vcs-ref: d68a2b1f3342c920689ae6f7c0a9614570f9b9a0 
vcs-type: git vendor: Red Hat, Inc. version: "1.12" User: "185" WorkingDir: /home/jboss Created: "2022-04-29T13:51:52Z" DockerVersion: 1.13.1 Id: sha256:3bf069ed2b338f38b5eed9edb7916876ed2deca8dbc13fcb8d61ef6ba0f588a8 Size: 112027067 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:00cf28cf9a6c427962f922855a6cc32692c760764ce2ce7411cf605dd510367f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:00cf28cf9a6c427962f922855a6cc32692c760764ce2ce7411cf605dd510367f resourceVersion: "14220" uid: bf9f1d58-cbe4-43b7-a7f4-69fd862c09f0 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c9281c141a1bfec06e291d2ad29bfdedfd10a99d583fc0f48d3c26723ebe0761 size: 75827357 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:31114e120ca0c7dc51e01721c5a689a614edb6c86de11301d503c72be1540c79 size: 1325 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:97908a410b902748f50f7a00125b2bd6762668e88bba5f4a0c0d4e6f916fd7fb size: 346120986 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/datagrid/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.2.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.2 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.2.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: c16126310035 Image: 3eb3eeda3399171b4c298813e2219138f8853ba30a4a7a1d3cb9eb441c9bdb1a Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-09-04T10:02:17.641107 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: 
https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 3.2.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.2.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.2.GA release: "2.1567588117" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.2-2.1567588117 vcs-ref: 67deb13da66750a5d3c41f77b006da42a9ee76e8 vcs-type: git vendor: Red Hat, Inc. version: "1.2" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.2.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.2 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.2.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: c16126310035 Image: sha256:99da8c1dc65f6d50835ebddf30111054b940f7a3f94b01821ab04539535961c5 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-09-04T10:02:17.641107 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image 
distribution-scope: public io.cekit.version: 3.2.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.2.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.2.GA release: "2.1567588117" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.2-2.1567588117 vcs-ref: 67deb13da66750a5d3c41f77b006da42a9ee76e8 vcs-type: git vendor: Red Hat, Inc. version: "1.2" User: "185" WorkingDir: /home/jboss Created: "2019-09-04T10:09:52Z" DockerVersion: 1.13.1 Id: sha256:9f07e18e8f8794425f581712f0daf8aa0d7fe4cc6298ce4c4bf2f05055637e02 Size: 421957660 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:01920073bbd480bd34e2d8e17dced64d342257fa9a263d1843edf1cc45a50a7c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:02Z" name: sha256:01920073bbd480bd34e2d8e17dced64d342257fa9a263d1843edf1cc45a50a7c resourceVersion: "13890" uid: ac550310-b854-44b6-b2eb-056ef12f8eff - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:79c20e727b94ea36ae8776eb9e4065b60dc1d396564a6a91ebb6ee334dfb5cea size: 79001473 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:526725917c850c5382c5cf83a3e8feebf44e29e34e293ba88a4336884a294be7 size: 18609572 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:029d17802350ff772a29106200a4b88238f0db6e3fec96c14435e92e18294a56 size: 154879763 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:18bc4f86cb290f4205bc684663bc9cea29e8cb1b9892289c6399f9c3138b0af8 size: 41134775 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - NODEJS_VER=20 - PHP_VERSION=8.0 - PHP_VER_SHORT=80 - NAME=php - SUMMARY=Platform for building and running PHP 8.0 applications - DESCRIPTION=PHP 8.0 available as container is a base platform for building and running various PHP 8.0 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. 
The most common use of PHP coding is probably as a replacement for CGI scripts. - PHP_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/php/ - APP_DATA=/opt/app-root/src - PHP_DEFAULT_INCLUDE_PATH=/usr/share/pear - PHP_SYSCONF_PATH=/etc - PHP_HTTPD_CONF_FILE=php.conf - HTTPD_CONFIGURATION_PATH=/opt/app-root/etc/conf.d - HTTPD_MAIN_CONF_PATH=/etc/httpd/conf - HTTPD_MAIN_CONF_D_PATH=/etc/httpd/conf.d - HTTPD_MODULES_CONF_D_PATH=/etc/httpd/conf.modules.d - HTTPD_VAR_RUN=/var/run/httpd - HTTPD_DATA_PATH=/var/www - HTTPD_DATA_ORIG_PATH=/var/www - HTTPD_VAR_PATH=/var ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2025-01-21T18:01:06 com.redhat.component: php-80-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: PHP 8.0 available as container is a base platform for building and running various PHP 8.0 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-php-container io.buildah.version: 1.33.8 io.k8s.description: PHP 8.0 available as container is a base platform for building and running various PHP 8.0 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. io.k8s.display-name: Apache 2.4 with PHP 8.0 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,php,php80,php-80 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/php-80 release: "157" summary: Platform for building and running PHP 8.0 applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/php-80/images/1-157 usage: s2i build https://github.com/sclorg/s2i-php-container.git --context-dir=8.0/test/test-app ubi8/php-80 sample-server vcs-ref: ba2d688239bd5f982694de1f2fd86a2cf14db214 vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-01-21T18:04:06Z" Id: sha256:2569ec6fa768b179b3708810892f8d9f4170dea29a575168ed37d365254b55bc Size: 293651467 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/php-80@sha256:0194269c95cbc1aeb509611e283fd404791a0bc79911532bd9835bb51a19aff6 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:0194269c95cbc1aeb509611e283fd404791a0bc79911532bd9835bb51a19aff6 resourceVersion: "14079" uid: b4e6b8df-6d14-4dbf-bdc9-c2051e527552 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a9e23b64ace00a199db21d302292b434e9d3956d79319d958ecc19603d00c946 size: 39622437 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:38b71301a1d9df24c98b5a5ee8515404f42c929003ad8b13ab83d2de7de34dec size: 1742 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b2f0e3786ba7d5fe750dc1adb8c654bc3a7e43ddc2980708ba1d93dfd56a796c size: 112815087 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.11 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 02d87ca75d31 Image: eacfec936445734e0a7266541a3bd3f8e2cdb579174f4655e197e023ea328f6d Labels: architecture: x86_64 build-date: 2022-03-28T09:44:20.823364 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running 
plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1648459552" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.11-1.1648459552 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: cabd719a48515652b21a9a98ca3094610b3df338 vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.11 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 02d87ca75d31 Image: sha256:3320c88e3477ed4089373445c70ee7d8549ad3af435409c0179ba14e4fe951bc Labels: architecture: x86_64 build-date: 2022-03-28T09:44:20.823364 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk 
org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1648459552" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.11-1.1648459552 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: cabd719a48515652b21a9a98ca3094610b3df338 vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss Created: "2022-03-28T09:50:43Z" DockerVersion: 1.13.1 Id: sha256:1ec3bc3a3fa2aa2b01e7b49d9c9bd98bfccccdbe8aec5b933b278a3ab58bdc0c Size: 152446782 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8@sha256:022488b1bf697b7dd8c393171a3247bef4ea545a9ab828501e72168f2aac9415 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:022488b1bf697b7dd8c393171a3247bef4ea545a9ab828501e72168f2aac9415 resourceVersion: "14208" uid: 5cf3489e-f50a-4641-a300-c6b1184e2b03 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d2bb55755c2edf5423e38bd42c48b277e0cb4c5c765218247001a8c8eb479a87 size: 215199578 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:24f3b28ce6cdd681f947607add74a19f36e917c4fd4c0c1267c344fb045af850 size: 23487344 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - NODEJS_VER=20 - PHP_VERSION=8.2 - PHP_VER_SHORT=82 - NAME=php - SUMMARY=Platform for building and running PHP 8.2 applications - DESCRIPTION=PHP 8.2 available as container is a base platform for building and running various PHP 8.2 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. 
- PHP_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/php/ - APP_DATA=/opt/app-root/src - PHP_DEFAULT_INCLUDE_PATH=/usr/share/pear - PHP_SYSCONF_PATH=/etc - PHP_HTTPD_CONF_FILE=php.conf - PHP_FPM_CONF_D_PATH=/etc/php-fpm.d - PHP_FPM_CONF_FILE=www.conf - PHP_FPM_RUN_DIR=/run/php-fpm - PHP_CLEAR_ENV=ON - PHP_MAIN_FPM_CONF_FILE=/etc/php-fpm.conf - PHP_FPM_LOG_PATH=/var/log/php-fpm - HTTPD_CONFIGURATION_PATH=/opt/app-root/etc/conf.d - HTTPD_MAIN_CONF_PATH=/etc/httpd/conf - HTTPD_MAIN_CONF_D_PATH=/etc/httpd/conf.d - HTTPD_MODULES_CONF_D_PATH=/etc/httpd/conf.modules.d - HTTPD_VAR_RUN=/var/run/httpd - HTTPD_DATA_PATH=/var/www - HTTPD_DATA_ORIG_PATH=/var/www - HTTPD_VAR_PATH=/var ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T20:08:01Z" com.redhat.component: php-82-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: PHP 8.2 available as container is a base platform for building and running various PHP 8.2 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-php-container io.buildah.version: 1.41.4 io.k8s.description: PHP 8.2 available as container is a base platform for building and running various PHP 8.2 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. io.k8s.display-name: Apache 2.4 with PHP 8.2 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,php,php82,php-82 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/php-82 org.opencontainers.image.revision: 8877791cc654b87b090d1ccabc3aa73c5c972a9a release: "1760386053" summary: Platform for building and running PHP 8.2 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-php-container.git --context-dir=8.2/test/test-app ubi9/php-82 sample-server vcs-ref: 8877791cc654b87b090d1ccabc3aa73c5c972a9a vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T20:08:08Z" Id: sha256:3703f0311b7409b93c12089ee53bbd06803e01f55ebfa8354791757c66ef2f90 Size: 335792412 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/php-82@sha256:02c1739b727e3b15a76cdd44f92cf91336d7dd34d5e830b2fe4ab3f2af48fe60 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:02c1739b727e3b15a76cdd44f92cf91336d7dd34d5e830b2fe4ab3f2af48fe60 resourceVersion: "14082" uid: cc0f4cab-c927-4b75-8fb3-5fd706390f0a - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d2bb55755c2edf5423e38bd42c48b277e0cb4c5c765218247001a8c8eb479a87 size: 215199578 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:68214f4c07c8b187e0330525f5bde9ee8d07de0cbe382bc2a71142a1adf37630 size: 74581578 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el9 - NODEJS_VER=20 - PYTHON_VERSION=3.11 - PATH=/opt/app-root/src/.local/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PYTHONUNBUFFERED=1 - PYTHONIOENCODING=UTF-8 - LC_ALL=en_US.UTF-8 - LANG=en_US.UTF-8 - CNB_STACK_ID=com.redhat.stacks.ubi9-python-311 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - PIP_NO_CACHE_DIR=off - SUMMARY=Platform for building and running Python 3.11 applications - DESCRIPTION=Python 3.11 available as container is a base platform for building and running various Python 3.11 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. - BASH_ENV=/opt/app-root/bin/activate - ENV=/opt/app-root/bin/activate - PROMPT_COMMAND=. /opt/app-root/bin/activate ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-14T05:46:35Z" com.redhat.component: python-311-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Python 3.11 available as container is a base platform for building and running various Python 3.11 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. 
Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. distribution-scope: public io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.ubi9-python-311 io.k8s.description: Python 3.11 available as container is a base platform for building and running various Python 3.11 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. io.k8s.display-name: Python 3.11 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,python,python311,python-311,rh-python311 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/python-311 org.opencontainers.image.revision: 84052fc1eca7c447b0e3054540aa8cc7f5e993ed release: "1760420762" summary: Platform for building and running Python 3.11 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-python-container.git --context-dir=3.11/test/setup-test-app/ ubi9/python-311 python-sample-app vcs-ref: 84052fc1eca7c447b0e3054540aa8cc7f5e993ed vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-14T05:46:50Z" Id: sha256:b709bd41f2bb4c7afdcce17e5fe447b290f07477f2af8d43b581c5ec25d3ea46 Size: 386885017 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/python-311@sha256:04b6d6c9d05857ca39c360caf8698f3081360eeffe85da2eaab3e7e91040030b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:06Z" name: sha256:04b6d6c9d05857ca39c360caf8698f3081360eeffe85da2eaab3e7e91040030b resourceVersion: "14126" uid: 174ca5bc-4bf3-4fc0-b62b-78b50cfbc29f - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d7c06497d5cebd39c0a4feb14981ec940b5c863e49903d320f630805b049cbff size: 39279912 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b212228af90322a6b2d2422f3c21f1c2e04cf4ed316c6e789eccc4b8fd5c37d1 size: 111170397 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - 
JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.14 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-02-07T17:22:26 com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "12.1675788288" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.14-12.1675788288 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: a305d5df96a43cd9cc90d723bcea4e824f917836 vcs-type: git vendor: Red Hat, Inc. 
version: "1.14" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-02-07T17:33:43Z" Id: sha256:eb612086819b66209ef77e4e424c38cf311c5490594dc6401da1c6ceeeb87c0f Size: 150480601 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11@sha256:0669a28577b41bb05c67492ef18a1d48a299ac54d1500df8f9f8f760ce4be24b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:0669a28577b41bb05c67492ef18a1d48a299ac54d1500df8f9f8f760ce4be24b resourceVersion: "14175" uid: b95fed3c-965d-4baf-853f-e2a4af3b3015 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c9281c141a1bfec06e291d2ad29bfdedfd10a99d583fc0f48d3c26723ebe0761 size: 75827357 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:31114e120ca0c7dc51e01721c5a689a614edb6c86de11301d503c72be1540c79 size: 1325 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ebb6f9f5a86f545f3089e00644d627ac4efa1caa2bf80524a27d2aabe13621e4 size: 107810095 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - HOME=/home/jboss - JAVA_DATA_DIR=/deployments/data - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_HAWKULAR_MODULE=/opt/jboss/container/hawkular - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.6 - JOLOKIA_VERSION=1.5.0 - MAVEN_VERSION=3.5 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: c16126310035 Image: 9c8cdff623ea06fcf4498a5e32e90868a9d9a393a50762cdf05d3034d15cffbc Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-09-04T09:59:10.737369 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 
io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: 8080/tcp:webcache,8443/tcp:pcsync-https io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java name: redhat-openjdk-18/openjdk18-openshift org.concrt.version: 2.2.1 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "23.1567588116" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.6-23.1567588116 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 5bdb7e62aa6179193d5fe001006126c053d4f820 vcs-type: git vendor: Red Hat, Inc. version: "1.6" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - HOME=/home/jboss - JAVA_DATA_DIR=/deployments/data - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_HAWKULAR_MODULE=/opt/jboss/container/hawkular - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.6 - JOLOKIA_VERSION=1.5.0 - MAVEN_VERSION=3.5 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: c16126310035 Image: sha256:a7c217f8b8ba716e0093809740b942db647b6af05dafa60bcb5185e7a2d02531 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-09-04T09:59:10.737369 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications 
io.openshift.expose-services: 8080/tcp:webcache,8443/tcp:pcsync-https io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java name: redhat-openjdk-18/openjdk18-openshift org.concrt.version: 2.2.1 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "23.1567588116" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.6-23.1567588116 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 5bdb7e62aa6179193d5fe001006126c053d4f820 vcs-type: git vendor: Red Hat, Inc. version: "1.6" User: "185" WorkingDir: /home/jboss Created: "2019-09-04T10:03:46Z" DockerVersion: 1.13.1 Id: sha256:74c8511ec481bb881c4c297ce6bd05fbf188587cadb9f7895c70bc0e3162b645 Size: 183646588 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:06bbbf9272d5c5161f444388593e9bd8db793d8a2d95a50b429b3c0301fafcdd kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:06bbbf9272d5c5161f444388593e9bd8db793d8a2d95a50b429b3c0301fafcdd resourceVersion: "14174" uid: 6726d73c-7d2d-4da4-821c-4a5727fc8a29 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2e449f0d8596b91719b366a7f134954cd2a03e99408e3899b004182af82a6979 size: 79829956 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7fff4c4748a270604546349c7386ae461ba01a71b193d651cfcb0abc5fa88a34 size: 7552327 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0b8fa8a9a8dc81f189373242af953709ea22ffa2b0a9c6d6070dc5027c12d432 size: 106012003 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:126a494c3c5f3513f4482bdcc5f0608a41c2f413c33b350e3ef2daef61cbe07c size: 164861497 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el7 - BASH_ENV=/opt/app-root/etc/scl_enable - ENV=/opt/app-root/etc/scl_enable - PROMPT_COMMAND=. /opt/app-root/etc/scl_enable - NODEJS_SCL=rh-nodejs14 - NAME=golang - VERSION=1.18.10 - SUMMARY=Platform for building and running Go applications - DESCRIPTION=Go Toolset available as a container is a base platform for building and running various Go applications and frameworks. Go is an easy to learn, powerful, statically typed language in the C/C++ tradition with garbage collection, concurrent programming support, and memory safety features. 
Labels: architecture: x86_64 build-date: 2023-04-12T16:30:01 com.redhat.component: go-toolset-container com.redhat.license_terms: https://www.redhat.com/agreements description: Go Toolset available as a container is a base platform for building and running various Go applications and frameworks. Go is an easy to learn, powerful, statically typed language in the C/C++ tradition with garbage collection, concurrent programming support, and memory safety features. distribution-scope: public io.buildah.version: 1.27.3 io.k8s.description: Go Toolset available as a container is a base platform for building and running various Go applications and frameworks. Go is an easy to learn, powerful, statically typed language in the C/C++ tradition with garbage collection, concurrent programming support, and memory safety features. io.k8s.display-name: Go 1.18.10 io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,golang,golang117,rh-golang117,go io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: Red Hat, Inc. name: devtools/go-toolset-rhel7 release: "6.1681314820" summary: Platform for building and running Go applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/devtools/go-toolset-rhel7/images/1.18.10-6.1681314820 vcs-ref: 10d1ace5725c33abb5ceaf9c297d54bd34a13de9 vcs-type: git vendor: Red Hat, Inc. version: 1.18.10 User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2023-04-12T16:32:45Z" Id: sha256:524410e529e4a936dfcc37a6f8fa90f068ff9b3419fe5e9a264e8e359c11531b Size: 358277196 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi7/go-toolset@sha256:07addbabcfd72212a82efce053a70362a06925ee1522c4dd783be878ffad46cb kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:07addbabcfd72212a82efce053a70362a06925ee1522c4dd783be878ffad46cb resourceVersion: "13443" uid: 1f245879-53c0-4fff-97bf-7ba4293f16b9 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:25c75c34b2e2b68ba9245d9cddeb6b8a0887371ed30744064f85241a75704d87 size: 79262296 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:67705065e025181e4faca8aabe1305bdd92f5bdf8a2b8009cdb69183ac2e2c47 size: 49851946 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9d458e2e81cb0fa811f569aaf711628309c0372c7d5eed4a8ea9ec96b4aeeb42 size: 9300456 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e556fa14a02c7e2f1c7f24af127ffe7b0d4375fe2f1f4622fab0817cfc8b3ae7 size: 1603140567 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - ART_BUILD_ENGINE=konflux - ART_BUILD_DEPS_METHOD=cachi2 - ART_BUILD_NETWORK=hermetic - ART_BUILD_DEPS_MODE=default - __doozer=merge - BUILD_RELEASE=202510211040.p2.ge238076.assembly.stream.el9 - BUILD_VERSION=v4.20.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=20 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.20.0-202510211040.p2.ge238076.assembly.stream.el9-e238076 - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.20 - __doozer_key=ose-installer-artifacts - 
__doozer_uuid_tag=ose-installer-artifacts-rhel9-v4.20.0-20251021.105557 - __doozer_version=v4.20.0 - OS_GIT_COMMIT=e238076 - SOURCE_DATE_EPOCH=1760727874 - SOURCE_GIT_COMMIT=e23807689ec464da30e771dda70fd8989680a011 - SOURCE_GIT_TAG=v1.4.19-ec5-379-ge23807689e - SOURCE_GIT_URL=https://github.com/openshift/installer Labels: License: GPLv2+ architecture: x86_64 build-date: "2025-10-21T13:37:06Z" com.redhat.component: ose-installer-artifacts-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:openshift:4.20::el9 description: Empty distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Empty io.k8s.display-name: Empty io.openshift.build.commit.id: e23807689ec464da30e771dda70fd8989680a011 io.openshift.build.commit.url: https://github.com/openshift/installer/commit/e23807689ec464da30e771dda70fd8989680a011 io.openshift.build.source-location: https://github.com/openshift/installer io.openshift.expose-services: "" io.openshift.maintainer.component: Installer / openshift-installer io.openshift.maintainer.project: OCPBUGS io.openshift.release.operator: "true" io.openshift.tags: Empty maintainer: Red Hat, Inc. name: openshift/ose-installer-artifacts-rhel9 org.opencontainers.image.revision: 2b6efba3ac3cacf5a896b33a86cafef7789bb1a2 release: 202510211040.p2.ge238076.assembly.stream.el9 summary: Empty url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel-els/images/9.4-847.1719484506 vcs-ref: 2b6efba3ac3cacf5a896b33a86cafef7789bb1a2 vcs-type: git vendor: Red Hat, Inc. version: v4.20.0 ContainerConfig: {} Created: "2025-10-21T14:09:09Z" Id: sha256:d6c3fc36ab859b726e47238af5ca312464463e1d1c6ffcbf9071acbd87c2e82c Size: 1741576693 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:0a1a889dcfb66dfe73d30f6a7a18dace8796e66e9f2203de97955500ad76f4aa kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:0a1a889dcfb66dfe73d30f6a7a18dace8796e66e9f2203de97955500ad76f4aa resourceVersion: "13330" uid: 5534c026-f3c7-42d5-ba54-0eac527dd513 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d69140bdce18c2f525b2ad0cc3998a1c6f2bc0a850353b7b7feac66eca1da526 size: 75854078 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a82dd37af30d5ff9e805ceea67ea615a17dfaafba3135b12e6b2dab29ee2cff2 size: 1264 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9912708161e166fd25db1f05e024296dd82854ab2c2fab9e91bb5d1bca8864e9 size: 273648845 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/datagrid/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - 
JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_DATAGRID_VERSION=7.3.0.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.0 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - JOLOKIA_VERSION=1.5.0 - LAUNCH_JBOSS_IN_BACKGROUND=true - MAVEN_VERSION=3.5 - PRODUCT_VERSION=7.3.0.GA - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: ed75d4430067 Image: 93e4722f3a6f12bdaefa2157c427c28d85451b6a6740d97925a9bc65f7581d5e Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-05-06T22:52:07.700549 com.redhat.build-host: cpt-0003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/licenses/eulas description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 2.2.7 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.concrt.version: 2.2.7 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.0.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.3.0.GA release: "3" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.0-3 vcs-ref: ced81ccfb5b1202379e1ef311e0128f62a7abfb7 vcs-type: git vendor: Red Hat, Inc. 
version: "1.0" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_DATAGRID_VERSION=7.3.0.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.0 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - JOLOKIA_VERSION=1.5.0 - LAUNCH_JBOSS_IN_BACKGROUND=true - MAVEN_VERSION=3.5 - PRODUCT_VERSION=7.3.0.GA - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: ed75d4430067 Image: sha256:a03a0d2d69476e9d4f4e83882afa8718fa902df9d3cfcc941f2df5c7e87528de Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-05-06T22:52:07.700549 com.redhat.build-host: cpt-0003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/licenses/eulas description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 2.2.7 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.concrt.version: 2.2.7 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.0.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.3.0.GA release: "3" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.0-3 vcs-ref: ced81ccfb5b1202379e1ef311e0128f62a7abfb7 vcs-type: git vendor: Red Hat, Inc. 
version: "1.0" User: "185" WorkingDir: /home/jboss Created: "2019-05-06T22:57:56Z" DockerVersion: 1.13.1 Id: sha256:d4b11285c6b753f4bc2bba205a56aeba8c069a95df61ac2c44be585077fde53b Size: 349512144 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:0a3a55052f6e8df1ea48de0c429c39e072b6aa8818250ccee634f96acacfd7c7 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:02Z" name: sha256:0a3a55052f6e8df1ea48de0c429c39e072b6aa8818250ccee634f96acacfd7c7 resourceVersion: "13883" uid: 0d0891fa-4574-4c67-830a-ced35b1e4273 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c26c53d1c36b15639046521ee8385d1ecf266b8bd2666a5386ac609586826fd size: 129289489 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el8 - NODEJS_VERSION=20 - NPM_RUN=start - NAME=nodejs - NPM_CONFIG_PREFIX=/opt/app-root/src/.npm-global - PATH=/opt/app-root/src/node_modules/.bin/:/opt/app-root/src/.npm-global/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - CNB_STACK_ID=com.redhat.stacks.ubi8-nodejs-20 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - SUMMARY=Platform for building and running Node.js 20 applications - DESCRIPTION=Node.js 20 available as container is a base platform for building and running various Node.js 20 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-28T04:27:47Z" com.redhat.component: nodejs-20-container com.redhat.deployments-dir: /opt/app-root/src com.redhat.dev-mode: DEV_MODE:false com.redhat.dev-mode.port: DEBUG_PORT:5858 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Node.js 20 available as container is a base platform for building and running various Node.js 20 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. 
distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-nodejs-container io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.ubi8-nodejs-20 io.k8s.description: Node.js 20 available as container is a base platform for building and running various Node.js 20 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. io.k8s.display-name: Node.js 20 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nodejs,nodejs20 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/nodejs-20 org.opencontainers.image.revision: e3b2d882ca8bb840b7ff927ee91d2b24733e7502 release: "1761625603" summary: Platform for building and running Node.js 20 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi8/nodejs-20:latest vcs-ref: e3b2d882ca8bb840b7ff927ee91d2b24733e7502 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-28T04:28:15Z" Id: sha256:2262103dee868fb05af4d5744be2e5fb65d96cad5d8ce3698c67a036dc362ba8 Size: 224848286 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/nodejs-20@sha256:0ab9d7fe68c5aef87b591e40bd6be7479e913798348f86ac43d7c0357794bb3f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:0ab9d7fe68c5aef87b591e40bd6be7479e913798348f86ac43d7c0357794bb3f resourceVersion: "14088" uid: 39a20b22-012d-44a4-b840-4f90e25c794c - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:55fe005a89f86f773a64202c819856af27f5435cd8e3c9af19672a0ca72c0a7c size: 130299748 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el9 - NODEJS_VERSION=20 - NPM_RUN=start - NAME=nodejs - NPM_CONFIG_PREFIX=/opt/app-root/src/.npm-global - PATH=/opt/app-root/src/node_modules/.bin/:/opt/app-root/src/.npm-global/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - CNB_STACK_ID=com.redhat.stacks.ubi9-nodejs-20 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - SUMMARY=Platform for building and running Node.js 20 applications - DESCRIPTION=Node.js 20 available as container is a base platform for building and running various Node.js 20 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. 
Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-28T04:29:50Z" com.redhat.component: nodejs-20-container com.redhat.deployments-dir: /opt/app-root/src com.redhat.dev-mode: DEV_MODE:false com.redhat.dev-mode.port: DEBUG_PORT:5858 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Node.js 20 available as container is a base platform for building and running various Node.js 20 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-nodejs-container io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.ubi9-nodejs-20 io.k8s.description: Node.js 20 available as container is a base platform for building and running various Node.js 20 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. io.k8s.display-name: Node.js 20 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nodejs,nodejs20 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/nodejs-20 org.opencontainers.image.revision: ba51077abb8f36c029a729dfd10d03f5f4ff3590 release: "1761625725" summary: Platform for building and running Node.js 20 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi9/nodejs-20:latest vcs-ref: ba51077abb8f36c029a729dfd10d03f5f4ff3590 vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-28T04:30:05Z" Id: sha256:8685effefab381bd14d40df6c9c513c0833ddc7da8a897caf85baa4c62c45481 Size: 227400252 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/nodejs-20@sha256:0d5551af0b9188a88eb39bf9745f992b1ab2ce4839d4a0555b59c58b5f3f6412 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:0d5551af0b9188a88eb39bf9745f992b1ab2ce4839d4a0555b59c58b5f3f6412 resourceVersion: "14089" uid: f6343cc8-8498-4c90-aeda-b28646a4b369 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:06f86e50a0b74ff9eb161a7d781228877c90e8ff57e9689e8cb8b0f092a2a9f9 size: 39268171 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3ba3333d1709318bb6e11b393259e7fb8977b0afb1489f4f030ea640ecf428e3 size: 116813486 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-17 - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-17 - JBOSS_IMAGE_VERSION=1.15 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-04-21T04:57:59 com.redhat.component: openjdk-17-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 4.3.0 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "1.1682053058" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 url: 
https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17/images/1.15-1.1682053058 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 64e82bc4e12a24837e440904151526afb2a2fe1b vcs-type: git vendor: Red Hat, Inc. version: "1.15" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-04-21T05:10:17Z" Id: sha256:4d3db921ed0efa6fba3a14865cd9a17457693ad7d20c7701394c831358766acb Size: 156109729 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17@sha256:0eea1d20aaa26041edf26b925fb204d839e5b93122190191893a0299b2e1b589 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:0eea1d20aaa26041edf26b925fb204d839e5b93122190191893a0299b2e1b589 resourceVersion: "14146" uid: abfd7565-c806-4d69-95b6-f3115ee3f103 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:76608b6b9d54251299c5d3be69fdf53e05f97a3735bbcd5889c30ebb78608428 size: 75827462 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3c81a5d20855a6cef8b997d709410e047e2839b5ad113f4c34d25e9fae9e3beb size: 1266 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:cf313e71c9fd8209b9df2ee0fb7471c014b3ec6f7144546b88ad7c46b5fb2cd4 size: 3891763 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:07c69678314f4cb7384c115ffd5040765fe1fe42db1b8c789af11ce865771f7b size: 84375848 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7ed15802bf28dc9bb8b0cd6444082661b0afb1c78519d6b89ed3634a96db8e10 size: 26652483 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - usage Env: - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.2 - JOLOKIA_VERSION=1.5.0.redhat-1 - KARAF_FRAMEWORK_VERSION=4.2.0.fuse-720061-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 4072265dcbc2 Image: d8d8b46372545d04c96dc972b55419fc9c5a27e5ff7e9be0949c210eaf6628bc Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-04-17T17:07:52.793607 com.redhat.build-host: cpt-0002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/licenses/eulas description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.karaf: 4.2.0.fuse-720061-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications 
io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Dhiraj Bokde name: fuse7/fuse-karaf-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "10.1555516883" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.2-10.1555516883 vcs-ref: cd49f262c6da2881ddc7ebb46cd69009e472c78d vcs-type: git vendor: Red Hat, Inc. version: "1.2" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.2 - JOLOKIA_VERSION=1.5.0.redhat-1 - KARAF_FRAMEWORK_VERSION=4.2.0.fuse-720061-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 4072265dcbc2 Image: sha256:2f9245e0f02f784c280d2be34ec9e58fded9a51121ada54d4c9adebf87a9ec62 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-04-17T17:07:52.793607 com.redhat.build-host: cpt-0002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/licenses/eulas description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.karaf: 4.2.0.fuse-720061-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Dhiraj Bokde name: fuse7/fuse-karaf-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "10.1555516883" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.2-10.1555516883 vcs-ref: cd49f262c6da2881ddc7ebb46cd69009e472c78d vcs-type: git vendor: Red Hat, Inc. 
version: "1.2" User: "185" WorkingDir: /home/jboss Created: "2019-04-17T17:10:45Z" DockerVersion: 1.13.1 Id: sha256:47e204e5513257f9d649674aef3bf75227d79368ee6b4e08c8d3ea2757779d40 Size: 190755025 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:13577236b039ed11e9f1070f884e9836e731944575de2ee59b290b05e08ad5f8 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:13577236b039ed11e9f1070f884e9836e731944575de2ee59b290b05e08ad5f8 resourceVersion: "14026" uid: 35da11b0-1ace-4bac-80dc-3f20ac7ba064 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e74659e3e033616f4f0731f4c22814ff4543cb3c1b85a05f6484647b4fea7b3d size: 136155585 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2a2353c2e8f9aca8ce43a2cb751b126340cb9455e418eaa9cf3515fec24f2668 size: 5389944 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - usage Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_HTTPS=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.17 - LANG=C.utf8 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift-jdk11-rhel8 - FUSE_KARAF_IMAGE_VERSION=1.12 - JOLOKIA_VERSION=1.7.2.redhat-00002 - KARAF_FRAMEWORK_VERSION=4.4.3.fuse-7_12_1-00009-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.18.0.redhat-00001 - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Labels: architecture: x86_64 build-date: 2024-05-23T17:52:18 com.redhat.component: fuse-karaf-openshift-jdk11-rhel-8-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 
io.fabric8.s2i.version.jolokia: 1.7.2.redhat-00002 io.fabric8.s2i.version.karaf: 4.4.3.fuse-7_12_1-00009-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.18.0.redhat-00001 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift-jdk11-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "20.1716485725" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift-jdk11-rhel8/images/1.12-20.1716485725 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: 6036739709302e2d07ffc82f65aac994e47173ee vcs-type: git vendor: Red Hat, Inc. version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-05-23T17:54:46Z" Id: sha256:cba70185de70abde58315a01c3d4d07c680b12d031dfd1ff23d661e48c2ab63c Size: 180894665 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift-jdk11-rhel8@sha256:144941f6745ed291d11d94c66037cfbb1d7cd9cf28db4f2234d9265efe767eff kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:144941f6745ed291d11d94c66037cfbb1d7cd9cf28db4f2234d9265efe767eff resourceVersion: "13284" uid: aa56382f-fc51-47ca-aa84-32a6f9df3edb - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7f2c2c4492b6b2d181be862a0a1d1b6f6851cb07244efbcb43d44f9936aa78d5 size: 80005019 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ea092d7970b26c24007a670fc6d0810dbf9531dc0d3a9d6ea514134ba5686724 size: 7541063 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:512b052e86a3145118bb3f5b4c88a627e33696c374a01ea6363daebbb8868a65 size: 66269555 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - run-mysqld Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el7 - MYSQL_VERSION=10.5 - APP_DATA=/opt/app-root/src - HOME=/var/lib/mysql - SUMMARY=MariaDB 10.5 SQL database server - DESCRIPTION=MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. 
- CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/mysql - MYSQL_PREFIX=/opt/rh/rh-mariadb105/root/usr - ENABLED_COLLECTIONS=rh-mariadb105 - BASH_ENV=/usr/share/container-scripts/mysql/scl_enable - ENV=/usr/share/container-scripts/mysql/scl_enable - PROMPT_COMMAND=. /usr/share/container-scripts/mysql/scl_enable ExposedPorts: 3306/tcp: {} Labels: architecture: x86_64 build-date: 2024-05-30T11:32:54 com.redhat.component: rh-mariadb105-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel description: MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. distribution-scope: public io.buildah.version: 1.29.0 io.k8s.description: MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. io.k8s.display-name: MariaDB 10.5 io.openshift.expose-services: 3306:mysql io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,mysql,mariadb,mariadb105,rh-mariadb105 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhscl/mariadb-105-rhel7 release: "117" summary: MariaDB 10.5 SQL database server url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhscl/mariadb-105-rhel7/images/1-117 usage: docker run -d -e MYSQL_USER=user -e MYSQL_PASSWORD=pass -e MYSQL_DATABASE=db -p 3306:3306 rhscl/mariadb-105-rhel7 vcs-ref: 27d1f72a7ae349bd764e4339e98760ecb4bc209e vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "27" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2024-05-30T11:35:39Z" Id: sha256:2ec1a3e868f0672eeea1d5d8087bf28ab4dd0922ed6880a95ace70d33206e465 Size: 153835055 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhscl/mariadb-105-rhel7@sha256:146789aaa36b11c2194c3a1cd1bbc9d56016a67cf2791401b59c339983dd2a5e kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:146789aaa36b11c2194c3a1cd1bbc9d56016a67cf2791401b59c339983dd2a5e resourceVersion: "13841" uid: 151bcab6-0eef-41a5-9792-4b7b9af235cf - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:06f86e50a0b74ff9eb161a7d781228877c90e8ff57e9689e8cb8b0f092a2a9f9 size: 39268171 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:122eac69054b22f81a29f37eb7effe0a3038861b977db932717c5e068f649107 size: 92495022 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.15 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-04-21T04:57:58 com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 4.3.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "1.1682053056" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.15-1.1682053056 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 6a5c5590c7d59e1b896774e585212805732b5471 vcs-type: git vendor: Red Hat, Inc. 
version: "1.15" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-04-21T05:07:45Z" Id: sha256:8fbbe930e704e3c67bcb081b63dcf705304e35f7a42a7a9dac8b169ce4c94aa6 Size: 131781843 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:14de89e89efc97aee3b50141108b7833708c3a93ad90bf89940025ab5267ba86 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:14de89e89efc97aee3b50141108b7833708c3a93ad90bf89940025ab5267ba86 resourceVersion: "14155" uid: b2044c96-0f73-410c-aebd-c11c8c3ad90c - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:50be7bb6ce3ddb41606e1956ba5c61072699ac536980f260a0db6dc59c8013fe size: 39575081 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7646767ee9c1a95da81f72abc27df878a7d269206a1b13b6c9800ab249e506fc size: 91102728 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.23 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2025-10-22T20:10:44 com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.buildah.version: 1.33.12 io.cekit.version: 4.13.0.dev0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" org.opencontainers.image.documentation: https://rh-openjdk.github.io/redhat-openjdk-containers/ release: "3.1761163790" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.23-3.1761163790 usage: https://rh-openjdk.github.io/redhat-openjdk-containers/ vcs-ref: b40c568e2fa6d032648af2c70f3fe4f0cbf5ce66 vcs-type: git vendor: Red Hat, Inc. 
version: "1.23" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-10-22T20:21:26Z" Id: sha256:dd5977348fec2f471126802b174ae69c312e40255d812da77a5020200b13ccd5 Size: 130697700 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:15ca7a76fdcca80b800ea420857782badd4960d99d120322c255462c098ed641 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:15ca7a76fdcca80b800ea420857782badd4960d99d120322c255462c098ed641 resourceVersion: "13512" uid: 3642623d-aa7a-4ca4-877e-efc18b15ef52 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c8c24bf4ef435ee29d7495ca732a4d82374c1a11c25ca6aae12f997f45ca5a26 size: 39733597 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2444aca3edc3c9e28e56a5c4d7c88c3051c17434b6856023181a3334b6674430 size: 62785065 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - '"./${DOTNET_DEFAULT_CMD}"' Env: - container=oci - HOME=/opt/app-root - PATH=/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - DOTNET_APP_PATH=/opt/app-root/app - DOTNET_DATA_PATH=/opt/app-root/data - DOTNET_DEFAULT_CMD=default-cmd.sh - DOTNET_RUNNING_IN_CONTAINER=true - NUGET_XMLDOC_MODE=skip - ASPNETCORE_URLS=http://*:8080 - APP_UID=1001 - DOTNET_VERSION=8.0.21 - ASPNET_VERSION=8.0.21 ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T16:25:34Z" com.redhat.component: dotnet-80-runtime-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Platform for running .NET 8 applications distribution-scope: public dotnet_version: 8.0.21 io.buildah.version: 1.41.4 io.k8s.description: Platform for running .NET 8 applications io.k8s.display-name: .NET 8 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: runtime,.net,dotnet,dotnetcore,dotnet80-runtime io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: Red Hat, Inc. name: ubi8/dotnet-80-runtime org.opencontainers.image.revision: 4907080e4fc083e8b03727da3ea81a299a8b009c release: "1761063846" summary: .NET 8 runtime url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: 4907080e4fc083e8b03727da3ea81a299a8b009c vcs-type: git vendor: Red Hat, Inc. 
version: "8.0" User: "1001" WorkingDir: /opt/app-root/app ContainerConfig: {} Created: "2025-10-21T16:25:46Z" Id: sha256:9237965377fe470e6f8145631e1cb8b993b464406af3bca26d8da22d8f2bdf62 Size: 102531391 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/dotnet-80-runtime@sha256:16aacc57365922ce9329f1306153e444a02c4883b5b6ea648b0e47ef286df396 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:16aacc57365922ce9329f1306153e444a02c4883b5b6ea648b0e47ef286df396 resourceVersion: "13456" uid: b7e5d4d4-f9fd-476e-b4c8-fb6b70aeea80 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:06f86e50a0b74ff9eb161a7d781228877c90e8ff57e9689e8cb8b0f092a2a9f9 size: 39268171 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:1500af7621666bdae42658c39c0dc66ea092cda488d5e24241f2d81d1ad8afe1 size: 166780168 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:52c18fec11548362c672912bbfced716a5d386ca9d5bb66f7203b4dfbb223037 size: 5921596 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_HTTPS=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.14 - LANG=C.utf8 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift-rhel8 - FUSE_JAVA_IMAGE_VERSION=1.11 - JOLOKIA_VERSION=1.7.1.redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i:/usr/local/s2i - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Labels: architecture: x86_64 build-date: 2023-06-19T14:50:39 com.redhat.component: fuse-java-openshift-rhel-8-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 
io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "51.1687184685" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift-rhel8/images/1.11-51.1687184685 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: ab24e36120b1d3ad6a0b7d193f09dba8b20892d2 vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-06-19T14:54:07Z" Id: sha256:5aceffb1d18a3fdef160339fe059108396ce0c9130fcdb5c3f35fe1e6253f0b0 Size: 212010284 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift-rhel8@sha256:1b7ca459c309d850b031f63618176b460fa348899201a6a82a5a42c75d09563d kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:1b7ca459c309d850b031f63618176b460fa348899201a6a82a5a42c75d09563d resourceVersion: "14039" uid: 3ed941ea-b396-4615-8e42-3896877bff3b - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:551849931ba0dec31d2e0b8b4490c168ccf5c5c75215fa094860547b6ae6a94e size: 33442256 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:dc337e84388ed5a4fa04bbbdbb0c9bf6ed33b1e3e615edce4e1529805a3832fe size: 25291412 dockerImageManifestMediaType: application/vnd.oci.image.manifest.v1+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Env: - container=oci - PYTHON_VERSION=3.12 - PYTHONUNBUFFERED=1 - PYTHONIOENCODING=UTF-8 - LC_ALL=en_US.UTF-8 - LANG=en_US.UTF-8 - CNB_STACK_ID=com.redhat.stacks.ubi10-python-312 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - PIP_NO_CACHE_DIR=off - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el10 - PATH=/opt/app-root/bin:/opt/app-root/src/bin:/opt/app-root/src/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - BASH_ENV=/opt/app-root/etc/scl_enable - ENV=/opt/app-root/etc/scl_enable - PROMPT_COMMAND=. /opt/app-root/etc/scl_enable - SUMMARY=Minimal platform for building and running Python 3.12 applications - DESCRIPTION=Python 3.12 available as container is a base platform for building and running various Python 3.12 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. 
ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-09-24T20:11:21Z" com.redhat.component: python-312-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/o:redhat:enterprise_linux:10.0 description: Python 3.12 available as container is a base platform for building and running various Python 3.12 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. distribution-scope: public io.buildah.version: 1.41.3 io.buildpacks.stack.id: com.redhat.stacks.ubi10-python-312-minimal io.k8s.description: Python 3.12 available as container is a base platform for building and running various Python 3.12 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. io.k8s.display-name: Python 3.12 io.openshift.expose-services: 8080:http io.openshift.tags: builder,python,python312,python-312,rh-python312 maintainer: SoftwareCollections.org name: ubi10/python-312-minimal org.opencontainers.image.revision: ce7da1608545e0c2787de380fac2ebcc3d170bb6 release: "1758744656" summary: Minimal platform for building and running Python 3.12 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-python-container.git --context-dir=3.12-minimal/test/setup-test-app/ ubi10/python-312-minimal python-sample-app vcs-ref: ce7da1608545e0c2787de380fac2ebcc3d170bb6 vcs-type: git vendor: Red Hat, Inc. 
version: "10.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-09-24T20:11:28Z" Id: sha256:36731e5a8253474ecf2e70403210051c451908672c289e5db52ecb29beb5aaa3 Size: 58746949 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi10/python-312-minimal@sha256:1ce541f489f72d4b354b96e9ad8b8f4e27099534d1cf5cebdfd505a1825f6545 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:06Z" name: sha256:1ce541f489f72d4b354b96e9ad8b8f4e27099534d1cf5cebdfd505a1825f6545 resourceVersion: "14127" uid: e302893c-9010-44e3-941e-4cf00971f37a - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c43687042a41aad69fc526985ef2b82012c011db7e0e26faba4fc860ad32d88e size: 75837780 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2b7b014ba1b80abb29391141385bd32668571313647317d1d64d8b5cebb1f228 size: 1331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2825299765472d0b62c1ed19ebb564a8a191b88ce49639471a274d03e7f9151e size: 3910026 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5832c11acc9bb0420072ec62a6cdcea1d8226ed89e430e3086f81bc7866631c2 size: 84374210 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f1bb2f47b521fb59f8a4590286fa7203e8abd6f5e689f8e657e8b9b18e9874c6 size: 15586020 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.3 - JOLOKIA_VERSION=1.5.0.redhat-1 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 8ea581222c6a Image: 5b9d0512d6b123ac381febcad123629e032d0792908a1e96b7bc1608a38ac78e Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-06-28T22:26:13.092645 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Dhiraj Bokde name: fuse7/fuse-java-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 
1.8.0 release: "13.1561751841" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.3-13.1561751841 vcs-ref: 5ceb5b20cb9d869438898007c2b654565213a789 vcs-type: git vendor: Red Hat, Inc. version: "1.3" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.3 - JOLOKIA_VERSION=1.5.0.redhat-1 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 8ea581222c6a Image: sha256:c4ad52bee4ce91350bfeba9e154fb1ba8e54eaf2c4530a0867548fbbefce03a0 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-06-28T22:26:13.092645 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Dhiraj Bokde name: fuse7/fuse-java-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "13.1561751841" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.3-13.1561751841 vcs-ref: 5ceb5b20cb9d869438898007c2b654565213a789 vcs-type: git vendor: Red Hat, Inc. 
version: "1.3" User: "185" WorkingDir: /home/jboss Created: "2019-06-28T22:28:04Z" DockerVersion: 1.13.1 Id: sha256:7ef3a2c26dbf1c3ac74d9d42a26d1666fd31de7f8eed883a267c47d71090c4be Size: 179715363 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift@sha256:1d526f710f53ea2805441bbd04057242715d6fe2c91257b1ccd53c7a72c499be kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:1d526f710f53ea2805441bbd04057242715d6fe2c91257b1ccd53c7a72c499be resourceVersion: "14048" uid: 01866d35-623c-43e7-9b84-89f33728d0f0 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8e663e919f6cd0805d39d14202a5c0b789e7df3c3051c54170b428086a1c9a91 size: 76431158 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:1e6175de2c956530fa18c8a30722bf828d70a720afa2f2e481bfb4c520963c91 size: 1550 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2f43ddde7a16cdb478bdd65bc11fa9d36ce8f9b4d79a9fd14fb55eca50896695 size: 114124591 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.1 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: f1d2c1bacaf9 Image: 52c600ad0bf93b882f403ca5caf69c0892efc98d71b70c520f10e623d9022e3e Labels: architecture: x86_64 build-date: 2021-07-19T14:11:58.537508 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and 
flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: openjdk/openjdk-11-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "14" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.1-14 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 0d5a34f0cee16a892d09e0256bfd9f447b72497b vcs-type: git vendor: Red Hat, Inc. version: "1.1" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.1 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: f1d2c1bacaf9 Image: sha256:78765e2d17dad7478c83077d01d1c5d50b6d9425f59e33991c0b2c553737dbe0 Labels: architecture: x86_64 build-date: 2021-07-19T14:11:58.537508 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: openjdk/openjdk-11-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "14" summary: Source To Image (S2I) image for Red Hat 
OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.1-14 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 0d5a34f0cee16a892d09e0256bfd9f447b72497b vcs-type: git vendor: Red Hat, Inc. version: "1.1" User: "185" WorkingDir: /home/jboss Created: "2021-07-19T14:17:41Z" DockerVersion: 1.13.1 Id: sha256:129324a49414cc20ac92ca68484b9fb50f7d17881f908602305671db24918672 Size: 190564655 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:1d68b58a73f4cf15fcd886ab39fddf18be923b52b24cb8ec3ab1da2d3e9bd5f6 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:1d68b58a73f4cf15fcd886ab39fddf18be923b52b24cb8ec3ab1da2d3e9bd5f6 resourceVersion: "14098" uid: f984a47d-dab7-42c9-8ac4-1f8755d0d4c3 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:172ffdf04fe0ffd5a0117272da0333271a3c558dbeaf357e7412638d99a1e462 size: 154305621 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e88444eb2ec6d1c6fb00d9a743941e9840594479617d797c0b5cf81d8f4d5d5f size: 81543968 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el8 - NODEJS_VER=20 - PYTHON_VERSION=3.9 - PATH=/opt/app-root/src/.local/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PYTHONUNBUFFERED=1 - PYTHONIOENCODING=UTF-8 - LC_ALL=en_US.UTF-8 - LANG=en_US.UTF-8 - CNB_STACK_ID=com.redhat.stacks.ubi8-python-39 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - PIP_NO_CACHE_DIR=off - SUMMARY=Platform for building and running Python 3.9 applications - DESCRIPTION=Python 3.9 available as container is a base platform for building and running various Python 3.9 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. - BASH_ENV=/opt/app-root/bin/activate - ENV=/opt/app-root/bin/activate - PROMPT_COMMAND=. 
/opt/app-root/bin/activate ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-30T16:33:14Z" com.redhat.component: python-39-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Python 3.9 available as container is a base platform for building and running various Python 3.9 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. distribution-scope: public io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.ubi8-python-39 io.k8s.description: Python 3.9 available as container is a base platform for building and running various Python 3.9 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. io.k8s.display-name: Python 3.9 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,python,python39,python-39,rh-python39 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/python-39 org.opencontainers.image.revision: 4600607c4ef2758fdb8cd425e7eb66493a22f2aa release: "1761841936" summary: Platform for building and running Python 3.9 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-python-container.git --context-dir=3.9/test/setup-test-app/ ubi8/python-39 python-sample-app vcs-ref: 4600607c4ef2758fdb8cd425e7eb66493a22f2aa vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-30T16:33:36Z" Id: sha256:d1d5d4d918f0e995fc6971ccc8da1a42ea259154c7ea1398224e5c88ddcb240f Size: 331411863 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/python-39@sha256:1ef62ca43bc7b5be382470644797926cecd419f1496a471cc230d71147b8f878 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:06Z" name: sha256:1ef62ca43bc7b5be382470644797926cecd419f1496a471cc230d71147b8f878 resourceVersion: "14132" uid: adddc388-175d-4aea-93bc-2b9b5b8cd8bd - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:872582724f337bcc41b829d3b854567e146ab62aa3c7de0b37e18617d38f5d08 size: 76246809 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b13ffc206103620a7d59e4f5b72279b53e317ade5d545a3daa06ab9bed270f92 size: 1409 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b7f1305ca252f66148776525dde2bb4df6c83494633a8164b3fc6b1560b711bf size: 4028980 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9fcb9eb95cb77715beb5cf6e769bfb055fe46ac0cad1cdf99d229bce80c5b3b9 size: 87034685 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ead4a319242f483b7e6020227d3351779b2a02250160e2c859c109d8acb2a139 size: 15179641 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.7 - JOLOKIA_VERSION=1.6.2.redhat-00002 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages - JAVACONFDIRS=/opt/rh/rh-maven35/root/etc/java - XDG_CONFIG_DIRS=/opt/rh/rh-maven35/root/etc/xdg:/etc/xdg - XDG_DATA_DIRS=/opt/rh/rh-maven35/root/usr/share:/usr/local/share:/usr/share ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: d0feb8001879 Image: feaf94cd74565b914b1ae1d2e0e5bf488105c208b19ba847f41daa74f1f41cba Labels: architecture: x86_64 build-date: 2020-11-04T17:18:27.721406 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i 
io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "12.1604505243" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.7-12.1604505243 vcs-ref: cc40f7e04d3a96ec8a71069f58939519a05f565c vcs-type: git vendor: Red Hat, Inc. version: "1.7" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.7 - JOLOKIA_VERSION=1.6.2.redhat-00002 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages - JAVACONFDIRS=/opt/rh/rh-maven35/root/etc/java - XDG_CONFIG_DIRS=/opt/rh/rh-maven35/root/etc/xdg:/etc/xdg - XDG_DATA_DIRS=/opt/rh/rh-maven35/root/usr/share:/usr/local/share:/usr/share ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: d0feb8001879 Image: sha256:7079b1364ee9644e1bec5a59428f3788bd245a8a94c11a95a5bb4486028c81ef Labels: architecture: x86_64 build-date: 2020-11-04T17:18:27.721406 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "12.1604505243" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.7-12.1604505243 vcs-ref: cc40f7e04d3a96ec8a71069f58939519a05f565c vcs-type: git vendor: Red Hat, Inc. 
version: "1.7" User: "185" WorkingDir: /home/jboss Created: "2020-11-04T17:19:51Z" DockerVersion: 1.13.1 Id: sha256:48fa17c7dad9bcbf3021e3923f9c1520a06be86b36b59630ce17a3f2cd7d6c51 Size: 182498026 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift@sha256:1fe8afafb4a8cfe086dd3c3f59fc717ccc8924e570deaed38f4751962aed4211 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:1fe8afafb4a8cfe086dd3c3f59fc717ccc8924e570deaed38f4751962aed4211 resourceVersion: "14052" uid: f0be263c-f364-4139-984b-43a8a4f93b6f - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a20dc09567a04bdff2ebfaa3d3917f64d7620555e6354d53b43dd7ebb0e0f575 size: 79751689 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:131f13a17b9e93af2594a586fed39af6ce5b77915f91df6b8a250f6117f20c95 size: 126692656 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.16 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-11-14T15:34:44 com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.9.1 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: openjdk/openjdk-11-rhel7 org.jboss.container.deployments-dir: 
/deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.16-1 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 26df1d32c23571c75bce3a5832334f8eb5dbcccc vcs-type: git vendor: Red Hat, Inc. version: "1.16" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-11-14T15:38:26Z" Id: sha256:c00ed3cbc5358621df245781025be71249f17c03105f030018ecf6de29e3f88e Size: 206474283 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:2254dc2f421f496b504aafbbd8ea37e660652c4b6b4f9a0681664b10873be7fe kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:2254dc2f421f496b504aafbbd8ea37e660652c4b6b4f9a0681664b10873be7fe resourceVersion: "14104" uid: 2127959d-590e-47a5-9946-89e7cef1237a - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ac3ec768cc7844a4e7923545731691c54ac62eeac11245c532b348cc01cdbfd3 size: 109500783 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.18 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2024-01-18T20:31:06 com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.9.1 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" org.opencontainers.image.documentation: https://jboss-container-images.github.io/openjdk/ release: "2.1705602291" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.18-2.1705602291 usage: 
https://jboss-container-images.github.io/openjdk/ vcs-ref: db4b85aa377961470d01443597a69e4ef6daf294 vcs-type: git vendor: Red Hat, Inc. version: "1.18" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-01-18T20:39:24Z" Id: sha256:6f15e81bfb09e0e3d2a9d8e8cbb08622cf2bd19aa248e7f5abfd6bb3fbb36e37 Size: 148827594 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:229ee7b88c5f700c95d557d0b37b8f78dbb6b125b188c3bf050cfdb32aec7962 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:229ee7b88c5f700c95d557d0b37b8f78dbb6b125b188c3bf050cfdb32aec7962 resourceVersion: "14195" uid: 7e75e29d-702e-4451-9fe2-2c4c3e9fc86d - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:378837c0e24ad4a2e33f0eb3d68dc0c31d9a7dbbd5357d4acafec1d3a7930602 size: 74923740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e17262bc23414bd3c0e9808ad7a87b055fe5afec386da42115a839ea2083d233 size: 1303 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8d9c78c7f9887170d08c57ec73b21e469b4120682a2e82883217535294878c5d size: 3805344 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f4350d5126d0895bb50c2c082a415ff417578d34508a0ef07ec20cebf661ebb7 size: 70368140 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:22d34e5ceb75822a5bc6be36447b7cee001ca3dbd61bae6a1e4fcd70db076460 size: 209163998 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0e3082a97b6f1819035738c7a146ca68a8f3652dbdd52eb05ec3b7f779e1b73e size: 268674990 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4380ed6ced5123a36dc514aa4180b10c1c9acca2ceedf4c5e0c2c7e8cee82188 size: 285285600 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss-eap-7/eap71-openshift - JBOSS_IMAGE_VERSION=1.3 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_EAP_VERSION=7.1.4.GA - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.1.4.GA - HTTPS_ENABLE_HTTP2=true - JOLOKIA_VERSION=1.5.0 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - DEFAULT_ADMIN_USERNAME=eapadmin - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - MAVEN_VERSION=3.5 - WILDFLY_CAMEL_VERSION=5.2.0.fuse-710021-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 3183206101f3 Image: c449653522f4e40ec8b87d5a846191c1e1ecfd39d0ab78d36aef85c5068c10f0 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-10-17T22:42:58.963999 com.redhat.build-host: cpt-0001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 description: Platform for building and running Apache Camel applications on EAP 7.1 distribution-scope: public io.cekit.version: 2.1.4 io.fabric8.s2i.version.jolokia: 
1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Cloud Enablement Feedback name: fuse7/fuse-eap-openshift org.concrt.version: 1.4.1 org.jboss.product: eap org.jboss.product.eap.version: 7.1.4.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.1.4.GA release: "5.1539812399" summary: Platform for building and running Apache Camel applications on EAP 7.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.1-5.1539812399 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: afdbbdca5de2000dace048c344f9a15a962a393b vcs-type: git vendor: Red Hat, Inc. version: "1.1" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss-eap-7/eap71-openshift - JBOSS_IMAGE_VERSION=1.3 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_EAP_VERSION=7.1.4.GA - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.1.4.GA - HTTPS_ENABLE_HTTP2=true - JOLOKIA_VERSION=1.5.0 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - DEFAULT_ADMIN_USERNAME=eapadmin - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - MAVEN_VERSION=3.5 - WILDFLY_CAMEL_VERSION=5.2.0.fuse-710021-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 3183206101f3 Image: sha256:4ab827020ad87739706046e64d592b99d5d8dc5c3899ded508ff1ac78aeeaf8e Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-10-17T22:42:58.963999 com.redhat.build-host: cpt-0001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 description: Platform for building and running Apache Camel applications on EAP 7.1 distribution-scope: public io.cekit.version: 2.1.4 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Cloud Enablement Feedback name: fuse7/fuse-eap-openshift org.concrt.version: 1.4.1 org.jboss.product: eap org.jboss.product.eap.version: 7.1.4.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.1.4.GA release: "5.1539812399" summary: Platform for building and running Apache Camel applications on EAP 7.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.1-5.1539812399 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: afdbbdca5de2000dace048c344f9a15a962a393b vcs-type: git vendor: Red Hat, Inc. 
version: "1.1" User: "185" WorkingDir: /home/jboss Created: "2018-10-17T22:43:58Z" DockerVersion: 1.13.1 Id: sha256:ba2baa466bb3166e9c5b51236146e10445f23586eb900902047a5b4477e5e3c2 Size: 912229825 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift@sha256:230cd475733320b85bef99f7634b0f73ba82e323865d1e46be6d76fe03c337e8 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:230cd475733320b85bef99f7634b0f73ba82e323865d1e46be6d76fe03c337e8 resourceVersion: "14020" uid: 6147b18d-26d6-4d23-864a-43490dec8c35 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:79c20e727b94ea36ae8776eb9e4065b60dc1d396564a6a91ebb6ee334dfb5cea size: 79001473 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ae4cda028ce1cdcaf1f079c166ecd8e396e8c387627cf1951367ee98807a5e27 size: 111642334 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - DEFAULT_ADMIN_USERNAME=eapadmin - HTTPS_ENABLE_HTTP2=true - JBOSS_HOME=/opt/eap - JBOSS_IMAGE_NAME=jboss-eap-7/eap-xp4-openjdk11-runtime-openshift-rhel8 - JBOSS_IMAGE_VERSION=4.0 - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api - LAUNCH_JBOSS_IN_BACKGROUND=true - MICROPROFILE_CONFIG_DIR_ORDINAL=500 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2025-02-05T10:36:53 com.redhat.component: jboss-eap-xp4-openjdk11-runtime-openshift-rhel8-container com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Enterprise Application Platform XP 4.0 OpenShift runtime image with OpenJDK 11 distribution-scope: public io.buildah.version: 1.33.8 io.cekit.version: 4.12.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.k8s.description: Base runtime image to run EAP XP server and application io.k8s.display-name: JBoss EAP XP runtime image io.openshift.expose-services: 8080:http io.openshift.tags: javaee,eap,eap7 maintainer: Red Hat name: jboss-eap-7/eap-xp4-openjdk11-runtime-openshift-rhel8 org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "89" summary: Red Hat JBoss Enterprise Application Platform XP 4.0 OpenShift runtime image with OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-eap-7/eap-xp4-openjdk11-runtime-openshift-rhel8/images/4.0-89 vcs-ref: 00387077df72f21059a527aa478178149208b1ba vcs-type: git vendor: Red Hat, Inc. 
version: "4.0" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-02-05T10:40:42Z" Id: sha256:d010d63fec4a6f088010579bd3f3b07052213204a69eb8122d96db754b17594b Size: 190667964 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-eap-7/eap-xp4-openjdk11-runtime-openshift-rhel8@sha256:2406759804b3f8c4ea6a6ba582a1ab82be48362a8a815a82bb4aa04bf81e86e3 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:2406759804b3f8c4ea6a6ba582a1ab82be48362a8a815a82bb4aa04bf81e86e3 resourceVersion: "13319" uid: a33f1392-2eef-4f84-9e1d-9f0aadf8b716 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:90ce8aca36136fcb5647ba06362d28586593fd2bfa1e83535e2d9d530eb930f3 size: 79225652 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a62cdec22511d0921fcd3d4a71e091a8461e7c9eb7767793a61e511c0bbbd996 size: 121935734 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ae77b936d1da82edf080dfd85b164277a30b1325095a8ed7f70c85ce48b7c446 size: 34792954 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jws-5.7/tomcat/bin/launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - CATALINA_OPTS=-Djava.security.egd=file:/dev/./urandom - JBOSS_PRODUCT=webserver - JBOSS_WEBSERVER_VERSION=5.7.3 - JPDA_ADDRESS=8000 - JWS_HOME=/opt/jws-5.7/tomcat - PRODUCT_VERSION=5.7.3 - TOMCAT_VERSION=9.0.62 - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JWS_S2I_MODULE=/opt/jboss/container/jws/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - HOME=/home/jboss - AB_PROMETHEUS_ENABLE=True - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jws-jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9404 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_IMAGE_NAME=jboss-webserver-5/jws57-openjdk11-rhel8-openshift - JBOSS_IMAGE_VERSION=5.7.3 - STI_BUILDER=jee ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9404/tcp: {} Labels: architecture: x86_64 build-date: 2023-06-19T15:03:22 com.redhat.component: jboss-webserver-57-openjdk11-rhel8-openshift-container com.redhat.deployments-dir: /opt/jws-5.7/tomcat/webapps com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: JPDA_ADDRESS:8000 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK11 on UBI8 distribution-scope: public io.buildah.version: 
1.29.0 io.cekit.version: 4.6.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running web applications on JBoss Web Server 5.7 with OpenJDK11 - Tomcat v9 io.k8s.display-name: JBoss Web Server 5.7 OpenJDK11 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,tomcat9 maintainer: szappis@redhat.com name: jboss-webserver-5/jws57-openjdk11-rhel8-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/jws-5.7/tomcat/webapps org.jboss.product: webserver-tomcat9 org.jboss.product.openjdk.version: "11" org.jboss.product.version: 5.7.3 org.jboss.product.webserver-tomcat9.version: 5.7.3 release: "2.1687186259" summary: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK11 on UBI8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-webserver-5/jws57-openjdk11-rhel8-openshift/images/5.7.3-2.1687186259 vcs-ref: 0ccb4d1a73fd7227e8ade7173e71dce4d74ddf2d vcs-type: git vendor: Red Hat, Inc. version: 5.7.3 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-06-19T15:09:29Z" Id: sha256:4a538bbff8f5f1e215af6c5879498d47452671f6a71a8c4ec4ddaf12fb9b7558 Size: 235996644 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-webserver-5/jws57-openjdk11-openshift-rhel8@sha256:25f6a298e4505c38e7220e8a654852de3822d40a99b5f47da657251f31c3ffc3 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:25f6a298e4505c38e7220e8a654852de3822d40a99b5f47da657251f31c3ffc3 resourceVersion: "13701" uid: 5544c692-51d7-4cde-af1f-c403e45f2c0e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:78ba342552b07b7f64acdfee9b6cb3ae0a484e6bb39e5ee9d8fd99759eca0224 size: 39673781 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:23cac4d9b8155ea96002c5e7a388bc9b7ce57817342fc8ea166e7ba5865a38cd size: 62786710 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5b141278de81150c0fb400b1b75a39f53c238ed5165bfcd79aa5f22361cf2301 size: 150590071 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - /bin/bash Env: - container=oci - HOME=/opt/app-root - DOTNET_APP_PATH=/opt/app-root/app - DOTNET_DATA_PATH=/opt/app-root/data - DOTNET_DEFAULT_CMD=default-cmd.sh - DOTNET_RUNNING_IN_CONTAINER=true - NUGET_XMLDOC_MODE=skip - ASPNETCORE_URLS=http://*:8080 - APP_UID=1001 - DOTNET_VERSION=8.0.21 - ASPNET_VERSION=8.0.21 - PATH=/opt/app-root/src/.local/bin:/opt/app-root/src/bin:/opt/app-root/.dotnet/tools:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - STI_SCRIPTS_PATH=/usr/libexec/s2i - DOTNET_GENERATE_ASPNET_CERTIFICATE=false - DOTNET_NOLOGO=true - DOTNET_SDK_VERSION=8.0.121 - DOTNET_USE_POLLING_FILE_WATCHER=true ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-22T02:39:40Z" com.redhat.component: dotnet-80-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Platform for building and running .NET 8 
applications distribution-scope: public dotnet_version: 8.0.21 io.buildah.version: 1.41.4 io.k8s.description: Platform for building and running .NET 8 applications io.k8s.display-name: .NET 8 SDK io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,.net,dotnet,dotnetcore,dotnet-80 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: Red Hat, Inc. name: ubi8/dotnet-80 org.opencontainers.image.revision: 84854597b481c0a66f4f91103c2cd97c939bb22e release: "1761100714" sdk_version: 8.0.121 summary: .NET 8 SDK url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: 84854597b481c0a66f4f91103c2cd97c939bb22e vcs-type: git vendor: Red Hat, Inc. version: "8.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-22T02:39:56Z" Id: sha256:27a25c7f56d86d81fc6213242e2f31683cf6679777a750ea01f2f6a058a91f48 Size: 253069220 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/dotnet-80@sha256:26c16d866c58f4d47901f8a009d39b5f2ddc76a5d7b2cb5b0065ed8cd8eea1b6 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:26c16d866c58f4d47901f8a009d39b5f2ddc76a5d7b2cb5b0065ed8cd8eea1b6 resourceVersion: "13397" uid: a8cf9840-1b68-4e97-adaa-1647f04f079d - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:172ffdf04fe0ffd5a0117272da0333271a3c558dbeaf357e7412638d99a1e462 size: 154305621 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:1238570471bcd397e18009d29d1dac73c8db95daf823622782d740ad3bd280dd size: 40584254 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - NODEJS_VER=20 - PHP_VERSION=8.2 - PHP_VER_SHORT=82 - NAME=php - SUMMARY=Platform for building and running PHP 8.2 applications - DESCRIPTION=PHP 8.2 available as container is a base platform for building and running various PHP 8.2 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. 
- PHP_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/php/ - APP_DATA=/opt/app-root/src - PHP_DEFAULT_INCLUDE_PATH=/usr/share/pear - PHP_SYSCONF_PATH=/etc - PHP_HTTPD_CONF_FILE=php.conf - HTTPD_CONFIGURATION_PATH=/opt/app-root/etc/conf.d - HTTPD_MAIN_CONF_PATH=/etc/httpd/conf - HTTPD_MAIN_CONF_D_PATH=/etc/httpd/conf.d - HTTPD_MODULES_CONF_D_PATH=/etc/httpd/conf.modules.d - HTTPD_VAR_RUN=/var/run/httpd - HTTPD_DATA_PATH=/var/www - HTTPD_DATA_ORIG_PATH=/var/www - HTTPD_VAR_PATH=/var ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-30T16:32:24Z" com.redhat.component: php-82-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: PHP 8.2 available as container is a base platform for building and running various PHP 8.2 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-php-container io.buildah.version: 1.41.4 io.k8s.description: PHP 8.2 available as container is a base platform for building and running various PHP 8.2 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. io.k8s.display-name: Apache 2.4 with PHP 8.2 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,php,php82,php-82 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/php-82 org.opencontainers.image.revision: e9685c355d0f2ed4ac052b288fede6fb199d1695 release: "1761841874" summary: Platform for building and running PHP 8.2 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-php-container.git --context-dir=8.2/test/test-app ubi8/php-82 sample-server vcs-ref: e9685c355d0f2ed4ac052b288fede6fb199d1695 vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-30T16:32:39Z" Id: sha256:0fbefa5d8a27e793390446e325849dfc6ca1ac8276984c833fdf4448c87414de Size: 290452878 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/php-82@sha256:2788719b2da9f43a904d670b43cf29445a687a1ad6eb96e4a052e15cd3188a0f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:2788719b2da9f43a904d670b43cf29445a687a1ad6eb96e4a052e15cd3188a0f resourceVersion: "14081" uid: d9d7a912-d7ab-410c-a8bc-35cd9672ce70 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2399a674882f7d84f588927a959aeebea8b0332dcce4eac3cc1ae4ac5a8e54df size: 20385592 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - NAME=nginx - NGINX_VERSION=1.24 - NGINX_SHORT_VER=124 - VERSION=0 - SUMMARY=Platform for running nginx 1.24 or building nginx-based application - DESCRIPTION=Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.24 daemon. The image can be used as a base image for other applications based on nginx 1.24 web server. Nginx server image can be extended using source-to-image tool. - NGINX_CONFIGURATION_PATH=/opt/app-root/etc/nginx.d - NGINX_CONF_PATH=/etc/nginx/nginx.conf - NGINX_DEFAULT_CONF_PATH=/opt/app-root/etc/nginx.default.d - NGINX_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/nginx - NGINX_APP_ROOT=/opt/app-root - NGINX_LOG_PATH=/var/log/nginx - NGINX_PERL_MODULE_PATH=/opt/app-root/etc/perl ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T20:11:23Z" com.redhat.component: nginx-124-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.24 daemon. The image can be used as a base image for other applications based on nginx 1.24 web server. Nginx server image can be extended using source-to-image tool. 
distribution-scope: public help: For more information visit https://github.com/sclorg/nginx-container io.buildah.version: 1.41.4 io.k8s.description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.24 daemon. The image can be used as a base image for other applications based on nginx 1.24 web server. Nginx server image can be extended using source-to-image tool. io.k8s.display-name: Nginx 1.24 io.openshift.expose-services: 8443:https io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nginx,nginx-124 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/nginx-124 org.opencontainers.image.revision: b3c974eee06912b47116c9135d3c8562e0e44ee4 release: "1760386249" summary: Platform for running nginx 1.24 or building nginx-based application url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi9/nginx-124:latest vcs-ref: b3c974eee06912b47116c9135d3c8562e0e44ee4 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T20:11:33Z" Id: sha256:541d6ee22bb238479aa8d473b97608f13deb3f1be08a60ed925b0a6aebcaf325 Size: 117486923 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/nginx-124@sha256:27901936ab8866d1d1293479bb3448cb3ff98cdcaa8242926a9c49896a864c2f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:27901936ab8866d1d1293479bb3448cb3ff98cdcaa8242926a9c49896a864c2f resourceVersion: "14059" uid: f15635b7-feaf-4d78-95fd-5112f7c378ed - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:72d37ae8760a66c6d3507cc766ab29e2e49082a565e2a531e4b0bea3c4385392 size: 79141222 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:710801b761b9b0683fc2c26ab4fe6d20214d236e60e23b1723bf37c30410d3a9 size: 17335836 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:deafbb8752779eee15f2f78eadddcff9f44c9abd53d93af6713bfe97fb5410ee size: 197753182 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:89357b5590a75c788c2e98c7c1c12be14ee0a3775ef687455f47a4a2b48d1363 size: 167451580 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - NODEJS_VER=16 - NAME=golang - GO_MAJOR_VERSION=1 - GO_MINOR_VERSION=18 - GO_PATCH_VERSION=10 - CONTAINER_NAME=rhel9/go-toolset - VERSION=1.18.10 - SUMMARY=Platform for building and running Go Applications - DESCRIPTION=Go Toolset available as a container is a base platform for building and running various Go applications and frameworks. Go is an easy to learn, powerful, statically typed language in the C/C++ tradition with garbage collection, concurrent programming support, and memory safety features. 
Labels: architecture: x86_64 build-date: 2023-05-02T08:21:04 com.redhat.component: go-toolset-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Go Toolset available as a container is a base platform for building and running various Go applications and frameworks. Go is an easy to learn, powerful, statically typed language in the C/C++ tradition with garbage collection, concurrent programming support, and memory safety features. distribution-scope: public io.buildah.version: 1.27.3 io.k8s.description: Go Toolset available as a container is a base platform for building and running various Go applications and frameworks. Go is an easy to learn, powerful, statically typed language in the C/C++ tradition with garbage collection, concurrent programming support, and memory safety features. io.k8s.display-name: Go 1.18.10 io.openshift.expose-services: "" io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,golang,golang118,rh-golang118,go io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: Red Hat, Inc. name: rhel9/go-toolset release: "4.1683015641" summary: Platform for building and running Go Applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel9/go-toolset/images/1.18.10-4.1683015641 vcs-ref: 7348cae7eba784f56a23908bc7a9104d0af9009c vcs-type: git vendor: Red Hat, Inc. version: 1.18.10 User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2023-05-02T08:23:45Z" Id: sha256:89dca1e330cacd3845f8dd96b7d8c422ad018cf8dc7b9b9c719f4702c26916de Size: 461702829 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/go-toolset@sha256:2861514e125903261aa0004883a7f7aeeb4c189b2d0d175372d1edc111942eda kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:2861514e125903261aa0004883a7f7aeeb4c189b2d0d175372d1edc111942eda resourceVersion: "13452" uid: 2d1723b9-e6b7-432a-8581-4db21e5284f5 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:76608b6b9d54251299c5d3be69fdf53e05f97a3735bbcd5889c30ebb78608428 size: 75827462 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3c81a5d20855a6cef8b997d709410e047e2839b5ad113f4c34d25e9fae9e3beb size: 1266 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:cf313e71c9fd8209b9df2ee0fb7471c014b3ec6f7144546b88ad7c46b5fb2cd4 size: 3891763 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:07c69678314f4cb7384c115ffd5040765fe1fe42db1b8c789af11ce865771f7b size: 84375848 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e7c59289a7545b67b0e12162f8ce6b637cabaf93171e0d7e1bf3e98d286456c7 size: 26655623 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.2 - JOLOKIA_VERSION=1.5.0.redhat-1 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - 
AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 4072265dcbc2 Image: dc97831ef9bf7072787e2aa389524b0307d3fa3f8034d1c21c67d82b14f771e2 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-04-17T17:00:50.328059 com.redhat.build-host: cpt-0010.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/licenses/eulas description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Dhiraj Bokde name: fuse7/fuse-java-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "12.1555516864" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.2-12.1555516864 vcs-ref: 6bdabdeb4498074b6702f3c010f198d7176593da vcs-type: git vendor: Red Hat, Inc. version: "1.2" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.2 - JOLOKIA_VERSION=1.5.0.redhat-1 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 4072265dcbc2 Image: sha256:46a2220b289e3c56f193a39c4b8510370f60e67ebf43a746f5f040e2dbf4f8c2 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-04-17T17:00:50.328059 com.redhat.build-host: cpt-0010.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/licenses/eulas description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Dhiraj Bokde name: fuse7/fuse-java-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: 
/deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "12.1555516864" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.2-12.1555516864 vcs-ref: 6bdabdeb4498074b6702f3c010f198d7176593da vcs-type: git vendor: Red Hat, Inc. version: "1.2" User: "185" WorkingDir: /home/jboss Created: "2019-04-17T17:03:28Z" DockerVersion: 1.13.1 Id: sha256:62f2af090f787d63bf85381a756334bfda633d175937b2dba90742cb6e16de95 Size: 190757902 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift@sha256:29d831cb1f5fe839ab6e8c59d57f695094938a9f97e071bdbc854e54a90ecb94 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:29d831cb1f5fe839ab6e8c59d57f695094938a9f97e071bdbc854e54a90ecb94 resourceVersion: "14046" uid: ea5c0a9d-7d69-493b-aee0-439ade3631f0 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d246e53da6241a619b7dcea7d4403071b9e1961797aa4f6c766786e29732651c size: 76526594 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:91bd2c541a17a588359eb054815718e41f871d03b4d4daf7b3584b25fbdcbb67 size: 1563 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:118a6721a11912d1a6b148b280cbac16e4d55fbafb7d347ac5ccf1cec9abbe66 size: 109507768 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.10 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: f18f459d223a Image: e5bb351eb440daf702d40be0e683db2a675d02bb2a892d56046ba4d19228f8c5 Labels: architecture: x86_64 build-date: 2021-12-02T07:23:04.378078 com.redhat.build-host: 
cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1638429538" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.10-1.1638429538 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 0041074ffb8ad7c1f24a784a9a16da09ec7f0493 vcs-type: git vendor: Red Hat, Inc. version: "1.10" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.10 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: f18f459d223a Image: sha256:2c422c19e229e6ceeec8689dbdc20af3334ba5413a394d9ef3ba7403ba366341 Labels: architecture: x86_64 build-date: 2021-12-02T07:23:04.378078 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 
io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1638429538" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.10-1.1638429538 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 0041074ffb8ad7c1f24a784a9a16da09ec7f0493 vcs-type: git vendor: Red Hat, Inc. version: "1.10" User: "185" WorkingDir: /home/jboss Created: "2021-12-02T07:26:57Z" DockerVersion: 1.13.1 Id: sha256:c282d2158da6fc28f5d2cf159a8d1788ac3e822c43a06cec4108f89cbaa99696 Size: 186043467 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:2ae058ee7239213fb495491112be8cc7e6d6661864fd399deb27f23f50f05eb4 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:2ae058ee7239213fb495491112be8cc7e6d6661864fd399deb27f23f50f05eb4 resourceVersion: "14161" uid: e3bffec7-fa9d-4100-b35e-ce3f1f7b4e39 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0866a63d5aaea50186eee6e8cf59eadea0b79fdfc3fa6c6ef75080baf6e6b9f9 size: 20330989 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - NAME=nginx - NGINX_VERSION=1.22 - NGINX_SHORT_VER=122 - VERSION=0 - SUMMARY=Platform for running nginx 1.22 or building nginx-based application - DESCRIPTION=Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.22 daemon. The image can be used as a base image for other applications based on nginx 1.22 web server. Nginx server image can be extended using source-to-image tool. 
- NGINX_CONFIGURATION_PATH=/opt/app-root/etc/nginx.d - NGINX_CONF_PATH=/etc/nginx/nginx.conf - NGINX_DEFAULT_CONF_PATH=/opt/app-root/etc/nginx.default.d - NGINX_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/nginx - NGINX_APP_ROOT=/opt/app-root - NGINX_LOG_PATH=/var/log/nginx - NGINX_PERL_MODULE_PATH=/opt/app-root/etc/perl ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-28T04:21:38Z" com.redhat.component: nginx-122-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.22 daemon. The image can be used as a base image for other applications based on nginx 1.22 web server. Nginx server image can be extended using source-to-image tool. distribution-scope: public help: For more information visit https://github.com/sclorg/nginx-container io.buildah.version: 1.41.4 io.k8s.description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.22 daemon. The image can be used as a base image for other applications based on nginx 1.22 web server. Nginx server image can be extended using source-to-image tool. io.k8s.display-name: Nginx 1.22 io.openshift.expose-services: 8443:https io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nginx,nginx-122 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/nginx-122 org.opencontainers.image.revision: f59505117b0a342c3c7dd28004ca2c60276eb0cf release: "1761625269" summary: Platform for running nginx 1.22 or building nginx-based application url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi9/nginx-122:latest vcs-ref: f59505117b0a342c3c7dd28004ca2c60276eb0cf vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-28T04:21:47Z" Id: sha256:90463b415e63367ba5747d0b326e43e474360aa78e9e0f22c47e6fa82d3e94e3 Size: 117432320 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/nginx-122@sha256:2c7e510949ed2c19504d0aaed1ad891f8aa03cd649d04359cc6a2cdffc40b594 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:2c7e510949ed2c19504d0aaed1ad891f8aa03cd649d04359cc6a2cdffc40b594 resourceVersion: "14057" uid: 1a08a60e-52d4-459f-bcfd-ebc4dce9e0ed - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4752687a61a97d6f352ae62c381c87564bcb2f5b6523a05510ca1fb60d640216 size: 36442442 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0344366a246a0f7590c2bae4536c01f15f20c6d802b4654ce96ac81047bc23f3 size: 1740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:90be64a48170f43260bea55930b0ebe9f7bd5fc847a09b4d1022a95b20d8b854 size: 72355508 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.13 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: fd16d6f2e774 Image: d86420a5475900b43b2abf25ea61375bf6fef38baf6569782ed7241ce5e8e232 Labels: architecture: x86_64 build-date: 2022-06-15T16:29:14.850840 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1655306436" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.13-1.1655306436 vcs-ref: 3d6b138d69cf66fef47fe6e8c74e3c8145acfa38 vcs-type: git vendor: Red Hat, Inc. 
version: "1.13" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.13 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: fd16d6f2e774 Image: sha256:bf001a5d4970d8ad95fc54d04f68a812c5e43305dd8a41bd336e0d06a39ef3e1 Labels: architecture: x86_64 build-date: 2022-06-15T16:29:14.850840 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1655306436" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.13-1.1655306436 vcs-ref: 3d6b138d69cf66fef47fe6e8c74e3c8145acfa38 vcs-type: git vendor: Red Hat, Inc. 
version: "1.13" User: "185" WorkingDir: /home/jboss Created: "2022-06-15T16:31:58Z" DockerVersion: 1.13.1 Id: sha256:63ed77f7c4776e00061be61cb217a0aa2ec33e971df94b5c8fea0f4710c1ec1a Size: 108804727 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:2cee344e4cfcfdc9a117fd82baa6f2d5daa7eeed450e02cd5d5554b424410439 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:2cee344e4cfcfdc9a117fd82baa6f2d5daa7eeed450e02cd5d5554b424410439 resourceVersion: "14221" uid: dea4bfef-b23a-4e11-9250-c76e7423ec89 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e60f3410b0ae270c41e17f13f49e1e97f743d222162f589c4e4d5a3f51da928f size: 88591273 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-mysqld Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - MYSQL_VERSION=8.0 - APP_DATA=/opt/app-root/src - HOME=/var/lib/mysql - SUMMARY=MySQL 8.0 SQL database server - DESCRIPTION=MySQL is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MySQL mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MySQL databases on behalf of the clients. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/mysql - MYSQL_PREFIX=/usr ExposedPorts: 3306/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T20:15:54Z" com.redhat.component: mysql-80-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: MySQL is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MySQL mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MySQL databases on behalf of the clients. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: MySQL is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MySQL mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MySQL databases on behalf of the clients. 
io.k8s.display-name: MySQL 8.0 io.openshift.expose-services: 3306:mysql io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,mysql,mysql80,mysql-80 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel9/mysql-80 org.opencontainers.image.revision: c1fa8a6addd490e169d843595ca59aa902d7a7b7 release: "1760386499" summary: MySQL 8.0 SQL database server url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d -e MYSQL_USER=user -e MYSQL_PASSWORD=pass -e MYSQL_DATABASE=db -p 3306:3306 rhel9/mysql-80 vcs-ref: c1fa8a6addd490e169d843595ca59aa902d7a7b7 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "27" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T20:16:07Z" Id: sha256:e0d69a306b0f28204964dc50d393acbd1df568fe83fdc5df511f21fb309312f8 Size: 185690619 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel9/mysql-80@sha256:2ec1207cc75b74c26998f7c3386199a30213b37a05fab6e41a713d4a74bc4d5a kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:2ec1207cc75b74c26998f7c3386199a30213b37a05fab6e41a713d4a74bc4d5a resourceVersion: "13787" uid: 24cfde81-bbba-45da-9912-82e2ad1337bb - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5329d7039f252afc1c5d69521ef7e674f71c36b50db99b369cbb52aa9e0a6782 size: 39330100 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e2cf0521dced3a2dcaf10c83994ba00f009244b0515623829202b8f8c28a2b1c size: 107338232 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.16 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-07-19T16:04:12 com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To 
Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "2" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.16-2 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: 1e1e4cf1bf2d68ad0e1a2803e94e4627d30a2f8c vcs-type: git vendor: Red Hat, Inc. version: "1.16" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-07-19T16:21:30Z" Id: sha256:36812abee8139cf91d6567b616299fbe6e03508b736605cdbf0d9634293865d3 Size: 146697982 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8@sha256:2f59ad75b66a3169b0b03032afb09aa3cfa531dbd844e3d3a562246e7d09c282 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:2f59ad75b66a3169b0b03032afb09aa3cfa531dbd844e3d3a562246e7d09c282 resourceVersion: "14213" uid: 2f98c6a6-a4cd-4c75-afa2-886bf1854ac1 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:90ce8aca36136fcb5647ba06362d28586593fd2bfa1e83535e2d9d530eb930f3 size: 79225652 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:12926432467a23e8b5478a1586ea3142a76e3fc5a16ea1903bdee9d0af226065 size: 103110488 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:12bea5e832dcaf62a54ba28101cce9d90c254ae13f2c8480c105791c93ac43c4 size: 30963473 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jws-5.7/tomcat/bin/launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - CATALINA_OPTS=-Djava.security.egd=file:/dev/./urandom - JBOSS_PRODUCT=webserver - JBOSS_WEBSERVER_VERSION=5.7.3 - JPDA_ADDRESS=8000 - JWS_HOME=/opt/jws-5.7/tomcat - PRODUCT_VERSION=5.7.3 - TOMCAT_VERSION=9.0.62 - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - 
JBOSS_CONTAINER_JWS_S2I_MODULE=/opt/jboss/container/jws/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - HOME=/home/jboss - AB_PROMETHEUS_ENABLE=True - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jws-jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9404 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_IMAGE_NAME=jboss-webserver-5/jws57-openjdk8-rhel8-openshift - JBOSS_IMAGE_VERSION=5.7.3 - STI_BUILDER=jee ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9404/tcp: {} Labels: architecture: x86_64 build-date: 2023-06-19T15:02:36 com.redhat.component: jboss-webserver-57-openjdk8-rhel8-openshift-container com.redhat.deployments-dir: /opt/jws-5.7/tomcat/webapps com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: JPDA_ADDRESS:8000 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK8 on UBI8 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.6.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running web applications on JBoss Web Server 5.7 with OpenJDK8 - Tomcat v9 io.k8s.display-name: JBoss Web Server 5.7 OpenJDK8 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,tomcat9 maintainer: szappis@redhat.com name: jboss-webserver-5/jws57-openjdk8-rhel8-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/jws-5.7/tomcat/webapps org.jboss.product: webserver-tomcat9 org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 5.7.3 org.jboss.product.webserver-tomcat9.version: 5.7.3 release: "2.1687186197" summary: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK8 on UBI8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-webserver-5/jws57-openjdk8-rhel8-openshift/images/5.7.3-2.1687186197 vcs-ref: 465f6bbc1badd3c93f9f9273410f1b4369279836 vcs-type: git vendor: Red Hat, Inc. 
version: 5.7.3 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-06-19T15:08:24Z" Id: sha256:350073fc9719547602f4d809e8e73b708f320110ed17949d71357e6292e390b4 Size: 213341876 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-webserver-5/jws57-openjdk8-openshift-rhel8@sha256:301b093ae4fbc18f7d9d11b803d9da4220dad4556202a8de2f04377ff87c2f4d kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:301b093ae4fbc18f7d9d11b803d9da4220dad4556202a8de2f04377ff87c2f4d resourceVersion: "13798" uid: 93b45e02-ada6-4d84-bd22-b2b2e708dd5a - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7f2c2c4492b6b2d181be862a0a1d1b6f6851cb07244efbcb43d44f9936aa78d5 size: 80005019 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ea092d7970b26c24007a670fc6d0810dbf9531dc0d3a9d6ea514134ba5686724 size: 7541063 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c9f34f3d634490c688b24ea6308ba4eb38d98227c030f87fb6ba3fe5bf68fc86 size: 46253494 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - run-postgresql Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el7 - POSTGRESQL_VERSION=12 - POSTGRESQL_PREV_VERSION=10 - HOME=/var/lib/pgsql - PGUSER=postgres - APP_DATA=/opt/app-root - SUMMARY=PostgreSQL is an advanced Object-Relational database management system - DESCRIPTION=PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/postgresql - ENABLED_COLLECTIONS=rh-postgresql12 - BASH_ENV=/usr/share/container-scripts/postgresql/scl_enable - ENV=/usr/share/container-scripts/postgresql/scl_enable - PROMPT_COMMAND=. /usr/share/container-scripts/postgresql/scl_enable ExposedPorts: 5432/tcp: {} Labels: architecture: x86_64 build-date: 2024-05-30T09:34:16 com.redhat.component: rh-postgresql12-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. distribution-scope: public io.buildah.version: 1.29.0 io.k8s.description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. 
io.k8s.display-name: PostgreSQL 12 io.openshift.expose-services: 5432:postgresql io.openshift.s2i.assemble-user: "26" io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,postgresql,postgresql12,rh-postgresql12 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhscl/postgresql-12-rhel7 release: "145" summary: PostgreSQL is an advanced Object-Relational database management system url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhscl/postgresql-12-rhel7/images/1-145 usage: podman run -d --name postgresql_database -e POSTGRESQL_USER=user -e POSTGRESQL_PASSWORD=pass -e POSTGRESQL_DATABASE=db -p 5432:5432 rhscl/postgresql-12-rhel7 vcs-ref: 06913d0f260f4515284a2ab0a659f00217a432c8 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "26" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2024-05-30T09:38:03Z" Id: sha256:faaa9b1718aa7e90b9c101aa719c0207b4a98c6f3192002b40c0613af3d7e8cd Size: 133819444 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhscl/postgresql-12-rhel7@sha256:306ce79b5647eb627aebbd6a9c956f9cd09c24ef7b12e42c1de6ba89f8fd8121 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:306ce79b5647eb627aebbd6a9c956f9cd09c24ef7b12e42c1de6ba89f8fd8121 resourceVersion: "14108" uid: 9d994295-a05c-4ae6-9f25-ae34cf9967d5 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:017f542c0c5a83cf71321d76c7233cc3782038061e250a1be7e1be92bd81fe44 size: 88623070 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.17 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-10-23T16:06:08 com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "5" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: 
https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.17-5 vcs-ref: 70947114745f06cb153005c197bcd8390045485b vcs-type: git vendor: Red Hat, Inc. version: "1.17" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-10-23T16:11:55Z" Id: sha256:157e77d936fa1ffb2b7f7aa71eda401f997796cfe8ac5c6cd22d5aeaeeb1b332 Size: 127949437 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:32a5e806bd88b40568d46864fd313541498e38fabfc5afb5f3bdfe052c4b4c5f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:32a5e806bd88b40568d46864fd313541498e38fabfc5afb5f3bdfe052c4b4c5f resourceVersion: "14194" uid: 3a8629b7-e384-45ac-964d-2d9d6249d589 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c85ac87d44df4b64d7c273886fc5aed55a28422df33dcb641884ffa419db218 size: 76240885 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:51e9f237b750efcda2d5755785cdb8dd080d51585ae35d368e4f9b29a11b1994 size: 1329 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4339109fed706e213373c80a8e81481dbf63a9d30505dad20fe3801c024fa46a size: 347600654 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/datagrid/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.5.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.5 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.5.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: 534540aef153 Image: cbb843e4683a7d56a7505fdc2c5659f13c18a3244c9ce0378a34100ff5d8baf5 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-03-17T16:44:39.221740 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments 
com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 3.6.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.5.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.5.GA release: "2.1584463358" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.5-2.1584463358 vcs-ref: 740a8016ebd909ab092589f95cfebf5c15d5a281 vcs-type: git vendor: Red Hat, Inc. version: "1.5" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.5.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.5 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.5.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: 534540aef153 Image: sha256:9ffa9b1b6afd72decedb408ebf5c42ab908cd441c1f9885d9de45873a34c1822 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-03-17T16:44:39.221740 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: 
https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 3.6.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.5.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.5.GA release: "2.1584463358" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.5-2.1584463358 vcs-ref: 740a8016ebd909ab092589f95cfebf5c15d5a281 vcs-type: git vendor: Red Hat, Inc. version: "1.5" User: "185" WorkingDir: /home/jboss Created: "2020-03-17T16:52:06Z" DockerVersion: 1.13.1 Id: sha256:48e4640bc12c43aa7c07c38fb8f76bcd23721951c41d5826fbebef9fc3f464b2 Size: 423850861 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:3375ec169d274278da56d1401c5c1285f7d2812ea0bde2ac9ad9652b69f80893 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:02Z" name: sha256:3375ec169d274278da56d1401c5c1285f7d2812ea0bde2ac9ad9652b69f80893 resourceVersion: "13900" uid: 870da698-d7e4-4311-9632-5d1a1ee331f4 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4752687a61a97d6f352ae62c381c87564bcb2f5b6523a05510ca1fb60d640216 size: 36442442 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0344366a246a0f7590c2bae4536c01f15f20c6d802b4654ce96ac81047bc23f3 size: 1740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2f6d0806a79dff81a406906c6f9c757a398407c0d3345fabdfe3b238afee6da4 size: 86586578 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.13 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: fd16d6f2e774 Image: 271110da4538991404447fa5678366b7a276fb55f85af624d8c1e0292bcdd43f Labels: architecture: x86_64 build-date: 2022-06-15T16:15:55.785399 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com 
com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1655306368" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.13-1.1655306368 vcs-ref: 7f244524449d43f35d1370cc10ec0cc48f521a42 vcs-type: git vendor: Red Hat, Inc. version: "1.13" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.13 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: fd16d6f2e774 Image: sha256:afcad67a9a05ee0f28dc9c20da6d4a5a35712622f382a00156a653eaebdecf37 Labels: architecture: x86_64 build-date: 2022-06-15T16:15:55.785399 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1655306368" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.13-1.1655306368 vcs-ref: 7f244524449d43f35d1370cc10ec0cc48f521a42 vcs-type: git vendor: Red Hat, Inc. 
version: "1.13" User: "185" WorkingDir: /home/jboss Created: "2022-06-15T16:18:26Z" DockerVersion: 1.13.1 Id: sha256:89061b4069d7f30c7432668e8ae7e27b17596a63354bd9bc2ac2586501c51b66 Size: 123035783 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:33d4dff40514e91d86b42e90b24b09a5ca770d9f67657c936363d348cd33d188 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:33d4dff40514e91d86b42e90b24b09a5ca770d9f67657c936363d348cd33d188 resourceVersion: "14190" uid: 4203b0fa-0475-47a7-8d7d-e1a43e1782c9 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:172ffdf04fe0ffd5a0117272da0333271a3c558dbeaf357e7412638d99a1e462 size: 154305621 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b294370e16a52df0c9f043fce2048ca6d059e5f4a9ea1f432b28ab32b456c1ce size: 65687544 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el8 - NODEJS_VER=20 - PYTHON_VERSION=3.6 - PATH=/opt/app-root/src/.local/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PYTHONUNBUFFERED=1 - PYTHONIOENCODING=UTF-8 - LC_ALL=en_US.UTF-8 - LANG=en_US.UTF-8 - CNB_STACK_ID=com.redhat.stacks.ubi8-python-36 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - PIP_NO_CACHE_DIR=off - SUMMARY=Platform for building and running Python 3.6 applications - DESCRIPTION=Python 3.6 available as container is a base platform for building and running various Python 3.6 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. - BASH_ENV=/opt/app-root/bin/activate - ENV=/opt/app-root/bin/activate - PROMPT_COMMAND=. /opt/app-root/bin/activate ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-30T16:35:12Z" com.redhat.component: python-36-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Python 3.6 available as container is a base platform for building and running various Python 3.6 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. 
Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. distribution-scope: public io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.ubi8-python-36 io.k8s.description: Python 3.6 available as container is a base platform for building and running various Python 3.6 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. io.k8s.display-name: Python 3.6 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,python,python36,python-36,rh-python36 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/python-36 org.opencontainers.image.revision: 997d83720c24e6dfa4402f799676970ab2b723dd release: "1761842005" summary: Platform for building and running Python 3.6 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-python-container.git --context-dir=3.6/test/setup-test-app/ ubi8/python-36 python-sample-app vcs-ref: 997d83720c24e6dfa4402f799676970ab2b723dd vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-30T16:35:51Z" Id: sha256:648f1759d6cad13606fe9d58c8fa3cbf6c51ea5bd361812a63d06906259a7249 Size: 315555214 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/python-36@sha256:38427fd30565b66ec512fb8d86bf442a7ac4a100d44332e8c42b472fdf821db0 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:06Z" name: sha256:38427fd30565b66ec512fb8d86bf442a7ac4a100d44332e8c42b472fdf821db0 resourceVersion: "14131" uid: f961a18a-2cb6-4557-96e3-f81acf4320c8 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:25c75c34b2e2b68ba9245d9cddeb6b8a0887371ed30744064f85241a75704d87 size: 79262296 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:67705065e025181e4faca8aabe1305bdd92f5bdf8a2b8009cdb69183ac2e2c47 size: 49851946 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9d458e2e81cb0fa811f569aaf711628309c0372c7d5eed4a8ea9ec96b4aeeb42 size: 9300456 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3019826b26b93fdb39b6e29614bc6b4d1ab879c596261851db4ff70706fa6c55 size: 183535774 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/bin/bash Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - ART_BUILD_ENGINE=konflux - ART_BUILD_DEPS_METHOD=cachi2 - ART_BUILD_NETWORK=hermetic - ART_BUILD_DEPS_MODE=default - __doozer=merge - BUILD_RELEASE=202510212154.p2.g7f1d6f8.assembly.stream.el9 - BUILD_VERSION=v4.20.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=20 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - 
OS_GIT_VERSION=4.20.0-202510212154.p2.g7f1d6f8.assembly.stream.el9-7f1d6f8 - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.20 - __doozer_key=ose-tools - __doozer_uuid_tag=ose-tools-rhel9-v4.20.0-20251021.223340 - __doozer_version=v4.20.0 - OS_GIT_COMMIT=7f1d6f8 - SOURCE_DATE_EPOCH=1761075552 - SOURCE_GIT_COMMIT=7f1d6f88cb0c8ed5c877fc0ae2bd99298c6339f2 - SOURCE_GIT_TAG=openshift-clients-4.12.0-202208031327-1168-g7f1d6f88c - SOURCE_GIT_URL=https://github.com/openshift/oc Labels: License: GPLv2+ architecture: x86_64 build-date: "2025-10-21T22:55:56Z" com.redhat.component: ose-tools-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:openshift:4.20::el9 description: Empty distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Contains debugging and diagnostic tools for use with an OpenShift cluster. io.k8s.display-name: OpenShift Tools io.openshift.build.commit.id: 7f1d6f88cb0c8ed5c877fc0ae2bd99298c6339f2 io.openshift.build.commit.url: https://github.com/openshift/oc/commit/7f1d6f88cb0c8ed5c877fc0ae2bd99298c6339f2 io.openshift.build.source-location: https://github.com/openshift/oc io.openshift.build.versions: kubectl=1.33.3 io.openshift.expose-services: "" io.openshift.maintainer.component: oc io.openshift.maintainer.project: OCPBUGS io.openshift.tags: openshift,tools maintainer: Red Hat, Inc. name: openshift/ose-tools-rhel9 org.opencontainers.image.revision: cf8659ff1b7103f7d4b367026e60dea4406e5268 release: 202510212154.p2.g7f1d6f8.assembly.stream.el9 summary: Empty url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel-els/images/9.4-847.1719484506 vcs-ref: cf8659ff1b7103f7d4b367026e60dea4406e5268 vcs-type: git vendor: Red Hat, Inc. 
version: v4.20.0 ContainerConfig: {} Created: "2025-10-21T22:57:41Z" Id: sha256:519ab5d05ba33f2164e24bf3505c43498624ee893f155583311878d37d373eb5 Size: 321972411 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3843651d85087f9f19c0047f3b0c09e41f241946867d4a78acfda37ca0a405e2 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:3843651d85087f9f19c0047f3b0c09e41f241946867d4a78acfda37ca0a405e2 resourceVersion: "14074" uid: 17754b38-b940-477b-b53e-c957d23bc2bf - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:54e56e6f85721741ee7bf0336de8ad3bf138a56769a6d0097b600a0e361be58d size: 39618910 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4f8ddd7f5a755f537dd9d5f553c8c78171dcf3018c5fc96676a07380d3e14e20 size: 1745 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9790122be13ebce9b9c16d544b7f391cbf2d381d0efc0add7c4c8c0b554125f3 size: 86444530 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 65ec992ef2e6 Image: 9d2d5144cc6c24a3d835c1b484e9e2c5906e05640072f4f72264bf657785a6ac Labels: architecture: x86_64 build-date: 2022-04-29T13:47:33.189049 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "1.1651233097" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.12-1.1651233097 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 8b193c120d3f6f9df9d2db10d4ec77a45fff797d vcs-type: git vendor: Red Hat, Inc. 
version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 65ec992ef2e6 Image: sha256:1b39c85695f2e9da2622310ed29696aaa33005dfbf72493fab250f6d7936eeba Labels: architecture: x86_64 build-date: 2022-04-29T13:47:33.189049 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "1.1651233097" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.12-1.1651233097 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 8b193c120d3f6f9df9d2db10d4ec77a45fff797d vcs-type: git vendor: Red Hat, Inc. 
version: "1.12" User: "185" WorkingDir: /home/jboss Created: "2022-04-29T13:50:43Z" DockerVersion: 1.13.1 Id: sha256:a7113ef97be034e38d3fe27aee31482811b54df0a5eaf4106b0bff4adb1be47c Size: 126070475 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:38c7e4f7dea04bb536f05d78e0107ebc2a3607cf030db7f5c249f13ce1f52d59 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:38c7e4f7dea04bb536f05d78e0107ebc2a3607cf030db7f5c249f13ce1f52d59 resourceVersion: "14152" uid: 2f982ef4-69b5-4ff2-8c9f-1dcbe88d5a6d - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5329d7039f252afc1c5d69521ef7e674f71c36b50db99b369cbb52aa9e0a6782 size: 39330100 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c29bbc660b4ec7e40639e8e74734c70db36cc6379574ca7c1a7abe3503a6d280 size: 115296436 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-17 - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-17 - JBOSS_IMAGE_VERSION=1.16 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-07-19T16:14:53 com.redhat.component: openjdk-17-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "2" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 url: 
https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17/images/1.16-2 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: f3b78885ddc4a0f11fad92c8c52c4e8366510b9d vcs-type: git vendor: Red Hat, Inc. version: "1.16" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-07-19T16:24:50Z" Id: sha256:6da9e746c5efe391addc022b93883e9f7eb997b21c7ca21634a7646b6823c9ec Size: 154654163 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17@sha256:3b94ccfa422b8ba0014302a3cfc6916b69f0f5a9dfd757b6704049834d4ff0ae kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:3b94ccfa422b8ba0014302a3cfc6916b69f0f5a9dfd757b6704049834d4ff0ae resourceVersion: "14147" uid: 9f9ee43e-e235-4660-b0b7-107507748ea6 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c7965aa7086045a59bdb113a1fb8a19d7ccf7af4133e59af8ecefd39cda8e0b1 size: 78964242 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0a47b24a64cfa083053cbc2215f10f9a84ef11dd992d4c2755f3f91e8ab9a38f size: 68231466 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:71ab0d8c6a4c41db5c7a2cde1f284080513b081bf45af5891eb8dd5a8cc4c373 size: 12192264 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3b84f6150455cc1a21301aa835b6c307f678241265c88e36cca2517be9a87a94 size: 61654235 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8de98ffb6690c14b53771251ef29ad27e82ec78db7f7e888c008552c0736f774 size: 508507060 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: OpenShift Developer Services Config: Entrypoint: - /usr/bin/go-init - -main - /usr/libexec/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - __doozer=merge - BUILD_RELEASE=202509140700.p0.gd192e90.assembly.stream.el8 - BUILD_VERSION=v4.13.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=13 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.13.0-202509140700.p0.gd192e90.assembly.stream.el8-d192e90 - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.13 - __doozer_key=openshift-enterprise-cli - __doozer_version=v4.13.0 - OS_GIT_COMMIT=d192e90 - SOURCE_DATE_EPOCH=1712056277 - SOURCE_GIT_COMMIT=d192e901ece237d9ae1580d73e78f423ec2ef322 - SOURCE_GIT_TAG=openshift-clients-4.13.0-202304190216-95-gd192e901e - SOURCE_GIT_URL=https://github.com/openshift/oc - ART_BUILD_ENGINE=brew - ART_BUILD_DEPS_METHOD=cachito - ART_BUILD_NETWORK=internal-only - JENKINS_VERSION=2 - HOME=/var/lib/jenkins - JENKINS_HOME=/var/lib/jenkins - JENKINS_UC=https://updates.jenkins.io - OPENSHIFT_JENKINS_IMAGE_VERSION=4.13 - LANG=en_US.UTF-8 - LC_ALL=en_US.UTF-8 - INSTALL_JENKINS_VIA_RPMS=true ExposedPorts: 8080/tcp: {} 50000/tcp: {} Labels: License: GPLv2+ architecture: x86_64 build-date: 2025-09-22T10:45:44 com.redhat.component: openshift-jenkins-2-container com.redhat.license_terms: https://www.redhat.com/agreements description: Jenkins is a continuous integration server distribution-scope: public io.buildah.version: 1.33.12 io.jenkins.version: 2.516.3 io.k8s.description: Jenkins is a continuous 
integration server io.k8s.display-name: Jenkins 2 io.openshift.build.commit.id: cfb3a904331d93327ba0eb7145b78ac0b24f4675 io.openshift.build.commit.url: https://github.com/openshift/jenkins/commit/cfb3a904331d93327ba0eb7145b78ac0b24f4675 io.openshift.build.source-location: https://github.com/openshift/jenkins io.openshift.expose-services: 8080:http io.openshift.maintainer.component: Jenkins io.openshift.maintainer.product: OpenShift Container Platform io.openshift.maintainer.project: OCPBUGS io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: jenkins,jenkins2,ci maintainer: openshift-dev-services+jenkins@redhat.com name: openshift/ose-jenkins release: "1758537892" summary: Provides the latest release of Red Hat Universal Base Image 8. url: https://access.redhat.com/containers/#/registry.access.redhat.com/openshift/ose-jenkins/images/v4.13.0-1758537892 vcs-ref: 976802c3a15462ab81a28c7cd01c1c6765ddf8ed vcs-type: git vendor: Red Hat, Inc. version: v4.13.0 User: "1001" Volumes: /var/lib/jenkins: {} ContainerConfig: {} Created: "2025-09-22T10:52:31Z" Id: sha256:24a08856ee5258c2a25c61362db60a3dffec61997756f9b58e2c3cf2d25b84e3 Size: 729584584 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ocp-tools-4/jenkins-rhel8@sha256:3bc55cb1bafdd281a784ec7950c8e95914079522f152f642e8172869e83b4585 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:3bc55cb1bafdd281a784ec7950c8e95914079522f152f642e8172869e83b4585 resourceVersion: "13766" uid: 757352d1-87d2-430a-b4ae-a5a6f3f8c55e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:54e56e6f85721741ee7bf0336de8ad3bf138a56769a6d0097b600a0e361be58d size: 39618910 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4f8ddd7f5a755f537dd9d5f553c8c78171dcf3018c5fc96676a07380d3e14e20 size: 1745 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:bd7fede8be96f90f2657b88f88354c8b6589694c0ca7eb8800babca6e24674cb size: 117829840 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - 
JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 65ec992ef2e6 Image: 5a26ca0d70ec13dbe4df66eba9eb35d58c0d38101682fc806ddf63a443024230 Labels: architecture: x86_64 build-date: 2022-04-29T13:51:35.051397 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1651233100" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.12-1.1651233100 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: ef89a1b14e17c770cc25cefddf1a67b084eaaa66 vcs-type: git vendor: Red Hat, Inc. 
version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 65ec992ef2e6 Image: sha256:c43c95bfb42baf8a0ecfe2a8bc338bd99e53ffd1f85a0d4a7cc7b366d1ea08f0 Labels: architecture: x86_64 build-date: 2022-04-29T13:51:35.051397 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1651233100" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.12-1.1651233100 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: ef89a1b14e17c770cc25cefddf1a67b084eaaa66 vcs-type: git vendor: Red Hat, Inc. 
version: "1.12" User: "185" WorkingDir: /home/jboss Created: "2022-04-29T13:58:02Z" DockerVersion: 1.13.1 Id: sha256:e229a9be4f0b1494f655306d43c244853bd14d6515c50ff30085756fae147a68 Size: 157457992 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11@sha256:3f00540ce2a3a01d2a147a7d73825fe78697be213a050bd09edae36266d6bc40 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:3f00540ce2a3a01d2a147a7d73825fe78697be213a050bd09edae36266d6bc40 resourceVersion: "14171" uid: 42db77f0-1f4f-4bf9-ba10-e58291d46e26 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:50be7bb6ce3ddb41606e1956ba5c61072699ac536980f260a0db6dc59c8013fe size: 39575081 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:58d74dec22f4fd2877a82b6d78f2c1b8ec40c5c6afab78d4d7daebc426ffe62b size: 114687323 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/java-17 - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-17 - JBOSS_IMAGE_VERSION=1.23 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2025-10-22T20:10:51 com.redhat.component: openjdk-17-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 distribution-scope: public io.buildah.version: 1.33.12 io.cekit.version: 4.13.0.dev0 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" org.opencontainers.image.documentation: https://rh-openjdk.github.io/redhat-openjdk-containers/ release: "3.1761163783" summary: 
Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17/images/1.23-3.1761163783 usage: https://rh-openjdk.github.io/redhat-openjdk-containers/ vcs-ref: 24cbf05a8339c91f2ea7bd92a14704b97044859f vcs-type: git vendor: Red Hat, Inc. version: "1.23" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-10-22T20:25:12Z" Id: sha256:f560c666bc9eb16c34ffe6c73e4e99e56fa382a946dc391c63800af46241fc8c Size: 154290830 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/openjdk-17@sha256:3f01daca201d91f4989f8ffe80625d2e08fc0e69f241a7359d30c15cc7c9a419 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:3f01daca201d91f4989f8ffe80625d2e08fc0e69f241a7359d30c15cc7c9a419 resourceVersion: "13688" uid: 72240a14-c318-4519-ba30-66cb5dc4e5d5 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:97da74cc6d8fa5d1634eb1760fd1da5c6048619c264c23e62d75f3bf6b8ef5c4 size: 79524639 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d8190195889efb5333eeec18af9b6c82313edd4db62989bd3a357caca4f13f0e size: 1438 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:96dd49838ab46a776f95938f6cef26046a077c2dcb527828f66fadcbb249f5b0 size: 67513509 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:db9cd4defdff890334eb46715ad1e841eb9ec04d8a5ccfd6e8e74442bd69fda4 size: 11777712 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0764cc99fae67a407bab509725fb658cc690c6a72f8589950862ce88e3b51d05 size: 493546 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f529af401fa8e9c943fb318c335a7d251e20086ba67088341216de06f0d4ed97 size: 169500149 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - --database - /bundles.db Entrypoint: - /bin/registry-server Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - __doozer=merge - BUILD_RELEASE=202402011837.p0.g1a14e5c.assembly.stream - BUILD_VERSION=v4.13.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=13 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.13.0-202402011837.p0.g1a14e5c.assembly.stream-1a14e5c - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.13 - __doozer_key=openshift-enterprise-base - OS_GIT_COMMIT=1a14e5c - SOURCE_DATE_EPOCH=1706810462 - SOURCE_GIT_COMMIT=1a14e5c9896976b43767ab325a6fe35527a514a5 - SOURCE_GIT_TAG=1a14e5c - SOURCE_GIT_URL=https://github.com/openshift/images - GODEBUG=x509ignoreCN=0,madvdontneed=1 - OPENSHIFT_CI=true - BUILD_LOGLEVEL=0 - OPENSHIFT_BUILD_NAME=operator-registry-amd64 - OPENSHIFT_BUILD_NAMESPACE=ci-op-2qp121b3 ExposedPorts: 50051/tcp: {} Hostname: 22b55b50085b Labels: License: GPLv2+ architecture: x86_64 build-date: 2024-02-01T18:58:58 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openshift-enterprise-base-container com.redhat.license_terms: https://www.redhat.com/agreements description: This is the base image from which all OpenShift Container Platform images inherit. 
distribution-scope: public io.buildah.version: 1.32.2 io.k8s.description: This is a component of OpenShift Operator Lifecycle Manager and is the base for operator catalog API containers. io.k8s.display-name: OpenShift Operator Registry io.openshift.build.commit.author: "" io.openshift.build.commit.date: "" io.openshift.build.commit.id: 4cc5232d665151b2ee1c75df3061dfa11645fbbc io.openshift.build.commit.message: "" io.openshift.build.commit.ref: release-4.13 io.openshift.build.commit.url: https://github.com/openshift/images/commit/1a14e5c9896976b43767ab325a6fe35527a514a5 io.openshift.build.name: "" io.openshift.build.namespace: "" io.openshift.build.source-context-dir: "" io.openshift.build.source-location: https://github.com/openshift/operator-framework-olm io.openshift.ci.from.base: sha256:cfbb98ac2ff01ff28dc127650a11a936269a8a6a8f6e31041059aebd76aa8563 io.openshift.expose-services: "" io.openshift.maintainer.component: Release io.openshift.maintainer.project: OCPBUGS io.openshift.tags: openshift,base maintainer: Odin Team name: openshift/ose-base release: 202402011837.p0.g1a14e5c.assembly.stream summary: Operator Registry runs in a Kubernetes or OpenShift cluster to provide operator catalog data to Operator Lifecycle Manager. url: https://access.redhat.com/containers/#/registry.access.redhat.com/openshift/ose-base/images/v4.13.0-202402011837.p0.g1a14e5c.assembly.stream vcs-ref: 4cc5232d665151b2ee1c75df3061dfa11645fbbc vcs-type: git vcs-url: https://github.com/openshift/operator-framework-olm vendor: Red Hat, Inc. version: v4.13.0 User: "1001" WorkingDir: /registry Container: 6ec09c92264a99b6b9d1e125b1041060536a375dbc28ab423ed304e7fb7a7517 ContainerConfig: Cmd: - --database - /bundles.db Entrypoint: - /bin/registry-server Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - __doozer=merge - BUILD_RELEASE=202402011837.p0.g1a14e5c.assembly.stream - BUILD_VERSION=v4.13.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=13 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.13.0-202402011837.p0.g1a14e5c.assembly.stream-1a14e5c - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.13 - __doozer_key=openshift-enterprise-base - OS_GIT_COMMIT=1a14e5c - SOURCE_DATE_EPOCH=1706810462 - SOURCE_GIT_COMMIT=1a14e5c9896976b43767ab325a6fe35527a514a5 - SOURCE_GIT_TAG=1a14e5c - SOURCE_GIT_URL=https://github.com/openshift/images - GODEBUG=x509ignoreCN=0,madvdontneed=1 - OPENSHIFT_CI=true - BUILD_LOGLEVEL=0 - OPENSHIFT_BUILD_NAME=operator-registry-amd64 - OPENSHIFT_BUILD_NAMESPACE=ci-op-2qp121b3 ExposedPorts: 50051/tcp: {} Hostname: 22b55b50085b Labels: License: GPLv2+ architecture: x86_64 build-date: 2024-02-01T18:58:58 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openshift-enterprise-base-container com.redhat.license_terms: https://www.redhat.com/agreements description: This is the base image from which all OpenShift Container Platform images inherit. distribution-scope: public io.buildah.version: 1.32.2 io.k8s.description: This is a component of OpenShift Operator Lifecycle Manager and is the base for operator catalog API containers. 
io.k8s.display-name: OpenShift Operator Registry io.openshift.build.commit.author: "" io.openshift.build.commit.date: "" io.openshift.build.commit.id: 4cc5232d665151b2ee1c75df3061dfa11645fbbc io.openshift.build.commit.message: "" io.openshift.build.commit.ref: release-4.13 io.openshift.build.commit.url: https://github.com/openshift/images/commit/1a14e5c9896976b43767ab325a6fe35527a514a5 io.openshift.build.name: "" io.openshift.build.namespace: "" io.openshift.build.source-context-dir: "" io.openshift.build.source-location: https://github.com/openshift/operator-framework-olm io.openshift.ci.from.base: sha256:cfbb98ac2ff01ff28dc127650a11a936269a8a6a8f6e31041059aebd76aa8563 io.openshift.expose-services: "" io.openshift.maintainer.component: Release io.openshift.maintainer.project: OCPBUGS io.openshift.tags: openshift,base maintainer: Odin Team name: openshift/ose-base release: 202402011837.p0.g1a14e5c.assembly.stream summary: Operator Registry runs in a Kubernetes or OpenShift cluster to provide operator catalog data to Operator Lifecycle Manager. url: https://access.redhat.com/containers/#/registry.access.redhat.com/openshift/ose-base/images/v4.13.0-202402011837.p0.g1a14e5c.assembly.stream vcs-ref: 4cc5232d665151b2ee1c75df3061dfa11645fbbc vcs-type: git vcs-url: https://github.com/openshift/operator-framework-olm vendor: Red Hat, Inc. version: v4.13.0 User: "1001" WorkingDir: /registry Created: "2024-02-07T20:25:12Z" Id: sha256:297af6acc1bda4a2ff6f757a6753c4a38939b49e7634dfa22a27803adfe410b1 Parent: sha256:666bea379f96271140efccf685e0d32d0707481ff703793ff1f88a25ae6ece40 Size: 328836168 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: quay.io/openshift/origin-operator-registry@sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-12-12T16:28:11Z" name: sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a resourceVersion: "43628" uid: 277a7062-138b-44a6-b924-8877bc78f360 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2920d84eafa0cf94806ab58f0a2124f7b2d35bcbb06fc89a9106dcc28efe397a size: 39653524 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0393e1c5dfe9ede3e742e787ae5d00503a4f0687aa66415c3ca87cac6e87880d size: 40503436 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - container=oci - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - NPM_RUN=start - PLATFORM=el9 - NODEJS_VERSION=20 - NAME=nodejs - SUMMARY=Minimal image for running Node.js 20 applications - DESCRIPTION=Node.js 20 available as container is a base platform for running various Node.js 20 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. 
- NPM_CONFIG_PREFIX=/opt/app-root/src/.npm-global - PATH=/opt/app-root/src/node_modules/.bin/:/opt/app-root/src/.npm-global/bin/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-15T16:15:05Z" com.redhat.component: nodejs-20-minimal-container com.redhat.deployments-dir: /opt/app-root/src com.redhat.dev-mode: DEV_MODE:false com.redhat.dev-mode.port: DEBUG_PORT:5858 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Node.js 20 available as container is a base platform for running various Node.js 20 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-nodejs-container io.buildah.version: 1.41.4 io.k8s.description: Node.js 20 available as container is a base platform for running various Node.js 20 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. io.k8s.display-name: Node.js 20 Micro io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nodejs,nodejs20 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/nodejs-20-minimal org.opencontainers.image.revision: e92372fde5f0369030c2af49d3c12ebf7f5f53d3 release: "1760544727" summary: Minimal image for running Node.js 20 applications url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: e92372fde5f0369030c2af49d3c12ebf7f5f53d3 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-15T16:15:10Z" Id: sha256:bb62c40b1e77fe2abab3e1b355e8f93cb953ba9c9cb33e3d9e29b7e5b409dfce Size: 80169352 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/nodejs-20-minimal@sha256:3fdd215cd56a7bfe500482c73271dc1e79bf90457a0f245e6f50d3d63ca4da7c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:3fdd215cd56a7bfe500482c73271dc1e79bf90457a0f245e6f50d3d63ca4da7c resourceVersion: "14087" uid: ac0e9a45-8974-4c05-ad10-b1eb08b70882 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7f2c2c4492b6b2d181be862a0a1d1b6f6851cb07244efbcb43d44f9936aa78d5 size: 80005019 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3fcbead5564d28a05c64093d61810ddf9336fd6a7d3132cbde762bd287b05b30 size: 423922375 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. 
Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - EAP_FULL_GROUPID=org.jboss.eap - JBOSS_EAP_VERSION=7.4.18 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.4.18 - WILDFLY_VERSION=7.4.18.GA-redhat-00001 - SSO_FORCE_LEGACY_SECURITY=true - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.9.Final - GALLEON_WILDFLY_VERSION=5.2.6.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - S2I_FP_VERSION=23.0.0.Final - DEFAULT_ADMIN_USERNAME=eapadmin - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - 
JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - OFFLINER_VERSION=1.6 - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap74-openjdk8-openshift-rhel7 - JBOSS_IMAGE_VERSION=7.4.18 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2024-07-22T13:35:54 com.redhat.component: jboss-eap-74-openjdk8-builder-openshift-rhel7-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Enterprise Application Platform 7.4 OpenShift container image. distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running JavaEE applications on JBoss EAP 7.4 io.k8s.display-name: JBoss EAP 7.4 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7 maintainer: Red Hat name: jboss-eap-7/eap74-openjdk8-openshift-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.4.18 org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.4.18 release: "2" summary: Red Hat JBoss Enterprise Application Platform 7.4 OpenShift container image. url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-eap-7/eap74-openjdk8-openshift-rhel7/images/7.4.18-2 vcs-ref: 3989b0b160e6978695f83ec2fb79ba404572528d vcs-type: git vendor: Red Hat, Inc. version: 7.4.18 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-07-22T13:40:44Z" Id: sha256:852a703631f7a7cd4d48ed06fa933279c4436435a08416469777e96ff8ceccd0 Size: 503974300 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-eap-7/eap74-openjdk8-openshift-rhel7@sha256:403c4481d100edee6355e3defc17cec304db6f510b0c953c298e171b16dbd8bf kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:403c4481d100edee6355e3defc17cec304db6f510b0c953c298e171b16dbd8bf resourceVersion: "13500" uid: e7960d75-9c0b-4384-a64b-92649412f6d9 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d2bb55755c2edf5423e38bd42c48b277e0cb4c5c765218247001a8c8eb479a87 size: 215199578 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4bf8bbfa97006b1ddb3d00e28ee8e476a94d23ad0c9b7d27dfbab02d3fa59957 size: 69975754 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el9 - NODEJS_VER=20 - 
PYTHON_VERSION=3.12 - PATH=/opt/app-root/src/.local/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PYTHONUNBUFFERED=1 - PYTHONIOENCODING=UTF-8 - LC_ALL=en_US.UTF-8 - LANG=en_US.UTF-8 - CNB_STACK_ID=com.redhat.stacks.ubi9-python-312 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - PIP_NO_CACHE_DIR=off - SUMMARY=Platform for building and running Python 3.12 applications - DESCRIPTION=Python 3.12 available as container is a base platform for building and running various Python 3.12 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. - BASH_ENV=/opt/app-root/bin/activate - ENV=/opt/app-root/bin/activate - PROMPT_COMMAND=. /opt/app-root/bin/activate ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T16:22:08Z" com.redhat.component: python-312-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Python 3.12 available as container is a base platform for building and running various Python 3.12 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. distribution-scope: public io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.ubi9-python-312 io.k8s.description: Python 3.12 available as container is a base platform for building and running various Python 3.12 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. io.k8s.display-name: Python 3.12 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,python,python312,python-312,rh-python312 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/python-312 org.opencontainers.image.revision: 65d9db1c2b55904cec2bc20b7fe5c31bab89e7e1 release: "1760372467" summary: Platform for building and running Python 3.12 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-python-container.git --context-dir=3.12/test/setup-test-app/ ubi9/python-312 python-sample-app vcs-ref: 65d9db1c2b55904cec2bc20b7fe5c31bab89e7e1 vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T16:22:20Z" Id: sha256:a1ed2585f89a627604d7b2487507afa4d4c6fcd6f144fc604233c5fea7f33f31 Size: 382279194 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/python-312@sha256:4100f9633dd7533a6ab847ea2b666de21bbb84baf070945a3c142cb019cd9a5f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:06Z" name: sha256:4100f9633dd7533a6ab847ea2b666de21bbb84baf070945a3c142cb019cd9a5f resourceVersion: "14130" uid: a30c750f-4ce0-4f1f-9f4d-02e82f499e25 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:690c847f419672788dca52f9eb17b10133919a0aae947934f7bdf5ccf30f1546 size: 79990748 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8b0190b73e2fbfc495259da2dd7cb3dd11858057a17aba8e69b494f184278222 size: 113428012 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.17 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2024-06-04T14:54:34 com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.10.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: 
redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "3.1717512819" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.17-3.1717512819 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: d57f866d5459ae47560699f643a117753a2f1ee0 vcs-type: git vendor: Red Hat, Inc. version: "1.17" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-06-04T15:04:31Z" Id: sha256:e48dd217b99967a40735d2ac4dc2d1f1b438fe009280a869de143e47e6930d13 Size: 193448361 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/redhat-openjdk-18/openjdk18-openshift@sha256:421d1f6a10e263677b7687ccea8e4a59058e2e3c80585505eec9a9c2e6f9f40e kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:421d1f6a10e263677b7687ccea8e4a59058e2e3c80585505eec9a9c2e6f9f40e resourceVersion: "13683" uid: 595390c4-28a1-4cfc-ab89-d5c0808a02df - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5329d7039f252afc1c5d69521ef7e674f71c36b50db99b369cbb52aa9e0a6782 size: 39330100 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:70fe1bf188594ef42f2faa617005d3f397a7a395c9b41f0ebe25140adfb323a1 size: 111342558 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.16 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-07-20T20:11:28 com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image 
for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "3" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.16-3 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: 63a002ff432d7760adc5486db1a121a78718a340 vcs-type: git vendor: Red Hat, Inc. version: "1.16" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-07-20T20:24:29Z" Id: sha256:998e41ed2c35c256fabfb4884b115e3b8e6f22458afd8a6fbf9e512f3a5fb100 Size: 150702816 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11@sha256:425e2c7c355bea32be238aa2c7bdd363b6ab3709412bdf095efe28a8f6c07d84 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:425e2c7c355bea32be238aa2c7bdd363b6ab3709412bdf095efe28a8f6c07d84 resourceVersion: "14179" uid: 9f266725-ba2e-4032-961a-d8bd33261941 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2a99c93da16827d9a6254f86f495d2c72c62a916f9c398577577221d35d2c790 size: 39641757 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4418ace46c3dd933f98d83f357f31048e72d5db3d97bccfdb0acef769ee8234f size: 1743 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9675ea79d90c914f2530be70a2c90072eed62580297ca69aa1ab9d21290a3555 size: 87219093 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.10 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: be4e58a52d40 Image: 4f8b58094a49e52735fd2f81b53d0eb1e7a19e33777cba5893dbc7d69fdcfdec Labels: architecture: x86_64 build-date: 2021-12-01T18:39:55.002944 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications 
io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "10.1638383033" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.10-10.1638383033 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 5dbba0eecb1056908b8875ef8df210953c55c03a vcs-type: git vendor: Red Hat, Inc. version: "1.10" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.10 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: be4e58a52d40 Image: sha256:dc41aba32c4b97f20cbe61c6a4ccb9212445c80f4d02a97e0a18a2f65e8b779f Labels: architecture: x86_64 build-date: 2021-12-01T18:39:55.002944 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "10.1638383033" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.10-10.1638383033 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 5dbba0eecb1056908b8875ef8df210953c55c03a vcs-type: git vendor: Red Hat, Inc. 
version: "1.10" User: "185" WorkingDir: /home/jboss Created: "2021-12-01T18:42:25Z" DockerVersion: 1.13.1 Id: sha256:2ad55ed2b7c7daf918966cd955c79dd2b1eb534bffe90c8988c11bc90fca73f7 Size: 126867370 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:431753c8a6a8541fdc0edd3385b2c765925d244fdd2347d2baa61303789696be kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:431753c8a6a8541fdc0edd3385b2c765925d244fdd2347d2baa61303789696be resourceVersion: "14187" uid: cba71974-f4f8-4e06-887f-08f392eda4e3 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:1f1202c893ce2775c72b2a3f42ac33b25231d16ca978244bb0c6d1453dc1f39e size: 76250035 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:32be9843afa050552a66345576a59497ba7c81c272aa895d67e6e349841714da size: 1320 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9f74ee800fdb2242405ca2ee7e74f612973956d1725766a1f2199339f92b8381 size: 4013823 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c47adc077129316d733e991a2da2c4bf7ec3d93b7836e0b97ddc5885f0e512ba size: 85699059 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:030ef875c16299c58b1fe1f5232cae9d08cbfc35d1e357ee72bfc8190cda2f40 size: 15165108 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.6 - JOLOKIA_VERSION=1.6.2.redhat-00002 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages - JAVACONFDIRS=/opt/rh/rh-maven35/root/etc/java - XDG_CONFIG_DIRS=/opt/rh/rh-maven35/root/etc/xdg:/etc/xdg - XDG_DATA_DIRS=/opt/rh/rh-maven35/root/usr/share:/usr/local/share:/usr/share ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 08c71bdb7e2a Image: 3ee01967bae94329abd65082510b9c33d2958d82a6e7964fa249518274fddf84 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-04-21T12:04:49.688935 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for 
OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "20.1587470195" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.6-20.1587470195 vcs-ref: 31565b6a7a80bedca64476301fd32f1edaa04da4 vcs-type: git vendor: Red Hat, Inc. version: "1.6" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.6 - JOLOKIA_VERSION=1.6.2.redhat-00002 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages - JAVACONFDIRS=/opt/rh/rh-maven35/root/etc/java - XDG_CONFIG_DIRS=/opt/rh/rh-maven35/root/etc/xdg:/etc/xdg - XDG_DATA_DIRS=/opt/rh/rh-maven35/root/usr/share:/usr/local/share:/usr/share ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 08c71bdb7e2a Image: sha256:38e2a78a809f259293280b80b098cf5674b594a54ea377d70e58df22ab789e97 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-04-21T12:04:49.688935 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "20.1587470195" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.6-20.1587470195 vcs-ref: 31565b6a7a80bedca64476301fd32f1edaa04da4 vcs-type: git vendor: Red Hat, Inc. 
version: "1.6" User: "185" WorkingDir: /home/jboss Created: "2020-04-21T12:06:38Z" DockerVersion: 1.13.1 Id: sha256:e8dc9fce7fca406094a2871389ad7734d45e6c15fe7b5fb59e9508f86de065f7 Size: 181136027 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift@sha256:4361e4a098acb1d1cbd79b6fb1e67a891949e65cdc40e286a8e5da9bfb7fa332 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:4361e4a098acb1d1cbd79b6fb1e67a891949e65cdc40e286a8e5da9bfb7fa332 resourceVersion: "14051" uid: 0fae2fb1-814e-4e34-bf2b-f33dbaf23a78 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7e5f40fe90b709b27097c5ff1121f56e562c0f563ba652f6cb1be1fe6b07253a size: 39746652 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9c85fc2090f278bfc806b061b9b56e6e666b6ece4edc407048de480129c76052 size: 63947093 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - '"./${DOTNET_DEFAULT_CMD}"' Env: - container=oci - HOME=/opt/app-root - PATH=/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - DOTNET_APP_PATH=/opt/app-root/app - DOTNET_DATA_PATH=/opt/app-root/data - DOTNET_DEFAULT_CMD=default-cmd.sh - DOTNET_RUNNING_IN_CONTAINER=true - NUGET_XMLDOC_MODE=skip - ASPNETCORE_URLS=http://*:8080 - APP_UID=1001 - DOTNET_VERSION=9.0.11 - ASPNET_VERSION=9.0.11 ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-12-10T16:51:08Z" com.redhat.component: dotnet-90-runtime-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Platform for running .NET 9 applications distribution-scope: public dotnet_version: 9.0.11 io.buildah.version: 1.41.4 io.k8s.description: Platform for running .NET 9 applications io.k8s.display-name: .NET 9 io.openshift.expose-services: 8080:http io.openshift.tags: runtime,.net,dotnet,dotnetcore,dotnet90-runtime maintainer: Red Hat, Inc. name: ubi8/dotnet-90-runtime org.opencontainers.image.created: "2025-12-10T16:51:08Z" org.opencontainers.image.revision: 22f12d3e4eca143ba3f87b73d2d8cd7797d7afa4 release: "1765385235" summary: .NET 9 runtime url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: 22f12d3e4eca143ba3f87b73d2d8cd7797d7afa4 vcs-type: git vendor: Red Hat, Inc. 
version: "9.0" User: "1001" WorkingDir: /opt/app-root/app ContainerConfig: {} Created: "2025-12-10T16:51:18Z" Id: sha256:2b3c05da3642c01fdcb1e593ff6ad238f968d157b4aa37c46749f02e29027a5a Size: 103706593 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/dotnet-90-runtime@sha256:43920d10408205a379519e16530d5181db65a79df3c2725b3cbad26798d09037 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-12-12T16:25:13Z" name: sha256:43920d10408205a379519e16530d5181db65a79df3c2725b3cbad26798d09037 resourceVersion: "40625" uid: 8dc1bccd-7b22-4d9a-9f7f-f25600bc1697 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:25c75c34b2e2b68ba9245d9cddeb6b8a0887371ed30744064f85241a75704d87 size: 79262296 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:67705065e025181e4faca8aabe1305bdd92f5bdf8a2b8009cdb69183ac2e2c47 size: 49851946 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9d458e2e81cb0fa811f569aaf711628309c0372c7d5eed4a8ea9ec96b4aeeb42 size: 9300456 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:55f517bff7f6fc3f62bf9d37135053370f0c7192b76017b1280f3ee23f51a8e3 size: 1071942759 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - ART_BUILD_ENGINE=konflux - ART_BUILD_DEPS_METHOD=cachi2 - ART_BUILD_NETWORK=hermetic - ART_BUILD_DEPS_MODE=default - __doozer=merge - BUILD_RELEASE=202510220756.p2.g9d55fd1.assembly.stream.el9 - BUILD_VERSION=v4.20.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=20 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.20.0-202510220756.p2.g9d55fd1.assembly.stream.el9-9d55fd1 - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.20 - __doozer_key=driver-toolkit - __doozer_uuid_tag=driver-toolkit-rhel9-v4.20.0-20251022.081026 - __doozer_version=v4.20.0 - OS_GIT_COMMIT=9d55fd1 - SOURCE_DATE_EPOCH=1750755771 - SOURCE_GIT_COMMIT=9d55fd1aaba05830f857132bd149ee3cf18cc20f - SOURCE_GIT_TAG=9d55fd1a - SOURCE_GIT_URL=https://github.com/openshift/driver-toolkit Labels: License: GPLv2+ architecture: x86_64 build-date: "2025-10-22T08:48:37Z" com.redhat.component: driver-toolkit-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:openshift:4.20::el9 description: Empty distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: driver-toolkit is a container with the kernel packages necessary for building driver containers for deploying kernel modules/drivers on OpenShift io.k8s.display-name: Empty io.openshift.build.commit.id: 9d55fd1aaba05830f857132bd149ee3cf18cc20f io.openshift.build.commit.url: https://github.com/openshift/driver-toolkit/commit/9d55fd1aaba05830f857132bd149ee3cf18cc20f io.openshift.build.source-location: https://github.com/openshift/driver-toolkit io.openshift.expose-services: "" io.openshift.maintainer.component: Driver Toolkit io.openshift.maintainer.project: OCPBUGS io.openshift.release.operator: "true" io.openshift.tags: Empty maintainer: Red Hat, Inc. 
name: openshift/driver-toolkit-rhel9 org.opencontainers.image.revision: a0aa3a715de00eeed02beef413c6c06bdfadfdc1 release: 202510220756.p2.g9d55fd1.assembly.stream.el9 summary: Empty url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel-els/images/9.4-847.1719484506 vcs-ref: a0aa3a715de00eeed02beef413c6c06bdfadfdc1 vcs-type: git vendor: Red Hat, Inc. version: v4.20.0 ContainerConfig: {} Created: "2025-10-22T08:51:02Z" Id: sha256:58d01ed48d76969a67687d2971bcffcf254217244cf137ba6dd2849ebaa02f82 Size: 1210383461 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:4396f6b4629ba45fe23c13c91aaa64427e957b15841bc65c84537763f00bcbe0 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:4396f6b4629ba45fe23c13c91aaa64427e957b15841bc65c84537763f00bcbe0 resourceVersion: "13296" uid: 50657682-4f9c-487f-b1a3-f356f1e7bd0f - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d2bb55755c2edf5423e38bd42c48b277e0cb4c5c765218247001a8c8eb479a87 size: 215199578 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:434c740598f75b4980d8de08b0ef259c5a66bbef05e784da9731b3fea33ef719 size: 22371746 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - NODEJS_VER=20 - PERL_VERSION=5.32 - PERL_SHORT_VER=532 - NAME=perl - SUMMARY=Platform for building and running Perl 5.32 applications - DESCRIPTION=Perl 5.32 available as container is a base platform for building and running various Perl 5.32 applications and frameworks. Perl is a high-level programming language with roots in C, sed, awk and shell scripting. Perl is good at handling processes and files, and is especially good at handling text. Perl's hallmarks are practicality and efficiency. While it is used to do a lot of different things, Perl's most common applications are system administration utilities and web programming. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T20:08:27Z" com.redhat.component: perl-532-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Perl 5.32 available as container is a base platform for building and running various Perl 5.32 applications and frameworks. Perl is a high-level programming language with roots in C, sed, awk and shell scripting. Perl is good at handling processes and files, and is especially good at handling text. Perl's hallmarks are practicality and efficiency. 
While it is used to do a lot of different things, Perl's most common applications are system administration utilities and web programming. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-perl-container io.buildah.version: 1.41.4 io.k8s.description: Perl 5.32 available as container is a base platform for building and running various Perl 5.32 applications and frameworks. Perl is a high-level programming language with roots in C, sed, awk and shell scripting. Perl is good at handling processes and files, and is especially good at handling text. Perl's hallmarks are practicality and efficiency. While it is used to do a lot of different things, Perl's most common applications are system administration utilities and web programming. io.k8s.display-name: Apache 2.4 with mod_fcgid and Perl 5.32 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,perl,perl532,perl-532 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/perl-532 org.opencontainers.image.revision: 4c47d79a2b9652e6646d3826a4c4d9f235a10690 release: "1760386076" summary: Platform for building and running Perl 5.32 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi9/perl-532:latest vcs-ref: 4c47d79a2b9652e6646d3826a4c4d9f235a10690 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T20:08:36Z" Id: sha256:0979d51cefe37e42dbf0c650cb7f4e88307219bc6fecc6882132b6560a22f474 Size: 334674324 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/perl-532@sha256:44f6258460b45f78d229e9a3865e7481285d268460760710f69f951ed2b41aa5 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:44f6258460b45f78d229e9a3865e7481285d268460760710f69f951ed2b41aa5 resourceVersion: "14013" uid: e0ecbf42-634d-4282-89da-f506c4eb4299 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:157428bbe422689f642c824a512700c63a930f9e9d7da34290fd559f624363cb size: 114679860 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-17 - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - 
JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-17 - JBOSS_IMAGE_VERSION=1.17 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-10-23T16:06:10 com.redhat.component: openjdk-17-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "4" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17/images/1.17-4 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 0903a764fc8f6f41c6003906542e9f7dbe6b0f7b vcs-type: git vendor: Red Hat, Inc. version: "1.17" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-10-23T16:17:40Z" Id: sha256:ae3c380c4feff75a6e10386f81da0fee2c54364dc1d28920e1b266c91fc83ae1 Size: 154015376 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17@sha256:46a4e73ddb085d1f36b39903ea13ba307bb958789707e9afde048764b3e3cae2 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:46a4e73ddb085d1f36b39903ea13ba307bb958789707e9afde048764b3e3cae2 resourceVersion: "14148" uid: d5c31426-7e47-4218-9057-501bc26d8a68 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2a99c93da16827d9a6254f86f495d2c72c62a916f9c398577577221d35d2c790 size: 39641757 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4418ace46c3dd933f98d83f357f31048e72d5db3d97bccfdb0acef769ee8234f size: 1743 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d0e25578e51bf59d53b2a82277e41fed5111f2c676c3a545ff1c9b5ffbddeb8f size: 113478789 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - 
AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.10 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: be4e58a52d40 Image: 78ddbf525e8ed616920530e5e572cb514ac20237e5d502abf94d099cfbe256b3 Labels: architecture: x86_64 build-date: 2021-12-01T18:40:35.357924 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "10.1638383025" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.10-10.1638383025 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: fbc47d72a59516ad145cb5349a99a7b0a9deb333 vcs-type: git vendor: Red Hat, Inc. 
version: "1.10" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.10 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: be4e58a52d40 Image: sha256:f80060f8ab16e6d8d497c985ba706ceb278a075a28bcfc8e1f25d02fdeb08f03 Labels: architecture: x86_64 build-date: 2021-12-01T18:40:35.357924 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "10.1638383025" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.10-10.1638383025 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: fbc47d72a59516ad145cb5349a99a7b0a9deb333 vcs-type: git vendor: Red Hat, Inc. 
version: "1.10" User: "185" WorkingDir: /home/jboss Created: "2021-12-01T18:46:56Z" DockerVersion: 1.13.1 Id: sha256:20c41960b22f85bc6433730098313516c0bcce9cfc7214cd69d892e8b938f055 Size: 153129803 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8@sha256:496e23be70520863bce6f7cdc54d280aca2c133d06e992795c4dcbde1a9dd1ab kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:496e23be70520863bce6f7cdc54d280aca2c133d06e992795c4dcbde1a9dd1ab resourceVersion: "14207" uid: b1ed8e10-f98c-4f33-a79a-e0de1166ed4f - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d246e53da6241a619b7dcea7d4403071b9e1961797aa4f6c766786e29732651c size: 76526594 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:91bd2c541a17a588359eb054815718e41f871d03b4d4daf7b3584b25fbdcbb67 size: 1563 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:be722ba66cc6549b4778e0f8d184a93fb0da47bf39c1046b0c45c163502b7cfe size: 353009795 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/datagrid/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.8.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.8 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.8.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: f18f459d223a Image: ef2286cf701720e28215d073370ae5e7984c40bfcfeceef7ecaa7fb529d58f67 Labels: architecture: x86_64 build-date: 2021-12-02T07:22:02.281837 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 
3.7.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.8.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.8.GA release: "6.1638429593" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.8-6.1638429593 vcs-ref: 9152a4fabf772370752a00282ad5f14ba0008d3f vcs-type: git vendor: Red Hat, Inc. version: "1.8" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.8.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.8 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.8.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: f18f459d223a Image: sha256:241bc1bd19857024762f24ec6b5c747d4f82a6f313970d36395cb60618b075a0 Labels: architecture: x86_64 build-date: 2021-12-02T07:22:02.281837 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 3.7.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. 
io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.8.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.8.GA release: "6.1638429593" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.8-6.1638429593 vcs-ref: 9152a4fabf772370752a00282ad5f14ba0008d3f vcs-type: git vendor: Red Hat, Inc. version: "1.8" User: "185" WorkingDir: /home/jboss Created: "2021-12-02T07:28:37Z" DockerVersion: 1.13.1 Id: sha256:22a6577c0ae43c83f3e93bbe1495ba06066c164d9ad80b47295e322b28cd888a Size: 429545765 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:4a69ef73f4b6afb2819630baf5d169a038ed4def330a44534926aa933cbbd7e5 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:02Z" name: sha256:4a69ef73f4b6afb2819630baf5d169a038ed4def330a44534926aa933cbbd7e5 resourceVersion: "13911" uid: 866ae5bf-1523-4f3e-8dea-f0262de38c6c - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c3056c1dbb67649cd282429a7b39aaaae9866a9298cb78c197a3b4e099a75a85 size: 46059398 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-postgresql Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - POSTGRESQL_VERSION=13 - POSTGRESQL_PREV_VERSION=12 - HOME=/var/lib/pgsql - PGUSER=postgres - APP_DATA=/opt/app-root - SUMMARY=PostgreSQL is an advanced Object-Relational database management system - DESCRIPTION=PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/postgresql - ENABLED_COLLECTIONS= ExposedPorts: 5432/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T16:12:36Z" com.redhat.component: postgresql-13-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. 
distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. io.k8s.display-name: PostgreSQL 13 io.openshift.expose-services: 5432:postgresql io.openshift.s2i.assemble-user: "26" io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,postgresql,postgresql13,postgresql-13 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel9/postgresql-13 org.opencontainers.image.revision: 2b3e00a78de726dde88775e8037dd6ec891171d1 release: "1760371920" summary: PostgreSQL is an advanced Object-Relational database management system url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d --name postgresql_database -e POSTGRESQL_USER=user -e POSTGRESQL_PASSWORD=pass -e POSTGRESQL_DATABASE=db -p 5432:5432 rhel9/postgresql-13 vcs-ref: 2b3e00a78de726dde88775e8037dd6ec891171d1 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "26" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T16:12:49Z" Id: sha256:b1c48bb0da2b9f7532acc54cfc1f18109e5a86ac90b03c8a9dfbd48bccc9d813 Size: 143159246 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel9/postgresql-13@sha256:4b4e59365194340a2a68d6720b35f4f3434c75c7d504a9e785fb056f7e4212e7 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:4b4e59365194340a2a68d6720b35f4f3434c75c7d504a9e785fb056f7e4212e7 resourceVersion: "14111" uid: ab4c3940-25ed-4a3a-8912-fd8f916c0e9c - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:eca9236fb686825c1ec7ba1f1b339f6300ed2d4fffdf50611dde66cb8f6eeaa9 size: 140240268 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/java-21 - JAVA_VENDOR=openjdk - JAVA_VERSION=21 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-21 - JBOSS_IMAGE_VERSION=1.18 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2024-01-17T20:01:48 com.redhat.component: 
openjdk-21-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 21 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.9.1 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-21 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "21" org.jboss.product.version: "21" org.opencontainers.image.documentation: https://jboss-container-images.github.io/openjdk/ release: "3.1705519633" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 21 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-21/images/1.18-3.1705519633 usage: https://jboss-container-images.github.io/openjdk/ vcs-ref: c5efa867ee7f0301fb355289f8e839c9236007d8 vcs-type: git vendor: Red Hat, Inc. version: "1.18" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-01-17T20:10:45Z" Id: sha256:6cc5e74a214a5396231402ca4386dceb067e112b8aad022961c5b51ded628172 Size: 179574802 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-21@sha256:4f35566977c35306a8f2102841ceb7fa10a6d9ac47c079131caed5655140f9b2 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:4f35566977c35306a8f2102841ceb7fa10a6d9ac47c079131caed5655140f9b2 resourceVersion: "14119" uid: d84c85f2-1c44-4035-abf9-69109c8327f0 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:872582724f337bcc41b829d3b854567e146ab62aa3c7de0b37e18617d38f5d08 size: 76246809 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b13ffc206103620a7d59e4f5b72279b53e317ade5d545a3daa06ab9bed270f92 size: 1409 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b7f1305ca252f66148776525dde2bb4df6c83494633a8164b3fc6b1560b711bf size: 4028980 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9fcb9eb95cb77715beb5cf6e769bfb055fe46ac0cad1cdf99d229bce80c5b3b9 size: 87034685 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4f6270eaa927580862840be94cdc8174a19ee9fc4aeb6d17580fde4dce18649d size: 15185511 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - usage Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.7 - JOLOKIA_VERSION=1.6.2.redhat-00002 - KARAF_FRAMEWORK_VERSION=4.2.6.fuse-770019-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - 
'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: d0feb8001879 Image: 0d972d0393973e02536414b0b3b2347554ccb2035aa6cbf9b95c47d962077977 Labels: architecture: x86_64 build-date: 2020-11-04T17:19:28.608741 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.karaf: 4.2.6.fuse-770019-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "12.1604505258" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.7-12.1604505258 vcs-ref: c426cd20318ca9f4d60d8cf5d41f62dd13250608 vcs-type: git vendor: Red Hat, Inc. 
version: "1.7" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.7 - JOLOKIA_VERSION=1.6.2.redhat-00002 - KARAF_FRAMEWORK_VERSION=4.2.6.fuse-770019-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: d0feb8001879 Image: sha256:3fd95b4d7581b6cfedace98588d128d9cd07e9b32593c6d9cf3eadb7ad56d0eb Labels: architecture: x86_64 build-date: 2020-11-04T17:19:28.608741 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.karaf: 4.2.6.fuse-770019-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "12.1604505258" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.7-12.1604505258 vcs-ref: c426cd20318ca9f4d60d8cf5d41f62dd13250608 vcs-type: git vendor: Red Hat, Inc. 
version: "1.7" User: "185" WorkingDir: /home/jboss Created: "2020-11-04T17:20:49Z" DockerVersion: 1.13.1 Id: sha256:7a51cb9d7ec1e5fde71dfa74773808c3f6238be8cb3d570ee1bee6356211bb2e Size: 182503791 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:4ffd7ccbe8ff0aa2e09e1c8a72410aadc721ed3ed227890f03ce3f8aa2b33700 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:4ffd7ccbe8ff0aa2e09e1c8a72410aadc721ed3ed227890f03ce3f8aa2b33700 resourceVersion: "14038" uid: 1c2abfe7-041c-48f8-b17b-38f1de62fafa - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:97da74cc6d8fa5d1634eb1760fd1da5c6048619c264c23e62d75f3bf6b8ef5c4 size: 79524639 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d8190195889efb5333eeec18af9b6c82313edd4db62989bd3a357caca4f13f0e size: 1438 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:37fcebd665b9bf280b3a7b7fc8cbbdd35c40de9fde97eec88a9efbb1a416cf0f size: 31542956 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:03bf2f9ff79ce68fdf647999d3c96dd98a59121fae75dd2c1dcce34e3e159eeb size: 13107144 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2b42f43a3d9df8228ab00afc8ece1dbfafae24fbd2b3ea72b6234bb68dc2c1bf size: 59202343 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6a236d3f7133294b18fc16ae91db25789f7bc787026b7c8a9652066b26396ff7 size: 251378864 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: OpenShift Developer Services Config: Entrypoint: - /usr/bin/go-init - -main - /usr/local/bin/run-jnlp-client Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - __doozer=merge - BUILD_RELEASE=202306070816.p0.g05d83ef.assembly.stream - BUILD_VERSION=v4.13.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=13 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.13.0-202306070816.p0.g05d83ef.assembly.stream-05d83ef - SOURCE_GIT_TREE_STATE=clean - OS_GIT_COMMIT=05d83ef - SOURCE_DATE_EPOCH=1685556672 - SOURCE_GIT_COMMIT=05d83eff7e17160e679898a2a5cd6019ec252c49 - SOURCE_GIT_TAG=openshift-clients-4.13.0-202304190216-4-g05d83eff7 - SOURCE_GIT_URL=https://github.com/openshift/oc - HOME=/home/jenkins - LANG=en_US.UTF-8 - LC_ALL=en_US.UTF-8 Labels: License: GPLv2+ architecture: x86_64 build-date: 2023-06-13T18:31:11 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jenkins-agent-base-rhel8-container com.redhat.license_terms: https://www.redhat.com/agreements description: The jenkins agent base image is intended to be built on top of, to add your own tools that your jenkins job needs. The agent base image includes all the jenkins logic to operate as a agent, so users just have to yum install any additional packages their specific jenkins job will need distribution-scope: public io.buildah.version: 1.29.0 io.k8s.description: The jenkins agent base image is intended to be built on top of, to add your own tools that your jenkins job needs. 
The agent base image includes all the jenkins logic to operate as a agent, so users just have to yum install any additional packages their specific jenkins job will need io.k8s.display-name: Jenkins Agent Base io.openshift.build.commit.id: 418b910a5af2d9a46c4259fbdbe9a851f6a39820 io.openshift.build.commit.url: https://github.com/openshift/jenkins/commit/418b910a5af2d9a46c4259fbdbe9a851f6a39820 io.openshift.build.source-location: https://github.com/openshift/jenkins io.openshift.expose-services: "" io.openshift.maintainer.component: Jenkins io.openshift.maintainer.product: OpenShift Container Platform io.openshift.maintainer.project: OCPBUGS io.openshift.tags: openshift,jenkins,agent maintainer: openshift-dev-services+jenkins@redhat.com name: openshift/jenkins-agent-base release: "1686680363" summary: Provides the latest release of the Red Hat Extended Life Base Image. url: https://access.redhat.com/containers/#/registry.access.redhat.com/openshift/jenkins-agent-base/images/v4.13.0-1686680363 vcs-ref: 512bc80b8c0842a55ce67759deb32d87dcc499ff vcs-type: git vendor: Red Hat, Inc. version: v4.13.0 User: root ContainerConfig: {} Created: "2023-06-13T18:37:00Z" Id: sha256:5e70ac6eee70fc29d831dd15aea72b5db1b18ca661116dad79ba8d51ed00a6ab Size: 434784606 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ocp-tools-4/jenkins-agent-base-rhel8@sha256:50729a37cfd9eeb05865038eef01b6a2baa92e2f12fe429de3f43d85ef8824b5 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:50729a37cfd9eeb05865038eef01b6a2baa92e2f12fe429de3f43d85ef8824b5 resourceVersion: "13668" uid: 8e158c55-8622-4d24-8ed6-b0c27c96743a - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:172ffdf04fe0ffd5a0117272da0333271a3c558dbeaf357e7412638d99a1e462 size: 154305621 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:929840606b28e3c864a81c9f400404f190b84f76c4de3b364adb059c3051d45f size: 88849944 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el8 - NODEJS_VER=20 - PYTHON_VERSION=3.12 - PATH=/opt/app-root/src/.local/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PYTHONUNBUFFERED=1 - PYTHONIOENCODING=UTF-8 - LC_ALL=en_US.UTF-8 - LANG=en_US.UTF-8 - CNB_STACK_ID=com.redhat.stacks.ubi8-python-312 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - PIP_NO_CACHE_DIR=off - SUMMARY=Platform for building and running Python 3.12 applications - DESCRIPTION=Python 3.12 available as container is a base platform for building and running various Python 3.12 applications and frameworks. Python is an easy to learn, powerful programming language. 
It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. - BASH_ENV=/opt/app-root/bin/activate - ENV=/opt/app-root/bin/activate - PROMPT_COMMAND=. /opt/app-root/bin/activate ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-30T16:33:11Z" com.redhat.component: python-312-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Python 3.12 available as container is a base platform for building and running various Python 3.12 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. distribution-scope: public io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.ubi8-python-312 io.k8s.description: Python 3.12 available as container is a base platform for building and running various Python 3.12 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. io.k8s.display-name: Python 3.12 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,python,python311,python-312,rh-python312 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/python-312 org.opencontainers.image.revision: 3f3c9bd8006baf6d2d2b8f43234653883eb93b13 release: "1761841938" summary: Platform for building and running Python 3.12 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-python-container.git --context-dir=3.12/test/setup-test-app/ ubi8/python-312 python-sample-app vcs-ref: 3f3c9bd8006baf6d2d2b8f43234653883eb93b13 vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-30T16:33:31Z" Id: sha256:1c22cc5c552c6e25081f04f207f1639163bf34ef4e249760f046eb408ba44808 Size: 338717870 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/python-312@sha256:50752ad94cb38e775d2a65fdc43c3249d6d24d87d15411af7b1525aa8a934277 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:06Z" name: sha256:50752ad94cb38e775d2a65fdc43c3249d6d24d87d15411af7b1525aa8a934277 resourceVersion: "14129" uid: 3d1d0144-5ac3-47c2-8d4f-50c4f59ab83c - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:87df8a98f4ba365b04e9b199d1b37f1cb531f7ad3ba45202e39a6c7677f01ff0 size: 99998283 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-postgresql Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - POSTGRESQL_VERSION=15 - POSTGRESQL_PREV_VERSION=13 - HOME=/var/lib/pgsql - PGUSER=postgres - APP_DATA=/opt/app-root - SUMMARY=PostgreSQL is an advanced Object-Relational database management system - DESCRIPTION=PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/postgresql - ENABLED_COLLECTIONS= ExposedPorts: 5432/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T16:15:24Z" com.redhat.component: postgresql-15-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. 
io.k8s.display-name: PostgreSQL 15 io.openshift.expose-services: 5432:postgresql io.openshift.s2i.assemble-user: "26" io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,postgresql,postgresql15,postgresql-15 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel8/postgresql-15 org.opencontainers.image.revision: 8c7bbd9aca352297ce1a0fb7876f79290eb33c43 release: "1761063286" summary: PostgreSQL is an advanced Object-Relational database management system url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d --name postgresql_database -e POSTGRESQL_USER=user -e POSTGRESQL_PASSWORD=pass -e POSTGRESQL_DATABASE=db -p 5432:5432 rhel8/postgresql-15 vcs-ref: 8c7bbd9aca352297ce1a0fb7876f79290eb33c43 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "26" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-21T16:15:43Z" Id: sha256:69739bf27af2b807dfc7e81309c24ede17165663d8c58a866cc71a1aba1befb2 Size: 195556514 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel8/postgresql-15@sha256:510b3f197ad39f82b828cc8be16e12a193d4dcd6ea27af01ce10c71b87c5cbfc kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:510b3f197ad39f82b828cc8be16e12a193d4dcd6ea27af01ce10c71b87c5cbfc resourceVersion: "14113" uid: 03042aae-f0de-4b07-80e6-e2b829c2f5da - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c7965aa7086045a59bdb113a1fb8a19d7ccf7af4133e59af8ecefd39cda8e0b1 size: 78964242 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0a47b24a64cfa083053cbc2215f10f9a84ef11dd992d4c2755f3f91e8ab9a38f size: 68231466 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:71ab0d8c6a4c41db5c7a2cde1f284080513b081bf45af5891eb8dd5a8cc4c373 size: 12192264 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3b84f6150455cc1a21301aa835b6c307f678241265c88e36cca2517be9a87a94 size: 61654235 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:344f7237a2476a01eacbac1d548100b6222edea8b86f3e38ca1dc6011103d82d size: 198980913 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: OpenShift Developer Services Config: Entrypoint: - /usr/bin/go-init - -main - /usr/local/bin/run-jnlp-client Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - __doozer=merge - BUILD_RELEASE=202509140700.p0.gd192e90.assembly.stream.el8 - BUILD_VERSION=v4.13.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=13 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.13.0-202509140700.p0.gd192e90.assembly.stream.el8-d192e90 - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.13 - __doozer_key=openshift-enterprise-cli - __doozer_version=v4.13.0 - OS_GIT_COMMIT=d192e90 - SOURCE_DATE_EPOCH=1712056277 - SOURCE_GIT_COMMIT=d192e901ece237d9ae1580d73e78f423ec2ef322 - SOURCE_GIT_TAG=openshift-clients-4.13.0-202304190216-95-gd192e901e - SOURCE_GIT_URL=https://github.com/openshift/oc - ART_BUILD_ENGINE=brew - ART_BUILD_DEPS_METHOD=cachito - ART_BUILD_NETWORK=internal-only - HOME=/home/jenkins - LANG=en_US.UTF-8 - LC_ALL=en_US.UTF-8 Labels: License: GPLv2+ architecture: x86_64 build-date: 
2025-09-22T10:03:18 com.redhat.component: jenkins-agent-base-rhel8-container com.redhat.license_terms: https://www.redhat.com/agreements description: The jenkins agent base image is intended to be built on top of, to add your own tools that your jenkins job needs. The agent base image includes all the jenkins logic to operate as a agent, so users just have to yum install any additional packages their specific jenkins job will need distribution-scope: public io.buildah.version: 1.33.12 io.k8s.description: The jenkins agent base image is intended to be built on top of, to add your own tools that your jenkins job needs. The agent base image includes all the jenkins logic to operate as a agent, so users just have to yum install any additional packages their specific jenkins job will need io.k8s.display-name: Jenkins Agent Base io.openshift.build.commit.id: cfb3a904331d93327ba0eb7145b78ac0b24f4675 io.openshift.build.commit.url: https://github.com/openshift/jenkins/commit/cfb3a904331d93327ba0eb7145b78ac0b24f4675 io.openshift.build.source-location: https://github.com/openshift/jenkins io.openshift.expose-services: "" io.openshift.maintainer.component: Jenkins io.openshift.maintainer.product: OpenShift Container Platform io.openshift.maintainer.project: OCPBUGS io.openshift.tags: openshift,jenkins,agent maintainer: openshift-dev-services+jenkins@redhat.com name: openshift/jenkins-agent-base release: "1758535340" summary: Provides the latest release of Red Hat Universal Base Image 8. url: https://access.redhat.com/containers/#/registry.access.redhat.com/openshift/jenkins-agent-base/images/v4.13.0-1758535340 vcs-ref: 2d2a4f0dd7c20792a4308ed677a2e415846c1091 vcs-type: git vendor: Red Hat, Inc. version: v4.13.0 User: root ContainerConfig: {} Created: "2025-09-22T10:08:52Z" Id: sha256:8d2f1d46a14c4db10c03f38ddee5649dfc8eb652a20d0816f94dc5c315b77db8 Size: 420056604 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ocp-tools-4/jenkins-agent-base-rhel8@sha256:52f9b4df3f7833876ee502a6bff2539491db07e060b213b6a0a8fda0c4a881c1 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:52f9b4df3f7833876ee502a6bff2539491db07e060b213b6a0a8fda0c4a881c1 resourceVersion: "13674" uid: 23b166ba-7a04-42b8-ab48-7e0032844987 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c9281c141a1bfec06e291d2ad29bfdedfd10a99d583fc0f48d3c26723ebe0761 size: 75827357 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:31114e120ca0c7dc51e01721c5a689a614edb6c86de11301d503c72be1540c79 size: 1325 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:733e0a770c8de64c133a2b22ac6258c124dcaab4c6efadbb1db3f5612206533b size: 436562513 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:333560b12f367c723c14d593bc2fe88f5b982bb4faee46e3108cea7a146cd741 size: 332935784 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - 
AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - DEFAULT_ADMIN_USERNAME=eapadmin - HOME=/home/jboss - HTTPS_ENABLE_HTTP2=true - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - JBOSS_CONTAINER_HAWKULAR_MODULE=/opt/jboss/container/hawkular - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_EAP_VERSION=7.2.1.GA - JBOSS_HOME=/opt/eap - JBOSS_IMAGE_NAME=jboss-eap-7/eap72-openshift - JBOSS_IMAGE_VERSION=1.0 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=eap - JOLOKIA_VERSION=1.5.0 - LAUNCH_JBOSS_IN_BACKGROUND=true - MAVEN_VERSION=3.5 - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - PRODUCT_VERSION=7.2.1.GA - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - WILDFLY_CAMEL_VERSION=5.3.0.fuse-740022-redhat-00002 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: c16126310035 Image: f5cd6e5c804e4cb7375b71e518aa9db68198247c7882137f1b9354c495c590d9 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-09-04T11:56:11.942792 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Camel applications on EAP 7.2.1 distribution-scope: public io.cekit.version: 2.2.7 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.2.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Otavio Piske name: fuse7/fuse-eap-openshift org.concrt.version: 2.2.7 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.2.1.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.2.1.GA release: "5.1567588144" summary: Platform for building and running Apache Camel applications on EAP 7.2.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.4-5.1567588144 vcs-ref: fa998a5261ed25ce1445504d742098c8caf3bf10 vcs-type: git vendor: Red Hat, Inc. 
version: "1.4" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - DEFAULT_ADMIN_USERNAME=eapadmin - HOME=/home/jboss - HTTPS_ENABLE_HTTP2=true - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - JBOSS_CONTAINER_HAWKULAR_MODULE=/opt/jboss/container/hawkular - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_EAP_VERSION=7.2.1.GA - JBOSS_HOME=/opt/eap - JBOSS_IMAGE_NAME=jboss-eap-7/eap72-openshift - JBOSS_IMAGE_VERSION=1.0 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=eap - JOLOKIA_VERSION=1.5.0 - LAUNCH_JBOSS_IN_BACKGROUND=true - MAVEN_VERSION=3.5 - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - PRODUCT_VERSION=7.2.1.GA - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - WILDFLY_CAMEL_VERSION=5.3.0.fuse-740022-redhat-00002 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: c16126310035 Image: sha256:08d78aaef734f47c91cb4bf8d424275566304a5769258da5c4ae88fb0f886a1e Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-09-04T11:56:11.942792 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Camel applications on EAP 7.2.1 distribution-scope: public io.cekit.version: 2.2.7 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.2.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Otavio Piske name: fuse7/fuse-eap-openshift org.concrt.version: 2.2.7 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.2.1.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.2.1.GA release: "5.1567588144" summary: Platform for building and running Apache Camel applications on EAP 7.2.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.4-5.1567588144 vcs-ref: 
fa998a5261ed25ce1445504d742098c8caf3bf10 vcs-type: git vendor: Red Hat, Inc. version: "1.4" User: "185" WorkingDir: /home/jboss Created: "2019-09-04T11:58:53Z" DockerVersion: 1.13.1 Id: sha256:eadfb5a143ec4bd3de7f37e16d2603a590eaccde0b5aadca389811a958c2ab5b Size: 845335492 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift@sha256:5443b2d3e19c8f540cbe133113a7a4479f3ad98caa1a2a5e6ac48acbe4914b39 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:5443b2d3e19c8f540cbe133113a7a4479f3ad98caa1a2a5e6ac48acbe4914b39 resourceVersion: "14034" uid: ae14503f-4686-46f2-8383-c3e4ee1556d7 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2920d84eafa0cf94806ab58f0a2124f7b2d35bcbb06fc89a9106dcc28efe397a size: 39653524 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:bb4ec6fa4af1ca6bd41e3cd542ef1eb3cf64d0a2d7dc5663106de7eb5e2c4b4a size: 25436791 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Env: - container=oci - PYTHON_VERSION=3.12 - PYTHONUNBUFFERED=1 - PYTHONIOENCODING=UTF-8 - LC_ALL=en_US.UTF-8 - LANG=en_US.UTF-8 - CNB_STACK_ID=com.redhat.stacks.ubi9-python-312 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - PIP_NO_CACHE_DIR=off - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el9 - PATH=/opt/app-root/bin:/opt/app-root/src/bin:/opt/app-root/src/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - BASH_ENV=/opt/app-root/etc/scl_enable - ENV=/opt/app-root/etc/scl_enable - PROMPT_COMMAND=. /opt/app-root/etc/scl_enable - SUMMARY=Minimal platform for building and running Python 3.12 applications - DESCRIPTION=Python 3.12 available as container is a base platform for building and running various Python 3.12 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-15T09:56:13Z" com.redhat.component: python-312-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Python 3.12 available as container is a base platform for building and running various Python 3.12 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. distribution-scope: public io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.ubi9-python-312-minimal io.k8s.description: Python 3.12 available as container is a base platform for building and running various Python 3.12 applications and frameworks. 
Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. io.k8s.display-name: Python 3.12 io.openshift.expose-services: 8080:http io.openshift.tags: builder,python,python312,python-312,rh-python312 maintainer: SoftwareCollections.org name: ubi9/python-312-minimal org.opencontainers.image.revision: 3b0ddde7d89fee50cf4760c3de6d4f53add14f48 release: "1760522101" summary: Minimal platform for building and running Python 3.12 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-python-container.git --context-dir=3.12-minimal/test/setup-test-app/ ubi9/python-312-minimal python-sample-app vcs-ref: 3b0ddde7d89fee50cf4760c3de6d4f53add14f48 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-15T09:56:19Z" Id: sha256:7c51211cb73e7d77a0a72a56e48f2ea637bfb5beb68f83b39f0ee9d64659268b Size: 65103547 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/python-312-minimal@sha256:547a068fddf44318e62b26caa267375dc77acc10ae5dcdd0869a1d60f8b93d5d kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:06Z" name: sha256:547a068fddf44318e62b26caa267375dc77acc10ae5dcdd0869a1d60f8b93d5d resourceVersion: "14128" uid: 830d4c8c-c877-4bec-9091-d9cd057b55c7 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a6577091999bb0bb54af7b808b41c58aa9a79a61f12310b734f235b548159d75 size: 39651444 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a12747800228fe5e4dadcb597f69381b522d98a33585b5bb636e10b3f0355e9f size: 540283081 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - JBOSS_HOME=/opt/eap - HOME=/home/jboss - LD_PRELOAD=libnss_wrapper.so - MAVEN_OPTS=-Duser.home= - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - CONFIG_ADJUSTMENT_MODE=xml_cli - DELETE_BUILD_ARTIFACTS=true - EAP_FULL_GROUPID=org.jboss.eap - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - 
GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - GALLEON_VERSION=4.2.8.Final - GALLEON_WILDFLY_VERSION=5.2.6.Final - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - JBOSS_EAP_VERSION=7.4.6 - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - JBOSS_PRODUCT=sso - JOLOKIA_VERSION=1.7.1 - LAUNCH_JBOSS_IN_BACKGROUND=true - MAVEN_VERSION=3.6 - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - OFFLINER_VERSION=1.6 - PRODUCT_VERSION=7.5.3.GA - S2I_COPY_SERVER=true - S2I_FP_VERSION=23.0.0.Final - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - SSO_FORCE_LEGACY_SECURITY=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - WILDFLY_VERSION=7.4.6.GA-redhat-00002 - JBOSS_IMAGE_NAME=rh-sso-7/sso75-openshift-rhel8 - JBOSS_IMAGE_VERSION=7.5 - JBOSS_SSO_VERSION=7.5.3.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2022-10-28T16:22:34 com.redhat.component: redhat-sso-7-sso75-openshift-rhel8-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat Single Sign-On 7.5 on OpenJDK OpenShift container image, based on the Red Hat Universal Base Image 8 Minimal container image distribution-scope: public io.buildah.version: 1.26.2 io.cekit.version: 4.3.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for running Red Hat SSO io.k8s.display-name: Red Hat SSO 7.5 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: sso,sso75,keycloak maintainer: Red Hat, Inc. 
name: rh-sso-7/sso75-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.product: sso org.jboss.product.eap.version: 7.4.6 org.jboss.product.openjdk.version: "11" org.jboss.product.sso.version: 7.5.3.GA org.jboss.product.version: 7.5.3.GA release: "35" summary: Red Hat Single Sign-On 7.5 on OpenJDK OpenShift container image, based on the Red Hat Universal Base Image 8 Minimal container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/rh-sso-7/sso75-openshift-rhel8/images/7.5-35 vcs-ref: 841bdb7ce652ae6cd6faa6e8a1a379b3f9e6aa32 vcs-type: git vendor: Red Hat, Inc. version: "7.5" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2022-10-28T16:35:01Z" Id: sha256:89016e63254832f6f151dc964d0eb52dfbd4b33d4a033cc97475231cfcff3604 Size: 579966123 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rh-sso-7/sso75-openshift-rhel8@sha256:5522021e9081fa0f0163f75afedb9efaaad25c2a1dde6ce0fab3142ddcc7dd60 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:5522021e9081fa0f0163f75afedb9efaaad25c2a1dde6ce0fab3142ddcc7dd60 resourceVersion: "14063" uid: 14bfd18e-6c24-48ac-81ca-1884af8b352b - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c43687042a41aad69fc526985ef2b82012c011db7e0e26faba4fc860ad32d88e size: 75837780 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2b7b014ba1b80abb29391141385bd32668571313647317d1d64d8b5cebb1f228 size: 1331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2825299765472d0b62c1ed19ebb564a8a191b88ce49639471a274d03e7f9151e size: 3910026 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8df146e29e789eb3e8bec37172cca00cda60cf40f6924dda00379b283e2ce6db size: 85123374 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ace684c254cd984313cffa37b07e1ef036d8620ecf0c845567b758ffb58214db size: 22761222 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.4 - JOLOKIA_VERSION=1.5.0.redhat-1 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages - JAVACONFDIRS=/opt/rh/rh-maven35/root/etc/java - XDG_CONFIG_DIRS=/opt/rh/rh-maven35/root/etc/xdg:/etc/xdg - XDG_DATA_DIRS=/opt/rh/rh-maven35/root/usr/share:/usr/local/share:/usr/share ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 8ea581222c6a Image: a18474b8598a16e4515539f2ad4d40406409de5a319c983d2a9ee84509bdef23 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-09-04T11:27:17.557941 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container 
com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "14.1567588136" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.4-14.1567588136 vcs-ref: ea10f76c372c8fc587f741263211a3612179c660 vcs-type: git vendor: Red Hat, Inc. version: "1.4" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.4 - JOLOKIA_VERSION=1.5.0.redhat-1 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages - JAVACONFDIRS=/opt/rh/rh-maven35/root/etc/java - XDG_CONFIG_DIRS=/opt/rh/rh-maven35/root/etc/xdg:/etc/xdg - XDG_DATA_DIRS=/opt/rh/rh-maven35/root/usr/share:/usr/local/share:/usr/share ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 8ea581222c6a Image: sha256:5e742973cb785b1573db78aa7928f46cdfacb4efa8f39ea4da2c78d68150e73a Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-09-04T11:27:17.557941 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 
1.8.0 org.jboss.product.version: 1.8.0 release: "14.1567588136" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.4-14.1567588136 vcs-ref: ea10f76c372c8fc587f741263211a3612179c660 vcs-type: git vendor: Red Hat, Inc. version: "1.4" User: "185" WorkingDir: /home/jboss Created: "2019-09-04T11:29:40Z" DockerVersion: 1.13.1 Id: sha256:ad4d6e5244e83de4ddfb147e5bf12e47ad08f7a4a77e878141eafed93b3b4b32 Size: 187640400 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift@sha256:5555a6031cbb8eb09cfeb73cacaabefd5a5824637b047af69b981bc66bdd8b3c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:5555a6031cbb8eb09cfeb73cacaabefd5a5824637b047af69b981bc66bdd8b3c resourceVersion: "14049" uid: d28d4c4e-f905-48bd-a8e7-16c2d7a9f256 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7e5f40fe90b709b27097c5ff1121f56e562c0f563ba652f6cb1be1fe6b07253a size: 39746652 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:80ec3d3dffebecdd28b23da645c7fd1735a5dfdda53c22af9d41bd1cdd0c6e40 size: 62851985 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - '"./${DOTNET_DEFAULT_CMD}"' Env: - container=oci - HOME=/opt/app-root - PATH=/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - DOTNET_APP_PATH=/opt/app-root/app - DOTNET_DATA_PATH=/opt/app-root/data - DOTNET_DEFAULT_CMD=default-cmd.sh - DOTNET_RUNNING_IN_CONTAINER=true - NUGET_XMLDOC_MODE=skip - ASPNETCORE_URLS=http://*:8080 - APP_UID=1001 - DOTNET_VERSION=8.0.22 - ASPNET_VERSION=8.0.22 ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-12-10T16:43:05Z" com.redhat.component: dotnet-80-runtime-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Platform for running .NET 8 applications distribution-scope: public dotnet_version: 8.0.22 io.buildah.version: 1.41.4 io.k8s.description: Platform for running .NET 8 applications io.k8s.display-name: .NET 8 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: runtime,.net,dotnet,dotnetcore,dotnet80-runtime io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: Red Hat, Inc. name: ubi8/dotnet-80-runtime org.opencontainers.image.created: "2025-12-10T16:43:05Z" org.opencontainers.image.revision: 32b7b61dfccb30d909cedb7e0fb30a194e7b65ab release: "1765384715" summary: .NET 8 runtime url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: 32b7b61dfccb30d909cedb7e0fb30a194e7b65ab vcs-type: git vendor: Red Hat, Inc. 
version: "8.0" User: "1001" WorkingDir: /opt/app-root/app ContainerConfig: {} Created: "2025-12-10T16:43:18Z" Id: sha256:7c97519151a918398183811c1b1a854d05f3c8bfb5b98d34c2342925340d863d Size: 102612092 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/dotnet-80-runtime@sha256:55a832a2dd32c4ab288b2c76e1c531bd6df07651010f7b9f8f983dff5ee584ab kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-12-12T16:25:13Z" name: sha256:55a832a2dd32c4ab288b2c76e1c531bd6df07651010f7b9f8f983dff5ee584ab resourceVersion: "40624" uid: 98f724b0-92bf-47f0-afd1-3a24f45d603e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:79cf9a52657973381735680ff3e176fd5bc82924252f003178f258d16506319f size: 115252902 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=21 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-21-runtime - JBOSS_IMAGE_VERSION=1.18 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2024-01-17T20:02:32 com.redhat.component: openjdk-21-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 21 runtime distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.9.1 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-21-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "21" org.jboss.product.version: "21" org.opencontainers.image.documentation: https://jboss-container-images.github.io/openjdk/ release: "2.1705519635" summary: Image for Red Hat OpenShift providing OpenJDK 21 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-21-runtime/images/1.18-2.1705519635 usage: https://jboss-container-images.github.io/openjdk/ vcs-ref: a6fac39569ba28195a4fbf27c1df65148afee408 vcs-type: git vendor: Red Hat, Inc. 
version: "1.18" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-01-17T20:08:14Z" Id: sha256:196ba0ddcfa8f047fa7275966236356c2cf75d54b3ede1697f61a6ca3b5b533c Size: 154579690 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-21-runtime@sha256:55dc61c31ea50a8f7a45e993a9b3220097974948b5cd1ab3f317e7702e8cb6fc kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:55dc61c31ea50a8f7a45e993a9b3220097974948b5cd1ab3f317e7702e8cb6fc resourceVersion: "14123" uid: 3e7ee87f-253d-4093-bc3d-9fe0bad95d0d - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a318ad6eeafd4f939586b6d0ca41adb994bcd581c9e548681ee3fc6071b1ba1b size: 39707507 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:04d49ef24fbfa7ac7d8ff5bfdf2b15330de0988960e0c494c0d08a13e8af7e8b size: 577875855 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - JBOSS_HOME=/opt/eap - HOME=/home/jboss - LD_PRELOAD=libnss_wrapper.so - MAVEN_OPTS=-Duser.home= - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - CONFIG_ADJUSTMENT_MODE=xml_cli - DELETE_BUILD_ARTIFACTS=true - EAP_FULL_GROUPID=org.jboss.eap - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - GALLEON_VERSION=4.2.9.Final - GALLEON_WILDFLY_VERSION=5.2.6.Final - HTTPS_ENABLE_HTTP2=true - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - 
JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - JBOSS_EAP_VERSION=7.4.23 - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - JBOSS_PRODUCT=sso - JOLOKIA_VERSION=1.7.1 - LAUNCH_JBOSS_IN_BACKGROUND=true - MAVEN_VERSION=3.6 - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - OFFLINER_VERSION=1.6 - PRODUCT_VERSION=7.6.12.GA - S2I_COPY_SERVER=true - S2I_FP_VERSION=23.0.0.Final - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - SSO_FORCE_LEGACY_SECURITY=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - WILDFLY_VERSION=7.4.23.GA-redhat-00002 - JBOSS_IMAGE_NAME=rh-sso-7/sso76-openshift-rhel8 - JBOSS_IMAGE_VERSION=7.6 - JBOSS_SSO_VERSION=7.6.12.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2025-09-23T16:53:13 com.redhat.component: redhat-sso-7-sso76-openshift-rhel8-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat Single Sign-On 7.6 on OpenJDK OpenShift container image, based on the Red Hat Universal Base Image 8 Minimal container image distribution-scope: public io.buildah.version: 1.33.12 io.cekit.version: 4.10.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for running Red Hat SSO io.k8s.display-name: Red Hat SSO 7.6 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: sso,sso76,keycloak maintainer: Red Hat, Inc. name: rh-sso-7/sso76-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.product: sso org.jboss.product.eap.version: 7.4.23 org.jboss.product.openjdk.version: "11.0" org.jboss.product.sso.version: 7.6.12.GA org.jboss.product.version: 7.6.12.GA release: "71" summary: Red Hat Single Sign-On 7.6 on OpenJDK OpenShift container image, based on the Red Hat Universal Base Image 8 Minimal container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/rh-sso-7/sso76-openshift-rhel8/images/7.6-71 vcs-ref: a91ff864f43f537dcd775df2e17dfa60b463fa96 vcs-type: git vendor: Red Hat, Inc. 
version: "7.6" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-09-23T17:17:04Z" Id: sha256:9ac7f79e942bc8bc3a75bde325ddbbcfadf868774495fe95f235225777aad101 Size: 617614450 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rh-sso-7/sso76-openshift-rhel8@sha256:561ade81cb6455e0de35573d7ca68ac3f043c86385aac9e274dd53a7a75d3c16 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:561ade81cb6455e0de35573d7ca68ac3f043c86385aac9e274dd53a7a75d3c16 resourceVersion: "14067" uid: a350e0de-fad8-40db-a67d-021be325b2d9 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:50be7bb6ce3ddb41606e1956ba5c61072699ac536980f260a0db6dc59c8013fe size: 39575081 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2df43de46d33e2fb030fde7397d323fd018dcc6279e8821ff18a51738eb71cd7 size: 81897201 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.23 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2025-11-25T10:28:24 com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.buildah.version: 1.33.12 io.cekit.version: 4.13.0.dev0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 org.opencontainers.image.documentation: https://rh-openjdk.github.io/redhat-openjdk-containers/ release: "3.1764066421" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.23-3.1764066421 usage: https://rh-openjdk.github.io/redhat-openjdk-containers/ vcs-ref: 1525ac5b44f35db161f3d3efa207ccd05b700efa vcs-type: git vendor: Red Hat, Inc. 
version: "1.23" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-11-25T10:40:22Z" Id: sha256:7f52c54192ccabfbf2e7e4d19561a61e22387830ae44dce3c2ad1ffdf837935c Size: 121492188 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/openjdk-8-runtime@sha256:57ab1f0ad24e02143978fc79c5219a02c4d6a5a27225ee5454c85a47839b6ddc kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-12-12T16:25:13Z" name: sha256:57ab1f0ad24e02143978fc79c5219a02c4d6a5a27225ee5454c85a47839b6ddc resourceVersion: "40606" uid: 3b47575b-eb77-4fc8-bcf3-32f3310af449 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:378837c0e24ad4a2e33f0eb3d68dc0c31d9a7dbbd5357d4acafec1d3a7930602 size: 74923740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e17262bc23414bd3c0e9808ad7a87b055fe5afec386da42115a839ea2083d233 size: 1303 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0eeb656bc1e64b6c3ba63f2fa9450feaef3c60159d48eb2171ad1f25f5e655d9 size: 3805266 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:77187f02cbeff1e8de9cdd8e850f300e7267ddf19991dcbc588a498c14df3ff0 size: 70351735 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:863fb864126eaa9a42cc66096894ac7c91c5c82c467d86e149894bf874155679 size: 15177175 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.0 - JOLOKIA_VERSION=1.5.0.redhat-1 - PROMETHEUS_JMX_EXPORTER_VERSION=0.10.0.redhat-2 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 3183206101f3 Image: 3320f15730e3d95d98b084a00d040f8e97053ec1aa0858da207a0f4b332871b8 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-08-01T18:00:39.090809 com.redhat.build-host: osbs-cpt-004.ocp.osbs.upshift.eng.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.0.0 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.10.0.redhat-2 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Cloud Enablement Feedback name: fuse7/fuse-java-openshift org.concrt.version: 2.0.0 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "11.1533127955" summary: Build and run 
Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.0-11.1533127955 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: 93a32d6c79061022f016ecba4d6766fb0c10f876 vcs-type: git vendor: Red Hat, Inc. version: "1.0" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.0 - JOLOKIA_VERSION=1.5.0.redhat-1 - PROMETHEUS_JMX_EXPORTER_VERSION=0.10.0.redhat-2 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 3183206101f3 Image: sha256:297cabf44d7b083ba0895af2398a0e0204de7212a1e4cff8835c5c348ed7520c Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-08-01T18:00:39.090809 com.redhat.build-host: osbs-cpt-004.ocp.osbs.upshift.eng.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.0.0 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.10.0.redhat-2 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Cloud Enablement Feedback name: fuse7/fuse-java-openshift org.concrt.version: 2.0.0 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "11.1533127955" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.0-11.1533127955 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: 93a32d6c79061022f016ecba4d6766fb0c10f876 vcs-type: git vendor: Red Hat, Inc. 
version: "1.0" User: "185" WorkingDir: /home/jboss Created: "2018-08-01T18:02:34Z" DockerVersion: 1.12.6 Id: sha256:f01a081e1d1c2fd6682dab80abcf8a08ad66206d7d58a9033f3d53c51d95d636 Size: 164265393 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift@sha256:57fa0cb158aa31193908df27fc707afcfdd4bdaf93b3286f5602d5f804e1927f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:57fa0cb158aa31193908df27fc707afcfdd4bdaf93b3286f5602d5f804e1927f resourceVersion: "14028" uid: c3c45f2c-8c8f-43bc-9c10-9f9b52dcf396 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:06f86e50a0b74ff9eb161a7d781228877c90e8ff57e9689e8cb8b0f092a2a9f9 size: 39268171 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6b42020daf7b58dee6d307d4868ab617031169687edc44eed7641b881391e6b6 size: 76864770 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.15 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-04-25T05:06:59 com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 4.3.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1682399166" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.15-1.1682399166 vcs-ref: 490ce28b7e2fbd1f744e85d04aadcc31d252440e vcs-type: git vendor: Red Hat, Inc. 
version: "1.15" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-04-25T05:16:48Z" Id: sha256:8b45a4a7e12236549e0e21ab9913daea195db2f5426b8e2898655db015957b9c Size: 116151330 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:59b88fb0c467ca43bf3c1af6bfd8777577638dd8079f995cdb20b6f4e20ce0b6 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:59b88fb0c467ca43bf3c1af6bfd8777577638dd8079f995cdb20b6f4e20ce0b6 resourceVersion: "14223" uid: 4b41ce42-01e5-4c1e-a5e7-e496e22fa4de - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c43687042a41aad69fc526985ef2b82012c011db7e0e26faba4fc860ad32d88e size: 75837780 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2b7b014ba1b80abb29391141385bd32668571313647317d1d64d8b5cebb1f228 size: 1331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:81b4c0b347a47353299773adfb6985fc16c599160cb67f748d2cd1196a3da72c size: 435771809 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:bb83fc4990dbb7c52c1ae523f75fc498e1a353edd0a9f26adb59ec0fe198c823 size: 323115251 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - DEFAULT_ADMIN_USERNAME=eapadmin - HOME=/home/jboss - HTTPS_ENABLE_HTTP2=true - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - JBOSS_CONTAINER_HAWKULAR_MODULE=/opt/jboss/container/hawkular - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_EAP_VERSION=7.2.1.GA - JBOSS_HOME=/opt/eap - JBOSS_IMAGE_NAME=jboss-eap-7/eap72-openshift - JBOSS_IMAGE_VERSION=1.0 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=eap - JOLOKIA_VERSION=1.5.0 - LAUNCH_JBOSS_IN_BACKGROUND=true - MAVEN_VERSION=3.5 - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - PRODUCT_VERSION=7.2.1.GA - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - WILDFLY_CAMEL_VERSION=5.3.0.fuse-731003-redhat-00002 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 8ea581222c6a Image: 4ac3e6c8c275deed8e72190fc5aad1fa217c585d9c895d304f98b67c688d37b3 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-06-28T17:23:13.179952 
com.redhat.build-host: cpt-1004.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Camel applications on EAP 7.1 distribution-scope: public io.cekit.version: 2.2.7 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.2 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse name: fuse7/fuse-eap-openshift org.concrt.version: 2.2.7 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.2.1.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.2.1.GA release: "10.1561731101" summary: Platform for building and running Apache Camel applications on EAP 7.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.3-10.1561731101 vcs-ref: 25f87039bfbc4b14263a1663da9e7f41f1eb13fd vcs-type: git vendor: Red Hat, Inc. version: "1.3" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - DEFAULT_ADMIN_USERNAME=eapadmin - HOME=/home/jboss - HTTPS_ENABLE_HTTP2=true - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - JBOSS_CONTAINER_HAWKULAR_MODULE=/opt/jboss/container/hawkular - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_EAP_VERSION=7.2.1.GA - JBOSS_HOME=/opt/eap - JBOSS_IMAGE_NAME=jboss-eap-7/eap72-openshift - JBOSS_IMAGE_VERSION=1.0 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=eap - JOLOKIA_VERSION=1.5.0 - LAUNCH_JBOSS_IN_BACKGROUND=true - MAVEN_VERSION=3.5 - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - PRODUCT_VERSION=7.2.1.GA - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - WILDFLY_CAMEL_VERSION=5.3.0.fuse-731003-redhat-00002 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 8ea581222c6a Image: sha256:5945902096c4dc3ab71ca6faf376b478cb5c2afbb8f71fc551044b083292b3e0 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 
2019-06-28T17:23:13.179952 com.redhat.build-host: cpt-1004.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Camel applications on EAP 7.1 distribution-scope: public io.cekit.version: 2.2.7 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.2 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse name: fuse7/fuse-eap-openshift org.concrt.version: 2.2.7 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.2.1.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.2.1.GA release: "10.1561731101" summary: Platform for building and running Apache Camel applications on EAP 7.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.3-10.1561731101 vcs-ref: 25f87039bfbc4b14263a1663da9e7f41f1eb13fd vcs-type: git vendor: Red Hat, Inc. version: "1.3" User: "185" WorkingDir: /home/jboss Created: "2019-06-28T17:25:09Z" DockerVersion: 1.13.1 Id: sha256:28c2475c2961874ea89fb3681e4afd8a2271f55297255ff6ce2fd10ceeedc112 Size: 834734581 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift@sha256:5bb11da5abfe6a1bc937f0439f08fa27efc96c438106b6a545be62672a39fc26 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:5bb11da5abfe6a1bc937f0439f08fa27efc96c438106b6a545be62672a39fc26 resourceVersion: "14032" uid: 8d55fdb6-f208-48c2-ab13-a0de64de975a - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:90a82be6f2a43e7a2a943502b2f22b6bbca066d9427526f93f733c2a492596ed size: 77837369 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8bd6f3302451da24fd3b0df386f34bc127935a1e20aa71c61819bc742f2e1336 size: 17705582 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:682ed892da8c2b5be3d68dc4a07c0019f6a8d414e80f7847c561405c5fd1ff20 size: 98454985 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-mysqld Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - MYSQL_VERSION=10.3 - APP_DATA=/opt/app-root/src - HOME=/var/lib/mysql - SUMMARY=MariaDB 10.3 SQL database server - DESCRIPTION=MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. 
- CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/mysql - MYSQL_PREFIX=/usr ExposedPorts: 3306/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-14T06:12:29Z" com.redhat.component: mariadb-103-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. io.k8s.display-name: MariaDB 10.3 io.openshift.expose-services: 3306:mysql io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,mysql,mariadb,mariadb103,mariadb-103 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel8/mariadb-103 org.opencontainers.image.revision: 985d3420c59d9f45e30b2a3743794fb2047f5e92 release: "1760422313" summary: MariaDB 10.3 SQL database server url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d -e MYSQL_USER=user -e MYSQL_PASSWORD=pass -e MYSQL_DATABASE=db -p 3306:3306 rhel8/mariadb-103 vcs-ref: 985d3420c59d9f45e30b2a3743794fb2047f5e92 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "27" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-14T06:12:47Z" Id: sha256:e8b408fb20efc01a03a2ce947a68ba0994abc106571e326fc86fe7095e9625ed Size: 194013667 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel8/mariadb-103@sha256:5c16087bf81b2285e8815970b44385f7c98612fd6eb1af23b6d89db86004efa3 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:5c16087bf81b2285e8815970b44385f7c98612fd6eb1af23b6d89db86004efa3 resourceVersion: "13839" uid: 06f91639-e590-4d7c-9a04-c127a004a83f - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:25c75c34b2e2b68ba9245d9cddeb6b8a0887371ed30744064f85241a75704d87 size: 79262296 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:67705065e025181e4faca8aabe1305bdd92f5bdf8a2b8009cdb69183ac2e2c47 size: 49851946 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9d458e2e81cb0fa811f569aaf711628309c0372c7d5eed4a8ea9ec96b4aeeb42 size: 9300456 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b954cb90d4571440ed86627198be2d74d7c3d264fe72e0af0f35f40f0da99ea8 size: 75745362 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - ART_BUILD_ENGINE=konflux - ART_BUILD_DEPS_METHOD=cachi2 - ART_BUILD_NETWORK=hermetic - ART_BUILD_DEPS_MODE=default - __doozer=merge - 
BUILD_RELEASE=202510212154.p2.g7f1d6f8.assembly.stream.el9 - BUILD_VERSION=v4.20.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=20 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.20.0-202510212154.p2.g7f1d6f8.assembly.stream.el9-7f1d6f8 - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.20 - __doozer_key=openshift-enterprise-cli - __doozer_uuid_tag=ose-cli-rhel9-v4.20.0-20251021.223340 - __doozer_version=v4.20.0 - OS_GIT_COMMIT=7f1d6f8 - SOURCE_DATE_EPOCH=1761075552 - SOURCE_GIT_COMMIT=7f1d6f88cb0c8ed5c877fc0ae2bd99298c6339f2 - SOURCE_GIT_TAG=openshift-clients-4.12.0-202208031327-1168-g7f1d6f88c - SOURCE_GIT_URL=https://github.com/openshift/oc Labels: License: GPLv2+ architecture: x86_64 build-date: "2025-10-21T22:51:33Z" com.redhat.component: openshift-enterprise-cli-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:openshift:4.20::el9 description: Empty distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: OpenShift is a platform for developing, building, and deploying containerized applications. io.k8s.display-name: OpenShift Client io.openshift.build.commit.id: 7f1d6f88cb0c8ed5c877fc0ae2bd99298c6339f2 io.openshift.build.commit.url: https://github.com/openshift/oc/commit/7f1d6f88cb0c8ed5c877fc0ae2bd99298c6339f2 io.openshift.build.source-location: https://github.com/openshift/oc io.openshift.build.versions: kubectl=1.33.3 io.openshift.expose-services: "" io.openshift.maintainer.component: oc io.openshift.maintainer.project: OCPBUGS io.openshift.tags: openshift,cli maintainer: Red Hat, Inc. name: openshift/ose-cli-rhel9 org.opencontainers.image.revision: 8374844a9ef7a43b422ac806f4844ef29b0bae98 release: 202510212154.p2.g7f1d6f8.assembly.stream.el9 summary: Empty url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel-els/images/9.4-847.1719484506 vcs-ref: 8374844a9ef7a43b422ac806f4844ef29b0bae98 vcs-type: git vendor: Red Hat, Inc. 
version: v4.20.0 ContainerConfig: {} Created: "2025-10-21T22:53:05Z" Id: sha256:d200844501bef80d82a3a592b66ff17cee232d887c785e2e4530312e8fe3be0a Size: 214181073 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5c5d7468f6838b6a714482e62ea956659212f3415ec8f69989f75eb6d8744a6e kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:5c5d7468f6838b6a714482e62ea956659212f3415ec8f69989f75eb6d8744a6e resourceVersion: "13294" uid: 55a5d521-6f4b-43f5-bf81-5da11eade304 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:de1a4526c3d6cdf0e04f24b1888f5ef31425209f1c26e5a6ae7694cdad6e8688 size: 78973191 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:68e455cb0aea90f85be197ceadef7a56ca5a4d7bf6761a3a58b0ab36a65f770e size: 74468695 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - '"./${DOTNET_DEFAULT_CMD}"' Entrypoint: - container-entrypoint Env: - container=oci - HOME=/opt/app-root - PATH=/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - DOTNET_APP_PATH=/opt/app-root/app - DOTNET_DATA_PATH=/opt/app-root/data - DOTNET_DEFAULT_CMD=default-cmd.sh - DOTNET_CORE_VERSION=6.0 - DOTNET_FRAMEWORK=net6.0 - DOTNET_RUNNING_IN_CONTAINER=true - DOTNET_SSL_CERT_DIR=/opt/app-root/ssl_dir - NUGET_XMLDOC_MODE=skip - ASPNETCORE_URLS=http://*:8080 - CONTAINER_SCRIPTS_PATH=/opt/app-root ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: 2024-11-13T22:14:36 com.redhat.component: dotnet-60-runtime-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Platform for running .NET 6 applications distribution-scope: public io.buildah.version: 1.33.8 io.k8s.description: Platform for running .NET 6 applications io.k8s.display-name: .NET 6 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: runtime,.net,dotnet,dotnetcore,dotnet60-runtime io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: Red Hat, Inc. name: ubi8/dotnet-60-runtime release: "56" summary: Provides the latest release of Red Hat Universal Base Image 8. url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/dotnet-60-runtime/images/6.0-56 vcs-ref: 052346f7c37602fffdeac6e8baa9163154cc2f35 vcs-type: git vendor: Red Hat, Inc. 
version: "6.0" User: "1001" WorkingDir: /opt/app-root/app ContainerConfig: {} Created: "2024-11-13T22:17:33Z" Id: sha256:617cbf0cc01d4c323d6ae1f01a1c706aa077d9a3e992e7a729d973ab5da75fdd Size: 153454771 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/dotnet-60-runtime@sha256:5dfcc5b000a1fab4be66bbd43e4db44b61176e2bcba9c24f6fe887dea9b7fd49 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:5dfcc5b000a1fab4be66bbd43e4db44b61176e2bcba9c24f6fe887dea9b7fd49 resourceVersion: "13454" uid: 635f30cb-762a-4a51-8cd8-3bab12f6564e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:25c75c34b2e2b68ba9245d9cddeb6b8a0887371ed30744064f85241a75704d87 size: 79262296 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:67705065e025181e4faca8aabe1305bdd92f5bdf8a2b8009cdb69183ac2e2c47 size: 49851946 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9d458e2e81cb0fa811f569aaf711628309c0372c7d5eed4a8ea9ec96b4aeeb42 size: 9300456 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3019826b26b93fdb39b6e29614bc6b4d1ab879c596261851db4ff70706fa6c55 size: 183535774 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d93f73221d8935bd85cec6a9afac79e6b0af9c30fcfb56cd340fe1774720bb23 size: 319801066 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/bin/bash Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - ART_BUILD_ENGINE=konflux - ART_BUILD_DEPS_METHOD=cachi2 - ART_BUILD_NETWORK=open - ART_BUILD_DEPS_MODE=default - __doozer=merge - BUILD_RELEASE=202510212154.p2.g26d0917.assembly.stream.el9 - BUILD_VERSION=v4.20.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=20 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.20.0-202510212154.p2.g26d0917.assembly.stream.el9-26d0917 - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.20 - __doozer_key=ose-network-tools - __doozer_uuid_tag=network-tools-rhel9-v4.20.0-20251021.223340 - __doozer_version=v4.20.0 - OS_GIT_COMMIT=26d0917 - SOURCE_DATE_EPOCH=1756190912 - SOURCE_GIT_COMMIT=26d09174cbd92386469e777e3bf49bfa95d035d5 - SOURCE_GIT_TAG=26d09174 - SOURCE_GIT_URL=https://github.com/openshift/network-tools Labels: License: GPLv2+ architecture: x86_64 build-date: "2025-10-21T23:43:54Z" com.redhat.component: ose-network-tools-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:openshift:4.20::el9 description: Empty distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Empty io.k8s.display-name: Empty io.openshift.build.commit.id: 26d09174cbd92386469e777e3bf49bfa95d035d5 io.openshift.build.commit.url: https://github.com/openshift/network-tools/commit/26d09174cbd92386469e777e3bf49bfa95d035d5 io.openshift.build.source-location: https://github.com/openshift/network-tools io.openshift.build.versions: kubectl=1.33.3 io.openshift.expose-services: "" io.openshift.maintainer.component: Networking / network-tools io.openshift.maintainer.project: OCPBUGS io.openshift.tags: Empty maintainer: Red Hat, Inc. 
name: openshift/network-tools-rhel9 org.opencontainers.image.revision: 360618f7e1f1a5ed03b403962ae30851067beb72 release: 202510212154.p2.g26d0917.assembly.stream.el9 summary: Empty url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel-els/images/9.4-847.1719484506 vcs-ref: 360618f7e1f1a5ed03b403962ae30851067beb72 vcs-type: git vendor: Red Hat, Inc. version: v4.20.0 User: "0" ContainerConfig: {} Created: "2025-10-21T23:46:38Z" Id: sha256:889090cd25ede277ac9e464ae06bc072929108423054c02ff986650c20008cca Size: 641778969 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5e4e0fd08883744f35560eac43b8120f6324d9b488eb7a7716955fb98ddbace5 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:5e4e0fd08883744f35560eac43b8120f6324d9b488eb7a7716955fb98ddbace5 resourceVersion: "13797" uid: 6f863fe7-2ea9-4b38-acac-0633054ada6b - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7f2c2c4492b6b2d181be862a0a1d1b6f6851cb07244efbcb43d44f9936aa78d5 size: 80005019 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ea092d7970b26c24007a670fc6d0810dbf9531dc0d3a9d6ea514134ba5686724 size: 7541063 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4804e0fbd1e621e9faf332a12d9b81e87fba75f564744cad8c51ce03630375bc size: 62136250 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - run-postgresql Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el7 - POSTGRESQL_VERSION=13 - POSTGRESQL_PREV_VERSION=12 - HOME=/var/lib/pgsql - PGUSER=postgres - APP_DATA=/opt/app-root - SUMMARY=PostgreSQL is an advanced Object-Relational database management system - DESCRIPTION=PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/postgresql - ENABLED_COLLECTIONS=rh-postgresql13 - BASH_ENV=/usr/share/container-scripts/postgresql/scl_enable - ENV=/usr/share/container-scripts/postgresql/scl_enable - PROMPT_COMMAND=. /usr/share/container-scripts/postgresql/scl_enable ExposedPorts: 5432/tcp: {} Labels: architecture: x86_64 build-date: 2024-05-30T09:38:47 com.redhat.component: rh-postgresql13-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. distribution-scope: public io.buildah.version: 1.29.0 io.k8s.description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. 
io.k8s.display-name: PostgreSQL 13 io.openshift.expose-services: 5432:postgresql io.openshift.s2i.assemble-user: "26" io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,postgresql,postgresql13,rh-postgresql13 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhscl/postgresql-13-rhel7 release: "112" summary: PostgreSQL is an advanced Object-Relational database management system url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhscl/postgresql-13-rhel7/images/1-112 usage: podman run -d --name postgresql_database -e POSTGRESQL_USER=user -e POSTGRESQL_PASSWORD=pass -e POSTGRESQL_DATABASE=db -p 5432:5432 rhscl/postgresql-13-rhel7 vcs-ref: 650a3080cdadfaf127db12676d7367028832a281 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "26" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2024-05-30T09:43:55Z" Id: sha256:d4bb2842be9d4c229a7b92c310850541e9c521e58968d69364012dd9eb361a05 Size: 149702202 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhscl/postgresql-13-rhel7@sha256:5f474ef095d7b7aabc5b1c60818201aca66343856fb67eb93751f3b4a82d391b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:5f474ef095d7b7aabc5b1c60818201aca66343856fb67eb93751f3b4a82d391b resourceVersion: "14110" uid: 0ae99d61-0321-4a7e-a2d5-22217f3ca632 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c9281c141a1bfec06e291d2ad29bfdedfd10a99d583fc0f48d3c26723ebe0761 size: 75827357 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:31114e120ca0c7dc51e01721c5a689a614edb6c86de11301d503c72be1540c79 size: 1325 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:64d1ef3b7da93a80b0f0dbb170bd0dae897197330eeca5d4b28b32406ce05bf5 size: 113496170 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - HOME=/home/jboss - JAVA_DATA_DIR=/deployments/data - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_HAWKULAR_MODULE=/opt/jboss/container/hawkular - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - 
JBOSS_IMAGE_VERSION=1.0 - JOLOKIA_VERSION=1.5.0 - MAVEN_VERSION=3.5 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: c16126310035 Image: 0cb8f43f3c0ee74902bca4db6b407206e5e33ab673b54ce6b6a02e87931cd1d8 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-09-04T10:14:06.277221 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: 8080/tcp:webcache,8443/tcp:pcsync-https io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java name: openjdk/openjdk-11-rhel7 org.concrt.version: 2.2.1 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: "11.0" release: "16.1567588131" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.0-16.1567588131 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 1adfa44bec655763e1d25adea89281cc970ac1b2 vcs-type: git vendor: Red Hat, Inc. 
version: "1.0" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - HOME=/home/jboss - JAVA_DATA_DIR=/deployments/data - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_HAWKULAR_MODULE=/opt/jboss/container/hawkular - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.0 - JOLOKIA_VERSION=1.5.0 - MAVEN_VERSION=3.5 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: c16126310035 Image: sha256:29789ea7e724a2cbf6d875cdd47f0ba1965698730e597d32b5d08d4a662b8c08 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-09-04T10:14:06.277221 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: 8080/tcp:webcache,8443/tcp:pcsync-https io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java name: openjdk/openjdk-11-rhel7 org.concrt.version: 2.2.1 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: "11.0" release: "16.1567588131" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.0-16.1567588131 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 1adfa44bec655763e1d25adea89281cc970ac1b2 vcs-type: git vendor: Red Hat, Inc. 
version: "1.0" User: "185" WorkingDir: /home/jboss Created: "2019-09-04T10:19:20Z" DockerVersion: 1.13.1 Id: sha256:781deed5dc9a9b694bf99bc690876b040c3cd3a21cbbf91dc8d4e334774c8e58 Size: 189332535 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:5fb3543c0d42146f0506c1ea4d09575131da6a2f27885729b7cfce13a0fa90e3 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:5fb3543c0d42146f0506c1ea4d09575131da6a2f27885729b7cfce13a0fa90e3 resourceVersion: "14097" uid: 3b4a180b-d365-4db2-8ac1-d5ec58452ee0 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5329d7039f252afc1c5d69521ef7e674f71c36b50db99b369cbb52aa9e0a6782 size: 39330100 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a39b89b913c41f6eef52956f4f2a7454ea948eb923171e858702e4eb01636fe2 size: 75754606 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.16 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-07-19T16:06:41 com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "2" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.16-2 vcs-ref: 5c6db76f65c8629dda7e9b07fe613296f7db4ae0 vcs-type: git vendor: Red Hat, Inc. 
version: "1.16" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-07-19T16:18:04Z" Id: sha256:d983d5dc836b5f862aa15132c161d29daa4fc81c170c35d054a505911678e37b Size: 115103153 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:603d10af5e3476add5b5726fdef893033869ae89824ee43949a46c9f004ef65d kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:603d10af5e3476add5b5726fdef893033869ae89824ee43949a46c9f004ef65d resourceVersion: "14224" uid: 6d7ab759-df27-48ff-87a7-702d6adec1de - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0a017d456fb3a760722ba4895579d8a412aec74e61d6805b04df6527b70fce6b size: 80807726 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:80a2bcb42ca25702f1e5b3b71dd25c6882ae0a2a03bb87d0b76c579cef9806a4 size: 1607 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4cef03da710b75dc7d3790dd132e70e8e0bdf5ec986c9ea019a18a41dd2a556b size: 115626485 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.14 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2022-10-17T14:03:16 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.buildah.version: 1.26.2 io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java 
Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: openjdk/openjdk-11-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "4" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.14-4 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 8b58570eb3bf3ee0233b3b719f570669f44b48d8 vcs-type: git vendor: Red Hat, Inc. version: "1.14" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2022-10-17T14:10:23Z" Id: sha256:0ad46880060555ce33df90b0701e88a92f6888e0d4d2f09bfaf6f82800c4ad1e Size: 196461538 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:61555b923dabe4ff734279ed1bdb9eb6d450c760e1cc04463cf88608ac8d1338 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:61555b923dabe4ff734279ed1bdb9eb6d450c760e1cc04463cf88608ac8d1338 resourceVersion: "14102" uid: aedba0b1-0031-4f3f-a57a-a4f36f6e5fad - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a9e23b64ace00a199db21d302292b434e9d3956d79319d958ecc19603d00c946 size: 39622437 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:38b71301a1d9df24c98b5a5ee8515404f42c929003ad8b13ab83d2de7de34dec size: 1742 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:21e3f6d4169e6fb80dc9f9f7e9d5c229320a19d8cd688742ed2dc8fdb9aacaf4 size: 86648421 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.11 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 02d87ca75d31 Image: 005148a7d0b230be58796f80ebff36dc276416b308d0616f226f95e91477533e Labels: architecture: x86_64 build-date: 2022-03-28T09:43:33.279257 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "2.1648459559" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: 
https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.11-2.1648459559 vcs-ref: 61c1c4ec845dbdfee56c0e6e9c9371fb43f7d2c0 vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.11 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 02d87ca75d31 Image: sha256:67b9e1b4ea15320d4b79b92b6652184e99f02cc13236284b8281be687d280f2b Labels: architecture: x86_64 build-date: 2022-03-28T09:43:33.279257 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "2.1648459559" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.11-2.1648459559 vcs-ref: 61c1c4ec845dbdfee56c0e6e9c9371fb43f7d2c0 vcs-type: git vendor: Red Hat, Inc. 
version: "1.11" User: "185" WorkingDir: /home/jboss Created: "2022-03-28T09:46:42Z" DockerVersion: 1.13.1 Id: sha256:baa381a6295925b5de5c23b06999adf864e8b06511271e0bb45ef0d7e6fd60f8 Size: 126277133 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:64acf3403b5c2c85f7a28f326c63f1312b568db059c66d90b34e3c59fde3a74b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:64acf3403b5c2c85f7a28f326c63f1312b568db059c66d90b34e3c59fde3a74b resourceVersion: "14188" uid: 09192df6-f4d0-4934-853e-44944f0e9dea - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:158b4527561fa6bd9dc89217fff5b1f4cce16fdc5a5aef36345db0554ba996fc size: 39501292 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a3ba00ce78fe80837f49d37f5f538d9f7dc9eb8b1627350041496a99028cdf26 size: 1751 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:96ed3211a1fdf39a2b337f485d061e7858eff93691c264d7dc88e82ca16d1a0d size: 117629593 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.3 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: fa28dc65189d Image: 58729790e9a33cac990813c816c66f9894b930d910ff5a9e0d3770d68f22acf8 Labels: architecture: x86_64 build-date: 2021-07-23T17:36:41.656022 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running 
plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "18" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.3-18 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: b751d29a1fc06c8bc7945e965162ca258d9a155a vcs-type: git vendor: Red Hat, Inc. version: "1.3" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.3 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: fa28dc65189d Image: sha256:7f1090033a054029fe4cd7d9ccd90cf8702821e13718db2ba4613346d7600290 Labels: architecture: x86_64 build-date: 2021-07-23T17:36:41.656022 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 
"11" org.jboss.product.version: "11" release: "18" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.3-18 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: b751d29a1fc06c8bc7945e965162ca258d9a155a vcs-type: git vendor: Red Hat, Inc. version: "1.3" User: "185" WorkingDir: /home/jboss Created: "2021-07-23T17:38:58Z" DockerVersion: 1.13.1 Id: sha256:5df8a34ef16500f3fcff12d1c388dfafacad19f5ad3fb2d827c25f8ee663060a Size: 157140080 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11@sha256:663eb81388ae8f824e7920c272f6d2e2274cf6c140d61416607261cdce9d50e2 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:663eb81388ae8f824e7920c272f6d2e2274cf6c140d61416607261cdce9d50e2 resourceVersion: "14184" uid: fe7e0a19-edb4-4f7e-8550-11ca1b5ae4f3 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6208c5a2e205726f3a2cd42a392c5e4f05256850d13197a711000c4021ede87b size: 79073674 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:87b6121ef647e82c2efa8e6489d94c7668d88af38c138236592c6675acdf055a size: 18346487 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f6efb7833548fe17294f057c70215a3789c7ac86e39f698f00c4e7a895ccadf3 size: 148568415 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4186a94a1e5b175d7bc3dd68bc28daf97c822f6e56c9d8aee432af1508f245e7 size: 168253660 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - NODEJS_VER=14 - NAME=golang - GO_MAJOR_VERSION=1 - GO_MINOR_VERSION=18 - GO_PATCH_VERSION=10 - CONTAINER_NAME=rhel8/go-toolset - VERSION=1.18.10 - SUMMARY=Platform for building and running Go Applications - DESCRIPTION=Go Toolset available as a container is a base platform for building and running various Go applications and frameworks. Go is an easy to learn, powerful, statically typed language in the C/C++ tradition with garbage collection, concurrent programming support, and memory safety features. Labels: architecture: x86_64 build-date: 2023-05-02T08:02:23 com.redhat.component: go-toolset-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Go Toolset available as a container is a base platform for building and running various Go applications and frameworks. Go is an easy to learn, powerful, statically typed language in the C/C++ tradition with garbage collection, concurrent programming support, and memory safety features. distribution-scope: public io.buildah.version: 1.27.3 io.k8s.description: Go Toolset available as a container is a base platform for building and running various Go applications and frameworks. 
Go is an easy to learn, powerful, statically typed language in the C/C++ tradition with garbage collection, concurrent programming support, and memory safety features. io.k8s.display-name: Go 1.18.10 io.openshift.expose-services: "" io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,golang,golang118,rh-golang118,go io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: Red Hat, Inc. name: rhel8/go-toolset release: "1.1683014505" summary: Platform for building and running Go Applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel8/go-toolset/images/1.18.10-1.1683014505 vcs-ref: 0a0f3d90d55f2c2ce687a113e6b0ec4f6d5385f4 vcs-type: git vendor: Red Hat, Inc. version: 1.18.10 User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2023-05-02T08:06:40Z" Id: sha256:7deee1f302eac4ab5424e1c47cf2c33f8ae965cd09f9d9f476576eaba2171202 Size: 414263381 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/go-toolset@sha256:6740d72db4de99ecb4652cff89a239242afd150d6ccf6ed0ebff89ffcbbc649e kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:6740d72db4de99ecb4652cff89a239242afd150d6ccf6ed0ebff89ffcbbc649e resourceVersion: "13447" uid: 51a3b05c-be7b-4696-ad15-1680689f6a3b - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:872582724f337bcc41b829d3b854567e146ab62aa3c7de0b37e18617d38f5d08 size: 76246809 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b13ffc206103620a7d59e4f5b72279b53e317ade5d545a3daa06ab9bed270f92 size: 1409 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:600592def98ffdb45f9711ecab0f64a77461f9a274b30fc4fdaa94c1c595c4d6 size: 351208290 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/datagrid/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.7.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.7 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.7.GA 
ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: d0feb8001879 Image: 626700f3921fe594404fa4af91e1b528fc969cd693d12e85de97b25dff2abbef Labels: architecture: x86_64 build-date: 2020-11-04T17:56:28.528192 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 3.7.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.7.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.7.GA release: "1.1604508637" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.7-1.1604508637 vcs-ref: 96354c3fa6be20ad2282ccf66c3c9e0f116e4a31 vcs-type: git vendor: Red Hat, Inc. version: "1.7" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.7.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.7 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.7.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: d0feb8001879 Image: sha256:4659d5d8283f992a052a294cdafac078e42c8a28bd3b97020f8e4dacf085e879 Labels: architecture: 
x86_64 build-date: 2020-11-04T17:56:28.528192 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 3.7.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.7.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.7.GA release: "1.1604508637" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.7-1.1604508637 vcs-ref: 96354c3fa6be20ad2282ccf66c3c9e0f116e4a31 vcs-type: git vendor: Red Hat, Inc. version: "1.7" User: "185" WorkingDir: /home/jboss Created: "2020-11-04T18:03:48Z" DockerVersion: 1.13.1 Id: sha256:f93539c39983467734ce536a25b5e7488c2de0c0739baaafabc32330f51f02ae Size: 427464320 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:67a2ae44e1bd87166e5c70f8147ccc9064ddfc8f43170bc92db9b12568cc7f73 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:02Z" name: sha256:67a2ae44e1bd87166e5c70f8147ccc9064ddfc8f43170bc92db9b12568cc7f73 resourceVersion: "13906" uid: 05b083f9-217a-4112-9ae0-51187e0d1a41 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4009473c181390dadf086f42ecb4b10fb87f5e79de8d0195f6c22239985b2da0 size: 111002424 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - 
JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.17 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-10-24T10:41:27 com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "9" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.17-9 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: 7fe2971fe71fcf186dd65f26dc8d2ccacc59a7fa vcs-type: git vendor: Red Hat, Inc. 
version: "1.17" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-10-24T10:48:08Z" Id: sha256:e043ac429b3b2377a20fccf384174860e43b30861514516c11461dd90459b337 Size: 150340465 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11@sha256:67fee4b64b269f5666a1051d806635b675903ef56d07b7cc019d3d59ff1aa97c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:67fee4b64b269f5666a1051d806635b675903ef56d07b7cc019d3d59ff1aa97c resourceVersion: "14181" uid: 055ae67e-9e44-4bb7-9f37-bfeca8b3b1dd - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a9e23b64ace00a199db21d302292b434e9d3956d79319d958ecc19603d00c946 size: 39622437 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:38b71301a1d9df24c98b5a5ee8515404f42c929003ad8b13ab83d2de7de34dec size: 1742 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:aec435fea4b5bc7075142be90e1a5f25f18ee0a579b426c353fa9023c57b7c42 size: 72381581 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.11 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 02d87ca75d31 Image: 7ece4ac7875a3b0fb858ee67afd96da4b03a11167a1b855ada8f9d7b947ad905 Labels: architecture: x86_64 build-date: 2022-03-28T09:54:50.779196 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1648459718" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.11-1.1648459718 vcs-ref: bd1e0ec3ad9d61f6e620f6e77bf65901cbf284c8 vcs-type: git vendor: Red Hat, Inc. 
version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.11 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 02d87ca75d31 Image: sha256:8bafa44b4af8bd05c194dc9e8cd756975bd19c786601a0575013b07e30d3618d Labels: architecture: x86_64 build-date: 2022-03-28T09:54:50.779196 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1648459718" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.11-1.1648459718 vcs-ref: bd1e0ec3ad9d61f6e620f6e77bf65901cbf284c8 vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss Created: "2022-03-28T09:56:57Z" DockerVersion: 1.13.1 Id: sha256:a8eff7d388f5bfe9a30050edd235326bd2f1832f142734ee37a4ba89988475f5 Size: 112010305 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:6a9e81b2eea2f32f2750909b6aa037c2c2e68be3bc9daf3c7a3163c9e1df379f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:6a9e81b2eea2f32f2750909b6aa037c2c2e68be3bc9daf3c7a3163c9e1df379f resourceVersion: "14219" uid: d1d21823-9515-4065-9fd7-ba877d3be5d8 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e0f71f706c2a1ff9efee4025e27d4dfd4f328190f31d16e11ef3283bc16d6842 size: 74922253 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:121ab4741000471a7e2ddc687dedb440060bf9845ca415a45e99e361706f1098 size: 1249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f3e8100df18b6435295877c8636d35e1b4dda1bec0feb4b4fb0e29524cd2a6f3 size: 3814449 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:442397635a987d4e69150861559f0a2fa5f70e76527f0b3b8455a9230fd7ebdb size: 70318532 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:05be2d56722fd9e005e3b97068207ca9e063318577c1c8dbd30fb5616495c81b size: 26047198 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift 
- JBOSS_IMAGE_VERSION=1.3 - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JAVA_DATA_DIR=/deployments/data - JOLOKIA_VERSION=1.5.0 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - MAVEN_VERSION=3.5 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 275866c14a28 Image: 14d1397b80b912c40d9db41a956ab5c37241bb9d4c6a1a02dbb62173b373c795 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-05-25T09:36:46.776111 com.redhat.build-host: ip-10-29-120-249.ec2.internal com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Cloud Enablement Feedback name: redhat-openjdk-18/openjdk18-openshift org.concrt.version: 1.4.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "9" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.3-9 vcs-ref: 95a196009d4a0578f7322e736a14b855022025c0 vcs-type: git vendor: Red Hat, Inc. version: "1.3" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.3 - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JAVA_DATA_DIR=/deployments/data - JOLOKIA_VERSION=1.5.0 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - MAVEN_VERSION=3.5 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 275866c14a28 Image: sha256:4f8141e8a5d50794ca5e21fa94ba3e97d1922230c9ffb34102d9959550293802 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-05-25T09:36:46.776111 com.redhat.build-host: ip-10-29-120-249.ec2.internal com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Cloud Enablement Feedback name: redhat-openjdk-18/openjdk18-openshift org.concrt.version: 1.4.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "9" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: 
https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.3-9 vcs-ref: 95a196009d4a0578f7322e736a14b855022025c0 vcs-type: git vendor: Red Hat, Inc. version: "1.3" User: "185" WorkingDir: /home/jboss Created: "2018-05-25T09:51:14Z" DockerVersion: 1.12.6 Id: sha256:3bb0d233f67591f36ffbecaef447c0733e1ce84b2e7cb69ede1767f50b38293b Size: 175109052 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:6c009f430da02bdcff618a7dcd085d7d22547263eeebfb8d6377a4cf6f58769d kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:6c009f430da02bdcff618a7dcd085d7d22547263eeebfb8d6377a4cf6f58769d resourceVersion: "14168" uid: 3ffa58f9-ccc8-4cb2-8f62-8ce29833df4c - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c8c24bf4ef435ee29d7495ca732a4d82374c1a11c25ca6aae12f997f45ca5a26 size: 39733597 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9abb5e74a5f821b8cab5d489b7a3ebe6bad4b3ee1eb3e7748583419f1ec6c43a size: 63943566 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8c8f181aaa5f9c2fdbb82bd10bfed7e2ecd90e482daf03b5a72a2df7d5e2858a size: 146125818 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - /bin/bash Env: - container=oci - HOME=/opt/app-root - DOTNET_APP_PATH=/opt/app-root/app - DOTNET_DATA_PATH=/opt/app-root/data - DOTNET_DEFAULT_CMD=default-cmd.sh - DOTNET_RUNNING_IN_CONTAINER=true - NUGET_XMLDOC_MODE=skip - ASPNETCORE_URLS=http://*:8080 - APP_UID=1001 - DOTNET_VERSION=9.0.10 - ASPNET_VERSION=9.0.10 - PATH=/opt/app-root/src/.local/bin:/opt/app-root/src/bin:/opt/app-root/.dotnet/tools:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - STI_SCRIPTS_PATH=/usr/libexec/s2i - DOTNET_GENERATE_ASPNET_CERTIFICATE=false - DOTNET_NOLOGO=true - DOTNET_SDK_VERSION=9.0.111 - DOTNET_USE_POLLING_FILE_WATCHER=true ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-23T04:19:56Z" com.redhat.component: dotnet-90-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Platform for building and running .NET 9 applications distribution-scope: public dotnet_version: 9.0.10 io.buildah.version: 1.41.4 io.k8s.description: Platform for building and running .NET 9 applications io.k8s.display-name: .NET 9 SDK io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,.net,dotnet,dotnetcore,dotnet-90 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: Red Hat, Inc. name: ubi8/dotnet-90 org.opencontainers.image.revision: 99c3e369a8ad3eea3249fd405db03b23afc1b3bd release: "1761193002" sdk_version: 9.0.111 summary: .NET 9 SDK url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: 99c3e369a8ad3eea3249fd405db03b23afc1b3bd vcs-type: git vendor: Red Hat, Inc. 
version: "9.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-23T04:20:12Z" Id: sha256:2686d48a4222af19efa514806c5fe13f7d33d461c5e6d6889122b321227f0ddb Size: 249821433 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/dotnet-90@sha256:6cb572c7356c9d9ae6d2760491095de4b1797471ea97c50b330bc2ce1168da56 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:6cb572c7356c9d9ae6d2760491095de4b1797471ea97c50b330bc2ce1168da56 resourceVersion: "13402" uid: 62a8cd5d-bf08-4641-86b1-9e43a65f1c40 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6208c5a2e205726f3a2cd42a392c5e4f05256850d13197a711000c4021ede87b size: 79073674 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a9114bbce1cf696d619180f9f7ff70c43b64cad2c61fee8484baf4937c0719df size: 103042110 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:63199b580b1b4782ed98f87da9c93405ef9adf963b642eb676dac6b4f9e69790 size: 30522080 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jws-5.7/tomcat/bin/launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - CATALINA_OPTS=-Djava.security.egd=file:/dev/./urandom - JBOSS_PRODUCT=webserver - JBOSS_WEBSERVER_VERSION=5.7.2 - JPDA_ADDRESS=8000 - JWS_HOME=/opt/jws-5.7/tomcat - PRODUCT_VERSION=5.7.2 - TOMCAT_VERSION=9.0.62 - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JWS_S2I_MODULE=/opt/jboss/container/jws/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - HOME=/home/jboss - AB_PROMETHEUS_ENABLE=True - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jws-jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9404 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_IMAGE_NAME=jboss-webserver-5/jws57-openjdk8-rhel8-openshift - JBOSS_IMAGE_VERSION=5.7.2 - STI_BUILDER=jee ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9404/tcp: {} Labels: architecture: x86_64 build-date: 2023-04-26T09:25:21 com.redhat.component: jboss-webserver-57-openjdk8-rhel8-openshift-container com.redhat.deployments-dir: /opt/jws-5.7/tomcat/webapps com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: JPDA_ADDRESS:8000 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK8 on UBI8 distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 
4.6.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running web applications on JBoss Web Server 5.7 with OpenJDK8 - Tomcat v9 io.k8s.display-name: JBoss Web Server 5.7 OpenJDK8 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,tomcat9 maintainer: szappis@redhat.com name: jboss-webserver-5/jws57-openjdk8-rhel8-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/jws-5.7/tomcat/webapps org.jboss.product: webserver-tomcat9 org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 5.7.2 org.jboss.product.webserver-tomcat9.version: 5.7.2 release: "8" summary: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK8 on UBI8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-webserver-5/jws57-openjdk8-rhel8-openshift/images/5.7.2-8 vcs-ref: 61b26d085598c85070ec2d9b38a315c0b8f991fb vcs-type: git vendor: Red Hat, Inc. version: 5.7.2 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-04-26T09:28:59Z" Id: sha256:ae400f247fd0a84dec56c018fb71e765bcb9f0492b8b61457133e03fcba53636 Size: 212680271 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-webserver-5/jws57-openjdk8-openshift-rhel8@sha256:6d07cbaef7869b2e0d878740ad685b150f3d8ab960544c881916a01f25f9b6ef kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:6d07cbaef7869b2e0d878740ad685b150f3d8ab960544c881916a01f25f9b6ef resourceVersion: "13795" uid: 6c26885c-8c78-4c4c-b700-cbfa43605156 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:94343313ec1512ab02267e4bc3ce09eecb01fda5bf26c56e2f028ecc72e80b18 size: 79299514 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:daff7315ef7e4097ed9e5313418c762a6b7c3c33f6f8a6ce3c192659e1eb808a size: 467456758 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8a2ce3ce10b696b49ce3aab124b031a01fc32a4eb797cbf85957057bbfec603c size: 639203159 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - EAP_FULL_GROUPID=org.jboss.eap - JBOSS_EAP_VERSION=7.4.13 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.4.13 - WILDFLY_VERSION=7.4.13.GA-redhat-00001 - SSO_FORCE_LEGACY_SECURITY=true - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - 
JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.9.Final - GALLEON_WILDFLY_VERSION=5.2.6.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - S2I_FP_VERSION=23.0.0.Final - DEFAULT_ADMIN_USERNAME=eapadmin - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - OFFLINER_VERSION=1.6 - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap74-openjdk11-openshift-rhel8 - JBOSS_IMAGE_VERSION=7.4.13 - JOLOKIA_VERSION=1.7.2.redhat-00002 - WILDFLY_CAMEL_VERSION=5.10.0.fuse-7_12_1-00009-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2024-01-18T23:04:42 com.redhat.component: fuse-eap-openshift-jdk11-rhel-8-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Camel applications on EAP 7.4 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.7.2.redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.4 
io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Thomas Diesler name: fuse7/fuse-eap-openshift-jdk11-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.4.13 org.jboss.product.openjdk.version: "11" org.jboss.product.version: 7.4.13 release: "23.1705611192" summary: Platform for building and running Apache Camel applications on EAP 7.4 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift-jdk11-rhel8/images/1.12-23.1705611192 vcs-ref: fb4db3d2f131e744a18bd1182cb386356aa87e41 vcs-type: git vendor: Red Hat, Inc. version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-01-18T23:09:14Z" Id: sha256:654f54c42824dc58d4369bb62b36219bc334a80410802bc3ac4739c751af4205 Size: 1186013410 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift-jdk11-rhel8@sha256:6d8afb6d1fced4deee8de43b935e2bf5164c81bc26bee01da0fce69b74b63f83 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:6d8afb6d1fced4deee8de43b935e2bf5164c81bc26bee01da0fce69b74b63f83 resourceVersion: "13333" uid: aa9e5690-5ac7-4e2a-a81f-47ec07fcfadc - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:de1a4526c3d6cdf0e04f24b1888f5ef31425209f1c26e5a6ae7694cdad6e8688 size: 78973191 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:68e455cb0aea90f85be197ceadef7a56ca5a4d7bf6761a3a58b0ab36a65f770e size: 74468695 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c9ea21ed8be2224d8905e2b61fcfa7b7c3b28de2e57b2a57e1ed64106a67ab99 size: 166776237 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - /usr/libexec/s2i/usage Entrypoint: - container-entrypoint Env: - container=oci - HOME=/opt/app-root - DOTNET_APP_PATH=/opt/app-root/app - DOTNET_DATA_PATH=/opt/app-root/data - DOTNET_DEFAULT_CMD=default-cmd.sh - DOTNET_CORE_VERSION=6.0 - DOTNET_FRAMEWORK=net6.0 - DOTNET_RUNNING_IN_CONTAINER=true - DOTNET_SSL_CERT_DIR=/opt/app-root/ssl_dir - NUGET_XMLDOC_MODE=skip - ASPNETCORE_URLS=http://*:8080 - CONTAINER_SCRIPTS_PATH=/opt/app-root - PATH=/opt/app-root/src/.local/bin:/opt/app-root/src/bin:/opt/app-root/node_modules/.bin:/opt/app-root/.dotnet/tools/:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - STI_SCRIPTS_PATH=/usr/libexec/s2i - DOTNET_GENERATE_ASPNET_CERTIFICATE=false - DOTNET_NOLOGO=true - DOTNET_USE_POLLING_FILE_WATCHER=true ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: 2024-11-13T23:08:15 com.redhat.component: dotnet-60-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Platform for building and running .NET 6 applications distribution-scope: public io.buildah.version: 1.33.8 io.k8s.description: Platform for building and running .NET 6 applications io.k8s.display-name: .NET 6 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: 
builder,.net,dotnet,dotnetcore,dotnet-60 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: Red Hat, Inc. name: ubi8/dotnet-60 release: "56" summary: Provides the latest release of Red Hat Universal Base Image 8. url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/dotnet-60/images/6.0-56 vcs-ref: e084a6c82df0a7d768ee24821f4c3d6df23989e0 vcs-type: git vendor: Red Hat, Inc. version: "6.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2024-11-13T23:11:22Z" Id: sha256:a33af805134171f684198c9cc51cafc4e503192a39ee8cf8ef8c31fcf5fbcf87 Size: 320236550 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/dotnet-60@sha256:70a21b3f93c05843ce9d07f125b1464436caf01680bb733754a2a5df5bc3b11b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:70a21b3f93c05843ce9d07f125b1464436caf01680bb733754a2a5df5bc3b11b resourceVersion: "13393" uid: ec9c2072-4809-495e-b58b-3ba6a04f1496 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:54e56e6f85721741ee7bf0336de8ad3bf138a56769a6d0097b600a0e361be58d size: 39618910 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4f8ddd7f5a755f537dd9d5f553c8c78171dcf3018c5fc96676a07380d3e14e20 size: 1745 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b77215bc44b3aef7ed84f29a90a5958ecd050911e76c25976762aba8350c33b7 size: 112841081 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 65ec992ef2e6 Image: 55dda0fa5938fcaf1dba5c82789815d9089c5b6f00c535de4b25d4e4f9e2e9b2 Labels: architecture: x86_64 build-date: 2022-04-29T13:50:53.410788 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com 
com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1651233087" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.12-1.1651233087 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: 3f38ac3e8fb9711c4f4ef88bbefcd7a2e4f23641 vcs-type: git vendor: Red Hat, Inc. version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 65ec992ef2e6 Image: sha256:27fa7839928d26abab22bf783e14f02e4a8e92b190cf567a166dac2fe3c7e0ea Labels: architecture: x86_64 build-date: 2022-04-29T13:50:53.410788 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" 
io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1651233087" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.12-1.1651233087 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: 3f38ac3e8fb9711c4f4ef88bbefcd7a2e4f23641 vcs-type: git vendor: Red Hat, Inc. version: "1.12" User: "185" WorkingDir: /home/jboss Created: "2022-04-29T13:57:11Z" DockerVersion: 1.13.1 Id: sha256:b50aec0cad0399dde1d973e4b5d7572f5b6af9b3f8ff45952765178971535697 Size: 152469251 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8@sha256:7164a06e9ba98a3ce9991bd7019512488efe30895175bb463e255f00eb9421fd kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:7164a06e9ba98a3ce9991bd7019512488efe30895175bb463e255f00eb9421fd resourceVersion: "14209" uid: fb60b5bf-a201-4453-a30d-bf8b2ae8d39b - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c8c24bf4ef435ee29d7495ca732a4d82374c1a11c25ca6aae12f997f45ca5a26 size: 39733597 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a393588d7ce4791b5a481ad21808aca7eb0f342431a9f36b67ad14421c801862 size: 62821119 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4add4c8630229b186f76faa6d9b5e02453219ca65f300c550e54bae7eeaca9d1 size: 150291145 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - /bin/bash Env: - container=oci - HOME=/opt/app-root - DOTNET_APP_PATH=/opt/app-root/app - DOTNET_DATA_PATH=/opt/app-root/data - DOTNET_DEFAULT_CMD=default-cmd.sh - DOTNET_RUNNING_IN_CONTAINER=true - NUGET_XMLDOC_MODE=skip - ASPNETCORE_URLS=http://*:8080 - APP_UID=1001 - DOTNET_VERSION=8.0.22 - ASPNET_VERSION=8.0.22 - PATH=/opt/app-root/src/.local/bin:/opt/app-root/src/bin:/opt/app-root/.dotnet/tools:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - STI_SCRIPTS_PATH=/usr/libexec/s2i - DOTNET_GENERATE_ASPNET_CERTIFICATE=false - DOTNET_NOLOGO=true - DOTNET_SDK_VERSION=8.0.122 - DOTNET_USE_POLLING_FILE_WATCHER=true ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-12-02T13:31:27Z" com.redhat.component: dotnet-80-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Platform for building and running .NET 8 applications distribution-scope: public dotnet_version: 8.0.22 io.buildah.version: 1.41.4 io.k8s.description: Platform for building and running .NET 8 applications io.k8s.display-name: .NET 8 SDK io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: 
image:///usr/libexec/s2i io.openshift.tags: builder,.net,dotnet,dotnetcore,dotnet-80 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: Red Hat, Inc. name: ubi8/dotnet-80 org.opencontainers.image.created: "2025-12-02T13:31:27Z" org.opencontainers.image.revision: b1b4a6d9f69a6d37c7648ae3425bebcec9ae436b release: "1764682204" sdk_version: 8.0.122 summary: .NET 8 SDK url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: b1b4a6d9f69a6d37c7648ae3425bebcec9ae436b vcs-type: git vendor: Red Hat, Inc. version: "8.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-12-02T13:31:43Z" Id: sha256:bf642b10d531dd915ecb5053613dfcf62fb78dab5c90f8b19c06dd0dbabc0fa0 Size: 252865117 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/dotnet-80@sha256:7201e059b92acc55fe9fe1cc390d44e92f0e2af297fbe52b3f1bb56327f59624 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-12-12T16:25:14Z" name: sha256:7201e059b92acc55fe9fe1cc390d44e92f0e2af297fbe52b3f1bb56327f59624 resourceVersion: "40666" uid: 722b59bb-7931-4c17-8e06-09817275370e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d7c06497d5cebd39c0a4feb14981ec940b5c863e49903d320f630805b049cbff size: 39279912 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4a641477fd623eb5d13b9745c982b80afe91edd23076b1d351e94399b0d062c1 size: 115112568 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-17 - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-17 - JBOSS_IMAGE_VERSION=1.14 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-02-07T17:20:49 com.redhat.component: openjdk-17-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 3.11.0 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications 
io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "10.1675788279" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17/images/1.14-10.1675788279 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 26c4e5bd1ffe379b617c1bc35be67a640fe496ec vcs-type: git vendor: Red Hat, Inc. version: "1.14" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-02-07T17:33:34Z" Id: sha256:69b336536858bca7041cca9615f8e4cff18576dcad0bbfdea40760fb0034702c Size: 154420768 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17@sha256:739fac452e78a21a16b66e0451b85590b9e48ec7a1ed3887fbb9ed85cf564275 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:739fac452e78a21a16b66e0451b85590b9e48ec7a1ed3887fbb9ed85cf564275 resourceVersion: "14145" uid: f3d8c053-1aff-49dd-bff0-be93acb9743f - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:54e56e6f85721741ee7bf0336de8ad3bf138a56769a6d0097b600a0e361be58d size: 39618910 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4f8ddd7f5a755f537dd9d5f553c8c78171dcf3018c5fc96676a07380d3e14e20 size: 1745 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ab02516cb47ff2ec425e12a109cf913f14e3444fc733c19d241f3e28cc7080db size: 86708756 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 65ec992ef2e6 Image: d6c30419e2f08a4cc97f60318cd1cdfb9a334ff8b6e15ba8e3fcb122721fd52f Labels: architecture: x86_64 build-date: 2022-04-29T13:51:37.153929 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java 
maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1651233103" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.12-1.1651233103 vcs-ref: 178c0c78042b665fdd32e3eed34c52377f6ff4a3 vcs-type: git vendor: Red Hat, Inc. version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 65ec992ef2e6 Image: sha256:cfcfa013cb87d2a2715ac503134cef699c6178f084eb8d7c78258670e1ae426c Labels: architecture: x86_64 build-date: 2022-04-29T13:51:37.153929 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1651233103" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.12-1.1651233103 vcs-ref: 178c0c78042b665fdd32e3eed34c52377f6ff4a3 vcs-type: git vendor: Red Hat, Inc. 
version: "1.12" User: "185" WorkingDir: /home/jboss Created: "2022-04-29T13:54:24Z" DockerVersion: 1.13.1 Id: sha256:943deef450894b400c5db2d98b4f1c84ffcdc3e8ed20effa3b3e99edeb8a53f3 Size: 126334433 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:74051f86b00fb102e34276f03a310c16bc57b9c2a001a56ba66359e15ee48ba6 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:74051f86b00fb102e34276f03a310c16bc57b9c2a001a56ba66359e15ee48ba6 resourceVersion: "14189" uid: d819ce7d-f27c-42b8-b12e-06a5e4ea72ac - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a06cfa6e5ed77521218eaa75d023f86e156295cb20de1bda73e67b69c667962c size: 77840791 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:94130d9f48556208ec760273e324c3e5fdd13869d82c8eb8df8f97e1abe31641 size: 465790491 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - EAP_FULL_GROUPID=org.jboss.eap - JBOSS_EAP_VERSION=7.4.23 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.4.23 - WILDFLY_VERSION=7.4.23.GA-redhat-00002 - SSO_FORCE_LEGACY_SECURITY=true - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.9.Final - GALLEON_WILDFLY_VERSION=5.2.6.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - 
GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - S2I_FP_VERSION=23.0.0.Final - DEFAULT_ADMIN_USERNAME=eapadmin - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - OFFLINER_VERSION=1.6 - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap74-openjdk11-openshift-rhel8 - JBOSS_IMAGE_VERSION=7.4.23 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2025-07-28T22:21:52 com.redhat.component: jboss-eap-74-openjdk11-builder-openshift-rhel8-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Enterprise Application Platform 7.4 OpenShift container image. distribution-scope: public io.buildah.version: 1.33.12 io.cekit.version: 4.12.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running JavaEE applications on JBoss EAP 7.4 io.k8s.display-name: JBoss EAP 7.4 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7 maintainer: Red Hat name: jboss-eap-7/eap74-openjdk11-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.4.23 org.jboss.product.openjdk.version: "11" org.jboss.product.version: 7.4.23 release: "5.1753741010" summary: Red Hat JBoss Enterprise Application Platform 7.4 OpenShift container image. url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-eap-7/eap74-openjdk11-openshift-rhel8/images/7.4.23-5.1753741010 vcs-ref: f1c3b1ac77f5103c962f07b0411ad9b14b5925b0 vcs-type: git vendor: Red Hat, Inc. 
version: 7.4.23 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-07-28T22:37:24Z" Id: sha256:f7f88d0ce38bf0f9c50ab92bd4fef694432cff97270032406cbbc75db4447773 Size: 543694450 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-eap-7/eap74-openjdk11-openshift-rhel8@sha256:74cc70f8d3698d41793d19b0e23b0a79448d02ada93f402a1714d26d164e4c1d kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:74cc70f8d3698d41793d19b0e23b0a79448d02ada93f402a1714d26d164e4c1d resourceVersion: "13358" uid: fde9ef5a-2c55-47e3-bba7-49b7fa5a3ed0 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3146793ed37b0ecdd0f68824544cb4162400480c9a0d06ae7fc5bca6ac6a3a0d size: 98978618 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-postgresql Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - POSTGRESQL_VERSION=13 - POSTGRESQL_PREV_VERSION=12 - HOME=/var/lib/pgsql - PGUSER=postgres - APP_DATA=/opt/app-root - SUMMARY=PostgreSQL is an advanced Object-Relational database management system - DESCRIPTION=PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/postgresql - ENABLED_COLLECTIONS= ExposedPorts: 5432/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T16:15:38Z" com.redhat.component: postgresql-13-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. 
io.k8s.display-name: PostgreSQL 13 io.openshift.expose-services: 5432:postgresql io.openshift.s2i.assemble-user: "26" io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,postgresql,postgresql13,postgresql-13 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel8/postgresql-13 org.opencontainers.image.revision: b12316bd67aee5e7048fa1b520505cbcf075a896 release: "1761063302" summary: PostgreSQL is an advanced Object-Relational database management system url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d --name postgresql_database -e POSTGRESQL_USER=user -e POSTGRESQL_PASSWORD=pass -e POSTGRESQL_DATABASE=db -p 5432:5432 rhel8/postgresql-13 vcs-ref: b12316bd67aee5e7048fa1b520505cbcf075a896 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "26" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-21T16:16:02Z" Id: sha256:2bb99a08b727d648d6ab6ade091e688ddf21681276dadff4410126185aeb21a6 Size: 194536851 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel8/postgresql-13@sha256:74efcab05b844a1c226bb18221a5309e7364b48d52757a809787e9b58e235ed9 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:74efcab05b844a1c226bb18221a5309e7364b48d52757a809787e9b58e235ed9 resourceVersion: "13981" uid: 2b488403-6dd0-4606-9ad2-b4f3c4903c9a - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d66a972c08964d487e0b43062210ac00ce3dca3fe569ac665c4324ea6522507d size: 78753765 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:43c5ecd7e21e1320202951bf3c997c21d9536185b960a2cfe5263395e5aee621 size: 122268092 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:72e0ec2e9be9fab697250e486dda80e2b674ceabe4e0306176a289b8eb1130e4 size: 31500769 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jws-5.7/tomcat/bin/launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - CATALINA_OPTS=-Djava.security.egd=file:/dev/./urandom - JBOSS_PRODUCT=webserver - JBOSS_WEBSERVER_VERSION=5.7.8 - JPDA_ADDRESS=8000 - JWS_HOME=/opt/jws-5.7/tomcat - PRODUCT_VERSION=5.7.8 - TOMCAT_VERSION=9.0.62 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JWS_S2I_MODULE=/opt/jboss/container/jws/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - HOME=/home/jboss - AB_PROMETHEUS_ENABLE=True - 
AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jws-jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9404 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_IMAGE_NAME=jboss-webserver-5/jws57-openjdk11-rhel8-openshift - JBOSS_IMAGE_VERSION=5.7.8 - STI_BUILDER=jee ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9404/tcp: {} Labels: architecture: x86_64 build-date: 2024-03-05T15:41:58 com.redhat.component: jboss-webserver-57-openjdk11-rhel8-openshift-container com.redhat.deployments-dir: /opt/jws-5.7/tomcat/webapps com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: JPDA_ADDRESS:8000 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK11 on UBI8 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.9.1 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running web applications on JBoss Web Server 5.7 with OpenJDK11 - Tomcat v9 io.k8s.display-name: JBoss Web Server 5.7 OpenJDK11 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,tomcat9 maintainer: szappis@redhat.com name: jboss-webserver-5/jws57-openjdk11-rhel8-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/jws-5.7/tomcat/webapps org.jboss.product: webserver-tomcat9 org.jboss.product.openjdk.version: "11" org.jboss.product.version: 5.7.8 org.jboss.product.webserver-tomcat9.version: 5.7.8 release: "4" summary: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK11 on UBI8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-webserver-5/jws57-openjdk11-rhel8-openshift/images/5.7.8-4 vcs-ref: 0a0a7d4d45f027ae0d48f69dd994aa4164eee2e0 vcs-type: git vendor: Red Hat, Inc. 
version: 5.7.8 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-03-05T15:47:14Z" Id: sha256:05ba63e3251af34924497f68fbfdec042537af397df7633333ceb3f6822497ab Size: 232565770 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-webserver-5/jws57-openjdk11-openshift-rhel8@sha256:75fbf4aa5c14bba44b5dfbf6673dc80ce35376f626df3a102a5a2edf8141cd34 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:75fbf4aa5c14bba44b5dfbf6673dc80ce35376f626df3a102a5a2edf8141cd34 resourceVersion: "13703" uid: a14d22af-3cd6-4a35-b19b-940a3c56bfce - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d7c06497d5cebd39c0a4feb14981ec940b5c863e49903d320f630805b049cbff size: 39279912 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0840bf80f1ca1e24ec1664dbf77ee26aec2ed283560b6935dfdc1fd1d4155021 size: 88372182 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.14 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-02-07T17:35:49 com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "12.1675788327" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.14-12.1675788327 vcs-ref: 1d6d54438b510cefbe66061c3cf846f0f071658b vcs-type: git vendor: Red Hat, Inc. 
version: "1.14" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-02-07T17:41:33Z" Id: sha256:b5eea4dda264f007084103bfd6966b49856638cc9c434984d3e616fa45fdbb35 Size: 127670678 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:7711108ef60ef6f0536bfa26914af2afaf6455ce6e4c4abd391e31a2d95d0178 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:7711108ef60ef6f0536bfa26914af2afaf6455ce6e4c4abd391e31a2d95d0178 resourceVersion: "14191" uid: a34d1dc9-cfc3-47ed-8f52-a8c0a704b833 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d2bb55755c2edf5423e38bd42c48b277e0cb4c5c765218247001a8c8eb479a87 size: 215199578 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5fb1024fc39d4712fd639f83cdc55f79884635ea0dbffd5155e3e3d690e6f92c size: 18822709 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - NODEJS_VER=20 - RUBY_MAJOR_VERSION=3 - RUBY_MINOR_VERSION=3 - RUBY_VERSION=3.3 - RUBY_SCL_NAME_VERSION=33 - RUBY_SCL=ruby-33 - IMAGE_NAME=ubi9/ruby-33 - SUMMARY=Platform for building and running Ruby 3.3 applications - DESCRIPTION=Ruby 3.3 available as container is a base platform for building and running various Ruby 3.3 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T16:11:32Z" com.redhat.component: ruby-33-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Ruby 3.3 available as container is a base platform for building and running various Ruby 3.3 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Ruby 3.3 available as container is a base platform for building and running various Ruby 3.3 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. 
io.k8s.display-name: Ruby 3.3 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,ruby,ruby33,ruby-33 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/ruby-33 org.opencontainers.image.revision: bfe1bee9763f056da6f889c824ccac428e9eecb7 release: "1760371851" summary: Platform for building and running Ruby 3.3 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-ruby-container.git --context-dir=3.3/test/puma-test-app/ ubi9/ruby-33 ruby-sample-app vcs-ref: bfe1bee9763f056da6f889c824ccac428e9eecb7 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T16:11:41Z" Id: sha256:352be925d76d335387ae901dbd4998706c3a84d3ede978998c8447b5501a47af Size: 331124694 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/ruby-33@sha256:77e5675e066943925eb228f51434080f10bb0be323c9c55ac62d223a0dd1b250 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:77e5675e066943925eb228f51434080f10bb0be323c9c55ac62d223a0dd1b250 resourceVersion: "14120" uid: 5c79851a-1cb5-4c26-920d-2358164f4610 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d66a972c08964d487e0b43062210ac00ce3dca3fe569ac665c4324ea6522507d size: 78753765 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5e0f5766bf222b480ac1f5f2b7f1cf48286aca5e7f3b5c54d15b8bf4b3e50abd size: 103172532 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7bac85c1acdb755667bc9c14ad5d1eae07632d73ebe3c41efebfaccaba397485 size: 30943484 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jws-5.7/tomcat/bin/launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - CATALINA_OPTS=-Djava.security.egd=file:/dev/./urandom - JBOSS_PRODUCT=webserver - JBOSS_WEBSERVER_VERSION=5.7.8 - JPDA_ADDRESS=8000 - JWS_HOME=/opt/jws-5.7/tomcat - PRODUCT_VERSION=5.7.8 - TOMCAT_VERSION=9.0.62 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JWS_S2I_MODULE=/opt/jboss/container/jws/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - HOME=/home/jboss - AB_PROMETHEUS_ENABLE=True - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jws-jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9404 - 
JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_IMAGE_NAME=jboss-webserver-5/jws57-openjdk8-rhel8-openshift - JBOSS_IMAGE_VERSION=5.7.8 - STI_BUILDER=jee ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9404/tcp: {} Labels: architecture: x86_64 build-date: 2024-03-05T15:41:20 com.redhat.component: jboss-webserver-57-openjdk8-rhel8-openshift-container com.redhat.deployments-dir: /opt/jws-5.7/tomcat/webapps com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: JPDA_ADDRESS:8000 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK8 on UBI8 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.10.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running web applications on JBoss Web Server 5.7 with OpenJDK8 - Tomcat v9 io.k8s.display-name: JBoss Web Server 5.7 OpenJDK8 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,tomcat9 maintainer: szappis@redhat.com name: jboss-webserver-5/jws57-openjdk8-rhel8-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/jws-5.7/tomcat/webapps org.jboss.product: webserver-tomcat9 org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 5.7.8 org.jboss.product.webserver-tomcat9.version: 5.7.8 release: "4" summary: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK8 on UBI8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-webserver-5/jws57-openjdk8-rhel8-openshift/images/5.7.8-4 vcs-ref: 5a9d2a110fb72b7fb28733440d129547f74a34f4 vcs-type: git vendor: Red Hat, Inc. 
version: 5.7.8 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-03-05T15:46:35Z" Id: sha256:1c36d88704e8e6f9cf66cfb9048f0c4d784d560ff92362e2522c1ad2cdae7930 Size: 212912911 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-webserver-5/jws57-openjdk8-openshift-rhel8@sha256:789f5edf1369a40bf56ca698eafee86b74a8d53b39d100bbb91279aaebceb6d5 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:789f5edf1369a40bf56ca698eafee86b74a8d53b39d100bbb91279aaebceb6d5 resourceVersion: "13801" uid: 253ef7bf-ce48-4c49-9f50-234cef50e904 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:378837c0e24ad4a2e33f0eb3d68dc0c31d9a7dbbd5357d4acafec1d3a7930602 size: 74923740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e17262bc23414bd3c0e9808ad7a87b055fe5afec386da42115a839ea2083d233 size: 1303 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8d9c78c7f9887170d08c57ec73b21e469b4120682a2e82883217535294878c5d size: 3805344 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f4350d5126d0895bb50c2c082a415ff417578d34508a0ef07ec20cebf661ebb7 size: 70368140 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ff675fb12750095d377db23232ae2b63df8026ecf3008fcbe5c82431773e573a size: 25576914 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.5 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JAVA_DATA_DIR=/deployments/data - JOLOKIA_VERSION=1.5.0 - MAVEN_VERSION=3.5 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 3183206101f3 Image: 874cb60ae76db201cf859a058c3682666664a852a1234bb6c551a151a030aa05 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-10-17T22:10:52.566133 com.redhat.build-host: cpt-0007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 2.1.2 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: 8778/tcp:uec,8080/tcp:webcache,8443/tcp:pcsync-https io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Cloud Enablement Feedback name: redhat-openjdk-18/openjdk18-openshift org.concrt.version: 2.1.2 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "14.1539812388" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: 
https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.5-14.1539812388 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 5d5b141069a0b956c0382d59a8384857da9fc950 vcs-type: git vendor: Red Hat, Inc. version: "1.5" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.5 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JAVA_DATA_DIR=/deployments/data - JOLOKIA_VERSION=1.5.0 - MAVEN_VERSION=3.5 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 3183206101f3 Image: sha256:c35044a707a9fd042b68f5fd91151e355200f2a945dec8dcb029feadf696e5fc Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-10-17T22:10:52.566133 com.redhat.build-host: cpt-0007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 2.1.2 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: 8778/tcp:uec,8080/tcp:webcache,8443/tcp:pcsync-https io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Cloud Enablement Feedback name: redhat-openjdk-18/openjdk18-openshift org.concrt.version: 2.1.2 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "14.1539812388" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.5-14.1539812388 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 5d5b141069a0b956c0382d59a8384857da9fc950 vcs-type: git vendor: Red Hat, Inc. 
version: "1.5" User: "185" WorkingDir: /home/jboss Created: "2018-10-17T22:12:34Z" DockerVersion: 1.13.1 Id: sha256:c1bf72469139cd567ebc2b432f68e99a592e29bc7fc688ae81e846be9a21c816 Size: 174681381 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:78af15475eac13d2ff439b33a9c3bdd39147858a824c420e8042fd5f35adce15 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:78af15475eac13d2ff439b33a9c3bdd39147858a824c420e8042fd5f35adce15 resourceVersion: "14172" uid: e9d068b1-637b-4a07-889e-2f54e1e97425 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:96965a3a84248c364364702c0fb90543e329f86044b3394f97701f25b516b9ee size: 39507581 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4d0d850cd4adc37289686142206a183ccbd4e286765ce8fc9890539bbfd38827 size: 1735 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7504d0b31a9a6b70806a2c804c147b554524faabd19ae0d3172c75879fbb3f52 size: 88388953 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.9 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: e321cccbfab0 Image: 436032b41e1c9d4a06283e12624febac8c9e4dfc8068b9abe86406401a0bfb9a Labels: architecture: x86_64 build-date: 2021-07-21T09:25:08.039193 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "3" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.9-3 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 2c06861d0e0374d2c9493eaf59b2b8e129264d9d vcs-type: git vendor: Red Hat, Inc. 
version: "1.9" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.9 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: e321cccbfab0 Image: sha256:c2baa0a125c0b2910a423e9850f74f32c39a6178a19aa9ee1446e8f83c4e54ed Labels: architecture: x86_64 build-date: 2021-07-21T09:25:08.039193 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "3" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.9-3 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 2c06861d0e0374d2c9493eaf59b2b8e129264d9d vcs-type: git vendor: Red Hat, Inc. 
version: "1.9" User: "185" WorkingDir: /home/jboss Created: "2021-07-21T09:27:21Z" DockerVersion: 1.13.1 Id: sha256:8bc1ee1eb4c673b5aa3d2beb7c0de560dfe0a3e2672b88cb61bd098f79201a6d Size: 127903000 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:78bf175cecb15524b2ef81bff8cc11acdf7c0f74c08417f0e443483912e4878a kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:78bf175cecb15524b2ef81bff8cc11acdf7c0f74c08417f0e443483912e4878a resourceVersion: "14196" uid: e608e06b-96d0-4f23-96d7-962bb76c0a70 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2a99c93da16827d9a6254f86f495d2c72c62a916f9c398577577221d35d2c790 size: 39641757 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4418ace46c3dd933f98d83f357f31048e72d5db3d97bccfdb0acef769ee8234f size: 1743 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3b94dc93b5dd50b77a3571bf8c83cd098425eaf04c3a86cf65eb823be8fe1a6e size: 73021285 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.10 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: be4e58a52d40 Image: cd6a2b0b52646e801ccf83a59221a56ae40c43593e0f88fbe0cc10162b2276bd Labels: architecture: x86_64 build-date: 2021-12-01T18:44:09.391033 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "11.1638383197" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.10-11.1638383197 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: c0151768fb10aa1bee0151ab5cbd5b9cbe107bea vcs-type: git vendor: Red Hat, Inc. 
version: "1.10" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.10 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: be4e58a52d40 Image: sha256:537a71ebf8fe34479f79da33ceb0ffec71d10f7a412d859327dcfec33a3ffe09 Labels: architecture: x86_64 build-date: 2021-12-01T18:44:09.391033 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "11.1638383197" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.10-11.1638383197 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: c0151768fb10aa1bee0151ab5cbd5b9cbe107bea vcs-type: git vendor: Red Hat, Inc. 
version: "1.10" User: "185" WorkingDir: /home/jboss Created: "2021-12-01T18:46:28Z" DockerVersion: 1.13.1 Id: sha256:8a91374c6932c030f3ef070f4ef29aca06e60fc619ab61302e9d5b9829839071 Size: 112669576 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:7bcc365e0ba823ed020ee6e6c3e0c23be5871c8dea3f7f1a65029002c83f9e55 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:7bcc365e0ba823ed020ee6e6c3e0c23be5871c8dea3f7f1a65029002c83f9e55 resourceVersion: "14218" uid: 8879dcf4-7ccd-4b5e-9ef1-830d167d00cd - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:25c75c34b2e2b68ba9245d9cddeb6b8a0887371ed30744064f85241a75704d87 size: 79262296 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:67705065e025181e4faca8aabe1305bdd92f5bdf8a2b8009cdb69183ac2e2c47 size: 49851946 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9d458e2e81cb0fa811f569aaf711628309c0372c7d5eed4a8ea9ec96b4aeeb42 size: 9300456 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b954cb90d4571440ed86627198be2d74d7c3d264fe72e0af0f35f40f0da99ea8 size: 75745362 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a473864e64707b90812a20a3cfe329daa0a7ec4fb2659d24094a29fc050bbacb size: 954091093 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - ART_BUILD_ENGINE=konflux - ART_BUILD_DEPS_METHOD=cachi2 - ART_BUILD_NETWORK=hermetic - ART_BUILD_DEPS_MODE=default - __doozer=merge - BUILD_RELEASE=202510212154.p2.g7f1d6f8.assembly.stream.el9 - BUILD_VERSION=v4.20.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=20 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.20.0-202510212154.p2.g7f1d6f8.assembly.stream.el9-7f1d6f8 - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.20 - __doozer_key=ose-cli-artifacts - __doozer_uuid_tag=ose-cli-artifacts-rhel9-v4.20.0-20251021.223340 - __doozer_version=v4.20.0 - OS_GIT_COMMIT=7f1d6f8 - SOURCE_DATE_EPOCH=1761075552 - SOURCE_GIT_COMMIT=7f1d6f88cb0c8ed5c877fc0ae2bd99298c6339f2 - SOURCE_GIT_TAG=openshift-clients-4.12.0-202208031327-1168-g7f1d6f88c - SOURCE_GIT_URL=https://github.com/openshift/oc Labels: License: GPLv2+ architecture: x86_64 build-date: "2025-10-21T23:17:58Z" com.redhat.component: ose-cli-artifacts-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:openshift:4.20::el9 description: Empty distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: OpenShift is a platform for developing, building, and deploying containerized applications. 
io.k8s.display-name: OpenShift Clients io.openshift.build.commit.id: 7f1d6f88cb0c8ed5c877fc0ae2bd99298c6339f2 io.openshift.build.commit.url: https://github.com/openshift/oc/commit/7f1d6f88cb0c8ed5c877fc0ae2bd99298c6339f2 io.openshift.build.source-location: https://github.com/openshift/oc io.openshift.build.versions: kubectl=1.33.3 io.openshift.expose-services: "" io.openshift.maintainer.component: oc io.openshift.maintainer.project: OCPBUGS io.openshift.tags: openshift,cli maintainer: Red Hat, Inc. name: openshift/ose-cli-artifacts-rhel9 org.opencontainers.image.revision: 1b5c23575fef47279ac307305b759a33fe1233c4 release: 202510212154.p2.g7f1d6f8.assembly.stream.el9 summary: Empty url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel-els/images/9.4-847.1719484506 vcs-ref: 1b5c23575fef47279ac307305b759a33fe1233c4 vcs-type: git vendor: Red Hat, Inc. version: v4.20.0 ContainerConfig: {} Created: "2025-10-21T23:28:28Z" Id: sha256:c4de69b5cefe6e6f8d32565b3bce1d50c55aca367aa292e87ea165eb663a5925 Size: 1168278638 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7c8a088031661d94022418e93fb63744c38e1c4cff93ea3b95c096a290c2b7a3 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:7c8a088031661d94022418e93fb63744c38e1c4cff93ea3b95c096a290c2b7a3 resourceVersion: "13317" uid: 85fcad32-bf2c-42b5-b77f-af290bc0e371 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:20c07157affed21515c13c338c994513e9811a7e310ee4c942afb82555501651 size: 9157 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:694d8a4bfb69e1ec991e9503c181e43ae0d9587cbed135923a0e159d145f8c1f size: 585 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin Labels: License: ASL 2.0 com.redhat.component: service-telemetry-operator-bundle-container com.redhat.delivery.backport: "false" com.redhat.delivery.operator.bundle: "true" com.redhat.openshift.versions: v4.18-v4.20 description: service-telemetry-operator-bundle io.k8s.display-name: service-telemetry-operator-bundle io.openshift.expose-services: "" io.openshift.tags: monitoring,telemetry,faulting,serviceassurance maintainer: '[''leif+service-telemetry-operator@redhat.com'']' name: stf/service-telemetry-operator-bundle operators.operatorframework.io.bundle.channel.default.v1: unstable operators.operatorframework.io.bundle.channels.v1: unstable operators.operatorframework.io.bundle.manifests.v1: manifests/ operators.operatorframework.io.bundle.mediatype.v1: registry+v1 operators.operatorframework.io.bundle.metadata.v1: metadata/ operators.operatorframework.io.bundle.package.v1: service-telemetry-operator operators.operatorframework.io.metrics.builder: operator-sdk-v0.19.4 operators.operatorframework.io.metrics.mediatype.v1: metrics+v1 operators.operatorframework.io.metrics.project_layout: ansible summary: service-telemetry-operator-bundle version: 1.5.1765493224 WorkingDir: / ContainerConfig: {} Created: "2025-12-11T22:47:11Z" Id: sha256:b80004350271eb7d05d037314c48fc862fe9985cb1c0b578cd2b676b5d461974 Size: 14331 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: 
quay.io/infrawatch-operators/service-telemetry-operator-bundle@sha256:7d0f3dfa01d60f720089fa3a7adf5ce417c12be54872c5973a267e029ba3561b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-12-12T16:28:03Z" name: sha256:7d0f3dfa01d60f720089fa3a7adf5ce417c12be54872c5973a267e029ba3561b resourceVersion: "43570" uid: 96590067-7d52-48ec-ad9f-8c6182838e82 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d246e53da6241a619b7dcea7d4403071b9e1961797aa4f6c766786e29732651c size: 76526594 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:91bd2c541a17a588359eb054815718e41f871d03b4d4daf7b3584b25fbdcbb67 size: 1563 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:aab734dcdabc81999b756ecd16b1b1c04fb52722314e2d5ff6e17b96d8a23fe3 size: 114160840 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.10 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: f18f459d223a Image: 12bca108ee5d8cf1072eff2df3a23c55ee6c997a880cc49d70e95c3e094b5c99 Labels: architecture: x86_64 build-date: 2021-12-02T07:25:12.616806 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: openjdk/openjdk-11-rhel7 org.jboss.container.deployments-dir: /deployments 
org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1638429558" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.10-1.1638429558 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: a137d0be0569c27ef415a990f8d2270938cd0663 vcs-type: git vendor: Red Hat, Inc. version: "1.10" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.10 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: f18f459d223a Image: sha256:882a9c75ba965d44352aee03e17d3041bc59013a73b8dda503ce95a086505e4e Labels: architecture: x86_64 build-date: 2021-12-02T07:25:12.616806 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: openjdk/openjdk-11-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1638429558" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.10-1.1638429558 usage: 
https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: a137d0be0569c27ef415a990f8d2270938cd0663 vcs-type: git vendor: Red Hat, Inc. version: "1.10" User: "185" WorkingDir: /home/jboss Created: "2021-12-02T07:44:18Z" DockerVersion: 1.13.1 Id: sha256:d496146df3a8259eca744feff56d02f72d80ca71e536df20954c7135b61dbc44 Size: 190696399 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:7de877b0e748cdb47cb702400f3ddaa3c3744a022887e2213c2bb27775ab4b25 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:7de877b0e748cdb47cb702400f3ddaa3c3744a022887e2213c2bb27775ab4b25 resourceVersion: "14099" uid: 6526584f-3a2f-4e0f-b172-575b87183894 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:96965a3a84248c364364702c0fb90543e329f86044b3394f97701f25b516b9ee size: 39507581 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4d0d850cd4adc37289686142206a183ccbd4e286765ce8fc9890539bbfd38827 size: 1735 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc3f90f22ede9c1de11eddbc3e136e684bf80148929c8428bd14a39577420b6c size: 74106555 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.9 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: e321cccbfab0 Image: a7a02e661dc58c86283a3a273e95b435b00b501b9ceeb06834521db56c8d66bf Labels: architecture: x86_64 build-date: 2021-07-21T09:24:53.073347 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "3" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.9-3 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 27c8267ebad228142ab22492448953134db1c957 vcs-type: git vendor: Red Hat, Inc. 
version: "1.9" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.9 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: e321cccbfab0 Image: sha256:a3622fbebff55a0e4b6d12bbfd88c33e6cc9d2a5654b4545f7eb1df41f299d28 Labels: architecture: x86_64 build-date: 2021-07-21T09:24:53.073347 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "3" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.9-3 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 27c8267ebad228142ab22492448953134db1c957 vcs-type: git vendor: Red Hat, Inc. 
version: "1.9" User: "185" WorkingDir: /home/jboss Created: "2021-07-21T09:26:56Z" DockerVersion: 1.13.1 Id: sha256:ab46d56801e32ef028fb5ba139a6df1998650ad83bf1065611d48343056292ee Size: 113620622 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:7ef75cdbc399425105060771cb8e700198cc0bddcfb60bf4311bf87ea62fd440 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:7ef75cdbc399425105060771cb8e700198cc0bddcfb60bf4311bf87ea62fd440 resourceVersion: "14227" uid: fd8232eb-2bea-4f5f-8076-07ad88a6c9c2 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:af063699af1c142fce6707dc9306d122355e61bd23ded0d18f8a4ecfbf3aa89a size: 78847792 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:c81d6d556e6e3a4255dd2709ce18578bfbbf3eed10a4efb966bf99ab69c79e05 size: 9405288 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:40ebe11cc2334d0d9d9ed39393e6d921c4529efb2eb74cbbe16b6bae313b2061 size: 152207353 dockerImageManifestMediaType: application/vnd.oci.image.manifest.v1+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - VERSION=10 - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el10 - NODEJS_VERSION=22 - NPM_RUN=start - NAME=nodejs - NPM_CONFIG_PREFIX=/opt/app-root/src/.npm-global - PATH=/opt/app-root/src/node_modules/.bin/:/opt/app-root/src/.npm-global/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - CNB_STACK_ID=com.redhat.stacks.c10s-nodejs-22 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - SUMMARY=Platform for building and running Node.js 22 applications - DESCRIPTION=Node.js 22 available as container is a base platform for building and running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-28T04:23:44Z" com.redhat.component: nodejs-22-container com.redhat.deployments-dir: /opt/app-root/src com.redhat.dev-mode: DEV_MODE:false com.redhat.dev-mode.port: DEBUG_PORT:5858 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/o:redhat:enterprise_linux:10.0 description: Node.js 22 available as container is a base platform for building and running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. 
distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-nodejs-container io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.c10s-nodejs-22 io.k8s.description: Node.js 22 available as container is a base platform for building and running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. io.k8s.display-name: Node.js 22 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nodejs,nodejs22 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi10/nodejs-22 org.opencontainers.image.revision: 35fce3d2788a2879aeaa4c3d1bcc7362c248149b release: "1761625375" summary: Platform for building and running Node.js 22 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi10/nodejs-22 vcs-ref: 35fce3d2788a2879aeaa4c3d1bcc7362c248149b vcs-type: git vendor: Red Hat, Inc. version: "10.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-28T04:23:59Z" Id: sha256:e54e0543361777408e7cdfbdde86102ebffe0e6e33a6148cf17cc3f8d2549d13 Size: 240477435 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi10/nodejs-22@sha256:7ff7af22f08d9dc8043a73013d629ee03277ff18ad94001092de70fd5917e9e8 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:7ff7af22f08d9dc8043a73013d629ee03277ff18ad94001092de70fd5917e9e8 resourceVersion: "14093" uid: 52b4be48-4fa0-42e2-9970-89b63e3ed21a - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7f2c2c4492b6b2d181be862a0a1d1b6f6851cb07244efbcb43d44f9936aa78d5 size: 80005019 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ea092d7970b26c24007a670fc6d0810dbf9531dc0d3a9d6ea514134ba5686724 size: 7541063 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:df8f2f39be965b80a9b8ae52de53e3a71a587009acd1b972541eb0b74424f50b size: 35281086 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - run-postgresql Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el7 - POSTGRESQL_VERSION=10 - POSTGRESQL_PREV_VERSION=9.6 - HOME=/var/lib/pgsql - PGUSER=postgres - APP_DATA=/opt/app-root - SUMMARY=PostgreSQL is an advanced Object-Relational database management system - DESCRIPTION=PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. 
- CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/postgresql - ENABLED_COLLECTIONS=rh-postgresql10 - BASH_ENV=/usr/share/container-scripts/postgresql/scl_enable - ENV=/usr/share/container-scripts/postgresql/scl_enable - PROMPT_COMMAND=. /usr/share/container-scripts/postgresql/scl_enable ExposedPorts: 5432/tcp: {} Labels: architecture: x86_64 build-date: 2024-05-30T09:35:40 com.redhat.component: rh-postgresql10-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. distribution-scope: public io.buildah.version: 1.29.0 io.k8s.description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. io.k8s.display-name: PostgreSQL 10 io.openshift.expose-services: 5432:postgresql io.openshift.s2i.assemble-user: "26" io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,postgresql,postgresql10,rh-postgresql10 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhscl/postgresql-10-rhel7 release: "185" summary: PostgreSQL is an advanced Object-Relational database management system url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhscl/postgresql-10-rhel7/images/1-185 usage: podman run -d --name postgresql_database -e POSTGRESQL_USER=user -e POSTGRESQL_PASSWORD=pass -e POSTGRESQL_DATABASE=db -p 5432:5432 rhscl/postgresql-10-rhel7 vcs-ref: f48185d986ac9bcb68b79dd9c3fd23967e61fba4 vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "26" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2024-05-30T09:37:58Z" Id: sha256:79eaf6f17e462f0bb3abad43b13072b170e7310a619dd31eee96cddd7b6c3cec Size: 122846968 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhscl/postgresql-10-rhel7@sha256:8027301bb8716941e2a15b7d31b055ec7eba327ad3b7c72fb5accfa077a32521 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:8027301bb8716941e2a15b7d31b055ec7eba327ad3b7c72fb5accfa077a32521 resourceVersion: "14106" uid: 733693e3-1efa-487a-aea8-fab62cd50b4d - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4752687a61a97d6f352ae62c381c87564bcb2f5b6523a05510ca1fb60d640216 size: 36442442 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0344366a246a0f7590c2bae4536c01f15f20c6d802b4654ce96ac81047bc23f3 size: 1740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:27394721a6937e88a8f0b4e64ba92f04c32d0f91637b25dbbbd382880c255f36 size: 113467165 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.13 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: fd16d6f2e774 Image: b24fd32b5f44d139cf40f237958aa6f4a340ca563e7e9c56101bab0b4b9f496d Labels: architecture: x86_64 build-date: 2022-06-15T16:29:25.438581 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building 
and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1655306434" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.13-1.1655306434 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: b311957b2aafa86184f328b45510b317e72ea9c1 vcs-type: git vendor: Red Hat, Inc. version: "1.13" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.13 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: fd16d6f2e774 Image: sha256:a82928d8da6c9096e81ebf31be43aa30c85918e46cb0c6e3e6e6ad3f3f75ca53 Labels: architecture: x86_64 build-date: 2022-06-15T16:29:25.438581 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: 
openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1655306434" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.13-1.1655306434 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: b311957b2aafa86184f328b45510b317e72ea9c1 vcs-type: git vendor: Red Hat, Inc. version: "1.13" User: "185" WorkingDir: /home/jboss Created: "2022-06-15T16:35:36Z" DockerVersion: 1.13.1 Id: sha256:f6e320209fcf654664094417273c9275e710e28a7e0393b6ff3c5fac472d7627 Size: 149918863 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8@sha256:81684e422367a075ac113e69ea11d8721416ce4bedea035e25313c5e726fd7d1 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:81684e422367a075ac113e69ea11d8721416ce4bedea035e25313c5e726fd7d1 resourceVersion: "14210" uid: c5be21e6-0bd2-469b-88db-f486faa5f05c - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c4877503c8d2f934dcdfd76623f2b9935529fe73a1432cae4abba022c6951afd size: 79158758 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7c48fd933e32825452e3b72ff9e56a3d4db20281e05205aa4de1a44101718288 size: 102994046 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e98abb245aac78ad91bd13290e6bd1e669049593c7ed8324739f01e747f3b5aa size: 29967136 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jws-5.7/tomcat/bin/launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - CATALINA_OPTS=-Djava.security.egd=file:/dev/./urandom - JBOSS_PRODUCT=webserver - JBOSS_WEBSERVER_VERSION=5.7.1 - JPDA_ADDRESS=8000 - JWS_HOME=/opt/jws-5.7/tomcat - PRODUCT_VERSION=5.7.1 - TOMCAT_VERSION=9.0.62 - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JWS_S2I_MODULE=/opt/jboss/container/jws/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss-webserver-5/jws57-openjdk8-rhel8-openshift - JBOSS_IMAGE_VERSION=5.7.1 - STI_BUILDER=jee ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-03-22T11:42:21 com.redhat.component: jboss-webserver-57-openjdk8-rhel8-openshift-container com.redhat.deployments-dir: /opt/jws-5.7/tomcat/webapps com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: 
JPDA_ADDRESS:8000 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK8 on UBI8 distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 4.3.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running web applications on JBoss Web Server 5.7 with OpenJDK8 - Tomcat v9 io.k8s.display-name: JBoss Web Server 5.7 OpenJDK8 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,tomcat9 maintainer: szappis@redhat.com name: jboss-webserver-5/jws57-openjdk8-rhel8-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/jws-5.7/tomcat/webapps org.jboss.product: webserver-tomcat9 org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 5.7.1 org.jboss.product.webserver-tomcat9.version: 5.7.1 release: "2.1679484388" summary: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK8 on UBI8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-webserver-5/jws57-openjdk8-rhel8-openshift/images/5.7.1-2.1679484388 vcs-ref: b76974c8ca96f84dd046416e215d6bb6c6143809 vcs-type: git vendor: Red Hat, Inc. version: 5.7.1 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-03-22T11:46:08Z" Id: sha256:534ebd82522d920ad16a57815a20458bd5c8f1287794e1ed4f5c6bfc437d1b91 Size: 212161123 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-webserver-5/jws57-openjdk8-openshift-rhel8@sha256:85093d0f55d06662420925f64e914ff05499c79c2ede3ef80085a44d40f16a80 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:85093d0f55d06662420925f64e914ff05499c79c2ede3ef80085a44d40f16a80 resourceVersion: "13791" uid: bb3a31c3-826f-44da-915e-cd276c53411e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:fc5b206e9329a1674dd9e8efbee45c9be28d0d0dcbabba3c6bb67a2f22cfcf2a size: 76240726 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e7021e0589e97471d99c4265b7c8e64da328e48f116b5f260353b2e0a2adb373 size: 1744 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:aef76d9b756f4b41636967f6f95dc208dc083ff9330fa218bd2e2b066d48d9bd size: 369657334 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:437304b712427332554361693d6f14f04b53117528365b179b460ea3c0d731b4 size: 387995280 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_EAP_VERSION=7.3.2 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.2 - WILDFLY_VERSION=7.3.2.GA-redhat-00002 - SSO_FORCE_LEGACY_SECURITY=true - 
JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.4.Final - GALLEON_WILDFLY_VERSION=4.2.7.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - S2I_FP_VERSION=18.0.0.Final - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - DEFAULT_ADMIN_USERNAME=eapadmin - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api,com.sun.crypto.provider - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - OFFLINER_VERSION=1.6 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap73-openjdk8-openshift-rhel7 - JBOSS_IMAGE_VERSION=7.3.2 - WILDFLY_CAMEL_VERSION=5.6.0.fuse-780027-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 51124b2d4953 Image: 4a3600a70fd852a914595cce9bbd668dda44064bebf0ef91972d53757a29ba17 Labels: architecture: x86_64 build-date: 2021-06-22T10:26:50.499540 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: 
/opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Camel applications on EAP 7.3 distribution-scope: public io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.3 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Otavio Piske name: fuse7/fuse-eap-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.3.2 org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.3.2 release: "17" summary: Platform for building and running Apache Camel applications on EAP 7.3 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.8-17 vcs-ref: 3c5cf944b8a38ac0fbf6837278aba31d5155a185 vcs-type: git vendor: Red Hat, Inc. version: "1.8" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_EAP_VERSION=7.3.2 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.2 - WILDFLY_VERSION=7.3.2.GA-redhat-00002 - SSO_FORCE_LEGACY_SECURITY=true - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.4.Final - GALLEON_WILDFLY_VERSION=4.2.7.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - 
GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - S2I_FP_VERSION=18.0.0.Final - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - DEFAULT_ADMIN_USERNAME=eapadmin - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api,com.sun.crypto.provider - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - OFFLINER_VERSION=1.6 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap73-openjdk8-openshift-rhel7 - JBOSS_IMAGE_VERSION=7.3.2 - WILDFLY_CAMEL_VERSION=5.6.0.fuse-780027-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 51124b2d4953 Image: sha256:4232cd4533a083abe38562a5778f59b40681c639a4716f8da57b7fa9c6dfb9e6 Labels: architecture: x86_64 build-date: 2021-06-22T10:26:50.499540 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Camel applications on EAP 7.3 distribution-scope: public io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.3 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Otavio Piske name: fuse7/fuse-eap-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.3.2 org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.3.2 release: "17" summary: Platform for building and running Apache Camel applications on EAP 7.3 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.8-17 vcs-ref: 3c5cf944b8a38ac0fbf6837278aba31d5155a185 vcs-type: git vendor: Red Hat, Inc. 
version: "1.8" User: "185" WorkingDir: /home/jboss Created: "2021-06-22T10:28:43Z" DockerVersion: 1.13.1 Id: sha256:7fd513295ba07faabb1b854647ddc5efd7aaf5e1da154b62a76706281cbe93ad Size: 833907243 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift@sha256:852d8ea448cfb93036fc5f1b69f58249bc2e4454d326bd927839c5de6ce50a7b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:852d8ea448cfb93036fc5f1b69f58249bc2e4454d326bd927839c5de6ce50a7b resourceVersion: "14045" uid: b639ff9d-e3cc-4164-9bba-2068a0077977 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:af063699af1c142fce6707dc9306d122355e61bd23ded0d18f8a4ecfbf3aa89a size: 78847792 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:c81d6d556e6e3a4255dd2709ce18578bfbbf3eed10a4efb966bf99ab69c79e05 size: 9405288 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:17685c6e7b61aa5529bde52c46f996d37a3a1e7b434b243f0a93af071b73c3b0 size: 14548139 dockerImageManifestMediaType: application/vnd.oci.image.manifest.v1+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el10 - NAME=nginx - NGINX_VERSION=1.26 - NGINX_SHORT_VER=126 - VERSION=0 - SUMMARY=Platform for running nginx 1.26 or building nginx-based application - DESCRIPTION=Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.26 daemon. The image can be used as a base image for other applications based on nginx 1.26 web server. Nginx server image can be extended using source-to-image tool. - NGINX_CONFIGURATION_PATH=/opt/app-root/etc/nginx.d - NGINX_CONF_PATH=/etc/nginx/nginx.conf - NGINX_DEFAULT_CONF_PATH=/opt/app-root/etc/nginx.default.d - NGINX_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/nginx - NGINX_APP_ROOT=/opt/app-root - NGINX_LOG_PATH=/var/log/nginx - NGINX_PERL_MODULE_PATH=/opt/app-root/etc/perl ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-16T00:15:38Z" com.redhat.component: nginx-126-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/o:redhat:enterprise_linux:10.0 description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.26 daemon. The image can be used as a base image for other applications based on nginx 1.26 web server. Nginx server image can be extended using source-to-image tool. 
distribution-scope: public help: For more information visit https://github.com/sclorg/nginx-container io.buildah.version: 1.41.4 io.k8s.description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.26 daemon. The image can be used as a base image for other applications based on nginx 1.26 web server. Nginx server image can be extended using source-to-image tool. io.k8s.display-name: Nginx 1.26 io.openshift.expose-services: 8443:https io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nginx,nginx-126 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi10/nginx-126 org.opencontainers.image.revision: 5f49473c03c662a2dd2cda010888ba458eaf82ea release: "1760573692" summary: Platform for running nginx 1.26 or building nginx-based application url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi10/nginx-126 vcs-ref: 5f49473c03c662a2dd2cda010888ba458eaf82ea vcs-type: git vendor: Red Hat, Inc. version: "10.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-16T00:15:44Z" Id: sha256:ee41d8c7a2e378523c7ec25c7706336823e6c11bc957f3b0b1b78f6ac79f748a Size: 102818888 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi10/nginx-126@sha256:865ce4a073a00133133ba2d375cb9529dab8d10cf2aebd5537e9028f21aa261b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:865ce4a073a00133133ba2d375cb9529dab8d10cf2aebd5537e9028f21aa261b resourceVersion: "14060" uid: 7fd1d5e1-c258-47bd-939d-51195fcb43f2 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4752687a61a97d6f352ae62c381c87564bcb2f5b6523a05510ca1fb60d640216 size: 36442442 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0344366a246a0f7590c2bae4536c01f15f20c6d802b4654ce96ac81047bc23f3 size: 1740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2d0ad29676d908739bbe514232a9d435a6846938a597d376bb8323e0c52c5fc2 size: 117783682 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - 
JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.13 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: fd16d6f2e774 Image: e68803b1f8baa603307a9ff6909df95f45ccb9d227d163263fb1da2f6e141c32 Labels: architecture: x86_64 build-date: 2022-06-15T16:19:30.415682 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1655306377" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.13-1.1655306377 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: 5c420ef50250635153dd2f037402814b3555412f vcs-type: git vendor: Red Hat, Inc. 
version: "1.13" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.13 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: fd16d6f2e774 Image: sha256:1e9249edf3c6ae61ab3dc9e75321bb5fb114f71fbdbd5876ab64a745811d9872 Labels: architecture: x86_64 build-date: 2022-06-15T16:19:30.415682 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1655306377" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.13-1.1655306377 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: 5c420ef50250635153dd2f037402814b3555412f vcs-type: git vendor: Red Hat, Inc. 
version: "1.13" User: "185" WorkingDir: /home/jboss Created: "2022-06-15T16:26:00Z" DockerVersion: 1.13.1 Id: sha256:f2fb2918903b2a27c34df5bc33958a39d01fc478e6940bde6fbb918cb841ee33 Size: 154235364 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11@sha256:868224c3b7c309b9e04003af70a5563af8e4c662f0c53f2a7606e0573c9fad85 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:868224c3b7c309b9e04003af70a5563af8e4c662f0c53f2a7606e0573c9fad85 resourceVersion: "14173" uid: 8673a3ac-628a-461d-969d-81452ebfec3e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c8c24bf4ef435ee29d7495ca732a4d82374c1a11c25ca6aae12f997f45ca5a26 size: 39733597 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9abb5e74a5f821b8cab5d489b7a3ebe6bad4b3ee1eb3e7748583419f1ec6c43a size: 63943566 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - '"./${DOTNET_DEFAULT_CMD}"' Env: - container=oci - HOME=/opt/app-root - PATH=/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - DOTNET_APP_PATH=/opt/app-root/app - DOTNET_DATA_PATH=/opt/app-root/data - DOTNET_DEFAULT_CMD=default-cmd.sh - DOTNET_RUNNING_IN_CONTAINER=true - NUGET_XMLDOC_MODE=skip - ASPNETCORE_URLS=http://*:8080 - APP_UID=1001 - DOTNET_VERSION=9.0.10 - ASPNET_VERSION=9.0.10 ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T16:23:40Z" com.redhat.component: dotnet-90-runtime-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Platform for running .NET 9 applications distribution-scope: public dotnet_version: 9.0.10 io.buildah.version: 1.41.4 io.k8s.description: Platform for running .NET 9 applications io.k8s.display-name: .NET 9 io.openshift.expose-services: 8080:http io.openshift.tags: runtime,.net,dotnet,dotnetcore,dotnet90-runtime maintainer: Red Hat, Inc. name: ubi8/dotnet-90-runtime org.opencontainers.image.revision: bb2560ddf8f7bde1e795ce741e9021d87dbf007b release: "1761063774" summary: .NET 9 runtime url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: bb2560ddf8f7bde1e795ce741e9021d87dbf007b vcs-type: git vendor: Red Hat, Inc. 
version: "9.0" User: "1001" WorkingDir: /opt/app-root/app ContainerConfig: {} Created: "2025-10-21T16:23:47Z" Id: sha256:9f658b1b18972ec44a4e3b7cebb13ec240b200dd4cd04b05e5c481dd291f837d Size: 103689289 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/dotnet-90-runtime@sha256:88751105dd023552164e3c312742986b011078becb28f1464f1524d134925d73 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:88751105dd023552164e3c312742986b011078becb28f1464f1524d134925d73 resourceVersion: "13461" uid: 53c20b81-5b20-4ae5-8583-23b0e91ec178 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:cbec86182abf32d32e3a23824ea1b33138ab11eacd2791db4a9cd206f29762b4 size: 204149931 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el8 - NODEJS_VERSION=22 - NPM_RUN=start - NAME=nodejs - NPM_CONFIG_PREFIX=/opt/app-root/src/.npm-global - PATH=/opt/app-root/src/node_modules/.bin/:/opt/app-root/src/.npm-global/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - CNB_STACK_ID=com.redhat.stacks.ubi8-nodejs-22 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - SUMMARY=Platform for building and running Node.js 22 applications - DESCRIPTION=Node.js 22 available as container is a base platform for building and running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. - BASH_ENV=/opt/rh/gcc-toolset-13/enable - ENV=/opt/rh/gcc-toolset-13/enable - PROMPT_COMMAND=. /opt/rh/gcc-toolset-13/enable ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T16:22:36Z" com.redhat.component: nodejs-22-container com.redhat.deployments-dir: /opt/app-root/src com.redhat.dev-mode: DEV_MODE:false com.redhat.dev-mode.port: DEBUG_PORT:5858 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Node.js 22 available as container is a base platform for building and running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. 
distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-nodejs-container io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.ubi8-nodejs-22 io.k8s.description: Node.js 22 available as container is a base platform for building and running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. io.k8s.display-name: Node.js 22 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nodejs,nodejs22 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/nodejs-22 org.opencontainers.image.revision: 49b8b2f2a34142b3b914203f829d6e96d9a2683d release: "1761063689" summary: Platform for building and running Node.js 22 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi8/nodejs-22:latest vcs-ref: 49b8b2f2a34142b3b914203f829d6e96d9a2683d vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-21T16:23:03Z" Id: sha256:db248d0ec5a7ea1a2f95f44b96f93595609a7215d8e02010960c8bc6562fd830 Size: 299709077 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/nodejs-22@sha256:887cc9e7fd3ce89adf6233aaf52b0243930e1a958190a09bf37c10f069890ee7 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:887cc9e7fd3ce89adf6233aaf52b0243930e1a958190a09bf37c10f069890ee7 resourceVersion: "14094" uid: b882c3ce-1465-4d5c-9580-7c78039a4b2e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ec0a4551131fd4f14d0a75627716987d815e6a7d000c6aec0ad2250db63285fb size: 76260862 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:448f7cafed668a949885b3817a3154f2d7f933119c6bc15f497e5889ba562000 size: 1319 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:09046bcf6b9ce63012e726b1303b9bfe5d8267a8d69bc652cfe15ce360906ae8 size: 358721836 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:959b6ec1c4635f73fc3e1116039ec4924ad272a0bca2250c54b93048fcf3b878 size: 348675297 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_EAP_VERSION=7.2.8.GA - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.2.8.GA - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - 
S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - DEFAULT_ADMIN_USERNAME=eapadmin - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap72-openshift - JBOSS_IMAGE_VERSION=1.2 - WILDFLY_CAMEL_VERSION=5.4.0.fuse-760021-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: eec8c28ca87f Image: 1dec826b36b51f1cb896136514b1350aed992762fe4e2b1e18822e01a27d3bb5 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-06-11T14:36:50.138967 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Camel applications on EAP 7.2.1 distribution-scope: public io.cekit.version: 3.6.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.2.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Otavio Piske name: fuse7/fuse-eap-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.2.8.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.2.8.GA release: "23.1591885742" summary: Platform for building and running Apache Camel applications on EAP 7.2.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.6-23.1591885742 vcs-ref: 6b471823b8f4348a63e3ce19cb5b05b4c46f6d2f vcs-type: git vendor: Red Hat, Inc. 
version: "1.6" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_EAP_VERSION=7.2.8.GA - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.2.8.GA - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - DEFAULT_ADMIN_USERNAME=eapadmin - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap72-openshift - JBOSS_IMAGE_VERSION=1.2 - WILDFLY_CAMEL_VERSION=5.4.0.fuse-760021-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: eec8c28ca87f Image: sha256:d178cb6268264a07bb6fccc8a95c20057b8eb16e5717e1e8885533aeba109d9b Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-06-11T14:36:50.138967 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Camel applications on EAP 7.2.1 distribution-scope: public io.cekit.version: 3.6.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.2.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Otavio Piske name: fuse7/fuse-eap-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.2.8.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.2.8.GA release: "23.1591885742" summary: Platform for building and running Apache Camel applications on EAP 7.2.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.6-23.1591885742 vcs-ref: 6b471823b8f4348a63e3ce19cb5b05b4c46f6d2f vcs-type: git vendor: Red Hat, Inc. 
version: "1.6" User: "185" WorkingDir: /home/jboss Created: "2020-06-11T14:37:49Z" DockerVersion: 1.13.1 Id: sha256:a539fa5a2747ab4170a2965b45d87714801a81010359cb90a8bae7b1841b706f Size: 783667654 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift@sha256:8968c86f5e2831796cf5f464c87a911b5513fd543b7f3485ccc497fe05ad6bca kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:8968c86f5e2831796cf5f464c87a911b5513fd543b7f3485ccc497fe05ad6bca resourceVersion: "14040" uid: 6410eb36-632b-47c9-9d9d-7e7fe7ac6d73 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:06f86e50a0b74ff9eb161a7d781228877c90e8ff57e9689e8cb8b0f092a2a9f9 size: 39268171 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b7df3aef01631b78d685e3ac9288f9670fc617b99af7198c052e2ad54085150e size: 108996511 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.15 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-04-25T05:07:18 com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 4.3.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk 
org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1682399183" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.15-1.1682399183 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: abf4e05b84271b5340aee7845f3c982034b4b70a vcs-type: git vendor: Red Hat, Inc. version: "1.15" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-04-25T05:22:09Z" Id: sha256:df2c1c9fe6c03102b3715d957eba61d386cba34ba89ffa1283f7664a807ed12e Size: 148294269 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8@sha256:8a5b580b76c2fc2dfe55d13bb0dd53e8c71d718fc1a3773264b1710f49060222 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:8a5b580b76c2fc2dfe55d13bb0dd53e8c71d718fc1a3773264b1710f49060222 resourceVersion: "14212" uid: 08988084-56c5-426d-b5cf-4e08844cd751 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8abc860612807800afc77e8631e844c183642c3fd7bf28098329a9a471c51bfa size: 79390915 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:97b035c29708db1135c3cf7073001a81c20971955c0c36ac92e2ffd4f9171c2d size: 119428849 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:12311e71d6e48d14a6017415e1f8be8ac187a74a92fc102876c1728de7ea0468 size: 33661931 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jws-5.7/tomcat/bin/launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - CATALINA_OPTS=-Djava.security.egd=file:/dev/./urandom - JBOSS_PRODUCT=webserver - JBOSS_WEBSERVER_VERSION=5.7.0 - JPDA_ADDRESS=8000 - JWS_HOME=/opt/jws-5.7/tomcat - PRODUCT_VERSION=5.7.0 - TOMCAT_VERSION=9.0.62 - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JWS_S2I_MODULE=/opt/jboss/container/jws/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss-webserver-5/jws57-openjdk11-rhel8-openshift - JBOSS_IMAGE_VERSION=5.7.0 - STI_BUILDER=jee ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2022-10-28T10:50:40 com.redhat.component: jboss-webserver-57-openjdk11-rhel8-openshift-container com.redhat.deployments-dir: /opt/jws-5.7/tomcat/webapps com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: 
JPDA_ADDRESS:8000 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK11 on UBI8 distribution-scope: public io.buildah.version: 1.26.2 io.cekit.version: 4.3.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running web applications on JBoss Web Server 5.7 with OpenJDK11 - Tomcat v9 io.k8s.display-name: JBoss Web Server 5.7 OpenJDK11 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,tomcat9 maintainer: szappis@redhat.com name: jboss-webserver-5/jws57-openjdk11-rhel8-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/jws-5.7/tomcat/webapps org.jboss.product: webserver-tomcat9 org.jboss.product.openjdk.version: "11" org.jboss.product.version: 5.7.0 org.jboss.product.webserver-tomcat9.version: 5.7.0 release: "6" summary: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK11 on UBI8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-webserver-5/jws57-openjdk11-rhel8-openshift/images/5.7.0-6 vcs-ref: cbf826cef3c5ebd698d9a52a2819dad1f0228bf4 vcs-type: git vendor: Red Hat, Inc. version: 5.7.0 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2022-10-28T10:54:50Z" Id: sha256:8b30e9c7537de1962f24c17565a9f8b9ff19a7a06f3f31671926bd9fac428a1c Size: 232522643 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-webserver-5/jws57-openjdk11-openshift-rhel8@sha256:8a7d4c245418f8099293270f8bbcf7a4207839c4c4ef9974c2e16e303329edf3 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:8a7d4c245418f8099293270f8bbcf7a4207839c4c4ef9974c2e16e303329edf3 resourceVersion: "13695" uid: cd973a16-ec29-4198-90d1-efc075a01254 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:50be7bb6ce3ddb41606e1956ba5c61072699ac536980f260a0db6dc59c8013fe size: 39575081 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ff358ef63d0b0476b2197592c761610d777f3f52226ba4e1db7df1aedc6de3c7 size: 107181264 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - 
JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.23 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2025-10-23T20:03:09 com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.buildah.version: 1.33.12 io.cekit.version: 4.13.0.dev0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 org.opencontainers.image.documentation: https://rh-openjdk.github.io/redhat-openjdk-containers/ release: "3.1761249764" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.23-3.1761249764 usage: https://rh-openjdk.github.io/redhat-openjdk-containers/ vcs-ref: 085819642ed3bd1514fc178d5c27c6ca99781b74 vcs-type: git vendor: Red Hat, Inc. 
version: "1.23" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-10-23T20:17:06Z" Id: sha256:4c55761852e50dbdc4c9f62ac6d4322e7b375dcc3cbf0efee03fe015f8039003 Size: 146786806 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/openjdk-8@sha256:8abcc25fba7202b68818271353a9203677ac3c2d638dafc84e6b45e68d913f59 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:8abcc25fba7202b68818271353a9203677ac3c2d638dafc84e6b45e68d913f59 resourceVersion: "13690" uid: f16c313c-5584-4753-96b7-006072345b71 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e0348fdb2685077d22116d294a90a253709aba78815882a57fcc536b22dcae2f size: 39488293 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:50d776090f4e8d167cbe918c0da58f7b67533ab58d59ffa6acb6f2fad084834a size: 113694045 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.21 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2025-01-08T12:07:12 com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.buildah.version: 1.33.8 io.cekit.version: 4.13.0.dev0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments 
org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" org.opencontainers.image.documentation: https://rh-openjdk.github.io/redhat-openjdk-containers/ release: "1.1736337912" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.21-1.1736337912 usage: https://rh-openjdk.github.io/redhat-openjdk-containers/ vcs-ref: 1a9ebd45b5208ff39ad07a2699430945b7d00eda vcs-type: git vendor: Red Hat, Inc. version: "1.21" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-01-08T12:35:33Z" Id: sha256:0c46377f1021ce6db9f2457907fe43fd1001b2ecc1ae2ac8ad2e699377813dea Size: 153213042 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/openjdk-11@sha256:8be99c30a4e5b021129310847bddca64a93fb38b0c8dfeac482b4c28de062e5a kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:8be99c30a4e5b021129310847bddca64a93fb38b0c8dfeac482b4c28de062e5a resourceVersion: "13686" uid: 003d986f-aa2d-4ef7-b224-a906805a94a9 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:96bd051f1942fe5ab409f17e3e423d97832439ba0ff2a8e505a039d6d08bfb73 size: 76410645 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:159a7b5d1b30a534b218f68e14a0f86ec7e6968fbf582c899d60754f2a063f20 size: 1500 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0e3a6ea4497c7683ea0b09b9b426edaf96e9dea0e3a448d48bc893b77b850738 size: 109409861 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b8a8fa65e643e0f369106eb809d0471ed5214056191a5a4c573ad4ef8bf1b649 size: 17127007 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2.redhat-00002 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - 
JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.8 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.8 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages - JAVACONFDIRS=/opt/rh/rh-maven35/root/etc/java - XDG_CONFIG_DIRS=/opt/rh/rh-maven35/root/etc/xdg:/etc/xdg - XDG_DATA_DIRS=/opt/rh/rh-maven35/root/usr/share:/usr/local/share:/usr/share ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Hostname: b621bb5542e9 Image: 6cd30300db17eeb6079dde6b2884979aaaf45b4caecd6f6fd1b0032c05d7d11e Labels: architecture: x86_64 build-date: 2021-06-22T13:46:32.233791 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 3.6.0 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "19" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.8-19 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 9bc77b209025a4153be16c8f1c2aa12e43d1f0a2 vcs-type: git vendor: Red Hat, Inc. 
version: "1.8" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2.redhat-00002 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.8 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.8 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages - JAVACONFDIRS=/opt/rh/rh-maven35/root/etc/java - XDG_CONFIG_DIRS=/opt/rh/rh-maven35/root/etc/xdg:/etc/xdg - XDG_DATA_DIRS=/opt/rh/rh-maven35/root/usr/share:/usr/local/share:/usr/share ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Hostname: b621bb5542e9 Image: sha256:edc6088e532f9ac489c5c9b4cb0c73b67c6ce16ee4447d7ee63c0c249f453e55 Labels: architecture: x86_64 build-date: 2021-06-22T13:46:32.233791 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 3.6.0 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "19" summary: Build and run Spring Boot-based integration applications url: 
https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.8-19 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 9bc77b209025a4153be16c8f1c2aa12e43d1f0a2 vcs-type: git vendor: Red Hat, Inc. version: "1.8" User: "185" WorkingDir: /home/jboss Created: "2021-06-22T13:49:14Z" DockerVersion: 1.13.1 Id: sha256:4c5c8b73561437cf5c6e0f629b05b150ef1815ed011d8663088ee856b751ed1f Size: 202957959 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift@sha256:8c148fc54caeb860262c3710675c97c58aba79af2d3c76de795b97143aae0e3f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:8c148fc54caeb860262c3710675c97c58aba79af2d3c76de795b97143aae0e3f resourceVersion: "14053" uid: 10d1343c-b57f-45f7-ab3f-beac30bd545b - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d2bb55755c2edf5423e38bd42c48b277e0cb4c5c765218247001a8c8eb479a87 size: 215199578 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:fbacd34c462506669dc03b1b82d7b82d81294c00bba26f1d3768de23c66b1bf9 size: 48612301 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el9 - NODEJS_VER=20 - PYTHON_VERSION=3.9 - PATH=/opt/app-root/src/.local/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PYTHONUNBUFFERED=1 - PYTHONIOENCODING=UTF-8 - LC_ALL=en_US.UTF-8 - LANG=en_US.UTF-8 - CNB_STACK_ID=com.redhat.stacks.ubi9-python-39 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - PIP_NO_CACHE_DIR=off - SUMMARY=Platform for building and running Python 3.9 applications - DESCRIPTION=Python 3.9 available as container is a base platform for building and running various Python 3.9 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. - BASH_ENV=/opt/app-root/bin/activate - ENV=/opt/app-root/bin/activate - PROMPT_COMMAND=. /opt/app-root/bin/activate ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T16:22:20Z" com.redhat.component: python-39-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Python 3.9 available as container is a base platform for building and running various Python 3.9 applications and frameworks. 
Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. distribution-scope: public io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.ubi9-python-39 io.k8s.description: Python 3.9 available as container is a base platform for building and running various Python 3.9 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. io.k8s.display-name: Python 3.9 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,python,python39,python-39,rh-python39 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/python-39 org.opencontainers.image.revision: 8a625fa3d73e8e925536aa57b3b34af04b6d61a1 release: "1760372481" summary: Platform for building and running Python 3.9 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-python-container.git --context-dir=3.9/test/setup-test-app/ ubi9/python-39 python-sample-app vcs-ref: 8a625fa3d73e8e925536aa57b3b34af04b6d61a1 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T16:22:34Z" Id: sha256:b544bb26771b4c0ca36a51ef3de16ab2b3a0d13c93bdc2a318509cb56032a0e9 Size: 360915719 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/python-39@sha256:8c1d5419ebfe1eb1a888c32a456f48ea01e1a6a33c7db59acf689ede1d944516 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:06Z" name: sha256:8c1d5419ebfe1eb1a888c32a456f48ea01e1a6a33c7db59acf689ede1d944516 resourceVersion: "14133" uid: bf56d930-176a-4e8d-b394-4bad482d912a - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:235e42580a3cfee97f71e29c6ec70496e11ed2f12a7aa75234398ebbc328e586 size: 98096183 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-postgresql Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - POSTGRESQL_VERSION=12 - POSTGRESQL_PREV_VERSION=10 - HOME=/var/lib/pgsql - PGUSER=postgres - APP_DATA=/opt/app-root - SUMMARY=PostgreSQL is an advanced Object-Relational database 
management system - DESCRIPTION=PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/postgresql - ENABLED_COLLECTIONS= ExposedPorts: 5432/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T16:20:00Z" com.redhat.component: postgresql-12-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. io.k8s.display-name: PostgreSQL 12 io.openshift.expose-services: 5432:postgresql io.openshift.s2i.assemble-user: "26" io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,postgresql,postgresql12,postgresql-12 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel8/postgresql-12 org.opencontainers.image.revision: 34b340acc69e166be1ace1971cec764be1bd32be release: "1761063442" summary: PostgreSQL is an advanced Object-Relational database management system url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d --name postgresql_database -e POSTGRESQL_USER=user -e POSTGRESQL_PASSWORD=pass -e POSTGRESQL_DATABASE=db -p 5432:5432 rhel8/postgresql-12 vcs-ref: 34b340acc69e166be1ace1971cec764be1bd32be vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "26" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-21T16:20:21Z" Id: sha256:63ac874262365667e82296277bcd9ddba79c4ee9183b83a242c06a8ac51fe98e Size: 193654414 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel8/postgresql-12@sha256:8d57f273f8521c9b2d55756dbff05559184d1aeec46517e46c71de97cd72c12b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:8d57f273f8521c9b2d55756dbff05559184d1aeec46517e46c71de97cd72c12b resourceVersion: "14109" uid: 92336955-41f7-4aad-adcd-63211f4ccf0b - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:96bd051f1942fe5ab409f17e3e423d97832439ba0ff2a8e505a039d6d08bfb73 size: 76410645 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:159a7b5d1b30a534b218f68e14a0f86ec7e6968fbf582c899d60754f2a063f20 size: 1500 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0e3a6ea4497c7683ea0b09b9b426edaf96e9dea0e3a448d48bc893b77b850738 size: 109409861 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:eb9c9fe2d77eb7fc3c382d2a817ad73a42753bbb3264cde397ca1956461082dd size: 17129421 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - usage Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2.redhat-00002 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.8 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.8 - KARAF_FRAMEWORK_VERSION=4.2.9.fuse-780023-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Hostname: b621bb5542e9 Image: c7de340e3efa553fdb8638489a9e9a3e20caaf712f7045da505cdd7c40524367 Labels: 
architecture: x86_64 build-date: 2021-06-22T13:20:43.822235 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 3.6.0 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.karaf: 4.2.9.fuse-780023-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "27" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.8-27 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 8cd8b16f18f4e66c80db9c8e34f4a25931beecac vcs-type: git vendor: Red Hat, Inc. version: "1.8" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2.redhat-00002 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.8 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.8 - KARAF_FRAMEWORK_VERSION=4.2.9.fuse-780023-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - 
'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Hostname: b621bb5542e9 Image: sha256:4ce058a449ae714e130b49bd15101cac9e7ed5e19995086476c9f04930b3bf4f Labels: architecture: x86_64 build-date: 2021-06-22T13:20:43.822235 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 3.6.0 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.karaf: 4.2.9.fuse-780023-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "27" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.8-27 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 8cd8b16f18f4e66c80db9c8e34f4a25931beecac vcs-type: git vendor: Red Hat, Inc. 
version: "1.8" User: "185" WorkingDir: /home/jboss Created: "2021-06-22T13:23:40Z" DockerVersion: 1.13.1 Id: sha256:05b513e1291369e7ba146b899e91248c77c5c60f0f0d7de69c6847f937e38541 Size: 202960338 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:8e83f3ef3b5ad4bd7c4002d0201e4d5dd26a158c0be3ad29405ff4800d5661b8 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:8e83f3ef3b5ad4bd7c4002d0201e4d5dd26a158c0be3ad29405ff4800d5661b8 resourceVersion: "14041" uid: 031bb92a-af53-44a5-a92c-789a8cecf141 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:06f86e50a0b74ff9eb161a7d781228877c90e8ff57e9689e8cb8b0f092a2a9f9 size: 39268171 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d09aca24592b99820eb623c3a56914ab82562e5a4e37aa67ece0402d832e3100 size: 112844388 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.15 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-04-21T06:45:36 com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 4.3.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk 
org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1682059493" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.15-1.1682059493 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: 90e40029d38fdb9478a55b716811d1cb08fd31f8 vcs-type: git vendor: Red Hat, Inc. version: "1.15" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-04-21T07:01:06Z" Id: sha256:d1ce871371c268991ea2f4c4dd5b5dcd972f9a68bc55f48b320afe6fa43482b9 Size: 152142633 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11@sha256:9036a59a8275f9c205ef5fc674f38c0495275a1a7912029f9a784406bb00b1f5 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:9036a59a8275f9c205ef5fc674f38c0495275a1a7912029f9a784406bb00b1f5 resourceVersion: "14177" uid: 6bd04940-d553-41ba-aa4c-5bdc1794ee94 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:cc5e0be4f784d567d0ee79ad3c2c2de31e847a26edf772079b0837fd059ee64e size: 6832 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6e8a338842e1c40648096356a92b5ef70b859e820d1e6b3bdc429bee54d38faa size: 410 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin Labels: License: ASL 2.0 com.redhat.component: smart-gateway-operator-bundle-container com.redhat.delivery.backport: "false" com.redhat.delivery.operator.bundle: "true" com.redhat.openshift.versions: v4.18-v4.20 description: smart-gateway-operator-bundle io.k8s.display-name: smart-gateway-operator-bundle io.openshift.expose-services: "" io.openshift.tags: monitoring,telemetry,faulting,serviceassurance maintainer: '[''leif+smart-gateway-operator@redhat.com'']' name: stf/smart-gateway-operator-bundle operators.operatorframework.io.bundle.channel.default.v1: unstable operators.operatorframework.io.bundle.channels.v1: unstable operators.operatorframework.io.bundle.manifests.v1: manifests/ operators.operatorframework.io.bundle.mediatype.v1: registry+v1 operators.operatorframework.io.bundle.metadata.v1: metadata/ operators.operatorframework.io.bundle.package.v1: smart-gateway-operator operators.operatorframework.io.metrics.builder: operator-sdk-v0.19.4 operators.operatorframework.io.metrics.mediatype.v1: metrics+v1 operators.operatorframework.io.metrics.project_layout: ansible summary: smart-gateway-operator-bundle version: 5.0.1765493221 WorkingDir: / ContainerConfig: {} Created: "2025-12-11T22:47:05Z" Id: sha256:013e832e356eaf9890124db5a0ac9595deaa76bbdb566c6697491928dc3a4a82 Size: 11775 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: quay.io/infrawatch-operators/smart-gateway-operator-bundle@sha256:919dbd673de74ca9084a6e2a8e3d3bc6e703857821cde1c1849f1863b2f11fdc kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-12-12T16:28:04Z" name: sha256:919dbd673de74ca9084a6e2a8e3d3bc6e703857821cde1c1849f1863b2f11fdc resourceVersion: "43576" uid: 
876f0d1c-fb0e-4e4b-b7f7-48cc6e2bd432 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5329d7039f252afc1c5d69521ef7e674f71c36b50db99b369cbb52aa9e0a6782 size: 39330100 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4010c146a5d3c1fc55941f0f2d867dcec12af55b5f2f76ef470fb867848424a5 size: 88987292 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.16 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-07-20T20:08:20 com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "3" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.16-3 vcs-ref: eeccc001a658b49635214b48ff00acf4fa195ada vcs-type: git vendor: Red Hat, Inc. 
version: "1.16" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-07-20T20:13:11Z" Id: sha256:b3ab963ea6f591c8d7d66719238097970502eabdc5e76ce15ee35ad31e2515f4 Size: 128335849 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:920ff7e5efc777cb523669c425fd7b553176c9f4b34a85ceddcb548c2ac5f78a kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:920ff7e5efc777cb523669c425fd7b553176c9f4b34a85ceddcb548c2ac5f78a resourceVersion: "14193" uid: 5b0435d0-2cb9-43a5-938e-f599b45d67f4 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c8c24bf4ef435ee29d7495ca732a4d82374c1a11c25ca6aae12f997f45ca5a26 size: 39733597 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:49a47137b1a2136617a3a472bc16821c293335abafc0672f42561e1bf16a0f88 size: 39940782 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - container=oci - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - NPM_RUN=start - PLATFORM=el8 - NODEJS_VERSION=20 - NAME=nodejs - SUMMARY=Minimal image for running Node.js 20 applications - DESCRIPTION=Node.js 20 available as container is a base platform for running various Node.js 20 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. - NPM_CONFIG_PREFIX=/opt/app-root/src/.npm-global - PATH=/opt/app-root/src/node_modules/.bin/:/opt/app-root/src/.npm-global/bin/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T16:14:58Z" com.redhat.component: nodejs-20-minimal-container com.redhat.deployments-dir: /opt/app-root/src com.redhat.dev-mode: DEV_MODE:false com.redhat.dev-mode.port: DEBUG_PORT:5858 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Node.js 20 available as container is a base platform for running various Node.js 20 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-nodejs-container io.buildah.version: 1.41.4 io.k8s.description: Node.js 20 available as container is a base platform for running various Node.js 20 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. 
io.k8s.display-name: Node.js 20 Minimal io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nodejs,nodejs20 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/nodejs-20-minimal org.opencontainers.image.revision: cb238afef55d7c411b9322efeba590433e424d52 release: "1761063274" summary: Minimal image for running Node.js 20 applications url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: cb238afef55d7c411b9322efeba590433e424d52 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-21T16:15:09Z" Id: sha256:6b7383f3a11cf290881ca9dacadc5025b578a8432b13b182037e69e040134b6c Size: 79686598 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/nodejs-20-minimal@sha256:936daac34be9105cb05ca1eb7bcf89b280df38060709cf581df03fb69362e4df kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:936daac34be9105cb05ca1eb7bcf89b280df38060709cf581df03fb69362e4df resourceVersion: "14086" uid: 2f90468a-f59e-48f7-9592-6949468df17b - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:db0f4cd412505c5cc2f31cf3c65db80f84d8656c4bfa9ef627a6f532c0459fc4 size: 78359137 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7e3624512448126fd29504b9af9bc034538918c54f0988fb08c03ff7a3a9a4cb size: 1789 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:14f31f43ca6d86bcd2e6a968f079af4a3c7e5bde11c32fe252cff5e99d41364b size: 450111139 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_EAP_VERSION=7.4.6 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap-xp - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=3.0.0 - WILDFLY_VERSION=3.0.0.GA-redhat-00016 - SSO_FORCE_LEGACY_SECURITY=true - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.8.Final - GALLEON_WILDFLY_VERSION=5.2.6.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - 
JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - S2I_FP_VERSION=3.0.0.Final - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - DEFAULT_ADMIN_USERNAME=eapadmin - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - OFFLINER_VERSION=1.6 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap-xp3-openjdk11-openshift-rhel8 - JBOSS_IMAGE_VERSION=3.0 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 7323ecf6dc56 Image: 7c29609a8c38f5c5df600979d42b6face2facfbe29a9e30d7462cac65d5d08f5 Labels: architecture: x86_64 build-date: 2022-08-03T11:41:48.394081 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-eap-xp3-openjdk11-builder-openshift-rhel8-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Enterprise Application Platform XP 3.0 OpenShift container image. 
distribution-scope: public io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running JavaEE applications on JBoss EAP XP 3.0 io.k8s.display-name: JBoss EAP XP io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,eap-xp maintainer: Red Hat name: jboss-eap-7/eap-xp3-openjdk11-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap-xp org.jboss.product.eap.version: 7.4.6 org.jboss.product.openjdk.version: "11" org.jboss.product.version: 3.0.0 release: "21" summary: Red Hat JBoss Enterprise Application Platform XP 3.0 OpenShift container image. url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-eap-7/eap-xp3-openjdk11-openshift-rhel8/images/3.0-21 vcs-ref: cfdfab38abf90a24b6ae340095d579fae34ac19f vcs-type: git vendor: Red Hat, Inc. version: "3.0" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_EAP_VERSION=7.4.6 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap-xp - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=3.0.0 - WILDFLY_VERSION=3.0.0.GA-redhat-00016 - SSO_FORCE_LEGACY_SECURITY=true - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.8.Final - GALLEON_WILDFLY_VERSION=5.2.6.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - 
GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - S2I_FP_VERSION=3.0.0.Final - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - DEFAULT_ADMIN_USERNAME=eapadmin - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - OFFLINER_VERSION=1.6 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap-xp3-openjdk11-openshift-rhel8 - JBOSS_IMAGE_VERSION=3.0 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 7323ecf6dc56 Image: sha256:87b6aca14aa56e8872822e964f37b69a378849208dd9e7f9eed9f16a10ca0c34 Labels: architecture: x86_64 build-date: 2022-08-03T11:41:48.394081 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-eap-xp3-openjdk11-builder-openshift-rhel8-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Enterprise Application Platform XP 3.0 OpenShift container image. distribution-scope: public io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running JavaEE applications on JBoss EAP XP 3.0 io.k8s.display-name: JBoss EAP XP io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,eap-xp maintainer: Red Hat name: jboss-eap-7/eap-xp3-openjdk11-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap-xp org.jboss.product.eap.version: 7.4.6 org.jboss.product.openjdk.version: "11" org.jboss.product.version: 3.0.0 release: "21" summary: Red Hat JBoss Enterprise Application Platform XP 3.0 OpenShift container image. url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-eap-7/eap-xp3-openjdk11-openshift-rhel8/images/3.0-21 vcs-ref: cfdfab38abf90a24b6ae340095d579fae34ac19f vcs-type: git vendor: Red Hat, Inc. 
version: "3.0" User: "185" WorkingDir: /home/jboss Created: "2022-08-03T11:47:40Z" DockerVersion: 1.13.1 Id: sha256:651ba477f1729377820e3c4d37b16cb7c970b4c616387639d7a2c3e75604addb Size: 528484179 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-eap-7/eap-xp3-openjdk11-openshift-rhel8@sha256:953aeb7c686ebe9359eb9e020aabaa011e47de9a0c38a3e97f85ff038abef5e6 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:953aeb7c686ebe9359eb9e020aabaa011e47de9a0c38a3e97f85ff038abef5e6 resourceVersion: "13382" uid: e2937c86-2422-4b3e-bd8c-6be83ebe66c1 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:34cbb242d65bd3b5ea98fd0bf5be8ce2b2316994693b130adb043cd6537ee9ca size: 76239722 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0a4b6512aa42577405f0f324407ee3140e668e9bb470c3fb472e11266482468f size: 1414 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:43af5cb8726181f66a714e7b2e0fedeaafc94f41d9da7b8bbaf60cccbd282947 size: 360161439 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9189bb850e4ef7fc92a22a5e0d669a118f5ec3e16ec679c116d63a3af3840f7b size: 362224783 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_EAP_VERSION=7.2.8.GA - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.2.8.GA - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - DEFAULT_ADMIN_USERNAME=eapadmin - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap72-openshift - JBOSS_IMAGE_VERSION=1.2 - WILDFLY_CAMEL_VERSION=5.5.0.fuse-770010-redhat-00003 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: e1afd63c27cb Image: cd014bc1a847cd3fed2f90a3fbc523a426e4ec7008cebec062413b1810264433 Labels: architecture: x86_64 build-date: 2020-11-04T18:20:14.968019 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: 
fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Camel applications on EAP 7.2.1 distribution-scope: public io.cekit.version: 3.6.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.2.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Otavio Piske name: fuse7/fuse-eap-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.2.8.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.2.8.GA release: "12.1604512956" summary: Platform for building and running Apache Camel applications on EAP 7.2.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.7-12.1604512956 vcs-ref: eefa649e2597e8e14192fabab6d39c751a947fa7 vcs-type: git vendor: Red Hat, Inc. version: "1.7" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_EAP_VERSION=7.2.8.GA - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.2.8.GA - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - DEFAULT_ADMIN_USERNAME=eapadmin - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap72-openshift - JBOSS_IMAGE_VERSION=1.2 - WILDFLY_CAMEL_VERSION=5.5.0.fuse-770010-redhat-00003 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: e1afd63c27cb Image: sha256:285de3f29c8aa04b61c47d78bb30fda0cef3f92acb0e7130ab1d8e555aa70467 Labels: architecture: x86_64 build-date: 2020-11-04T18:20:14.968019 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true 
com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Camel applications on EAP 7.2.1 distribution-scope: public io.cekit.version: 3.6.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.2.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Otavio Piske name: fuse7/fuse-eap-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.2.8.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.2.8.GA release: "12.1604512956" summary: Platform for building and running Apache Camel applications on EAP 7.2.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.7-12.1604512956 vcs-ref: eefa649e2597e8e14192fabab6d39c751a947fa7 vcs-type: git vendor: Red Hat, Inc. version: "1.7" User: "185" WorkingDir: /home/jboss Created: "2020-11-04T18:21:27Z" DockerVersion: 1.13.1 Id: sha256:be40aa00260f66d90539c0d1cd5026c62b7911c1c7342db50c7580e98e0a828f Size: 798635518 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift@sha256:956ad570b06da524a856c6c2b421c8b4aab160fc4565cde798c72fa050c2dedf kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:956ad570b06da524a856c6c2b421c8b4aab160fc4565cde798c72fa050c2dedf resourceVersion: "14043" uid: 5cb6f520-db7b-4ee2-ab5f-b41168cc7821 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:50be7bb6ce3ddb41606e1956ba5c61072699ac536980f260a0db6dc59c8013fe size: 39575081 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:911b8848d090c0978905e7ddcc09c6d1b296d99a98f1d5c74c4d20e96b994577 size: 75953565 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.23 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2025-10-23T20:03:09 com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.buildah.version: 1.33.12 io.cekit.version: 4.13.0.dev0 io.k8s.description: 
Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 org.opencontainers.image.documentation: https://rh-openjdk.github.io/redhat-openjdk-containers/ release: "3.1761249763" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.23-3.1761249763 usage: https://rh-openjdk.github.io/redhat-openjdk-containers/ vcs-ref: 1525ac5b44f35db161f3d3efa207ccd05b700efa vcs-type: git vendor: Red Hat, Inc. version: "1.23" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-10-23T20:12:43Z" Id: sha256:3f0bcc6c0d5b6748f2ad906c60fff669c4098b54a58af97082bb036d4ac4deb7 Size: 115548551 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:963cb8b96a26857034d16753409e0b48eb8c7e2702ad97ea53136a705946e535 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:963cb8b96a26857034d16753409e0b48eb8c7e2702ad97ea53136a705946e535 resourceVersion: "13514" uid: bd8ab23f-c9ac-4f68-bb3e-1c6155e0217d - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:25c75c34b2e2b68ba9245d9cddeb6b8a0887371ed30744064f85241a75704d87 size: 79262296 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:67705065e025181e4faca8aabe1305bdd92f5bdf8a2b8009cdb69183ac2e2c47 size: 49851946 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9d458e2e81cb0fa811f569aaf711628309c0372c7d5eed4a8ea9ec96b4aeeb42 size: 9300456 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3019826b26b93fdb39b6e29614bc6b4d1ab879c596261851db4ff70706fa6c55 size: 183535774 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:24ecfccd693adf38a95b31fa28a1832484a4b78bf63d1e7ca71d60123c17ead0 size: 127962077 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/bin/bash Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - ART_BUILD_ENGINE=konflux - ART_BUILD_DEPS_METHOD=cachi2 - ART_BUILD_NETWORK=hermetic - ART_BUILD_DEPS_MODE=default - __doozer=merge - BUILD_RELEASE=202510212154.p2.g69ff479.assembly.stream.el9 - BUILD_VERSION=v4.20.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=20 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.20.0-202510212154.p2.g69ff479.assembly.stream.el9-69ff479 - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.20 - __doozer_key=openshift-enterprise-tests - __doozer_uuid_tag=ose-tests-rhel9-v4.20.0-20251021.223340 - __doozer_version=v4.20.0 - OS_GIT_COMMIT=69ff479 - SOURCE_DATE_EPOCH=1760012512 - SOURCE_GIT_COMMIT=69ff479373a432c4c48b76ff5ae1c5c56b9ffc5b - SOURCE_GIT_TAG=v4.1.0-9941-g69ff479373 - SOURCE_GIT_URL=https://github.com/openshift/origin Labels: License: GPLv2+ architecture: x86_64 build-date: "2025-10-21T23:23:44Z" com.redhat.component: openshift-enterprise-tests-container com.redhat.license_terms: 
https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:openshift:4.20::el9 description: Empty distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: OpenShift is a platform for developing, building, and deploying containerized applications. io.k8s.display-name: OpenShift End-to-End Tests io.openshift.build.commit.id: 69ff479373a432c4c48b76ff5ae1c5c56b9ffc5b io.openshift.build.commit.url: https://github.com/openshift/origin/commit/69ff479373a432c4c48b76ff5ae1c5c56b9ffc5b io.openshift.build.source-location: https://github.com/openshift/origin io.openshift.build.versions: kubernetes-tests=1.33.4 io.openshift.expose-services: "" io.openshift.maintainer.component: Test Framework io.openshift.maintainer.project: OCPBUGS io.openshift.release.operator: "true" io.openshift.tags: openshift,tests,e2e maintainer: Red Hat, Inc. name: openshift/ose-tests-rhel9 org.opencontainers.image.revision: cc08071546e345c534a6083c6b2ca879e49007a9 release: 202510212154.p2.g69ff479.assembly.stream.el9 summary: Empty url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel-els/images/9.4-847.1719484506 vcs-ref: cc08071546e345c534a6083c6b2ca879e49007a9 vcs-type: git vendor: Red Hat, Inc. version: v4.20.0 ContainerConfig: {} Created: "2025-10-21T23:26:44Z" Id: sha256:8a04a9c6687001c900f85968c7ab65743914c5abff0bbd6e76241d46cad011fc Size: 449939838 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9a1ff2292e9e3aa41290373a931e9b52de2b206e4da35dc12dc553f7b0e58146 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:9a1ff2292e9e3aa41290373a931e9b52de2b206e4da35dc12dc553f7b0e58146 resourceVersion: "14076" uid: 4105d75c-fd5f-4a23-8837-1febdd98746d - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a20dc09567a04bdff2ebfaa3d3917f64d7620555e6354d53b43dd7ebb0e0f575 size: 79751689 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ac59b0c289e29a4926bf37ba68fd96d88bfb088939fb2141d2fd7663f7a43a65 size: 119102707 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. 
Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.15 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-10-17T21:52:36 com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: openjdk/openjdk-11-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "10" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.15-10 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: bc2e8cbed5194676f5e562e97468861dc04c055f vcs-type: git vendor: Red Hat, Inc. 
version: "1.15" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-10-17T22:20:18Z" Id: sha256:e9847fcee45301c4851138e5224a14e27a2c94844d178c65dd2784d4cdb5628c Size: 198884342 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:9ab26cb4005e9b60fd6349950957bbd0120efba216036da53c547c6f1c9e5e7f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:9ab26cb4005e9b60fd6349950957bbd0120efba216036da53c547c6f1c9e5e7f resourceVersion: "14103" uid: 68b084a9-f039-4e0a-a93d-800f75ea0732 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:50be7bb6ce3ddb41606e1956ba5c61072699ac536980f260a0db6dc59c8013fe size: 39575081 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9fda820972db65f84f083456d1ef6c072602f72f8849f448f7cb5b051e38af20 size: 97135313 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.23 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2025-11-25T10:27:55 com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.buildah.version: 1.33.12 io.cekit.version: 4.13.0.dev0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" org.opencontainers.image.documentation: https://rh-openjdk.github.io/redhat-openjdk-containers/ release: "3.1764066412" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.23-3.1764066412 usage: https://rh-openjdk.github.io/redhat-openjdk-containers/ vcs-ref: b40c568e2fa6d032648af2c70f3fe4f0cbf5ce66 vcs-type: git vendor: Red Hat, Inc. 
version: "1.23" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-11-25T10:40:37Z" Id: sha256:aa8e84022629e81f8870b1c37d3fc34cd9cea85a45ddc5634740763624f3c364 Size: 136730282 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/openjdk-17-runtime@sha256:9b5d2fc574a13613f18fa983ac2901593c1e812836e918095bc3d15b6cc4ba57 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-12-12T16:25:13Z" name: sha256:9b5d2fc574a13613f18fa983ac2901593c1e812836e918095bc3d15b6cc4ba57 resourceVersion: "40605" uid: 7c24a3bc-e9db-40ef-958b-2ce137d5502c - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8abc860612807800afc77e8631e844c183642c3fd7bf28098329a9a471c51bfa size: 79390915 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:646e25f094bfdf115abaa765c7e87d0695757439d2d01c5a96950d1a8230095c size: 102110606 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9f629aedc79c0f56aca3877c00f3d459e5c4e1cd8c2462b3b2f50d374f5051da size: 29827212 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jws-5.7/tomcat/bin/launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - CATALINA_OPTS=-Djava.security.egd=file:/dev/./urandom - JBOSS_PRODUCT=webserver - JBOSS_WEBSERVER_VERSION=5.7.0 - JPDA_ADDRESS=8000 - JWS_HOME=/opt/jws-5.7/tomcat - PRODUCT_VERSION=5.7.0 - TOMCAT_VERSION=9.0.62 - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JWS_S2I_MODULE=/opt/jboss/container/jws/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss-webserver-5/jws57-openjdk8-rhel8-openshift - JBOSS_IMAGE_VERSION=5.7.0 - STI_BUILDER=jee ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2022-10-28T09:24:39 com.redhat.component: jboss-webserver-57-openjdk8-rhel8-openshift-container com.redhat.deployments-dir: /opt/jws-5.7/tomcat/webapps com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: JPDA_ADDRESS:8000 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK8 on UBI8 distribution-scope: public io.buildah.version: 1.26.2 io.cekit.version: 4.3.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running web applications on JBoss Web Server 5.7 with OpenJDK8 - Tomcat v9 io.k8s.display-name: JBoss Web Server 5.7 
OpenJDK8 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,tomcat9 maintainer: szappis@redhat.com name: jboss-webserver-5/jws57-openjdk8-rhel8-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/jws-5.7/tomcat/webapps org.jboss.product: webserver-tomcat9 org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 5.7.0 org.jboss.product.webserver-tomcat9.version: 5.7.0 release: "6" summary: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK8 on UBI8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-webserver-5/jws57-openjdk8-rhel8-openshift/images/5.7.0-6 vcs-ref: 30c25e5f7426300944d59bdf2441b77a306eec9a vcs-type: git vendor: Red Hat, Inc. version: 5.7.0 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2022-10-28T09:28:29Z" Id: sha256:fc147f625fd89d8e9a8e60e53d35939809477d4254f43722faafecc1ee2ae2bb Size: 211369649 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-webserver-5/jws57-openjdk8-openshift-rhel8@sha256:9c10ee657f8d4fc4cee2d6f3fca56a8ded4354b90beea00e8274d1927e0fe8c7 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:9c10ee657f8d4fc4cee2d6f3fca56a8ded4354b90beea00e8274d1927e0fe8c7 resourceVersion: "13788" uid: f265bf7a-7a2e-4fcb-87bb-715647526958 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e2305491eb6ffd1d8265760823bba728f83c81461eff41237c57e74c5fb2e7a3 size: 11193462 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/bin/run-httpd Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - HTTPD_VERSION=2.4 - HTTPD_SHORT_VERSION=24 - NAME=httpd - SUMMARY=Platform for running Apache httpd 2.4 or building httpd-based application - DESCRIPTION=Apache httpd 2.4 available as container, is a powerful, efficient, and extensible web server. Apache supports a variety of features, many implemented as compiled modules which extend the core functionality. These can range from server-side programming language support to authentication schemes. Virtual hosting allows one Apache installation to serve many different Web sites. 
- HTTPD_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/httpd/ - HTTPD_APP_ROOT=/opt/app-root - HTTPD_CONFIGURATION_PATH=/opt/app-root/etc/httpd.d - HTTPD_MAIN_CONF_PATH=/etc/httpd/conf - HTTPD_MAIN_CONF_MODULES_D_PATH=/etc/httpd/conf.modules.d - HTTPD_MAIN_CONF_D_PATH=/etc/httpd/conf.d - HTTPD_TLS_CERT_PATH=/etc/httpd/tls - HTTPD_VAR_RUN=/var/run/httpd - HTTPD_DATA_PATH=/var/www - HTTPD_DATA_ORIG_PATH=/var/www - HTTPD_LOG_PATH=/var/log/httpd ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T20:11:21Z" com.redhat.component: httpd-24-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Apache httpd 2.4 available as container, is a powerful, efficient, and extensible web server. Apache supports a variety of features, many implemented as compiled modules which extend the core functionality. These can range from server-side programming language support to authentication schemes. Virtual hosting allows one Apache installation to serve many different Web sites. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Apache httpd 2.4 available as container, is a powerful, efficient, and extensible web server. Apache supports a variety of features, many implemented as compiled modules which extend the core functionality. These can range from server-side programming language support to authentication schemes. Virtual hosting allows one Apache installation to serve many different Web sites. io.k8s.display-name: Apache httpd 2.4 io.openshift.expose-services: 8080:http,8443:https io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,httpd,httpd-24 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel9/httpd-24 org.opencontainers.image.revision: 03c614bc240603ef13f05d4366c5027b8f79a40b release: "1760386250" summary: Platform for running Apache httpd 2.4 or building httpd-based application url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/httpd-container.git --context-dir=examples/sample-test-app/ rhel9/httpd-24 sample-server vcs-ref: 03c614bc240603ef13f05d4366c5027b8f79a40b vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T20:11:25Z" Id: sha256:3a17e28bd30d0ed0fd57d89760e591d16f93c412a8166f5560d4596b9909a74e Size: 108293615 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/httpd-24@sha256:9c8685e9b35e0262af34b42288252f421e0791efd835b5673cf9d10c90863a36 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:9c8685e9b35e0262af34b42288252f421e0791efd835b5673cf9d10c90863a36 resourceVersion: "13589" uid: 7ccc37db-beff-432b-8ea3-e1379779e1e7 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c9ff3e9281bcbcadd57f37cc0e47a4081cc20a091749d7a33d56496a60a2c1be size: 76240719 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f897b9608c98d944929bd778316439ac000b43d974c70efb678187e436f095fa size: 1320 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:84d82243c4d526fb64212dbf143c4bda2e708fdfaf270853a2d588c185b1bbfe size: 358310228 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2ac19899f11cfecf13fcc6858cec9aa84a088eebd01245dc25cd9c9ffc9efc32 size: 351119588 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_EAP_VERSION=7.2.7.GA - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.2.7.GA - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - DEFAULT_ADMIN_USERNAME=eapadmin - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap72-openshift - JBOSS_IMAGE_VERSION=1.2 - WILDFLY_CAMEL_VERSION=5.3.0.fuse-750026-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 7158789667d6 Image: 791bedb1a4c1b02af8001a2b4aaae305444773523d7b83cb8a99e9e34d69e5c1 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-03-17T17:08:43.683394 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com 
com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Camel applications on EAP 7.2.1 distribution-scope: public io.cekit.version: 3.2.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.2.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Otavio Piske name: fuse7/fuse-eap-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.2.7.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.2.7.GA release: "12.1584463377" summary: Platform for building and running Apache Camel applications on EAP 7.2.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.5-12.1584463377 vcs-ref: 1e76ab152c0fb1ff811ef366b74092352dc2fe65 vcs-type: git vendor: Red Hat, Inc. version: "1.5" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_EAP_VERSION=7.2.7.GA - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.2.7.GA - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - DEFAULT_ADMIN_USERNAME=eapadmin - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap72-openshift - JBOSS_IMAGE_VERSION=1.2 - WILDFLY_CAMEL_VERSION=5.3.0.fuse-750026-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 7158789667d6 Image: sha256:382de91c612b07160f62b17aaaa4cc84d1015b829d0f886d824e2be05515cfdd Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-03-17T17:08:43.683394 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: 
fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Camel applications on EAP 7.2.1 distribution-scope: public io.cekit.version: 3.2.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.2.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Otavio Piske name: fuse7/fuse-eap-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.2.7.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.2.7.GA release: "12.1584463377" summary: Platform for building and running Apache Camel applications on EAP 7.2.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.5-12.1584463377 vcs-ref: 1e76ab152c0fb1ff811ef366b74092352dc2fe65 vcs-type: git vendor: Red Hat, Inc. version: "1.5" User: "185" WorkingDir: /home/jboss Created: "2020-03-17T17:10:28Z" DockerVersion: 1.13.1 Id: sha256:dd66efe4ea6ed2453c3eaad23b8fd09f7ac35203cefee8e997c4b718a2ff129d Size: 785680195 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift@sha256:9d0ed8688df061f7555046a61cec60f909b325e77dee6aec9d2350f81efa0b46 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:9d0ed8688df061f7555046a61cec60f909b325e77dee6aec9d2350f81efa0b46 resourceVersion: "14037" uid: cdf30894-7900-4273-b02a-9792d5df0697 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:96cf2b875e2a36c8825aadbf02bb8c31fe8e9de2f715f939a18f669de38ee76c size: 106992460 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - 
JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.17 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-10-23T16:01:28 com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "4" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.17-4 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: 733fe9b2adb674b4b549a51a35f6a4a6a38ad037 vcs-type: git vendor: Red Hat, Inc. 
version: "1.17" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-10-23T16:17:36Z" Id: sha256:35d26bac52276039582d07b462c291fea0355814ab44f5b68fd3df8c060562aa Size: 146329998 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8@sha256:9d759db3bb650e5367216ce261779c5a58693fc7ae10f21cd264011562bd746d kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:9d759db3bb650e5367216ce261779c5a58693fc7ae10f21cd264011562bd746d resourceVersion: "14214" uid: 212b2942-dfd8-4665-8e18-ff37bdc44f6a - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:172ffdf04fe0ffd5a0117272da0333271a3c558dbeaf357e7412638d99a1e462 size: 154305621 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:52f2e6ec5ef8a4dbdb3c4274ef76ba2037ebbd724573f2057856b01a2a7ea1b8 size: 19585623 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - NODEJS_VER=20 - RUBY_MAJOR_VERSION=3 - RUBY_MINOR_VERSION=3 - RUBY_VERSION=3.3 - RUBY_SCL_NAME_VERSION=33 - RUBY_SCL=ruby-33 - IMAGE_NAME=ubi8/ruby-33 - SUMMARY=Platform for building and running Ruby 3.3 applications - DESCRIPTION=Ruby 3.3 available as container is a base platform for building and running various Ruby 3.3 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-30T12:10:39Z" com.redhat.component: ruby-33-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Ruby 3.3 available as container is a base platform for building and running various Ruby 3.3 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Ruby 3.3 available as container is a base platform for building and running various Ruby 3.3 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. 
io.k8s.display-name: Ruby 3.3 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,ruby,ruby33,ruby-33 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/ruby-33 org.opencontainers.image.revision: ef7b2eae3d26a61613cc71dd1291080399bce4be release: "1761826171" summary: Platform for building and running Ruby 3.3 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-ruby-container.git --context-dir=3.3/test/puma-test-app/ ubi8/ruby-33 ruby-sample-app vcs-ref: ef7b2eae3d26a61613cc71dd1291080399bce4be vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-30T12:10:48Z" Id: sha256:c2496e7ea1b21694f5bcf9dd6902807d8f852f4eef30466504885ea8f1e2286b Size: 269452030 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/ruby-33@sha256:9d895b1202aaf35bbe7b432736307c965d9d84fd91c06e41f8ac21c7ea0590a0 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:9d895b1202aaf35bbe7b432736307c965d9d84fd91c06e41f8ac21c7ea0590a0 resourceVersion: "14118" uid: 5f5938c0-5510-4115-93d9-a072382b27a1 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d9e82098d9baea2e101f08de3cadfb5eb9155ed9b0c3cc58d0dd84acd4f8f6ec size: 103715605 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-mysqld Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - MYSQL_VERSION=10.5 - APP_DATA=/opt/app-root/src - HOME=/var/lib/mysql - SUMMARY=MariaDB 10.5 SQL database server - DESCRIPTION=MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/mysql - MYSQL_PREFIX=/usr ExposedPorts: 3306/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T20:14:59Z" com.redhat.component: mariadb-105-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. 
distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. io.k8s.display-name: MariaDB 10.5 io.openshift.expose-services: 3306:mysql io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,mysql,mariadb,mariadb105,mariadb-105 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel8/mariadb-105 org.opencontainers.image.revision: fd3cb58cc55ba53455c39286fd22514602d64ae0 release: "1761077645" summary: MariaDB 10.5 SQL database server url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d -e MYSQL_USER=user -e MYSQL_PASSWORD=pass -e MYSQL_DATABASE=db -p 3306:3306 rhel8/mariadb-105 vcs-ref: fd3cb58cc55ba53455c39286fd22514602d64ae0 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "27" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-21T20:15:20Z" Id: sha256:62490f22b0ec3d319419337e5fb84c1841bfd3501b23fe94782ac7dde285c879 Size: 199273271 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel8/mariadb-105@sha256:9dea3590288a2f7e58af534af889123e044b0e4e03d179664bf5ac2206e9e91d kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:02Z" name: sha256:9dea3590288a2f7e58af534af889123e044b0e4e03d179664bf5ac2206e9e91d resourceVersion: "13846" uid: 3bed6e22-edbc-44db-8b1c-41b88432cadc - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a20dc09567a04bdff2ebfaa3d3917f64d7620555e6354d53b43dd7ebb0e0f575 size: 79751689 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6ff043a38df4eb0e1a08336063c455b1d291b3d5e449b30447ea6ddbb53c6019 size: 116748485 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. 
Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.15 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-10-17T22:18:35 com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "8" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.15-8 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: b16c13bab105cc94b77b4618298049fffd555436 vcs-type: git vendor: Red Hat, Inc. 
version: "1.15" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-10-17T22:25:03Z" Id: sha256:15dc0efab317ab7c66332e12636ebf6db08387dc5c8e3f2860fe5e8f39fa6973 Size: 196529781 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:a0a6db2dcdb3d49e36bd0665e3e00f242a690391700e42cab14e86b154152bfd kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:a0a6db2dcdb3d49e36bd0665e3e00f242a690391700e42cab14e86b154152bfd resourceVersion: "14165" uid: fced16ec-331b-4ac2-bd70-ffe48bb75529 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:af063699af1c142fce6707dc9306d122355e61bd23ded0d18f8a4ecfbf3aa89a size: 78847792 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:c81d6d556e6e3a4255dd2709ce18578bfbbf3eed10a4efb966bf99ab69c79e05 size: 9405288 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:59dd25a7c2c2bfb972a6bfb598baa03168f94c7c6baba45e3938b157001c799a size: 10317076 dockerImageManifestMediaType: application/vnd.oci.image.manifest.v1+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/bin/run-httpd Entrypoint: - container-entrypoint Env: - container=oci - VERSION=10 - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el10 - HTTPD_VERSION=2.4 - HTTPD_SHORT_VERSION=24 - NAME=httpd - ARCH=x86_64 - SUMMARY=Platform for running Apache httpd 2.4 or building httpd-based application - DESCRIPTION=Apache httpd 2.4 available as container, is a powerful, efficient, and extensible web server. Apache supports a variety of features, many implemented as compiled modules which extend the core functionality. These can range from server-side programming language support to authentication schemes. Virtual hosting allows one Apache installation to serve many different Web sites. - HTTPD_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/httpd/ - HTTPD_APP_ROOT=/opt/app-root - HTTPD_CONFIGURATION_PATH=/opt/app-root/etc/httpd.d - HTTPD_MAIN_CONF_PATH=/etc/httpd/conf - HTTPD_MAIN_CONF_MODULES_D_PATH=/etc/httpd/conf.modules.d - HTTPD_MAIN_CONF_D_PATH=/etc/httpd/conf.d - HTTPD_TLS_CERT_PATH=/etc/httpd/tls - HTTPD_VAR_RUN=/var/run/httpd - HTTPD_DATA_PATH=/var/www - HTTPD_DATA_ORIG_PATH=/var/www - HTTPD_LOG_PATH=/var/log/httpd ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-16T00:10:06Z" com.redhat.component: httpd-24-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/o:redhat:enterprise_linux:10.0 description: Apache httpd 2.4 available as container, is a powerful, efficient, and extensible web server. Apache supports a variety of features, many implemented as compiled modules which extend the core functionality. These can range from server-side programming language support to authentication schemes. Virtual hosting allows one Apache installation to serve many different Web sites. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Apache httpd 2.4 available as container, is a powerful, efficient, and extensible web server. 
Apache supports a variety of features, many implemented as compiled modules which extend the core functionality. These can range from server-side programming language support to authentication schemes. Virtual hosting allows one Apache installation to serve many different Web sites. io.k8s.display-name: Apache httpd 2.4 io.openshift.expose-services: 8080:http,8443:https io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,httpd,httpd-24 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi10/httpd-24 org.opencontainers.image.revision: 449b2e89f3530eb9c28bedbaa094dc0fad934fc5 release: "1760573367" summary: Platform for running Apache httpd 2.4 or building httpd-based application url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/httpd-container.git --context-dir=examples/sample-test-app/ ubi10/httpd-24 sample-server vcs-ref: 449b2e89f3530eb9c28bedbaa094dc0fad934fc5 vcs-type: git vendor: Red Hat, Inc. version: "10.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-16T00:10:14Z" Id: sha256:f3b8e2913562adc779d57d8404ac62844571fbea11c93544ec50d7906429e362 Size: 98586659 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi10/httpd-24@sha256:a1f3f9545a7657a88c4bddbc00f4df862b7658f247e195704b6d9f8a0249c9fa kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:a1f3f9545a7657a88c4bddbc00f4df862b7658f247e195704b6d9f8a0249c9fa resourceVersion: "13584" uid: ae481df1-77ed-4c22-836f-881bf7d76399 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:172ffdf04fe0ffd5a0117272da0333271a3c558dbeaf357e7412638d99a1e462 size: 154305621 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9b71c3290efcf757caf7bdafce1c7203a98ff3556a9e4141b456c5914839cb07 size: 38715659 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - NODEJS_VER=20 - PHP_VERSION=7.4 - PHP_VER_SHORT=74 - NAME=php - SUMMARY=Platform for building and running PHP 7.4 applications - DESCRIPTION=PHP 7.4 available as container is a base platform for building and running various PHP 7.4 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. 
- PHP_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/php/ - APP_DATA=/opt/app-root/src - PHP_DEFAULT_INCLUDE_PATH=/usr/share/pear - PHP_SYSCONF_PATH=/etc - PHP_HTTPD_CONF_FILE=php.conf - HTTPD_CONFIGURATION_PATH=/opt/app-root/etc/conf.d - HTTPD_MAIN_CONF_PATH=/etc/httpd/conf - HTTPD_MAIN_CONF_D_PATH=/etc/httpd/conf.d - HTTPD_MODULES_CONF_D_PATH=/etc/httpd/conf.modules.d - HTTPD_VAR_RUN=/var/run/httpd - HTTPD_DATA_PATH=/var/www - HTTPD_DATA_ORIG_PATH=/var/www - HTTPD_VAR_PATH=/var ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-30T16:32:28Z" com.redhat.component: php-74-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: PHP 7.4 available as container is a base platform for building and running various PHP 7.4 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-php-container io.buildah.version: 1.41.4 io.k8s.description: PHP 7.4 available as container is a base platform for building and running various PHP 7.4 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. io.k8s.display-name: Apache 2.4 with PHP 7.4 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,php,php74,php-74 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/php-74 org.opencontainers.image.revision: 430df8993713269f2565e87e177403da7a38aea7 release: "1761841890" summary: Platform for building and running PHP 7.4 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-php-container.git --context-dir=7.4/test/test-app ubi8/php-74 sample-server vcs-ref: 430df8993713269f2565e87e177403da7a38aea7 vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-30T16:32:43Z" Id: sha256:f3770d7717e39e6d29572469c3ffcff31832649a905cc7347243ca102890af27 Size: 288584302 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/php-74@sha256:a3ca570ae1293a2d88bd995b972f67a784a416cd9916a3d2bef7b38e23b88df3 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:a3ca570ae1293a2d88bd995b972f67a784a416cd9916a3d2bef7b38e23b88df3 resourceVersion: "14078" uid: 00420108-d6e4-4b70-88fd-a833c793ebf3 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f0eb64f6b4304af6cc936a29678a7b7703a81003803268163e95597bbd6aa023 size: 46988995 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-postgresql Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - POSTGRESQL_VERSION=15 - POSTGRESQL_PREV_VERSION=13 - HOME=/var/lib/pgsql - PGUSER=postgres - APP_DATA=/opt/app-root - SUMMARY=PostgreSQL is an advanced Object-Relational database management system - DESCRIPTION=PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/postgresql - ENABLED_COLLECTIONS= ExposedPorts: 5432/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T16:13:57Z" com.redhat.component: postgresql-15-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. 
io.k8s.display-name: PostgreSQL 15 io.openshift.expose-services: 5432:postgresql io.openshift.s2i.assemble-user: "26" io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,postgresql,postgresql15,postgresql-15 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel9/postgresql-15 org.opencontainers.image.revision: c6a2ac2bc62d533e26f617990996bf2e1cbf83d0 release: "1760372009" summary: PostgreSQL is an advanced Object-Relational database management system url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d --name postgresql_database -e POSTGRESQL_USER=user -e POSTGRESQL_PASSWORD=pass -e POSTGRESQL_DATABASE=db -p 5432:5432 rhel9/postgresql-15 vcs-ref: c6a2ac2bc62d533e26f617990996bf2e1cbf83d0 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "26" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T16:14:11Z" Id: sha256:b861c547e3bac8eb09c55dd74c4b91a3d83364cd101b057d35bf3b45c04cb3a4 Size: 144088905 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel9/postgresql-15@sha256:a4c6773a5eb5183f6dad3b029aa2cc4b6715797985dc53a3faf972007e7ad4d3 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:a4c6773a5eb5183f6dad3b029aa2cc4b6715797985dc53a3faf972007e7ad4d3 resourceVersion: "14115" uid: af85429e-2044-4c84-a5ed-7d86c3b5d2ed - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7ef1e3a3a6838c7a9be47c4fdc5a8b177583baa77397397e76933831c0379d45 size: 132132261 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d6210f4b9831987f0f0ee1b4d658e8745e16f3185aeb15918185be6e5e30e813 size: 5397755 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - usage Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_HTTPS=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.17 - LANG=C.utf8 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift-rhel8 - FUSE_KARAF_IMAGE_VERSION=1.12 - JOLOKIA_VERSION=1.7.2.redhat-00002 - 
KARAF_FRAMEWORK_VERSION=4.4.3.fuse-7_12_1-00009-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.18.0.redhat-00001 - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Labels: architecture: x86_64 build-date: 2024-05-23T17:52:21 com.redhat.component: fuse-karaf-openshift-rhel-8-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.fabric8.s2i.version.jolokia: 1.7.2.redhat-00002 io.fabric8.s2i.version.karaf: 4.4.3.fuse-7_12_1-00009-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.18.0.redhat-00001 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "24.1716486067" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift-rhel8/images/1.12-24.1716486067 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: 9b5c4d1d209ef01d3ac0c65027716152267938bd vcs-type: git vendor: Red Hat, Inc. 
version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-05-23T17:55:01Z" Id: sha256:c9d720d89bdd36cd4657a20663839a59e89f88ab2269907c99b839928aa33aab Size: 176878546 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift-rhel8@sha256:a4dcf2213376727c3da1d10efbea52f20c74674bfb06643723f195a849171c10 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:a4dcf2213376727c3da1d10efbea52f20c74674bfb06643723f195a849171c10 resourceVersion: "14023" uid: d51abb6e-117c-46e4-85c1-bd8935cfd3ef - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:00f17e0b37b0515380a4aece3cb72086c0356fc780ef4526f75476bea36a2c8b size: 76243402 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:305d73a95c8fece2b53a34e040df1c97eb6b7f7cc4e0a7933465f0b7325e3d72 size: 1329 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:37de28a834ce4d54511a99e150f4c70ff0754aabff9c9f0b28fdee25deca1ad0 size: 347613312 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/datagrid/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.4.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.4 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.4.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: 9d3d66a8bfcc Image: 7eb94d3c73ed835c09c1b8813a8f6e987fa5fe5529b3ec4b22fb0df3397d9a50 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-01-16T13:46:51.487692 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Red Hat 
JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 3.5.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.4.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.4.GA release: "1.1579182050" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.4-1.1579182050 vcs-ref: c9b538c57579aed85692246a7d8bc2bab8b0be56 vcs-type: git vendor: Red Hat, Inc. version: "1.4" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.4.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.4 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.4.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: 9d3d66a8bfcc Image: sha256:a9d14d8a70648d60c0f677ee633180046573d5ce8d1e897855fee8da99ddac13 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-01-16T13:46:51.487692 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 3.5.0 io.fabric8.s2i.version.jolokia: 
1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.4.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.4.GA release: "1.1579182050" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.4-1.1579182050 vcs-ref: c9b538c57579aed85692246a7d8bc2bab8b0be56 vcs-type: git vendor: Red Hat, Inc. version: "1.4" User: "185" WorkingDir: /home/jboss Created: "2020-01-16T13:54:25Z" DockerVersion: 1.13.1 Id: sha256:19452076ffee7516280344d1889aeffb4d3208391ee4b2ffa937214e0a090f94 Size: 423866036 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:a4e8d81c9e54234f84072295425120e358752d58af8297ecfbae5d4ad01e6a2e kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:02Z" name: sha256:a4e8d81c9e54234f84072295425120e358752d58af8297ecfbae5d4ad01e6a2e resourceVersion: "13895" uid: 32f3c3bc-0386-4373-a81f-7bff34c7d66b - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a9e23b64ace00a199db21d302292b434e9d3956d79319d958ecc19603d00c946 size: 39622437 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:38b71301a1d9df24c98b5a5ee8515404f42c929003ad8b13ab83d2de7de34dec size: 1742 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:1c2420d17c6f75270607383f1a32012b5819be784c002fa6a756b3824a069450 size: 86385743 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.11 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 02d87ca75d31 Image: 1e3b9ba31f7cfb0e7e3e77274b62a3a343bcc69a6e7329fa417b8b66608d60ca Labels: architecture: x86_64 build-date: 2022-03-28T09:59:21.117794 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat 
OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "2.1648459728" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.11-2.1648459728 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 0fb451aea3eff8370b37e5f3d692a298a0bc0499 vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.11 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 02d87ca75d31 Image: sha256:53c2cd8aa6cf62155798343d7f9f6d5e7288e3c1f88e332e6eeb78682793d35a Labels: architecture: x86_64 build-date: 2022-03-28T09:59:21.117794 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "2.1648459728" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.11-2.1648459728 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 0fb451aea3eff8370b37e5f3d692a298a0bc0499 vcs-type: git vendor: Red Hat, Inc. 
version: "1.11" User: "185" WorkingDir: /home/jboss Created: "2022-03-28T10:01:39Z" DockerVersion: 1.13.1 Id: sha256:cc5623b2d45529c4ea9e9fe64dc1c252a12b09410c1e582132f75b108cb56bfa Size: 126014723 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:a8e4081414cfa644e212ded354dfee12706e63afb19a27c0c0ae2c8c64e56ca6 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:a8e4081414cfa644e212ded354dfee12706e63afb19a27c0c0ae2c8c64e56ca6 resourceVersion: "14151" uid: ee1360fe-9f4c-450b-98aa-5d7a39a889b7 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d7c06497d5cebd39c0a4feb14981ec940b5c863e49903d320f630805b049cbff size: 39279912 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ea34baa9442f6f18b965e373ba72ccd388af746f26cd13a57d4b0f3978d70252 size: 75251776 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.14 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-02-07T17:35:05 com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "12.1675788326" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.14-12.1675788326 vcs-ref: 3c231ab5dfb5c3160b156c67544b0a2f09a46c7e vcs-type: git vendor: Red Hat, Inc. 
version: "1.14" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-02-07T17:42:27Z" Id: sha256:67d8a30d80c9ce332312a73ec1cc72ce355ee66bbd300f57cfd8d914fc047022 Size: 114550290 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:aa02a20c2edf83a009746b45a0fd2e0b4a2b224fdef1581046f6afef38c0bee2 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:aa02a20c2edf83a009746b45a0fd2e0b4a2b224fdef1581046f6afef38c0bee2 resourceVersion: "14222" uid: 7df72731-2440-4a30-b660-688bfe476159 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d2bb55755c2edf5423e38bd42c48b277e0cb4c5c765218247001a8c8eb479a87 size: 215199578 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:54d004b54a5584b6a6762c182b47e78caec9c907cc299fb8025d94f7450a1acb size: 25767662 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - NODEJS_VER=20 - PHP_VERSION=8.3 - PHP_VER_SHORT=83 - NAME=php - SUMMARY=Platform for building and running PHP 8.3 applications - DESCRIPTION=PHP 8.3 available as container is a base platform for building and running various PHP 8.3 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. 
- PHP_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/php/ - APP_DATA=/opt/app-root/src - PHP_DEFAULT_INCLUDE_PATH=/usr/share/pear - PHP_SYSCONF_PATH=/etc - PHP_HTTPD_CONF_FILE=php.conf - PHP_FPM_CONF_D_PATH=/etc/php-fpm.d - PHP_FPM_CONF_FILE=www.conf - PHP_FPM_RUN_DIR=/run/php-fpm - PHP_CLEAR_ENV=ON - PHP_MAIN_FPM_CONF_FILE=/etc/php-fpm.conf - PHP_FPM_LOG_PATH=/var/log/php-fpm - HTTPD_CONFIGURATION_PATH=/opt/app-root/etc/conf.d - HTTPD_MAIN_CONF_PATH=/etc/httpd/conf - HTTPD_MAIN_CONF_D_PATH=/etc/httpd/conf.d - HTTPD_MODULES_CONF_D_PATH=/etc/httpd/conf.modules.d - HTTPD_VAR_RUN=/var/run/httpd - HTTPD_DATA_PATH=/var/www - HTTPD_DATA_ORIG_PATH=/var/www - HTTPD_VAR_PATH=/var ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T20:08:14Z" com.redhat.component: php-83-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: PHP 8.3 available as container is a base platform for building and running various PHP 8.3 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-php-container io.buildah.version: 1.41.4 io.k8s.description: PHP 8.3 available as container is a base platform for building and running various PHP 8.3 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. io.k8s.display-name: Apache 2.4 with PHP 8.3 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,php,php83,php-83 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/php-83 org.opencontainers.image.revision: ff65cb718897c8cbf24d68ffe1e50cf086618888 release: "1760386068" summary: Platform for building and running PHP 8.3 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-php-container.git --context-dir=8.3/test/test-app ubi9/php-83 sample-server vcs-ref: ff65cb718897c8cbf24d68ffe1e50cf086618888 vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T20:08:26Z" Id: sha256:4ca50a412a6c1eadd443bf5ee00cca9c5a9750f4d4cc2c0b5221c8dbd61c1813 Size: 338073105 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/php-83@sha256:aabd8f354cad901a56c029dbd62483262f4c435a329882ba5c3f7041c8cc04f8 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:aabd8f354cad901a56c029dbd62483262f4c435a329882ba5c3f7041c8cc04f8 resourceVersion: "14084" uid: 36de5d4e-6134-42ad-a084-82d3a9283898 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:172ffdf04fe0ffd5a0117272da0333271a3c558dbeaf357e7412638d99a1e462 size: 154305621 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e8ae870cf5fcc3cd0cba15ea38346df88f604a80291c11db01739e63b9ccc818 size: 19569683 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - NODEJS_VER=20 - PERL_VERSION=5.26 - PERL_SHORT_VER=526 - NAME=perl - SUMMARY=Platform for building and running Perl 5.26 applications - DESCRIPTION=Perl 5.26 available as container is a base platform for building and running various Perl 5.26 applications and frameworks. Perl is a high-level programming language with roots in C, sed, awk and shell scripting. Perl is good at handling processes and files, and is especially good at handling text. Perl's hallmarks are practicality and efficiency. While it is used to do a lot of different things, Perl's most common applications are system administration utilities and web programming. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-30T16:32:44Z" com.redhat.component: perl-526-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Perl 5.26 available as container is a base platform for building and running various Perl 5.26 applications and frameworks. Perl is a high-level programming language with roots in C, sed, awk and shell scripting. Perl is good at handling processes and files, and is especially good at handling text. Perl's hallmarks are practicality and efficiency. While it is used to do a lot of different things, Perl's most common applications are system administration utilities and web programming. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-perl-container io.buildah.version: 1.41.4 io.k8s.description: Perl 5.26 available as container is a base platform for building and running various Perl 5.26 applications and frameworks. 
Perl is a high-level programming language with roots in C, sed, awk and shell scripting. Perl is good at handling processes and files, and is especially good at handling text. Perl's hallmarks are practicality and efficiency. While it is used to do a lot of different things, Perl's most common applications are system administration utilities and web programming. io.k8s.display-name: Apache 2.4 with mod_fcgid and Perl 5.26 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,perl,perl526,perl-526 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/perl-526 org.opencontainers.image.revision: 64887b602551cd69e73f7bb2ec62867ee1860c3d release: "1761841872" summary: Platform for building and running Perl 5.26 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi8/perl-526:latest vcs-ref: 64887b602551cd69e73f7bb2ec62867ee1860c3d vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-30T16:33:04Z" Id: sha256:58a046cf8359a65ef865458801bb5894f3140b951e0095fbb78e06ea795ed2d5 Size: 269436903 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/perl-526@sha256:abf2ce8a3db39549eb583fd38851e51a1d34c3448225d0cf1aede35e5e0cccfd kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:abf2ce8a3db39549eb583fd38851e51a1d34c3448225d0cf1aede35e5e0cccfd resourceVersion: "14012" uid: f2c63986-c86c-4b6b-9fe1-9288c71e3494 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ff7f9283a88b236942df38c7ee59d7ff36fff7c14ad453263361be85c2f83924 size: 51636332 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-mysqld Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - MYSQL_VERSION=10.5 - MYSQL_SHORT_VERSION=105 - VERSION=10.5 - APP_DATA=/opt/app-root/src - HOME=/var/lib/mysql - NAME=mariadb - SUMMARY=MariaDB 10.5 SQL database server - DESCRIPTION=MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. 
- CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/mysql - MYSQL_PREFIX=/usr ExposedPorts: 3306/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-14T05:46:43Z" com.redhat.component: mariadb-105-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. io.k8s.display-name: MariaDB 10.5 io.openshift.expose-services: 3306:mysql io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,mysql,mariadb,mariadb105,mariadb-105 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel9/mariadb-105 org.opencontainers.image.revision: da3a493ec7749f7b71dafb4985b134df39a6550b release: "1760420779" summary: MariaDB 10.5 SQL database server url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d -e MYSQL_USER=user -e MYSQL_PASSWORD=pass -e MYSQL_DATABASE=db -p 3306:3306 rhel9/mariadb-105 vcs-ref: da3a493ec7749f7b71dafb4985b134df39a6550b vcs-type: git vendor: Red Hat, Inc. version: "1" User: "27" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-14T05:46:50Z" Id: sha256:214233ff6a1c53648e90d99cf1de3973c486c2f461e61da35dd3ac61df8a738f Size: 148736029 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel9/mariadb-105@sha256:acc20d8b3eb50bef50abda0e44d46d353ba28be950b1685cc8053c977b31eaf6 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:02Z" name: sha256:acc20d8b3eb50bef50abda0e44d46d353ba28be950b1685cc8053c977b31eaf6 resourceVersion: "13851" uid: 4e33f54a-e28c-4224-b81f-ae19257725b1 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c85ac87d44df4b64d7c273886fc5aed55a28422df33dcb641884ffa419db218 size: 76240885 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:51e9f237b750efcda2d5755785cdb8dd080d51585ae35d368e4f9b29a11b1994 size: 1329 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5ccfe6e48f4f71d761b34c61586ac1808cca10bf7e543a3666b802f38625c5a9 size: 4013312 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35875365be086462ee5d275b62cfc13046029a9a084880c18583b932a5b23632 size: 85346475 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:95ba73f94fc8f4c85e9b37f9b95f4f15fda80446024f985171f454b03e194462 size: 15177610 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - 
PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.5 - JOLOKIA_VERSION=1.6.2.redhat-00002 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages - JAVACONFDIRS=/opt/rh/rh-maven35/root/etc/java - XDG_CONFIG_DIRS=/opt/rh/rh-maven35/root/etc/xdg:/etc/xdg - XDG_DATA_DIRS=/opt/rh/rh-maven35/root/usr/share:/usr/local/share:/usr/share ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 534540aef153 Image: 5f54774f24df8f7779413d55677257eb8d1f2c57e1ddbd52056bb5d13db9a8b3 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-01-27T12:37:34.107857 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "19.1580118028" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.5-19.1580118028 vcs-ref: 26b81f6597d0da63d9f8d1bc0d8f4cd10566a149 vcs-type: git vendor: Red Hat, Inc. 
version: "1.5" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.5 - JOLOKIA_VERSION=1.6.2.redhat-00002 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages - JAVACONFDIRS=/opt/rh/rh-maven35/root/etc/java - XDG_CONFIG_DIRS=/opt/rh/rh-maven35/root/etc/xdg:/etc/xdg - XDG_DATA_DIRS=/opt/rh/rh-maven35/root/usr/share:/usr/local/share:/usr/share ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 534540aef153 Image: sha256:991a18ae1ad710759516c2ab8d5e2f09b20b442cb0ad47ea82db9d1b46455987 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-01-27T12:37:34.107857 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "19.1580118028" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.5-19.1580118028 vcs-ref: 26b81f6597d0da63d9f8d1bc0d8f4cd10566a149 vcs-type: git vendor: Red Hat, Inc. 
version: "1.5" User: "185" WorkingDir: /home/jboss Created: "2020-01-27T12:39:42Z" DockerVersion: 1.13.1 Id: sha256:8af4fe9149f32a43d83ebf4f38a44447324e8bf9509bcffb9c6540c19c5af26d Size: 180786295 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift@sha256:adc5b484c12f915309a95acb71890e4a1a8148d5dadd6cc22d0794cdab81557b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:adc5b484c12f915309a95acb71890e4a1a8148d5dadd6cc22d0794cdab81557b resourceVersion: "14050" uid: 7c1e6225-b5e2-4f1b-821f-c12d1a208467 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:59f5134b8f53d5010d7921666aac69cac4622f8da09fe053bd167d1d0245b1e6 size: 69305150 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - NAME=nginx - NGINX_VERSION=1.22 - NGINX_SHORT_VER=122 - VERSION=0 - SUMMARY=Platform for running nginx 1.22 or building nginx-based application - DESCRIPTION=Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.22 daemon. The image can be used as a base image for other applications based on nginx 1.22 web server. Nginx server image can be extended using source-to-image tool. - NGINX_CONFIGURATION_PATH=/opt/app-root/etc/nginx.d - NGINX_CONF_PATH=/etc/nginx/nginx.conf - NGINX_DEFAULT_CONF_PATH=/opt/app-root/etc/nginx.default.d - NGINX_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/nginx - NGINX_APP_ROOT=/opt/app-root - NGINX_LOG_PATH=/var/log/nginx - NGINX_PERL_MODULE_PATH=/opt/app-root/etc/perl ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T20:13:40Z" com.redhat.component: nginx-122-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.22 daemon. The image can be used as a base image for other applications based on nginx 1.22 web server. Nginx server image can be extended using source-to-image tool. 
distribution-scope: public help: For more information visit https://github.com/sclorg/nginx-container io.buildah.version: 1.41.4 io.k8s.description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.22 daemon. The image can be used as a base image for other applications based on nginx 1.22 web server. Nginx server image can be extended using source-to-image tool. io.k8s.display-name: Nginx 1.22 io.openshift.expose-services: 8443:https io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nginx,nginx-122 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/nginx-122 org.opencontainers.image.revision: 59f8de1161ae9544103554406bee3514cad28e67 release: "1761077586" summary: Platform for running nginx 1.22 or building nginx-based application url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi8/nginx-122:latest vcs-ref: 59f8de1161ae9544103554406bee3514cad28e67 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-21T20:13:59Z" Id: sha256:eab4b30b2ffa298e34fba65ad21d6c5088a8e3712036e47f9c9d58aa4fcb4441 Size: 164864869 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/nginx-122@sha256:ade3b81041a336ef1a37f3c52f85fc0b92bd62b76f304a86d262770110e3fbab kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:ade3b81041a336ef1a37f3c52f85fc0b92bd62b76f304a86d262770110e3fbab resourceVersion: "14056" uid: 370a2e2b-9302-4661-b3e6-cc68a75ead42 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c43687042a41aad69fc526985ef2b82012c011db7e0e26faba4fc860ad32d88e size: 75837780 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2b7b014ba1b80abb29391141385bd32668571313647317d1d64d8b5cebb1f228 size: 1331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5dcc89faa7e0fc98d4ed9da03b2e3498f33372bace47228179ed794bc10ed474 size: 273870240 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/datagrid/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - 
JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_DATAGRID_VERSION=7.3.1.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.1 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - JOLOKIA_VERSION=1.5.0 - LAUNCH_JBOSS_IN_BACKGROUND=true - MAVEN_VERSION=3.5 - PRODUCT_VERSION=7.3.1.GA - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: 8ea581222c6a Image: f68aa65ca7049d78d71065e100f917db5ba6547f5bb153c50eadd1db2813ea52 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-06-28T16:19:09.007404 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 2.2.7 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.concrt.version: 2.2.7 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.1.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.3.1.GA release: "1.1561730992" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.1-1.1561730992 vcs-ref: 6d070f0036864b7609858c5bb9549f4d7c78d827 vcs-type: git vendor: Red Hat, Inc. 
version: "1.1" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_DATAGRID_VERSION=7.3.1.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.1 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - JOLOKIA_VERSION=1.5.0 - LAUNCH_JBOSS_IN_BACKGROUND=true - MAVEN_VERSION=3.5 - PRODUCT_VERSION=7.3.1.GA - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: 8ea581222c6a Image: sha256:b2a19f9545b2b0b3129158e46b3cdbfc293ad8546ed037ab87d11f79e8c4ec6e Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-06-28T16:19:09.007404 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 2.2.7 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.concrt.version: 2.2.7 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.1.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.3.1.GA release: "1.1561730992" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.1-1.1561730992 vcs-ref: 6d070f0036864b7609858c5bb9549f4d7c78d827 vcs-type: git vendor: Red Hat, Inc. 
version: "1.1" User: "185" WorkingDir: /home/jboss Created: "2019-06-28T16:24:24Z" DockerVersion: 1.13.1 Id: sha256:611247135e7a777add961048b86506b83c6794ccaf096ae8e39b4440178af485 Size: 349717412 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:ae7c07fccaaec3ad4a83a2309893b03e94010b2d046de8c38e3d5af45366f84c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:02Z" name: sha256:ae7c07fccaaec3ad4a83a2309893b03e94010b2d046de8c38e3d5af45366f84c resourceVersion: "13886" uid: ec5f5192-a8fb-4f15-9ebf-1f03c7234733 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:af063699af1c142fce6707dc9306d122355e61bd23ded0d18f8a4ecfbf3aa89a size: 78847792 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:c81d6d556e6e3a4255dd2709ce18578bfbbf3eed10a4efb966bf99ab69c79e05 size: 9405288 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:d9e405c2a1a004feffa67c8e53dc0965b77b9c67d91626366ee9e53dd24e3de4 size: 199332407 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:7a55cac5354fba5a1f03b73b3034cf29119ae01c24d39e990271dcfbba9988e8 size: 27645571 dockerImageManifestMediaType: application/vnd.oci.image.manifest.v1+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - VERSION=10 - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el10 - NODEJS_VER=22 - PERL_VERSION=5.40 - PERL_SHORT_VER=540 - NAME=perl - SUMMARY=Platform for building and running Perl 5.40 applications - DESCRIPTION=Perl 5.40 available as container is a base platform for building and running various Perl 5.40 applications and frameworks. Perl is a high-level programming language with roots in C, sed, awk and shell scripting. Perl is good at handling processes and files, and is especially good at handling text. Perl's hallmarks are practicality and efficiency. While it is used to do a lot of different things, Perl's most common applications are system administration utilities and web programming. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-29T04:09:14Z" com.redhat.component: perl-540-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/o:redhat:enterprise_linux:10.0 description: Perl 5.40 available as container is a base platform for building and running various Perl 5.40 applications and frameworks. Perl is a high-level programming language with roots in C, sed, awk and shell scripting. Perl is good at handling processes and files, and is especially good at handling text. Perl's hallmarks are practicality and efficiency. While it is used to do a lot of different things, Perl's most common applications are system administration utilities and web programming. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-perl-container io.buildah.version: 1.41.4 io.k8s.description: Perl 5.40 available as container is a base platform for building and running various Perl 5.40 applications and frameworks. 
Perl is a high-level programming language with roots in C, sed, awk and shell scripting. Perl is good at handling processes and files, and is especially good at handling text. Perl's hallmarks are practicality and efficiency. While it is used to do a lot of different things, Perl's most common applications are system administration utilities and web programming. io.k8s.display-name: Apache 2.4 with mod_fcgid and Perl 5.40 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,perl,perl540,perl-540 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi10/perl-540 org.opencontainers.image.revision: 7d7b85e40a84102741c1b0a2a39e850e64c82377 release: "1761710868" summary: Platform for building and running Perl 5.40 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi10/perl-540 vcs-ref: 7d7b85e40a84102741c1b0a2a39e850e64c82377 vcs-type: git vendor: Red Hat, Inc. version: "10.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-29T04:09:21Z" Id: sha256:51774f0232204c84d437a2dc118f8021a5c917d49efd196372a749b65cc09106 Size: 315250419 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi10/perl-540@sha256:af6079ab6f381f2f8eb7175c9bacb93d0c72ff022e97f28520c97b1633b109e2 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:af6079ab6f381f2f8eb7175c9bacb93d0c72ff022e97f28520c97b1633b109e2 resourceVersion: "14014" uid: 725cfa36-aa71-4a1c-a1d1-afc4035d2d46 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:bbdea0ea4cfcb830755d69ff087eb173b3cd6ce711fc4266094beb10942aa814 size: 7511919 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-redis Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - REDIS_VERSION=6 - HOME=/var/lib/redis - SUMMARY=Redis in-memory data structure store, used as database, cache and message broker - DESCRIPTION=Redis 6 available as container, is an advanced key-value store. It is often referred to as a data structure server since keys can contain strings, hashes, lists, sets and sorted sets. You can run atomic operations on these types, like appending to a string; incrementing the value in a hash; pushing to a list; computing set intersection, union and difference; or getting the member with highest ranking in a sorted set. In order to achieve its outstanding performance, Redis works with an in-memory dataset. Depending on your use case, you can persist it either by dumping the dataset to disk every once in a while, or by appending each command to a log. 
- CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/redis - REDIS_PREFIX=/usr - REDIS_CONF=/etc/redis.conf ExposedPorts: 6379/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-29T20:21:51Z" com.redhat.component: redis-6-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Redis 6 available as container, is an advanced key-value store. It is often referred to as a data structure server since keys can contain strings, hashes, lists, sets and sorted sets. You can run atomic operations on these types, like appending to a string; incrementing the value in a hash; pushing to a list; computing set intersection, union and difference; or getting the member with highest ranking in a sorted set. In order to achieve its outstanding performance, Redis works with an in-memory dataset. Depending on your use case, you can persist it either by dumping the dataset to disk every once in a while, or by appending each command to a log. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Redis 6 available as container, is an advanced key-value store. It is often referred to as a data structure server since keys can contain strings, hashes, lists, sets and sorted sets. You can run atomic operations on these types, like appending to a string; incrementing the value in a hash; pushing to a list; computing set intersection, union and difference; or getting the member with highest ranking in a sorted set. In order to achieve its outstanding performance, Redis works with an in-memory dataset. Depending on your use case, you can persist it either by dumping the dataset to disk every once in a while, or by appending each command to a log. io.k8s.display-name: Redis 6 io.openshift.expose-services: 6379:redis io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,redis,redis6,redis-6 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel8/redis-6 org.opencontainers.image.revision: 80c0f0e5463c2391b7f40881d425988852ba24af release: "1761769259" summary: Redis in-memory data structure store, used as database, cache and message broker url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d --name redis_database -p 6379:6379 rhel8/redis-6 vcs-ref: 80c0f0e5463c2391b7f40881d425988852ba24af vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" Volumes: /var/lib/redis/data: {} WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-29T20:21:58Z" Id: sha256:540836c38bf0d9bee27b45bb7e26784cb5ccea590191ede24b07ea46a6fa3256 Size: 103070929 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel8/redis-6@sha256:af676d250bb4ced265fece19dbb847133717e18341777df3a57550f53f6207cb kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:af676d250bb4ced265fece19dbb847133717e18341777df3a57550f53f6207cb resourceVersion: "14070" uid: 21bfeb15-362c-46c3-ba5b-5d13ac999927 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:751bf1af528874dba437014af54078013e46b2eca91e82aab200d452b0165af9 size: 76529550 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:71bd5e95c80acea5839e4c515a585f43158bffd718c2be1795b4825b043975a3 size: 1565 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e702d958f3035edf60c80c93b45f1a170c2160ae9a580d59267584434c1ae1a2 size: 114191748 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.11 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 1c8f39c8f8b8 Image: e4929ae9a5fbdb97b858381fba014fcb191cf1723a49766f613f834e7f121f31 Labels: architecture: x86_64 build-date: 2022-03-28T13:32:20.449707 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java 
applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: openjdk/openjdk-11-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1648472919" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.11-1.1648472919 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: a224a5bd86d644ba003811652b2ace5d3fc217aa vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.11 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 1c8f39c8f8b8 Image: sha256:f699d571b436296fdb1b633cec0b4061b1782920263560dbfe8d653ff1f8fa7d Labels: architecture: x86_64 build-date: 2022-03-28T13:32:20.449707 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: openjdk/openjdk-11-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: 
"1.1648472919" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.11-1.1648472919 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: a224a5bd86d644ba003811652b2ace5d3fc217aa vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss Created: "2022-03-28T13:36:44Z" DockerVersion: 1.13.1 Id: sha256:016ab3c8b22634c1850dd1dd0edb10113e0a08de60126436768f03f4f0fffcc0 Size: 190730265 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:af9c08644ca057d83ef4b7d8de1489f01c5a52ff8670133b8a09162831b7fb34 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:af9c08644ca057d83ef4b7d8de1489f01c5a52ff8670133b8a09162831b7fb34 resourceVersion: "14100" uid: 87da1a13-79ff-4530-a66a-2f95be7ce08e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:71104d043b69aa31878ef66c6d1f40dbbd9f37c12bdec9e915d592a38c07903d size: 76550480 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:fde28b70fde89c673900ca961a8c0abf6a0e6c405de070ed53aba447d6678093 size: 1565 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ad3a5abe8fcd7caca242109ea38318432cbaff8fb6b8e3cd1962d631c7b1e9be size: 130952044 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.12 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 57b2f60fd930 Image: ecfd0fb8d1633ddd2d2d1149949d32265f14f0b9674fbcf25dc55b0a5afb77bf Labels: architecture: x86_64 build-date: 2022-08-24T22:41:49.577295 com.redhat.build-host: 
cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: openjdk/openjdk-11-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1661378019" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.12-1.1661378019 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: ca8b57a053f33ff9aa8c3ba9e045a18f62d9a21d vcs-type: git vendor: Red Hat, Inc. version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.12 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 57b2f60fd930 Image: sha256:a361be97d9ff5e76f35f122a97cfbb500e5789603df2b3758a107cbb37519442 Labels: architecture: x86_64 build-date: 2022-08-24T22:41:49.577295 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and 
running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: openjdk/openjdk-11-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1661378019" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.12-1.1661378019 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: ca8b57a053f33ff9aa8c3ba9e045a18f62d9a21d vcs-type: git vendor: Red Hat, Inc. version: "1.12" User: "185" WorkingDir: /home/jboss Created: "2022-08-24T22:47:13Z" DockerVersion: 1.13.1 Id: sha256:a338d26dd42dd15057389d95bb79f53dc9a51880e38a8237ce6b6b680c85fd3c Size: 207511491 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/openjdk/openjdk-11-rhel7@sha256:b053401886c06581d3c296855525cc13e0613100a596ed007bb69d5f8e972346 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:b053401886c06581d3c296855525cc13e0613100a596ed007bb69d5f8e972346 resourceVersion: "14101" uid: d1ec9b2e-83c5-4a70-acec-c69083610127 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:06f86e50a0b74ff9eb161a7d781228877c90e8ff57e9689e8cb8b0f092a2a9f9 size: 39268171 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d4bfe8be4e0654a9ecfb806061b5a762d86293abd82aa45c817cb070ddb5e085 size: 90090661 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.15 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-04-21T06:46:01 com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 4.3.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "1.1682059521" summary: Image for Red Hat 
OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.15-1.1682059521 vcs-ref: 0fbf133aeed4118c485646cc3101c8b3df8c301d vcs-type: git vendor: Red Hat, Inc. version: "1.15" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-04-21T06:58:19Z" Id: sha256:cb4d904bda259d7a9bc535393ce1a24ce92ce04a61cf11723ea85cfbf58e27d9 Size: 129377201 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:b163564be6ed5b80816e61a4ee31e42f42dbbf345253daac10ecc9fadf31baa3 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:b163564be6ed5b80816e61a4ee31e42f42dbbf345253daac10ecc9fadf31baa3 resourceVersion: "14192" uid: b6e6b291-0209-471e-9c56-ae8b05771378 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c253a5224a10e39430451b18c897d0006e39de12e5730a7f76402fa6e252a839 size: 72342076 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/bin/run-httpd Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - HTTPD_VERSION=2.4 - SUMMARY=Platform for running Apache httpd 2.4 or building httpd-based application - DESCRIPTION=Apache httpd 2.4 available as container, is a powerful, efficient, and extensible web server. Apache supports a variety of features, many implemented as compiled modules which extend the core functionality. These can range from server-side programming language support to authentication schemes. Virtual hosting allows one Apache installation to serve many different Web sites. - HTTPD_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/httpd/ - HTTPD_APP_ROOT=/opt/app-root - HTTPD_CONFIGURATION_PATH=/opt/app-root/etc/httpd.d - HTTPD_MAIN_CONF_PATH=/etc/httpd/conf - HTTPD_MAIN_CONF_MODULES_D_PATH=/etc/httpd/conf.modules.d - HTTPD_MAIN_CONF_D_PATH=/etc/httpd/conf.d - HTTPD_TLS_CERT_PATH=/etc/httpd/tls - HTTPD_VAR_RUN=/var/run/httpd - HTTPD_DATA_PATH=/var/www - HTTPD_DATA_ORIG_PATH=/var/www - HTTPD_LOG_PATH=/var/log/httpd ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T20:13:41Z" com.redhat.component: httpd-24-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Apache httpd 2.4 available as container, is a powerful, efficient, and extensible web server. Apache supports a variety of features, many implemented as compiled modules which extend the core functionality. These can range from server-side programming language support to authentication schemes. 
Virtual hosting allows one Apache installation to serve many different Web sites. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Apache httpd 2.4 available as container, is a powerful, efficient, and extensible web server. Apache supports a variety of features, many implemented as compiled modules which extend the core functionality. These can range from server-side programming language support to authentication schemes. Virtual hosting allows one Apache installation to serve many different Web sites. io.k8s.display-name: Apache httpd 2.4 io.openshift.expose-services: 8080:http,8443:https io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,httpd,httpd-24 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel8/httpd-24 org.opencontainers.image.revision: f57227a4ba2b888b2f3cb78ff9c0a1f9650ed11a release: "1761077590" summary: Platform for running Apache httpd 2.4 or building httpd-based application url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/httpd-container.git --context-dir=examples/sample-test-app/ rhel8/httpd-24 sample-server vcs-ref: f57227a4ba2b888b2f3cb78ff9c0a1f9650ed11a vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-21T20:13:57Z" Id: sha256:c2f9d24c33cf2adf540b309202af42f8b7f5631abbb56edbbe4533eef37313ae Size: 167900505 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel8/httpd-24@sha256:b25e2af772c13ae5ff3339bc4bbdf52c49011e750f40b37a0b736cb82768a349 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:b25e2af772c13ae5ff3339bc4bbdf52c49011e750f40b37a0b736cb82768a349 resourceVersion: "13579" uid: f8686563-e875-4328-964e-1eb1909f8321 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:71104d043b69aa31878ef66c6d1f40dbbd9f37c12bdec9e915d592a38c07903d size: 76550480 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:fde28b70fde89c673900ca961a8c0abf6a0e6c405de070ed53aba447d6678093 size: 1565 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3b35f0ac7e52b492dabd9f9844ea2b5437163636fa07884dfc6ac4dfce4ae715 size: 128628837 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - 
JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.12 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 57b2f60fd930 Image: 3b64936c5fae0217c844f01b11192af76cc16ce72fc9a8b9cec7606beb2fc986 Labels: architecture: x86_64 build-date: 2022-08-24T22:40:56.439104 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1661378017" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.12-1.1661378017 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 023752341349e641b29886492a623cc8af26b7a7 vcs-type: git vendor: Red Hat, Inc. 
version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.12 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 57b2f60fd930 Image: sha256:9d606ca24547a4b4bc19bc738b94db15ca6739c9be9e617db37d2636e5eb07de Labels: architecture: x86_64 build-date: 2022-08-24T22:40:56.439104 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1661378017" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.12-1.1661378017 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 023752341349e641b29886492a623cc8af26b7a7 vcs-type: git vendor: Red Hat, Inc. 
version: "1.12" User: "185" WorkingDir: /home/jboss Created: "2022-08-24T22:45:36Z" DockerVersion: 1.13.1 Id: sha256:b412ff26e5487d30ee0d188ee5dc3c748a635a378edcea4c834d0535d78d9b6c Size: 205188424 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:b4cb02a4e7cb915b6890d592ed5b4ab67bcef19bf855029c95231f51dd071352 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:b4cb02a4e7cb915b6890d592ed5b4ab67bcef19bf855029c95231f51dd071352 resourceVersion: "14163" uid: f15648c0-6467-4dec-ab74-722969ad7e9a - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a06cfa6e5ed77521218eaa75d023f86e156295cb20de1bda73e67b69c667962c size: 77840791 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a8ed13ed8366237556c3d4c3e812ce913366275ddc02b9daa458e7bb3aea46a4 size: 112317807 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - DEFAULT_ADMIN_USERNAME=eapadmin - HTTPS_ENABLE_HTTP2=true - JBOSS_HOME=/opt/eap - JBOSS_IMAGE_NAME=jboss-eap-7/eap74-openjdk11-runtime-openshift-rhel8 - JBOSS_IMAGE_VERSION=7.4.23 - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api - LAUNCH_JBOSS_IN_BACKGROUND=true - SSO_FORCE_LEGACY_SECURITY=true ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2025-07-28T22:21:45 com.redhat.component: jboss-eap-74-openjdk11-runtime-openshift-rhel8-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.license_terms: https://www.redhat.com/agreements description: The JBoss EAP 7.4 OpenJDK 11 runtime image distribution-scope: public io.buildah.version: 1.33.12 io.cekit.version: 4.12.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.k8s.description: Base image to run an EAP server and application io.k8s.display-name: JBoss EAP runtime image io.openshift.expose-services: 8080:http io.openshift.tags: javaee,eap,eap7 maintainer: Red Hat name: jboss-eap-7/eap74-openjdk11-runtime-openshift-rhel8 org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "5.1753741009" summary: The JBoss EAP 7.4 OpenJDK 11 runtime image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-eap-7/eap74-openjdk11-runtime-openshift-rhel8/images/7.4.23-5.1753741009 vcs-ref: 01ae679e0f6a42f995f880902de2d245eae7d26f vcs-type: git vendor: Red Hat, Inc. 
version: 7.4.23 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-07-28T22:29:23Z" Id: sha256:aaef104173d392640a16093b0375ab1848ab77c390f057d4432e3cf13ecc14bb Size: 190181406 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-eap-7/eap74-openjdk11-runtime-openshift-rhel8@sha256:b52d47d62be6b57d4af722f98b3434016c99c54e04574236b017924046d323c0 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:b52d47d62be6b57d4af722f98b3434016c99c54e04574236b017924046d323c0 resourceVersion: "13298" uid: f420861d-0e16-4593-b55e-442c437ac95d - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:25c75c34b2e2b68ba9245d9cddeb6b8a0887371ed30744064f85241a75704d87 size: 79262296 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:67705065e025181e4faca8aabe1305bdd92f5bdf8a2b8009cdb69183ac2e2c47 size: 49851946 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9d458e2e81cb0fa811f569aaf711628309c0372c7d5eed4a8ea9ec96b4aeeb42 size: 9300456 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:603a40d66abe93fd4805fe8f433e5c21fcc2671bde3798f573282e03296d55d3 size: 37049867 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Entrypoint: - /usr/bin/oauth-proxy Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - ART_BUILD_ENGINE=konflux - ART_BUILD_DEPS_METHOD=cachi2 - ART_BUILD_NETWORK=hermetic - ART_BUILD_DEPS_MODE=default - __doozer=merge - BUILD_RELEASE=202510211040.p2.g6649cb8.assembly.stream.el9 - BUILD_VERSION=v4.20.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=20 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.20.0-202510211040.p2.g6649cb8.assembly.stream.el9-6649cb8 - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.20 - __doozer_key=golang-github-openshift-oauth-proxy - __doozer_uuid_tag=ose-oauth-proxy-rhel9-v4.20.0-20251021.105557 - __doozer_version=v4.20.0 - OS_GIT_COMMIT=6649cb8 - SOURCE_DATE_EPOCH=1756110665 - SOURCE_GIT_COMMIT=6649cb8a7c4dd7d1c4e03c9b0bc7594089fba991 - SOURCE_GIT_TAG=v4.4-imagestream-1-137-g6649cb8a - SOURCE_GIT_URL=https://github.com/openshift/oauth-proxy Labels: License: GPLv2+ architecture: x86_64 build-date: "2025-10-21T12:59:58Z" com.redhat.component: golang-github-openshift-oauth-proxy-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:openshift:4.20::el9 description: Empty distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: OpenShift OAuth Proxy. io.k8s.display-name: OpenShift OAuth Proxy io.openshift.build.commit.id: 6649cb8a7c4dd7d1c4e03c9b0bc7594089fba991 io.openshift.build.commit.url: https://github.com/openshift/oauth-proxy/commit/6649cb8a7c4dd7d1c4e03c9b0bc7594089fba991 io.openshift.build.source-location: https://github.com/openshift/oauth-proxy io.openshift.expose-services: "" io.openshift.maintainer.component: apiserver-auth io.openshift.maintainer.project: OCPBUGS io.openshift.tags: oauth maintainer: Red Hat, Inc. 
name: openshift/ose-oauth-proxy-rhel9 org.opencontainers.image.revision: 48d86f0ebc64bbddc42cb2f3953cd22a470f09ed release: 202510211040.p2.g6649cb8.assembly.stream.el9 summary: Empty url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel-els/images/9.4-847.1719484506 vcs-ref: 48d86f0ebc64bbddc42cb2f3953cd22a470f09ed vcs-type: git vendor: Red Hat, Inc. version: v4.20.0 ContainerConfig: {} Created: "2025-10-21T13:00:46Z" Id: sha256:8667c6123d18f3974181dd6bcc4d9a6de908c9f1c9028e62699dc33d19f53304 Size: 175485249 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b77dec59a72e9b6323e6fa2617f588f07518f44d2e9f6aa8f2ccd83d90e40203 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:b77dec59a72e9b6323e6fa2617f588f07518f44d2e9f6aa8f2ccd83d90e40203 resourceVersion: "13845" uid: f6d233f9-1923-4eb6-927d-f2ea2cb4ef30 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ea87c7dcd2a872d002f8496004b71abd95b626a9b357c9ecd052d57fab6205b8 size: 96274787 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.18 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2024-01-18T10:15:53 com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.9.1 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 org.opencontainers.image.documentation: https://jboss-container-images.github.io/openjdk/ release: "2.1705572797" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.18-2.1705572797 usage: https://jboss-container-images.github.io/openjdk/ vcs-ref: b3fd77ac1425a4ff4908049b21b743ab30e063aa vcs-type: git vendor: Red Hat, Inc. 
version: "1.18" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-01-18T10:37:55Z" Id: sha256:2aa213d5224d35c38242ad04f0e1ec5a61a81cd04bc5aaa7f4454122a201e804 Size: 135601584 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:b80a514f136f738736d6bf654dc3258c13b04a819e001dd8a39ef2f7475fd9d9 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:b80a514f136f738736d6bf654dc3258c13b04a819e001dd8a39ef2f7475fd9d9 resourceVersion: "14226" uid: d4d0a17b-169a-48a3-a329-966713d880b0 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d7c06497d5cebd39c0a4feb14981ec940b5c863e49903d320f630805b049cbff size: 39279912 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8f863159a00163b597034b832591613ce6016e7029a6351f07d1a500e9cc4b28 size: 107301065 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.14 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-02-07T17:21:35 com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk 
org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "14.1675788284" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.14-14.1675788284 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: 8e04a5356a8315ea64fd704987c0014137fb0559 vcs-type: git vendor: Red Hat, Inc. version: "1.14" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-02-07T17:33:06Z" Id: sha256:079f46516ee586b6f5e921c55135ece0457d66cfe60c206cc886c134e7db796f Size: 146610789 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8@sha256:b838fa18dab68d43a19f0c329c3643850691b8f9915823c4f8d25685eb293a11 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:b838fa18dab68d43a19f0c329c3643850691b8f9915823c4f8d25685eb293a11 resourceVersion: "14211" uid: 8d91fe9b-87fd-49b6-a5bc-c76a0adee45b - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:00002eebe0f599331474f07547167ad83154a7a902f19a133af9a6f5e08a1dfa size: 131864212 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.18 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2024-01-18T20:22:45 com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.buildah.version: 1.29.0 
io.cekit.version: 4.9.1 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" org.opencontainers.image.documentation: https://jboss-container-images.github.io/openjdk/ release: "2.1705602259" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.18-2.1705602259 usage: https://jboss-container-images.github.io/openjdk/ vcs-ref: ca7ddac9f7758dacb1a7a347130d7d8d6ad50922 vcs-type: git vendor: Red Hat, Inc. version: "1.18" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-01-18T20:37:30Z" Id: sha256:17d970f7d5f789087e305ec68b8146195107db18aca710ae8c7de6ac91051504 Size: 171202432 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11@sha256:b85cbdbc289752c91ac7f468cffef916fe9ab01865f3e32cfcc44ccdd633b168 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:b85cbdbc289752c91ac7f468cffef916fe9ab01865f3e32cfcc44ccdd633b168 resourceVersion: "14182" uid: 8c17324b-f890-40a9-b05a-d79a008ebd91 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8aa7c030671f84c4f190806311febadc9d9ba286d9992325e2dca46970eca591 size: 79774934 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:66741bf06f08d8eb2f9c74cd995a0da2dafe94e6a770a4d7cdaf66b3d7cd9036 size: 422978473 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:34a590fa7e41e139b2ea3b81c70b50da6c11fbd3ab3a414038e98d20ce38db26 size: 1153561491 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. 
Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - EAP_FULL_GROUPID=org.jboss.eap - JBOSS_EAP_VERSION=7.4.13 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.4.13 - WILDFLY_VERSION=7.4.13.GA-redhat-00001 - SSO_FORCE_LEGACY_SECURITY=true - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.9.Final - GALLEON_WILDFLY_VERSION=5.2.6.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - S2I_FP_VERSION=23.0.0.Final - DEFAULT_ADMIN_USERNAME=eapadmin - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - 
JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - OFFLINER_VERSION=1.6 - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap74-openjdk8-openshift-rhel7 - JBOSS_IMAGE_VERSION=7.4.13 - LANG=C.utf8 - JOLOKIA_VERSION=1.7.2.redhat-00002 - WILDFLY_CAMEL_VERSION=5.10.0.fuse-7_12_1-00009-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-11-07T10:26:31 com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Camel applications on EAP 7.4 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.7.2.redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.4 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Thomas Diesler name: fuse7/fuse-eap-openshift-jdk8-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.4.13 org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.4.13 release: "33" summary: Platform for building and running Apache Camel applications on EAP 7.4 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift-jdk8-rhel7/images/1.12-33 vcs-ref: b52098af4a5dd2f87cb51b4f0fac33d2ec7f0c68 vcs-type: git vendor: Red Hat, Inc. 
version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-11-07T10:30:23Z" Id: sha256:fc5c81041b0b8b9b3b0e5511873397a44b77ae5f0ac04cc9f693e17ec66d233c Size: 1656368397 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift-jdk8-rhel7@sha256:ba09ffc90313f71c748e38a6b7c68b20f4e42945eb88349a6596dea61c517637 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:ba09ffc90313f71c748e38a6b7c68b20f4e42945eb88349a6596dea61c517637 resourceVersion: "14025" uid: b30f4618-e380-4e41-8f7c-d14b4ac292d1 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:378837c0e24ad4a2e33f0eb3d68dc0c31d9a7dbbd5357d4acafec1d3a7930602 size: 74923740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e17262bc23414bd3c0e9808ad7a87b055fe5afec386da42115a839ea2083d233 size: 1303 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8d9c78c7f9887170d08c57ec73b21e469b4120682a2e82883217535294878c5d size: 3805344 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f4350d5126d0895bb50c2c082a415ff417578d34508a0ef07ec20cebf661ebb7 size: 70368140 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f14ceb8ccc89fe79eddcedf96ffffcc68e8b376a7bfee81da406c8825cabb254 size: 30912001 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.1 - JOLOKIA_VERSION=1.5.0.redhat-1 - PROMETHEUS_JMX_EXPORTER_VERSION=0.10.0.redhat-2 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 3183206101f3 Image: 6d7d8134d45d36ea1654ff54f10affaef7a20322ab846455d3f6881dd0890204 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-10-17T21:54:48.549067 com.redhat.build-host: cpt-0009.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.1.4 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.10.0.redhat-2 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Cloud Enablement Feedback name: fuse7/fuse-java-openshift org.concrt.version: 2.1.4 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "4.1539812368" summary: Build and 
run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.1-4.1539812368 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: 68c37e19a2e9f85e503652418dfa19c2d2f71349 vcs-type: git vendor: Red Hat, Inc. version: "1.1" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift - FUSE_JAVA_IMAGE_VERSION=1.1 - JOLOKIA_VERSION=1.5.0.redhat-1 - PROMETHEUS_JMX_EXPORTER_VERSION=0.10.0.redhat-2 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 3183206101f3 Image: sha256:27fd2d68b6c16f4c29e862ed42b06417d370f20e4a4e96b9d8a7271f3fe0db51 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-10-17T21:54:48.549067 com.redhat.build-host: cpt-0009.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 2.1.4 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.10.0.redhat-2 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Cloud Enablement Feedback name: fuse7/fuse-java-openshift org.concrt.version: 2.1.4 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "4.1539812368" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift/images/1.1-4.1539812368 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: 68c37e19a2e9f85e503652418dfa19c2d2f71349 vcs-type: git vendor: Red Hat, Inc. 
version: "1.1" User: "185" WorkingDir: /home/jboss Created: "2018-10-17T21:57:10Z" DockerVersion: 1.13.1 Id: sha256:1398678780b6a8488a7407b7fc5f2c6a784b4212b682a3fe2911125ed95fb147 Size: 180016684 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift@sha256:ba0bb0b1b9bed00d24bd73b59b6a3f7a46714ba1c0e1b900572dc580eedde68c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:ba0bb0b1b9bed00d24bd73b59b6a3f7a46714ba1c0e1b900572dc580eedde68c resourceVersion: "14031" uid: 4db4407d-e010-4d0d-9812-5be98937692d - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:79c20e727b94ea36ae8776eb9e4065b60dc1d396564a6a91ebb6ee334dfb5cea size: 79001473 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:bf8eef1d7ebdf53d35a677908dd07dc73e7afa302fa4b3375a7729d92640097b size: 470350057 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_EAP_VERSION=7.4.21 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap-xp - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=4.0.2 - WILDFLY_VERSION=4.0.2.GA-redhat-00010 - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - SSO_FORCE_LEGACY_SECURITY=true - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.9.Final - GALLEON_WILDFLY_VERSION=5.2.6.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server 
- GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - S2I_FP_VERSION=4.0.0.Final - DEFAULT_ADMIN_USERNAME=eapadmin - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - OFFLINER_VERSION=1.6 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap-xp4-openjdk11-openshift-rhel8 - JBOSS_IMAGE_VERSION=4.0 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2025-02-05T10:36:55 com.redhat.component: jboss-eap-xp4-openjdk11-builder-openshift-rhel8-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Enterprise Application Platform XP 4.0 OpenShift container image. distribution-scope: public io.buildah.version: 1.33.8 io.cekit.version: 4.12.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running JavaEE applications on JBoss EAP XP 4.0 io.k8s.display-name: JBoss EAP XP io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,eap-xp maintainer: Red Hat name: jboss-eap-7/eap-xp4-openjdk11-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap-xp org.jboss.product.eap.version: 7.4.21 org.jboss.product.openjdk.version: "11" org.jboss.product.version: 4.0.2 release: "83" summary: Red Hat JBoss Enterprise Application Platform XP 4.0 OpenShift container image. url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-eap-7/eap-xp4-openjdk11-openshift-rhel8/images/4.0-83 vcs-ref: abc2234f37ee2dac02e16a7e65775cba8bbf8e40 vcs-type: git vendor: Red Hat, Inc. 
version: "4.0" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-02-05T10:46:22Z" Id: sha256:8230c513a7a2ac6b05fc08213144ce749631e57ee94bb872d761fcff7d65ab36 Size: 549416678 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-eap-7/eap-xp4-openjdk11-openshift-rhel8@sha256:babbc5613f8ad380b2b85564d74d1edfbb345a9481fa3c1891980bb75169c079 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:babbc5613f8ad380b2b85564d74d1edfbb345a9481fa3c1891980bb75169c079 resourceVersion: "13346" uid: 292eb814-18c6-45d9-bc75-750692de4cf9 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c8c24bf4ef435ee29d7495ca732a4d82374c1a11c25ca6aae12f997f45ca5a26 size: 39733597 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b3f8e70833f9e7ef0aa29948526b214496524ca42b8de4326fe5720275dbc259 size: 49612025 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - container=oci - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - NPM_RUN=start - PLATFORM=el8 - NODEJS_VERSION=22 - NAME=nodejs - SUMMARY=Minimal image for running Node.js 22 applications - DESCRIPTION=Node.js 22 available as container is a base platform for running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. - NPM_CONFIG_PREFIX=/opt/app-root/src/.npm-global - PATH=/opt/app-root/src/node_modules/.bin/:/opt/app-root/src/.npm-global/bin/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T16:11:37Z" com.redhat.component: nodejs-22-minimal-container com.redhat.deployments-dir: /opt/app-root/src com.redhat.dev-mode: DEV_MODE:false com.redhat.dev-mode.port: DEBUG_PORT:5858 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Node.js 22 available as container is a base platform for running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-nodejs-container io.buildah.version: 1.41.4 io.k8s.description: Node.js 22 available as container is a base platform for running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. 
io.k8s.display-name: Node.js 22 Minimal io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nodejs,nodejs22 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/nodejs-22-minimal org.opencontainers.image.revision: 036610bcf0884b09cd66a4cf998af0d7d63e7842 release: "1761063072" summary: Minimal image for running Node.js 22 applications url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: 036610bcf0884b09cd66a4cf998af0d7d63e7842 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-21T16:11:48Z" Id: sha256:1adff3a9f4ac4cf97c5cded69314721442c670cd69767d11a303d2ac50fa6ed6 Size: 89357843 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/nodejs-22-minimal@sha256:bae3cc0e61e2d73c8dd1384cfa97c6e875ff4a5da7f6332075eb5cd95bc90a7c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:bae3cc0e61e2d73c8dd1384cfa97c6e875ff4a5da7f6332075eb5cd95bc90a7c resourceVersion: "14091" uid: 849487c3-a5cc-4fdc-9d17-1232da4612df - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:06f86e50a0b74ff9eb161a7d781228877c90e8ff57e9689e8cb8b0f092a2a9f9 size: 39268171 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a33df5fac14bd7628194e14e695a825432b6aa4d9c795c83e3229ea9ef2f6d44 size: 170473264 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a44a7d80cb9334975310d04287283937a5384129d34cb2196e8da1289234ed03 size: 5896388 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_HTTPS=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.14 - LANG=C.utf8 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift-jdk11-rhel8 - FUSE_JAVA_IMAGE_VERSION=1.11 - JOLOKIA_VERSION=1.7.1.redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i:/usr/local/s2i - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data ExposedPorts: 8080/tcp: {} 
8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Labels: architecture: x86_64 build-date: 2023-06-19T14:53:12 com.redhat.component: fuse-java-openshift-jdk11-rhel-8-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift-jdk11-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "46.1687184935" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift-jdk11-rhel8/images/1.11-46.1687184935 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: 3c42e10f87d022df6d119889367e08bf6ca02870 vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-06-19T14:55:47Z" Id: sha256:ca576f3f73d2ec54d1a5a8c8d11da3a2ac3b3f92106caa72a9648a0eac042579 Size: 215678711 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift-jdk11-rhel8@sha256:bc7fd6202c086ba5dec4dc27c888b96505ffbd37d1e124cd4abb72bd13a8f237 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:bc7fd6202c086ba5dec4dc27c888b96505ffbd37d1e124cd4abb72bd13a8f237 resourceVersion: "13357" uid: 483c08ce-77e4-479b-99d9-dca42620545c - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a450a68cb6f27c66e8e7eb103f60d8c45ece038e05a5296aded6ab51b9792a0b size: 135499428 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/java-17 - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - 
JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-17 - JBOSS_IMAGE_VERSION=1.18 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2024-01-18T10:37:37 com.redhat.component: openjdk-17-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.9.1 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" org.opencontainers.image.documentation: https://jboss-container-images.github.io/openjdk/ release: "2.1705573234" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17/images/1.18-2.1705573234 usage: https://jboss-container-images.github.io/openjdk/ vcs-ref: 3e770bd714724a77d062c8bd4113bbc2a69f1024 vcs-type: git vendor: Red Hat, Inc. 
version: "1.18" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-01-18T10:49:24Z" Id: sha256:a46c965d1c38df2fa4417ddccafa0c1d63e071ffc1892c44be25e35b793c7dde Size: 174835100 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17@sha256:bcb0e15cc9d2d3449f0b1acac7b0275035a80e1b3b835391b5464f7bf4553b89 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:bcb0e15cc9d2d3449f0b1acac7b0275035a80e1b3b835391b5464f7bf4553b89 resourceVersion: "14149" uid: 782357b6-85ce-40c7-89a5-3bbba40c1af4 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5396332cda6c9dfb73ed1971c8a7f1db24cb7f627c2533281db28851b94d1aae size: 142828242 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-mysqld Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - MYSQL_VERSION=8.0 - APP_DATA=/opt/app-root/src - HOME=/var/lib/mysql - SUMMARY=MySQL 8.0 SQL database server - DESCRIPTION=MySQL is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MySQL mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MySQL databases on behalf of the clients. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/mysql - MYSQL_PREFIX=/usr ExposedPorts: 3306/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T20:14:21Z" com.redhat.component: mysql-80-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: MySQL is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MySQL mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MySQL databases on behalf of the clients. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: MySQL is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MySQL mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MySQL databases on behalf of the clients. 
io.k8s.display-name: MySQL 8.0 io.openshift.expose-services: 3306:mysql io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,mysql,mysql80,mysql-80 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel8/mysql-80 org.opencontainers.image.revision: 8d1c59058e8eb28303db2dc941e91618c91d5078 release: "1761077583" summary: MySQL 8.0 SQL database server url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d -e MYSQL_USER=user -e MYSQL_PASSWORD=pass -e MYSQL_DATABASE=db -p 3306:3306 rhel8/mysql-80 vcs-ref: 8d1c59058e8eb28303db2dc941e91618c91d5078 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "27" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-21T20:14:47Z" Id: sha256:749185756a4fa7efa62e641706ba2af7f38fae9706189ba10ee5fbf9dd431c40 Size: 238386021 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel8/mysql-80@sha256:be2c749e20118e80b11526eb52993e2055e035b211b2621dea9094dd4dcd9446 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:be2c749e20118e80b11526eb52993e2055e035b211b2621dea9094dd4dcd9446 resourceVersion: "13782" uid: b0f29749-c87b-4d06-add2-1f714e217a16 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:378837c0e24ad4a2e33f0eb3d68dc0c31d9a7dbbd5357d4acafec1d3a7930602 size: 74923740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e17262bc23414bd3c0e9808ad7a87b055fe5afec386da42115a839ea2083d233 size: 1303 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0eeb656bc1e64b6c3ba63f2fa9450feaef3c60159d48eb2171ad1f25f5e655d9 size: 3805266 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:77187f02cbeff1e8de9cdd8e850f300e7267ddf19991dcbc588a498c14df3ff0 size: 70351735 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e5ce1c514681e586df1d1bc14ebc3e37b9ef2164202b54267be700e3fd445fa1 size: 15174769 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Dhiraj Bokde Config: Cmd: - usage Env: - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.0 - JOLOKIA_VERSION=1.5.0.redhat-1 - KARAF_FRAMEWORK_VERSION=4.2.0.fuse-000280-redhat-2 - PROMETHEUS_JMX_EXPORTER_VERSION=0.10.0.redhat-2 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 3183206101f3 Image: 3320f15730e3d95d98b084a00d040f8e97053ec1aa0858da207a0f4b332871b8 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-08-01T18:19:12.507455 com.redhat.build-host: osbs-cpt-003.ocp.osbs.upshift.eng.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.0.0 
io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.karaf: 4.2.0.fuse-000280-redhat-2 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.10.0.redhat-2 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Cloud Enablement Feedback name: fuse7/fuse-karaf-openshift org.concrt.version: 2.0.0 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "13.1533127989" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.0-13.1533127989 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: 01db3ab4a812607ef299e01de1ad666c75d170be vcs-type: git vendor: Red Hat, Inc. version: "1.0" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.0 - JOLOKIA_VERSION=1.5.0.redhat-1 - KARAF_FRAMEWORK_VERSION=4.2.0.fuse-000280-redhat-2 - PROMETHEUS_JMX_EXPORTER_VERSION=0.10.0.redhat-2 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 3183206101f3 Image: sha256:d36ff9014d3950ec0cd231dffbb7b8506923bad04203505d38e32cdd5891593d Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-08-01T18:19:12.507455 com.redhat.build-host: osbs-cpt-003.ocp.osbs.upshift.eng.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.0.0 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.karaf: 4.2.0.fuse-000280-redhat-2 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.10.0.redhat-2 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Cloud Enablement Feedback name: fuse7/fuse-karaf-openshift org.concrt.version: 2.0.0 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "13.1533127989" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.0-13.1533127989 usage: This image is very generic and does not serve a single use case. 
Use it as a base to build your own images. vcs-ref: 01db3ab4a812607ef299e01de1ad666c75d170be vcs-type: git vendor: Red Hat, Inc. version: "1.0" User: "185" WorkingDir: /home/jboss Created: "2018-08-01T18:20:49Z" DockerVersion: 1.12.6 Id: sha256:02c9b9ef185aade797ca6141ca66a53afa7f6bf70ee0ec336643d0470ebc3042 Size: 164263281 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:be51ee43b1596078a17756f38a0017e9338c902f9094f1ad677844d165a02d43 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:be51ee43b1596078a17756f38a0017e9338c902f9094f1ad677844d165a02d43 resourceVersion: "14016" uid: 8d1449f9-2c88-4129-94ba-f2c60f8f4a40 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:881beec6738623be08c01402ad3ff01db770251227a38829fa2a65f00db2624f size: 127909585 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.18 - LANG=C.utf8 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2024-01-18T10:15:55 com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.9.1 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.8" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: 
image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 org.opencontainers.image.documentation: https://jboss-container-images.github.io/openjdk/ release: "2.1705572794" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.18-2.1705572794 usage: https://jboss-container-images.github.io/openjdk/ vcs-ref: 48715c823f569ad02c9840e572698f88fe5be83e vcs-type: git vendor: Red Hat, Inc. version: "1.18" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-01-18T10:42:18Z" Id: sha256:eb4b532f0cd1dd337cbb3e6cfc2adbc4b92e3af4974b973f2365e8f6065e6978 Size: 167247302 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8@sha256:bf5e518dba2aa935829d9db88d933a264e54ffbfa80041b41287fd70c1c35ba5 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:bf5e518dba2aa935829d9db88d933a264e54ffbfa80041b41287fd70c1c35ba5 resourceVersion: "14215" uid: ff70890b-bc5d-4ac0-9429-9800e042cdfe - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:551849931ba0dec31d2e0b8b4490c168ccf5c5c75215fa094860547b6ae6a94e size: 33442256 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:6db543cc86de251a80f1ff8eb92ee1e9db423350ed5389a1a80bac5820af205e size: 49344347 dockerImageManifestMediaType: application/vnd.oci.image.manifest.v1+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - container=oci - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - NPM_RUN=start - PLATFORM=el10 - NODEJS_VERSION=22 - NAME=nodejs - SUMMARY=Minimal image for running Node.js 22 applications - DESCRIPTION=Node.js 22 available as container is a base platform for running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. - NPM_CONFIG_PREFIX=/opt/app-root/src/.npm-global - PATH=/opt/app-root/src/node_modules/.bin/:/opt/app-root/src/.npm-global/bin/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-09-24T20:12:57Z" com.redhat.component: nodejs-22-minimal-container com.redhat.deployments-dir: /opt/app-root/src com.redhat.dev-mode: DEV_MODE:false com.redhat.dev-mode.port: DEBUG_PORT:5858 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/o:redhat:enterprise_linux:10.0 description: Node.js 22 available as container is a base platform for running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. 
distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-nodejs-container io.buildah.version: 1.41.3 io.k8s.description: Node.js 22 available as container is a base platform for running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. io.k8s.display-name: Node.js 22 Micro io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nodejs,nodejs22 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/nodejs-22-minimal org.opencontainers.image.revision: dc5f2425bbb71d01f4e07ce058f5ec5140ce7bed release: "1758744753" summary: Minimal image for running Node.js 22 applications url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: dc5f2425bbb71d01f4e07ce058f5ec5140ce7bed vcs-type: git vendor: Red Hat, Inc. version: "10.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-09-24T20:13:01Z" Id: sha256:39375a53734dc6b989c9dee3d76dc6865790d8bfc8837bc79e27ba77a791ae50 Size: 82798859 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi10/nodejs-22-minimal@sha256:c1360b136eae3f3e6eab6e446d6d7b78c53e829a61e2a0b802d55c7ba134ca0c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:c1360b136eae3f3e6eab6e446d6d7b78c53e829a61e2a0b802d55c7ba134ca0c resourceVersion: "14090" uid: 977c08b5-54a9-4387-a5e2-34a829a67e86 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:1f1202c893ce2775c72b2a3f42ac33b25231d16ca978244bb0c6d1453dc1f39e size: 76250035 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:32be9843afa050552a66345576a59497ba7c81c272aa895d67e6e349841714da size: 1320 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9f74ee800fdb2242405ca2ee7e74f612973956d1725766a1f2199339f92b8381 size: 4013823 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c47adc077129316d733e991a2da2c4bf7ec3d93b7836e0b97ddc5885f0e512ba size: 85699059 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e829e5fa91fb38993da6e85432f8dab8bfec2f2414833e0fa76b718d00de53ad size: 15166148 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - usage Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.6 - JOLOKIA_VERSION=1.6.2.redhat-00002 - KARAF_FRAMEWORK_VERSION=4.2.6.fuse-760032-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages 
ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 08c71bdb7e2a Image: 36850ef4cd3173c72395f1aac96aa90a2c066aa2c0f88fc824d7763162843bd5 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-04-21T12:13:12.350188 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.karaf: 4.2.6.fuse-760032-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "18.1587470206" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.6-18.1587470206 vcs-ref: e20dbf2d77a8e6138f22685c91d477de5da0647b vcs-type: git vendor: Red Hat, Inc. version: "1.6" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.6 - JOLOKIA_VERSION=1.6.2.redhat-00002 - KARAF_FRAMEWORK_VERSION=4.2.6.fuse-760032-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 08c71bdb7e2a Image: sha256:e9d11a3997dd125da1a40ea98732a32a3bfb4a01bf6e49f6cca9bff8a0d935de Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-04-21T12:13:12.350188 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.karaf: 4.2.6.fuse-760032-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 
io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "18.1587470206" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.6-18.1587470206 vcs-ref: e20dbf2d77a8e6138f22685c91d477de5da0647b vcs-type: git vendor: Red Hat, Inc. version: "1.6" User: "185" WorkingDir: /home/jboss Created: "2020-04-21T12:15:11Z" DockerVersion: 1.13.1 Id: sha256:42fef5b8761737fa5ed970d007f015bf7d0ae6a33a9f3c9f960618fd09c7ca6e Size: 181136962 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:c5f7af4c8188bf4619eeaa0f20094bbf5fbfbd824c973ee7da722a48d67300a9 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:c5f7af4c8188bf4619eeaa0f20094bbf5fbfbd824c973ee7da722a48d67300a9 resourceVersion: "14036" uid: 78809bc7-f2ae-4929-8bf5-4d9ea2b1f3e1 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7f2c2c4492b6b2d181be862a0a1d1b6f6851cb07244efbcb43d44f9936aa78d5 size: 80005019 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ea092d7970b26c24007a670fc6d0810dbf9531dc0d3a9d6ea514134ba5686724 size: 7541063 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d67269c45d19cfd63d3f53259d7d133172dc412b125cb18bca8908ec6076db2f size: 98202646 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - run-mysqld Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el7 - MYSQL_VERSION=8.0 - APP_DATA=/opt/app-root/src - HOME=/var/lib/mysql - SUMMARY=MySQL 8.0 SQL database server - DESCRIPTION=MySQL is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MySQL mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MySQL databases on behalf of the clients. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/mysql - MYSQL_PREFIX=/opt/rh/rh-mysql80/root/usr - ENABLED_COLLECTIONS=rh-mysql80 - BASH_ENV=/usr/share/container-scripts/mysql/scl_enable - ENV=/usr/share/container-scripts/mysql/scl_enable - PROMPT_COMMAND=. /usr/share/container-scripts/mysql/scl_enable ExposedPorts: 3306/tcp: {} Labels: architecture: x86_64 build-date: 2024-05-30T09:34:55 com.redhat.component: rh-mysql80-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel description: MySQL is a multi-user, multi-threaded SQL database server. 
The container image provides a containerized packaging of the MySQL mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MySQL databases on behalf of the clients. distribution-scope: public io.buildah.version: 1.29.0 io.k8s.description: MySQL is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MySQL mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MySQL databases on behalf of the clients. io.k8s.display-name: MySQL 8.0 io.openshift.expose-services: 3306:mysql io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,mysql,mysql80,rh-mysql80 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhscl/mysql-80-rhel7 release: "169" summary: MySQL 8.0 SQL database server url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhscl/mysql-80-rhel7/images/8.0-169 usage: docker run -d -e MYSQL_USER=user -e MYSQL_PASSWORD=pass -e MYSQL_DATABASE=db -p 3306:3306 rhscl/mysql-80-rhel7 vcs-ref: ac5e8692458a3f710e4eb220961251f49bd6aaed vcs-type: git vendor: Red Hat, Inc. version: "8.0" User: "27" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2024-05-30T09:38:25Z" Id: sha256:2249d8cbe7e4d466373623d1225bab06fa658fb88b54665be29d6c947a59ff8a Size: 185768272 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhscl/mysql-80-rhel7@sha256:c6edc9920c6038890d77d1daa135b7ae4a5a7fe2213b168dbc12311de0445791 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:c6edc9920c6038890d77d1daa135b7ae4a5a7fe2213b168dbc12311de0445791 resourceVersion: "13781" uid: 92efcc5c-c595-4fca-ba33-ba866e2bd0eb - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b10a2aeb4cf985c4be41816387a0bd677b81d034bb43163651162de8ea98c654 size: 7732234 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-redis Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - REDIS_VERSION=7 - HOME=/var/lib/redis - SUMMARY=Redis in-memory data structure store, used as database, cache and message broker - DESCRIPTION=Redis 7 available as container, is an advanced key-value store. It is often referred to as a data structure server since keys can contain strings, hashes, lists, sets and sorted sets. You can run atomic operations on these types, like appending to a string; incrementing the value in a hash; pushing to a list; computing set intersection, union and difference; or getting the member with highest ranking in a sorted set. 
In order to achieve its outstanding performance, Redis works with an in-memory dataset. Depending on your use case, you can persist it either by dumping the dataset to disk every once in a while, or by appending each command to a log. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/redis - REDIS_PREFIX=/usr - REDIS_CONF=/etc/redis/redis.conf ExposedPorts: 6379/tcp: {} Labels: architecture: x86_64 build-date: "2025-11-03T00:14:42Z" com.redhat.component: redis-7-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Redis 7 available as container, is an advanced key-value store. It is often referred to as a data structure server since keys can contain strings, hashes, lists, sets and sorted sets. You can run atomic operations on these types, like appending to a string; incrementing the value in a hash; pushing to a list; computing set intersection, union and difference; or getting the member with highest ranking in a sorted set. In order to achieve its outstanding performance, Redis works with an in-memory dataset. Depending on your use case, you can persist it either by dumping the dataset to disk every once in a while, or by appending each command to a log. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Redis 7 available as container, is an advanced key-value store. It is often referred to as a data structure server since keys can contain strings, hashes, lists, sets and sorted sets. You can run atomic operations on these types, like appending to a string; incrementing the value in a hash; pushing to a list; computing set intersection, union and difference; or getting the member with highest ranking in a sorted set. In order to achieve its outstanding performance, Redis works with an in-memory dataset. Depending on your use case, you can persist it either by dumping the dataset to disk every once in a while, or by appending each command to a log. io.k8s.display-name: Redis 7 io.openshift.expose-services: 6379:redis io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,redis,redis7,redis-7 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel9/redis-7 org.opencontainers.image.revision: 330dc8fd9187f2fe8a1334ee7a7aabea889f86e8 release: "1762128860" summary: Redis in-memory data structure store, used as database, cache and message broker url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d --name redis_database -p 6379:6379 rhel9/redis-7 vcs-ref: 330dc8fd9187f2fe8a1334ee7a7aabea889f86e8 vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" Volumes: /var/lib/redis/data: {} WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-11-03T00:14:53Z" Id: sha256:eab2e91ca850a59599f1c0ace5a0f8e9d2637f02512fd5a15135d6914623649a Size: 104833093 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel9/redis-7@sha256:c9134d992b1ee68be3450debd70f96b9b353fec15fca331afcf7adb26a7f2f09 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:c9134d992b1ee68be3450debd70f96b9b353fec15fca331afcf7adb26a7f2f09 resourceVersion: "14072" uid: ce22dffc-b4b1-4593-9b40-86bc4c8cc43f - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:06f86e50a0b74ff9eb161a7d781228877c90e8ff57e9689e8cb8b0f092a2a9f9 size: 39268171 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:1500af7621666bdae42658c39c0dc66ea092cda488d5e24241f2d81d1ad8afe1 size: 166780168 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a83f74eef045dca9d4bd2b650fda62543999849f80d4ab4a8bcda33a19998746 size: 5902520 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - usage Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_HTTPS=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.14 - LANG=C.utf8 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift-rhel8 - FUSE_KARAF_IMAGE_VERSION=1.11 - JOLOKIA_VERSION=1.7.1.redhat-00001 - KARAF_FRAMEWORK_VERSION=4.2.15.fuse-7_11_1-00017-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Labels: architecture: x86_64 build-date: 2023-06-19T14:50:43 com.redhat.component: fuse-karaf-openshift-rhel-8-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 3.11.0 
io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.karaf: 4.2.15.fuse-7_11_1-00017-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "37.1687184687" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift-rhel8/images/1.11-37.1687184687 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: 01516c4a1897f4dc6761d0738897971a9dca1d74 vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-06-19T14:54:20Z" Id: sha256:c3b90c523bb27ad0b6651583718da08fb31a7310141f311e65a7a250fc39f5e8 Size: 211991770 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift-rhel8@sha256:c93f8f7e2d9522be4952071044457efde0a6c6fd59bd9adcd01e07d164d8235c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:c93f8f7e2d9522be4952071044457efde0a6c6fd59bd9adcd01e07d164d8235c resourceVersion: "14021" uid: 771e734c-1830-46d1-8a11-c153ab58ad9c - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:00f17e0b37b0515380a4aece3cb72086c0356fc780ef4526f75476bea36a2c8b size: 76243402 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:305d73a95c8fece2b53a34e040df1c97eb6b7f7cc4e0a7933465f0b7325e3d72 size: 1329 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6b6fe63f06045c5597f7f548d6d4a7f27873232f1df1329a8777854db2d53877 size: 96601064 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true 
- AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.7 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 9d3d66a8bfcc Image: 6f334e5707aa88cd74baf5f1191a984b6ab9652184956460a93fa570097dec2e Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-12-10T17:45:20.904669 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.cekit.version: 3.5.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "5.1575996300" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.7-5.1575996300 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: c47a11f2fd7bf261b6db74ff9bc60b96e7b1b340 vcs-type: git vendor: Red Hat, Inc. 
version: "1.7" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.7 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 9d3d66a8bfcc Image: sha256:314dd01fece012f6910bfedbd9c1144669c4c26706e28fe98a2550a030a28dba Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-12-10T17:45:20.904669 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.cekit.version: 3.5.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "5.1575996300" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.7-5.1575996300 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: c47a11f2fd7bf261b6db74ff9bc60b96e7b1b340 vcs-type: git vendor: Red Hat, Inc. 
version: "1.7" User: "185" WorkingDir: /home/jboss Created: "2019-12-10T17:50:03Z" DockerVersion: 1.13.1 Id: sha256:cac978dfd8347708b1aeb7a2e4b0200b78e6838a6920fabfbf2f26a608c58d6d Size: 172853383 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:caba895933209aa9a4f3121f9ec8e5e8013398ab4f72bd3ff255227aad8d2c3e kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:caba895933209aa9a4f3121f9ec8e5e8013398ab4f72bd3ff255227aad8d2c3e resourceVersion: "14176" uid: 3c74d758-90f3-4950-ae45-073e3886b95b - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7f2c2c4492b6b2d181be862a0a1d1b6f6851cb07244efbcb43d44f9936aa78d5 size: 80005019 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ea092d7970b26c24007a670fc6d0810dbf9531dc0d3a9d6ea514134ba5686724 size: 7541063 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:eea016ca58c4fab7ad66cb94edce0f1dac9aa75ca3c0870b686985ef13b3f139 size: 8177824 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - run-redis Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el7 - REDIS_VERSION=6 - HOME=/var/lib/redis - SUMMARY=Redis in-memory data structure store, used as database, cache and message broker - DESCRIPTION=Redis 6 available as container, is an advanced key-value store. It is often referred to as a data structure server since keys can contain strings, hashes, lists, sets and sorted sets. You can run atomic operations on these types, like appending to a string; incrementing the value in a hash; pushing to a list; computing set intersection, union and difference; or getting the member with highest ranking in a sorted set. In order to achieve its outstanding performance, Redis works with an in-memory dataset. Depending on your use case, you can persist it either by dumping the dataset to disk every once in a while, or by appending each command to a log. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/redis - REDIS_PREFIX=/opt/rh/rh-redis6/root/usr - ENABLED_COLLECTIONS=rh-redis6 - REDIS_CONF=/etc/redis.conf - BASH_ENV=/usr/share/container-scripts/redis/scl_enable - ENV=/usr/share/container-scripts/redis/scl_enable - PROMPT_COMMAND=. /usr/share/container-scripts/redis/scl_enable ExposedPorts: 6379/tcp: {} Labels: architecture: x86_64 build-date: 2024-05-30T09:35:38 com.redhat.component: rh-redis6-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel description: Redis 6 available as container, is an advanced key-value store. It is often referred to as a data structure server since keys can contain strings, hashes, lists, sets and sorted sets. You can run atomic operations on these types, like appending to a string; incrementing the value in a hash; pushing to a list; computing set intersection, union and difference; or getting the member with highest ranking in a sorted set. 
In order to achieve its outstanding performance, Redis works with an in-memory dataset. Depending on your use case, you can persist it either by dumping the dataset to disk every once in a while, or by appending each command to a log. distribution-scope: public io.buildah.version: 1.29.0 io.k8s.description: Redis 6 available as container, is an advanced key-value store. It is often referred to as a data structure server since keys can contain strings, hashes, lists, sets and sorted sets. You can run atomic operations on these types, like appending to a string; incrementing the value in a hash; pushing to a list; computing set intersection, union and difference; or getting the member with highest ranking in a sorted set. In order to achieve its outstanding performance, Redis works with an in-memory dataset. Depending on your use case, you can persist it either by dumping the dataset to disk every once in a while, or by appending each command to a log. io.k8s.display-name: Redis 6 io.openshift.expose-services: 6379:redis io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,redis,redis6,rh-redis6 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhscl/redis-6-rhel7 release: "71" summary: Redis in-memory data structure store, used as database, cache and message broker url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhscl/redis-6-rhel7/images/6-71 usage: podman run -d --name redis_database -p 6379:6379 rhscl/redis-6-rhel7 vcs-ref: 586a73398776013846e6cc6c4bc2660c113c7b3d vcs-type: git vendor: Red Hat, Inc. version: "6" User: "1001" Volumes: /var/lib/redis/data: {} WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2024-05-30T09:37:23Z" Id: sha256:fa2824ab06c9166639a8c0857f4c15dd3bb3f441376297cd7f8b354482534319 Size: 95744613 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhscl/redis-6-rhel7@sha256:cbc31c1b0625cb01f2b67d83e7b375d08d6aa756f24cf32fc3e82f0b76a4c976 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:cbc31c1b0625cb01f2b67d83e7b375d08d6aa756f24cf32fc3e82f0b76a4c976 resourceVersion: "14069" uid: 4f504656-8e3f-4bfa-bebb-e95ee26d88e5 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:bd3de4798eadff692d5419649a6ba8eca9914d6fdb51d150d669643645898104 size: 36581100 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8c19addc28dd2002288fa9a90333e5048784868aeb080286355c2d6731715d4a size: 1745 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:59f7a530b4030b4e0863fd682a8037648ffae5246a8b56b236c01e25f9526ec0 size: 177830421 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:63f28e9cbf4e4f467de5e09f5230baac60604301b4c85c45f67ceca80b7acb8a size: 796867 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml 
- JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1.redhat-00001 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift-rhel8 - FUSE_JAVA_IMAGE_VERSION=1.10 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Hostname: 877ccd18b302 Image: 4d7821d2c772ca5c7991d6ee11d1adffdb058f4d96a7eb435266bced2399fe5d Labels: architecture: x86_64 build-date: 2022-06-15T19:29:11.010231 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-rhel-8-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "25.1655314763" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift-rhel8/images/1.10-25.1655314763 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: 43a7424b0412e1f017e02872d779b2560450cd15 vcs-type: git vendor: Red Hat, Inc. 
version: "1.10" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1.redhat-00001 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift-rhel8 - FUSE_JAVA_IMAGE_VERSION=1.10 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Hostname: 877ccd18b302 Image: sha256:a4774b2e9b5c153470c333e2ddea57230634857bb59f65e069a0d065251f9813 Labels: architecture: x86_64 build-date: 2022-06-15T19:29:11.010231 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-java-openshift-rhel-8-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "25.1655314763" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift-rhel8/images/1.10-25.1655314763 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: 43a7424b0412e1f017e02872d779b2560450cd15 vcs-type: git vendor: Red Hat, Inc. 
version: "1.10" User: "185" WorkingDir: /home/jboss Created: "2022-06-15T19:29:48Z" DockerVersion: 1.13.1 Id: sha256:8245537b67f8092bc57dc6cd9d563b8f43213fcaec511a21c2c9c663c0b820bd Size: 215218543 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift-rhel8@sha256:cc6290111b86982db2789ed392d004c2c48ff7e5407a82bbeaa6d9ceb8a7963f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:cc6290111b86982db2789ed392d004c2c48ff7e5407a82bbeaa6d9ceb8a7963f resourceVersion: "14035" uid: 9637a78d-2d44-4cce-9c75-a90ad47179d6 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a9e23b64ace00a199db21d302292b434e9d3956d79319d958ecc19603d00c946 size: 39622437 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:38b71301a1d9df24c98b5a5ee8515404f42c929003ad8b13ab83d2de7de34dec size: 1742 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:439121684ff1f1dbfe8da567fa15a71a40d2149fd6fbc110f6445a3e55145b5d size: 117810118 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.11 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 02d87ca75d31 Image: 5a41a6a038b563327a70e646ddb8b620257e97900589766d84e857455f8bf41f Labels: architecture: x86_64 build-date: 2022-03-28T09:45:03.388815 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building 
and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "2.1648459569" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.11-2.1648459569 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: cc09248411a10e536e6b541f2ce7f8d409fef523 vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.11 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 02d87ca75d31 Image: sha256:39e6e04e3d15579f8e1b73a1e60ef2000d912c47d499a9394b1fa0167544d464 Labels: architecture: x86_64 build-date: 2022-03-28T09:45:03.388815 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: 
openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "2.1648459569" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.11-2.1648459569 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: cc09248411a10e536e6b541f2ce7f8d409fef523 vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss Created: "2022-03-28T09:51:56Z" DockerVersion: 1.13.1 Id: sha256:90a04ce690841235ca437be771fce7a4ea51a6511aae3906adb68827b860fdd1 Size: 157441797 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11@sha256:ce5c0becf829aca80734b4caf3ab6b76cb00f7d78f4e39fb136636a764dea7f6 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:ce5c0becf829aca80734b4caf3ab6b76cb00f7d78f4e39fb136636a764dea7f6 resourceVersion: "14169" uid: e38dae74-8a02-4a53-a92a-7e2b3b0c694e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:af063699af1c142fce6707dc9306d122355e61bd23ded0d18f8a4ecfbf3aa89a size: 78847792 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:c81d6d556e6e3a4255dd2709ce18578bfbbf3eed10a4efb966bf99ab69c79e05 size: 9405288 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:d9e405c2a1a004feffa67c8e53dc0965b77b9c67d91626366ee9e53dd24e3de4 size: 199332407 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:e7ee0b12d498d3af9dcfe16241baaf8e8d0b79d5cb9f8826d719189092b982ff size: 18647985 dockerImageManifestMediaType: application/vnd.oci.image.manifest.v1+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - VERSION=10 - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el10 - NODEJS_VER=22 - NAME=s2i-base - RUBY_MAJOR_VERSION=3 - RUBY_MINOR_VERSION=3 - RUBY_VERSION=3.3 - RUBY_SCL_NAME_VERSION=33 - RUBY_SCL=ruby-33 - IMAGE_NAME=ubi10/ruby-33 - SUMMARY=Platform for building and running Ruby 3.3 applications - DESCRIPTION=Ruby 3.3 available as container is a base platform for building and running various Ruby 3.3 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-29T04:09:10Z" com.redhat.component: ruby-33-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/o:redhat:enterprise_linux:10.0 description: Ruby 3.3 available as container is a base platform for building and running various Ruby 3.3 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). 
It is simple, straight-forward, and extensible. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Ruby 3.3 available as container is a base platform for building and running various Ruby 3.3 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. io.k8s.display-name: Ruby 3.3 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,ruby,ruby33,ruby-33 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi10/ruby-33 org.opencontainers.image.revision: 0099448b57ea01623d6738ce02d9a166c76a667a release: "1761710926" summary: Platform for building and running Ruby 3.3 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-ruby-container.git --context-dir=3.3/test/puma-test-app/ ubi10/ruby-33 ruby-sample-app vcs-ref: 0099448b57ea01623d6738ce02d9a166c76a667a vcs-type: git vendor: Red Hat, Inc. version: "10.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-29T04:09:16Z" Id: sha256:162811fbac5572f40d84e45492457a1c1b9386344f8ec3777f395585120be2c1 Size: 306252278 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi10/ruby-33@sha256:cf965175e27f89fecae4982ff88f15b5711b60af5215fd9ef5ff1b6f6ddf9bcd kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:cf965175e27f89fecae4982ff88f15b5711b60af5215fd9ef5ff1b6f6ddf9bcd resourceVersion: "14116" uid: 984ffe39-fff8-459d-9605-5bc0b41dc54d - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:17942523bc4bb2db6eb9f7519db38bbb70e47356d3f0ae0f15b967c0628234c6 size: 76229428 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4c98734f24339b059854b6f7ad77928ffb6b84756ecd4eeec4a15870b082d906 size: 1283 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:bf6c4b8e57dfb8d977021b349c8198d68a042c99fc7a4d799519890dcff81b63 size: 343983498 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/datagrid/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - 
JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.3.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.3 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.3.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: bd4e8b9572dd Image: ba7679b1847ecccb4610537e6ead4e8bb449e2901673ab3576edf0df635d1c25 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-10-16T15:04:47.036161 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 3.4.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.3.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.3.GA release: "2.1571238163" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.3-2.1571238163 vcs-ref: 78674a771ad66e4d799081bee305010e5d2d6609 vcs-type: git vendor: Red Hat, Inc. 
version: "1.3" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.3.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.3 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.3.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: bd4e8b9572dd Image: sha256:505111cf48572d6f44fd5851cfd6de30f45dae1089a4a09cc1ac49fa56aa2420 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-10-16T15:04:47.036161 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 3.4.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.3.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.3.GA release: "2.1571238163" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.3-2.1571238163 vcs-ref: 78674a771ad66e4d799081bee305010e5d2d6609 vcs-type: git vendor: Red Hat, Inc. 
version: "1.3" User: "185" WorkingDir: /home/jboss Created: "2019-10-16T15:13:42Z" DockerVersion: 1.13.1 Id: sha256:8676dc6ddc5c3a1757d7d50d5d1e6b1e30e405212f01f066e8437a9943f67901 Size: 420222201 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:cfd8c4ac1c495b766dd3ff1a85c35afe092858f8f65b52a5b044811719482236 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:02Z" name: sha256:cfd8c4ac1c495b766dd3ff1a85c35afe092858f8f65b52a5b044811719482236 resourceVersion: "13893" uid: 28662931-803e-40b9-b644-869ecb63eb63 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:172ffdf04fe0ffd5a0117272da0333271a3c558dbeaf357e7412638d99a1e462 size: 154305621 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3f5bd5395b121000ac160ee0f28db065b18174ed7001d8c8dca68dbb9c3fc98a size: 93224067 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el8 - NODEJS_VER=20 - PYTHON_VERSION=3.11 - PATH=/opt/app-root/src/.local/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PYTHONUNBUFFERED=1 - PYTHONIOENCODING=UTF-8 - LC_ALL=en_US.UTF-8 - LANG=en_US.UTF-8 - CNB_STACK_ID=com.redhat.stacks.ubi8-python-311 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - PIP_NO_CACHE_DIR=off - SUMMARY=Platform for building and running Python 3.11 applications - DESCRIPTION=Python 3.11 available as container is a base platform for building and running various Python 3.11 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. - BASH_ENV=/opt/app-root/bin/activate - ENV=/opt/app-root/bin/activate - PROMPT_COMMAND=. /opt/app-root/bin/activate ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-30T16:35:42Z" com.redhat.component: python-311-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Python 3.11 available as container is a base platform for building and running various Python 3.11 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. 
Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. distribution-scope: public io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.ubi8-python-311 io.k8s.description: Python 3.11 available as container is a base platform for building and running various Python 3.11 applications and frameworks. Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python's elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. io.k8s.display-name: Python 3.11 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,python,python311,python-311,rh-python311 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/python-311 org.opencontainers.image.revision: 86f6ba63ace0416eedaf4a37645e9cc96c402bf8 release: "1761841987" summary: Platform for building and running Python 3.11 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-python-container.git --context-dir=3.11/test/setup-test-app/ ubi8/python-311 python-sample-app vcs-ref: 86f6ba63ace0416eedaf4a37645e9cc96c402bf8 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-30T16:36:04Z" Id: sha256:c52cfb0a5dc8339fd4fa9096e42836c347a839d38e76925f284eabe3e7f6b763 Size: 343091990 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/python-311@sha256:d09ebdfdd48b8c132bc13df5e20ac3ca00a2a0a25df0b552a6ce575383d71165 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:06Z" name: sha256:d09ebdfdd48b8c132bc13df5e20ac3ca00a2a0a25df0b552a6ce575383d71165 resourceVersion: "14125" uid: 0e8cb77d-1577-451b-95ac-9ba729abc531 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d2bb55755c2edf5423e38bd42c48b277e0cb4c5c765218247001a8c8eb479a87 size: 215199578 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:82c85555fc90687a7373da5a5f6c9d13ace683105995255164339610dc2b5731 size: 22521219 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - NODEJS_VER=20 - PHP_VERSION=8.0 - PHP_VER_SHORT=80 - NAME=php - SUMMARY=Platform for building and running PHP 8.0 applications - 
DESCRIPTION=PHP 8.0 available as container is a base platform for building and running various PHP 8.0 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. - PHP_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/php/ - APP_DATA=/opt/app-root/src - PHP_DEFAULT_INCLUDE_PATH=/usr/share/pear - PHP_SYSCONF_PATH=/etc - PHP_HTTPD_CONF_FILE=php.conf - PHP_FPM_CONF_D_PATH=/etc/php-fpm.d - PHP_FPM_CONF_FILE=www.conf - PHP_FPM_RUN_DIR=/run/php-fpm - PHP_CLEAR_ENV=ON - PHP_MAIN_FPM_CONF_FILE=/etc/php-fpm.conf - PHP_FPM_LOG_PATH=/var/log/php-fpm - HTTPD_CONFIGURATION_PATH=/opt/app-root/etc/conf.d - HTTPD_MAIN_CONF_PATH=/etc/httpd/conf - HTTPD_MAIN_CONF_D_PATH=/etc/httpd/conf.d - HTTPD_MODULES_CONF_D_PATH=/etc/httpd/conf.modules.d - HTTPD_VAR_RUN=/var/run/httpd - HTTPD_DATA_PATH=/var/www - HTTPD_DATA_ORIG_PATH=/var/www - HTTPD_VAR_PATH=/var ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-14T05:43:23Z" com.redhat.component: php-80-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: PHP 8.0 available as container is a base platform for building and running various PHP 8.0 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-php-container io.buildah.version: 1.41.4 io.k8s.description: PHP 8.0 available as container is a base platform for building and running various PHP 8.0 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. io.k8s.display-name: Apache 2.4 with PHP 8.0 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,php,php80,php-80 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/php-80 org.opencontainers.image.revision: c62fb82ae700fbde24b80ce66c6ea1613291b2a0 release: "1760420580" summary: Platform for building and running PHP 8.0 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-php-container.git --context-dir=8.0/test/test-app ubi9/php-80 sample-server vcs-ref: c62fb82ae700fbde24b80ce66c6ea1613291b2a0 vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-14T05:43:29Z" Id: sha256:648f601acba4f5b492725482d61cabf7b14a4b775cfa1edd9e4c86f6a33e9075 Size: 334826231 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/php-80@sha256:d0b4c0b12d8dd76a31a67bd429ce78d327d1c4fcd4896dd77557ee484379defa kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:d0b4c0b12d8dd76a31a67bd429ce78d327d1c4fcd4896dd77557ee484379defa resourceVersion: "14080" uid: 4579b568-c9bb-4bc3-ba6c-5cba65557c3d - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:36fc70f9a3e5e588e4b861b4e8a25ce702f902f79945e8608dc289fc8a829b74 size: 7299368 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-redis Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - REDIS_VERSION=6 - HOME=/var/lib/redis - SUMMARY=Redis in-memory data structure store, used as database, cache and message broker - DESCRIPTION=Redis 6 available as container, is an advanced key-value store. It is often referred to as a data structure server since keys can contain strings, hashes, lists, sets and sorted sets. You can run atomic operations on these types, like appending to a string; incrementing the value in a hash; pushing to a list; computing set intersection, union and difference; or getting the member with highest ranking in a sorted set. In order to achieve its outstanding performance, Redis works with an in-memory dataset. Depending on your use case, you can persist it either by dumping the dataset to disk every once in a while, or by appending each command to a log. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/redis - REDIS_PREFIX=/usr - REDIS_CONF=/etc/redis/redis.conf ExposedPorts: 6379/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-29T20:19:08Z" com.redhat.component: redis-6-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Redis 6 available as container, is an advanced key-value store. It is often referred to as a data structure server since keys can contain strings, hashes, lists, sets and sorted sets. You can run atomic operations on these types, like appending to a string; incrementing the value in a hash; pushing to a list; computing set intersection, union and difference; or getting the member with highest ranking in a sorted set. In order to achieve its outstanding performance, Redis works with an in-memory dataset. Depending on your use case, you can persist it either by dumping the dataset to disk every once in a while, or by appending each command to a log. 
distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Redis 6 available as container, is an advanced key-value store. It is often referred to as a data structure server since keys can contain strings, hashes, lists, sets and sorted sets. You can run atomic operations on these types, like appending to a string; incrementing the value in a hash; pushing to a list; computing set intersection, union and difference; or getting the member with highest ranking in a sorted set. In order to achieve its outstanding performance, Redis works with an in-memory dataset. Depending on your use case, you can persist it either by dumping the dataset to disk every once in a while, or by appending each command to a log. io.k8s.display-name: Redis 6 io.openshift.expose-services: 6379:redis io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,redis,redis6,redis-6 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel9/redis-6 org.opencontainers.image.revision: 7741b3b4d51d1c99b4a6715c14ba99227121ed55 release: "1761769097" summary: Redis in-memory data structure store, used as database, cache and message broker url: https://catalog.redhat.com/en/search?searchType=containers usage: podman run -d --name redis_database -p 6379:6379 rhel9/redis-6 vcs-ref: 7741b3b4d51d1c99b4a6715c14ba99227121ed55 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" Volumes: /var/lib/redis/data: {} WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-29T20:19:10Z" Id: sha256:fcaea22f5bf4c9321db7b2cf866a03114b45cad202af5f812d3144d3ad020918 Size: 104399952 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel9/redis-6@sha256:d138037e44e951accd8941222908b6d2291c8b2dfd4fad49195f83d0c5e6e77f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:d138037e44e951accd8941222908b6d2291c8b2dfd4fad49195f83d0c5e6e77f resourceVersion: "14071" uid: 598ac33f-35b7-4783-95eb-818c5e0f20fe - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a9e23b64ace00a199db21d302292b434e9d3956d79319d958ecc19603d00c946 size: 39622437 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:38b71301a1d9df24c98b5a5ee8515404f42c929003ad8b13ab83d2de7de34dec size: 1742 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f7b4b66acf4505d5bd283601f810ded19c2f40df11bd59f44fc824c1c5895f79 size: 108674770 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-17 - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - 
JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-17 - JBOSS_IMAGE_VERSION=1.11 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 02d87ca75d31 Image: 6f6b991d3055d8eb98f901f1714f9845c029d05fe23f80f12d7fe50d486e9d83 Labels: architecture: x86_64 build-date: 2022-03-28T09:59:20.384463 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-17-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "2.1648459725" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17/images/1.11-2.1648459725 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 21ecff68424e72e8c23c4ee3c91afee2eff02ab2 vcs-type: git vendor: Red Hat, Inc. 
version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-17 - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-17 - JBOSS_IMAGE_VERSION=1.11 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 02d87ca75d31 Image: sha256:758aee90e87fb437215e51e1410e455fd310f123412653c7194ecc3d722fc7f3 Labels: architecture: x86_64 build-date: 2022-03-28T09:59:20.384463 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-17-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "2.1648459725" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17/images/1.11-2.1648459725 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 21ecff68424e72e8c23c4ee3c91afee2eff02ab2 vcs-type: git vendor: Red Hat, Inc. 
version: "1.11" User: "185" WorkingDir: /home/jboss Created: "2022-03-28T10:04:36Z" DockerVersion: 1.13.1 Id: sha256:b8a60b7b036137f59ef1ae634e92cbb6cdb714d8b4bcfe51977b76f5ab05fa7e Size: 148305773 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17@sha256:d186c94f8843f854d77b2b05d10efb0d272f88a4bf4f1d8ebe304428b9396392 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:d186c94f8843f854d77b2b05d10efb0d272f88a4bf4f1d8ebe304428b9396392 resourceVersion: "14142" uid: 928e1d6f-216d-4e57-8c3f-4cc78d0209e5 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d586dbf955a96ae6f2e98084e23c2221bba37c37d96406cb7eaab67a00e9cd27 size: 37762932 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - NAME=nginx - NGINX_VERSION=1.20 - NGINX_SHORT_VER=120 - VERSION=0 - SUMMARY=Platform for running nginx 1.20 or building nginx-based application - DESCRIPTION=Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.20 daemon. The image can be used as a base image for other applications based on nginx 1.20 web server. Nginx server image can be extended using source-to-image tool. - NGINX_CONFIGURATION_PATH=/opt/app-root/etc/nginx.d - NGINX_CONF_PATH=/etc/nginx/nginx.conf - NGINX_DEFAULT_CONF_PATH=/opt/app-root/etc/nginx.default.d - NGINX_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/nginx - NGINX_APP_ROOT=/opt/app-root - NGINX_LOG_PATH=/var/log/nginx - NGINX_PERL_MODULE_PATH=/opt/app-root/etc/perl ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-28T04:21:26Z" com.redhat.component: nginx-120-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.20 daemon. The image can be used as a base image for other applications based on nginx 1.20 web server. Nginx server image can be extended using source-to-image tool. 
distribution-scope: public help: For more information visit https://github.com/sclorg/nginx-container io.buildah.version: 1.41.4 io.k8s.description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.20 daemon. The image can be used as a base image for other applications based on nginx 1.20 web server. Nginx server image can be extended using source-to-image tool. io.k8s.display-name: Nginx 1.20 io.openshift.expose-services: 8443:https io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nginx,nginx-120 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/nginx-120 org.opencontainers.image.revision: e317f97237142a78b31e5928e3eac651abc6e2e3 release: "1761625252" summary: Platform for running nginx 1.20 or building nginx-based application url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi9/nginx-120:latest vcs-ref: e317f97237142a78b31e5928e3eac651abc6e2e3 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-28T04:21:33Z" Id: sha256:32f9d8bbd3e177aa319aa13820e12eb60f31cbc006ab9fb1cdbe9026c1507bce Size: 134864201 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/nginx-120@sha256:d19dcf07e61e96eaff277f3f1b41a802aee2031c561d63012d99b1f2ed51467e kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:d19dcf07e61e96eaff277f3f1b41a802aee2031c561d63012d99b1f2ed51467e resourceVersion: "14055" uid: 58f58ce0-33c1-40e6-82e9-203f17ada00e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:bd3de4798eadff692d5419649a6ba8eca9914d6fdb51d150d669643645898104 size: 36581100 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8c19addc28dd2002288fa9a90333e5048784868aeb080286355c2d6731715d4a size: 1745 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:59f7a530b4030b4e0863fd682a8037648ffae5246a8b56b236c01e25f9526ec0 size: 177830421 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:57792bf6bcdf0f6f01f10a3a49f34f0c26792350151e7cd45b2a833c4fbcfb7f size: 794525 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - usage Env: - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1.redhat-00001 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - 
S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift-rhel8 - FUSE_KARAF_IMAGE_VERSION=1.10 - KARAF_FRAMEWORK_VERSION=4.2.12.fuse-7_10_2-00002-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Hostname: 877ccd18b302 Image: b0c0979fe11582f33509094a2436aa5f81c7a37444cfb3e268f9b721463aa1e9 Labels: architecture: x86_64 build-date: 2022-06-15T18:47:17.496427 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-rhel-8-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.karaf: 4.2.12.fuse-7_10_2-00002-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "26.1655314764" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift-rhel8/images/1.10-26.1655314764 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: d9a489d2f70213966675d5a84d68c22707386be8 vcs-type: git vendor: Red Hat, Inc. 
version: "1.10" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1.redhat-00001 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift-rhel8 - FUSE_KARAF_IMAGE_VERSION=1.10 - KARAF_FRAMEWORK_VERSION=4.2.12.fuse-7_10_2-00002-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Hostname: 877ccd18b302 Image: sha256:0b9bf9c45a550a1bee13e260db6a1b9c2085682d834d26a5c68144950baabcd7 Labels: architecture: x86_64 build-date: 2022-06-15T18:47:17.496427 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-rhel-8-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.karaf: 4.2.12.fuse-7_10_2-00002-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "26.1655314764" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift-rhel8/images/1.10-26.1655314764 usage: 
https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: d9a489d2f70213966675d5a84d68c22707386be8 vcs-type: git vendor: Red Hat, Inc. version: "1.10" User: "185" WorkingDir: /home/jboss Created: "2022-06-15T18:48:01Z" DockerVersion: 1.13.1 Id: sha256:93093892a482ac1f257f578d028aa1f9d1777a8d33cf930ed19c85878d140435 Size: 215216562 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift-rhel8@sha256:d212c0ad7ec7b340beff1776c0216ea8c0aa66423538a84910c36e00db2366b5 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:d212c0ad7ec7b340beff1776c0216ea8c0aa66423538a84910c36e00db2366b5 resourceVersion: "14019" uid: 39bfb7ec-b9b6-420e-97f3-af659b115a13 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a5be390fa01f3f78ea6913ede564517ee9621c40efa71a3466f9f9e72d79a5fd size: 20421583 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - NAME=nginx - NGINX_VERSION=1.26 - NGINX_SHORT_VER=126 - VERSION=0 - SUMMARY=Platform for running nginx 1.26 or building nginx-based application - DESCRIPTION=Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.26 daemon. The image can be used as a base image for other applications based on nginx 1.26 web server. Nginx server image can be extended using source-to-image tool. - NGINX_CONFIGURATION_PATH=/opt/app-root/etc/nginx.d - NGINX_CONF_PATH=/etc/nginx/nginx.conf - NGINX_DEFAULT_CONF_PATH=/opt/app-root/etc/nginx.default.d - NGINX_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/nginx - NGINX_APP_ROOT=/opt/app-root - NGINX_LOG_PATH=/var/log/nginx - NGINX_PERL_MODULE_PATH=/opt/app-root/etc/perl ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T20:11:35Z" com.redhat.component: nginx-126-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.26 daemon. The image can be used as a base image for other applications based on nginx 1.26 web server. Nginx server image can be extended using source-to-image tool. 
distribution-scope: public help: For more information visit https://github.com/sclorg/nginx-container io.buildah.version: 1.41.4 io.k8s.description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.26 daemon. The image can be used as a base image for other applications based on nginx 1.26 web server. Nginx server image can be extended using source-to-image tool. io.k8s.display-name: Nginx 1.26 io.openshift.expose-services: 8443:https io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nginx,nginx-126 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/nginx-126 org.opencontainers.image.revision: 5f8ce4a574aabfe3f73e75e8f4b33038def1e044 release: "1760386259" summary: Platform for running nginx 1.26 or building nginx-based application url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi9/nginx-126:latest vcs-ref: 5f8ce4a574aabfe3f73e75e8f4b33038def1e044 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T20:11:50Z" Id: sha256:52b3fb383d8c89dd21390d27bce5bbbd8b629eff408b6069f3f10b74db7736c8 Size: 117522913 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/nginx-126@sha256:d2838047b28f1385d57abc68a907830d48df647d3a06bb6ab155567097d940c7 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:d2838047b28f1385d57abc68a907830d48df647d3a06bb6ab155567097d940c7 resourceVersion: "14061" uid: d04255f3-cd9f-4933-b59c-538573e3966e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:378837c0e24ad4a2e33f0eb3d68dc0c31d9a7dbbd5357d4acafec1d3a7930602 size: 74923740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e17262bc23414bd3c0e9808ad7a87b055fe5afec386da42115a839ea2083d233 size: 1303 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0eeb656bc1e64b6c3ba63f2fa9450feaef3c60159d48eb2171ad1f25f5e655d9 size: 3805266 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:77187f02cbeff1e8de9cdd8e850f300e7267ddf19991dcbc588a498c14df3ff0 size: 70351735 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:08665aa8b0b6412d51fa56dad388c147dd7625d1bfd2b39a7b6e26e58f1b17e0 size: 208478086 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:330e257ad115763faa220fe3358d9d1606b01628ecfa750111a16677ca14eff8 size: 268095081 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:cd1e6817f6d19716cffe8dca60922c869613a8bfbeee9e77817f5ef2a5849e01 size: 285771422 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss-eap-7/eap71-openshift - JBOSS_IMAGE_VERSION=1.3 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - LAUNCH_JBOSS_IN_BACKGROUND=true - JBOSS_PRODUCT=eap - JBOSS_EAP_VERSION=7.1.3.GA - 
PRODUCT_VERSION=7.1.3.GA - JBOSS_HOME=/opt/eap - HTTPS_ENABLE_HTTP2=true - JOLOKIA_VERSION=1.5.0 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - DEFAULT_ADMIN_USERNAME=eapadmin - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - MAVEN_VERSION=3.5 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 3183206101f3 Image: 4ad69aa23cfb9b892e7aa2c52e1c86d14f17b78daf50fa008d176947f0c2d68e Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-08-01T21:57:33.376903 com.redhat.build-host: osbs-cpt-001.ocp.osbs.upshift.eng.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 description: Platform for building and running Apache Camel applications on EAP 7.1 distribution-scope: public io.cekit.version: 2.0.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Cloud Enablement Feedback name: fuse7/fuse-eap-openshift org.concrt.version: 1.4.1 org.jboss.product: eap org.jboss.product.eap.version: 7.1.3.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.1.3.GA release: "15.1533128084" summary: Platform for building and running Apache Camel applications on EAP 7.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.0-15.1533128084 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: 13dc6bd54a7966b1a10762193c0f65690991c351 vcs-type: git vendor: Red Hat, Inc. 
version: "1.0" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss-eap-7/eap71-openshift - JBOSS_IMAGE_VERSION=1.3 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - LAUNCH_JBOSS_IN_BACKGROUND=true - JBOSS_PRODUCT=eap - JBOSS_EAP_VERSION=7.1.3.GA - PRODUCT_VERSION=7.1.3.GA - JBOSS_HOME=/opt/eap - HTTPS_ENABLE_HTTP2=true - JOLOKIA_VERSION=1.5.0 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - DEFAULT_ADMIN_USERNAME=eapadmin - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - MAVEN_VERSION=3.5 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 3183206101f3 Image: sha256:4ef9eecb5632e9bf83b9d40c895b92f3013f73f90e1b64fede0acdf97e79e06f Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-08-01T21:57:33.376903 com.redhat.build-host: osbs-cpt-001.ocp.osbs.upshift.eng.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 description: Platform for building and running Apache Camel applications on EAP 7.1 distribution-scope: public io.cekit.version: 2.0.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Cloud Enablement Feedback name: fuse7/fuse-eap-openshift org.concrt.version: 1.4.1 org.jboss.product: eap org.jboss.product.eap.version: 7.1.3.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.1.3.GA release: "15.1533128084" summary: Platform for building and running Apache Camel applications on EAP 7.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.0-15.1533128084 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: 13dc6bd54a7966b1a10762193c0f65690991c351 vcs-type: git vendor: Red Hat, Inc. 
version: "1.0" User: "185" WorkingDir: /home/jboss Created: "2018-08-01T21:58:39Z" DockerVersion: 1.12.6 Id: sha256:b50c5d57a003d8a0559558bdb58c1fd491474308d8ede5e0349ff59e7d922435 Size: 911433251 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift@sha256:d2e75dac02681ed5aded89115b736ba5e83011294686cd6d04780aebffc0ff5d kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:d2e75dac02681ed5aded89115b736ba5e83011294686cd6d04780aebffc0ff5d resourceVersion: "14018" uid: 34bc2e21-d4d9-400e-8c71-6df54bf63884 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4752687a61a97d6f352ae62c381c87564bcb2f5b6523a05510ca1fb60d640216 size: 36442442 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0344366a246a0f7590c2bae4536c01f15f20c6d802b4654ce96ac81047bc23f3 size: 1740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:1df969f83266d1d6babc933f3905ecf8ed6121632b2291d337ec8825c3287228 size: 86345800 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.13 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: fd16d6f2e774 Image: ed43a9acbbcac68ad808bc163372b006bb9b47bffe2498c136a3469958f9e1ed Labels: architecture: x86_64 build-date: 2022-06-15T16:16:56.687745 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "1.1655306380" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.13-1.1655306380 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 9a9c6a4fdb98eb95ee6b0c854104d865f0f7ccea vcs-type: git vendor: Red Hat, Inc. 
version: "1.13" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.13 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: fd16d6f2e774 Image: sha256:01b3c3845ea792b83a73e0ca7c751d608c72213b4e8a2d6e3028d46fe877e3ef Labels: architecture: x86_64 build-date: 2022-06-15T16:16:56.687745 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "1.1655306380" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.13-1.1655306380 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 9a9c6a4fdb98eb95ee6b0c854104d865f0f7ccea vcs-type: git vendor: Red Hat, Inc. 
version: "1.13" User: "185" WorkingDir: /home/jboss Created: "2022-06-15T16:19:38Z" DockerVersion: 1.13.1 Id: sha256:eadf411b954405e4febecea4f7e9b7e2da59a2f9f33f8cfebe4c11522ade1c23 Size: 122795273 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:d2f17aaf2f871fda5620466d69ac67b9c355c0bae5912a1dbef9a51ca8813e50 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:d2f17aaf2f871fda5620466d69ac67b9c355c0bae5912a1dbef9a51ca8813e50 resourceVersion: "14153" uid: 34350304-813c-4ea9-905e-e269ac2d99f5 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f91ed22a103a436900108dad5caaa0ee9f83605296df3d5164ec58bc5ab37d7b size: 140292386 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PLATFORM=el9 - NODEJS_VERSION=22 - NPM_RUN=start - NAME=nodejs - NPM_CONFIG_PREFIX=/opt/app-root/src/.npm-global - PATH=/opt/app-root/src/node_modules/.bin/:/opt/app-root/src/.npm-global/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - CNB_STACK_ID=com.redhat.stacks.ubi9-nodejs-20 - CNB_USER_ID=1001 - CNB_GROUP_ID=0 - SUMMARY=Platform for building and running Node.js 22 applications - DESCRIPTION=Node.js 22 available as container is a base platform for building and running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-28T04:26:51Z" com.redhat.component: nodejs-22-container com.redhat.deployments-dir: /opt/app-root/src com.redhat.dev-mode: DEV_MODE:false com.redhat.dev-mode.port: DEBUG_PORT:5858 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Node.js 22 available as container is a base platform for building and running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. 
distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-nodejs-container io.buildah.version: 1.41.4 io.buildpacks.stack.id: com.redhat.stacks.ubi9-nodejs-20 io.k8s.description: Node.js 22 available as container is a base platform for building and running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. io.k8s.display-name: Node.js 22 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nodejs,nodejs22 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/nodejs-22 org.opencontainers.image.revision: 1c0398f9866ff799beb429e1f088ff3bd30e0431 release: "1761625559" summary: Platform for building and running Node.js 22 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi9/nodejs-22:latest vcs-ref: 1c0398f9866ff799beb429e1f088ff3bd30e0431 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-28T04:27:05Z" Id: sha256:7e94f6aafe4801710b4ecc6c784eb2bd38100ac8930520765981c0c3321b44c7 Size: 237392889 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/nodejs-22@sha256:d4bd4b422ffcfd52334f42f372d511d68496991b47a24f7dacfb032edb250475 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:d4bd4b422ffcfd52334f42f372d511d68496991b47a24f7dacfb032edb250475 resourceVersion: "14095" uid: c66ff5a6-e757-4c4c-875f-8f6e57a9b299 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e74659e3e033616f4f0731f4c22814ff4543cb3c1b85a05f6484647b4fea7b3d size: 136155585 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d4ef397ac9c5a3062631f4563f08fe2a5f2f2f63c78082be9a57a2961ca9f577 size: 233168554 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_HTTPS=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - 
JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.17 - LANG=C.utf8 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift-jdk11-rhel8 - FUSE_JAVA_IMAGE_VERSION=1.12 - JOLOKIA_VERSION=1.7.2.redhat-00002 - PROMETHEUS_JMX_EXPORTER_VERSION=0.18.0.redhat-00001 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i:/usr/local/s2i - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Labels: architecture: x86_64 build-date: 2024-05-23T17:52:16 com.redhat.component: fuse-java-openshift-jdk11-rhel-8-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.fabric8.s2i.version.jolokia: 1.7.2.redhat-00002 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.18.0.redhat-00001 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift-jdk11-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "18.1716485727" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift-jdk11-rhel8/images/1.12-18.1716485727 usage: https://access.redhat.com/documentation/en-us/openjdk/11/html/using_openjdk_11_source-to-image_for_openshift/index vcs-ref: 980be2285f68b4a7eb9a182c894bd0624ae8ba9b vcs-type: git vendor: Red Hat, Inc. 
version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-05-23T17:55:58Z" Id: sha256:2611e0e61605a190ac6fd2304ab7713a3961e12d9909766fb43e7eab4790894b Size: 408672834 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift-jdk11-rhel8@sha256:d64489de1b4cb65c6d6de5add7ad6e9f4a6817c8e62987ad5859814085beac06 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:d64489de1b4cb65c6d6de5add7ad6e9f4a6817c8e62987ad5859814085beac06 resourceVersion: "13361" uid: e4845dec-de0f-4be3-97f2-ae50522c6ab7 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c43687042a41aad69fc526985ef2b82012c011db7e0e26faba4fc860ad32d88e size: 75837780 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2b7b014ba1b80abb29391141385bd32668571313647317d1d64d8b5cebb1f228 size: 1331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2825299765472d0b62c1ed19ebb564a8a191b88ce49639471a274d03e7f9151e size: 3910026 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8df146e29e789eb3e8bec37172cca00cda60cf40f6924dda00379b283e2ce6db size: 85123374 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:850e610772239a316cb32b95f0db47b4f86d97f53de39c62741f70e618799232 size: 22750438 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - usage Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.4 - JOLOKIA_VERSION=1.5.0.redhat-1 - KARAF_FRAMEWORK_VERSION=4.2.0.fuse-740027-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 8ea581222c6a Image: bb51c0358eb39adbae046487f6e4c23a7787741b6006759fd05f7cb8caacd754 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-09-04T11:36:32.262209 com.redhat.build-host: cpt-1004.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.karaf: 4.2.0.fuse-740027-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: 
image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "5.1567588143" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.4-5.1567588143 vcs-ref: 33e2d89f70a9779f7acf9bafde91ce363a7147ea vcs-type: git vendor: Red Hat, Inc. version: "1.4" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.4 - JOLOKIA_VERSION=1.5.0.redhat-1 - KARAF_FRAMEWORK_VERSION=4.2.0.fuse-740027-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 8ea581222c6a Image: sha256:4cc8443c7956b69a425ddc4f570ac545138e4dfb4863964a1c5d3107c15a2cb1 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-09-04T11:36:32.262209 com.redhat.build-host: cpt-1004.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.karaf: 4.2.0.fuse-740027-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "5.1567588143" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.4-5.1567588143 vcs-ref: 33e2d89f70a9779f7acf9bafde91ce363a7147ea vcs-type: git vendor: Red Hat, Inc. 
version: "1.4" User: "185" WorkingDir: /home/jboss Created: "2019-09-04T11:39:05Z" DockerVersion: 1.13.1 Id: sha256:b493d8f46bf13e5356aab08bd2bfeda4e457fe06d5605cea87cc49fc9f6dd58f Size: 187629507 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:d88de3935a4ad6a2a8a04a49733d1e7cba14688cd5c8081b78538a86fc499d5a kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:d88de3935a4ad6a2a8a04a49733d1e7cba14688cd5c8081b78538a86fc499d5a resourceVersion: "14030" uid: 1b652439-0c90-45a6-a6ce-dd36fab1f991 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:fb81340b71482eacd09e8c4908880e35352653ddb5edb14e1fd8b12c06bdac9e size: 76548605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:76e5e9028347afe28f3682b2d663ac154c0c9523fe03874b18ad8ad5c6e931f9 size: 1817 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:29c0ad2914112972d23ebf1b10ae9c04c800344caa8294ab7911d249923c6f4e size: 400776328 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0a4a72eedab58ee3cfdd8432e34b85da0be50f6c39e649617934b755313a52e5 size: 632585890 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - EAP_FULL_GROUPID=org.jboss.eap - JBOSS_EAP_VERSION=7.4.2 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.4.2 - WILDFLY_VERSION=7.4.2.GA-redhat-00002 - SSO_FORCE_LEGACY_SECURITY=true - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.8.Final - GALLEON_WILDFLY_VERSION=5.1.2.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - 
GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - S2I_FP_VERSION=23.0.0.Final - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - DEFAULT_ADMIN_USERNAME=eapadmin - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - OFFLINER_VERSION=1.6 - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap74-openjdk8-openshift-rhel7 - JBOSS_IMAGE_VERSION=7.4.2 - WILDFLY_CAMEL_VERSION=5.8.0.fuse-7_10_2-00001-redhat-00005 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: be161c538685 Image: f15e1a7ef28f62202923f0c4571a60a22e4773abe33e14c68c5fa30644dfc779 Labels: architecture: x86_64 build-date: 2022-06-15T16:48:32.571018 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Camel applications on EAP 7.4 distribution-scope: public io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.4 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Thomas Diesler name: fuse7/fuse-eap-openshift-jdk8-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.4.2 org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.4.2 release: "31.1655306691" summary: Platform for building and running Apache Camel applications on EAP 7.4 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift-jdk8-rhel7/images/1.10-31.1655306691 vcs-ref: 0e0c78a6b40a9b0c638b08375587fcb368bbb7de vcs-type: git vendor: Red Hat, Inc. 
version: "1.10" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - EAP_FULL_GROUPID=org.jboss.eap - JBOSS_EAP_VERSION=7.4.2 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.4.2 - WILDFLY_VERSION=7.4.2.GA-redhat-00002 - SSO_FORCE_LEGACY_SECURITY=true - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.8.Final - GALLEON_WILDFLY_VERSION=5.1.2.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - S2I_FP_VERSION=23.0.0.Final - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - DEFAULT_ADMIN_USERNAME=eapadmin - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - 
AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - OFFLINER_VERSION=1.6 - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap74-openjdk8-openshift-rhel7 - JBOSS_IMAGE_VERSION=7.4.2 - WILDFLY_CAMEL_VERSION=5.8.0.fuse-7_10_2-00001-redhat-00005 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: be161c538685 Image: sha256:d51a7aa81bc636826a578f96ce9d30132ce93881c49d988dc90ce013a20de91b Labels: architecture: x86_64 build-date: 2022-06-15T16:48:32.571018 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Camel applications on EAP 7.4 distribution-scope: public io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.4 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Thomas Diesler name: fuse7/fuse-eap-openshift-jdk8-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.4.2 org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.4.2 release: "31.1655306691" summary: Platform for building and running Apache Camel applications on EAP 7.4 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift-jdk8-rhel7/images/1.10-31.1655306691 vcs-ref: 0e0c78a6b40a9b0c638b08375587fcb368bbb7de vcs-type: git vendor: Red Hat, Inc. 
version: "1.10" User: "185" WorkingDir: /home/jboss Created: "2022-06-15T16:50:19Z" DockerVersion: 1.13.1 Id: sha256:641183ac81d50ec5ed2f3940403ff4023133e0dac0cb762fcd9d2772b12de418 Size: 1109925063 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift-jdk8-rhel7@sha256:d8d642e25fc863ed0ee603addee008db975b1c6b73a02a1e5b4bc446fbf5ec76 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:d8d642e25fc863ed0ee603addee008db975b1c6b73a02a1e5b4bc446fbf5ec76 resourceVersion: "14022" uid: f9f16912-cbbf-496f-a92b-2567b64a5176 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:34cbb242d65bd3b5ea98fd0bf5be8ce2b2316994693b130adb043cd6537ee9ca size: 76239722 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0a4b6512aa42577405f0f324407ee3140e668e9bb470c3fb472e11266482468f size: 1414 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3822f045e5c5f05ab31bc7884c10f4b349f1e7f53f6cab701196045f2b1acac1 size: 349609792 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/datagrid/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.6.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.6 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.6.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: e1afd63c27cb Image: 51ef416e309aac0c4832a4ec44b115ed1b6e1a681e77c41ee7901080de344d84 Labels: architecture: x86_64 build-date: 2020-08-05T02:32:18.482432 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public 
io.cekit.version: 3.7.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.6.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.6.GA release: "3" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.6-3 vcs-ref: ad8cdc7342bc6d7fe3e4da0467c51cd59818b54e vcs-type: git vendor: Red Hat, Inc. version: "1.6" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_EAP_S2I_MODULE=/opt/jboss/container/eap/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.5.0 - AB_PROMETHEUS_ENABLE=false - AB_PROMETHEUS_JMX_EXPORTER_PORT=9779 - DEFAULT_ADMIN_USERNAME=jdgadmin - JBOSS_DATAGRID_VERSION=7.3.6.GA - JBOSS_HOME=/opt/datagrid - JBOSS_IMAGE_NAME=jboss-datagrid-7/datagrid73-openshift - JBOSS_IMAGE_VERSION=1.6 - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JBOSS_PRODUCT=datagrid - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.3.6.GA ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} 11211/tcp: {} 11222/tcp: {} 11333/tcp: {} Hostname: e1afd63c27cb Image: sha256:1d86aa65659094520db0c6fa32cff6b7742a5611f61932da1b198aa3fa46e45b Labels: architecture: x86_64 build-date: 2020-08-05T02:32:18.482432 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-datagrid-7-datagrid73-openshift-container com.redhat.deployments-dir: /opt/datagrid/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Data Grid 7.3 for OpenShift container image distribution-scope: public io.cekit.version: 3.7.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Provides a scalable in-memory distributed database designed for fast access to large volumes of data. 
io.k8s.display-name: JBoss Data Grid 7.3 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: datagrid,java,jboss,xpaas name: jboss-datagrid-7/datagrid73-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/datagrid/standalone/deployments org.jboss.product: datagrid org.jboss.product.datagrid.version: 7.3.6.GA org.jboss.product.openjdk.version: "11.0" org.jboss.product.version: 7.3.6.GA release: "3" summary: Red Hat JBoss Data Grid 7.3 for OpenShift container image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-datagrid-7/datagrid73-openshift/images/1.6-3 vcs-ref: ad8cdc7342bc6d7fe3e4da0467c51cd59818b54e vcs-type: git vendor: Red Hat, Inc. version: "1.6" User: "185" WorkingDir: /home/jboss Created: "2020-08-05T02:40:36Z" DockerVersion: 1.13.1 Id: sha256:7c793a770769ee3c6fc74a6d6c8c3d590a24674a554cc3ec01fda70f1f6722d6 Size: 425858697 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-datagrid-7/datagrid73-openshift@sha256:da558fa9ad7c357ed794eb549ac12a799eab97951f2e3cbc9501e413a348119a kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:02Z" name: sha256:da558fa9ad7c357ed794eb549ac12a799eab97951f2e3cbc9501e413a348119a resourceVersion: "13902" uid: e45bd5c2-e7fb-45d6-9ac9-52821f410679 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:751bf1af528874dba437014af54078013e46b2eca91e82aab200d452b0165af9 size: 76529550 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:71bd5e95c80acea5839e4c515a585f43158bffd718c2be1795b4825b043975a3 size: 1565 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:1340551e53a0ce649afcd25813bdc14222bc37d0bb3e7b829da3b7db1038c58e size: 109759662 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - 
JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.11 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 1c8f39c8f8b8 Image: dc17dbe64962e44e928d2232fe07e31b03e3521d15d6085ac633c36bd193653a Labels: architecture: x86_64 build-date: 2022-03-28T13:32:16.229370 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1648472891" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.11-1.1648472891 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 94c118a151e3ff78308dff9b2e00f847fa40acec vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.11 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 1c8f39c8f8b8 Image: sha256:c7e9b52fd53b0088d71ec229796dec7e6340022b55033abbeaf5a95514234e89 Labels: architecture: x86_64 build-date: 
2022-03-28T13:32:16.229370 com.redhat.build-host: cpt-1005.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1.1648472891" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.11-1.1648472891 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 94c118a151e3ff78308dff9b2e00f847fa40acec vcs-type: git vendor: Red Hat, Inc. version: "1.11" User: "185" WorkingDir: /home/jboss Created: "2022-03-28T13:36:36Z" DockerVersion: 1.13.1 Id: sha256:4e13cb8b2a169481a53ce6cc4e1e12434dac7d7171285331fbb914c88931f146 Size: 186298319 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:db3f5192237bfdab2355304f17916e09bc29d6d529fdec48b09a08290ae35905 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:db3f5192237bfdab2355304f17916e09bc29d6d529fdec48b09a08290ae35905 resourceVersion: "14162" uid: 25dfdf2e-0d42-48c5-a49b-3a7a2d54cc05 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8e663e919f6cd0805d39d14202a5c0b789e7df3c3051c54170b428086a1c9a91 size: 76431158 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:1e6175de2c956530fa18c8a30722bf828d70a720afa2f2e481bfb4c520963c91 size: 1550 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b82cf656d66b57b9fb0c59738c187b4e18d6d595000ca270a8a82ceed87a4333 size: 109471667 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - 
JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: f1d2c1bacaf9 Image: e79f508f51bdd8e69bb312a4312de33c167d592a105ed97de8c7d0772cbff6fa Labels: architecture: x86_64 build-date: 2021-07-21T13:17:35.687663 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.cekit.version: 3.6.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "30.1626872912" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.8-30.1626872912 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 9deec894b992116cb4d7a0a9f656b48f14f63ef5 vcs-type: git vendor: Red Hat, Inc. 
version: "1.8" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: f1d2c1bacaf9 Image: sha256:f041e64b091906d78a657370488791753e15dc123ba389392358b74cd1151ed6 Labels: architecture: x86_64 build-date: 2021-07-21T13:17:35.687663 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.cekit.version: 3.6.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "30.1626872912" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.8-30.1626872912 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 9deec894b992116cb4d7a0a9f656b48f14f63ef5 vcs-type: git vendor: Red Hat, Inc. 
version: "1.8" User: "185" WorkingDir: /home/jboss Created: "2021-07-21T13:24:07Z" DockerVersion: 1.13.1 Id: sha256:11c20bcfd2f3b136893b899f7f1d71bc0434ea3a3bcec6b25c08ae4a7af220ac Size: 185911913 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:dbe9905fe2b20ed30b0e2d64543016fa9c145eeb5a678f720ba9d2055f0c9f88 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:dbe9905fe2b20ed30b0e2d64543016fa9c145eeb5a678f720ba9d2055f0c9f88 resourceVersion: "14178" uid: 297f13a5-f23e-47f3-bfc6-b46db51a7722 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:378837c0e24ad4a2e33f0eb3d68dc0c31d9a7dbbd5357d4acafec1d3a7930602 size: 74923740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e17262bc23414bd3c0e9808ad7a87b055fe5afec386da42115a839ea2083d233 size: 1303 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0eeb656bc1e64b6c3ba63f2fa9450feaef3c60159d48eb2171ad1f25f5e655d9 size: 3805266 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:77187f02cbeff1e8de9cdd8e850f300e7267ddf19991dcbc588a498c14df3ff0 size: 70351735 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c74ad84907a7cc736fea212bfcfff402c892a0a34e31f31b117ce8de7ed7f84d size: 25587180 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.4 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JAVA_DATA_DIR=/deployments/data - JOLOKIA_VERSION=1.5.0 - MAVEN_VERSION=3.5 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 3183206101f3 Image: 3320f15730e3d95d98b084a00d040f8e97053ec1aa0858da207a0f4b332871b8 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-08-01T18:24:20.705731 com.redhat.build-host: osbs-cpt-001.ocp.osbs.upshift.eng.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 2.0.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Cloud Enablement Feedback name: redhat-openjdk-18/openjdk18-openshift org.concrt.version: 2.0.0 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "6.1533127995" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: 
https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.4-6.1533127995 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: f9975670f0ca1779afb4d9c0b36a388af39d00ae vcs-type: git vendor: Red Hat, Inc. version: "1.4" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.4 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JAVA_DATA_DIR=/deployments/data - JOLOKIA_VERSION=1.5.0 - MAVEN_VERSION=3.5 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 3183206101f3 Image: sha256:6a0570f2e2c7fcfbcce59d977caa7b10135131c3b13eb44225b6266bb1860eaf Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-08-01T18:24:20.705731 com.redhat.build-host: osbs-cpt-001.ocp.osbs.upshift.eng.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 2.0.0 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Cloud Enablement Feedback name: redhat-openjdk-18/openjdk18-openshift org.concrt.version: 2.0.0 org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "6.1533127995" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.4-6.1533127995 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: f9975670f0ca1779afb4d9c0b36a388af39d00ae vcs-type: git vendor: Red Hat, Inc. 
version: "1.4" User: "185" WorkingDir: /home/jboss Created: "2018-08-01T18:26:00Z" DockerVersion: 1.12.6 Id: sha256:7b46b9b6f72aec4ee9638e2e13c508df2e7db860a6eaa9475a6fbb7cb42f2928 Size: 174675038 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:dc84fed0f6f40975a2277c126438c8aa15c70eeac75981dbaa4b6b853eff61a6 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:dc84fed0f6f40975a2277c126438c8aa15c70eeac75981dbaa4b6b853eff61a6 resourceVersion: "14170" uid: aba4a7f3-49f7-4123-a54a-edb01fe0daf1 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f84507046cf9a3a75793faf6f9259d563ae08337fd9eba430318be22110ab718 size: 69296290 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - NAME=nginx - NGINX_VERSION=1.24 - NGINX_SHORT_VER=124 - VERSION=0 - SUMMARY=Platform for running nginx 1.24 or building nginx-based application - DESCRIPTION=Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.24 daemon. The image can be used as a base image for other applications based on nginx 1.24 web server. Nginx server image can be extended using source-to-image tool. - NGINX_CONFIGURATION_PATH=/opt/app-root/etc/nginx.d - NGINX_CONF_PATH=/etc/nginx/nginx.conf - NGINX_DEFAULT_CONF_PATH=/opt/app-root/etc/nginx.default.d - NGINX_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/nginx - NGINX_APP_ROOT=/opt/app-root - NGINX_LOG_PATH=/var/log/nginx - NGINX_PERL_MODULE_PATH=/opt/app-root/etc/perl ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-21T20:12:54Z" com.redhat.component: nginx-124-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.24 daemon. The image can be used as a base image for other applications based on nginx 1.24 web server. Nginx server image can be extended using source-to-image tool. 
distribution-scope: public help: For more information visit https://github.com/sclorg/nginx-container io.buildah.version: 1.41.4 io.k8s.description: Nginx is a web server and a reverse proxy server for HTTP, SMTP, POP3 and IMAP protocols, with a strong focus on high concurrency, performance and low memory usage. The container image provides a containerized packaging of the nginx 1.24 daemon. The image can be used as a base image for other applications based on nginx 1.24 web server. Nginx server image can be extended using source-to-image tool. io.k8s.display-name: Nginx 1.24 io.openshift.expose-services: 8443:https io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nginx,nginx-124 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/nginx-124 org.opencontainers.image.revision: fdf2a6fd7c37b9afe85e8fb9c97cce8d17a5b913 release: "1761077540" summary: Platform for running nginx 1.24 or building nginx-based application url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build ubi8/nginx-124:latest vcs-ref: fdf2a6fd7c37b9afe85e8fb9c97cce8d17a5b913 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-21T20:13:13Z" Id: sha256:ad8014cfc43d3e0063b5ee0ebcbb4d2e54b1de52bd97cc0e0b517c1a11f43036 Size: 164856009 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/nginx-124@sha256:de3e8e869ab92ab5ad19919034b845b3fb01c7e57d49caf5899b68c0a4461c1b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:de3e8e869ab92ab5ad19919034b845b3fb01c7e57d49caf5899b68c0a4461c1b resourceVersion: "14058" uid: 650d1e06-01ef-49e5-b649-45fcab3c2f89 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:db0f4cd412505c5cc2f31cf3c65db80f84d8656c4bfa9ef627a6f532c0459fc4 size: 78359137 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7e3624512448126fd29504b9af9bc034538918c54f0988fb08c03ff7a3a9a4cb size: 1789 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b9404c765c209f7b6c036000e21903a15cf69485d699f09967f0aba458381d8b size: 106246762 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - DEFAULT_ADMIN_USERNAME=eapadmin - HTTPS_ENABLE_HTTP2=true - JBOSS_HOME=/opt/eap - JBOSS_IMAGE_NAME=jboss-eap-7/eap-xp3-openjdk11-runtime-openshift-rhel8 - JBOSS_IMAGE_VERSION=3.0 - 
JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api - LAUNCH_JBOSS_IN_BACKGROUND=true - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - SSO_FORCE_LEGACY_SECURITY=true ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 7323ecf6dc56 Image: 9cb50e53a79e8f0f3435761b450f6db4429fad23b42f97ddbf63a46cd062be91 Labels: architecture: x86_64 build-date: 2022-08-03T11:41:15.058748 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-eap-xp3-openjdk11-runtime-openshift-rhel8-container com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Enterprise Application Platform XP 3.0 OpenShift runtime image with OpenJDK 11 distribution-scope: public io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.k8s.description: Base runtime image to run EAP XP server and application io.k8s.display-name: JBoss EAP XP runtime image io.openshift.expose-services: 8080:http io.openshift.tags: javaee,eap,eap7 maintainer: Red Hat name: jboss-eap-7/eap-xp3-openjdk11-runtime-openshift-rhel8 org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "20" summary: Red Hat JBoss Enterprise Application Platform XP 3.0 OpenShift runtime image with OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-eap-7/eap-xp3-openjdk11-runtime-openshift-rhel8/images/3.0-20 vcs-ref: 0abf15128641d27740a7ef71f0a5f1207b0218ef vcs-type: git vendor: Red Hat, Inc. version: "3.0" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - DEFAULT_ADMIN_USERNAME=eapadmin - HTTPS_ENABLE_HTTP2=true - JBOSS_HOME=/opt/eap - JBOSS_IMAGE_NAME=jboss-eap-7/eap-xp3-openjdk11-runtime-openshift-rhel8 - JBOSS_IMAGE_VERSION=3.0 - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api - LAUNCH_JBOSS_IN_BACKGROUND=true - MICROPROFILE_CONFIG_DIR_ORDINAL=500 - SSO_FORCE_LEGACY_SECURITY=true ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: 7323ecf6dc56 Image: sha256:1fb08a410caa49e5f8b17f4230704c6ace838c69797493c2c113c595b1799de6 Labels: architecture: x86_64 build-date: 2022-08-03T11:41:15.058748 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: jboss-eap-xp3-openjdk11-runtime-openshift-rhel8-container com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Enterprise Application Platform XP 3.0 OpenShift runtime image with OpenJDK 11 distribution-scope: public io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.k8s.description: Base runtime image to run EAP XP server and application io.k8s.display-name: JBoss EAP XP runtime image io.openshift.expose-services: 8080:http 
io.openshift.tags: javaee,eap,eap7 maintainer: Red Hat name: jboss-eap-7/eap-xp3-openjdk11-runtime-openshift-rhel8 org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "20" summary: Red Hat JBoss Enterprise Application Platform XP 3.0 OpenShift runtime image with OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-eap-7/eap-xp3-openjdk11-runtime-openshift-rhel8/images/3.0-20 vcs-ref: 0abf15128641d27740a7ef71f0a5f1207b0218ef vcs-type: git vendor: Red Hat, Inc. version: "3.0" User: "185" WorkingDir: /home/jboss Created: "2022-08-03T11:43:29Z" DockerVersion: 1.13.1 Id: sha256:21dea2b05afe0affbdf8aedb6249b73b1f1f903144eebb827c027f094e9ad094 Size: 184613957 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-eap-7/eap-xp3-openjdk11-runtime-openshift-rhel8@sha256:deaaa8255efc84a6a7fd4d1b6e5593eaab6c2753e1f2f84a5b83d4e047f03f3f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:deaaa8255efc84a6a7fd4d1b6e5593eaab6c2753e1f2f84a5b83d4e047f03f3f resourceVersion: "13413" uid: 2fa11596-7557-458c-9302-01f9be7cb829 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c85ac87d44df4b64d7c273886fc5aed55a28422df33dcb641884ffa419db218 size: 76240885 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:51e9f237b750efcda2d5755785cdb8dd080d51585ae35d368e4f9b29a11b1994 size: 1329 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5ccfe6e48f4f71d761b34c61586ac1808cca10bf7e543a3666b802f38625c5a9 size: 4013312 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35875365be086462ee5d275b62cfc13046029a9a084880c18583b932a5b23632 size: 85346475 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3a3fb8d8a857d4b21711172a18b68ff8739599f21ba0264b9caad55100e36571 size: 15178667 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - usage Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.5 - JOLOKIA_VERSION=1.6.2.redhat-00002 - KARAF_FRAMEWORK_VERSION=4.2.6.fuse-750016-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 534540aef153 Image: 4ceea0094875275ec73d3687995a675070b48d2d5714ed5d7e33145a368afa76 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-01-27T12:38:41.799969 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: 
https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.karaf: 4.2.6.fuse-750016-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "14.1580118141" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.5-14.1580118141 vcs-ref: 415e7fbfd545dfa7a134c6e095a5c34f2a0a22a2 vcs-type: git vendor: Red Hat, Inc. version: "1.5" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/opt/rh/rh-maven35/root/usr/bin:/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.5 - JOLOKIA_VERSION=1.6.2.redhat-00002 - KARAF_FRAMEWORK_VERSION=4.2.6.fuse-750016-redhat-00001 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - 'MANPATH=/opt/rh/rh-maven35/root/usr/share/man:' - PYTHONPATH=/opt/rh/rh-maven35/root/usr/lib/python2.7/site-packages ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 534540aef153 Image: sha256:681bf5e54f14d808c4a60d22bc9a135c1a01f8f73761ed5d29d9cda095ba5533 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2020-01-27T12:38:41.799969 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.6.2.redhat-00002 io.fabric8.s2i.version.karaf: 4.2.6.fuse-750016-redhat-00001 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Otavio Piske name: fuse7/fuse-karaf-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "14.1580118141" summary: Platform for building 
and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.5-14.1580118141 vcs-ref: 415e7fbfd545dfa7a134c6e095a5c34f2a0a22a2 vcs-type: git vendor: Red Hat, Inc. version: "1.5" User: "185" WorkingDir: /home/jboss Created: "2020-01-27T12:40:18Z" DockerVersion: 1.13.1 Id: sha256:85deacd9f44c18327242bef91783c68732fe6a82ccefdc70f9d5f957422c94e9 Size: 180787247 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:df2b4b8e8f4b7b183c443653b6f11ac529d7c6421972078a464b765ab31b075d kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:df2b4b8e8f4b7b183c443653b6f11ac529d7c6421972078a464b765ab31b075d resourceVersion: "14033" uid: 481f6e27-da3d-46aa-a43e-0b070be924d5 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e0348fdb2685077d22116d294a90a253709aba78815882a57fcc536b22dcae2f size: 39488293 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:689ca74a7d3bce7261a934f55c5f2cb819b684beaf0330ead965091221c3f4e2 size: 91664214 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-11-runtime - JBOSS_IMAGE_VERSION=1.21 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2025-01-08T12:07:07 com.redhat.component: openjdk-11-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 11 runtime distribution-scope: public io.buildah.version: 1.33.8 io.cekit.version: 4.13.0.dev0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" org.opencontainers.image.documentation: https://rh-openjdk.github.io/redhat-openjdk-containers/ release: "1.1736337918" summary: Image for Red Hat OpenShift providing OpenJDK 11 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11-runtime/images/1.21-1.1736337918 usage: https://rh-openjdk.github.io/redhat-openjdk-containers/ vcs-ref: c7e21327e7823f0c54fbed52da81fa426d3d6d59 vcs-type: git vendor: Red Hat, Inc. 
version: "1.21" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2025-01-08T12:28:07Z" Id: sha256:92f5ca828ad33dd55c896ad2b4d3820979cf4f14353f219d2f7e6ddc4eac59e5 Size: 131172143 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11-runtime@sha256:df8858f0c01ae1657a14234a94f6785cbb2fba7f12c9d0325f427a3f1284481b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:df8858f0c01ae1657a14234a94f6785cbb2fba7f12c9d0325f427a3f1284481b resourceVersion: "13508" uid: 5cfd3195-ac03-4606-ab75-79c8fb14618d - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c43687042a41aad69fc526985ef2b82012c011db7e0e26faba4fc860ad32d88e size: 75837780 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2b7b014ba1b80abb29391141385bd32668571313647317d1d64d8b5cebb1f228 size: 1331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2825299765472d0b62c1ed19ebb564a8a191b88ce49639471a274d03e7f9151e size: 3910026 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5832c11acc9bb0420072ec62a6cdcea1d8226ed89e430e3086f81bc7866631c2 size: 84374210 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:08887a20d77482db42bf0b3302e0fb5080b0e292086c3ad3bc440a46a0847049 size: 15577091 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - usage Env: - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.3 - JOLOKIA_VERSION=1.5.0.redhat-1 - KARAF_FRAMEWORK_VERSION=4.2.0.fuse-731003-redhat-00003 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 8ea581222c6a Image: 6b6ffd776185a38822468b3667a44cab0e1f21f13a14cda9078cbfff54fcec6c Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-06-28T22:30:18.045503 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.karaf: 4.2.0.fuse-731003-redhat-00003 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Dhiraj Bokde name: fuse7/fuse-karaf-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: 
/deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "10.1561752280" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.3-10.1561752280 vcs-ref: 8f1e45d934761ae48ada4113b985e8b907552e74 vcs-type: git vendor: Red Hat, Inc. version: "1.3" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.3 - JOLOKIA_VERSION=1.5.0.redhat-1 - KARAF_FRAMEWORK_VERSION=4.2.0.fuse-731003-redhat-00003 - PROMETHEUS_JMX_EXPORTER_VERSION=0.3.1.redhat-00006 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 8ea581222c6a Image: sha256:ff5f5f580c891a6d4f7513093335bc83fa2be4316bc21bbe8b85e4df301532d8 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-06-28T22:30:18.045503 com.redhat.build-host: cpt-1008.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.2.1 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.karaf: 4.2.0.fuse-731003-redhat-00003 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.3.1.redhat-00006 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Dhiraj Bokde name: fuse7/fuse-karaf-openshift org.concrt.version: 2.2.1 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "10.1561752280" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.3-10.1561752280 vcs-ref: 8f1e45d934761ae48ada4113b985e8b907552e74 vcs-type: git vendor: Red Hat, Inc. 
version: "1.3" User: "185" WorkingDir: /home/jboss Created: "2019-06-28T22:32:10Z" DockerVersion: 1.13.1 Id: sha256:2d5c68055f82485218766edee2968eae15cef0423947a341f7ead7a75e601153 Size: 179706701 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:e12d4107cfe12b7b9f3817bde90dcb07ff3ee7e3b6482120fec6d37583df727f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:e12d4107cfe12b7b9f3817bde90dcb07ff3ee7e3b6482120fec6d37583df727f resourceVersion: "14029" uid: a03e254d-ddab-41cc-872a-fb1821948c4c - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6208c5a2e205726f3a2cd42a392c5e4f05256850d13197a711000c4021ede87b size: 79073674 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7a18d7eaea2d291c5d580d166e4ceaf15ac961db4ea2abe773a353e088a1b74b size: 121891551 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:69ddf622f141ad37579e8ee0b486f8a4289b4406915985db7082014ec65d31c7 size: 34326547 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jws-5.7/tomcat/bin/launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - CATALINA_OPTS=-Djava.security.egd=file:/dev/./urandom - JBOSS_PRODUCT=webserver - JBOSS_WEBSERVER_VERSION=5.7.2 - JPDA_ADDRESS=8000 - JWS_HOME=/opt/jws-5.7/tomcat - PRODUCT_VERSION=5.7.2 - TOMCAT_VERSION=9.0.62 - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JWS_S2I_MODULE=/opt/jboss/container/jws/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - HOME=/home/jboss - AB_PROMETHEUS_ENABLE=True - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jws-jmx-exporter-config.yaml - AB_PROMETHEUS_JMX_EXPORTER_PORT=9404 - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_IMAGE_NAME=jboss-webserver-5/jws57-openjdk11-rhel8-openshift - JBOSS_IMAGE_VERSION=5.7.2 - STI_BUILDER=jee ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9404/tcp: {} Labels: architecture: x86_64 build-date: 2023-04-26T09:26:53 com.redhat.component: jboss-webserver-57-openjdk11-rhel8-openshift-container com.redhat.deployments-dir: /opt/jws-5.7/tomcat/webapps com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: JPDA_ADDRESS:8000 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK11 on UBI8 distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 4.6.0 
io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running web applications on JBoss Web Server 5.7 with OpenJDK11 - Tomcat v9 io.k8s.display-name: JBoss Web Server 5.7 OpenJDK11 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,tomcat9 maintainer: szappis@redhat.com name: jboss-webserver-5/jws57-openjdk11-rhel8-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/jws-5.7/tomcat/webapps org.jboss.product: webserver-tomcat9 org.jboss.product.openjdk.version: "11" org.jboss.product.version: 5.7.2 org.jboss.product.webserver-tomcat9.version: 5.7.2 release: "8" summary: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK11 on UBI8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-webserver-5/jws57-openjdk11-rhel8-openshift/images/5.7.2-8 vcs-ref: 768e16aeb3ed190e276d819840a2de074fda37d1 vcs-type: git vendor: Red Hat, Inc. version: 5.7.2 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-04-26T09:30:29Z" Id: sha256:7a6b7b88ed8e89e32fc56e35f07608e2357d6fa3e3a996bec2f65c63fb271151 Size: 235334209 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-webserver-5/jws57-openjdk11-openshift-rhel8@sha256:e241dd0049956f75f989687e80fafe7da2f2f25ef4ea762bdaabfe2161d20f64 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:e241dd0049956f75f989687e80fafe7da2f2f25ef4ea762bdaabfe2161d20f64 resourceVersion: "13699" uid: 938832de-81fc-41f0-9e3f-c5bd5b711fdb - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:35d6a5cce6a146ea7213377edde7d1d27f34dda18ce9fac98ea2ab40a6b110f6 size: 79593305 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26e557f159f6398eefc1611d2b3bd0fe06872b96a779bcf01a3acaa819938c56 size: 17490249 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d2bb55755c2edf5423e38bd42c48b277e0cb4c5c765218247001a8c8eb479a87 size: 215199578 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e6bff1e34aa6582524f2497681213cc7697e9596d432796d3d5f29b24248cd58 size: 17064811 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el9 - NODEJS_VER=20 - RUBY_MAJOR_VERSION=3 - RUBY_MINOR_VERSION=0 - RUBY_VERSION=3.0 - RUBY_SCL_NAME_VERSION=30 - RUBY_SCL=ruby-30 - IMAGE_NAME=ubi9/ruby-30 - SUMMARY=Platform for building and running Ruby 3.0 applications - DESCRIPTION=Ruby 3.0 available as container is a base platform for building and running various Ruby 3.0 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). 
It is simple, straight-forward, and extensible. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-13T16:12:15Z" com.redhat.component: ruby-30-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Ruby 3.0 available as container is a base platform for building and running various Ruby 3.0 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Ruby 3.0 available as container is a base platform for building and running various Ruby 3.0 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. io.k8s.display-name: Ruby 3.0 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,ruby,ruby30,ruby-30 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/ruby-30 org.opencontainers.image.revision: a15c9c337bac22b7a02fde2215d14046d2418bca release: "1760371911" summary: Platform for building and running Ruby 3.0 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-ruby-container.git --context-dir=3.0/test/puma-test-app/ ubi9/ruby-30 ruby-sample-app vcs-ref: a15c9c337bac22b7a02fde2215d14046d2418bca vcs-type: git vendor: Red Hat, Inc. 
version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-13T16:12:20Z" Id: sha256:55edf0b42b55dab2ccf52b5e19e10c4821f47fa94fff9fb5b3a9c72a8c7540be Size: 329366717 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/ruby-30@sha256:e3727c305a54ac48c07688089e18e9abbe4ce98712a7ce59aa4b9ad7b7bc6514 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:e3727c305a54ac48c07688089e18e9abbe4ce98712a7ce59aa4b9ad7b7bc6514 resourceVersion: "14114" uid: 3876e657-5ceb-484c-8532-50efc059eea8 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7ef1e3a3a6838c7a9be47c4fdc5a8b177583baa77397397e76933831c0379d45 size: 132132261 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:17659dbf6db2c126d42ce17461e7ebffa0681e9c07bb5ac44f7f70ad4c05bf17 size: 5393909 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_HTTPS=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JBOSS_CONTAINER_MAVEN_38_MODULE=/opt/jboss/container/maven/38/ - MAVEN_VERSION=3.8 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar quarkus-app - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.17 - LANG=C.utf8 - FUSE_JAVA_IMAGE_NAME=fuse7/fuse-java-openshift-rhel8 - FUSE_JAVA_IMAGE_VERSION=1.12 - JOLOKIA_VERSION=1.7.2.redhat-00002 - PROMETHEUS_JMX_EXPORTER_VERSION=0.18.0.redhat-00001 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i:/usr/local/s2i - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true - JAVA_DATA_DIR=/deployments/data ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} 9779/tcp: {} Labels: architecture: x86_64 build-date: 2024-05-23T17:52:19 com.redhat.component: fuse-java-openshift-rhel-8-container com.redhat.deployments-dir: /deployments com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 com.redhat.license_terms: https://www.redhat.com/agreements description: Build and run Spring Boot-based integration applications distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.fabric8.s2i.version.jolokia: 1.7.2.redhat-00002 io.fabric8.s2i.version.maven: 3.3.3-1.el7 
io.fabric8.s2i.version.prometheus.jmx_exporter: 0.18.0.redhat-00001 io.k8s.description: Build and run Spring Boot-based integration applications io.k8s.display-name: Fuse for OpenShift io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,fuse maintainer: Otavio Piske name: fuse7/fuse-java-openshift-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "26.1716486065" summary: Build and run Spring Boot-based integration applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-java-openshift-rhel8/images/1.12-26.1716486065 usage: https://access.redhat.com/documentation/en-us/openjdk/8/html/using_openjdk_8_source-to-image_for_openshift/index vcs-ref: c71a1efc48cdb3fbbd3afac4577b3dcc6b4e847c vcs-type: git vendor: Red Hat, Inc. version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-05-23T17:54:51Z" Id: sha256:740a5d928f171dde6da2bd1563c71946f07c90b21bf32ab1917e2c0476e29a0b Size: 176874147 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-java-openshift-rhel8@sha256:e379727c63710610a1d6843aac4fe3c9e5d81fc0e58c171e88c55da4be1499f0 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:e379727c63710610a1d6843aac4fe3c9e5d81fc0e58c171e88c55da4be1499f0 resourceVersion: "14042" uid: 6d304100-713b-48f1-bbe4-edd5ebbb8511 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:54e56e6f85721741ee7bf0336de8ad3bf138a56769a6d0097b600a0e361be58d size: 39618910 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4f8ddd7f5a755f537dd9d5f553c8c78171dcf3018c5fc96676a07380d3e14e20 size: 1745 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6a26daebeda0b951f71cb9428ea142d5b29f8b9d34ade08ee7c9f323b43a580f size: 108734553 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-17 - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - 
JBOSS_IMAGE_NAME=ubi8/openjdk-17 - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 65ec992ef2e6 Image: 9123ac2298437939745a0cdb21826da267ffacb7eb36c5ab1b10e081dbb4b39c Labels: architecture: x86_64 build-date: 2022-04-29T13:52:42.413180 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-17-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "1.1651233093" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17/images/1.12-1.1651233093 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 934846907ba3af890fb4de2c384c9f21d3f7ab29 vcs-type: git vendor: Red Hat, Inc. version: "1.12" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-17 - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-17 - JBOSS_IMAGE_VERSION=1.12 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: 65ec992ef2e6 Image: sha256:04a95b886edf0abd76bd342b161b53d93e066e3e55ed9fa2a5a82087584a3ba4 Labels: architecture: x86_64 build-date: 2022-04-29T13:52:42.413180 com.redhat.build-host: cpt-1003.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-17-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and 
running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "1.1651233093" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17/images/1.12-1.1651233093 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 934846907ba3af890fb4de2c384c9f21d3f7ab29 vcs-type: git vendor: Red Hat, Inc. version: "1.12" User: "185" WorkingDir: /home/jboss Created: "2022-04-29T13:58:21Z" DockerVersion: 1.13.1 Id: sha256:de955e624ee3104841bbab009a43a1e1e9a30a38bbfa5ea966f1c2bbc33578bf Size: 148362031 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17@sha256:e37aeaeb0159194a9855350e13e399470f39ce340d6381069933742990741fb8 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:e37aeaeb0159194a9855350e13e399470f39ce340d6381069933742990741fb8 resourceVersion: "14143" uid: d23b1f49-1ae8-42be-ac53-93628316081f - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0e6a27b858072801aa26e46f548f8e4a9d823aa2ded217561242f1aaa50912c8 size: 111652172 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GECOS=JBoss user - HOME=/home/jboss - UID=185 - USER=jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.18 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2024-01-18T10:36:52 com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.9.1 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" 
org.opencontainers.image.documentation: https://jboss-container-images.github.io/openjdk/ release: "2.1705573231" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.18-2.1705573231 usage: https://jboss-container-images.github.io/openjdk/ vcs-ref: 12aac04fffa038f171574a2c3f057a2c253f5c27 vcs-type: git vendor: Red Hat, Inc. version: "1.18" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-01-18T10:45:46Z" Id: sha256:f2df3bbcae9065c5976d45b3e1c4e5717d8f3bc38910ce02f04b2c40d3239e6b Size: 150978954 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:e4223a60b887ec24cad7dd70fdb6c3f2c107fb7118331be6f45d626219cfe7f3 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:e4223a60b887ec24cad7dd70fdb6c3f2c107fb7118331be6f45d626219cfe7f3 resourceVersion: "14158" uid: d1835de4-11db-461a-8299-335d906df746 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:690c847f419672788dca52f9eb17b10133919a0aae947934f7bdf5ccf30f1546 size: 79990748 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0f257b509d08e949ce3d20cf20f1a21b7ec8a6b4b6edfef6a67a23d11b8da3ef size: 115756384 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=openjdk/openjdk-11-rhel7 - JBOSS_IMAGE_VERSION=1.17 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2024-06-04T14:59:17 com.redhat.component: openjdk-11-rhel7-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.10.0 
io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: openjdk/openjdk-11-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "3.1717512827" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/openjdk/openjdk-11-rhel7/images/1.17-3.1717512827 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 354a181e3ce930de9fb8a9a34e74118bfa657ad7 vcs-type: git vendor: Red Hat, Inc. version: "1.17" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-06-04T15:08:18Z" Id: sha256:cdc667485ef8fdf408862233fc5f22556665356272a83f2784dd5c28e24c28a5 Size: 195777053 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/openjdk/openjdk-11-rhel7@sha256:e4b1599ba6e88f6df7c4e67d6397371d61b6829d926411184e9855e71e840b8c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:e4b1599ba6e88f6df7c4e67d6397371d61b6829d926411184e9855e71e840b8c resourceVersion: "13678" uid: 10f5e7a9-9ded-439f-ac6d-43587d9d20f2 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d7c06497d5cebd39c0a4feb14981ec940b5c863e49903d320f630805b049cbff size: 39279912 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:26a68d65f3a58a10359cbec9abc880891c92df5adc10551e0830c2c517b60167 size: 90728260 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.14 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-02-07T17:21:37 com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 3.11.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk 
org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "9.1675788286" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.14-9.1675788286 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 043854ec5a8a145469c0504968ee45e7c1566392 vcs-type: git vendor: Red Hat, Inc. version: "1.14" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-02-07T17:29:14Z" Id: sha256:ea9bcdde5d9e1ec04f89e02ad1d7dc59bec3711e572b80cc43a63f4190f2d320 Size: 130027029 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:e4be2fb7216f432632819b2441df42a5a0063f7f473c2923ca6912b2d64b7494 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:e4be2fb7216f432632819b2441df42a5a0063f7f473c2923ca6912b2d64b7494 resourceVersion: "14154" uid: 070441fd-b49d-421d-8cc4-529daeb2a7ef - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:378837c0e24ad4a2e33f0eb3d68dc0c31d9a7dbbd5357d4acafec1d3a7930602 size: 74923740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e17262bc23414bd3c0e9808ad7a87b055fe5afec386da42115a839ea2083d233 size: 1303 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8d9c78c7f9887170d08c57ec73b21e469b4120682a2e82883217535294878c5d size: 3805344 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:f4350d5126d0895bb50c2c082a415ff417578d34508a0ef07ec20cebf661ebb7 size: 70368140 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:79cc30b4ba3e86f08bd7ddc3a33d86bf36534d8ccb32b1c8c1f66a57b1170bb9 size: 30906970 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Dhiraj Bokde Config: Cmd: - usage Env: - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.1 - JOLOKIA_VERSION=1.5.0.redhat-1 - KARAF_FRAMEWORK_VERSION=4.2.0.fuse-710024-redhat-00002 - PROMETHEUS_JMX_EXPORTER_VERSION=0.10.0.redhat-2 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 3183206101f3 Image: 043f153b9d5b36ae5f5dd5da8923edab1bd13d357c8909bf2e808da6932c5cb5 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-10-17T22:07:46.745472 com.redhat.build-host: cpt-0006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.1.4 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.karaf: 4.2.0.fuse-710024-redhat-00002 io.fabric8.s2i.version.maven: 3.3.3-1.el7 
io.fabric8.s2i.version.prometheus.jmx_exporter: 0.10.0.redhat-2 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Cloud Enablement Feedback name: fuse7/fuse-karaf-openshift org.concrt.version: 2.1.4 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "4.1539812382" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.1-4.1539812382 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: b1de626b2e1de9de9af75367b70862009e65df91 vcs-type: git vendor: Red Hat, Inc. version: "1.1" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/s2i:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss/openjdk18-rhel7 - JBOSS_IMAGE_VERSION=1.1 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - FUSE_KARAF_IMAGE_NAME=fuse7/fuse-karaf-openshift - FUSE_KARAF_IMAGE_VERSION=1.1 - JOLOKIA_VERSION=1.5.0.redhat-1 - KARAF_FRAMEWORK_VERSION=4.2.0.fuse-710024-redhat-00002 - PROMETHEUS_JMX_EXPORTER_VERSION=0.10.0.redhat-2 - AB_JOLOKIA_PASSWORD_RANDOM=true - AB_JOLOKIA_AUTH_OPENSHIFT=true ExposedPorts: 8778/tcp: {} 9779/tcp: {} Hostname: 3183206101f3 Image: sha256:18998c62224921fd54ef88babf0d5a0fc13c00ff60dab143bc946fb958892bde Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2018-10-17T22:07:46.745472 com.redhat.build-host: cpt-0006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-karaf-openshift-container com.redhat.deployments-dir: /deployments/karaf com.redhat.dev-mode: JAVA_DEBUG:false com.redhat.dev-mode.port: JAVA_DEBUG_PORT:5005 description: Platform for building and running Apache Karaf OSGi applications distribution-scope: public io.cekit.version: 2.1.4 io.fabric8.s2i.version.jolokia: 1.5.0.redhat-1 io.fabric8.s2i.version.karaf: 4.2.0.fuse-710024-redhat-00002 io.fabric8.s2i.version.maven: 3.3.3-1.el7 io.fabric8.s2i.version.prometheus.jmx_exporter: 0.10.0.redhat-2 io.k8s.description: Platform for building and running Apache Karaf OSGi applications io.k8s.display-name: Fuse for OpenShift - Karaf based io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,karaf maintainer: Cloud Enablement Feedback name: fuse7/fuse-karaf-openshift org.concrt.version: 2.1.4 org.jboss.deployments-dir: /deployments/karaf org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "4.1539812382" summary: Platform for building and running Apache Karaf OSGi applications url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-karaf-openshift/images/1.1-4.1539812382 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: b1de626b2e1de9de9af75367b70862009e65df91 vcs-type: git vendor: Red Hat, Inc. 
version: "1.1" User: "185" WorkingDir: /home/jboss Created: "2018-10-17T22:10:08Z" DockerVersion: 1.13.1 Id: sha256:e2a9658ebf12193de8f236125832ea2941b95ea3188bfbb788bbb346351a6ee6 Size: 180011963 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-karaf-openshift@sha256:e61420525f02c4e85ba9b79b9702880a11907d8ab79b9b0a36dbf559ce0f5234 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:e61420525f02c4e85ba9b79b9702880a11907d8ab79b9b0a36dbf559ce0f5234 resourceVersion: "14017" uid: fa7da5ba-8f2c-46eb-a3d3-06f48b8f006e - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7f2c2c4492b6b2d181be862a0a1d1b6f6851cb07244efbcb43d44f9936aa78d5 size: 80005019 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:abbe4d8a684d1158a98a0d568e683035c9379b514c6e27cfb85fdb989e849368 size: 91217246 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - /bin/bash Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - DEFAULT_ADMIN_USERNAME=eapadmin - HTTPS_ENABLE_HTTP2=true - JBOSS_HOME=/opt/eap - JBOSS_IMAGE_NAME=jboss-eap-7/eap74-openjdk8-runtime-openshift-rhel7 - JBOSS_IMAGE_VERSION=7.4.18 - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - LAUNCH_JBOSS_IN_BACKGROUND=true - SSO_FORCE_LEGACY_SECURITY=true ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2024-07-22T13:35:03 com.redhat.component: jboss-eap-74-openjdk8-runtime-openshift-rhel7-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.license_terms: https://www.redhat.com/agreements description: The JBoss EAP 7.4 OpenJDK 8 runtime image distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.k8s.description: Base image to run an EAP server and application io.k8s.display-name: JBoss EAP runtime image io.openshift.expose-services: 8080:http io.openshift.tags: javaee,eap,eap7 maintainer: Red Hat name: jboss-eap-7/eap74-openjdk8-runtime-openshift-rhel7 org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "6" summary: The JBoss EAP 7.4 OpenJDK 8 runtime image url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-eap-7/eap74-openjdk8-runtime-openshift-rhel7/images/7.4.18-6 vcs-ref: 3bd793c5d56a86086de93246a63bfc3312df588a vcs-type: git vendor: Red Hat, Inc. 
version: 7.4.18 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2024-07-22T13:38:30Z" Id: sha256:055765bee165f34aea152152acabd2b49594742c636a426afe0f9d851122dd36 Size: 171242646 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-eap-7/eap74-openjdk8-runtime-openshift-rhel7@sha256:e6fc695cfd77ccff83ef22148c54f45ba8af41e2b69f6146d7ad588cb2aed780 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:e6fc695cfd77ccff83ef22148c54f45ba8af41e2b69f6146d7ad588cb2aed780 resourceVersion: "13313" uid: 1c4ee5c6-f94d-4f62-97ea-20a36d0ac4da - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:25c75c34b2e2b68ba9245d9cddeb6b8a0887371ed30744064f85241a75704d87 size: 79262296 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:67705065e025181e4faca8aabe1305bdd92f5bdf8a2b8009cdb69183ac2e2c47 size: 49851946 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9d458e2e81cb0fa811f569aaf711628309c0372c7d5eed4a8ea9ec96b4aeeb42 size: 9300456 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:65b1f5b0fed902160219dd5c084e53de4f052d43d177a5ae3a3549fb62bb85cb size: 415348847 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Entrypoint: - /bin/openshift-install Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - ART_BUILD_ENGINE=konflux - ART_BUILD_DEPS_METHOD=cachi2 - ART_BUILD_NETWORK=open - ART_BUILD_DEPS_MODE=default - __doozer=merge - BUILD_RELEASE=202510211040.p2.ge238076.assembly.stream.el9 - BUILD_VERSION=v4.20.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=20 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.20.0-202510211040.p2.ge238076.assembly.stream.el9-e238076 - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.20 - __doozer_key=ose-installer - __doozer_uuid_tag=ose-installer-rhel9-v4.20.0-20251021.105557 - __doozer_version=v4.20.0 - OS_GIT_COMMIT=e238076 - SOURCE_DATE_EPOCH=1760727874 - SOURCE_GIT_COMMIT=e23807689ec464da30e771dda70fd8989680a011 - SOURCE_GIT_TAG=v1.4.19-ec5-379-ge23807689e - SOURCE_GIT_URL=https://github.com/openshift/installer - HOME=/output Labels: License: GPLv2+ architecture: x86_64 build-date: "2025-10-21T13:33:19Z" com.redhat.component: ose-installer-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:openshift:4.20::el9 description: Empty distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Empty io.k8s.display-name: Empty io.openshift.build.commit.id: e23807689ec464da30e771dda70fd8989680a011 io.openshift.build.commit.url: https://github.com/openshift/installer/commit/e23807689ec464da30e771dda70fd8989680a011 io.openshift.build.source-location: https://github.com/openshift/installer io.openshift.expose-services: "" io.openshift.maintainer.component: Installer / openshift-installer io.openshift.maintainer.project: OCPBUGS io.openshift.release.operator: "true" io.openshift.tags: Empty maintainer: Red Hat, Inc. 
name: openshift/ose-installer-rhel9 org.opencontainers.image.revision: 79818eff324aee33104736839fba6775d39383d7 release: 202510211040.p2.ge238076.assembly.stream.el9 summary: Empty url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel-els/images/9.4-847.1719484506 vcs-ref: 79818eff324aee33104736839fba6775d39383d7 vcs-type: git vendor: Red Hat, Inc. version: v4.20.0 User: 1000:1000 WorkingDir: /output ContainerConfig: {} Created: "2025-10-21T13:43:59Z" Id: sha256:b437aabdb1940863e4510026463d0fcfe0e88f534ebaeedbc663115821d29154 Size: 553786370 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e7713979a921ec8d2506fcb3fb3ee960fc757262f4567319ee5aa2b351d4f778 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:e7713979a921ec8d2506fcb3fb3ee960fc757262f4567319ee5aa2b351d4f778 resourceVersion: "13292" uid: fd93268b-d615-4f70-8cdc-993cadb95c44 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2a99c93da16827d9a6254f86f495d2c72c62a916f9c398577577221d35d2c790 size: 39641757 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4418ace46c3dd933f98d83f357f31048e72d5db3d97bccfdb0acef769ee8234f size: 1743 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:21d80832abff381642c2152c7d9ae05bc0d2683be5f6cfbe25024738f7a5895b size: 118407698 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.10 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: be4e58a52d40 Image: 3eec95fdbe14c33d646a38ecfa0ffcd9b21f02758ebff6139972f2996b4d504a Labels: architecture: x86_64 build-date: 2021-12-01T18:42:06.107591 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-ubi8-container 
com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "10.1638383051" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.10-10.1638383051 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: ca0a104dbc88f2b53c59a11da2aacb90f0bb479c vcs-type: git vendor: Red Hat, Inc. version: "1.10" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-11 - JBOSS_IMAGE_VERSION=1.10 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: be4e58a52d40 Image: sha256:e87ffd7792f5de35a5622f520bc825e692d51d8c063da8385687889c098f5717 Labels: architecture: x86_64 build-date: 2021-12-01T18:42:06.107591 com.redhat.build-host: cpt-1006.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-11-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain 
Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-11 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "11" org.jboss.product.version: "11" release: "10.1638383051" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 11 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-11/images/1.10-10.1638383051 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: ca0a104dbc88f2b53c59a11da2aacb90f0bb479c vcs-type: git vendor: Red Hat, Inc. version: "1.10" User: "185" WorkingDir: /home/jboss Created: "2021-12-01T18:48:13Z" DockerVersion: 1.13.1 Id: sha256:26261c21856ce658c807d2a7172be92faf7469efc753dd69fac16cc856fb9471 Size: 158058692 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-11@sha256:e851770fd181ef49193111f7afcdbf872ad23f3a8234e0e07a742c4ca2882c3d kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:e851770fd181ef49193111f7afcdbf872ad23f3a8234e0e07a742c4ca2882c3d resourceVersion: "14167" uid: ab005a4b-51b2-4b40-afd6-324daede91e9 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a20dc09567a04bdff2ebfaa3d3917f64d7620555e6354d53b43dd7ebb0e0f575 size: 79751689 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e7b8202be3fe050cb969cfab5cfb888dadab37a8fe818411e9674df89e0a989c size: 124303924 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. 
Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.16 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-11-14T15:35:29 com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.9.1 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "1" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.16-1 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 3fe6df1e2c390588459d91f828a74bf0500b82cc vcs-type: git vendor: Red Hat, Inc. 
version: "1.16" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-11-14T15:38:35Z" Id: sha256:0dbe7f254fb89345741da46333fda218550459fc3991c019639585c84cf22e70 Size: 204085222 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:e90172ca0f09acf5db1721bd7df304dffd184e00145072132cb71c7f0797adf6 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:e90172ca0f09acf5db1721bd7df304dffd184e00145072132cb71c7f0797adf6 resourceVersion: "14166" uid: 59fccd7d-2b98-4fe1-aa40-6dd676ee3101 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2920d84eafa0cf94806ab58f0a2124f7b2d35bcbb06fc89a9106dcc28efe397a size: 39653524 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7180a091b58fc553a5990486ab5028d0fe8361e9c62ca537c2324fe3997d339a size: 50614128 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - container=oci - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - NPM_RUN=start - PLATFORM=el9 - NODEJS_VERSION=22 - NAME=nodejs - SUMMARY=Minimal image for running Node.js 22 applications - DESCRIPTION=Node.js 22 available as container is a base platform for running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. - NPM_CONFIG_PREFIX=/opt/app-root/src/.npm-global - PATH=/opt/app-root/src/node_modules/.bin/:/opt/app-root/src/.npm-global/bin/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-15T16:11:24Z" com.redhat.component: nodejs-22-minimal-container com.redhat.deployments-dir: /opt/app-root/src com.redhat.dev-mode: DEV_MODE:false com.redhat.dev-mode.port: DEBUG_PORT:5858 com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:9::appstream description: Node.js 22 available as container is a base platform for running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-nodejs-container io.buildah.version: 1.41.4 io.k8s.description: Node.js 22 available as container is a base platform for running various Node.js 22 applications and frameworks. Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices. 
io.k8s.display-name: Node.js 22 Micro io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,nodejs,nodejs22 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi9/nodejs-22-minimal org.opencontainers.image.revision: e19300a1ca03c44fa04c9dfd76d31f292aee7cca release: "1760544659" summary: Minimal image for running Node.js 22 applications url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: e19300a1ca03c44fa04c9dfd76d31f292aee7cca vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-15T16:11:29Z" Id: sha256:2ac495ca952b9d2ee4ad236f7d5d31cd3b3641e1aaa01a4d4c5bb039a7cb82ca Size: 90280042 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi9/nodejs-22-minimal@sha256:ea065c1e423af8de839c84e1520131fe4ed4a1bda0ae8eeb078fc8a8368e9f0c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:ea065c1e423af8de839c84e1520131fe4ed4a1bda0ae8eeb078fc8a8368e9f0c resourceVersion: "14092" uid: da460b6b-80e6-44b6-ac2e-2f0d3e699938 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d43c95783c3d99a3c275f4c278f8d68a1dfda166c399fd55aee8c1dce7d76611 size: 79767891 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:e53ac5fae1ac340de75c4c78c6eea9df409b45b2ffee95cd8085a8ed3b9cbf6c size: 7515417 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0df98180b96394ec85a9f587d6e3304fa7628ec4a3029269cb44f30b4dd38a5a size: 63419156 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - run-mysqld Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el7 - MYSQL_VERSION=10.3 - APP_DATA=/opt/app-root/src - HOME=/var/lib/mysql - SUMMARY=MariaDB 10.3 SQL database server - DESCRIPTION=MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/mysql - MYSQL_PREFIX=/opt/rh/rh-mariadb103/root/usr - ENABLED_COLLECTIONS=rh-mariadb103 - BASH_ENV=/usr/share/container-scripts/mysql/scl_enable - ENV=/usr/share/container-scripts/mysql/scl_enable - PROMPT_COMMAND=. /usr/share/container-scripts/mysql/scl_enable ExposedPorts: 3306/tcp: {} Labels: architecture: x86_64 build-date: 2023-07-19T11:53:14 com.redhat.component: rh-mariadb103-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel description: MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. 
The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. distribution-scope: public io.buildah.version: 1.29.0 io.k8s.description: MariaDB is a multi-user, multi-threaded SQL database server. The container image provides a containerized packaging of the MariaDB mysqld daemon and client application. The mysqld server daemon accepts connections from clients and provides access to content from MariaDB databases on behalf of the clients. io.k8s.display-name: MariaDB 10.3 io.openshift.expose-services: 3306:mysql io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,mysql,mariadb,mariadb103,rh-mariadb103 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhscl/mariadb-103-rhel7 release: "157" summary: MariaDB 10.3 SQL database server url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhscl/mariadb-103-rhel7/images/1-157 usage: docker run -d -e MYSQL_USER=user -e MYSQL_PASSWORD=pass -e MYSQL_DATABASE=db -p 3306:3306 rhscl/mariadb-103-rhel7 vcs-ref: b2fd6429b719ad746d34aa403ec724eab594969d vcs-type: git vendor: Red Hat, Inc. version: "1" User: "27" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2023-07-19T11:57:03Z" Id: sha256:b76c7d766cfbe3c5a9dd260aab10d3ad34c4f6e9f5eda825ea41bba9fe9709ca Size: 150721951 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhscl/mariadb-103-rhel7@sha256:eaaf8ab6d318d72cc4e465609b213f4d9d9171f222f59ae012fa5b96fb3e4ea9 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:eaaf8ab6d318d72cc4e465609b213f4d9d9171f222f59ae012fa5b96fb3e4ea9 resourceVersion: "13836" uid: ef06e2e6-4dab-4aae-8ea3-5c1e7479a976 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:97da74cc6d8fa5d1634eb1760fd1da5c6048619c264c23e62d75f3bf6b8ef5c4 size: 79524639 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d8190195889efb5333eeec18af9b6c82313edd4db62989bd3a357caca4f13f0e size: 1438 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:37fcebd665b9bf280b3a7b7fc8cbbdd35c40de9fde97eec88a9efbb1a416cf0f size: 31542956 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:03bf2f9ff79ce68fdf647999d3c96dd98a59121fae75dd2c1dcce34e3e159eeb size: 13107144 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:2b42f43a3d9df8228ab00afc8ece1dbfafae24fbd2b3ea72b6234bb68dc2c1bf size: 59202343 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:fe12f4f7de241e6f0f92729bb06b5400f8936a47d5bbaa4b521d4b656ad61ae1 size: 589260181 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: OpenShift Developer Services Config: Entrypoint: - /usr/bin/go-init - -main - /usr/libexec/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - __doozer=merge - BUILD_RELEASE=202306070816.p0.g05d83ef.assembly.stream - BUILD_VERSION=v4.13.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=13 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.13.0-202306070816.p0.g05d83ef.assembly.stream-05d83ef - SOURCE_GIT_TREE_STATE=clean - OS_GIT_COMMIT=05d83ef - 
SOURCE_DATE_EPOCH=1685556672 - SOURCE_GIT_COMMIT=05d83eff7e17160e679898a2a5cd6019ec252c49 - SOURCE_GIT_TAG=openshift-clients-4.13.0-202304190216-4-g05d83eff7 - SOURCE_GIT_URL=https://github.com/openshift/oc - JENKINS_VERSION=2 - HOME=/var/lib/jenkins - JENKINS_HOME=/var/lib/jenkins - JENKINS_UC=https://updates.jenkins.io - OPENSHIFT_JENKINS_IMAGE_VERSION=4.13 - LANG=en_US.UTF-8 - LC_ALL=en_US.UTF-8 - INSTALL_JENKINS_VIA_RPMS=true ExposedPorts: 8080/tcp: {} 50000/tcp: {} Labels: License: GPLv2+ architecture: x86_64 build-date: 2023-06-13T18:51:10 com.redhat.build-host: cpt-1001.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openshift-jenkins-2-container com.redhat.license_terms: https://www.redhat.com/agreements description: Jenkins is a continuous integration server distribution-scope: public io.buildah.version: 1.29.0 io.jenkins.version: 2.401.1 io.k8s.description: Jenkins is a continuous integration server io.k8s.display-name: Jenkins 2 io.openshift.build.commit.id: 418b910a5af2d9a46c4259fbdbe9a851f6a39820 io.openshift.build.commit.url: https://github.com/openshift/jenkins/commit/418b910a5af2d9a46c4259fbdbe9a851f6a39820 io.openshift.build.source-location: https://github.com/openshift/jenkins io.openshift.expose-services: 8080:http io.openshift.maintainer.component: Jenkins io.openshift.maintainer.product: OpenShift Container Platform io.openshift.maintainer.project: OCPBUGS io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: jenkins,jenkins2,ci maintainer: openshift-dev-services+jenkins@redhat.com name: openshift/ose-jenkins release: "1686682222" summary: Provides the latest release of the Red Hat Extended Life Base Image. url: https://access.redhat.com/containers/#/registry.access.redhat.com/openshift/ose-jenkins/images/v4.13.0-1686682222 vcs-ref: 938ba00e0b9ebc6013b8da4c8466e78680c8908c vcs-type: git vendor: Red Hat, Inc. 
version: v4.13.0 User: "1001" Volumes: /var/lib/jenkins: {} ContainerConfig: {} Created: "2023-06-13T18:58:20Z" Id: sha256:fbe7cc900bb77ed63ee417950f83153544d870fa450890667cedb5492b9be228 Size: 772667538 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ocp-tools-4/jenkins-rhel8@sha256:eab456afb39ed4607b2ee61c8c7635ab1c5ff8f8bddf7640c557e792504d545f kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:01Z" name: sha256:eab456afb39ed4607b2ee61c8c7635ab1c5ff8f8bddf7640c557e792504d545f resourceVersion: "13760" uid: 054532a0-f182-42d2-bc10-27b80a53a3cc - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:969b2129f884091816f6451dc7954be84cf70867f64c7f3448a4d7045c405fed size: 79215420 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:3b71c1233e1e26d1d75f29179b892cb4d3f5b87540f0d4f377bc9c8b040e77fa size: 468504636 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:29f302bc38599f9c2abd333785f5a01382dd7b3008e8430d3f6c619488e03b3c size: 627309857 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - EAP_FULL_GROUPID=org.jboss.eap - JBOSS_EAP_VERSION=7.4.7 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.4.7 - WILDFLY_VERSION=7.4.7.GA-redhat-00003 - SSO_FORCE_LEGACY_SECURITY=true - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.8.Final - GALLEON_WILDFLY_VERSION=5.2.6.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - 
GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - S2I_FP_VERSION=23.0.0.Final - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - DEFAULT_ADMIN_USERNAME=eapadmin - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - OFFLINER_VERSION=1.6 - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap74-openjdk11-openshift-rhel8 - JBOSS_IMAGE_VERSION=7.4.7 - WILDFLY_CAMEL_VERSION=5.9.0.fuse-7_11_1-00014-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-06-19T15:17:59 com.redhat.component: fuse-eap-openshift-jdk11-rhel-8-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Camel applications on EAP 7.4 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.4 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Thomas Diesler name: fuse7/fuse-eap-openshift-jdk11-rhel8 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.4.7 org.jboss.product.openjdk.version: "11" org.jboss.product.version: 7.4.7 release: "28.1687187287" summary: Platform for building and running Apache Camel applications on EAP 7.4 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift-jdk11-rhel8/images/1.11-28.1687187287 vcs-ref: f45d02aaf2c4c13f2a24ced8fbda98fe24a4db35 vcs-type: git vendor: Red Hat, Inc. 
version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-06-19T15:22:01Z" Id: sha256:436462f5a261850bb0367468bcbe4ac7f6bceeae89b9620f660aa184b40d7936 Size: 1175083214 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift-jdk11-rhel8@sha256:ebfca7a4e3506ee7f317acc7503ad46f2e1cf5605347a1b75fdd02bc77c7de02 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:10:59Z" name: sha256:ebfca7a4e3506ee7f317acc7503ad46f2e1cf5605347a1b75fdd02bc77c7de02 resourceVersion: "13331" uid: 0efd9cf8-b97b-4567-9c02-7e3328dfb8eb - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:6c502b378234995dc83300d3008b7d19853c50e1b89dc9c0d5917de51b589452 size: 77837331 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:01326448827ea7f69fcd30abaf1d354438c9c1dc2cc2d6529832431a7dae1afb size: 17704605 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:172ffdf04fe0ffd5a0117272da0333271a3c558dbeaf357e7412638d99a1e462 size: 154305621 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9a64d33ea5fbab73d73042b20b6c62bf0e07762623ac1ec63cb42bed2f673f58 size: 17205967 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - NODEJS_VER=20 - RUBY_MAJOR_VERSION=2 - RUBY_MINOR_VERSION=5 - RUBY_VERSION=2.5 - RUBY_SCL_NAME_VERSION=25 - RUBY_SCL=ruby-25 - IMAGE_NAME=ubi8/ruby-25 - SUMMARY=Platform for building and running Ruby 2.5 applications - DESCRIPTION=Ruby 2.5 available as container is a base platform for building and running various Ruby 2.5 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-30T12:10:33Z" com.redhat.component: ruby-25-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Ruby 2.5 available as container is a base platform for building and running various Ruby 2.5 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Ruby 2.5 available as container is a base platform for building and running various Ruby 2.5 applications and frameworks. Ruby is the interpreted scripting language for quick and easy object-oriented programming. It has many features to process text files and to do system management tasks (as in Perl). It is simple, straight-forward, and extensible. 
io.k8s.display-name: Ruby 2.5 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,ruby,ruby25,ruby-25 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi8/ruby-25 org.opencontainers.image.revision: 324e7447cf2a64a60ebd3933f0a913d135a35dc9 release: "1761826163" summary: Platform for building and running Ruby 2.5 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-ruby-container.git --context-dir=2.5/test/puma-test-app/ ubi8/ruby-25 ruby-sample-app vcs-ref: 324e7447cf2a64a60ebd3933f0a913d135a35dc9 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-30T12:10:44Z" Id: sha256:4719337b9191f74d84f235858b5f264d4f259e64364228aaef2e7a26260a8c2a Size: 267072353 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/ruby-25@sha256:ec1985bf5fca4d79054e4beadf7617c3b6400bad2325e47b00a90f7fe07540de kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:ec1985bf5fca4d79054e4beadf7617c3b6400bad2325e47b00a90f7fe07540de resourceVersion: "14112" uid: 709a2874-5dbf-45be-97ed-64598de48114 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c8c24bf4ef435ee29d7495ca732a4d82374c1a11c25ca6aae12f997f45ca5a26 size: 39733597 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:ad40b487e8868da46f3b7e7132544b5ef3e0b50704629e039a8128a1cc36078d size: 63940431 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:60b639def07c06efcb08c22201a35f1f049f35e81ac616f57ddfb8548fbcd0d1 size: 146167365 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - /bin/bash Env: - container=oci - HOME=/opt/app-root - DOTNET_APP_PATH=/opt/app-root/app - DOTNET_DATA_PATH=/opt/app-root/data - DOTNET_DEFAULT_CMD=default-cmd.sh - DOTNET_RUNNING_IN_CONTAINER=true - NUGET_XMLDOC_MODE=skip - ASPNETCORE_URLS=http://*:8080 - APP_UID=1001 - DOTNET_VERSION=9.0.11 - ASPNET_VERSION=9.0.11 - PATH=/opt/app-root/src/.local/bin:/opt/app-root/src/bin:/opt/app-root/.dotnet/tools:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - STI_SCRIPTS_PATH=/usr/libexec/s2i - DOTNET_GENERATE_ASPNET_CERTIFICATE=false - DOTNET_NOLOGO=true - DOTNET_SDK_VERSION=9.0.112 - DOTNET_USE_POLLING_FILE_WATCHER=true ExposedPorts: 8080/tcp: {} Labels: architecture: x86_64 build-date: "2025-12-02T13:26:21Z" com.redhat.component: dotnet-90-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:enterprise_linux:8::appstream description: Platform for building and running .NET 9 applications distribution-scope: public dotnet_version: 9.0.11 io.buildah.version: 1.41.4 io.k8s.description: Platform for building and running .NET 9 applications io.k8s.display-name: .NET 9 SDK io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,.net,dotnet,dotnetcore,dotnet-90 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: Red Hat, Inc. 
name: ubi8/dotnet-90 org.opencontainers.image.created: "2025-12-02T13:26:21Z" org.opencontainers.image.revision: c3c19ed75250cb6c67f90925540f8ae29e952ff1 release: "1764681922" sdk_version: 9.0.112 summary: .NET 9 SDK url: https://catalog.redhat.com/en/search?searchType=containers vcs-ref: c3c19ed75250cb6c67f90925540f8ae29e952ff1 vcs-type: git vendor: Red Hat, Inc. version: "9.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-12-02T13:26:38Z" Id: sha256:bbd3fbbe4fd412ea9ee27d8d5db2239a0d2e7dc8e60d848dd6ed8e7cf61e7604 Size: 249860453 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi8/dotnet-90@sha256:ec784f172735873a5893504e07c57dcbd56b7b049a395c5629c6058dbfac21a3 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-12-12T16:25:14Z" name: sha256:ec784f172735873a5893504e07c57dcbd56b7b049a395c5629c6058dbfac21a3 resourceVersion: "40667" uid: 6a57396b-5fd2-495a-9afb-2a5122730c07 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:25c75c34b2e2b68ba9245d9cddeb6b8a0887371ed30744064f85241a75704d87 size: 79262296 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:67705065e025181e4faca8aabe1305bdd92f5bdf8a2b8009cdb69183ac2e2c47 size: 49851946 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9d458e2e81cb0fa811f569aaf711628309c0372c7d5eed4a8ea9ec96b4aeeb42 size: 9300456 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:b954cb90d4571440ed86627198be2d74d7c3d264fe72e0af0f35f40f0da99ea8 size: 75745362 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:92c7fb0cb7f3bfe054a3c805669529daa76dec4e9e05fcf4a097579c78dd575c size: 6212869 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/bash Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - GODEBUG=x509ignoreCN=0,madvdontneed=1 - ART_BUILD_ENGINE=konflux - ART_BUILD_DEPS_METHOD=cachi2 - ART_BUILD_NETWORK=hermetic - ART_BUILD_DEPS_MODE=default - __doozer=merge - BUILD_RELEASE=202510212154.p2.gf0c6474.assembly.stream.el9 - BUILD_VERSION=v4.20.0 - OS_GIT_MAJOR=4 - OS_GIT_MINOR=20 - OS_GIT_PATCH=0 - OS_GIT_TREE_STATE=clean - OS_GIT_VERSION=4.20.0-202510212154.p2.gf0c6474.assembly.stream.el9-f0c6474 - SOURCE_GIT_TREE_STATE=clean - __doozer_group=openshift-4.20 - __doozer_key=ose-must-gather - __doozer_uuid_tag=ose-must-gather-rhel9-v4.20.0-20251021.223340 - __doozer_version=v4.20.0 - OS_GIT_COMMIT=f0c6474 - SOURCE_DATE_EPOCH=1755014140 - SOURCE_GIT_COMMIT=f0c64742c79afce5e367ca2c7b830ad448abe1df - SOURCE_GIT_TAG=f0c64742 - SOURCE_GIT_URL=https://github.com/openshift/must-gather Labels: License: GPLv2+ architecture: x86_64 build-date: "2025-10-21T23:11:28Z" com.redhat.component: ose-must-gather-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/a:redhat:openshift:4.20::el9 description: Empty distribution-scope: public io.buildah.version: 1.41.4 io.k8s.description: Empty io.k8s.display-name: Empty io.openshift.build.commit.id: f0c64742c79afce5e367ca2c7b830ad448abe1df io.openshift.build.commit.url: https://github.com/openshift/must-gather/commit/f0c64742c79afce5e367ca2c7b830ad448abe1df io.openshift.build.source-location: 
https://github.com/openshift/must-gather io.openshift.build.versions: kubectl=1.33.3 io.openshift.expose-services: "" io.openshift.maintainer.component: oc io.openshift.maintainer.project: OCPBUGS io.openshift.tags: Empty maintainer: Red Hat, Inc. name: openshift/ose-must-gather-rhel9 org.opencontainers.image.revision: be5bc98f6ab4fd760cb24ef79b794f829a6cca0d release: 202510212154.p2.gf0c6474.assembly.stream.el9 summary: Empty url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel-els/images/9.4-847.1719484506 vcs-ref: be5bc98f6ab4fd760cb24ef79b794f829a6cca0d vcs-type: git vendor: Red Hat, Inc. version: v4.20.0 ContainerConfig: {} Created: "2025-10-21T23:11:33Z" Id: sha256:59d431a1f16db8ec0cac568e88b46898f9ce3ecdcb913aafcd9c7cc9f60ab2bd Size: 220397991 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ed13779a6051e3b9588f5ebea6b66c0a2979512fdcc99bca1f910a577fb4c34a kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:ed13779a6051e3b9588f5ebea6b66c0a2979512fdcc99bca1f910a577fb4c34a resourceVersion: "13671" uid: 31dd898c-aba6-4eb1-aeca-092ef136ec86 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c7965aa7086045a59bdb113a1fb8a19d7ccf7af4133e59af8ecefd39cda8e0b1 size: 78964242 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:629f5551eb35561edd4fb4df147019d1b61fecb5cff49308880466f66c7c3287 size: 18497941 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:401a443a6b88e0eaecefc030affbc1dd5b32713ba8253d2bb6553083ea3f0b4e size: 78897336 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - run-postgresql Entrypoint: - container-entrypoint Env: - container=oci - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el8 - POSTGRESQL_VERSION=10 - POSTGRESQL_PREV_VERSION=9.6 - HOME=/var/lib/pgsql - PGUSER=postgres - APP_DATA=/opt/app-root - SUMMARY=PostgreSQL is an advanced Object-Relational database management system - DESCRIPTION=PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. - CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/postgresql - ENABLED_COLLECTIONS= ExposedPorts: 5432/tcp: {} Labels: architecture: x86_64 build-date: 2024-06-05T11:48:51 com.redhat.component: postgresql-10-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#rhel description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. distribution-scope: public io.buildah.version: 1.29.0 io.k8s.description: PostgreSQL is an advanced Object-Relational database management system (DBMS). The image contains the client and server programs that you'll need to create, run, maintain and access a PostgreSQL DBMS server. 
io.k8s.display-name: PostgreSQL 10 io.openshift.expose-services: 5432:postgresql io.openshift.s2i.assemble-user: "26" io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: database,postgresql,postgresql10,postgresql-10 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: rhel8/postgresql-10 release: "245.1717586538" summary: PostgreSQL is an advanced Object-Relational database management system url: https://access.redhat.com/containers/#/registry.access.redhat.com/rhel8/postgresql-10/images/1-245.1717586538 usage: podman run -d --name postgresql_database -e POSTGRESQL_USER=user -e POSTGRESQL_PASSWORD=pass -e POSTGRESQL_DATABASE=db -p 5432:5432 rhel8/postgresql-10 vcs-ref: 1f9e79b1dd5ff57cc1d06493e1c759c06caebd31 vcs-type: git vendor: Red Hat, Inc. version: "1" User: "26" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2024-06-05T11:51:40Z" Id: sha256:28986f9837d55db9e0d95235c90362d2f8d13120e6445c32ffee00a74f6a5309 Size: 176378368 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/rhel8/postgresql-10@sha256:ed2da0eed3f495f5455f490cdf7f7943420f64b0cf541271a2d315a3f9e9744c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:05Z" name: sha256:ed2da0eed3f495f5455f490cdf7f7943420f64b0cf541271a2d315a3f9e9744c resourceVersion: "14107" uid: 2c14eb43-97be-4531-906c-a55d7d1032b6 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c4877503c8d2f934dcdfd76623f2b9935529fe73a1432cae4abba022c6951afd size: 79158758 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:c6ace093db543ef6bb833893c286f6ea30dbca731056cc2609a543935b53b61b size: 121874290 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a56226cc9e15515d8100879ba29506372250c387adb71cc3d5b382242374aa59 size: 33823585 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jws-5.7/tomcat/bin/launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - JAVA_HOME=/usr/lib/jvm/java-11 - JAVA_VENDOR=openjdk - JAVA_VERSION=11 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - CATALINA_OPTS=-Djava.security.egd=file:/dev/./urandom - JBOSS_PRODUCT=webserver - JBOSS_WEBSERVER_VERSION=5.7.1 - JPDA_ADDRESS=8000 - JWS_HOME=/opt/jws-5.7/tomcat - PRODUCT_VERSION=5.7.1 - TOMCAT_VERSION=9.0.62 - JBOSS_CONTAINER_MAVEN_35_MODULE=/opt/jboss/container/maven/35/ - MAVEN_VERSION=3.5 - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JBOSS_CONTAINER_JWS_S2I_MODULE=/opt/jboss/container/jws/s2i - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss-webserver-5/jws57-openjdk11-rhel8-openshift - JBOSS_IMAGE_VERSION=5.7.1 - STI_BUILDER=jee 
ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-03-22T11:54:19 com.redhat.component: jboss-webserver-57-openjdk11-rhel8-openshift-container com.redhat.deployments-dir: /opt/jws-5.7/tomcat/webapps com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: JPDA_ADDRESS:8000 com.redhat.license_terms: https://www.redhat.com/agreements description: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK11 on UBI8 distribution-scope: public io.buildah.version: 1.27.3 io.cekit.version: 4.3.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running web applications on JBoss Web Server 5.7 with OpenJDK11 - Tomcat v9 io.k8s.display-name: JBoss Web Server 5.7 OpenJDK11 io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java,tomcat9 maintainer: szappis@redhat.com name: jboss-webserver-5/jws57-openjdk11-rhel8-openshift org.jboss.container.deployments-dir: /deployments org.jboss.deployments-dir: /opt/jws-5.7/tomcat/webapps org.jboss.product: webserver-tomcat9 org.jboss.product.openjdk.version: "11" org.jboss.product.version: 5.7.1 org.jboss.product.webserver-tomcat9.version: 5.7.1 release: "2.1679484703" summary: Red Hat JBoss Web Server 5.7 - Tomcat 9 OpenShift container image with OpenJDK11 on UBI8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/jboss-webserver-5/jws57-openjdk11-rhel8-openshift/images/5.7.1-2.1679484703 vcs-ref: 8863cf5802c30ccddffcde5d418af282f73367b8 vcs-type: git vendor: Red Hat, Inc. version: 5.7.1 User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-03-22T11:57:08Z" Id: sha256:f60910cd13151fcb24fa8547d4b08aad0c9df0b11a1770d7e72b320d400bb2f7 Size: 234897858 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/jboss-webserver-5/jws57-openjdk11-openshift-rhel8@sha256:eeb0c539ee7ffbd2f1e6eb326204c6f69c554ac5acf0454e9d68d75ffe954f7c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:00Z" name: sha256:eeb0c539ee7ffbd2f1e6eb326204c6f69c554ac5acf0454e9d68d75ffe954f7c resourceVersion: "13697" uid: 3277721d-50b7-44ee-986e-5fb133460c02 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:47a1453ca54b1db59a81a44dc7ad1efc177b68a6e40a98e297c059f754e6356f size: 75360688 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - 
JBOSS_IMAGE_NAME=ubi8/openjdk-8-runtime - JBOSS_IMAGE_VERSION=1.17 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-10-23T16:02:59 com.redhat.component: openjdk-8-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "4" summary: Image for Red Hat OpenShift providing OpenJDK 1.8 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8-runtime/images/1.17-4 vcs-ref: 1e1eb748239d1923f81e45eaa6e9f67b93293ccb vcs-type: git vendor: Red Hat, Inc. version: "1.17" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-10-23T16:12:48Z" Id: sha256:11f24744085d08e760dc2d1ee7519ee89aea128730ebebb300ab20a05fa6a9ae Size: 114687050 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8-runtime@sha256:eed7e29bf583e4f01e170bb9f22f2a78098bf15243269b670c307caa6813b783 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:eed7e29bf583e4f01e170bb9f22f2a78098bf15243269b670c307caa6813b783 resourceVersion: "14225" uid: bff4fa90-26bc-465d-be84-75085ea9bf96 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5329d7039f252afc1c5d69521ef7e674f71c36b50db99b369cbb52aa9e0a6782 size: 39330100 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:d8b515f03629826cbe0e8d4333e6c211a736172af3d5a42f4837356bc5cccc68 size: 91380360 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.16 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-07-19T16:18:52 com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" 
io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "2" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.16-2 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 3aa842acd0430ce1e0a6ba3219d036dd6ec6337b vcs-type: git vendor: Red Hat, Inc. version: "1.16" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-07-19T16:25:51Z" Id: sha256:88d418a60a56957c2c9b225d1b28de2cc6aeab1986f6cb6f01497eb1953612ef Size: 130729177 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:f438230ed2c2e609d0d7dbc430ccf1e9bad2660e6410187fd6e9b14a2952e70b kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:f438230ed2c2e609d0d7dbc430ccf1e9bad2660e6410187fd6e9b14a2952e70b resourceVersion: "14156" uid: 86f38bb8-6ae4-4753-bd9d-03187c290fea - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:af063699af1c142fce6707dc9306d122355e61bd23ded0d18f8a4ecfbf3aa89a size: 78847792 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:c81d6d556e6e3a4255dd2709ce18578bfbbf3eed10a4efb966bf99ab69c79e05 size: 9405288 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:d9e405c2a1a004feffa67c8e53dc0965b77b9c67d91626366ee9e53dd24e3de4 size: 199332407 - mediaType: application/vnd.oci.image.layer.v1.tar+gzip name: sha256:37aac202078354aebdb36c3ecb09116035479525ca00deb05fe74fdfd585060e size: 25713137 dockerImageManifestMediaType: application/vnd.oci.image.manifest.v1+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /bin/sh - -c - $STI_SCRIPTS_PATH/usage Entrypoint: - container-entrypoint Env: - container=oci - VERSION=10 - STI_SCRIPTS_URL=image:///usr/libexec/s2i - STI_SCRIPTS_PATH=/usr/libexec/s2i - APP_ROOT=/opt/app-root - HOME=/opt/app-root/src - PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PLATFORM=el10 - NODEJS_VER=22 - PHP_VERSION=8.3 - PHP_VER_SHORT=83 - NAME=php - SUMMARY=Platform for building and running PHP 8.3 applications - DESCRIPTION=PHP 8.3 available as container is a base platform for building and running various PHP 8.3 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. 
- PHP_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/php/ - APP_DATA=/opt/app-root/src - PHP_DEFAULT_INCLUDE_PATH=/usr/share/pear - PHP_SYSCONF_PATH=/etc - PHP_HTTPD_CONF_FILE=php.conf - PHP_FPM_CONF_D_PATH=/etc/php-fpm.d - PHP_FPM_CONF_FILE=www.conf - PHP_FPM_RUN_DIR=/run/php-fpm - PHP_MAIN_FPM_CONF_FILE=/etc/php-fpm.conf - PHP_FPM_LOG_PATH=/var/log/php-fpm - HTTPD_CONFIGURATION_PATH=/opt/app-root/etc/conf.d - HTTPD_MAIN_CONF_PATH=/etc/httpd/conf - HTTPD_MAIN_CONF_D_PATH=/etc/httpd/conf.d - HTTPD_MODULES_CONF_D_PATH=/etc/httpd/conf.modules.d - HTTPD_VAR_RUN=/var/run/httpd - HTTPD_DATA_PATH=/var/www - HTTPD_DATA_ORIG_PATH=/var/www - HTTPD_VAR_PATH=/var ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: "2025-10-29T04:08:18Z" com.redhat.component: php-83-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI cpe: cpe:/o:redhat:enterprise_linux:10.0 description: PHP 8.3 available as container is a base platform for building and running various PHP 8.3 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. distribution-scope: public help: For more information visit https://github.com/sclorg/s2i-php-container io.buildah.version: 1.41.4 io.k8s.description: PHP 8.3 available as container is a base platform for building and running various PHP 8.3 applications and frameworks. PHP is an HTML-embedded scripting language. PHP attempts to make it easy for developers to write dynamically generated web pages. PHP also offers built-in database integration for several commercial and non-commercial database management systems, so writing a database-enabled webpage with PHP is fairly simple. The most common use of PHP coding is probably as a replacement for CGI scripts. io.k8s.display-name: Apache 2.4 with PHP 8.3 io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/libexec/s2i io.openshift.tags: builder,php,php83,php-83 io.s2i.scripts-url: image:///usr/libexec/s2i maintainer: SoftwareCollections.org name: ubi10/php-83 org.opencontainers.image.revision: 8ded5302552207bde31b13b6c323e5b6d584dc8d release: "1761710868" summary: Platform for building and running PHP 8.3 applications url: https://catalog.redhat.com/en/search?searchType=containers usage: s2i build https://github.com/sclorg/s2i-php-container.git --context-dir=8.3/test/test-app ubi10/php-83 sample-server vcs-ref: 8ded5302552207bde31b13b6c323e5b6d584dc8d vcs-type: git vendor: Red Hat, Inc. 
version: "10.0" User: "1001" WorkingDir: /opt/app-root/src ContainerConfig: {} Created: "2025-10-29T04:08:25Z" Id: sha256:df662744b3599c199c217d916b2a4f5afda6970d90d1ac07c202f0251a8a0417 Size: 313320303 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/ubi10/php-83@sha256:f4fecc3946301cef5130f3eaaa33d9c15426437d46efe70b507ac90f03364307 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:04Z" name: sha256:f4fecc3946301cef5130f3eaaa33d9c15426437d46efe70b507ac90f03364307 resourceVersion: "14083" uid: e4dff983-c1a7-4e9b-aab3-21631ae67699 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:256c6fd715f2ea75bdb9f2447d80820d93b9cd01f2aec8167c0717c1800549b1 size: 75829357 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:aa58f5fa817d02a915c95c39e12d6b15ffc8c346799058bb158d0cdf120e00c1 size: 1306 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:42e80fd47395a049eb10df53b857280d5965de326d0bc3df09c0d37875fa94e4 size: 3898037 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9bdac4114b88f8681c1643e16b330112d01c5b71ba72c7f54254e683d919cd48 size: 84354649 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7f37d65200f0aab4e6f5c0df8d6ae10e729ccca9bcf5e27525157d61842a105e size: 209220958 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:70657bf8cc141aa7c7f0fb3566a5b3d8564ac595018edd6cbd27e573327a829a size: 269185973 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:95cb831de8d57784b131903926fdfb1c94c336c41c3f942c2bd5240421a602a1 size: 306234365 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss-eap-7/eap71-openshift - JBOSS_IMAGE_VERSION=1.3 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_EAP_VERSION=7.1.5.GA - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.1.5.GA - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - DEFAULT_ADMIN_USERNAME=eapadmin - HTTPS_ENABLE_HTTP2=true - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JOLOKIA_VERSION=1.5.0 - MAVEN_VERSION=3.5 - WILDFLY_CAMEL_VERSION=5.2.0.fuse-720023-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: f8f52fae6eae Image: 78b7765799c1c765b05c95e3acba83e0cd5081f43cb103e11d79f2a875c3bb82 Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-04-09T18:05:48.549573 com.redhat.build-host: cpt-0012.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 description: Platform for building and running Apache Camel applications on EAP 7.1 distribution-scope: public io.cekit.version: 2.2.5 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.1 
io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Cloud Enablement Feedback name: fuse7/fuse-eap-openshift org.concrt.version: 2.2.5 org.jboss.product: eap org.jboss.product.eap.version: 7.1.5.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.1.5.GA release: "4.1554788912" summary: Platform for building and running Apache Camel applications on EAP 7.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.2-4.1554788912 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: a754f58b06f9194e7a8f10b049868bd6fac80ac7 vcs-type: git vendor: Red Hat, Inc. version: "1.2" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JBOSS_IMAGE_NAME=jboss-eap-7/eap71-openshift - JBOSS_IMAGE_VERSION=1.3 - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_EAP_VERSION=7.1.5.GA - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.1.5.GA - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - DEFAULT_ADMIN_USERNAME=eapadmin - HTTPS_ENABLE_HTTP2=true - JBOSS_MODULES_SYSTEM_PKGS=org.jboss.logmanager,jdk.nashorn.api - JOLOKIA_VERSION=1.5.0 - MAVEN_VERSION=3.5 - WILDFLY_CAMEL_VERSION=5.2.0.fuse-720023-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: f8f52fae6eae Image: sha256:92e39fef0f39140a6b78b1b623ace311310899682484124361485fdfb523198c Labels: architecture: x86_64 authoritative-source-url: registry.access.redhat.com build-date: 2019-04-09T18:05:48.549573 com.redhat.build-host: cpt-0012.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 description: Platform for building and running Apache Camel applications on EAP 7.1 distribution-scope: public io.cekit.version: 2.2.5 io.fabric8.s2i.version.jolokia: 1.5.0-redhat-1 io.fabric8.s2i.version.maven: "3.5" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.1 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Cloud Enablement Feedback name: fuse7/fuse-eap-openshift org.concrt.version: 2.2.5 org.jboss.product: eap org.jboss.product.eap.version: 7.1.5.GA org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.1.5.GA release: "4.1554788912" summary: Platform for building and running Apache Camel applications on EAP 7.1 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift/images/1.2-4.1554788912 usage: This image is very generic and does not serve a single use case. Use it as a base to build your own images. vcs-ref: a754f58b06f9194e7a8f10b049868bd6fac80ac7 vcs-type: git vendor: Red Hat, Inc. 
version: "1.2" User: "185" WorkingDir: /home/jboss Created: "2019-04-09T18:08:29Z" DockerVersion: 1.13.1 Id: sha256:2db07314cd40f1cfde19fa979f6b970b84d1a7dc418b057d09900b3511a370b9 Size: 948731354 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift@sha256:f74e72bc7bb13dec9f38ef9e00a8665a7c08a386176ca4b3c41075491e8d07e7 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:f74e72bc7bb13dec9f38ef9e00a8665a7c08a386176ca4b3c41075491e8d07e7 resourceVersion: "14027" uid: 7f39cbd5-17f0-4532-8b12-35dd7c7f7359 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:158b4527561fa6bd9dc89217fff5b1f4cce16fdc5a5aef36345db0554ba996fc size: 39501292 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:a3ba00ce78fe80837f49d37f5f538d9f7dc9eb8b1627350041496a99028cdf26 size: 1751 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:1701390f3ce7d7dd4e2c941918153066ba87cefb617a6760b39db8ab5dbc2f05 size: 112626814 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.3 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: fa28dc65189d Image: 7ddccef463e2d1c41d67c0b509163c8026ed422eeb86fb251a7a38f0aaa24687 Labels: architecture: x86_64 build-date: 2021-07-23T17:36:40.565755 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain 
Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "15" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.3-15 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: fa3b855666b09a7f693162b72d089b43e2a493f0 vcs-type: git vendor: Red Hat, Inc. version: "1.3" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-8 - JBOSS_IMAGE_VERSION=1.3 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Hostname: fa28dc65189d Image: sha256:9f574112daf9dd02a36a7941d215ed2ec978a741e40d952f4ae6a202ce3265d1 Labels: architecture: x86_64 build-date: 2021-07-23T17:36:40.565755 com.redhat.build-host: cpt-1007.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-8-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-8 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 
1.8.0 org.jboss.product.version: 1.8.0 release: "15" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 1.8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-8/images/1.3-15 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: fa3b855666b09a7f693162b72d089b43e2a493f0 vcs-type: git vendor: Red Hat, Inc. version: "1.3" User: "185" WorkingDir: /home/jboss Created: "2021-07-23T17:38:32Z" DockerVersion: 1.13.1 Id: sha256:6e220244fc89ed9fed3120a4ffbd12c0556e49457829fc46f24bd7079ffd21e3 Size: 152137321 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-8@sha256:f7ca08a8dda3610fcc10cc1fe5f5d0b9f8fc7a283b01975d0fe2c1e77ae06193 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:09Z" name: sha256:f7ca08a8dda3610fcc10cc1fe5f5d0b9f8fc7a283b01975d0fe2c1e77ae06193 resourceVersion: "14216" uid: 6194a097-3738-45f0-81b0-064869fa98c5 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:5ad1bac7764c70a157f75114885b9f1f9e6c5931b4e440f6fae93b0fa0af5a91 size: 79790422 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:7c8c73b6c7403cefa799c92d3a77d9fbbed6514c744e5e32dd77a9d8c79aecb1 size: 428797001 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:9c8f327aa720dabe8098b92fc3a7cbbf7d7869869a36c6fe79f49963ac848ee2 size: 1098367117 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. 
Config: Cmd: - /opt/eap/bin/openshift-launch.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - EAP_FULL_GROUPID=org.jboss.eap - JBOSS_EAP_VERSION=7.4.7 - JBOSS_HOME=/opt/eap - JBOSS_PRODUCT=eap - LAUNCH_JBOSS_IN_BACKGROUND=true - PRODUCT_VERSION=7.4.7 - WILDFLY_VERSION=7.4.7.GA-redhat-00003 - SSO_FORCE_LEGACY_SECURITY=true - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - S2I_SOURCE_DEPLOYMENTS_FILTER=*.war *.ear *.rar *.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/galleon-m2-repository - GALLEON_MAVEN_BUILD_IMG_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/build-image-settings.xml - GALLEON_MAVEN_SETTINGS_XML=/opt/jboss/container/wildfly/s2i/galleon/settings.xml - GALLEON_VERSION=4.2.8.Final - GALLEON_WILDFLY_VERSION=5.2.6.Final - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_DIR=/opt/jboss/container/wildfly/s2i/galleon - JBOSS_CONTAINER_WILDFLY_S2I_GALLEON_PROVISION=/opt/jboss/container/wildfly/s2i/galleon/provisioning/generic_provisioning - JBOSS_CONTAINER_WILDFLY_S2I_MODULE=/opt/jboss/container/wildfly/s2i - S2I_COPY_SERVER=true - TMP_GALLEON_LOCAL_MAVEN_REPO=/opt/jboss/container/wildfly/s2i/galleon/tmp-galleon-m2-repository - WILDFLY_S2I_OUTPUT_DIR=/s2i-output - DELETE_BUILD_ARTIFACTS=true - GALLEON_BUILD_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_DEFAULT_FAT_SERVER=/opt/jboss/container/eap/galleon/definitions/fat-default-server - GALLEON_DEFAULT_SERVER=/opt/jboss/container/eap/galleon/definitions/slim-default-server - GALLEON_DEFINITIONS=/opt/jboss/container/eap/galleon/definitions - GALLEON_FP_COMMON_PKG_NAME=eap.s2i.common - GALLEON_FP_PATH=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack - GALLEON_MAVEN_REPO_HOOK_SCRIPT=/opt/jboss/container/eap/galleon/patching.sh - GALLEON_PROVISON_FP_MAVEN_ARGS_APPEND=-Dcom.redhat.xpaas.repo.jbossorg - GALLEON_S2I_FP_ARTIFACT_ID=eap-s2i-galleon-pack - GALLEON_S2I_FP_GROUP_ID=org.jboss.eap.galleon.s2i - GALLEON_S2I_PRODUCER_NAME=eap-s2i - JBOSS_CONTAINER_EAP_GALLEON_FP_PACKAGES=/opt/jboss/container/eap/galleon/eap-s2i-galleon-pack/src/main/resources/packages - OFFLINER_URLS=--url https://repo1.maven.org/maven2/ --url https://repository.jboss.org/nexus/content/groups/public/ --url https://maven.repository.redhat.com/ga/ - S2I_FP_VERSION=23.0.0.Final - WILDFLY_DIST_MAVEN_LOCATION=https://repository.jboss.org/nexus/content/groups/public/org/wildfly/wildfly-dist - DEFAULT_ADMIN_USERNAME=eapadmin - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - JBOSS_MODULES_SYSTEM_PKGS=jdk.nashorn.api,com.sun.crypto.provider - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.7.1 - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - 
JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - OFFLINER_VERSION=1.6 - AB_PROMETHEUS_JMX_EXPORTER_PORT=9799 - HTTPS_ENABLE_HTTP2=true - JBOSS_IMAGE_NAME=jboss-eap-7/eap74-openjdk8-openshift-rhel7 - JBOSS_IMAGE_VERSION=7.4.7 - LANG=C.utf8 - WILDFLY_CAMEL_VERSION=5.9.0.fuse-7_11_1-00014-redhat-00001 ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2023-06-20T12:51:18 com.redhat.component: fuse-eap-openshift-container com.redhat.deployments-dir: /opt/eap/standalone/deployments com.redhat.dev-mode: DEBUG:true com.redhat.dev-mode.port: DEBUG_PORT:8787 com.redhat.license_terms: https://www.redhat.com/agreements description: Platform for building and running Apache Camel applications on EAP 7.4 distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 3.2.1 io.fabric8.s2i.version.jolokia: 1.7.1.redhat-00001 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running Apache Camel applications on EAP 7.4 io.k8s.display-name: Fuse for OpenShift - EAP based io.openshift.expose-services: 8080:http io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,javaee,eap,eap7,fuse maintainer: Thomas Diesler name: fuse7/fuse-eap-openshift-jdk8-rhel7 org.jboss.container.deployments-dir: /deployments org.jboss.product: eap org.jboss.product.eap.version: 7.4.7 org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 7.4.7 release: "35.1687265402" summary: Platform for building and running Apache Camel applications on EAP 7.4 url: https://access.redhat.com/containers/#/registry.access.redhat.com/fuse7/fuse-eap-openshift-jdk8-rhel7/images/1.11-35.1687265402 vcs-ref: fb59c4aa5b79f841b05c9da17745ccf540244d31 vcs-type: git vendor: Red Hat, Inc. 
version: "1.11" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-06-20T12:54:43Z" Id: sha256:efb5ba3eaa6f2b624e612e278f56f5578588f34b7282eaa87eeef69a0a11ce16 Size: 1607007164 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.redhat.io/fuse7/fuse-eap-openshift-jdk8-rhel7@sha256:f7d4386680e3a44e3bf8bacc3ebfe3224232e44e4d1e2e7167aa1b4970f2866c kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:03Z" name: sha256:f7d4386680e3a44e3bf8bacc3ebfe3224232e44e4d1e2e7167aa1b4970f2866c resourceVersion: "14024" uid: d68b0529-2c81-4723-91f0-58c7d1914d1f - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:4752687a61a97d6f352ae62c381c87564bcb2f5b6523a05510ca1fb60d640216 size: 36442442 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0344366a246a0f7590c2bae4536c01f15f20c6d802b4654ce96ac81047bc23f3 size: 1740 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:1e443c4e5fd4cedbd0a83cfd29362c370b2a7f9713a6af5bfd1d824275aef75c size: 108563166 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /usr/local/s2i/run Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-17 - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-17 - JBOSS_IMAGE_VERSION=1.13 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: fd16d6f2e774 Image: 539deff60bbd7cdfcae72170baecc0d4ae6e920a39f8afbc3bd80b305ae42fee Labels: architecture: x86_64 build-date: 2022-06-15T16:27:27.479076 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-17-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17 
org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "1.1655306439" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17/images/1.13-1.1655306439 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 8734c13ed58f0a634230e9ea70b053f2aceaeab6 vcs-type: git vendor: Red Hat, Inc. version: "1.13" User: "185" WorkingDir: /home/jboss ContainerConfig: Cmd: - /bin/sh - -c - '#(nop) ' - USER [185] Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-17 - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=ubi8/openjdk-17 - JBOSS_IMAGE_VERSION=1.13 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Hostname: fd16d6f2e774 Image: sha256:b2b94c298f51760d3b97a2751b6b6961368cbddd87f237ea04fb6596edfbcac7 Labels: architecture: x86_64 build-date: 2022-06-15T16:27:27.479076 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: openjdk-17-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 distribution-scope: public io.cekit.version: 3.11.0 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17 org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "1.1655306439" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 17 url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17/images/1.13-1.1655306439 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 8734c13ed58f0a634230e9ea70b053f2aceaeab6 vcs-type: git vendor: Red Hat, Inc. 
version: "1.13" User: "185" WorkingDir: /home/jboss Created: "2022-06-15T16:32:40Z" DockerVersion: 1.13.1 Id: sha256:60d04f65477a71cf8fb3347dc0e7bf18ba670ac421920938944e912414f329fb Size: 145014172 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17@sha256:f89a54e6d1340be8ddd84a602cb4f1f27c1983417f655941645bf11809d49f18 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:f89a54e6d1340be8ddd84a602cb4f1f27c1983417f655941645bf11809d49f18 resourceVersion: "14144" uid: 0beba5fd-c4ef-4ae5-b3d8-b9ac7dad4238 - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:dc35b837139a95d1b9f7f7b0435a024a74ab972416bdc248f3f608c9f917a753 size: 39307955 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:8876cf1d7108be79268dd431b736ebe667a42066087a2f59dcbe11f09ed0e453 size: 90699505 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Config: Cmd: - /opt/jboss/container/java/run/run-java.sh Env: - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/jre - JAVA_VENDOR=openjdk - JAVA_VERSION=17 - JBOSS_CONTAINER_OPENJDK_JRE_MODULE=/opt/jboss/container/openjdk/jre - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_IMAGE_NAME=ubi8/openjdk-17-runtime - JBOSS_IMAGE_VERSION=1.17 - LANG=C.utf8 ExposedPorts: 8080/tcp: {} 8443/tcp: {} Labels: architecture: x86_64 build-date: 2023-10-23T16:08:26 com.redhat.component: openjdk-17-runtime-ubi8-container com.redhat.license_terms: https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI description: Image for Red Hat OpenShift providing OpenJDK 17 runtime distribution-scope: public io.buildah.version: 1.29.0 io.cekit.version: 4.7.0 io.k8s.description: Platform for running plain Java applications (fat-jar and flat classpath) io.k8s.display-name: Java Applications io.openshift.expose-services: "" io.openshift.tags: java maintainer: Red Hat OpenJDK name: ubi8/openjdk-17-runtime org.jboss.product: openjdk org.jboss.product.openjdk.version: "17" org.jboss.product.version: "17" release: "4" summary: Image for Red Hat OpenShift providing OpenJDK 17 runtime url: https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/openjdk-17-runtime/images/1.17-4 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 3889748da09fe5896b09a6b79b9d3fb95244e361 vcs-type: git vendor: Red Hat, Inc. 
version: "1.17" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2023-10-23T16:15:41Z" Id: sha256:1d67330fe844c4813d1cb372e06f8c06b1741fa2b77e6592f8390bfd0fe8fa89 Size: 130026126 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/ubi8/openjdk-17-runtime@sha256:f953734d89252219c3dcd8f703ba8b58c9c8a0f5dfa9425c9e56ec0834f7d288 kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:07Z" name: sha256:f953734d89252219c3dcd8f703ba8b58c9c8a0f5dfa9425c9e56ec0834f7d288 resourceVersion: "14157" uid: 6c1a3719-2b73-40f4-8bc8-d8978bf485fa - apiVersion: image.openshift.io/v1 dockerImageLayers: - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:0a017d456fb3a760722ba4895579d8a412aec74e61d6805b04df6527b70fce6b size: 80807726 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:80a2bcb42ca25702f1e5b3b71dd25c6882ae0a2a03bb87d0b76c579cef9806a4 size: 1607 - mediaType: application/vnd.docker.image.rootfs.diff.tar.gzip name: sha256:bbea88449c03fd029a5d4e6bf0457f140aacd45f0639d581045c0d1dea2efc9f size: 113937969 dockerImageManifestMediaType: application/vnd.docker.distribution.manifest.v2+json dockerImageMetadata: Architecture: amd64 Author: Red Hat, Inc. Config: Cmd: - /usr/local/s2i/run Env: - container=oci - HOME=/home/jboss - JAVA_HOME=/usr/lib/jvm/java-1.8.0 - JAVA_VENDOR=openjdk - JAVA_VERSION=1.8.0 - JBOSS_CONTAINER_OPENJDK_JDK_MODULE=/opt/jboss/container/openjdk/jdk - AB_PROMETHEUS_JMX_EXPORTER_CONFIG=/opt/jboss/container/prometheus/etc/jmx-exporter-config.yaml - JBOSS_CONTAINER_PROMETHEUS_MODULE=/opt/jboss/container/prometheus - AB_JOLOKIA_AUTH_OPENSHIFT=true - AB_JOLOKIA_HTTPS=true - AB_JOLOKIA_PASSWORD_RANDOM=true - JBOSS_CONTAINER_JOLOKIA_MODULE=/opt/jboss/container/jolokia - JOLOKIA_VERSION=1.6.2 - LD_PRELOAD=libnss_wrapper.so - NSS_WRAPPER_GROUP=/etc/group - NSS_WRAPPER_PASSWD=/home/jboss/passwd - S2I_SOURCE_DEPLOYMENTS_FILTER=*.jar - JBOSS_CONTAINER_S2I_CORE_MODULE=/opt/jboss/container/s2i/core/ - JBOSS_CONTAINER_JAVA_PROXY_MODULE=/opt/jboss/container/java/proxy - JBOSS_CONTAINER_JAVA_JVM_MODULE=/opt/jboss/container/java/jvm - JBOSS_CONTAINER_MAVEN_36_MODULE=/opt/jboss/container/maven/36/ - MAVEN_VERSION=3.6 - JBOSS_CONTAINER_UTIL_LOGGING_MODULE=/opt/jboss/container/util/logging/ - JBOSS_CONTAINER_MAVEN_DEFAULT_MODULE=/opt/jboss/container/maven/default/ - JBOSS_CONTAINER_MAVEN_S2I_MODULE=/opt/jboss/container/maven/s2i - JAVA_DATA_DIR=/deployments/data - JBOSS_CONTAINER_JAVA_RUN_MODULE=/opt/jboss/container/java/run - JBOSS_CONTAINER_JAVA_S2I_MODULE=/opt/jboss/container/java/s2i - JBOSS_IMAGE_NAME=redhat-openjdk-18/openjdk18-openshift - JBOSS_IMAGE_VERSION=1.14 - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/s2i ExposedPorts: 8080/tcp: {} 8443/tcp: {} 8778/tcp: {} Labels: architecture: x86_64 build-date: 2022-10-17T13:55:04 com.redhat.build-host: cpt-1002.osbs.prod.upshift.rdu2.redhat.com com.redhat.component: redhat-openjdk-18-openjdk18-openshift-container com.redhat.license_terms: https://www.redhat.com/agreements description: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 distribution-scope: public io.buildah.version: 1.26.2 io.cekit.version: 3.11.0 io.fabric8.s2i.version.jolokia: 1.6.2-redhat-00002 io.fabric8.s2i.version.maven: "3.6" io.k8s.description: Platform for building and running plain Java applications (fat-jar and flat 
classpath) io.k8s.display-name: Java Applications io.openshift.s2i.destination: /tmp io.openshift.s2i.scripts-url: image:///usr/local/s2i io.openshift.tags: builder,java maintainer: Red Hat OpenJDK name: redhat-openjdk-18/openjdk18-openshift org.jboss.container.deployments-dir: /deployments org.jboss.product: openjdk org.jboss.product.openjdk.version: 1.8.0 org.jboss.product.version: 1.8.0 release: "3" summary: Source To Image (S2I) image for Red Hat OpenShift providing OpenJDK 8 url: https://access.redhat.com/containers/#/registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift/images/1.14-3 usage: https://access.redhat.com/documentation/en-us/red_hat_jboss_middleware_for_openshift/3/html/red_hat_java_s2i_for_openshift/ vcs-ref: 7f66438357437cedfc17471d30cc6ae81cdc7e47 vcs-type: git vendor: Red Hat, Inc. version: "1.14" User: "185" WorkingDir: /home/jboss ContainerConfig: {} Created: "2022-10-17T14:04:57Z" Id: sha256:c3bd9b0150b83dc99fe250e474b9cc3e60efa871da8cd960e3f93f3fbed7f207 Size: 194772699 apiVersion: image.openshift.io/1.0 kind: DockerImage dockerImageMetadataVersion: "1.0" dockerImageReference: registry.access.redhat.com/redhat-openjdk-18/openjdk18-openshift@sha256:fa9556628c15b8eb22cafccb737b3fbcecfd681a5c2cfea3302dd771c644a7db kind: Image metadata: annotations: image.openshift.io/dockerLayersOrder: ascending creationTimestamp: "2025-11-02T08:11:08Z" name: sha256:fa9556628c15b8eb22cafccb737b3fbcecfd681a5c2cfea3302dd771c644a7db resourceVersion: "14164" uid: 81d5a322-e2ef-4baa-999c-09f91b848a3f kind: List metadata: resourceVersion: "" home/zuul/zuul-output/logs/controller/post_oc_get_imagestream.log0000644000175000017500000001276715117042533024655 0ustar zuulzuul*** [INFO] Showing oc get 'imagestream' NAME IMAGE REPOSITORY TAGS UPDATED ose-operator-registry default-route-openshift-image-registry.apps-crc.testing/service-telemetry/ose-operator-registry v4.13 11 minutes ago service-telemetry-framework-index default-route-openshift-image-registry.apps-crc.testing/service-telemetry/service-telemetry-framework-index service-telemetry-operator-bundle default-route-openshift-image-registry.apps-crc.testing/service-telemetry/service-telemetry-operator-bundle nightly-head 11 minutes ago smart-gateway-operator-bundle default-route-openshift-image-registry.apps-crc.testing/service-telemetry/smart-gateway-operator-bundle nightly-head 11 minutes ago [INFO] oc get 'imagestream' -oyaml apiVersion: v1 items: - apiVersion: image.openshift.io/v1 kind: ImageStream metadata: annotations: openshift.io/image.dockerRepositoryCheck: "2025-12-12T16:28:11Z" creationTimestamp: "2025-12-12T16:28:11Z" generation: 1 name: ose-operator-registry namespace: service-telemetry resourceVersion: "43629" uid: 4ae8851f-07d6-4e73-8283-af9f811541ab spec: lookupPolicy: local: false tags: - annotations: null from: kind: DockerImage name: quay.io/openshift/origin-operator-registry:4.13 generation: 1 importPolicy: importMode: Legacy insecure: true name: v4.13 referencePolicy: type: Source status: dockerImageRepository: image-registry.openshift-image-registry.svc:5000/service-telemetry/ose-operator-registry publicDockerImageRepository: default-route-openshift-image-registry.apps-crc.testing/service-telemetry/ose-operator-registry tags: - items: - created: "2025-12-12T16:28:11Z" dockerImageReference: quay.io/openshift/origin-operator-registry@sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a generation: 1 image: sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a tag: v4.13 - 
apiVersion: image.openshift.io/v1 kind: ImageStream metadata: creationTimestamp: "2025-12-12T16:28:12Z" generation: 1 name: service-telemetry-framework-index namespace: service-telemetry resourceVersion: "43638" uid: 1ed39546-31bd-41aa-945b-d7d440c6e83b spec: lookupPolicy: local: false status: dockerImageRepository: image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index publicDockerImageRepository: default-route-openshift-image-registry.apps-crc.testing/service-telemetry/service-telemetry-framework-index - apiVersion: image.openshift.io/v1 kind: ImageStream metadata: annotations: openshift.io/image.dockerRepositoryCheck: "2025-12-12T16:28:03Z" creationTimestamp: "2025-12-12T16:28:03Z" generation: 1 name: service-telemetry-operator-bundle namespace: service-telemetry resourceVersion: "43571" uid: dbf69b25-3b53-4f3e-93b7-afea224e0dda spec: lookupPolicy: local: false tags: - annotations: null from: kind: DockerImage name: quay.io/infrawatch-operators/service-telemetry-operator-bundle:nightly-head generation: 1 importPolicy: importMode: Legacy insecure: true name: nightly-head referencePolicy: type: Source status: dockerImageRepository: image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-operator-bundle publicDockerImageRepository: default-route-openshift-image-registry.apps-crc.testing/service-telemetry/service-telemetry-operator-bundle tags: - items: - created: "2025-12-12T16:28:03Z" dockerImageReference: quay.io/infrawatch-operators/service-telemetry-operator-bundle@sha256:7d0f3dfa01d60f720089fa3a7adf5ce417c12be54872c5973a267e029ba3561b generation: 1 image: sha256:7d0f3dfa01d60f720089fa3a7adf5ce417c12be54872c5973a267e029ba3561b tag: nightly-head - apiVersion: image.openshift.io/v1 kind: ImageStream metadata: annotations: openshift.io/image.dockerRepositoryCheck: "2025-12-12T16:28:04Z" creationTimestamp: "2025-12-12T16:28:04Z" generation: 1 name: smart-gateway-operator-bundle namespace: service-telemetry resourceVersion: "43577" uid: bbffd731-243e-4a98-a99f-6554bacab43f spec: lookupPolicy: local: false tags: - annotations: null from: kind: DockerImage name: quay.io/infrawatch-operators/smart-gateway-operator-bundle:nightly-head generation: 1 importPolicy: importMode: Legacy insecure: true name: nightly-head referencePolicy: type: Source status: dockerImageRepository: image-registry.openshift-image-registry.svc:5000/service-telemetry/smart-gateway-operator-bundle publicDockerImageRepository: default-route-openshift-image-registry.apps-crc.testing/service-telemetry/smart-gateway-operator-bundle tags: - items: - created: "2025-12-12T16:28:04Z" dockerImageReference: quay.io/infrawatch-operators/smart-gateway-operator-bundle@sha256:919dbd673de74ca9084a6e2a8e3d3bc6e703857821cde1c1849f1863b2f11fdc generation: 1 image: sha256:919dbd673de74ca9084a6e2a8e3d3bc6e703857821cde1c1849f1863b2f11fdc tag: nightly-head kind: List metadata: resourceVersion: "" home/zuul/zuul-output/logs/controller/post_oc_get_pods.log0000644000175000017500000016572415117042533023326 0ustar zuulzuul*** [INFO] Showing oc get 'pods' NAME READY STATUS RESTARTS AGE elastic-operator-6c994c654b-42tmw 1/1 Running 0 12m elasticsearch-es-default-0 1/1 Running 0 12m infrawatch-operators-6bs58 0/1 ImagePullBackOff 0 5m30s infrawatch-operators-cdpts 0/1 ImagePullBackOff 0 10m [INFO] oc get 'pods' -oyaml apiVersion: v1 items: - apiVersion: v1 kind: Pod metadata: annotations: alm-examples: |- [ { "apiVersion": "elasticsearch.k8s.elastic.co/v1", "kind": 
"Elasticsearch", "metadata": { "name": "elasticsearch-sample" }, "spec": { "version": "9.2.0", "nodeSets": [ { "name": "default", "config": { "node.roles": ["master", "data"], "node.attr.attr_name": "attr_value", "node.store.allow_mmap": false }, "podTemplate": { "metadata": { "labels": { "foo": "bar" } }, "spec": { "containers": [ { "name": "elasticsearch", "resources": { "requests": { "memory": "4Gi", "cpu": 1 }, "limits": { "memory": "4Gi", "cpu": 2 } } } ] } }, "count": 3 } ] } }, { "apiVersion": "kibana.k8s.elastic.co/v1", "kind": "Kibana", "metadata": { "name": "kibana-sample" }, "spec": { "version": "9.2.0", "count": 1, "elasticsearchRef": { "name": "elasticsearch-sample" }, "podTemplate": { "metadata": { "labels": { "foo": "bar" } }, "spec": { "containers": [ { "name": "kibana", "resources": { "requests": { "memory": "1Gi", "cpu": 0.5 }, "limits": { "memory": "2Gi", "cpu": 2 } } } ] } } } }, { "apiVersion": "apm.k8s.elastic.co/v1", "kind": "ApmServer", "metadata": { "name": "apmserver-sample" }, "spec": { "version": "9.2.0", "count": 1, "elasticsearchRef": { "name": "elasticsearch-sample" } } }, { "apiVersion": "enterprisesearch.k8s.elastic.co/v1", "kind": "EnterpriseSearch", "metadata": { "name": "ent-sample" }, "spec": { "version": "9.2.0", "config": { "ent_search.external_url": "https://localhost:3002" }, "count": 1, "elasticsearchRef": { "name": "elasticsearch-sample" } } }, { "apiVersion": "beat.k8s.elastic.co/v1beta1", "kind": "Beat", "metadata": { "name": "heartbeat-sample" }, "spec": { "type": "heartbeat", "version": "9.2.0", "elasticsearchRef": { "name": "elasticsearch-sample" }, "config": { "heartbeat.monitors": [ { "type": "tcp", "schedule": "@every 5s", "hosts": [ "elasticsearch-sample-es-http.default.svc:9200" ] } ] }, "deployment": { "replicas": 1, "podTemplate": { "spec": { "securityContext": { "runAsUser": 0 } } } } } }, { "apiVersion": "agent.k8s.elastic.co/v1alpha1", "kind": "Agent", "metadata": { "name": "agent-sample" }, "spec": { "version": "9.2.0", "elasticsearchRefs": [ { "name": "elasticsearch-sample" } ], "daemonSet": {}, "config": { "inputs": [ { "name": "system-1", "revision": 1, "type": "system/metrics", "use_output": "default", "meta": { "package": { "name": "system", "version": "0.9.1" } }, "data_stream": { "namespace": "default" }, "streams": [ { "id": "system/metrics-system.cpu", "data_stream": { "dataset": "system.cpu", "type": "metrics" }, "metricsets": [ "cpu" ], "cpu.metrics": [ "percentages", "normalized_percentages" ], "period": "10s" } ] } ] } } }, { "apiVersion": "maps.k8s.elastic.co/v1alpha1", "kind": "ElasticMapsServer", "metadata": { "name": "ems-sample" }, "spec": { "version": "9.2.0", "count": 1, "elasticsearchRef": { "name": "elasticsearch-sample" } } }, { "apiVersion": "logstash.k8s.elastic.co/v1alpha1", "kind": "Logstash", "metadata" : { "name": "logstash-sample" }, "spec": { "version": "9.2.0", "count": 1 } } ] capabilities: Deep Insights categories: Database certified: "false" co.elastic.logs/raw: 
'[{"type":"filestream","enabled":true,"id":"eck-container-logs-${data.kubernetes.container.id}","paths":["/var/log/containers/*${data.kubernetes.container.id}.log"],"parsers":[{"container":{}},{"ndjson":{"keys_under_root":true}}],"prospector.scanner.symlinks":true,"processors":[{"convert":{"mode":"rename","ignore_missing":true,"fields":[{"from":"error","to":"_error"}]}},{"convert":{"mode":"rename","ignore_missing":true,"fields":[{"from":"_error","to":"error.message"}]}},{"convert":{"mode":"rename","ignore_missing":true,"fields":[{"from":"source","to":"_source"}]}},{"convert":{"mode":"rename","ignore_missing":true,"fields":[{"from":"_source","to":"event.source"}]}}]}]' containerImage: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 createdAt: "2025-10-31 20:52:32" description: Run Elasticsearch, Kibana, APM Server, Beats, Enterprise Search, Elastic Agent, Elastic Maps Server and Logstash on Kubernetes and OpenShift features.operators.openshift.io/disconnected: "false" features.operators.openshift.io/fips-compliant: "false" features.operators.openshift.io/proxy-aware: "false" features.operators.openshift.io/tls-profiles: "false" features.operators.openshift.io/token-auth-aws: "false" features.operators.openshift.io/token-auth-azure: "false" features.operators.openshift.io/token-auth-gcp: "false" k8s.ovn.org/pod-networks: '{"default":{"ip_addresses":["10.217.0.50/23"],"mac_address":"0a:58:0a:d9:00:32","gateway_ips":["10.217.0.1"],"routes":[{"dest":"10.217.0.0/22","nextHop":"10.217.0.1"},{"dest":"10.217.4.0/23","nextHop":"10.217.0.1"},{"dest":"169.254.0.5/32","nextHop":"10.217.0.1"},{"dest":"100.64.0.0/16","nextHop":"10.217.0.1"}],"ip_address":"10.217.0.50/23","gateway_ip":"10.217.0.1","role":"primary"}}' k8s.v1.cni.cncf.io/network-status: |- [{ "name": "ovn-kubernetes", "interface": "eth0", "ips": [ "10.217.0.50" ], "mac": "0a:58:0a:d9:00:32", "default": true, "dns": {} }] olm.operatorGroup: service-telemetry-operator-group olm.operatorNamespace: service-telemetry olm.targetNamespaces: service-telemetry olmcahash: 02b97c20c00db6979b0f4cccf06449b5fb0cfdfb377da8c48f74f853a625fc69 openshift.io/scc: restricted-v2 operatorframework.io/properties: 
'{"properties":[{"type":"olm.gvk","value":{"group":"agent.k8s.elastic.co","kind":"Agent","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"apm.k8s.elastic.co","kind":"ApmServer","version":"v1"}},{"type":"olm.gvk","value":{"group":"apm.k8s.elastic.co","kind":"ApmServer","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"apm.k8s.elastic.co","kind":"ApmServer","version":"v1beta1"}},{"type":"olm.gvk","value":{"group":"autoscaling.k8s.elastic.co","kind":"ElasticsearchAutoscaler","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"beat.k8s.elastic.co","kind":"Beat","version":"v1beta1"}},{"type":"olm.gvk","value":{"group":"elasticsearch.k8s.elastic.co","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"elasticsearch.k8s.elastic.co","kind":"Elasticsearch","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"elasticsearch.k8s.elastic.co","kind":"Elasticsearch","version":"v1beta1"}},{"type":"olm.gvk","value":{"group":"enterprisesearch.k8s.elastic.co","kind":"EnterpriseSearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"enterprisesearch.k8s.elastic.co","kind":"EnterpriseSearch","version":"v1beta1"}},{"type":"olm.gvk","value":{"group":"kibana.k8s.elastic.co","kind":"Kibana","version":"v1"}},{"type":"olm.gvk","value":{"group":"kibana.k8s.elastic.co","kind":"Kibana","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"kibana.k8s.elastic.co","kind":"Kibana","version":"v1beta1"}},{"type":"olm.gvk","value":{"group":"logstash.k8s.elastic.co","kind":"Logstash","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"maps.k8s.elastic.co","kind":"ElasticMapsServer","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"stackconfigpolicy.k8s.elastic.co","kind":"StackConfigPolicy","version":"v1alpha1"}},{"type":"olm.package","value":{"packageName":"elasticsearch-eck-operator-certified","version":"3.2.0"}}]}' operators.openshift.io/valid-subscription: Elastic Basic license repository: https://github.com/elastic/cloud-on-k8s seccomp.security.alpha.kubernetes.io/pod: runtime/default security.openshift.io/validated-scc-subject-type: user support: elastic.co creationTimestamp: "2025-12-12T16:27:05Z" generateName: elastic-operator-6c994c654b- generation: 1 labels: control-plane: elastic-operator pod-template-hash: 6c994c654b name: elastic-operator-6c994c654b-42tmw namespace: service-telemetry ownerReferences: - apiVersion: apps/v1 blockOwnerDeletion: true controller: true kind: ReplicaSet name: elastic-operator-6c994c654b uid: 7ecdd644-c987-412c-8a2b-0b4ff15c8051 resourceVersion: "42531" uid: 1aa11df6-5c2b-4018-8146-09c5d79b9311 spec: containers: - args: - manager - --config=/conf/eck.yaml - --manage-webhook-certs=false - --enable-webhook - --ubi-only - --distribution-channel=certified-operators env: - name: NAMESPACES valueFrom: fieldRef: apiVersion: v1 fieldPath: metadata.annotations['olm.targetNamespaces'] - name: OPERATOR_NAMESPACE valueFrom: fieldRef: apiVersion: v1 fieldPath: metadata.annotations['olm.operatorNamespace'] - name: OPERATOR_IMAGE value: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 - name: OPERATOR_CONDITION_NAME value: elasticsearch-eck-operator-certified.v3.2.0 image: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 imagePullPolicy: IfNotPresent name: manager ports: - containerPort: 9443 name: https-webhook protocol: TCP resources: limits: cpu: "1" memory: 1Gi requests: cpu: 
100m memory: 150Mi securityContext: allowPrivilegeEscalation: false capabilities: drop: - ALL runAsNonRoot: true runAsUser: 1000670000 terminationMessagePath: /dev/termination-log terminationMessagePolicy: File volumeMounts: - mountPath: /apiserver.local.config/certificates name: apiservice-cert - mountPath: /tmp/k8s-webhook-server/serving-certs name: webhook-cert - mountPath: /var/run/secrets/kubernetes.io/serviceaccount name: kube-api-access-m9pn9 readOnly: true dnsPolicy: ClusterFirst enableServiceLinks: true imagePullSecrets: - name: elastic-operator-dockercfg-rf5wq nodeName: crc preemptionPolicy: PreemptLowerPriority priority: 0 restartPolicy: Always schedulerName: default-scheduler securityContext: fsGroup: 1000670000 seLinuxOptions: level: s0:c26,c10 seccompProfile: type: RuntimeDefault serviceAccount: elastic-operator serviceAccountName: elastic-operator terminationGracePeriodSeconds: 10 tolerations: - effect: NoExecute key: node.kubernetes.io/not-ready operator: Exists tolerationSeconds: 300 - effect: NoExecute key: node.kubernetes.io/unreachable operator: Exists tolerationSeconds: 300 - effect: NoSchedule key: node.kubernetes.io/memory-pressure operator: Exists volumes: - name: apiservice-cert secret: defaultMode: 420 items: - key: tls.crt path: apiserver.crt - key: tls.key path: apiserver.key secretName: elastic-operator-service-cert - name: webhook-cert secret: defaultMode: 420 items: - key: tls.crt path: tls.crt - key: tls.key path: tls.key secretName: elastic-operator-service-cert - name: kube-api-access-m9pn9 projected: defaultMode: 420 sources: - serviceAccountToken: expirationSeconds: 3607 path: token - configMap: items: - key: ca.crt path: ca.crt name: kube-root-ca.crt - downwardAPI: items: - fieldRef: apiVersion: v1 fieldPath: metadata.namespace path: namespace - configMap: items: - key: service-ca.crt path: service-ca.crt name: openshift-service-ca.crt status: conditions: - lastProbeTime: null lastTransitionTime: "2025-12-12T16:27:30Z" status: "True" type: PodReadyToStartContainers - lastProbeTime: null lastTransitionTime: "2025-12-12T16:27:05Z" status: "True" type: Initialized - lastProbeTime: null lastTransitionTime: "2025-12-12T16:27:30Z" status: "True" type: Ready - lastProbeTime: null lastTransitionTime: "2025-12-12T16:27:30Z" status: "True" type: ContainersReady - lastProbeTime: null lastTransitionTime: "2025-12-12T16:27:05Z" status: "True" type: PodScheduled containerStatuses: - allocatedResources: cpu: 100m memory: 150Mi containerID: cri-o://f7468ca2597b25367291c527f32ed32cc5934aaea968905e6debd499cadf6d71 image: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 imageID: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 lastState: {} name: manager ready: true resources: limits: cpu: "1" memory: 1Gi requests: cpu: 100m memory: 150Mi restartCount: 0 started: true state: running: startedAt: "2025-12-12T16:27:30Z" user: linux: gid: 0 supplementalGroups: - 0 - 1000670000 uid: 1000670000 volumeMounts: - mountPath: /apiserver.local.config/certificates name: apiservice-cert - mountPath: /tmp/k8s-webhook-server/serving-certs name: webhook-cert - mountPath: /var/run/secrets/kubernetes.io/serviceaccount name: kube-api-access-m9pn9 readOnly: true recursiveReadOnly: Disabled hostIP: 192.168.126.11 hostIPs: - ip: 192.168.126.11 phase: Running podIP: 10.217.0.50 podIPs: - ip: 10.217.0.50 qosClass: Burstable startTime: 
"2025-12-12T16:27:05Z" - apiVersion: v1 kind: Pod metadata: annotations: co.elastic.logs/module: elasticsearch elasticsearch.k8s.elastic.co/config-hash: "513175922" k8s.ovn.org/pod-networks: '{"default":{"ip_addresses":["10.217.0.53/23"],"mac_address":"0a:58:0a:d9:00:35","gateway_ips":["10.217.0.1"],"routes":[{"dest":"10.217.0.0/22","nextHop":"10.217.0.1"},{"dest":"10.217.4.0/23","nextHop":"10.217.0.1"},{"dest":"169.254.0.5/32","nextHop":"10.217.0.1"},{"dest":"100.64.0.0/16","nextHop":"10.217.0.1"}],"ip_address":"10.217.0.53/23","gateway_ip":"10.217.0.1","role":"primary"}}' k8s.v1.cni.cncf.io/network-status: |- [{ "name": "ovn-kubernetes", "interface": "eth0", "ips": [ "10.217.0.53" ], "mac": "0a:58:0a:d9:00:35", "default": true, "dns": {} }] openshift.io/scc: restricted-v2 policy.k8s.elastic.co/elasticsearch-config-mounts-hash: "" seccomp.security.alpha.kubernetes.io/pod: runtime/default security.openshift.io/validated-scc-subject-type: user update.k8s.elastic.co/timestamp: "2025-12-12T16:28:02.391486162Z" creationTimestamp: "2025-12-12T16:27:31Z" generateName: elasticsearch-es-default- generation: 1 labels: apps.kubernetes.io/pod-index: "0" common.k8s.elastic.co/type: elasticsearch controller-revision-hash: elasticsearch-es-default-9774f4d96 elasticsearch.k8s.elastic.co/cluster-name: elasticsearch elasticsearch.k8s.elastic.co/http-scheme: https elasticsearch.k8s.elastic.co/node-data: "true" elasticsearch.k8s.elastic.co/node-data_cold: "false" elasticsearch.k8s.elastic.co/node-data_content: "false" elasticsearch.k8s.elastic.co/node-data_frozen: "false" elasticsearch.k8s.elastic.co/node-data_hot: "false" elasticsearch.k8s.elastic.co/node-data_warm: "false" elasticsearch.k8s.elastic.co/node-ingest: "true" elasticsearch.k8s.elastic.co/node-master: "true" elasticsearch.k8s.elastic.co/node-ml: "false" elasticsearch.k8s.elastic.co/node-remote_cluster_client: "false" elasticsearch.k8s.elastic.co/node-transform: "false" elasticsearch.k8s.elastic.co/node-voting_only: "false" elasticsearch.k8s.elastic.co/statefulset-name: elasticsearch-es-default elasticsearch.k8s.elastic.co/version: 7.17.20 statefulset.kubernetes.io/pod-name: elasticsearch-es-default-0 tuned.openshift.io/elasticsearch: elasticsearch name: elasticsearch-es-default-0 namespace: service-telemetry ownerReferences: - apiVersion: apps/v1 blockOwnerDeletion: true controller: true kind: StatefulSet name: elasticsearch-es-default uid: f2d2d921-7cff-4334-8b75-b0f22067a17f resourceVersion: "43696" uid: 8b73b1a4-74b4-4b36-9c02-328f2cc9b99a spec: affinity: podAntiAffinity: preferredDuringSchedulingIgnoredDuringExecution: - podAffinityTerm: labelSelector: matchLabels: elasticsearch.k8s.elastic.co/cluster-name: elasticsearch topologyKey: kubernetes.io/hostname weight: 100 automountServiceAccountToken: false containers: - env: - name: POD_IP valueFrom: fieldRef: apiVersion: v1 fieldPath: status.podIP - name: POD_NAME valueFrom: fieldRef: apiVersion: v1 fieldPath: metadata.name - name: NODE_NAME valueFrom: fieldRef: apiVersion: v1 fieldPath: spec.nodeName - name: NAMESPACE valueFrom: fieldRef: apiVersion: v1 fieldPath: metadata.namespace - name: HEADLESS_SERVICE_NAME value: elasticsearch-es-default - name: PROBE_PASSWORD_PATH value: /mnt/elastic-internal/pod-mounted-users/elastic-internal-probe - name: PROBE_USERNAME value: elastic-internal-probe - name: READINESS_PROBE_PROTOCOL value: https - name: NSS_SDB_USE_CACHE value: "no" image: registry.connect.redhat.com/elastic/elasticsearch:7.17.20 imagePullPolicy: IfNotPresent lifecycle: preStop: exec: 
command: - bash - -c - /mnt/elastic-internal/scripts/pre-stop-hook-script.sh name: elasticsearch ports: - containerPort: 9200 name: https protocol: TCP - containerPort: 9300 name: transport protocol: TCP readinessProbe: exec: command: - bash - -c - /mnt/elastic-internal/scripts/readiness-probe-script.sh failureThreshold: 3 initialDelaySeconds: 10 periodSeconds: 5 successThreshold: 1 timeoutSeconds: 5 resources: limits: cpu: "2" memory: 2Gi requests: cpu: "1" memory: 2Gi securityContext: allowPrivilegeEscalation: false capabilities: drop: - ALL privileged: false readOnlyRootFilesystem: true runAsNonRoot: true runAsUser: 1000670000 terminationMessagePath: /dev/termination-log terminationMessagePolicy: File volumeMounts: - mountPath: /mnt/elastic-internal/downward-api name: downward-api readOnly: true - mountPath: /usr/share/elasticsearch/bin name: elastic-internal-elasticsearch-bin-local - mountPath: /mnt/elastic-internal/elasticsearch-config name: elastic-internal-elasticsearch-config readOnly: true - mountPath: /usr/share/elasticsearch/config name: elastic-internal-elasticsearch-config-local - mountPath: /usr/share/elasticsearch/plugins name: elastic-internal-elasticsearch-plugins-local - mountPath: /usr/share/elasticsearch/config/http-certs name: elastic-internal-http-certificates readOnly: true - mountPath: /mnt/elastic-internal/pod-mounted-users name: elastic-internal-probe-user readOnly: true - mountPath: /usr/share/elasticsearch/config/transport-remote-certs/ name: elastic-internal-remote-certificate-authorities readOnly: true - mountPath: /mnt/elastic-internal/scripts name: elastic-internal-scripts readOnly: true - mountPath: /usr/share/elasticsearch/config/transport-certs name: elastic-internal-transport-certificates readOnly: true - mountPath: /mnt/elastic-internal/unicast-hosts name: elastic-internal-unicast-hosts readOnly: true - mountPath: /mnt/elastic-internal/xpack-file-realm name: elastic-internal-xpack-file-realm readOnly: true - mountPath: /usr/share/elasticsearch/data name: elasticsearch-data - mountPath: /usr/share/elasticsearch/logs name: elasticsearch-logs - mountPath: /tmp name: tmp-volume dnsPolicy: ClusterFirst enableServiceLinks: true hostname: elasticsearch-es-default-0 imagePullSecrets: - name: default-dockercfg-8qddz initContainers: - command: - bash - -c - /mnt/elastic-internal/scripts/prepare-fs.sh env: - name: POD_IP valueFrom: fieldRef: apiVersion: v1 fieldPath: status.podIP - name: POD_NAME valueFrom: fieldRef: apiVersion: v1 fieldPath: metadata.name - name: NODE_NAME valueFrom: fieldRef: apiVersion: v1 fieldPath: spec.nodeName - name: NAMESPACE valueFrom: fieldRef: apiVersion: v1 fieldPath: metadata.namespace - name: HEADLESS_SERVICE_NAME value: elasticsearch-es-default - name: PROBE_PASSWORD_PATH value: /mnt/elastic-internal/pod-mounted-users/elastic-internal-probe - name: PROBE_USERNAME value: elastic-internal-probe - name: READINESS_PROBE_PROTOCOL value: https - name: NSS_SDB_USE_CACHE value: "no" image: registry.connect.redhat.com/elastic/elasticsearch:7.17.20 imagePullPolicy: IfNotPresent name: elastic-internal-init-filesystem resources: limits: cpu: 100m memory: 50Mi requests: cpu: 100m memory: 50Mi securityContext: allowPrivilegeEscalation: false capabilities: drop: - ALL privileged: false readOnlyRootFilesystem: true runAsNonRoot: true runAsUser: 1000670000 terminationMessagePath: /dev/termination-log terminationMessagePolicy: File volumeMounts: - mountPath: /mnt/elastic-internal/downward-api name: downward-api readOnly: true - mountPath: 
/mnt/elastic-internal/elasticsearch-bin-local name: elastic-internal-elasticsearch-bin-local - mountPath: /mnt/elastic-internal/elasticsearch-config name: elastic-internal-elasticsearch-config readOnly: true - mountPath: /mnt/elastic-internal/elasticsearch-config-local name: elastic-internal-elasticsearch-config-local - mountPath: /mnt/elastic-internal/elasticsearch-plugins-local name: elastic-internal-elasticsearch-plugins-local - mountPath: /usr/share/elasticsearch/config/http-certs name: elastic-internal-http-certificates readOnly: true - mountPath: /mnt/elastic-internal/pod-mounted-users name: elastic-internal-probe-user readOnly: true - mountPath: /usr/share/elasticsearch/config/transport-remote-certs/ name: elastic-internal-remote-certificate-authorities readOnly: true - mountPath: /mnt/elastic-internal/scripts name: elastic-internal-scripts readOnly: true - mountPath: /mnt/elastic-internal/transport-certificates name: elastic-internal-transport-certificates readOnly: true - mountPath: /mnt/elastic-internal/unicast-hosts name: elastic-internal-unicast-hosts readOnly: true - mountPath: /mnt/elastic-internal/xpack-file-realm name: elastic-internal-xpack-file-realm readOnly: true - mountPath: /usr/share/elasticsearch/data name: elasticsearch-data - mountPath: /usr/share/elasticsearch/logs name: elasticsearch-logs - mountPath: /tmp name: tmp-volume - command: - bash - -c - /mnt/elastic-internal/scripts/suspend.sh env: - name: POD_IP valueFrom: fieldRef: apiVersion: v1 fieldPath: status.podIP - name: POD_NAME valueFrom: fieldRef: apiVersion: v1 fieldPath: metadata.name - name: NODE_NAME valueFrom: fieldRef: apiVersion: v1 fieldPath: spec.nodeName - name: NAMESPACE valueFrom: fieldRef: apiVersion: v1 fieldPath: metadata.namespace - name: HEADLESS_SERVICE_NAME value: elasticsearch-es-default - name: PROBE_PASSWORD_PATH value: /mnt/elastic-internal/pod-mounted-users/elastic-internal-probe - name: PROBE_USERNAME value: elastic-internal-probe - name: READINESS_PROBE_PROTOCOL value: https - name: NSS_SDB_USE_CACHE value: "no" image: registry.connect.redhat.com/elastic/elasticsearch:7.17.20 imagePullPolicy: IfNotPresent name: elastic-internal-suspend resources: limits: cpu: "2" memory: 2Gi requests: cpu: "1" memory: 2Gi securityContext: allowPrivilegeEscalation: false capabilities: drop: - ALL privileged: false readOnlyRootFilesystem: true runAsNonRoot: true runAsUser: 1000670000 terminationMessagePath: /dev/termination-log terminationMessagePolicy: File volumeMounts: - mountPath: /mnt/elastic-internal/downward-api name: downward-api readOnly: true - mountPath: /usr/share/elasticsearch/bin name: elastic-internal-elasticsearch-bin-local - mountPath: /mnt/elastic-internal/elasticsearch-config name: elastic-internal-elasticsearch-config readOnly: true - mountPath: /usr/share/elasticsearch/config name: elastic-internal-elasticsearch-config-local - mountPath: /usr/share/elasticsearch/plugins name: elastic-internal-elasticsearch-plugins-local - mountPath: /usr/share/elasticsearch/config/http-certs name: elastic-internal-http-certificates readOnly: true - mountPath: /mnt/elastic-internal/pod-mounted-users name: elastic-internal-probe-user readOnly: true - mountPath: /usr/share/elasticsearch/config/transport-remote-certs/ name: elastic-internal-remote-certificate-authorities readOnly: true - mountPath: /mnt/elastic-internal/scripts name: elastic-internal-scripts readOnly: true - mountPath: /usr/share/elasticsearch/config/transport-certs name: elastic-internal-transport-certificates readOnly: true - 
mountPath: /mnt/elastic-internal/unicast-hosts name: elastic-internal-unicast-hosts readOnly: true - mountPath: /mnt/elastic-internal/xpack-file-realm name: elastic-internal-xpack-file-realm readOnly: true - mountPath: /usr/share/elasticsearch/data name: elasticsearch-data - mountPath: /usr/share/elasticsearch/logs name: elasticsearch-logs - mountPath: /tmp name: tmp-volume nodeName: crc preemptionPolicy: PreemptLowerPriority priority: 0 restartPolicy: Always schedulerName: default-scheduler securityContext: fsGroup: 1000670000 seLinuxOptions: level: s0:c26,c10 seccompProfile: type: RuntimeDefault serviceAccount: default serviceAccountName: default subdomain: elasticsearch-es-default terminationGracePeriodSeconds: 180 tolerations: - effect: NoExecute key: node.kubernetes.io/not-ready operator: Exists tolerationSeconds: 300 - effect: NoExecute key: node.kubernetes.io/unreachable operator: Exists tolerationSeconds: 300 - effect: NoSchedule key: node.kubernetes.io/memory-pressure operator: Exists volumes: - downwardAPI: defaultMode: 420 items: - fieldRef: apiVersion: v1 fieldPath: metadata.labels path: labels name: downward-api - emptyDir: {} name: elastic-internal-elasticsearch-bin-local - name: elastic-internal-elasticsearch-config secret: defaultMode: 420 optional: false secretName: elasticsearch-es-default-es-config - emptyDir: {} name: elastic-internal-elasticsearch-config-local - emptyDir: {} name: elastic-internal-elasticsearch-plugins-local - name: elastic-internal-http-certificates secret: defaultMode: 420 optional: false secretName: elasticsearch-es-http-certs-internal - name: elastic-internal-probe-user secret: defaultMode: 420 items: - key: elastic-internal-probe path: elastic-internal-probe - key: elastic-internal-pre-stop path: elastic-internal-pre-stop optional: false secretName: elasticsearch-es-internal-users - name: elastic-internal-remote-certificate-authorities secret: defaultMode: 420 optional: false secretName: elasticsearch-es-remote-ca - configMap: defaultMode: 493 name: elasticsearch-es-scripts optional: false name: elastic-internal-scripts - name: elastic-internal-transport-certificates secret: defaultMode: 420 optional: false secretName: elasticsearch-es-default-es-transport-certs - configMap: defaultMode: 420 name: elasticsearch-es-unicast-hosts optional: false name: elastic-internal-unicast-hosts - name: elastic-internal-xpack-file-realm secret: defaultMode: 420 optional: false secretName: elasticsearch-es-xpack-file-realm - emptyDir: {} name: elasticsearch-data - emptyDir: {} name: elasticsearch-logs - emptyDir: {} name: tmp-volume status: conditions: - lastProbeTime: null lastTransitionTime: "2025-12-12T16:28:02Z" status: "True" type: PodReadyToStartContainers - lastProbeTime: null lastTransitionTime: "2025-12-12T16:28:08Z" status: "True" type: Initialized - lastProbeTime: null lastTransitionTime: "2025-12-12T16:28:25Z" status: "True" type: Ready - lastProbeTime: null lastTransitionTime: "2025-12-12T16:28:25Z" status: "True" type: ContainersReady - lastProbeTime: null lastTransitionTime: "2025-12-12T16:27:31Z" status: "True" type: PodScheduled containerStatuses: - allocatedResources: cpu: "1" memory: 2Gi containerID: cri-o://6c7c2654787452f3eb41e84798babe6a4e11219f951dec0692a48118bd4af169 image: registry.connect.redhat.com/elastic/elasticsearch:7.17.20 imageID: registry.connect.redhat.com/elastic/elasticsearch@sha256:d2ba643f52fa7935d0428814d23aaef06cde96830a8cd59ddb2e078749b98856 lastState: {} name: elasticsearch ready: true resources: limits: cpu: "2" memory: 
2Gi requests: cpu: "1" memory: 2Gi restartCount: 0 started: true state: running: startedAt: "2025-12-12T16:28:08Z" user: linux: gid: 0 supplementalGroups: - 0 - 1000670000 uid: 1000670000 volumeMounts: - mountPath: /mnt/elastic-internal/downward-api name: downward-api readOnly: true recursiveReadOnly: Disabled - mountPath: /usr/share/elasticsearch/bin name: elastic-internal-elasticsearch-bin-local - mountPath: /mnt/elastic-internal/elasticsearch-config name: elastic-internal-elasticsearch-config readOnly: true recursiveReadOnly: Disabled - mountPath: /usr/share/elasticsearch/config name: elastic-internal-elasticsearch-config-local - mountPath: /usr/share/elasticsearch/plugins name: elastic-internal-elasticsearch-plugins-local - mountPath: /usr/share/elasticsearch/config/http-certs name: elastic-internal-http-certificates readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/pod-mounted-users name: elastic-internal-probe-user readOnly: true recursiveReadOnly: Disabled - mountPath: /usr/share/elasticsearch/config/transport-remote-certs/ name: elastic-internal-remote-certificate-authorities readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/scripts name: elastic-internal-scripts readOnly: true recursiveReadOnly: Disabled - mountPath: /usr/share/elasticsearch/config/transport-certs name: elastic-internal-transport-certificates readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/unicast-hosts name: elastic-internal-unicast-hosts readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/xpack-file-realm name: elastic-internal-xpack-file-realm readOnly: true recursiveReadOnly: Disabled - mountPath: /usr/share/elasticsearch/data name: elasticsearch-data - mountPath: /usr/share/elasticsearch/logs name: elasticsearch-logs - mountPath: /tmp name: tmp-volume hostIP: 192.168.126.11 hostIPs: - ip: 192.168.126.11 initContainerStatuses: - allocatedResources: cpu: 100m memory: 50Mi containerID: cri-o://d17ddd56ec1e69b963f06df02f54c22e27d986e75164a0c8e2bba0d7b48270bf image: registry.connect.redhat.com/elastic/elasticsearch:7.17.20 imageID: registry.connect.redhat.com/elastic/elasticsearch@sha256:d2ba643f52fa7935d0428814d23aaef06cde96830a8cd59ddb2e078749b98856 lastState: {} name: elastic-internal-init-filesystem ready: true resources: limits: cpu: 100m memory: 50Mi requests: cpu: 100m memory: 50Mi restartCount: 0 started: false state: terminated: containerID: cri-o://d17ddd56ec1e69b963f06df02f54c22e27d986e75164a0c8e2bba0d7b48270bf exitCode: 0 finishedAt: "2025-12-12T16:28:03Z" reason: Completed startedAt: "2025-12-12T16:28:01Z" user: linux: gid: 0 supplementalGroups: - 0 - 1000670000 uid: 1000670000 volumeMounts: - mountPath: /mnt/elastic-internal/downward-api name: downward-api readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/elasticsearch-bin-local name: elastic-internal-elasticsearch-bin-local - mountPath: /mnt/elastic-internal/elasticsearch-config name: elastic-internal-elasticsearch-config readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/elasticsearch-config-local name: elastic-internal-elasticsearch-config-local - mountPath: /mnt/elastic-internal/elasticsearch-plugins-local name: elastic-internal-elasticsearch-plugins-local - mountPath: /usr/share/elasticsearch/config/http-certs name: elastic-internal-http-certificates readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/pod-mounted-users name: elastic-internal-probe-user 
readOnly: true recursiveReadOnly: Disabled - mountPath: /usr/share/elasticsearch/config/transport-remote-certs/ name: elastic-internal-remote-certificate-authorities readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/scripts name: elastic-internal-scripts readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/transport-certificates name: elastic-internal-transport-certificates readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/unicast-hosts name: elastic-internal-unicast-hosts readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/xpack-file-realm name: elastic-internal-xpack-file-realm readOnly: true recursiveReadOnly: Disabled - mountPath: /usr/share/elasticsearch/data name: elasticsearch-data - mountPath: /usr/share/elasticsearch/logs name: elasticsearch-logs - mountPath: /tmp name: tmp-volume - allocatedResources: cpu: "1" memory: 2Gi containerID: cri-o://02c6ca623bfbf1e086ed2b54c19e328c12a5ce746f2524c5bfadf77c7b7e7621 image: registry.connect.redhat.com/elastic/elasticsearch:7.17.20 imageID: registry.connect.redhat.com/elastic/elasticsearch@sha256:d2ba643f52fa7935d0428814d23aaef06cde96830a8cd59ddb2e078749b98856 lastState: {} name: elastic-internal-suspend ready: true resources: limits: cpu: "2" memory: 2Gi requests: cpu: "1" memory: 2Gi restartCount: 0 started: false state: terminated: containerID: cri-o://02c6ca623bfbf1e086ed2b54c19e328c12a5ce746f2524c5bfadf77c7b7e7621 exitCode: 0 finishedAt: "2025-12-12T16:28:07Z" reason: Completed startedAt: "2025-12-12T16:28:07Z" user: linux: gid: 0 supplementalGroups: - 0 - 1000670000 uid: 1000670000 volumeMounts: - mountPath: /mnt/elastic-internal/downward-api name: downward-api readOnly: true recursiveReadOnly: Disabled - mountPath: /usr/share/elasticsearch/bin name: elastic-internal-elasticsearch-bin-local - mountPath: /mnt/elastic-internal/elasticsearch-config name: elastic-internal-elasticsearch-config readOnly: true recursiveReadOnly: Disabled - mountPath: /usr/share/elasticsearch/config name: elastic-internal-elasticsearch-config-local - mountPath: /usr/share/elasticsearch/plugins name: elastic-internal-elasticsearch-plugins-local - mountPath: /usr/share/elasticsearch/config/http-certs name: elastic-internal-http-certificates readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/pod-mounted-users name: elastic-internal-probe-user readOnly: true recursiveReadOnly: Disabled - mountPath: /usr/share/elasticsearch/config/transport-remote-certs/ name: elastic-internal-remote-certificate-authorities readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/scripts name: elastic-internal-scripts readOnly: true recursiveReadOnly: Disabled - mountPath: /usr/share/elasticsearch/config/transport-certs name: elastic-internal-transport-certificates readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/unicast-hosts name: elastic-internal-unicast-hosts readOnly: true recursiveReadOnly: Disabled - mountPath: /mnt/elastic-internal/xpack-file-realm name: elastic-internal-xpack-file-realm readOnly: true recursiveReadOnly: Disabled - mountPath: /usr/share/elasticsearch/data name: elasticsearch-data - mountPath: /usr/share/elasticsearch/logs name: elasticsearch-logs - mountPath: /tmp name: tmp-volume phase: Running podIP: 10.217.0.53 podIPs: - ip: 10.217.0.53 qosClass: Burstable startTime: "2025-12-12T16:27:31Z" - apiVersion: v1 kind: Pod metadata: annotations: 
cluster-autoscaler.kubernetes.io/safe-to-evict: "true" k8s.ovn.org/pod-networks: '{"default":{"ip_addresses":["10.217.0.67/23"],"mac_address":"0a:58:0a:d9:00:43","gateway_ips":["10.217.0.1"],"routes":[{"dest":"10.217.0.0/22","nextHop":"10.217.0.1"},{"dest":"10.217.4.0/23","nextHop":"10.217.0.1"},{"dest":"169.254.0.5/32","nextHop":"10.217.0.1"},{"dest":"100.64.0.0/16","nextHop":"10.217.0.1"}],"ip_address":"10.217.0.67/23","gateway_ip":"10.217.0.1","role":"primary"}}' k8s.v1.cni.cncf.io/network-status: |- [{ "name": "ovn-kubernetes", "interface": "eth0", "ips": [ "10.217.0.67" ], "mac": "0a:58:0a:d9:00:43", "default": true, "dns": {} }] openshift.io/scc: anyuid security.openshift.io/validated-scc-subject-type: user creationTimestamp: "2025-12-12T16:34:25Z" generateName: infrawatch-operators- generation: 1 labels: catalogsource.operators.coreos.com/update: infrawatch-operators olm.catalogSource: "" olm.managed: "true" olm.pod-spec-hash: 1n8G20tnqK1naks6OvaILwtmBXnofPrWrBgfHP name: infrawatch-operators-6bs58 namespace: service-telemetry ownerReferences: - apiVersion: operators.coreos.com/v1alpha1 blockOwnerDeletion: false controller: true kind: CatalogSource name: infrawatch-operators uid: 844125b7-bcbe-4137-9cf0-b045a652f768 resourceVersion: "45315" uid: 6510d065-e486-4274-a8ca-4c2cdb8dd1ae spec: containers: - image: image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest imagePullPolicy: Always livenessProbe: exec: command: - grpc_health_probe - -addr=:50051 failureThreshold: 3 initialDelaySeconds: 10 periodSeconds: 10 successThreshold: 1 timeoutSeconds: 5 name: registry-server ports: - containerPort: 50051 name: grpc protocol: TCP readinessProbe: exec: command: - grpc_health_probe - -addr=:50051 failureThreshold: 3 initialDelaySeconds: 5 periodSeconds: 10 successThreshold: 1 timeoutSeconds: 5 resources: requests: cpu: 10m memory: 50Mi securityContext: capabilities: drop: - MKNOD readOnlyRootFilesystem: false startupProbe: exec: command: - grpc_health_probe - -addr=:50051 failureThreshold: 10 periodSeconds: 10 successThreshold: 1 timeoutSeconds: 5 terminationMessagePath: /dev/termination-log terminationMessagePolicy: FallbackToLogsOnError volumeMounts: - mountPath: /var/run/secrets/kubernetes.io/serviceaccount name: kube-api-access-q4pzm readOnly: true dnsPolicy: ClusterFirst enableServiceLinks: true imagePullSecrets: - name: infrawatch-operators-dockercfg-n6ssc nodeName: crc nodeSelector: kubernetes.io/os: linux preemptionPolicy: PreemptLowerPriority priority: 0 restartPolicy: Always schedulerName: default-scheduler securityContext: seLinuxOptions: level: s0:c26,c10 serviceAccount: infrawatch-operators serviceAccountName: infrawatch-operators terminationGracePeriodSeconds: 30 tolerations: - effect: NoExecute key: node.kubernetes.io/not-ready operator: Exists tolerationSeconds: 300 - effect: NoExecute key: node.kubernetes.io/unreachable operator: Exists tolerationSeconds: 300 - effect: NoSchedule key: node.kubernetes.io/memory-pressure operator: Exists volumes: - name: kube-api-access-q4pzm projected: defaultMode: 420 sources: - serviceAccountToken: expirationSeconds: 3607 path: token - configMap: items: - key: ca.crt path: ca.crt name: kube-root-ca.crt - downwardAPI: items: - fieldRef: apiVersion: v1 fieldPath: metadata.namespace path: namespace - configMap: items: - key: service-ca.crt path: service-ca.crt name: openshift-service-ca.crt status: conditions: - lastProbeTime: null lastTransitionTime: "2025-12-12T16:34:27Z" status: "True" 
type: PodReadyToStartContainers - lastProbeTime: null lastTransitionTime: "2025-12-12T16:34:25Z" status: "True" type: Initialized - lastProbeTime: null lastTransitionTime: "2025-12-12T16:34:25Z" message: 'containers with unready status: [registry-server]' reason: ContainersNotReady status: "False" type: Ready - lastProbeTime: null lastTransitionTime: "2025-12-12T16:34:25Z" message: 'containers with unready status: [registry-server]' reason: ContainersNotReady status: "False" type: ContainersReady - lastProbeTime: null lastTransitionTime: "2025-12-12T16:34:25Z" status: "True" type: PodScheduled containerStatuses: - image: image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest imageID: "" lastState: {} name: registry-server ready: false restartCount: 0 started: false state: waiting: message: 'Back-off pulling image "image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest": ErrImagePull: unable to pull image or OCI artifact: pull image err: initializing source docker://image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest: reading manifest latest in image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index: manifest unknown; artifact err: get manifest: build image source: reading manifest latest in image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index: manifest unknown' reason: ImagePullBackOff volumeMounts: - mountPath: /var/run/secrets/kubernetes.io/serviceaccount name: kube-api-access-q4pzm readOnly: true recursiveReadOnly: Disabled hostIP: 192.168.126.11 hostIPs: - ip: 192.168.126.11 phase: Pending podIP: 10.217.0.67 podIPs: - ip: 10.217.0.67 qosClass: Burstable startTime: "2025-12-12T16:34:25Z" - apiVersion: v1 kind: Pod metadata: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" k8s.ovn.org/pod-networks: '{"default":{"ip_addresses":["10.217.0.65/23"],"mac_address":"0a:58:0a:d9:00:41","gateway_ips":["10.217.0.1"],"routes":[{"dest":"10.217.0.0/22","nextHop":"10.217.0.1"},{"dest":"10.217.4.0/23","nextHop":"10.217.0.1"},{"dest":"169.254.0.5/32","nextHop":"10.217.0.1"},{"dest":"100.64.0.0/16","nextHop":"10.217.0.1"}],"ip_address":"10.217.0.65/23","gateway_ip":"10.217.0.1","role":"primary"}}' k8s.v1.cni.cncf.io/network-status: |- [{ "name": "ovn-kubernetes", "interface": "eth0", "ips": [ "10.217.0.65" ], "mac": "0a:58:0a:d9:00:41", "default": true, "dns": {} }] openshift.io/scc: anyuid security.openshift.io/validated-scc-subject-type: user creationTimestamp: "2025-12-12T16:29:28Z" generateName: infrawatch-operators- generation: 1 labels: olm.catalogSource: infrawatch-operators olm.managed: "true" olm.pod-spec-hash: 1n8G20tnqK1naks6OvaILwtmBXnofPrWrBgfHP name: infrawatch-operators-cdpts namespace: service-telemetry ownerReferences: - apiVersion: operators.coreos.com/v1alpha1 blockOwnerDeletion: false controller: true kind: CatalogSource name: infrawatch-operators uid: 844125b7-bcbe-4137-9cf0-b045a652f768 resourceVersion: "44977" uid: eeed1a9b-f386-4d11-b730-03bcb44f9a55 spec: containers: - image: image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest imagePullPolicy: Always livenessProbe: exec: command: - grpc_health_probe - -addr=:50051 failureThreshold: 3 initialDelaySeconds: 10 periodSeconds: 10 successThreshold: 1 timeoutSeconds: 5 name: registry-server ports: - 
containerPort: 50051 name: grpc protocol: TCP readinessProbe: exec: command: - grpc_health_probe - -addr=:50051 failureThreshold: 3 initialDelaySeconds: 5 periodSeconds: 10 successThreshold: 1 timeoutSeconds: 5 resources: requests: cpu: 10m memory: 50Mi securityContext: capabilities: drop: - MKNOD readOnlyRootFilesystem: false startupProbe: exec: command: - grpc_health_probe - -addr=:50051 failureThreshold: 10 periodSeconds: 10 successThreshold: 1 timeoutSeconds: 5 terminationMessagePath: /dev/termination-log terminationMessagePolicy: FallbackToLogsOnError volumeMounts: - mountPath: /var/run/secrets/kubernetes.io/serviceaccount name: kube-api-access-p4zc7 readOnly: true dnsPolicy: ClusterFirst enableServiceLinks: true imagePullSecrets: - name: infrawatch-operators-dockercfg-n6ssc nodeName: crc nodeSelector: kubernetes.io/os: linux preemptionPolicy: PreemptLowerPriority priority: 0 restartPolicy: Always schedulerName: default-scheduler securityContext: seLinuxOptions: level: s0:c26,c10 serviceAccount: infrawatch-operators serviceAccountName: infrawatch-operators terminationGracePeriodSeconds: 30 tolerations: - effect: NoExecute key: node.kubernetes.io/not-ready operator: Exists tolerationSeconds: 300 - effect: NoExecute key: node.kubernetes.io/unreachable operator: Exists tolerationSeconds: 300 - effect: NoSchedule key: node.kubernetes.io/memory-pressure operator: Exists volumes: - name: kube-api-access-p4zc7 projected: defaultMode: 420 sources: - serviceAccountToken: expirationSeconds: 3607 path: token - configMap: items: - key: ca.crt path: ca.crt name: kube-root-ca.crt - downwardAPI: items: - fieldRef: apiVersion: v1 fieldPath: metadata.namespace path: namespace - configMap: items: - key: service-ca.crt path: service-ca.crt name: openshift-service-ca.crt status: conditions: - lastProbeTime: null lastTransitionTime: "2025-12-12T16:29:29Z" status: "True" type: PodReadyToStartContainers - lastProbeTime: null lastTransitionTime: "2025-12-12T16:29:28Z" status: "True" type: Initialized - lastProbeTime: null lastTransitionTime: "2025-12-12T16:29:28Z" message: 'containers with unready status: [registry-server]' reason: ContainersNotReady status: "False" type: Ready - lastProbeTime: null lastTransitionTime: "2025-12-12T16:29:28Z" message: 'containers with unready status: [registry-server]' reason: ContainersNotReady status: "False" type: ContainersReady - lastProbeTime: null lastTransitionTime: "2025-12-12T16:29:28Z" status: "True" type: PodScheduled containerStatuses: - image: image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest imageID: "" lastState: {} name: registry-server ready: false restartCount: 0 started: false state: waiting: message: 'Back-off pulling image "image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest": ErrImagePull: unable to pull image or OCI artifact: pull image err: initializing source docker://image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest: reading manifest latest in image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index: manifest unknown; artifact err: get manifest: build image source: reading manifest latest in image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index: manifest unknown' reason: ImagePullBackOff volumeMounts: - mountPath: /var/run/secrets/kubernetes.io/serviceaccount name: kube-api-access-p4zc7 
readOnly: true recursiveReadOnly: Disabled hostIP: 192.168.126.11 hostIPs: - ip: 192.168.126.11 phase: Pending podIP: 10.217.0.65 podIPs: - ip: 10.217.0.65 qosClass: Burstable startTime: "2025-12-12T16:29:28Z" kind: List metadata: resourceVersion: "" home/zuul/zuul-output/logs/controller/post_oc_describe_subscriptions_STO.log0000644000175000017500000000554515117042534027011 0ustar zuulzuulName: service-telemetry-operator Namespace: service-telemetry Labels: operators.coreos.com/service-telemetry-operator.service-telemetry= Annotations: API Version: operators.coreos.com/v1alpha1 Kind: Subscription Metadata: Creation Timestamp: 2025-12-12T16:29:24Z Generation: 1 Resource Version: 44089 UID: d58b8d17-7fad-4b3a-b89e-4b5e8d6c5fdd Spec: Channel: unstable Install Plan Approval: Automatic Name: service-telemetry-operator Source: infrawatch-operators Source Namespace: service-telemetry Status: Catalog Health: Catalog Source Ref: API Version: operators.coreos.com/v1alpha1 Kind: CatalogSource Name: certified-operators Namespace: openshift-marketplace Resource Version: 41251 UID: 04c2c69e-a9e9-447b-aff2-c2db7de0ee83 Healthy: true Last Updated: 2025-12-12T16:29:40Z Catalog Source Ref: API Version: operators.coreos.com/v1alpha1 Kind: CatalogSource Name: community-operators Namespace: openshift-marketplace Resource Version: 42447 UID: 88a656bd-c52a-4813-892e-7e3363ba9ac0 Healthy: true Last Updated: 2025-12-12T16:29:40Z Catalog Source Ref: API Version: operators.coreos.com/v1alpha1 Kind: CatalogSource Name: redhat-operators Namespace: openshift-marketplace Resource Version: 41489 UID: ca744265-3ae3-4482-8c3d-b10e28fe1042 Healthy: true Last Updated: 2025-12-12T16:29:40Z Catalog Source Ref: API Version: operators.coreos.com/v1alpha1 Kind: CatalogSource Name: infrawatch-operators Namespace: service-telemetry Resource Version: 44059 UID: 844125b7-bcbe-4137-9cf0-b045a652f768 Healthy: true Last Updated: 2025-12-12T16:29:40Z Conditions: Message: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" Reason: ErrorPreventedResolution Status: True Type: ResolutionFailed Last Transition Time: 2025-12-12T16:29:45Z Message: all available catalogsources are healthy Reason: AllCatalogSourcesHealthy Status: False Type: CatalogSourcesUnhealthy Last Updated: 2025-12-12T16:29:45Z Events: home/zuul/zuul-output/logs/controller/describe_sto.log0000644000175000017500000012430615117042535022432 0ustar zuulzuulName: elastic-operator-6c994c654b-42tmw Namespace: service-telemetry Priority: 0 Service Account: elastic-operator Node: crc/192.168.126.11 Start Time: Fri, 12 Dec 2025 16:27:05 +0000 Labels: control-plane=elastic-operator pod-template-hash=6c994c654b Annotations: alm-examples: [ { "apiVersion": "elasticsearch.k8s.elastic.co/v1", "kind": "Elasticsearch", "metadata": { "name": "elasticsearch-sample" }, "spec": { "version": "9.2.0", "nodeSets": [ { "name": "default", "config": { "node.roles": ["master", "data"], "node.attr.attr_name": "attr_value", "node.store.allow_mmap": false }, "podTemplate": { "metadata": { "labels": { "foo": "bar" } }, "spec": { "containers": [ { "name": "elasticsearch", "resources": { "requests": { "memory": "4Gi", "cpu": 1 }, "limits": { "memory": "4Gi", "cpu": 2 } } } ] } }, "count": 3 } ] } }, { "apiVersion": "kibana.k8s.elastic.co/v1", "kind": "Kibana", "metadata": { "name": "kibana-sample" }, "spec": { "version": 
"9.2.0", "count": 1, "elasticsearchRef": { "name": "elasticsearch-sample" }, "podTemplate": { "metadata": { "labels": { "foo": "bar" } }, "spec": { "containers": [ { "name": "kibana", "resources": { "requests": { "memory": "1Gi", "cpu": 0.5 }, "limits": { "memory": "2Gi", "cpu": 2 } } } ] } } } }, { "apiVersion": "apm.k8s.elastic.co/v1", "kind": "ApmServer", "metadata": { "name": "apmserver-sample" }, "spec": { "version": "9.2.0", "count": 1, "elasticsearchRef": { "name": "elasticsearch-sample" } } }, { "apiVersion": "enterprisesearch.k8s.elastic.co/v1", "kind": "EnterpriseSearch", "metadata": { "name": "ent-sample" }, "spec": { "version": "9.2.0", "config": { "ent_search.external_url": "https://localhost:3002" }, "count": 1, "elasticsearchRef": { "name": "elasticsearch-sample" } } }, { "apiVersion": "beat.k8s.elastic.co/v1beta1", "kind": "Beat", "metadata": { "name": "heartbeat-sample" }, "spec": { "type": "heartbeat", "version": "9.2.0", "elasticsearchRef": { "name": "elasticsearch-sample" }, "config": { "heartbeat.monitors": [ { "type": "tcp", "schedule": "@every 5s", "hosts": [ "elasticsearch-sample-es-http.default.svc:9200" ] } ] }, "deployment": { "replicas": 1, "podTemplate": { "spec": { "securityContext": { "runAsUser": 0 } } } } } }, { "apiVersion": "agent.k8s.elastic.co/v1alpha1", "kind": "Agent", "metadata": { "name": "agent-sample" }, "spec": { "version": "9.2.0", "elasticsearchRefs": [ { "name": "elasticsearch-sample" } ], "daemonSet": {}, "config": { "inputs": [ { "name": "system-1", "revision": 1, "type": "system/metrics", "use_output": "default", "meta": { "package": { "name": "system", "version": "0.9.1" } }, "data_stream": { "namespace": "default" }, "streams": [ { "id": "system/metrics-system.cpu", "data_stream": { "dataset": "system.cpu", "type": "metrics" }, "metricsets": [ "cpu" ], "cpu.metrics": [ "percentages", "normalized_percentages" ], "period": "10s" } ] } ] } } }, { "apiVersion": "maps.k8s.elastic.co/v1alpha1", "kind": "ElasticMapsServer", "metadata": { "name": "ems-sample" }, "spec": { "version": "9.2.0", "count": 1, "elasticsearchRef": { "name": "elasticsearch-sample" } } }, { "apiVersion": "logstash.k8s.elastic.co/v1alpha1", "kind": "Logstash", "metadata" : { "name": "logstash-sample" }, "spec": { "version": "9.2.0", "count": 1 } } ] capabilities: Deep Insights categories: Database certified: false co.elastic.logs/raw: [{"type":"filestream","enabled":true,"id":"eck-container-logs-${data.kubernetes.container.id}","paths":["/var/log/containers/*${data.kuber... containerImage: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 createdAt: 2025-10-31 20:52:32 description: Run Elasticsearch, Kibana, APM Server, Beats, Enterprise Search, Elastic Agent, Elastic Maps Server and Logstash on Kubernetes and OpenShi... features.operators.openshift.io/disconnected: false features.operators.openshift.io/fips-compliant: false features.operators.openshift.io/proxy-aware: false features.operators.openshift.io/tls-profiles: false features.operators.openshift.io/token-auth-aws: false features.operators.openshift.io/token-auth-azure: false features.operators.openshift.io/token-auth-gcp: false k8s.ovn.org/pod-networks: {"default":{"ip_addresses":["10.217.0.50/23"],"mac_address":"0a:58:0a:d9:00:32","gateway_ips":["10.217.0.1"],"routes":[{"dest":"10.217.0.0... 
k8s.v1.cni.cncf.io/network-status: [{ "name": "ovn-kubernetes", "interface": "eth0", "ips": [ "10.217.0.50" ], "mac": "0a:58:0a:d9:00:32", "default": true, "dns": {} }] olm.operatorGroup: service-telemetry-operator-group olm.operatorNamespace: service-telemetry olm.targetNamespaces: service-telemetry olmcahash: 02b97c20c00db6979b0f4cccf06449b5fb0cfdfb377da8c48f74f853a625fc69 openshift.io/scc: restricted-v2 operatorframework.io/properties: {"properties":[{"type":"olm.gvk","value":{"group":"agent.k8s.elastic.co","kind":"Agent","version":"v1alpha1"}},{"type":"olm.gvk","value":{... operators.openshift.io/valid-subscription: Elastic Basic license repository: https://github.com/elastic/cloud-on-k8s seccomp.security.alpha.kubernetes.io/pod: runtime/default security.openshift.io/validated-scc-subject-type: user support: elastic.co Status: Running SeccompProfile: RuntimeDefault IP: 10.217.0.50 IPs: IP: 10.217.0.50 Controlled By: ReplicaSet/elastic-operator-6c994c654b Containers: manager: Container ID: cri-o://f7468ca2597b25367291c527f32ed32cc5934aaea968905e6debd499cadf6d71 Image: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 Image ID: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 Port: 9443/TCP Host Port: 0/TCP Args: manager --config=/conf/eck.yaml --manage-webhook-certs=false --enable-webhook --ubi-only --distribution-channel=certified-operators State: Running Started: Fri, 12 Dec 2025 16:27:30 +0000 Ready: True Restart Count: 0 Limits: cpu: 1 memory: 1Gi Requests: cpu: 100m memory: 150Mi Environment: NAMESPACES: (v1:metadata.annotations['olm.targetNamespaces']) OPERATOR_NAMESPACE: (v1:metadata.annotations['olm.operatorNamespace']) OPERATOR_IMAGE: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 OPERATOR_CONDITION_NAME: elasticsearch-eck-operator-certified.v3.2.0 Mounts: /apiserver.local.config/certificates from apiservice-cert (rw) /tmp/k8s-webhook-server/serving-certs from webhook-cert (rw) /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-m9pn9 (ro) Conditions: Type Status PodReadyToStartContainers True Initialized True Ready True ContainersReady True PodScheduled True Volumes: apiservice-cert: Type: Secret (a volume populated by a Secret) SecretName: elastic-operator-service-cert Optional: false webhook-cert: Type: Secret (a volume populated by a Secret) SecretName: elastic-operator-service-cert Optional: false kube-api-access-m9pn9: Type: Projected (a volume that contains injected data from multiple sources) TokenExpirationSeconds: 3607 ConfigMapName: kube-root-ca.crt Optional: false DownwardAPI: true ConfigMapName: openshift-service-ca.crt Optional: false QoS Class: Burstable Node-Selectors: Tolerations: node.kubernetes.io/memory-pressure:NoSchedule op=Exists node.kubernetes.io/not-ready:NoExecute op=Exists for 300s node.kubernetes.io/unreachable:NoExecute op=Exists for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 12m default-scheduler Successfully assigned service-telemetry/elastic-operator-6c994c654b-42tmw to crc Normal AddedInterface 12m multus Add eth0 [10.217.0.50/23] from ovn-kubernetes Normal Pulling 12m kubelet Pulling image "registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105" Normal Pulled 12m kubelet Successfully pulled image 
"registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105" in 23.21s (23.21s including waiting). Image size: 126460400 bytes. Normal Created 12m kubelet Created container: manager Normal Started 12m kubelet Started container manager Name: elasticsearch-es-default-0 Namespace: service-telemetry Priority: 0 Service Account: default Node: crc/192.168.126.11 Start Time: Fri, 12 Dec 2025 16:27:31 +0000 Labels: apps.kubernetes.io/pod-index=0 common.k8s.elastic.co/type=elasticsearch controller-revision-hash=elasticsearch-es-default-9774f4d96 elasticsearch.k8s.elastic.co/cluster-name=elasticsearch elasticsearch.k8s.elastic.co/http-scheme=https elasticsearch.k8s.elastic.co/node-data=true elasticsearch.k8s.elastic.co/node-data_cold=false elasticsearch.k8s.elastic.co/node-data_content=false elasticsearch.k8s.elastic.co/node-data_frozen=false elasticsearch.k8s.elastic.co/node-data_hot=false elasticsearch.k8s.elastic.co/node-data_warm=false elasticsearch.k8s.elastic.co/node-ingest=true elasticsearch.k8s.elastic.co/node-master=true elasticsearch.k8s.elastic.co/node-ml=false elasticsearch.k8s.elastic.co/node-remote_cluster_client=false elasticsearch.k8s.elastic.co/node-transform=false elasticsearch.k8s.elastic.co/node-voting_only=false elasticsearch.k8s.elastic.co/statefulset-name=elasticsearch-es-default elasticsearch.k8s.elastic.co/version=7.17.20 statefulset.kubernetes.io/pod-name=elasticsearch-es-default-0 tuned.openshift.io/elasticsearch=elasticsearch Annotations: co.elastic.logs/module: elasticsearch elasticsearch.k8s.elastic.co/config-hash: 513175922 k8s.ovn.org/pod-networks: {"default":{"ip_addresses":["10.217.0.53/23"],"mac_address":"0a:58:0a:d9:00:35","gateway_ips":["10.217.0.1"],"routes":[{"dest":"10.217.0.0... 
k8s.v1.cni.cncf.io/network-status: [{ "name": "ovn-kubernetes", "interface": "eth0", "ips": [ "10.217.0.53" ], "mac": "0a:58:0a:d9:00:35", "default": true, "dns": {} }] openshift.io/scc: restricted-v2 policy.k8s.elastic.co/elasticsearch-config-mounts-hash: seccomp.security.alpha.kubernetes.io/pod: runtime/default security.openshift.io/validated-scc-subject-type: user update.k8s.elastic.co/timestamp: 2025-12-12T16:28:02.391486162Z Status: Running SeccompProfile: RuntimeDefault IP: 10.217.0.53 IPs: IP: 10.217.0.53 Controlled By: StatefulSet/elasticsearch-es-default Init Containers: elastic-internal-init-filesystem: Container ID: cri-o://d17ddd56ec1e69b963f06df02f54c22e27d986e75164a0c8e2bba0d7b48270bf Image: registry.connect.redhat.com/elastic/elasticsearch:7.17.20 Image ID: registry.connect.redhat.com/elastic/elasticsearch@sha256:d2ba643f52fa7935d0428814d23aaef06cde96830a8cd59ddb2e078749b98856 Port: Host Port: Command: bash -c /mnt/elastic-internal/scripts/prepare-fs.sh State: Terminated Reason: Completed Exit Code: 0 Started: Fri, 12 Dec 2025 16:28:01 +0000 Finished: Fri, 12 Dec 2025 16:28:03 +0000 Ready: True Restart Count: 0 Limits: cpu: 100m memory: 50Mi Requests: cpu: 100m memory: 50Mi Environment: POD_IP: (v1:status.podIP) POD_NAME: elasticsearch-es-default-0 (v1:metadata.name) NODE_NAME: (v1:spec.nodeName) NAMESPACE: service-telemetry (v1:metadata.namespace) HEADLESS_SERVICE_NAME: elasticsearch-es-default PROBE_PASSWORD_PATH: /mnt/elastic-internal/pod-mounted-users/elastic-internal-probe PROBE_USERNAME: elastic-internal-probe READINESS_PROBE_PROTOCOL: https NSS_SDB_USE_CACHE: no Mounts: /mnt/elastic-internal/downward-api from downward-api (ro) /mnt/elastic-internal/elasticsearch-bin-local from elastic-internal-elasticsearch-bin-local (rw) /mnt/elastic-internal/elasticsearch-config from elastic-internal-elasticsearch-config (ro) /mnt/elastic-internal/elasticsearch-config-local from elastic-internal-elasticsearch-config-local (rw) /mnt/elastic-internal/elasticsearch-plugins-local from elastic-internal-elasticsearch-plugins-local (rw) /mnt/elastic-internal/pod-mounted-users from elastic-internal-probe-user (ro) /mnt/elastic-internal/scripts from elastic-internal-scripts (ro) /mnt/elastic-internal/transport-certificates from elastic-internal-transport-certificates (ro) /mnt/elastic-internal/unicast-hosts from elastic-internal-unicast-hosts (ro) /mnt/elastic-internal/xpack-file-realm from elastic-internal-xpack-file-realm (ro) /tmp from tmp-volume (rw) /usr/share/elasticsearch/config/http-certs from elastic-internal-http-certificates (ro) /usr/share/elasticsearch/config/transport-remote-certs/ from elastic-internal-remote-certificate-authorities (ro) /usr/share/elasticsearch/data from elasticsearch-data (rw) /usr/share/elasticsearch/logs from elasticsearch-logs (rw) elastic-internal-suspend: Container ID: cri-o://02c6ca623bfbf1e086ed2b54c19e328c12a5ce746f2524c5bfadf77c7b7e7621 Image: registry.connect.redhat.com/elastic/elasticsearch:7.17.20 Image ID: registry.connect.redhat.com/elastic/elasticsearch@sha256:d2ba643f52fa7935d0428814d23aaef06cde96830a8cd59ddb2e078749b98856 Port: Host Port: Command: bash -c /mnt/elastic-internal/scripts/suspend.sh State: Terminated Reason: Completed Exit Code: 0 Started: Fri, 12 Dec 2025 16:28:07 +0000 Finished: Fri, 12 Dec 2025 16:28:07 +0000 Ready: True Restart Count: 0 Limits: cpu: 2 memory: 2Gi Requests: cpu: 1 memory: 2Gi Environment: POD_IP: (v1:status.podIP) POD_NAME: elasticsearch-es-default-0 (v1:metadata.name) NODE_NAME: (v1:spec.nodeName) NAMESPACE: 
service-telemetry (v1:metadata.namespace) HEADLESS_SERVICE_NAME: elasticsearch-es-default PROBE_PASSWORD_PATH: /mnt/elastic-internal/pod-mounted-users/elastic-internal-probe PROBE_USERNAME: elastic-internal-probe READINESS_PROBE_PROTOCOL: https NSS_SDB_USE_CACHE: no Mounts: /mnt/elastic-internal/downward-api from downward-api (ro) /mnt/elastic-internal/elasticsearch-config from elastic-internal-elasticsearch-config (ro) /mnt/elastic-internal/pod-mounted-users from elastic-internal-probe-user (ro) /mnt/elastic-internal/scripts from elastic-internal-scripts (ro) /mnt/elastic-internal/unicast-hosts from elastic-internal-unicast-hosts (ro) /mnt/elastic-internal/xpack-file-realm from elastic-internal-xpack-file-realm (ro) /tmp from tmp-volume (rw) /usr/share/elasticsearch/bin from elastic-internal-elasticsearch-bin-local (rw) /usr/share/elasticsearch/config from elastic-internal-elasticsearch-config-local (rw) /usr/share/elasticsearch/config/http-certs from elastic-internal-http-certificates (ro) /usr/share/elasticsearch/config/transport-certs from elastic-internal-transport-certificates (ro) /usr/share/elasticsearch/config/transport-remote-certs/ from elastic-internal-remote-certificate-authorities (ro) /usr/share/elasticsearch/data from elasticsearch-data (rw) /usr/share/elasticsearch/logs from elasticsearch-logs (rw) /usr/share/elasticsearch/plugins from elastic-internal-elasticsearch-plugins-local (rw) Containers: elasticsearch: Container ID: cri-o://6c7c2654787452f3eb41e84798babe6a4e11219f951dec0692a48118bd4af169 Image: registry.connect.redhat.com/elastic/elasticsearch:7.17.20 Image ID: registry.connect.redhat.com/elastic/elasticsearch@sha256:d2ba643f52fa7935d0428814d23aaef06cde96830a8cd59ddb2e078749b98856 Ports: 9200/TCP, 9300/TCP Host Ports: 0/TCP, 0/TCP State: Running Started: Fri, 12 Dec 2025 16:28:08 +0000 Ready: True Restart Count: 0 Limits: cpu: 2 memory: 2Gi Requests: cpu: 1 memory: 2Gi Readiness: exec [bash -c /mnt/elastic-internal/scripts/readiness-probe-script.sh] delay=10s timeout=5s period=5s #success=1 #failure=3 Environment: POD_IP: (v1:status.podIP) POD_NAME: elasticsearch-es-default-0 (v1:metadata.name) NODE_NAME: (v1:spec.nodeName) NAMESPACE: service-telemetry (v1:metadata.namespace) HEADLESS_SERVICE_NAME: elasticsearch-es-default PROBE_PASSWORD_PATH: /mnt/elastic-internal/pod-mounted-users/elastic-internal-probe PROBE_USERNAME: elastic-internal-probe READINESS_PROBE_PROTOCOL: https NSS_SDB_USE_CACHE: no Mounts: /mnt/elastic-internal/downward-api from downward-api (ro) /mnt/elastic-internal/elasticsearch-config from elastic-internal-elasticsearch-config (ro) /mnt/elastic-internal/pod-mounted-users from elastic-internal-probe-user (ro) /mnt/elastic-internal/scripts from elastic-internal-scripts (ro) /mnt/elastic-internal/unicast-hosts from elastic-internal-unicast-hosts (ro) /mnt/elastic-internal/xpack-file-realm from elastic-internal-xpack-file-realm (ro) /tmp from tmp-volume (rw) /usr/share/elasticsearch/bin from elastic-internal-elasticsearch-bin-local (rw) /usr/share/elasticsearch/config from elastic-internal-elasticsearch-config-local (rw) /usr/share/elasticsearch/config/http-certs from elastic-internal-http-certificates (ro) /usr/share/elasticsearch/config/transport-certs from elastic-internal-transport-certificates (ro) /usr/share/elasticsearch/config/transport-remote-certs/ from elastic-internal-remote-certificate-authorities (ro) /usr/share/elasticsearch/data from elasticsearch-data (rw) /usr/share/elasticsearch/logs from elasticsearch-logs (rw) 
/usr/share/elasticsearch/plugins from elastic-internal-elasticsearch-plugins-local (rw) Conditions: Type Status PodReadyToStartContainers True Initialized True Ready True ContainersReady True PodScheduled True Volumes: downward-api: Type: DownwardAPI (a volume populated by information about the pod) Items: metadata.labels -> labels elastic-internal-elasticsearch-bin-local: Type: EmptyDir (a temporary directory that shares a pod's lifetime) Medium: SizeLimit: elastic-internal-elasticsearch-config: Type: Secret (a volume populated by a Secret) SecretName: elasticsearch-es-default-es-config Optional: false elastic-internal-elasticsearch-config-local: Type: EmptyDir (a temporary directory that shares a pod's lifetime) Medium: SizeLimit: elastic-internal-elasticsearch-plugins-local: Type: EmptyDir (a temporary directory that shares a pod's lifetime) Medium: SizeLimit: elastic-internal-http-certificates: Type: Secret (a volume populated by a Secret) SecretName: elasticsearch-es-http-certs-internal Optional: false elastic-internal-probe-user: Type: Secret (a volume populated by a Secret) SecretName: elasticsearch-es-internal-users Optional: false elastic-internal-remote-certificate-authorities: Type: Secret (a volume populated by a Secret) SecretName: elasticsearch-es-remote-ca Optional: false elastic-internal-scripts: Type: ConfigMap (a volume populated by a ConfigMap) Name: elasticsearch-es-scripts Optional: false elastic-internal-transport-certificates: Type: Secret (a volume populated by a Secret) SecretName: elasticsearch-es-default-es-transport-certs Optional: false elastic-internal-unicast-hosts: Type: ConfigMap (a volume populated by a ConfigMap) Name: elasticsearch-es-unicast-hosts Optional: false elastic-internal-xpack-file-realm: Type: Secret (a volume populated by a Secret) SecretName: elasticsearch-es-xpack-file-realm Optional: false elasticsearch-data: Type: EmptyDir (a temporary directory that shares a pod's lifetime) Medium: SizeLimit: elasticsearch-logs: Type: EmptyDir (a temporary directory that shares a pod's lifetime) Medium: SizeLimit: tmp-volume: Type: EmptyDir (a temporary directory that shares a pod's lifetime) Medium: SizeLimit: QoS Class: Burstable Node-Selectors: Tolerations: node.kubernetes.io/memory-pressure:NoSchedule op=Exists node.kubernetes.io/not-ready:NoExecute op=Exists for 300s node.kubernetes.io/unreachable:NoExecute op=Exists for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 12m default-scheduler Successfully assigned service-telemetry/elasticsearch-es-default-0 to crc Normal AddedInterface 12m multus Add eth0 [10.217.0.53/23] from ovn-kubernetes Normal Pulling 12m kubelet Pulling image "registry.connect.redhat.com/elastic/elasticsearch:7.17.20" Normal Pulled 11m kubelet Successfully pulled image "registry.connect.redhat.com/elastic/elasticsearch:7.17.20" in 28.803s (28.803s including waiting). Image size: 659128489 bytes. 
Normal Created 11m kubelet Created container: elastic-internal-init-filesystem Normal Started 11m kubelet Started container elastic-internal-init-filesystem Normal Pulled 11m kubelet Container image "registry.connect.redhat.com/elastic/elasticsearch:7.17.20" already present on machine Normal Created 11m kubelet Created container: elastic-internal-suspend Normal Started 11m kubelet Started container elastic-internal-suspend Normal Pulled 11m kubelet Container image "registry.connect.redhat.com/elastic/elasticsearch:7.17.20" already present on machine Normal Created 11m kubelet Created container: elasticsearch Normal Started 11m kubelet Started container elasticsearch Warning Unhealthy 11m kubelet Readiness probe failed: {"timestamp": "2025-12-12T16:28:20+00:00", "message": "readiness probe failed", "curl_rc": "7"} Name: infrawatch-operators-6bs58 Namespace: service-telemetry Priority: 0 Service Account: infrawatch-operators Node: crc/192.168.126.11 Start Time: Fri, 12 Dec 2025 16:34:25 +0000 Labels: catalogsource.operators.coreos.com/update=infrawatch-operators olm.catalogSource= olm.managed=true olm.pod-spec-hash=1n8G20tnqK1naks6OvaILwtmBXnofPrWrBgfHP Annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: true k8s.ovn.org/pod-networks: {"default":{"ip_addresses":["10.217.0.67/23"],"mac_address":"0a:58:0a:d9:00:43","gateway_ips":["10.217.0.1"],"routes":[{"dest":"10.217.0.0... k8s.v1.cni.cncf.io/network-status: [{ "name": "ovn-kubernetes", "interface": "eth0", "ips": [ "10.217.0.67" ], "mac": "0a:58:0a:d9:00:43", "default": true, "dns": {} }] openshift.io/scc: anyuid security.openshift.io/validated-scc-subject-type: user Status: Pending IP: 10.217.0.67 IPs: IP: 10.217.0.67 Controlled By: CatalogSource/infrawatch-operators Containers: registry-server: Container ID: Image: image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest Image ID: Port: 50051/TCP Host Port: 0/TCP State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Requests: cpu: 10m memory: 50Mi Liveness: exec [grpc_health_probe -addr=:50051] delay=10s timeout=5s period=10s #success=1 #failure=3 Readiness: exec [grpc_health_probe -addr=:50051] delay=5s timeout=5s period=10s #success=1 #failure=3 Startup: exec [grpc_health_probe -addr=:50051] delay=0s timeout=5s period=10s #success=1 #failure=10 Environment: Mounts: /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-q4pzm (ro) Conditions: Type Status PodReadyToStartContainers True Initialized True Ready False ContainersReady False PodScheduled True Volumes: kube-api-access-q4pzm: Type: Projected (a volume that contains injected data from multiple sources) TokenExpirationSeconds: 3607 ConfigMapName: kube-root-ca.crt Optional: false DownwardAPI: true ConfigMapName: openshift-service-ca.crt Optional: false QoS Class: Burstable Node-Selectors: kubernetes.io/os=linux Tolerations: node.kubernetes.io/memory-pressure:NoSchedule op=Exists node.kubernetes.io/not-ready:NoExecute op=Exists for 300s node.kubernetes.io/unreachable:NoExecute op=Exists for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 5m32s default-scheduler Successfully assigned service-telemetry/infrawatch-operators-6bs58 to crc Normal AddedInterface 5m31s multus Add eth0 [10.217.0.67/23] from ovn-kubernetes Normal Pulling 2m40s (x5 over 5m31s) kubelet Pulling image "image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest" Warning Failed 2m40s (x5 
over 5m31s) kubelet Failed to pull image "image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest": unable to pull image or OCI artifact: pull image err: initializing source docker://image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest: reading manifest latest in image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index: manifest unknown; artifact err: get manifest: build image source: reading manifest latest in image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index: manifest unknown Warning Failed 2m40s (x5 over 5m31s) kubelet Error: ErrImagePull Normal BackOff 30s (x21 over 5m30s) kubelet Back-off pulling image "image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest" Warning Failed 30s (x21 over 5m30s) kubelet Error: ImagePullBackOff Name: infrawatch-operators-cdpts Namespace: service-telemetry Priority: 0 Service Account: infrawatch-operators Node: crc/192.168.126.11 Start Time: Fri, 12 Dec 2025 16:29:28 +0000 Labels: olm.catalogSource=infrawatch-operators olm.managed=true olm.pod-spec-hash=1n8G20tnqK1naks6OvaILwtmBXnofPrWrBgfHP Annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: true k8s.ovn.org/pod-networks: {"default":{"ip_addresses":["10.217.0.65/23"],"mac_address":"0a:58:0a:d9:00:41","gateway_ips":["10.217.0.1"],"routes":[{"dest":"10.217.0.0... k8s.v1.cni.cncf.io/network-status: [{ "name": "ovn-kubernetes", "interface": "eth0", "ips": [ "10.217.0.65" ], "mac": "0a:58:0a:d9:00:41", "default": true, "dns": {} }] openshift.io/scc: anyuid security.openshift.io/validated-scc-subject-type: user Status: Pending IP: 10.217.0.65 IPs: IP: 10.217.0.65 Controlled By: CatalogSource/infrawatch-operators Containers: registry-server: Container ID: Image: image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest Image ID: Port: 50051/TCP Host Port: 0/TCP State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Requests: cpu: 10m memory: 50Mi Liveness: exec [grpc_health_probe -addr=:50051] delay=10s timeout=5s period=10s #success=1 #failure=3 Readiness: exec [grpc_health_probe -addr=:50051] delay=5s timeout=5s period=10s #success=1 #failure=3 Startup: exec [grpc_health_probe -addr=:50051] delay=0s timeout=5s period=10s #success=1 #failure=10 Environment: Mounts: /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-p4zc7 (ro) Conditions: Type Status PodReadyToStartContainers True Initialized True Ready False ContainersReady False PodScheduled True Volumes: kube-api-access-p4zc7: Type: Projected (a volume that contains injected data from multiple sources) TokenExpirationSeconds: 3607 ConfigMapName: kube-root-ca.crt Optional: false DownwardAPI: true ConfigMapName: openshift-service-ca.crt Optional: false QoS Class: Burstable Node-Selectors: kubernetes.io/os=linux Tolerations: node.kubernetes.io/memory-pressure:NoSchedule op=Exists node.kubernetes.io/not-ready:NoExecute op=Exists for 300s node.kubernetes.io/unreachable:NoExecute op=Exists for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned service-telemetry/infrawatch-operators-cdpts to crc Normal AddedInterface 10m multus Add eth0 [10.217.0.65/23] from ovn-kubernetes Normal Pulling 7m31s (x5 over 10m) kubelet Pulling image 
"image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest" Warning Failed 7m31s (x5 over 10m) kubelet Failed to pull image "image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest": unable to pull image or OCI artifact: pull image err: initializing source docker://image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest: reading manifest latest in image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index: manifest unknown; artifact err: get manifest: build image source: reading manifest latest in image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index: manifest unknown Warning Failed 7m31s (x5 over 10m) kubelet Error: ErrImagePull Normal BackOff 22s (x43 over 10m) kubelet Back-off pulling image "image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest" Warning Failed 22s (x43 over 10m) kubelet Error: ImagePullBackOff home/zuul/zuul-output/logs/controller/post_question_deployment.log0000644000175000017500000035031615117042536025144 0ustar zuulzuulWhat images were created in the internal registry? What state is the STO csv in? apiVersion: v1 items: - apiVersion: operators.coreos.com/v1alpha1 kind: ClusterServiceVersion metadata: annotations: alm-examples: |- [ { "apiVersion": "monitoring.rhobs/v1alpha1", "kind": "MonitoringStack", "metadata": { "labels": { "mso": "example" }, "name": "sample-monitoring-stack" }, "spec": { "logLevel": "debug", "resourceSelector": { "matchLabels": { "app": "demo" } }, "retention": "1d" } }, { "apiVersion": "monitoring.rhobs/v1alpha1", "kind": "ThanosQuerier", "metadata": { "name": "example-thanos" }, "spec": { "selector": { "matchLabels": { "mso": "example" } } } } ] capabilities: Basic Install categories: Monitoring certified: "false" containerImage: registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb createdAt: "2025-11-03T09:23:49Z" description: A Go based Kubernetes operator to setup and manage highly available Monitoring Stack using Prometheus, Alertmanager and Thanos Querier. 
features.operators.openshift.io/cnf: "false" features.operators.openshift.io/cni: "false" features.operators.openshift.io/csi: "false" features.operators.openshift.io/disconnected: "true" features.operators.openshift.io/fips-compliant: "false" features.operators.openshift.io/proxy-aware: "false" features.operators.openshift.io/tls-profiles: "false" features.operators.openshift.io/token-auth-aws: "false" features.operators.openshift.io/token-auth-azure: "false" features.operators.openshift.io/token-auth-gcp: "false" olm.operatorGroup: global-operators olm.operatorNamespace: openshift-operators olm.skipRange: '>=..0 <1.3.0' operatorframework.io/cluster-monitoring: "true" operatorframework.io/properties: '{"properties":[{"type":"olm.gvk","value":{"group":"monitoring.rhobs","kind":"Alertmanager","version":"v1"}},{"type":"olm.gvk","value":{"group":"monitoring.rhobs","kind":"AlertmanagerConfig","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"monitoring.rhobs","kind":"MonitoringStack","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"monitoring.rhobs","kind":"PodMonitor","version":"v1"}},{"type":"olm.gvk","value":{"group":"monitoring.rhobs","kind":"Probe","version":"v1"}},{"type":"olm.gvk","value":{"group":"monitoring.rhobs","kind":"Prometheus","version":"v1"}},{"type":"olm.gvk","value":{"group":"monitoring.rhobs","kind":"PrometheusAgent","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"monitoring.rhobs","kind":"PrometheusRule","version":"v1"}},{"type":"olm.gvk","value":{"group":"monitoring.rhobs","kind":"ScrapeConfig","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"monitoring.rhobs","kind":"ServiceMonitor","version":"v1"}},{"type":"olm.gvk","value":{"group":"monitoring.rhobs","kind":"ThanosQuerier","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"monitoring.rhobs","kind":"ThanosRuler","version":"v1"}},{"type":"olm.gvk","value":{"group":"observability.openshift.io","kind":"ObservabilityInstaller","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"observability.openshift.io","kind":"UIPlugin","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"perses.dev","kind":"Perses","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"perses.dev","kind":"PersesDashboard","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"perses.dev","kind":"PersesDatasource","version":"v1alpha1"}},{"type":"olm.package","value":{"packageName":"cluster-observability-operator","version":"1.3.0"}}]}' operatorframework.io/suggested-namespace: openshift-cluster-observability-operator operators.openshift.io/valid-subscription: '["OpenShift Kubernetes Engine", "OpenShift Container Platform", "OpenShift Platform Plus"]' operators.operatorframework.io/builder: operator-sdk-v1.41.1 operators.operatorframework.io/internal-objects: |- [ "prometheuses.monitoring.rhobs", "alertmanagers.monitoring.rhobs", "thanosrulers.monitoring.rhobs", "prometheusagents.monitoring.rhobs", "perses.perses.dev" ] operators.operatorframework.io/project_layout: unknown repository: https://github.com/rhobs/observability-operator support: Cluster Observability (https://issues.redhat.com/projects/COO/) creationTimestamp: "2025-12-12T16:26:59Z" generation: 1 labels: olm.copiedFrom: openshift-operators olm.managed: "true" operatorframework.io/arch.amd64: supported operatorframework.io/arch.arm64: supported operatorframework.io/arch.ppc64le: supported operatorframework.io/arch.s390x: supported name: cluster-observability-operator.v1.3.0 namespace: service-telemetry resourceVersion: 
"43227" uid: 4c05580c-c2ab-433d-b954-395f764eb272 spec: apiservicedefinitions: {} cleanup: enabled: false customresourcedefinitions: owned: - description: AlertmanagerConfig configures the Prometheus Alertmanager, specifying how alerts should be grouped, inhibited and notified to external systems displayName: AlertmanagerConfig kind: AlertmanagerConfig name: alertmanagerconfigs.monitoring.rhobs version: v1alpha1 - description: Alertmanager describes an Alertmanager cluster displayName: Alertmanager kind: Alertmanager name: alertmanagers.monitoring.rhobs version: v1 - description: MonitoringStack is the Schema for the monitoringstacks API displayName: MonitoringStack kind: MonitoringStack name: monitoringstacks.monitoring.rhobs version: v1alpha1 - kind: ObservabilityInstaller name: observabilityinstallers.observability.openshift.io version: v1alpha1 - description: Perses is the Schema for the perses API displayName: Perses kind: Perses name: perses.perses.dev version: v1alpha1 - description: A Perses Dashboard displayName: Perses Dashboard kind: PersesDashboard name: persesdashboards.perses.dev version: v1alpha1 - description: A Perses Datasource displayName: Perses Datasource kind: PersesDatasource name: persesdatasources.perses.dev version: v1alpha1 - description: PodMonitor defines monitoring for a set of pods displayName: PodMonitor kind: PodMonitor name: podmonitors.monitoring.rhobs version: v1 - description: Probe defines monitoring for a set of static targets or ingresses displayName: Probe kind: Probe name: probes.monitoring.rhobs version: v1 - description: PrometheusAgent defines a Prometheus agent deployment displayName: PrometheusAgent kind: PrometheusAgent name: prometheusagents.monitoring.rhobs version: v1alpha1 - description: Prometheus defines a Prometheus deployment displayName: Prometheus kind: Prometheus name: prometheuses.monitoring.rhobs version: v1 - description: PrometheusRule defines recording and alerting rules for a Prometheus instance displayName: PrometheusRule kind: PrometheusRule name: prometheusrules.monitoring.rhobs version: v1 - description: ScrapeConfig defines a namespaced Prometheus scrape_config to be aggregated across multiple namespaces into the Prometheus configuration displayName: ScrapeConfig kind: ScrapeConfig name: scrapeconfigs.monitoring.rhobs version: v1alpha1 - description: ServiceMonitor defines monitoring for a set of services displayName: ServiceMonitor kind: ServiceMonitor name: servicemonitors.monitoring.rhobs version: v1 - description: ThanosQuerier outlines the Thanos querier components, managed by this stack displayName: ThanosQuerier kind: ThanosQuerier name: thanosqueriers.monitoring.rhobs version: v1alpha1 - description: ThanosRuler defines a ThanosRuler deployment displayName: ThanosRuler kind: ThanosRuler name: thanosrulers.monitoring.rhobs version: v1 - description: UIPlugin defines a console plugin for observability displayName: UIPlugin kind: UIPlugin name: uiplugins.observability.openshift.io version: v1alpha1 description: |- Cluster Observability Operator is a Go based Kubernetes operator to easily setup and manage various observability tools. 
### Supported Features - Setup multiple Highly Available Monitoring stack using Prometheus, Alertmanager and Thanos Querier - Customizable configuration for managing Prometheus deployments - Customizable configuration for managing Alertmanager deployments - Customizable configuration for managing Thanos Querier deployments - Setup console plugins - Setup korrel8r - Setup Perses - Setup Cluster Health Analyzer ### Documentation - **[Documentation](https://docs.redhat.com/en/documentation/openshift_container_platform/latest/html/cluster_observability_operator/index)** ### License Licensed under the [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0) displayName: Cluster Observability Operator icon: - base64data: PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyBpZD0idXVpZC1kMWI4NDIzOC0wYzgxLTQ5MjctOGQ4Mi03OTcyN2Y5OGZjYWMiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDM4IDM4Ij4KPHRpdGxlPkNsdXN0ZXIgb2JzZXJ2YWJpbGl0eTwvdGl0bGU+CjxkZXNjPmNsb3VkPC9kZXNjPgo8bWV0YWRhdGE+PD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4KPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgOC4wLWMwMDEgMS4wMDAwMDAsIDAwMDAvMDAvMDAtMDA6MDA6MDAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iCiAgICAgICAgICAgIHhtbG5zOnRpZmY9Imh0dHA6Ly9ucy5hZG9iZS5jb20vdGlmZi8xLjAvIgogICAgICAgICAgICB4bWxuczpkYz0iaHR0cDovL3B1cmwub3JnL2RjL2VsZW1lbnRzLzEuMS8iPgogICAgICAgICA8IS0tIG1ldGFkYXRhIGZpZWxkcyAtLT4KICAgICAgPC9yZGY6RGVzY3JpcHRpb24+CiAgIDwvcmRmOlJERj4KPC94OnhtcG1ldGE+Cjw/eHBhY2tldCBlbmQ9InciPz48L21ldGFkYXRhPgo8ZGVmcz48c3R5bGU+LnV1aWQtMjRiMGQ5N2ItNjgxZS00ZGE2LWExYzctNzY3MWFlNTc1MzJhe2ZpbGw6I2UwZTBlMDt9LnV1aWQtMjRiMGQ5N2ItNjgxZS00ZGE2LWExYzctNzY3MWFlNTc1MzJhLC51dWlkLTk3YzFlYzg0LTliODEtNDU2ZS05OWFhLTcxMzc1MGViNjllMCwudXVpZC1mMDMyMTc4ZS1iZDUwLTRkZTctYjc3My02NjViZmQ1YzViYjgsLnV1aWQtNGJlZjgyMGItNTZjOS00N2U3LTgyYTMtMmRiOGQ4YzdiMTEye3N0cm9rZS13aWR0aDowcHg7fS51dWlkLTk3YzFlYzg0LTliODEtNDU2ZS05OWFhLTcxMzc1MGViNjllMHtmaWxsOiMwMDA7fS51dWlkLWYwMzIxNzhlLWJkNTAtNGRlNy1iNzczLTY2NWJmZDVjNWJiOHtmaWxsOiNlMDA7fS51dWlkLTRiZWY4MjBiLTU2YzktNDdlNy04MmEzLTJkYjhkOGM3YjExMntmaWxsOiNmZmY7fTwvc3R5bGU+PC9kZWZzPgo8cmVjdCBjbGFzcz0idXVpZC00YmVmODIwYi01NmM5LTQ3ZTctODJhMy0yZGI4ZDhjN2IxMTIiIHg9IjEiIHk9IjEiIHdpZHRoPSIzNiIgaGVpZ2h0PSIzNiIgcng9IjkiIHJ5PSI5Ii8+CjxwYXRoIGNsYXNzPSJ1dWlkLTI0YjBkOTdiLTY4MWUtNGRhNi1hMWM3LTc2NzFhZTU3NTMyYSIgZD0iTTI4LDIuMjVjNC4yNzM0LDAsNy43NSwzLjQ3NjYsNy43NSw3Ljc1djE4YzAsNC4yNzM0LTMuNDc2Niw3Ljc1LTcuNzUsNy43NUgxMGMtNC4yNzM0LDAtNy43NS0zLjQ3NjYtNy43NS03Ljc1VjEwYzAtNC4yNzM0LDMuNDc2Ni03Ljc1LDcuNzUtNy43NWgxOE0yOCwxSDEwQzUuMDI5NCwxLDEsNS4wMjk0LDEsMTB2MThjMCw0Ljk3MDYsNC4wMjk0LDksOSw5aDE4YzQuOTcwNiwwLDktNC4wMjk0LDktOVYxMGMwLTQuOTcwNi00LjAyOTQtOS05LTloMFoiLz4KPHBhdGggY2xhc3M9InV1aWQtZjAzMjE3OGUtYmQ1MC00ZGU3LWI3NzMtNjY1YmZkNWM1YmI4IiBkPSJNMjEuMzc1LDE5YzAsLjM0NTIuMjgwMy42MjUuNjI1LjYyNXMuNjI1LS4yNzk4LjYyNS0uNjI1YzAtMS45OTktMS42MjYtMy42MjUtMy42MjUtMy42MjUtLjM0NDcsMC0uNjI1LjI3OTgtLjYyNS42MjVzLjI4MDMuNjI1LjYyNS42MjVjMS4zMDk2LDAsMi4zNzUsMS4wNjU0LDIuMzc1LDIuMzc1WiIvPgo8cGF0aCBjbGFzcz0idXVpZC1mMDMyMTc4ZS1iZDUwLTRkZTctYjc3My02NjViZmQ1YzViYjgiIGQ9Ik0xOSwxMy4zNzVjLS43ODIyLDAtMS41MzkxLjE1NzctMi4yNS40NjgzLS4zMTY0LjEzODItLjQ2MDkuNTA2OC0uMzIyMy44MjMyLjEzNzcuMzE2NC41MDc4LjQ1ODUuODIyMy4zMjIzLjU1MjctLjI0MTIsMS4xNDE2LS4zNjM4LDEuNzUtLjM2MzgsMi40MTIxLDAsNC4zNzUsMS45NjI0LDQuMzc1LDQuMzc1cy0xLjk2MjksNC4zNzUtNC4zNzUsNC4zNzUtNC4zN
zUtMS45NjI0LTQuMzc1LTQuMzc1YzAtLjYwODkuMTIyMS0xLjE5NzMuMzYzMy0xLjc0OTUuMTM4Ny0uMzE2NC0uMDA1OS0uNjg1MS0uMzIyMy0uODIzMi0uMzE0NS0uMTM5Mi0uNjgzNi4wMDU0LS44MjIzLjMyMjMtLjMxMTUuNzExNC0uNDY4OCwxLjQ2ODMtLjQ2ODgsMi4yNTA1LDAsMy4xMDE2LDIuNTIzNCw1LjYyNSw1LjYyNSw1LjYyNXM1LjYyNS0yLjUyMzQsNS42MjUtNS42MjUtMi41MjM0LTUuNjI1LTUuNjI1LTUuNjI1WiIvPgo8cGF0aCBjbGFzcz0idXVpZC05N2MxZWM4NC05YjgxLTQ1NmUtOTlhYS03MTM3NTBlYjY5ZTAiIGQ9Ik0zMC40NjY4LDE4LjczODhjLTIuMDU2Ni00LjQ3MzEtNi41NTc2LTcuMzYzOC0xMS40NjY4LTcuMzYzOHMtOS40MTAyLDIuODkwNi0xMS40NjY4LDcuMzYzOGMtLjA3NTIuMTY2LS4wNzUyLjM1NjQsMCwuNTIyNSwyLjA1NjYsNC40NzMxLDYuNTU3Niw3LjM2MzgsMTEuNDY2OCw3LjM2MzhzOS40MTAyLTIuODkwNiwxMS40NjY4LTcuMzYzOGMuMDc1Mi0uMTY2LjA3NTItLjM1NjQsMC0uNTIyNVpNMTksMjUuMzc1Yy00LjMyNjIsMC04LjMwMDgtMi40OTI3LTEwLjIwNjEtNi4zNzUsMS45MDUzLTMuODgyMyw1Ljg3OTktNi4zNzUsMTAuMjA2MS02LjM3NXM4LjMwMDgsMi40OTI3LDEwLjIwNjEsNi4zNzVjLTEuOTA1MywzLjg4MjMtNS44Nzk5LDYuMzc1LTEwLjIwNjEsNi4zNzVaIi8+CjxwYXRoIGNsYXNzPSJ1dWlkLTk3YzFlYzg0LTliODEtNDU2ZS05OWFhLTcxMzc1MGViNjllMCIgZD0iTTE1LjQ0MjQsMTQuNTU4MWMtLjI0NDEtLjI0NDEtLjY0MDYtLjI0NDEtLjg4NDgsMC0uMjQzMi4yNDQxLS4yNDMyLjYzOTYsMCwuODgzOGw0LDRjLjEyMjEuMTIyMS4yODIyLjE4MzEuNDQyNC4xODMxcy4zMjAzLS4wNjEuNDQyNC0uMTgzMWMuMjQzMi0uMjQ0MS4yNDMyLS42Mzk2LDAtLjg4MzhsLTQtNFoiLz4KPC9zdmc+Cg== mediatype: image/svg+xml install: spec: clusterPermissions: - rules: - apiGroups: - monitoring.rhobs resources: - alertmanagers - alertmanagers/finalizers - alertmanagers/status - alertmanagerconfigs - prometheuses - prometheuses/finalizers - prometheuses/status - prometheusagents - prometheusagents/finalizers - prometheusagents/status - thanosrulers - thanosrulers/finalizers - thanosrulers/status - scrapeconfigs - scrapeconfigs/status - servicemonitors - servicemonitors/status - podmonitors - podmonitors/status - probes - probes/status - prometheusrules verbs: - '*' - apiGroups: - apps resources: - statefulsets verbs: - '*' - apiGroups: - "" resources: - configmaps - secrets verbs: - '*' - apiGroups: - "" resources: - pods verbs: - list - delete - apiGroups: - "" resources: - services - services/finalizers verbs: - get - create - update - delete - apiGroups: - "" resources: - nodes verbs: - list - watch - apiGroups: - "" resources: - namespaces verbs: - get - list - watch - apiGroups: - "" resources: - events verbs: - patch - create - apiGroups: - networking.k8s.io resources: - ingresses verbs: - get - list - watch - apiGroups: - storage.k8s.io resources: - storageclasses verbs: - get - apiGroups: - "" resources: - endpoints verbs: - get - create - update - delete - apiGroups: - security.openshift.io resourceNames: - nonroot-v2 - nonroot resources: - securitycontextconstraints verbs: - use serviceAccountName: obo-prometheus-operator - rules: - apiGroups: - security.openshift.io resourceNames: - nonroot-v2 - nonroot resources: - securitycontextconstraints verbs: - use serviceAccountName: obo-prometheus-operator-admission-webhook - rules: - apiGroups: - "" resources: - configmaps - secrets - serviceaccounts - services verbs: - create - delete - get - list - patch - update - watch - apiGroups: - "" resources: - endpoints - events - namespaces - nodes - persistentvolumeclaims - persistentvolumes - pods - replicationcontrollers verbs: - get - list - watch - apiGroups: - "" resources: - services/finalizers verbs: - patch - update - apiGroups: - apps resources: - daemonsets - replicasets - statefulsets verbs: - get - list - watch - apiGroups: - apps resources: - deployments verbs: - create - delete - get - list - patch - update - watch - apiGroups: - 
authentication.k8s.io resources: - tokenreviews verbs: - create - apiGroups: - authorization.k8s.io resources: - subjectaccessreviews verbs: - create - apiGroups: - autoscaling resources: - horizontalpodautoscalers verbs: - get - list - watch - apiGroups: - batch resources: - cronjobs - jobs verbs: - get - list - watch - apiGroups: - config.openshift.io resources: - clusterversions verbs: - get - list - watch - apiGroups: - console.openshift.io resources: - consoleplugins verbs: - create - delete - get - list - patch - update - watch - apiGroups: - extensions - networking.k8s.io resources: - ingresses verbs: - get - list - watch - apiGroups: - loki.grafana.com resources: - application - audit - infrastructure - network verbs: - get - apiGroups: - loki.grafana.com resources: - lokistacks verbs: - get - list - apiGroups: - monitoring.coreos.com resourceNames: - main resources: - alertmanagers/api verbs: - get - list - apiGroups: - monitoring.coreos.com resourceNames: - k8s resources: - prometheuses/api verbs: - create - get - update - apiGroups: - monitoring.coreos.com resources: - servicemonitors verbs: - create - delete - get - list - patch - update - watch - apiGroups: - monitoring.rhobs resources: - alertmanagers - prometheuses - servicemonitors - thanosqueriers verbs: - create - delete - list - patch - update - watch - apiGroups: - monitoring.rhobs resources: - monitoringstacks verbs: - create - get - list - update - watch - apiGroups: - monitoring.rhobs resources: - monitoringstacks/finalizers - monitoringstacks/status verbs: - get - update - apiGroups: - monitoring.rhobs resources: - thanosqueriers/finalizers verbs: - update - apiGroups: - monitoring.rhobs resources: - thanosqueriers/status verbs: - get - patch - update - apiGroups: - networking.k8s.io resources: - ingresses - networkpolicies verbs: - get - list - watch - apiGroups: - observability.openshift.io resources: - observabilityinstallers - uiplugins verbs: - create - delete - get - list - patch - update - watch - apiGroups: - observability.openshift.io resources: - observabilityinstallers/finalizers - observabilityinstallers/status verbs: - delete - get - patch - update - apiGroups: - observability.openshift.io resources: - uiplugins/finalizers - uiplugins/status verbs: - get - update - apiGroups: - opentelemetry.io resources: - opentelemetrycollectors verbs: - create - delete - get - list - patch - update - watch - apiGroups: - opentelemetry.io resources: - opentelemetrycollectors/status verbs: - get - list - watch - apiGroups: - operator.openshift.io resources: - consoles verbs: - get - list - patch - watch - apiGroups: - operators.coreos.com resources: - clusterserviceversions - subscriptions verbs: - create - delete - get - list - patch - update - watch - apiGroups: - perses.dev resources: - perses - persesdashboards - persesdatasources verbs: - create - delete - get - list - patch - update - watch - apiGroups: - perses.dev resources: - perses/finalizers - persesdashboards/finalizers - persesdatasources/finalizers verbs: - update - apiGroups: - perses.dev resources: - perses/status - persesdashboards/status - persesdatasources/status verbs: - get - patch - update - apiGroups: - policy resources: - poddisruptionbudgets verbs: - create - delete - get - list - patch - update - watch - apiGroups: - rbac.authorization.k8s.io resources: - clusterrolebindings - clusterroles - rolebindings - roles verbs: - create - delete - get - list - patch - update - watch - apiGroups: - security.openshift.io resourceNames: - nonroot - 
nonroot-v2 resources: - securitycontextconstraints verbs: - use - apiGroups: - storage.k8s.io resources: - storageclasses - volumeattachments verbs: - get - list - watch - apiGroups: - tempo.grafana.com resourceNames: - traces resources: - application verbs: - create - apiGroups: - tempo.grafana.com resources: - tempomonolithics verbs: - list - apiGroups: - tempo.grafana.com resources: - tempostacks verbs: - create - delete - get - list - patch - update - watch - apiGroups: - tempo.grafana.com resources: - tempostacks/status verbs: - get - list - watch serviceAccountName: observability-operator-sa - rules: - apiGroups: - apps resources: - deployments - statefulsets verbs: - create - delete - get - list - patch - update - watch - apiGroups: - "" resources: - events verbs: - create - patch - apiGroups: - "" resources: - services - configmaps - secrets verbs: - get - patch - update - create - delete - list - watch - apiGroups: - perses.dev resources: - perses verbs: - create - delete - get - list - patch - update - watch - apiGroups: - perses.dev resources: - perses/finalizers verbs: - update - apiGroups: - perses.dev resources: - perses/status verbs: - get - patch - update - apiGroups: - perses.dev resources: - persesdashboards verbs: - create - delete - get - list - patch - update - watch - apiGroups: - perses.dev resources: - persesdashboards/finalizers verbs: - update - apiGroups: - perses.dev resources: - persesdashboards/status verbs: - get - patch - update - apiGroups: - perses.dev resources: - persesdatasources verbs: - create - delete - get - list - patch - update - watch - apiGroups: - perses.dev resources: - persesdatasources/finalizers verbs: - update - apiGroups: - perses.dev resources: - persesdatasources/status verbs: - get - patch - update serviceAccountName: perses-operator deployments: - label: app.kubernetes.io/component: controller app.kubernetes.io/name: prometheus-operator app.kubernetes.io/part-of: observability-operator app.kubernetes.io/version: 0.86.1-rhobs1 name: obo-prometheus-operator spec: replicas: 1 selector: matchLabels: app.kubernetes.io/component: controller app.kubernetes.io/name: prometheus-operator app.kubernetes.io/part-of: observability-operator strategy: {} template: metadata: annotations: kubectl.kubernetes.io/default-container: prometheus-operator target.workload.openshift.io/management: '{"effect": "PreferredDuringScheduling"}' creationTimestamp: null labels: app.kubernetes.io/component: controller app.kubernetes.io/name: prometheus-operator app.kubernetes.io/part-of: observability-operator app.kubernetes.io/version: 0.86.1-rhobs1 spec: affinity: nodeAffinity: preferredDuringSchedulingIgnoredDuringExecution: - preference: matchExpressions: - key: node-role.kubernetes.io/infra operator: Exists weight: 1 automountServiceAccountToken: true containers: - args: - --prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) - --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator - --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator - --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator env: - name: GOGC value: "30" - name: RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER value: registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62 image: 
registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3 name: prometheus-operator ports: - containerPort: 8080 name: http protocol: TCP resources: limits: cpu: 100m memory: 500Mi requests: cpu: 5m memory: 150Mi securityContext: allowPrivilegeEscalation: false capabilities: drop: - ALL readOnlyRootFilesystem: true terminationMessagePolicy: FallbackToLogsOnError securityContext: runAsNonRoot: true runAsUser: 65534 serviceAccountName: obo-prometheus-operator tolerations: - effect: NoSchedule key: node-role.kubernetes.io/infra operator: Exists - label: app.kubernetes.io/name: prometheus-operator-admission-webhook app.kubernetes.io/part-of: observability-operator app.kubernetes.io/version: 0.86.1-rhobs1 name: obo-prometheus-operator-admission-webhook spec: replicas: 2 selector: matchLabels: app.kubernetes.io/name: prometheus-operator-admission-webhook app.kubernetes.io/part-of: observability-operator strategy: rollingUpdate: maxUnavailable: 1 template: metadata: annotations: kubectl.kubernetes.io/default-container: prometheus-operator-admission-webhook creationTimestamp: null labels: app.kubernetes.io/name: prometheus-operator-admission-webhook app.kubernetes.io/part-of: observability-operator app.kubernetes.io/version: 0.86.1-rhobs1 spec: affinity: nodeAffinity: preferredDuringSchedulingIgnoredDuringExecution: - preference: matchExpressions: - key: node-role.kubernetes.io/infra operator: Exists weight: 1 podAntiAffinity: requiredDuringSchedulingIgnoredDuringExecution: - labelSelector: matchLabels: app.kubernetes.io/name: prometheus-operator-admission-webhook app.kubernetes.io/part-of: observability-operator namespaces: - default topologyKey: kubernetes.io/hostname automountServiceAccountToken: false containers: - args: - --web.enable-tls=true - --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt - --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key image: registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec name: prometheus-operator-admission-webhook ports: - containerPort: 8443 name: https protocol: TCP resources: limits: cpu: 200m memory: 200Mi requests: cpu: 50m memory: 50Mi securityContext: allowPrivilegeEscalation: false capabilities: drop: - ALL readOnlyRootFilesystem: true terminationMessagePolicy: FallbackToLogsOnError securityContext: runAsNonRoot: true runAsUser: 65534 serviceAccountName: obo-prometheus-operator-admission-webhook tolerations: - effect: NoSchedule key: node-role.kubernetes.io/infra operator: Exists - label: app.kubernetes.io/component: operator app.kubernetes.io/name: observability-operator app.kubernetes.io/version: 1.3.0 name: observability-operator spec: replicas: 1 selector: matchLabels: app.kubernetes.io/component: operator app.kubernetes.io/name: observability-operator strategy: {} template: metadata: creationTimestamp: null labels: app.kubernetes.io/component: operator app.kubernetes.io/name: observability-operator app.kubernetes.io/version: 1.3.0 spec: affinity: nodeAffinity: preferredDuringSchedulingIgnoredDuringExecution: - preference: matchExpressions: - key: node-role.kubernetes.io/infra operator: Exists weight: 1 containers: - args: - --namespace=$(NAMESPACE) - --images=perses=$(RELATED_IMAGE_PERSES) - --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) - --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) - 
--images=thanos=$(RELATED_IMAGE_THANOS) - --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) - --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) - --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) - --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) - --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) - --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) - --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) - --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) - --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) - --images=korrel8r=$(RELATED_IMAGE_KORREL8R) - --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) - --openshift.enabled=true env: - name: NAMESPACE valueFrom: fieldRef: fieldPath: metadata.namespace - name: RELATED_IMAGE_ALERTMANAGER value: registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01 - name: RELATED_IMAGE_PROMETHEUS value: registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b - name: RELATED_IMAGE_THANOS value: registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4 - name: RELATED_IMAGE_PERSES value: registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade - name: RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN value: registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb - name: RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN value: registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f - name: RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5 value: registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8 - name: RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4 value: registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9 - name: RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN value: registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b - name: RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4 value: registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9 - name: RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN value: registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3 - name: RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN value: registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9 - name: RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5 value: 
registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c - name: RELATED_IMAGE_KORREL8R value: registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4 - name: RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER value: registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91596d5356a2414a2bd103baaef9fa7838c672eb2 image: registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb imagePullPolicy: Always livenessProbe: httpGet: path: /healthz port: 8081 name: operator readinessProbe: httpGet: path: /healthz port: 8081 resources: limits: cpu: 400m memory: 512Mi requests: cpu: 100m memory: 256Mi securityContext: allowPrivilegeEscalation: false capabilities: drop: - ALL volumeMounts: - mountPath: /etc/tls/private name: observability-operator-tls readOnly: true securityContext: runAsNonRoot: true serviceAccountName: observability-operator-sa terminationGracePeriodSeconds: 30 tolerations: - effect: NoSchedule key: node-role.kubernetes.io/infra operator: Exists volumes: - name: observability-operator-tls secret: optional: true secretName: observability-operator-tls - label: app.kubernetes.io/component: controller app.kubernetes.io/name: perses-operator app.kubernetes.io/part-of: observability-operator name: perses-operator spec: replicas: 1 selector: matchLabels: app.kubernetes.io/component: controller app.kubernetes.io/name: perses-operator app.kubernetes.io/part-of: observability-operator strategy: {} template: metadata: annotations: kubectl.kubernetes.io/default-container: perses-operator creationTimestamp: null labels: app.kubernetes.io/component: controller app.kubernetes.io/name: perses-operator app.kubernetes.io/part-of: observability-operator spec: affinity: nodeAffinity: preferredDuringSchedulingIgnoredDuringExecution: - preference: matchExpressions: - key: node-role.kubernetes.io/infra operator: Exists weight: 1 containers: - image: registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385 livenessProbe: httpGet: path: /healthz port: 8081 initialDelaySeconds: 15 periodSeconds: 20 name: perses-operator readinessProbe: httpGet: path: /readyz port: 8081 initialDelaySeconds: 5 periodSeconds: 10 resources: limits: cpu: 500m memory: 512Mi requests: cpu: 100m memory: 128Mi securityContext: allowPrivilegeEscalation: false capabilities: drop: - ALL volumeMounts: - mountPath: /ca name: openshift-service-ca readOnly: true serviceAccountName: perses-operator tolerations: - effect: NoSchedule key: node-role.kubernetes.io/infra operator: Exists volumes: - configMap: items: - key: service-ca.crt path: service-ca.crt name: openshift-service-ca.crt optional: true name: openshift-service-ca strategy: deployment installModes: - supported: false type: OwnNamespace - supported: false type: SingleNamespace - supported: false type: MultiNamespace - supported: true type: AllNamespaces keywords: - observability - monitoring - prometheus - thanos links: - name: GitHub url: https://github.com/rhobs/observability-operator maintainers: - email: jfajersk@redhat.com name: Jan Fajerski - email: spasquie@redhat.com name: Simon Pasquier maturity: alpha provider: name: Red Hat relatedImages: - image: 
registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb name: cluster-observability-operator - image: registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62 name: prometheus-config-reloader - image: registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01 name: alertmanager - image: registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b name: prometheus - image: registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4 name: thanos - image: registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec name: prometheus-operator-admission-webhook - image: registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3 name: prometheus-operator - image: registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb name: ui-dashboards - image: registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f name: ui-tracing - image: registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8 name: ui-tracing-pf5 - image: registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9 name: ui-tracing-pf4 - image: registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b name: ui-logging - image: registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9 name: ui-logging-pf4 - image: registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3 name: ui-troubleshooting - image: registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9 name: ui-monitoring - image: registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c name: ui-monitoring-pf5 - image: registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4 name: korrel8r - image: registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91596d5356a2414a2bd103baaef9fa7838c672eb2 name: cluster-health-analyzer - image: registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade name: perses - image: 
registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385 name: perses-operator replaces: cluster-observability-operator.v1.2.2 version: 1.3.0 webhookdefinitions: - admissionReviewVersions: - v1 containerPort: 443 deploymentName: obo-prometheus-operator-admission-webhook failurePolicy: Ignore generateName: alertmanagerconfigs.monitoring.rhobs rules: - apiGroups: - monitoring.rhobs apiVersions: - '*' operations: - CREATE - UPDATE resources: - alertmanagerconfigs scope: Namespaced sideEffects: None targetPort: https timeoutSeconds: 5 type: ValidatingAdmissionWebhook webhookPath: /admission-alertmanagerconfigs/validate - admissionReviewVersions: - v1 containerPort: 443 deploymentName: obo-prometheus-operator-admission-webhook failurePolicy: Ignore generateName: prometheusrules.monitoring.rhobs rules: - apiGroups: - monitoring.rhobs apiVersions: - '*' operations: - CREATE - UPDATE resources: - prometheusrules scope: Namespaced sideEffects: None targetPort: https timeoutSeconds: 5 type: ValidatingAdmissionWebhook webhookPath: /admission-prometheusrules/validate status: certsLastUpdated: "2025-12-12T16:27:01Z" certsRotateAt: "2027-12-11T16:27:00Z" cleanup: {} conditions: - lastTransitionTime: "2025-12-12T16:26:52Z" lastUpdateTime: "2025-12-12T16:26:52Z" message: requirements not yet checked phase: Pending reason: RequirementsUnknown - lastTransitionTime: "2025-12-12T16:26:52Z" lastUpdateTime: "2025-12-12T16:26:52Z" message: one or more requirements couldn't be found phase: Pending reason: RequirementsNotMet - lastTransitionTime: "2025-12-12T16:26:59Z" lastUpdateTime: "2025-12-12T16:26:59Z" message: all requirements found, attempting install phase: InstallReady reason: AllRequirementsMet - lastTransitionTime: "2025-12-12T16:27:00Z" lastUpdateTime: "2025-12-12T16:27:00Z" message: waiting for install components to report healthy phase: Installing reason: InstallSucceeded - lastTransitionTime: "2025-12-12T16:27:00Z" lastUpdateTime: "2025-12-12T16:27:01Z" message: 'installing: waiting for deployment obo-prometheus-operator to become ready: deployment "obo-prometheus-operator" not available: Deployment does not have minimum availability.' 
phase: Installing reason: InstallWaiting - lastTransitionTime: "2025-12-12T16:27:43Z" lastUpdateTime: "2025-12-12T16:27:43Z" message: install strategy completed with no errors phase: Succeeded reason: InstallSucceeded lastTransitionTime: "2025-12-12T16:27:43Z" lastUpdateTime: "2025-12-12T16:27:43Z" message: The operator is running in openshift-operators but is managing this namespace phase: Succeeded reason: Copied requirementStatus: - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: alertmanagerconfigs.monitoring.rhobs status: Present uuid: bd4f0381-0524-4296-8264-181d723362df version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: alertmanagers.monitoring.rhobs status: Present uuid: 4fda672f-3605-4bfa-85f4-c346ddf5fbba version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: monitoringstacks.monitoring.rhobs status: Present uuid: fb546113-65aa-41e9-a2aa-3c4e44e778da version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: observabilityinstallers.observability.openshift.io status: Present uuid: 1a6d05b1-1bc0-412b-814b-1f39e3847d86 version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: perses.perses.dev status: Present uuid: beeb7515-1cf5-4863-8d90-a15c34feb74d version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: persesdashboards.perses.dev status: Present uuid: 8e418188-c56d-4d7a-ba7d-60c68ef34f31 version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: persesdatasources.perses.dev status: Present uuid: 60307fce-12a4-4d2e-bf4f-022ada40564c version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: podmonitors.monitoring.rhobs status: Present uuid: 6cedb4a3-6858-46ca-95a0-f572d788af1d version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: probes.monitoring.rhobs status: Present uuid: 983e0d5d-5976-4e10-b23b-08afae82208a version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: prometheusagents.monitoring.rhobs status: Present uuid: 6663e2d5-d2a5-4596-9d63-b5e473c7aa47 version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: prometheuses.monitoring.rhobs status: Present uuid: 0838044d-3d38-4d96-9cb5-2e0fadccc5d3 version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: prometheusrules.monitoring.rhobs status: Present uuid: f9661705-9c1b-41fe-b5f5-2834a2f233da version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: scrapeconfigs.monitoring.rhobs status: Present uuid: 2b96ae9f-37d3-4b5e-8598-42b080191429 version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: servicemonitors.monitoring.rhobs 
status: Present uuid: f16a2947-4eb3-463d-b2b6-dcd2f33c38dd version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: thanosqueriers.monitoring.rhobs status: Present uuid: 407bed2f-5c09-40a3-b0c8-7de8b58e5e6f version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: thanosrulers.monitoring.rhobs status: Present uuid: d1ed0e9d-d220-4ec7-a444-381282aa8c79 version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: uiplugins.observability.openshift.io status: Present uuid: 2bc3cdf5-608d-41c2-a740-69086fb4b14e version: v1 - dependents: - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["apps"],"resources":["deployments","statefulsets"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","patch"],"apiGroups":[""],"resources":["events"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","patch","update","create","delete","list","watch"],"apiGroups":[""],"resources":["services","configmaps","secrets"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["perses.dev"],"resources":["perses"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["update"],"apiGroups":["perses.dev"],"resources":["perses/finalizers"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","patch","update"],"apiGroups":["perses.dev"],"resources":["perses/status"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["perses.dev"],"resources":["persesdashboards"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["update"],"apiGroups":["perses.dev"],"resources":["persesdashboards/finalizers"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","patch","update"],"apiGroups":["perses.dev"],"resources":["persesdashboards/status"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["perses.dev"],"resources":["persesdatasources"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["update"],"apiGroups":["perses.dev"],"resources":["persesdatasources/finalizers"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","patch","update"],"apiGroups":["perses.dev"],"resources":["persesdatasources/status"]} status: Satisfied version: v1 group: "" kind: ServiceAccount message: "" name: perses-operator status: Present version: v1 - dependents: - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster 
rule:{"verbs":["*"],"apiGroups":["monitoring.rhobs"],"resources":["alertmanagers","alertmanagers/finalizers","alertmanagers/status","alertmanagerconfigs","prometheuses","prometheuses/finalizers","prometheuses/status","prometheusagents","prometheusagents/finalizers","prometheusagents/status","thanosrulers","thanosrulers/finalizers","thanosrulers/status","scrapeconfigs","scrapeconfigs/status","servicemonitors","servicemonitors/status","podmonitors","podmonitors/status","probes","probes/status","prometheusrules"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["*"],"apiGroups":["apps"],"resources":["statefulsets"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["*"],"apiGroups":[""],"resources":["configmaps","secrets"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["list","delete"],"apiGroups":[""],"resources":["pods"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","create","update","delete"],"apiGroups":[""],"resources":["services","services/finalizers"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["list","watch"],"apiGroups":[""],"resources":["nodes"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list","watch"],"apiGroups":[""],"resources":["namespaces"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["patch","create"],"apiGroups":[""],"resources":["events"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list","watch"],"apiGroups":["networking.k8s.io"],"resources":["ingresses"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get"],"apiGroups":["storage.k8s.io"],"resources":["storageclasses"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","create","update","delete"],"apiGroups":[""],"resources":["endpoints"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["use"],"apiGroups":["security.openshift.io"],"resources":["securitycontextconstraints"],"resourceNames":["nonroot-v2","nonroot"]} status: Satisfied version: v1 group: "" kind: ServiceAccount message: "" name: obo-prometheus-operator status: Present version: v1 - dependents: - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["use"],"apiGroups":["security.openshift.io"],"resources":["securitycontextconstraints"],"resourceNames":["nonroot-v2","nonroot"]} status: Satisfied version: v1 group: "" kind: ServiceAccount message: "" name: obo-prometheus-operator-admission-webhook status: Present version: v1 - dependents: - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":[""],"resources":["configmaps","secrets","serviceaccounts","services"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster 
rule:{"verbs":["get","list","watch"],"apiGroups":[""],"resources":["endpoints","events","namespaces","nodes","persistentvolumeclaims","persistentvolumes","pods","replicationcontrollers"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["patch","update"],"apiGroups":[""],"resources":["services/finalizers"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list","watch"],"apiGroups":["apps"],"resources":["daemonsets","replicasets","statefulsets"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["apps"],"resources":["deployments"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create"],"apiGroups":["authentication.k8s.io"],"resources":["tokenreviews"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create"],"apiGroups":["authorization.k8s.io"],"resources":["subjectaccessreviews"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list","watch"],"apiGroups":["autoscaling"],"resources":["horizontalpodautoscalers"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list","watch"],"apiGroups":["batch"],"resources":["cronjobs","jobs"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list","watch"],"apiGroups":["config.openshift.io"],"resources":["clusterversions"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["console.openshift.io"],"resources":["consoleplugins"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list","watch"],"apiGroups":["extensions","networking.k8s.io"],"resources":["ingresses"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get"],"apiGroups":["loki.grafana.com"],"resources":["application","audit","infrastructure","network"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list"],"apiGroups":["loki.grafana.com"],"resources":["lokistacks"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list"],"apiGroups":["monitoring.coreos.com"],"resources":["alertmanagers/api"],"resourceNames":["main"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","get","update"],"apiGroups":["monitoring.coreos.com"],"resources":["prometheuses/api"],"resourceNames":["k8s"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["monitoring.coreos.com"],"resources":["servicemonitors"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster 
rule:{"verbs":["create","delete","list","patch","update","watch"],"apiGroups":["monitoring.rhobs"],"resources":["alertmanagers","prometheuses","servicemonitors","thanosqueriers"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","get","list","update","watch"],"apiGroups":["monitoring.rhobs"],"resources":["monitoringstacks"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","update"],"apiGroups":["monitoring.rhobs"],"resources":["monitoringstacks/finalizers","monitoringstacks/status"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["update"],"apiGroups":["monitoring.rhobs"],"resources":["thanosqueriers/finalizers"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","patch","update"],"apiGroups":["monitoring.rhobs"],"resources":["thanosqueriers/status"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list","watch"],"apiGroups":["networking.k8s.io"],"resources":["ingresses","networkpolicies"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["observability.openshift.io"],"resources":["observabilityinstallers","uiplugins"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["delete","get","patch","update"],"apiGroups":["observability.openshift.io"],"resources":["observabilityinstallers/finalizers","observabilityinstallers/status"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","update"],"apiGroups":["observability.openshift.io"],"resources":["uiplugins/finalizers","uiplugins/status"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["opentelemetry.io"],"resources":["opentelemetrycollectors"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list","watch"],"apiGroups":["opentelemetry.io"],"resources":["opentelemetrycollectors/status"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list","patch","watch"],"apiGroups":["operator.openshift.io"],"resources":["consoles"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["operators.coreos.com"],"resources":["clusterserviceversions","subscriptions"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["perses.dev"],"resources":["perses","persesdashboards","persesdatasources"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["update"],"apiGroups":["perses.dev"],"resources":["perses/finalizers","persesdashboards/finalizers","persesdatasources/finalizers"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: 
PolicyRule message: cluster rule:{"verbs":["get","patch","update"],"apiGroups":["perses.dev"],"resources":["perses/status","persesdashboards/status","persesdatasources/status"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["policy"],"resources":["poddisruptionbudgets"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["rbac.authorization.k8s.io"],"resources":["clusterrolebindings","clusterroles","rolebindings","roles"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["use"],"apiGroups":["security.openshift.io"],"resources":["securitycontextconstraints"],"resourceNames":["nonroot","nonroot-v2"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list","watch"],"apiGroups":["storage.k8s.io"],"resources":["storageclasses","volumeattachments"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create"],"apiGroups":["tempo.grafana.com"],"resources":["application"],"resourceNames":["traces"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["list"],"apiGroups":["tempo.grafana.com"],"resources":["tempomonolithics"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["create","delete","get","list","patch","update","watch"],"apiGroups":["tempo.grafana.com"],"resources":["tempostacks"]} status: Satisfied version: v1 - group: rbac.authorization.k8s.io kind: PolicyRule message: cluster rule:{"verbs":["get","list","watch"],"apiGroups":["tempo.grafana.com"],"resources":["tempostacks/status"]} status: Satisfied version: v1 group: "" kind: ServiceAccount message: "" name: observability-operator-sa status: Present version: v1 - apiVersion: operators.coreos.com/v1alpha1 kind: ClusterServiceVersion metadata: annotations: alm-examples: |- [ { "apiVersion": "elasticsearch.k8s.elastic.co/v1", "kind": "Elasticsearch", "metadata": { "name": "elasticsearch-sample" }, "spec": { "version": "9.2.0", "nodeSets": [ { "name": "default", "config": { "node.roles": ["master", "data"], "node.attr.attr_name": "attr_value", "node.store.allow_mmap": false }, "podTemplate": { "metadata": { "labels": { "foo": "bar" } }, "spec": { "containers": [ { "name": "elasticsearch", "resources": { "requests": { "memory": "4Gi", "cpu": 1 }, "limits": { "memory": "4Gi", "cpu": 2 } } } ] } }, "count": 3 } ] } }, { "apiVersion": "kibana.k8s.elastic.co/v1", "kind": "Kibana", "metadata": { "name": "kibana-sample" }, "spec": { "version": "9.2.0", "count": 1, "elasticsearchRef": { "name": "elasticsearch-sample" }, "podTemplate": { "metadata": { "labels": { "foo": "bar" } }, "spec": { "containers": [ { "name": "kibana", "resources": { "requests": { "memory": "1Gi", "cpu": 0.5 }, "limits": { "memory": "2Gi", "cpu": 2 } } } ] } } } }, { "apiVersion": "apm.k8s.elastic.co/v1", "kind": "ApmServer", "metadata": { "name": "apmserver-sample" }, "spec": { "version": "9.2.0", "count": 1, "elasticsearchRef": { "name": "elasticsearch-sample" } } }, { "apiVersion": "enterprisesearch.k8s.elastic.co/v1", "kind": "EnterpriseSearch", "metadata": { "name": "ent-sample" }, 
"spec": { "version": "9.2.0", "config": { "ent_search.external_url": "https://localhost:3002" }, "count": 1, "elasticsearchRef": { "name": "elasticsearch-sample" } } }, { "apiVersion": "beat.k8s.elastic.co/v1beta1", "kind": "Beat", "metadata": { "name": "heartbeat-sample" }, "spec": { "type": "heartbeat", "version": "9.2.0", "elasticsearchRef": { "name": "elasticsearch-sample" }, "config": { "heartbeat.monitors": [ { "type": "tcp", "schedule": "@every 5s", "hosts": [ "elasticsearch-sample-es-http.default.svc:9200" ] } ] }, "deployment": { "replicas": 1, "podTemplate": { "spec": { "securityContext": { "runAsUser": 0 } } } } } }, { "apiVersion": "agent.k8s.elastic.co/v1alpha1", "kind": "Agent", "metadata": { "name": "agent-sample" }, "spec": { "version": "9.2.0", "elasticsearchRefs": [ { "name": "elasticsearch-sample" } ], "daemonSet": {}, "config": { "inputs": [ { "name": "system-1", "revision": 1, "type": "system/metrics", "use_output": "default", "meta": { "package": { "name": "system", "version": "0.9.1" } }, "data_stream": { "namespace": "default" }, "streams": [ { "id": "system/metrics-system.cpu", "data_stream": { "dataset": "system.cpu", "type": "metrics" }, "metricsets": [ "cpu" ], "cpu.metrics": [ "percentages", "normalized_percentages" ], "period": "10s" } ] } ] } } }, { "apiVersion": "maps.k8s.elastic.co/v1alpha1", "kind": "ElasticMapsServer", "metadata": { "name": "ems-sample" }, "spec": { "version": "9.2.0", "count": 1, "elasticsearchRef": { "name": "elasticsearch-sample" } } }, { "apiVersion": "logstash.k8s.elastic.co/v1alpha1", "kind": "Logstash", "metadata" : { "name": "logstash-sample" }, "spec": { "version": "9.2.0", "count": 1 } } ] capabilities: Deep Insights categories: Database certified: "false" containerImage: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 createdAt: "2025-10-31 20:52:32" description: Run Elasticsearch, Kibana, APM Server, Beats, Enterprise Search, Elastic Agent, Elastic Maps Server and Logstash on Kubernetes and OpenShift features.operators.openshift.io/disconnected: "false" features.operators.openshift.io/fips-compliant: "false" features.operators.openshift.io/proxy-aware: "false" features.operators.openshift.io/tls-profiles: "false" features.operators.openshift.io/token-auth-aws: "false" features.operators.openshift.io/token-auth-azure: "false" features.operators.openshift.io/token-auth-gcp: "false" olm.operatorGroup: service-telemetry-operator-group olm.operatorNamespace: service-telemetry olm.targetNamespaces: service-telemetry operatorframework.io/properties: 
'{"properties":[{"type":"olm.gvk","value":{"group":"agent.k8s.elastic.co","kind":"Agent","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"apm.k8s.elastic.co","kind":"ApmServer","version":"v1"}},{"type":"olm.gvk","value":{"group":"apm.k8s.elastic.co","kind":"ApmServer","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"apm.k8s.elastic.co","kind":"ApmServer","version":"v1beta1"}},{"type":"olm.gvk","value":{"group":"autoscaling.k8s.elastic.co","kind":"ElasticsearchAutoscaler","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"beat.k8s.elastic.co","kind":"Beat","version":"v1beta1"}},{"type":"olm.gvk","value":{"group":"elasticsearch.k8s.elastic.co","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"elasticsearch.k8s.elastic.co","kind":"Elasticsearch","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"elasticsearch.k8s.elastic.co","kind":"Elasticsearch","version":"v1beta1"}},{"type":"olm.gvk","value":{"group":"enterprisesearch.k8s.elastic.co","kind":"EnterpriseSearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"enterprisesearch.k8s.elastic.co","kind":"EnterpriseSearch","version":"v1beta1"}},{"type":"olm.gvk","value":{"group":"kibana.k8s.elastic.co","kind":"Kibana","version":"v1"}},{"type":"olm.gvk","value":{"group":"kibana.k8s.elastic.co","kind":"Kibana","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"kibana.k8s.elastic.co","kind":"Kibana","version":"v1beta1"}},{"type":"olm.gvk","value":{"group":"logstash.k8s.elastic.co","kind":"Logstash","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"maps.k8s.elastic.co","kind":"ElasticMapsServer","version":"v1alpha1"}},{"type":"olm.gvk","value":{"group":"stackconfigpolicy.k8s.elastic.co","kind":"StackConfigPolicy","version":"v1alpha1"}},{"type":"olm.package","value":{"packageName":"elasticsearch-eck-operator-certified","version":"3.2.0"}}]}' operators.openshift.io/valid-subscription: Elastic Basic license repository: https://github.com/elastic/cloud-on-k8s support: elastic.co creationTimestamp: "2025-12-12T16:27:03Z" finalizers: - operators.coreos.com/csv-cleanup generation: 1 labels: olm.managed: "true" operators.coreos.com/elasticsearch-eck-operator-certified.service-telemetry: "" name: elasticsearch-eck-operator-certified.v3.2.0 namespace: service-telemetry resourceVersion: "42635" uid: 162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8 spec: apiservicedefinitions: {} cleanup: enabled: false customresourcedefinitions: owned: - description: Elastic Agent instance displayName: Elastic Agent kind: Agent name: agents.agent.k8s.elastic.co version: v1alpha1 - description: APM Server instance displayName: APM Server kind: ApmServer name: apmservers.apm.k8s.elastic.co version: v1 - description: Beats instance displayName: Beats kind: Beat name: beats.beat.k8s.elastic.co version: v1beta1 - description: Elastic Maps Server instance displayName: Elastic Maps Server kind: ElasticMapsServer name: elasticmapsservers.maps.k8s.elastic.co version: v1alpha1 - description: Instance of an Elasticsearch autoscaler displayName: Elasticsearch Autoscaler kind: ElasticsearchAutoscaler name: elasticsearchautoscalers.autoscaling.k8s.elastic.co version: v1alpha1 - description: Instance of an Elasticsearch cluster displayName: Elasticsearch Cluster kind: Elasticsearch name: elasticsearches.elasticsearch.k8s.elastic.co version: v1 - description: Enterprise Search instance displayName: Enterprise Search kind: EnterpriseSearch name: enterprisesearches.enterprisesearch.k8s.elastic.co version: v1 - description: 
Kibana instance displayName: Kibana kind: Kibana name: kibanas.kibana.k8s.elastic.co version: v1 - description: Logstash instance displayName: Logstash kind: Logstash name: logstashes.logstash.k8s.elastic.co version: v1alpha1 - description: Elastic Stack Config Policy displayName: Elastic Stack Config Policy kind: StackConfigPolicy name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co version: v1alpha1 description: |- Elastic Cloud on Kubernetes (ECK) is the official operator by Elastic for automating the deployment, provisioning, management, and orchestration of Elasticsearch, Kibana, APM Server, Beats, Enterprise Search, Elastic Agent, Elastic Maps Server, and Logstash on Kubernetes. Current features: * Elasticsearch, Kibana, APM Server, Enterprise Search, Beats, Elastic Agent, Elastic Maps Server, and Logstash deployments * TLS Certificates management * Safe Elasticsearch cluster configuration and topology changes * Persistent volumes usage * Custom node configuration and attributes * Secure settings keystore updates Supported versions: * Kubernetes 1.30-1.34 * OpenShift 4.15-4.19 * Google Kubernetes Engine (GKE), Azure Kubernetes Service (AKS), and Amazon Elastic Kubernetes Service (EKS) * Elasticsearch, Kibana, APM Server: 7.17+, 8+, 9+ * Enterprise Search: 7.17+, 8+ * Beats: 7.17+, 8+, 9+ * Elastic Agent: 7.17+, 8+, 9+ * Elastic Maps Server: 7.17+, 8+, 9+ * Logstash 8.12+, 9+ ECK should work with all conformant installers as listed in these [FAQs](https://github.com/cncf/k8s-conformance/blob/master/faq.md#what-is-a-distribution-hosted-platform-and-an-installer). Distributions include source patches and so may not work as-is with ECK. Alpha, beta, and stable API versions follow the same [conventions used by Kubernetes](https://kubernetes.io/docs/concepts/overview/kubernetes-api/#api-versioning). See the full [Elastic support matrix](https://www.elastic.co/support/matrix#matrix_kubernetes) for more information. See the [Quickstart](https://www.elastic.co/guide/en/cloud-on-k8s/3.2/k8s-quickstart.html) to get started with ECK. 
displayName: Elasticsearch (ECK) Operator icon: - base64data: PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcKICAgeG1sbnM6ZGM9Imh0dHA6Ly9wdXJsLm9yZy9kYy9lbGVtZW50cy8xLjEvIgogICB4bWxuczpjYz0iaHR0cDovL2NyZWF0aXZlY29tbW9ucy5vcmcvbnMjIgogICB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiCiAgIHhtbG5zOnN2Zz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIKICAgeG1sbnM6c29kaXBvZGk9Imh0dHA6Ly9zb2RpcG9kaS5zb3VyY2Vmb3JnZS5uZXQvRFREL3NvZGlwb2RpLTAuZHRkIgogICB4bWxuczppbmtzY2FwZT0iaHR0cDovL3d3dy5pbmtzY2FwZS5vcmcvbmFtZXNwYWNlcy9pbmtzY2FwZSIKICAgaW5rc2NhcGU6dmVyc2lvbj0iMS4wICg0MDM1YTRmYjQ5LCAyMDIwLTA1LTAxKSIKICAgaGVpZ2h0PSI2NCIKICAgd2lkdGg9IjY0IgogICBzb2RpcG9kaTpkb2NuYW1lPSJjbHVzdGVyLWNvbG9yLTY0eDY0LnN2ZyIKICAgeG1sOnNwYWNlPSJwcmVzZXJ2ZSIKICAgdmlld0JveD0iMCAwIDY0IDY0IgogICB5PSIwcHgiCiAgIHg9IjBweCIKICAgaWQ9IkxheWVyXzEiCiAgIHZlcnNpb249IjEuMSI+PG1ldGFkYXRhCiAgIGlkPSJtZXRhZGF0YTExOCI+PHJkZjpSREY+PGNjOldvcmsKICAgICAgIHJkZjphYm91dD0iIj48ZGM6Zm9ybWF0PmltYWdlL3N2Zyt4bWw8L2RjOmZvcm1hdD48ZGM6dHlwZQogICAgICAgICByZGY6cmVzb3VyY2U9Imh0dHA6Ly9wdXJsLm9yZy9kYy9kY21pdHlwZS9TdGlsbEltYWdlIiAvPjxkYzp0aXRsZT48L2RjOnRpdGxlPjwvY2M6V29yaz48L3JkZjpSREY+PC9tZXRhZGF0YT48ZGVmcwogICBpZD0iZGVmczExNiIgLz48c29kaXBvZGk6bmFtZWR2aWV3CiAgIGlua3NjYXBlOmN1cnJlbnQtbGF5ZXI9IkxheWVyXzEiCiAgIGlua3NjYXBlOndpbmRvdy1tYXhpbWl6ZWQ9IjEiCiAgIGlua3NjYXBlOndpbmRvdy15PSIwIgogICBpbmtzY2FwZTp3aW5kb3cteD0iMCIKICAgaW5rc2NhcGU6Y3k9IjUwLjkwMzQ1NiIKICAgaW5rc2NhcGU6Y3g9IjEyIgogICBpbmtzY2FwZTp6b29tPSIzNC45NTgzMzMiCiAgIGZpdC1tYXJnaW4tYm90dG9tPSIwIgogICBmaXQtbWFyZ2luLXJpZ2h0PSIwIgogICBmaXQtbWFyZ2luLWxlZnQ9IjAiCiAgIGZpdC1tYXJnaW4tdG9wPSIwIgogICBzaG93Z3JpZD0iZmFsc2UiCiAgIGlkPSJuYW1lZHZpZXcxMTQiCiAgIGlua3NjYXBlOndpbmRvdy1oZWlnaHQ9IjEzODgiCiAgIGlua3NjYXBlOndpbmRvdy13aWR0aD0iMjU2MCIKICAgaW5rc2NhcGU6cGFnZXNoYWRvdz0iMiIKICAgaW5rc2NhcGU6cGFnZW9wYWNpdHk9IjAiCiAgIGd1aWRldG9sZXJhbmNlPSIxMCIKICAgZ3JpZHRvbGVyYW5jZT0iMTAiCiAgIG9iamVjdHRvbGVyYW5jZT0iMTAiCiAgIGJvcmRlcm9wYWNpdHk9IjEiCiAgIGJvcmRlcmNvbG9yPSIjNjY2NjY2IgogICBwYWdlY29sb3I9IiNmZmZmZmYiIC8+CjxzdHlsZQogICBpZD0ic3R5bGU5MSIKICAgdHlwZT0idGV4dC9jc3MiPgoJLnN0MHtmaWxsOiNGRkQxMDY7fQoJLnN0MXtmaWxsOiMyMUJBQjA7fQoJLnN0MntmaWxsOiNFRTRGOTc7fQoJLnN0M3tmaWxsOiMxNEE3REY7fQoJLnN0NHtmaWxsOiM5MUM3M0U7fQoJLnN0NXtmaWxsOiMwMjc5QTA7fQoJLnN0NntmaWxsOm5vbmU7fQo8L3N0eWxlPgo8ZwogICB0cmFuc2Zvcm09InNjYWxlKDIuNjU1NjAxNywyLjY2NjY2NjcpIgogICBpZD0iZzEwOSI+Cgk8ZwogICBpZD0iZzEwNyI+CgkJPGcKICAgaWQ9ImcxMDUiPgoJCQk8cGF0aAogICBpZD0icGF0aDkzIgogICBkPSJtIDkuMiwxMC4yIDUuNywyLjYgNS43LC01IEMgMjAuNyw3LjQgMjAuNyw3IDIwLjcsNi41IDIwLjcsMyAxNy44LDAuMSAxNC4zLDAuMSAxMi4yLDAuMSAxMC4yLDEuMSA5LDIuOSBsIC0xLDUgeiIKICAgY2xhc3M9InN0MCIgLz4KCQkJPHBhdGgKICAgaWQ9InBhdGg5NSIKICAgZD0ibSAzLjMsMTYuMiBjIC0wLjEsMC40IC0wLjEsMC44IC0wLjEsMS4zIDAsMy41IDIuOSw2LjQgNi40LDYuNCAyLjEsMCA0LjEsLTEuMSA1LjMsLTIuOCBsIDAuOSwtNC45IC0xLjMsLTIuNCAtNS43LC0yLjYgeiIKICAgY2xhc3M9InN0MSIgLz4KCQkJPHBhdGgKICAgaWQ9InBhdGg5NyIKICAgZD0iTSAzLjMsNi40IDcuMiw3LjMgOCwyLjkgQyA3LjUsMi40IDYuOSwyLjIgNi4yLDIuMiA0LjUsMi4yIDMuMSwzLjYgMy4xLDUuMyAzLjEsNS43IDMuMiw2IDMuMyw2LjQiCiAgIGNsYXNzPSJzdDIiIC8+CgkJCTxwYXRoCiAgIGlkPSJwYXRoOTkiCiAgIGQ9Im0gMyw3LjMgYyAtMS43LDAuNiAtMywyLjIgLTMsNC4xIDAsMS44IDEuMSwzLjQgMi44LDQgbCA1LjUsLTQuOSAtMSwtMi4xIHoiCiAgIGNsYXNzPSJzdDMiIC8+CgkJCTxwYXRoCiAgIGlkPSJwYXRoMTAxIgogICBkPSJtIDE2LDIxLjEgYyAwLjUsMC40IDEuMiwwLjYgMS45LDAuNiAxLjcsMCAzLjEsLTEuNCAzLjEsLTMuMSAwLC0wLjQgLTAuMSwtMC43IC0wLjIsLTEuMSBsIC0zLjksLTAuOSB6IgogICBjbGFzcz0ic3Q0IiAvPgoJCQk8cGF0aAogICBpZD0icGF0aDEwMyIKICAgZD0ibSAxNi44LDE1LjcgNC4zLDEgYyAxLjcsLTAuNiAzL
C0yLjIgMywtNC4xIDAsLTEuOCAtMS4xLC0zLjQgLTIuOCwtNCBsIC01LjYsNC45IHoiCiAgIGNsYXNzPSJzdDUiIC8+CgkJPC9nPgoJPC9nPgo8L2c+CjxyZWN0CiAgIHN0eWxlPSJzdHJva2Utd2lkdGg6Mi42NjExMyIKICAgeT0iMCIKICAgeD0iMCIKICAgaWQ9InJlY3QxMTEiCiAgIGhlaWdodD0iNjQiCiAgIHdpZHRoPSI2My43MzQ0NCIKICAgY2xhc3M9InN0NiIgLz4KPC9zdmc+Cg== mediatype: image/svg+xml install: spec: deployments: - name: elastic-operator spec: replicas: 1 selector: matchLabels: control-plane: elastic-operator strategy: {} template: metadata: annotations: co.elastic.logs/raw: '[{"type":"filestream","enabled":true,"id":"eck-container-logs-${data.kubernetes.container.id}","paths":["/var/log/containers/*${data.kubernetes.container.id}.log"],"parsers":[{"container":{}},{"ndjson":{"keys_under_root":true}}],"prospector.scanner.symlinks":true,"processors":[{"convert":{"mode":"rename","ignore_missing":true,"fields":[{"from":"error","to":"_error"}]}},{"convert":{"mode":"rename","ignore_missing":true,"fields":[{"from":"_error","to":"error.message"}]}},{"convert":{"mode":"rename","ignore_missing":true,"fields":[{"from":"source","to":"_source"}]}},{"convert":{"mode":"rename","ignore_missing":true,"fields":[{"from":"_source","to":"event.source"}]}}]}]' creationTimestamp: null labels: control-plane: elastic-operator spec: containers: - args: - manager - --config=/conf/eck.yaml - --manage-webhook-certs=false - --enable-webhook - --ubi-only - --distribution-channel=certified-operators env: - name: NAMESPACES valueFrom: fieldRef: fieldPath: metadata.annotations['olm.targetNamespaces'] - name: OPERATOR_NAMESPACE valueFrom: fieldRef: fieldPath: metadata.annotations['olm.operatorNamespace'] - name: OPERATOR_IMAGE value: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 image: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 name: manager ports: - containerPort: 9443 name: https-webhook protocol: TCP resources: limits: cpu: "1" memory: 1Gi requests: cpu: 100m memory: 150Mi serviceAccountName: elastic-operator terminationGracePeriodSeconds: 10 permissions: - rules: - apiGroups: - authorization.k8s.io resources: - subjectaccessreviews verbs: - create - apiGroups: - coordination.k8s.io resources: - leases verbs: - create - apiGroups: - coordination.k8s.io resourceNames: - elastic-operator-leader resources: - leases verbs: - get - watch - update - apiGroups: - "" resources: - endpoints verbs: - get - list - watch - apiGroups: - "" resources: - pods - events - persistentvolumeclaims - secrets - services - configmaps verbs: - get - list - watch - create - update - patch - delete - apiGroups: - apps resources: - deployments - statefulsets - daemonsets verbs: - get - list - watch - create - update - patch - delete - apiGroups: - policy resources: - poddisruptionbudgets verbs: - get - list - watch - create - update - patch - delete - apiGroups: - elasticsearch.k8s.elastic.co resources: - elasticsearches - elasticsearches/status - elasticsearches/finalizers verbs: - get - list - watch - create - update - patch - apiGroups: - autoscaling.k8s.elastic.co resources: - elasticsearchautoscalers - elasticsearchautoscalers/status - elasticsearchautoscalers/finalizers verbs: - get - list - watch - create - update - patch - apiGroups: - kibana.k8s.elastic.co resources: - kibanas - kibanas/status - kibanas/finalizers verbs: - get - list - watch - create - update - patch - apiGroups: - apm.k8s.elastic.co resources: - apmservers - apmservers/status - 
apmservers/finalizers verbs: - get - list - watch - create - update - patch - apiGroups: - enterprisesearch.k8s.elastic.co resources: - enterprisesearches - enterprisesearches/status - enterprisesearches/finalizers verbs: - get - list - watch - create - update - patch - apiGroups: - beat.k8s.elastic.co resources: - beats - beats/status - beats/finalizers verbs: - get - list - watch - create - update - patch - apiGroups: - agent.k8s.elastic.co resources: - agents - agents/status - agents/finalizers verbs: - get - list - watch - create - update - patch - apiGroups: - maps.k8s.elastic.co resources: - elasticmapsservers - elasticmapsservers/status - elasticmapsservers/finalizers verbs: - get - list - watch - create - update - patch - apiGroups: - stackconfigpolicy.k8s.elastic.co resources: - stackconfigpolicies - stackconfigpolicies/status - stackconfigpolicies/finalizers verbs: - get - list - watch - create - update - patch - apiGroups: - logstash.k8s.elastic.co resources: - logstashes - logstashes/status - logstashes/finalizers verbs: - get - list - watch - create - update - patch - apiGroups: - storage.k8s.io resources: - storageclasses verbs: - get - list - watch - apiGroups: - admissionregistration.k8s.io resources: - validatingwebhookconfigurations verbs: - get - list - watch - create - update - patch - delete - apiGroups: - "" resources: - nodes verbs: - get - list - watch serviceAccountName: elastic-operator strategy: deployment installModes: - supported: true type: OwnNamespace - supported: true type: SingleNamespace - supported: true type: MultiNamespace - supported: true type: AllNamespaces keywords: - elasticsearch - kibana - analytics - search - database - apm - logstash links: - name: Documentation url: https://www.elastic.co/guide/en/cloud-on-k8s/3.2/index.html maintainers: - email: eck@elastic.co name: Elastic maturity: stable minKubeVersion: 1.21.0 provider: name: Elastic relatedImages: - image: registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105 name: eck-operator replaces: elasticsearch-eck-operator-certified.v3.1.0 version: 3.2.0 webhookdefinitions: - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore generateName: elastic-agent-validation-v1alpha1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - agent.k8s.elastic.co apiVersions: - v1alpha1 operations: - CREATE - UPDATE resources: - agents sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-agent-k8s-elastic-co-v1alpha1-agent - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore generateName: elastic-apm-validation-v1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - apm.k8s.elastic.co apiVersions: - v1 operations: - CREATE - UPDATE resources: - apmservers sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-apm-k8s-elastic-co-v1-apmserver - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore generateName: elastic-apm-validation-v1beta1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - apm.k8s.elastic.co apiVersions: - v1beta1 operations: - CREATE - UPDATE resources: - apmservers sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-apm-k8s-elastic-co-v1beta1-apmserver - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: 
Ignore generateName: elastic-beat-validation-v1beta1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - beat.k8s.elastic.co apiVersions: - v1beta1 operations: - CREATE - UPDATE resources: - beats sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-beat-k8s-elastic-co-v1beta1-beat - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore generateName: elastic-ent-validation-v1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - enterprisesearch.k8s.elastic.co apiVersions: - v1 operations: - CREATE - UPDATE resources: - enterprisesearches sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-enterprisesearch-k8s-elastic-co-v1-enterprisesearch - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore generateName: elastic-ent-validation-v1beta1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - enterprisesearch.k8s.elastic.co apiVersions: - v1beta1 operations: - CREATE - UPDATE resources: - enterprisesearches sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-enterprisesearch-k8s-elastic-co-v1beta1-enterprisesearch - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore generateName: elastic-es-validation-v1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - elasticsearch.k8s.elastic.co apiVersions: - v1 operations: - CREATE - UPDATE resources: - elasticsearches sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-elasticsearch-k8s-elastic-co-v1-elasticsearch - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore generateName: elastic-es-validation-v1beta1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - elasticsearch.k8s.elastic.co apiVersions: - v1beta1 operations: - CREATE - UPDATE resources: - elasticsearches sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-elasticsearch-k8s-elastic-co-v1beta1-elasticsearch - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore generateName: elastic-ems-validation-v1alpha1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - maps.k8s.elastic.co apiVersions: - v1alpha1 operations: - CREATE - UPDATE resources: - mapsservers sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-ems-k8s-elastic-co-v1alpha1-mapsservers - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore generateName: elastic-kb-validation-v1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - kibana.k8s.elastic.co apiVersions: - v1 operations: - CREATE - UPDATE resources: - kibanas sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-kibana-k8s-elastic-co-v1-kibana - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore generateName: elastic-kb-validation-v1beta1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - kibana.k8s.elastic.co apiVersions: - v1beta1 operations: - CREATE - UPDATE resources: - kibanas sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-kibana-k8s-elastic-co-v1beta1-kibana - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore 
generateName: elastic-esa-validation-v1alpha1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - autoscaling.k8s.elastic.co apiVersions: - v1alpha1 operations: - CREATE - UPDATE resources: - elasticsearchautoscalers sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-autoscaling-k8s-elastic-co-v1alpha1-elasticsearchautoscaler - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore generateName: elastic-scp-validation-v1alpha1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - stackconfigpolicy.k8s.elastic.co apiVersions: - v1alpha1 operations: - CREATE - UPDATE resources: - stackconfigpolicies sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-scp-k8s-elastic-co-v1alpha1-stackconfigpolicies - admissionReviewVersions: - v1 containerPort: 443 deploymentName: elastic-operator failurePolicy: Ignore generateName: elastic-logstash-validation-v1alpha1.k8s.elastic.co matchPolicy: Exact rules: - apiGroups: - logstash.k8s.elastic.co apiVersions: - v1alpha1 operations: - CREATE - UPDATE resources: - logstashes sideEffects: None targetPort: 9443 type: ValidatingAdmissionWebhook webhookPath: /validate-logstash-k8s-elastic-co-v1alpha1-logstash status: certsLastUpdated: "2025-12-12T16:27:05Z" certsRotateAt: "2027-12-11T16:27:04Z" cleanup: {} conditions: - lastTransitionTime: "2025-12-12T16:27:03Z" lastUpdateTime: "2025-12-12T16:27:03Z" message: requirements not yet checked phase: Pending reason: RequirementsUnknown - lastTransitionTime: "2025-12-12T16:27:03Z" lastUpdateTime: "2025-12-12T16:27:03Z" message: one or more requirements couldn't be found phase: Pending reason: RequirementsNotMet - lastTransitionTime: "2025-12-12T16:27:04Z" lastUpdateTime: "2025-12-12T16:27:04Z" message: all requirements found, attempting install phase: InstallReady reason: AllRequirementsMet - lastTransitionTime: "2025-12-12T16:27:04Z" lastUpdateTime: "2025-12-12T16:27:04Z" message: waiting for install components to report healthy phase: Installing reason: InstallSucceeded - lastTransitionTime: "2025-12-12T16:27:04Z" lastUpdateTime: "2025-12-12T16:27:07Z" message: 'installing: waiting for deployment elastic-operator to become ready: deployment "elastic-operator" not available: Deployment does not have minimum availability.' 
phase: Installing reason: InstallWaiting - lastTransitionTime: "2025-12-12T16:27:31Z" lastUpdateTime: "2025-12-12T16:27:31Z" message: install strategy completed with no errors phase: Succeeded reason: InstallSucceeded lastTransitionTime: "2025-12-12T16:27:31Z" lastUpdateTime: "2025-12-12T16:27:31Z" message: install strategy completed with no errors phase: Succeeded reason: InstallSucceeded requirementStatus: - group: operators.coreos.com kind: ClusterServiceVersion message: CSV minKubeVersion (1.21.0) less than server version (v1.33.5) name: elasticsearch-eck-operator-certified.v3.2.0 status: Present version: v1alpha1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: agents.agent.k8s.elastic.co status: Present uuid: 80e8fdc8-6be7-45b2-bfc7-f86334e99030 version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: apmservers.apm.k8s.elastic.co status: Present uuid: a6b32a36-7a86-44fc-a7e2-8950c55c1296 version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: beats.beat.k8s.elastic.co status: Present uuid: 9ba97ad7-3500-4447-9937-5b974f093a81 version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: elasticmapsservers.maps.k8s.elastic.co status: Present uuid: 5167abdf-a93d-477f-b5cd-d9d3680622d4 version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: elasticsearchautoscalers.autoscaling.k8s.elastic.co status: Present uuid: e3f95fc0-8259-4b63-8743-b65f6fba926d version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: elasticsearches.elasticsearch.k8s.elastic.co status: Present uuid: d13c3251-6397-486d-b294-14b9f4c30762 version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: enterprisesearches.enterprisesearch.k8s.elastic.co status: Present uuid: 5cef7994-84ad-4a87-b9e2-163fb5db039e version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: kibanas.kibana.k8s.elastic.co status: Present uuid: e8dc7587-af57-416c-9cfb-1702fe87860e version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: logstashes.logstash.k8s.elastic.co status: Present uuid: d8ed4e16-d503-4f18-a616-d7a8cc685317 version: v1 - group: apiextensions.k8s.io kind: CustomResourceDefinition message: CRD is present and Established condition is true name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co status: Present uuid: 9d325e42-4d4f-4e8c-92e9-081dd02c7489 version: v1 kind: List metadata: resourceVersion: "" home/zuul/zuul-output/logs/controller/post_oc_describe_build_service-telemetry-framework-index-1.log0000644000175000017500000000460615117042537033442 0ustar zuulzuulName: service-telemetry-framework-index-1 Namespace: service-telemetry Created: 11 minutes ago Labels: build=service-telemetry-framework-index buildconfig=service-telemetry-framework-index openshift.io/build-config.name=service-telemetry-framework-index openshift.io/build.start-policy=Serial Annotations: openshift.io/build-config.name=service-telemetry-framework-index 
openshift.io/build.number=1 openshift.io/build.pod-name=service-telemetry-framework-index-1-build Status: Cancelled (The build was cancelled by the user.) Started: Fri, 12 Dec 2025 16:28:18 UTC Duration: 7s Build Config: service-telemetry-framework-index Build Pod: service-telemetry-framework-index-1-build Strategy: Docker Dockerfile: # The base image is expected to contain # /bin/opm (with a serve subcommand) and /bin/grpc_health_probe FROM quay.io/openshift/origin-operator-registry:4.13 COPY --chmod=666 index.yaml /configs/ RUN mkdir /tmp/auth/ # we need the contents of the mounted build volume from secret placed into config.json RUN cp /opt/app-root/auth/.dockerconfigjson /tmp/auth/config.json RUN DOCKER_CONFIG=/tmp/auth /bin/opm --skip-tls-verify render image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-operator-bundle:nightly-head image-registry.openshift-image-registry.svc:5000/service-telemetry/smart-gateway-operator-bundle:nightly-head --output=yaml >> /configs/index.yaml ENTRYPOINT ["/bin/opm"] CMD ["serve", "/configs"] # Set DC-specific label for the location of the DC root directory # in the image LABEL operators.operatorframework.io.index.configs.v1=/configs From Image: DockerImage quay.io/openshift/origin-operator-registry@sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a Volumes: Name Source Type Source Mounts pull-secret Secret service-telemetry-framework-index-dockercfg /opt/app-root/auth Output to: ImageStreamTag service-telemetry-framework-index:latest Binary: provided on build Push Secret: builder-dockercfg-ff94g Build trigger cause: Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal BuildStarted 11m build-controller Build service-telemetry/service-telemetry-framework-index-1 is now running Normal BuildCancelled 11m (x2 over 11m) build-controller Build service-telemetry/service-telemetry-framework-index-1 has been cancelled home/zuul/zuul-output/logs/controller/post_oc_describe_build_service-telemetry-framework-index-2.log0000644000175000017500000000456315117042537033445 0ustar zuulzuulName: service-telemetry-framework-index-2 Namespace: service-telemetry Created: 11 minutes ago Labels: build=service-telemetry-framework-index buildconfig=service-telemetry-framework-index openshift.io/build-config.name=service-telemetry-framework-index openshift.io/build.start-policy=Serial Annotations: openshift.io/build-config.name=service-telemetry-framework-index openshift.io/build.number=2 openshift.io/build.pod-name=service-telemetry-framework-index-2-build Status: Cancelled (The build was cancelled by the user.) 
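For reference, the Dockerfile captured in these build descriptions renders two bundle images into a file-based catalog with opm. A minimal sketch of running the equivalent render step by hand, assuming opm is available locally and a registry auth file has been saved as auth.json (that file name and the temporary directory are illustrative only):

  # Mirror the RUN step from the captured Dockerfile: point DOCKER_CONFIG at a
  # directory holding config.json, then render the two bundles into index.yaml.
  workdir="$(mktemp -d)"
  cp auth.json "${workdir}/config.json"
  DOCKER_CONFIG="${workdir}" opm --skip-tls-verify render \
    image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-operator-bundle:nightly-head \
    image-registry.openshift-image-registry.svc:5000/service-telemetry/smart-gateway-operator-bundle:nightly-head \
    --output=yaml >> index.yaml

In the captured builds this command runs inside quay.io/openshift/origin-operator-registry:4.13, where the image-registry.openshift-image-registry.svc:5000 hostname resolves; outside the cluster those bundle references would have to be replaced with externally routable image names.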
Started: Fri, 12 Dec 2025 16:28:40 UTC Duration: 10s Build Config: service-telemetry-framework-index Build Pod: service-telemetry-framework-index-2-build Strategy: Docker Dockerfile: # The base image is expected to contain # /bin/opm (with a serve subcommand) and /bin/grpc_health_probe FROM quay.io/openshift/origin-operator-registry:4.13 COPY --chmod=666 index.yaml /configs/ RUN mkdir /tmp/auth/ # we need the contents of the mounted build volume from secret placed into config.json RUN cp /opt/app-root/auth/.dockerconfigjson /tmp/auth/config.json RUN DOCKER_CONFIG=/tmp/auth /bin/opm --skip-tls-verify render image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-operator-bundle:nightly-head image-registry.openshift-image-registry.svc:5000/service-telemetry/smart-gateway-operator-bundle:nightly-head --output=yaml >> /configs/index.yaml ENTRYPOINT ["/bin/opm"] CMD ["serve", "/configs"] # Set DC-specific label for the location of the DC root directory # in the image LABEL operators.operatorframework.io.index.configs.v1=/configs From Image: DockerImage quay.io/openshift/origin-operator-registry@sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a Volumes: Name Source Type Source Mounts pull-secret Secret service-telemetry-framework-index-dockercfg /opt/app-root/auth Output to: ImageStreamTag service-telemetry-framework-index:latest Binary: provided on build Push Secret: builder-dockercfg-ff94g Build trigger cause: Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal BuildStarted 11m build-controller Build service-telemetry/service-telemetry-framework-index-2 is now running Normal BuildCancelled 11m build-controller Build service-telemetry/service-telemetry-framework-index-2 has been cancelled home/zuul/zuul-output/logs/controller/post_oc_describe_build_service-telemetry-framework-index-3.log0000644000175000017500000000456215117042540033437 0ustar zuulzuulName: service-telemetry-framework-index-3 Namespace: service-telemetry Created: 10 minutes ago Labels: build=service-telemetry-framework-index buildconfig=service-telemetry-framework-index openshift.io/build-config.name=service-telemetry-framework-index openshift.io/build.start-policy=Serial Annotations: openshift.io/build-config.name=service-telemetry-framework-index openshift.io/build.number=3 openshift.io/build.pod-name=service-telemetry-framework-index-3-build Status: Cancelled (The build was cancelled by the user.) 
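The cancelled index builds shown in these descriptions can be inspected with the standard oc build commands; a short sketch, using the namespace and build names exactly as they appear in the logs above:

  # List the index builds and show what a cancelled build produced before it stopped.
  oc -n service-telemetry get builds
  oc -n service-telemetry describe build service-telemetry-framework-index-3
  oc -n service-telemetry logs build/service-telemetry-framework-index-3 --timestamps

For a build cancelled within a few seconds of starting, as here, the log command may return little or nothing.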
Started: Fri, 12 Dec 2025 16:29:03 UTC Duration: 3s Build Config: service-telemetry-framework-index Build Pod: service-telemetry-framework-index-3-build Strategy: Docker Dockerfile: # The base image is expected to contain # /bin/opm (with a serve subcommand) and /bin/grpc_health_probe FROM quay.io/openshift/origin-operator-registry:4.13 COPY --chmod=666 index.yaml /configs/ RUN mkdir /tmp/auth/ # we need the contents of the mounted build volume from secret placed into config.json RUN cp /opt/app-root/auth/.dockerconfigjson /tmp/auth/config.json RUN DOCKER_CONFIG=/tmp/auth /bin/opm --skip-tls-verify render image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-operator-bundle:nightly-head image-registry.openshift-image-registry.svc:5000/service-telemetry/smart-gateway-operator-bundle:nightly-head --output=yaml >> /configs/index.yaml ENTRYPOINT ["/bin/opm"] CMD ["serve", "/configs"] # Set DC-specific label for the location of the DC root directory # in the image LABEL operators.operatorframework.io.index.configs.v1=/configs From Image: DockerImage quay.io/openshift/origin-operator-registry@sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a Volumes: Name Source Type Source Mounts pull-secret Secret service-telemetry-framework-index-dockercfg /opt/app-root/auth Output to: ImageStreamTag service-telemetry-framework-index:latest Binary: provided on build Push Secret: builder-dockercfg-ff94g Build trigger cause: Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal BuildStarted 10m build-controller Build service-telemetry/service-telemetry-framework-index-3 is now running Normal BuildCancelled 10m build-controller Build service-telemetry/service-telemetry-framework-index-3 has been cancelled home/zuul/zuul-output/logs/controller/post_oc_describe_build_service-telemetry-framework-index-4.log0000644000175000017500000000456215117042540033440 0ustar zuulzuulName: service-telemetry-framework-index-4 Namespace: service-telemetry Created: 10 minutes ago Labels: build=service-telemetry-framework-index buildconfig=service-telemetry-framework-index openshift.io/build-config.name=service-telemetry-framework-index openshift.io/build.start-policy=Serial Annotations: openshift.io/build-config.name=service-telemetry-framework-index openshift.io/build.number=4 openshift.io/build.pod-name=service-telemetry-framework-index-4-build Status: Cancelled (The build was cancelled by the user.) 
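Because these are Binary-source builds ("Binary: provided on build" in the descriptions), each run is normally triggered by streaming a local build context to the BuildConfig. A hedged sketch of re-triggering and, if necessary, cancelling such a build; the local directory name and the build number in the cancel command are hypothetical:

  # Stream a local context (containing index.yaml) to the BuildConfig and follow it,
  # then cancel a specific build by name if it is no longer needed.
  oc -n service-telemetry start-build service-telemetry-framework-index \
    --from-dir=./framework-index-context --follow --wait
  oc -n service-telemetry cancel-build service-telemetry-framework-index-5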
Started: Fri, 12 Dec 2025 16:29:19 UTC Duration: 2s Build Config: service-telemetry-framework-index Build Pod: service-telemetry-framework-index-4-build Strategy: Docker Dockerfile: # The base image is expected to contain # /bin/opm (with a serve subcommand) and /bin/grpc_health_probe FROM quay.io/openshift/origin-operator-registry:4.13 COPY --chmod=666 index.yaml /configs/ RUN mkdir /tmp/auth/ # we need the contents of the mounted build volume from secret placed into config.json RUN cp /opt/app-root/auth/.dockerconfigjson /tmp/auth/config.json RUN DOCKER_CONFIG=/tmp/auth /bin/opm --skip-tls-verify render image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-operator-bundle:nightly-head image-registry.openshift-image-registry.svc:5000/service-telemetry/smart-gateway-operator-bundle:nightly-head --output=yaml >> /configs/index.yaml ENTRYPOINT ["/bin/opm"] CMD ["serve", "/configs"] # Set DC-specific label for the location of the DC root directory # in the image LABEL operators.operatorframework.io.index.configs.v1=/configs From Image: DockerImage quay.io/openshift/origin-operator-registry@sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a Volumes: Name Source Type Source Mounts pull-secret Secret service-telemetry-framework-index-dockercfg /opt/app-root/auth Output to: ImageStreamTag service-telemetry-framework-index:latest Binary: provided on build Push Secret: builder-dockercfg-ff94g Build trigger cause: Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal BuildStarted 10m build-controller Build service-telemetry/service-telemetry-framework-index-4 is now running Normal BuildCancelled 10m build-controller Build service-telemetry/service-telemetry-framework-index-4 has been cancelled home/zuul/zuul-output/logs/controller/post_pv.log0000644000175000017500000000066215117042541021452 0ustar zuulzuulNAME CAPACITY ACCESS MODES RECLAIM POLICY STATUS CLAIM STORAGECLASS VOLUMEATTRIBUTESCLASS REASON AGE pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 30Gi RWX Retain Bound openshift-image-registry/crc-image-registry-storage crc-csi-hostpath-provisioner 39d home/zuul/zuul-output/logs/controller/post_pvc.log0000644000175000017500000000006315117042541021610 0ustar zuulzuulNo resources found in service-telemetry namespace. home/zuul/zuul-output/logs/controller/logs_sto.log0000644000175000017500000000024715117042542021611 0ustar zuulzuulerror: expected 'logs [-f] [-p] (POD | TYPE/NAME) [-c CONTAINER]'. POD or TYPE/NAME is a required argument for the logs command See 'oc logs -h' for help and examples home/zuul/zuul-output/logs/controller/logs_sgo.log0000644000175000017500000000024715117042542021574 0ustar zuulzuulerror: expected 'logs [-f] [-p] (POD | TYPE/NAME) [-c CONTAINER]'. POD or TYPE/NAME is a required argument for the logs command See 'oc logs -h' for help and examples home/zuul/zuul-output/logs/controller/logs_qdr.log0000644000175000017500000000024715117042542021572 0ustar zuulzuulerror: expected 'logs [-f] [-p] (POD | TYPE/NAME) [-c CONTAINER]'. 
POD or TYPE/NAME is a required argument for the logs command See 'oc logs -h' for help and examples home/zuul/zuul-output/logs/controller/ansible.log0000644000175000017500000046107715117042543021412 0ustar zuulzuul2025-12-12 16:23:25,733 p=30999 u=zuul n=ansible | Starting galaxy collection install process 2025-12-12 16:23:25,735 p=30999 u=zuul n=ansible | Process install dependency map 2025-12-12 16:23:42,274 p=30999 u=zuul n=ansible | Starting collection install process 2025-12-12 16:23:42,274 p=30999 u=zuul n=ansible | Installing 'cifmw.general:1.0.0+b9f05e2b' to '/home/zuul/.ansible/collections/ansible_collections/cifmw/general' 2025-12-12 16:23:42,762 p=30999 u=zuul n=ansible | Created collection for cifmw.general:1.0.0+b9f05e2b at /home/zuul/.ansible/collections/ansible_collections/cifmw/general 2025-12-12 16:23:42,762 p=30999 u=zuul n=ansible | cifmw.general:1.0.0+b9f05e2b was installed successfully 2025-12-12 16:23:42,762 p=30999 u=zuul n=ansible | Installing 'containers.podman:1.16.2' to '/home/zuul/.ansible/collections/ansible_collections/containers/podman' 2025-12-12 16:23:42,816 p=30999 u=zuul n=ansible | Created collection for containers.podman:1.16.2 at /home/zuul/.ansible/collections/ansible_collections/containers/podman 2025-12-12 16:23:42,816 p=30999 u=zuul n=ansible | containers.podman:1.16.2 was installed successfully 2025-12-12 16:23:42,816 p=30999 u=zuul n=ansible | Installing 'community.general:10.0.1' to '/home/zuul/.ansible/collections/ansible_collections/community/general' 2025-12-12 16:23:43,563 p=30999 u=zuul n=ansible | Created collection for community.general:10.0.1 at /home/zuul/.ansible/collections/ansible_collections/community/general 2025-12-12 16:23:43,563 p=30999 u=zuul n=ansible | community.general:10.0.1 was installed successfully 2025-12-12 16:23:43,563 p=30999 u=zuul n=ansible | Installing 'ansible.posix:1.6.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/posix' 2025-12-12 16:23:43,612 p=30999 u=zuul n=ansible | Created collection for ansible.posix:1.6.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/posix 2025-12-12 16:23:43,612 p=30999 u=zuul n=ansible | ansible.posix:1.6.2 was installed successfully 2025-12-12 16:23:43,612 p=30999 u=zuul n=ansible | Installing 'ansible.utils:5.1.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/utils' 2025-12-12 16:23:43,705 p=30999 u=zuul n=ansible | Created collection for ansible.utils:5.1.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/utils 2025-12-12 16:23:43,705 p=30999 u=zuul n=ansible | ansible.utils:5.1.2 was installed successfully 2025-12-12 16:23:43,705 p=30999 u=zuul n=ansible | Installing 'community.libvirt:1.3.0' to '/home/zuul/.ansible/collections/ansible_collections/community/libvirt' 2025-12-12 16:23:43,728 p=30999 u=zuul n=ansible | Created collection for community.libvirt:1.3.0 at /home/zuul/.ansible/collections/ansible_collections/community/libvirt 2025-12-12 16:23:43,728 p=30999 u=zuul n=ansible | community.libvirt:1.3.0 was installed successfully 2025-12-12 16:23:43,728 p=30999 u=zuul n=ansible | Installing 'community.crypto:2.22.3' to '/home/zuul/.ansible/collections/ansible_collections/community/crypto' 2025-12-12 16:23:43,864 p=30999 u=zuul n=ansible | Created collection for community.crypto:2.22.3 at /home/zuul/.ansible/collections/ansible_collections/community/crypto 2025-12-12 16:23:43,864 p=30999 u=zuul n=ansible | community.crypto:2.22.3 was installed successfully 2025-12-12 16:23:43,864 p=30999 u=zuul n=ansible | 
Installing 'kubernetes.core:5.0.0' to '/home/zuul/.ansible/collections/ansible_collections/kubernetes/core' 2025-12-12 16:23:43,977 p=30999 u=zuul n=ansible | Created collection for kubernetes.core:5.0.0 at /home/zuul/.ansible/collections/ansible_collections/kubernetes/core 2025-12-12 16:23:43,977 p=30999 u=zuul n=ansible | kubernetes.core:5.0.0 was installed successfully 2025-12-12 16:23:43,977 p=30999 u=zuul n=ansible | Installing 'ansible.netcommon:7.1.0' to '/home/zuul/.ansible/collections/ansible_collections/ansible/netcommon' 2025-12-12 16:23:44,044 p=30999 u=zuul n=ansible | Created collection for ansible.netcommon:7.1.0 at /home/zuul/.ansible/collections/ansible_collections/ansible/netcommon 2025-12-12 16:23:44,044 p=30999 u=zuul n=ansible | ansible.netcommon:7.1.0 was installed successfully 2025-12-12 16:23:44,044 p=30999 u=zuul n=ansible | Installing 'openstack.config_template:2.1.1' to '/home/zuul/.ansible/collections/ansible_collections/openstack/config_template' 2025-12-12 16:23:44,061 p=30999 u=zuul n=ansible | Created collection for openstack.config_template:2.1.1 at /home/zuul/.ansible/collections/ansible_collections/openstack/config_template 2025-12-12 16:23:44,061 p=30999 u=zuul n=ansible | openstack.config_template:2.1.1 was installed successfully 2025-12-12 16:23:44,061 p=30999 u=zuul n=ansible | Installing 'junipernetworks.junos:9.1.0' to '/home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos' 2025-12-12 16:23:44,296 p=30999 u=zuul n=ansible | Created collection for junipernetworks.junos:9.1.0 at /home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos 2025-12-12 16:23:44,297 p=30999 u=zuul n=ansible | junipernetworks.junos:9.1.0 was installed successfully 2025-12-12 16:23:44,297 p=30999 u=zuul n=ansible | Installing 'cisco.ios:9.0.3' to '/home/zuul/.ansible/collections/ansible_collections/cisco/ios' 2025-12-12 16:23:44,578 p=30999 u=zuul n=ansible | Created collection for cisco.ios:9.0.3 at /home/zuul/.ansible/collections/ansible_collections/cisco/ios 2025-12-12 16:23:44,578 p=30999 u=zuul n=ansible | cisco.ios:9.0.3 was installed successfully 2025-12-12 16:23:44,578 p=30999 u=zuul n=ansible | Installing 'mellanox.onyx:1.0.0' to '/home/zuul/.ansible/collections/ansible_collections/mellanox/onyx' 2025-12-12 16:23:44,612 p=30999 u=zuul n=ansible | Created collection for mellanox.onyx:1.0.0 at /home/zuul/.ansible/collections/ansible_collections/mellanox/onyx 2025-12-12 16:23:44,612 p=30999 u=zuul n=ansible | mellanox.onyx:1.0.0 was installed successfully 2025-12-12 16:23:44,613 p=30999 u=zuul n=ansible | Installing 'community.okd:4.0.0' to '/home/zuul/.ansible/collections/ansible_collections/community/okd' 2025-12-12 16:23:44,647 p=30999 u=zuul n=ansible | Created collection for community.okd:4.0.0 at /home/zuul/.ansible/collections/ansible_collections/community/okd 2025-12-12 16:23:44,647 p=30999 u=zuul n=ansible | community.okd:4.0.0 was installed successfully 2025-12-12 16:23:44,647 p=30999 u=zuul n=ansible | Installing '@NAMESPACE@.@NAME@:3.1.4' to '/home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@' 2025-12-12 16:23:44,740 p=30999 u=zuul n=ansible | Created collection for @NAMESPACE@.@NAME@:3.1.4 at /home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@ 2025-12-12 16:23:44,740 p=30999 u=zuul n=ansible | @NAMESPACE@.@NAME@:3.1.4 was installed successfully 2025-12-12 16:23:52,472 p=31591 u=zuul n=ansible | PLAY [Bootstrap playbook] ****************************************************** 2025-12-12 
16:23:52,489 p=31591 u=zuul n=ansible | TASK [Gathering Facts ] ******************************************************** 2025-12-12 16:23:52,489 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:52 +0000 (0:00:00.034) 0:00:00.034 ******* 2025-12-12 16:23:52,489 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:52 +0000 (0:00:00.033) 0:00:00.033 ******* 2025-12-12 16:23:53,532 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:53,551 p=31591 u=zuul n=ansible | TASK [Set custom cifmw PATH reusable fact cifmw_path={{ ansible_user_dir }}/.crc/bin:{{ ansible_user_dir }}/.crc/bin/oc:{{ ansible_user_dir }}/bin:{{ ansible_env.PATH }}, cacheable=True] *** 2025-12-12 16:23:53,551 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:01.062) 0:00:01.096 ******* 2025-12-12 16:23:53,551 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:01.062) 0:00:01.095 ******* 2025-12-12 16:23:53,579 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:53,586 p=31591 u=zuul n=ansible | TASK [Get customized parameters ci_framework_params={{ hostvars[inventory_hostname] | dict2items | selectattr("key", "match", "^(cifmw|pre|post)_(?!install_yamls|openshift_token|openshift_login|openshift_kubeconfig).*") | list | items2dict }}] *** 2025-12-12 16:23:53,587 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:00.035) 0:00:01.132 ******* 2025-12-12 16:23:53,587 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:00.035) 0:00:01.130 ******* 2025-12-12 16:23:53,650 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:53,657 p=31591 u=zuul n=ansible | TASK [install_ca : Ensure target directory exists path={{ cifmw_install_ca_trust_dir }}, state=directory, mode=0755] *** 2025-12-12 16:23:53,657 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:00.070) 0:00:01.202 ******* 2025-12-12 16:23:53,657 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:00.070) 0:00:01.201 ******* 2025-12-12 16:23:54,401 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:54,411 p=31591 u=zuul n=ansible | TASK [install_ca : Install internal CA from url url={{ cifmw_install_ca_url }}, dest={{ cifmw_install_ca_trust_dir }}, validate_certs={{ cifmw_install_ca_url_validate_certs | default(omit) }}, mode=0644] *** 2025-12-12 16:23:54,411 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.753) 0:00:01.956 ******* 2025-12-12 16:23:54,411 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.754) 0:00:01.955 ******* 2025-12-12 16:23:54,441 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:23:54,452 p=31591 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from inline dest={{ cifmw_install_ca_trust_dir }}/cifmw_inline_ca_bundle.crt, content={{ cifmw_install_ca_bundle_inline }}, mode=0644] *** 2025-12-12 16:23:54,452 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.041) 0:00:01.998 ******* 2025-12-12 16:23:54,453 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.041) 0:00:01.996 ******* 2025-12-12 16:23:54,490 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:23:54,502 p=31591 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from file dest={{ cifmw_install_ca_trust_dir }}/{{ cifmw_install_ca_bundle_src | basename }}, src={{ cifmw_install_ca_bundle_src }}, mode=0644] *** 2025-12-12 16:23:54,502 p=31591 u=zuul n=ansible | Friday 12 December 2025 
16:23:54 +0000 (0:00:00.049) 0:00:02.048 ******* 2025-12-12 16:23:54,503 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.049) 0:00:02.046 ******* 2025-12-12 16:23:54,528 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:23:54,544 p=31591 u=zuul n=ansible | TASK [install_ca : Update ca bundle _raw_params=update-ca-trust] *************** 2025-12-12 16:23:54,544 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.041) 0:00:02.089 ******* 2025-12-12 16:23:54,544 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.041) 0:00:02.088 ******* 2025-12-12 16:23:56,110 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:23:56,123 p=31591 u=zuul n=ansible | TASK [repo_setup : Ensure directories are present path={{ cifmw_repo_setup_basedir }}/{{ item }}, state=directory, mode=0755] *** 2025-12-12 16:23:56,123 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:56 +0000 (0:00:01.579) 0:00:03.668 ******* 2025-12-12 16:23:56,123 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:56 +0000 (0:00:01.579) 0:00:03.667 ******* 2025-12-12 16:23:56,315 p=31591 u=zuul n=ansible | changed: [localhost] => (item=tmp) 2025-12-12 16:23:56,501 p=31591 u=zuul n=ansible | changed: [localhost] => (item=artifacts/repositories) 2025-12-12 16:23:56,672 p=31591 u=zuul n=ansible | changed: [localhost] => (item=venv/repo_setup) 2025-12-12 16:23:56,687 p=31591 u=zuul n=ansible | TASK [repo_setup : Make sure git-core package is installed name=git-core, state=present] *** 2025-12-12 16:23:56,687 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:56 +0000 (0:00:00.563) 0:00:04.232 ******* 2025-12-12 16:23:56,687 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:56 +0000 (0:00:00.563) 0:00:04.231 ******* 2025-12-12 16:23:58,223 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:58,229 p=31591 u=zuul n=ansible | TASK [repo_setup : Get repo-setup repository accept_hostkey=True, dest={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, repo={{ cifmw_repo_setup_src }}] *** 2025-12-12 16:23:58,229 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:58 +0000 (0:00:01.541) 0:00:05.774 ******* 2025-12-12 16:23:58,229 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:58 +0000 (0:00:01.541) 0:00:05.773 ******* 2025-12-12 16:23:59,641 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:23:59,650 p=31591 u=zuul n=ansible | TASK [repo_setup : Initialize python venv and install requirements virtualenv={{ cifmw_repo_setup_venv }}, requirements={{ cifmw_repo_setup_basedir }}/tmp/repo-setup/requirements.txt, virtualenv_command=python3 -m venv --system-site-packages --upgrade-deps] *** 2025-12-12 16:23:59,650 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:59 +0000 (0:00:01.421) 0:00:07.195 ******* 2025-12-12 16:23:59,650 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:59 +0000 (0:00:01.421) 0:00:07.194 ******* 2025-12-12 16:24:08,242 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:08,257 p=31591 u=zuul n=ansible | TASK [repo_setup : Install repo-setup package chdir={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, creates={{ cifmw_repo_setup_venv }}/bin/repo-setup, _raw_params={{ cifmw_repo_setup_venv }}/bin/python setup.py install] *** 2025-12-12 16:24:08,257 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:08 +0000 (0:00:08.606) 0:00:15.802 ******* 2025-12-12 16:24:08,257 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:08 +0000 (0:00:08.606) 0:00:15.801 
******* 2025-12-12 16:24:09,064 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:09,072 p=31591 u=zuul n=ansible | TASK [repo_setup : Set cifmw_repo_setup_dlrn_hash_tag from content provider cifmw_repo_setup_dlrn_hash_tag={{ content_provider_dlrn_md5_hash }}] *** 2025-12-12 16:24:09,073 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.815) 0:00:16.618 ******* 2025-12-12 16:24:09,073 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.815) 0:00:16.616 ******* 2025-12-12 16:24:09,103 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:09,110 p=31591 u=zuul n=ansible | TASK [repo_setup : Run repo-setup _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup {{ cifmw_repo_setup_promotion }} {{ cifmw_repo_setup_additional_repos }} -d {{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} -b {{ cifmw_repo_setup_branch }} --rdo-mirror {{ cifmw_repo_setup_rdo_mirror }} {% if cifmw_repo_setup_dlrn_hash_tag | length > 0 %} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif %} -o {{ cifmw_repo_setup_output }}] *** 2025-12-12 16:24:09,110 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.037) 0:00:16.655 ******* 2025-12-12 16:24:09,110 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.037) 0:00:16.654 ******* 2025-12-12 16:24:09,757 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:09,771 p=31591 u=zuul n=ansible | TASK [repo_setup : Get component repo url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/component/{{ cifmw_repo_setup_component_name }}/{{ cifmw_repo_setup_component_promotion_tag }}/delorean.repo, dest={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, mode=0644] *** 2025-12-12 16:24:09,772 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.661) 0:00:17.317 ******* 2025-12-12 16:24:09,772 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.661) 0:00:17.315 ******* 2025-12-12 16:24:09,801 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:09,807 p=31591 u=zuul n=ansible | TASK [repo_setup : Rename component repo path={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, regexp=delorean-component-{{ cifmw_repo_setup_component_name }}, replace={{ cifmw_repo_setup_component_name }}-{{ cifmw_repo_setup_component_promotion_tag }}] *** 2025-12-12 16:24:09,808 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.036) 0:00:17.353 ******* 2025-12-12 16:24:09,808 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.036) 0:00:17.351 ******* 2025-12-12 16:24:09,834 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:09,840 p=31591 u=zuul n=ansible | TASK [repo_setup : Disable component repo in current-podified dlrn repo path={{ cifmw_repo_setup_output }}/delorean.repo, section=delorean-component-{{ cifmw_repo_setup_component_name }}, option=enabled, value=0, mode=0644] *** 2025-12-12 16:24:09,840 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.032) 0:00:17.385 ******* 2025-12-12 16:24:09,840 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.032) 0:00:17.384 ******* 2025-12-12 16:24:09,870 p=31591 u=zuul n=ansible | 
skipping: [localhost] 2025-12-12 16:24:09,878 p=31591 u=zuul n=ansible | TASK [repo_setup : Run repo-setup-get-hash _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup-get-hash --dlrn-url {{ cifmw_repo_setup_dlrn_uri[:-1] }} --os-version {{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} --release {{ cifmw_repo_setup_branch }} {% if cifmw_repo_setup_component_name | length > 0 -%} --component {{ cifmw_repo_setup_component_name }} --tag {{ cifmw_repo_setup_component_promotion_tag }} {% else -%} --tag {{cifmw_repo_setup_promotion }} {% endif -%} {% if (cifmw_repo_setup_dlrn_hash_tag | length > 0) and (cifmw_repo_setup_component_name | length <= 0) -%} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif -%} --json] *** 2025-12-12 16:24:09,878 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.038) 0:00:17.423 ******* 2025-12-12 16:24:09,878 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.038) 0:00:17.422 ******* 2025-12-12 16:24:10,353 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:10,368 p=31591 u=zuul n=ansible | TASK [repo_setup : Dump full hash in delorean.repo.md5 file content={{ _repo_setup_json['full_hash'] }} , dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2025-12-12 16:24:10,368 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:10 +0000 (0:00:00.490) 0:00:17.913 ******* 2025-12-12 16:24:10,369 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:10 +0000 (0:00:00.490) 0:00:17.912 ******* 2025-12-12 16:24:11,049 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:11,058 p=31591 u=zuul n=ansible | TASK [repo_setup : Dump current-podified hash url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/current-podified/delorean.repo.md5, dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2025-12-12 16:24:11,058 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.689) 0:00:18.603 ******* 2025-12-12 16:24:11,058 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.689) 0:00:18.602 ******* 2025-12-12 16:24:11,076 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,085 p=31591 u=zuul n=ansible | TASK [repo_setup : Slurp current podified hash src={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5] *** 2025-12-12 16:24:11,085 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.026) 0:00:18.630 ******* 2025-12-12 16:24:11,085 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.026) 0:00:18.629 ******* 2025-12-12 16:24:11,104 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,112 p=31591 u=zuul n=ansible | TASK [repo_setup : Update the value of full_hash _repo_setup_json={{ _repo_setup_json | combine({'full_hash': _hash}, recursive=true) }}] *** 2025-12-12 16:24:11,112 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.657 ******* 2025-12-12 16:24:11,112 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.656 ******* 2025-12-12 16:24:11,131 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,140 p=31591 u=zuul n=ansible | TASK [repo_setup : Export hashes facts for further use cifmw_repo_setup_full_hash={{ _repo_setup_json['full_hash'] }}, 
cifmw_repo_setup_commit_hash={{ _repo_setup_json['commit_hash'] }}, cifmw_repo_setup_distro_hash={{ _repo_setup_json['distro_hash'] }}, cifmw_repo_setup_extended_hash={{ _repo_setup_json['extended_hash'] }}, cifmw_repo_setup_dlrn_api_url={{ _repo_setup_json['dlrn_api_url'] }}, cifmw_repo_setup_dlrn_url={{ _repo_setup_json['dlrn_url'] }}, cifmw_repo_setup_release={{ _repo_setup_json['release'] }}, cacheable=True] *** 2025-12-12 16:24:11,140 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.685 ******* 2025-12-12 16:24:11,140 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.683 ******* 2025-12-12 16:24:11,172 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:11,179 p=31591 u=zuul n=ansible | TASK [repo_setup : Create download directory path={{ cifmw_repo_setup_rhos_release_path }}, state=directory, mode=0755] *** 2025-12-12 16:24:11,179 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.039) 0:00:18.725 ******* 2025-12-12 16:24:11,180 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.039) 0:00:18.723 ******* 2025-12-12 16:24:11,194 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,201 p=31591 u=zuul n=ansible | TASK [repo_setup : Print the URL to request msg={{ cifmw_repo_setup_rhos_release_rpm }}] *** 2025-12-12 16:24:11,201 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.021) 0:00:18.746 ******* 2025-12-12 16:24:11,201 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.021) 0:00:18.745 ******* 2025-12-12 16:24:11,215 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,222 p=31591 u=zuul n=ansible | TASK [Download the RPM name=krb_request] *************************************** 2025-12-12 16:24:11,222 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.020) 0:00:18.767 ******* 2025-12-12 16:24:11,222 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.020) 0:00:18.766 ******* 2025-12-12 16:24:11,239 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,248 p=31591 u=zuul n=ansible | TASK [repo_setup : Install RHOS Release tool name={{ cifmw_repo_setup_rhos_release_rpm if cifmw_repo_setup_rhos_release_rpm is not url else cifmw_krb_request_out.path }}, state=present, disable_gpg_check={{ cifmw_repo_setup_rhos_release_gpg_check | bool }}] *** 2025-12-12 16:24:11,248 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.026) 0:00:18.793 ******* 2025-12-12 16:24:11,248 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.026) 0:00:18.792 ******* 2025-12-12 16:24:11,268 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,275 p=31591 u=zuul n=ansible | TASK [repo_setup : Get rhos-release tool version _raw_params=rhos-release --version] *** 2025-12-12 16:24:11,276 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.821 ******* 2025-12-12 16:24:11,276 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.819 ******* 2025-12-12 16:24:11,289 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,295 p=31591 u=zuul n=ansible | TASK [repo_setup : Print rhos-release tool version msg={{ rr_version.stdout }}] *** 2025-12-12 16:24:11,295 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.019) 0:00:18.840 ******* 2025-12-12 16:24:11,295 p=31591 u=zuul n=ansible | 
Friday 12 December 2025 16:24:11 +0000 (0:00:00.019) 0:00:18.839 ******* 2025-12-12 16:24:11,308 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,316 p=31591 u=zuul n=ansible | TASK [repo_setup : Generate repos using rhos-release {{ cifmw_repo_setup_rhos_release_args }} _raw_params=rhos-release {{ cifmw_repo_setup_rhos_release_args }} \ -t {{ cifmw_repo_setup_output }}] *** 2025-12-12 16:24:11,316 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.020) 0:00:18.861 ******* 2025-12-12 16:24:11,316 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.020) 0:00:18.860 ******* 2025-12-12 16:24:11,329 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,334 p=31591 u=zuul n=ansible | TASK [repo_setup : Check for /etc/ci/mirror_info.sh path=/etc/ci/mirror_info.sh] *** 2025-12-12 16:24:11,335 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.018) 0:00:18.880 ******* 2025-12-12 16:24:11,335 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.018) 0:00:18.878 ******* 2025-12-12 16:24:11,534 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:11,548 p=31591 u=zuul n=ansible | TASK [repo_setup : Use RDO proxy mirrors chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|https://trunk.rdoproject.org|$NODEPOOL_RDO_PROXY|g" *.repo ] *** 2025-12-12 16:24:11,549 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.214) 0:00:19.094 ******* 2025-12-12 16:24:11,549 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.214) 0:00:19.092 ******* 2025-12-12 16:24:11,755 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:11,770 p=31591 u=zuul n=ansible | TASK [repo_setup : Use RDO CentOS mirrors (remove CentOS 10 conditional when Nodepool mirrors exist) chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|http://mirror.stream.centos.org|$NODEPOOL_CENTOS_MIRROR|g" *.repo ] *** 2025-12-12 16:24:11,771 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.221) 0:00:19.316 ******* 2025-12-12 16:24:11,771 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.222) 0:00:19.314 ******* 2025-12-12 16:24:12,015 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:12,021 p=31591 u=zuul n=ansible | TASK [repo_setup : Check for gating.repo file on content provider url=http://{{ content_provider_registry_ip }}:8766/gating.repo] *** 2025-12-12 16:24:12,021 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.250) 0:00:19.567 ******* 2025-12-12 16:24:12,022 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.250) 0:00:19.565 ******* 2025-12-12 16:24:12,046 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,053 p=31591 u=zuul n=ansible | TASK [repo_setup : Populate gating repo from content provider ip content=[gating-repo] baseurl=http://{{ content_provider_registry_ip }}:8766/ enabled=1 gpgcheck=0 priority=1 , dest={{ cifmw_repo_setup_output }}/gating.repo, mode=0644] *** 2025-12-12 16:24:12,053 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.031) 0:00:19.599 ******* 2025-12-12 16:24:12,053 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.031) 0:00:19.597 ******* 2025-12-12 16:24:12,080 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,089 
p=31591 u=zuul n=ansible | TASK [repo_setup : Check for DLRN repo at the destination path={{ cifmw_repo_setup_output }}/delorean.repo] *** 2025-12-12 16:24:12,089 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.634 ******* 2025-12-12 16:24:12,089 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.633 ******* 2025-12-12 16:24:12,117 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,125 p=31591 u=zuul n=ansible | TASK [repo_setup : Lower the priority of DLRN repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}/delorean.repo, regexp=priority=1, replace=priority=20] *** 2025-12-12 16:24:12,125 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.670 ******* 2025-12-12 16:24:12,125 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.668 ******* 2025-12-12 16:24:12,146 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,153 p=31591 u=zuul n=ansible | TASK [repo_setup : Check for DLRN component repo path={{ cifmw_repo_setup_output }}/{{ _comp_repo }}] *** 2025-12-12 16:24:12,153 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.028) 0:00:19.698 ******* 2025-12-12 16:24:12,153 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.028) 0:00:19.697 ******* 2025-12-12 16:24:12,179 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,188 p=31591 u=zuul n=ansible | TASK [repo_setup : Lower the priority of componennt repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}//{{ _comp_repo }}, regexp=priority=1, replace=priority=2] *** 2025-12-12 16:24:12,188 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.733 ******* 2025-12-12 16:24:12,188 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.732 ******* 2025-12-12 16:24:12,210 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,219 p=31591 u=zuul n=ansible | TASK [repo_setup : Find existing repos from /etc/yum.repos.d directory paths=/etc/yum.repos.d/, patterns=*.repo, recurse=False] *** 2025-12-12 16:24:12,219 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.031) 0:00:19.764 ******* 2025-12-12 16:24:12,219 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.031) 0:00:19.763 ******* 2025-12-12 16:24:12,535 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:12,544 p=31591 u=zuul n=ansible | TASK [repo_setup : Remove existing repos from /etc/yum.repos.d directory path={{ item }}, state=absent] *** 2025-12-12 16:24:12,544 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.324) 0:00:20.089 ******* 2025-12-12 16:24:12,544 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.324) 0:00:20.088 ******* 2025-12-12 16:24:12,793 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos-addons.repo) 2025-12-12 16:24:13,025 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos.repo) 2025-12-12 16:24:13,036 p=31591 u=zuul n=ansible | TASK [repo_setup : Cleanup existing metadata _raw_params=dnf clean metadata] *** 2025-12-12 16:24:13,036 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.492) 0:00:20.581 ******* 2025-12-12 16:24:13,036 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.492) 
0:00:20.580 ******* 2025-12-12 16:24:13,488 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:13,496 p=31591 u=zuul n=ansible | TASK [repo_setup : Copy generated repos to /etc/yum.repos.d directory mode=0755, remote_src=True, src={{ cifmw_repo_setup_output }}/, dest=/etc/yum.repos.d] *** 2025-12-12 16:24:13,497 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.460) 0:00:21.042 ******* 2025-12-12 16:24:13,497 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.460) 0:00:21.040 ******* 2025-12-12 16:24:13,770 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:13,782 p=31591 u=zuul n=ansible | TASK [ci_setup : Gather variables for each operating system _raw_params={{ item }}] *** 2025-12-12 16:24:13,782 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.285) 0:00:21.327 ******* 2025-12-12 16:24:13,782 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.285) 0:00:21.326 ******* 2025-12-12 16:24:13,818 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_setup/vars/redhat.yml) 2025-12-12 16:24:13,829 p=31591 u=zuul n=ansible | TASK [ci_setup : List packages to install var=cifmw_ci_setup_packages] ********* 2025-12-12 16:24:13,829 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.047) 0:00:21.374 ******* 2025-12-12 16:24:13,829 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.047) 0:00:21.373 ******* 2025-12-12 16:24:13,850 p=31591 u=zuul n=ansible | ok: [localhost] => cifmw_ci_setup_packages: - bash-completion - ca-certificates - git-core - make - tar - tmux - python3-pip 2025-12-12 16:24:13,857 p=31591 u=zuul n=ansible | TASK [ci_setup : Install needed packages name={{ cifmw_ci_setup_packages }}, state=latest] *** 2025-12-12 16:24:13,857 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.027) 0:00:21.402 ******* 2025-12-12 16:24:13,857 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.027) 0:00:21.400 ******* 2025-12-12 16:24:44,229 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:44,238 p=31591 u=zuul n=ansible | TASK [ci_setup : Gather version of openshift client _raw_params=oc version --client -o yaml] *** 2025-12-12 16:24:44,238 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:30.381) 0:00:51.783 ******* 2025-12-12 16:24:44,238 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:30.381) 0:00:51.782 ******* 2025-12-12 16:24:44,462 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:44,477 p=31591 u=zuul n=ansible | TASK [ci_setup : Ensure openshift client install path is present path={{ cifmw_ci_setup_oc_install_path }}, state=directory, mode=0755] *** 2025-12-12 16:24:44,477 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:00.239) 0:00:52.022 ******* 2025-12-12 16:24:44,477 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:00.239) 0:00:52.021 ******* 2025-12-12 16:24:44,691 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:44,706 p=31591 u=zuul n=ansible | TASK [ci_setup : Install openshift client src={{ cifmw_ci_setup_openshift_client_download_uri }}/{{ cifmw_ci_setup_openshift_client_version }}/openshift-client-linux.tar.gz, dest={{ cifmw_ci_setup_oc_install_path }}, remote_src=True, mode=0755, creates={{ cifmw_ci_setup_oc_install_path }}/oc] *** 2025-12-12 16:24:44,706 
p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:00.229) 0:00:52.251 ******* 2025-12-12 16:24:44,706 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:00.229) 0:00:52.250 ******* 2025-12-12 16:24:50,033 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:50,042 p=31591 u=zuul n=ansible | TASK [ci_setup : Add the OC path to cifmw_path if needed cifmw_path={{ cifmw_ci_setup_oc_install_path }}:{{ ansible_env.PATH }}, cacheable=True] *** 2025-12-12 16:24:50,042 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:05.336) 0:00:57.587 ******* 2025-12-12 16:24:50,042 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:05.335) 0:00:57.586 ******* 2025-12-12 16:24:50,063 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,077 p=31591 u=zuul n=ansible | TASK [ci_setup : Create completion file] *************************************** 2025-12-12 16:24:50,077 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.034) 0:00:57.622 ******* 2025-12-12 16:24:50,077 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.034) 0:00:57.621 ******* 2025-12-12 16:24:50,411 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:50,420 p=31591 u=zuul n=ansible | TASK [ci_setup : Source completion from within .bashrc create=True, mode=0644, path={{ ansible_user_dir }}/.bashrc, block=if [ -f ~/.oc_completion ]; then source ~/.oc_completion fi] *** 2025-12-12 16:24:50,420 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.342) 0:00:57.965 ******* 2025-12-12 16:24:50,420 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.342) 0:00:57.964 ******* 2025-12-12 16:24:50,765 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:50,780 p=31591 u=zuul n=ansible | TASK [ci_setup : Check rhsm status _raw_params=subscription-manager status] **** 2025-12-12 16:24:50,780 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.360) 0:00:58.325 ******* 2025-12-12 16:24:50,780 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.360) 0:00:58.324 ******* 2025-12-12 16:24:50,799 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,813 p=31591 u=zuul n=ansible | TASK [ci_setup : Gather the repos to be enabled _repos={{ cifmw_ci_setup_rhel_rhsm_default_repos + (cifmw_ci_setup_rhel_rhsm_extra_repos | default([])) }}] *** 2025-12-12 16:24:50,813 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.033) 0:00:58.359 ******* 2025-12-12 16:24:50,814 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.033) 0:00:58.357 ******* 2025-12-12 16:24:50,835 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,849 p=31591 u=zuul n=ansible | TASK [ci_setup : Enabling the required repositories. 
name={{ item }}, state={{ rhsm_repo_state | default('enabled') }}] *** 2025-12-12 16:24:50,850 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.036) 0:00:58.395 ******* 2025-12-12 16:24:50,850 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.036) 0:00:58.393 ******* 2025-12-12 16:24:50,868 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,882 p=31591 u=zuul n=ansible | TASK [ci_setup : Get current /etc/redhat-release _raw_params=cat /etc/redhat-release] *** 2025-12-12 16:24:50,882 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.032) 0:00:58.427 ******* 2025-12-12 16:24:50,882 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.032) 0:00:58.426 ******* 2025-12-12 16:24:50,912 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,919 p=31591 u=zuul n=ansible | TASK [ci_setup : Print current /etc/redhat-release msg={{ _current_rh_release.stdout }}] *** 2025-12-12 16:24:50,919 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.037) 0:00:58.464 ******* 2025-12-12 16:24:50,919 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.037) 0:00:58.463 ******* 2025-12-12 16:24:50,938 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,945 p=31591 u=zuul n=ansible | TASK [ci_setup : Ensure the repos are enabled in the system using yum name={{ item.name }}, baseurl={{ item.baseurl }}, description={{ item.description | default(item.name) }}, gpgcheck={{ item.gpgcheck | default(false) }}, enabled=True, state={{ yum_repo_state | default('present') }}] *** 2025-12-12 16:24:50,946 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.026) 0:00:58.491 ******* 2025-12-12 16:24:50,946 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.026) 0:00:58.489 ******* 2025-12-12 16:24:50,967 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,978 p=31591 u=zuul n=ansible | TASK [ci_setup : Manage directories path={{ item }}, state={{ directory_state }}, mode=0755, owner={{ ansible_user_id }}, group={{ ansible_user_id }}] *** 2025-12-12 16:24:50,979 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.033) 0:00:58.524 ******* 2025-12-12 16:24:50,979 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.033) 0:00:58.522 ******* 2025-12-12 16:24:51,232 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/manifests/openstack/cr) 2025-12-12 16:24:51,436 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/logs) 2025-12-12 16:24:51,670 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/tmp) 2025-12-12 16:24:51,870 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/volumes) 2025-12-12 16:24:52,050 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2025-12-12 16:24:52,075 p=31591 u=zuul n=ansible | TASK [Prepare install_yamls make targets name=install_yamls, apply={'tags': ['bootstrap']}] *** 2025-12-12 16:24:52,075 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:01.096) 0:00:59.620 ******* 2025-12-12 16:24:52,075 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:01.096) 0:00:59.619 ******* 2025-12-12 16:24:52,188 p=31591 u=zuul n=ansible | TASK [install_yamls : Ensure directories exist path={{ 
item }}, state=directory, mode=0755] *** 2025-12-12 16:24:52,188 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.112) 0:00:59.733 ******* 2025-12-12 16:24:52,188 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.112) 0:00:59.732 ******* 2025-12-12 16:24:52,378 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts) 2025-12-12 16:24:52,558 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes/tasks) 2025-12-12 16:24:52,756 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2025-12-12 16:24:52,766 p=31591 u=zuul n=ansible | TASK [Create variables with local repos based on Zuul items name=install_yamls, tasks_from=zuul_set_operators_repo.yml] *** 2025-12-12 16:24:52,766 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.578) 0:01:00.311 ******* 2025-12-12 16:24:52,766 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.578) 0:01:00.310 ******* 2025-12-12 16:24:52,800 p=31591 u=zuul n=ansible | TASK [install_yamls : Set fact with local repos based on Zuul items cifmw_install_yamls_operators_repo={{ cifmw_install_yamls_operators_repo | default({}) | combine(_repo_operator_info | items2dict) }}] *** 2025-12-12 16:24:52,800 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.033) 0:01:00.345 ******* 2025-12-12 16:24:52,800 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.033) 0:01:00.344 ******* 2025-12-12 16:24:52,834 p=31591 u=zuul n=ansible | skipping: [localhost] => (item={'branch': 'master', 'change': '694', 'change_url': 'https://github.com/infrawatch/service-telemetry-operator/pull/694', 'commit_id': '3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14', 'patchset': '3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/infrawatch/service-telemetry-operator', 'name': 'infrawatch/service-telemetry-operator', 'short_name': 'service-telemetry-operator', 'src_dir': 'src/github.com/infrawatch/service-telemetry-operator'}, 'topic': None}) 2025-12-12 16:24:52,836 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:52,844 p=31591 u=zuul n=ansible | TASK [install_yamls : Print helpful data for debugging msg=_repo_operator_name: {{ _repo_operator_name }} _repo_operator_info: {{ _repo_operator_info }} cifmw_install_yamls_operators_repo: {{ cifmw_install_yamls_operators_repo }} ] *** 2025-12-12 16:24:52,844 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.043) 0:01:00.389 ******* 2025-12-12 16:24:52,844 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.043) 0:01:00.388 ******* 2025-12-12 16:24:52,876 p=31591 u=zuul n=ansible | skipping: [localhost] => (item={'branch': 'master', 'change': '694', 'change_url': 'https://github.com/infrawatch/service-telemetry-operator/pull/694', 'commit_id': '3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14', 'patchset': '3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/infrawatch/service-telemetry-operator', 'name': 'infrawatch/service-telemetry-operator', 'short_name': 'service-telemetry-operator', 'src_dir': 'src/github.com/infrawatch/service-telemetry-operator'}, 'topic': None}) 2025-12-12 16:24:52,878 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:52,891 p=31591 
u=zuul n=ansible | TASK [Customize install_yamls devsetup vars if needed name=install_yamls, tasks_from=customize_devsetup_vars.yml] *** 2025-12-12 16:24:52,891 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.047) 0:01:00.436 ******* 2025-12-12 16:24:52,891 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.047) 0:01:00.435 ******* 2025-12-12 16:24:52,935 p=31591 u=zuul n=ansible | TASK [install_yamls : Update opm_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^opm_version:, line=opm_version: {{ cifmw_install_yamls_opm_version }}, state=present] *** 2025-12-12 16:24:52,935 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.043) 0:01:00.480 ******* 2025-12-12 16:24:52,935 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.043) 0:01:00.479 ******* 2025-12-12 16:24:52,969 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:52,976 p=31591 u=zuul n=ansible | TASK [install_yamls : Update sdk_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^sdk_version:, line=sdk_version: {{ cifmw_install_yamls_sdk_version }}, state=present] *** 2025-12-12 16:24:52,976 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.041) 0:01:00.521 ******* 2025-12-12 16:24:52,976 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.041) 0:01:00.520 ******* 2025-12-12 16:24:53,000 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,008 p=31591 u=zuul n=ansible | TASK [install_yamls : Update go_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^go_version:, line=go_version: {{ cifmw_install_yamls_go_version }}, state=present] *** 2025-12-12 16:24:53,008 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.031) 0:01:00.553 ******* 2025-12-12 16:24:53,008 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.031) 0:01:00.552 ******* 2025-12-12 16:24:53,029 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,036 p=31591 u=zuul n=ansible | TASK [install_yamls : Update kustomize_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^kustomize_version:, line=kustomize_version: {{ cifmw_install_yamls_kustomize_version }}, state=present] *** 2025-12-12 16:24:53,036 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.028) 0:01:00.581 ******* 2025-12-12 16:24:53,036 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.028) 0:01:00.580 ******* 2025-12-12 16:24:53,060 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,071 p=31591 u=zuul n=ansible | TASK [install_yamls : Compute the cifmw_install_yamls_vars final value _install_yamls_override_vars={{ _install_yamls_override_vars | default({}) | combine(item, recursive=True) }}] *** 2025-12-12 16:24:53,071 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.035) 0:01:00.616 ******* 2025-12-12 16:24:53,071 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.035) 0:01:00.615 ******* 2025-12-12 16:24:53,129 p=31591 u=zuul n=ansible | ok: [localhost] => (item={}) 2025-12-12 16:24:53,137 p=31591 u=zuul n=ansible | TASK [install_yamls : Set environment override 
cifmw_install_yamls_environment fact cifmw_install_yamls_environment={{ _install_yamls_override_vars.keys() | map('upper') | zip(_install_yamls_override_vars.values()) | items2dict(key_name=0, value_name=1) | combine({ 'OUT': cifmw_install_yamls_manifests_dir, 'OUTPUT_DIR': cifmw_install_yamls_edpm_dir, 'CHECKOUT_FROM_OPENSTACK_REF': cifmw_install_yamls_checkout_openstack_ref, 'OPENSTACK_K8S_BRANCH': (zuul is defined and not zuul.branch |regex_search('master|antelope|rhos')) | ternary(zuul.branch, 'main') }) | combine(install_yamls_operators_repos) }}, cacheable=True] *** 2025-12-12 16:24:53,138 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.066) 0:01:00.683 ******* 2025-12-12 16:24:53,138 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.066) 0:01:00.681 ******* 2025-12-12 16:24:53,177 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:53,182 p=31591 u=zuul n=ansible | TASK [install_yamls : Get environment structure base_path={{ cifmw_install_yamls_repo }}] *** 2025-12-12 16:24:53,182 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.044) 0:01:00.728 ******* 2025-12-12 16:24:53,183 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.044) 0:01:00.726 ******* 2025-12-12 16:24:53,768 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:53,822 p=31591 u=zuul n=ansible | TASK [install_yamls : Ensure Output directory exists path={{ cifmw_install_yamls_out_dir }}, state=directory, mode=0755] *** 2025-12-12 16:24:53,823 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.640) 0:01:01.368 ******* 2025-12-12 16:24:53,823 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.640) 0:01:01.366 ******* 2025-12-12 16:24:53,849 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,859 p=31591 u=zuul n=ansible | TASK [install_yamls : Ensure user cifmw_install_yamls_vars contains existing Makefile variables that=_cifmw_install_yamls_unmatched_vars | length == 0, msg=cifmw_install_yamls_vars contains a variable that is not defined in install_yamls Makefile nor cifmw_install_yamls_whitelisted_vars: {{ _cifmw_install_yamls_unmatched_vars | join(', ')}}, quiet=True] *** 2025-12-12 16:24:53,859 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.036) 0:01:01.404 ******* 2025-12-12 16:24:53,859 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.036) 0:01:01.403 ******* 2025-12-12 16:24:53,878 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,891 p=31591 u=zuul n=ansible | TASK [install_yamls : Generate /home/zuul/ci-framework-data/artifacts/install_yamls.sh dest={{ cifmw_install_yamls_out_dir }}/{{ cifmw_install_yamls_envfile }}, content={% for k,v in cifmw_install_yamls_environment.items() %} export {{ k }}={{ v }} {% endfor %}, mode=0644] *** 2025-12-12 16:24:53,891 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.031) 0:01:01.436 ******* 2025-12-12 16:24:53,891 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.032) 0:01:01.435 ******* 2025-12-12 16:24:53,913 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,920 p=31591 u=zuul n=ansible | TASK [install_yamls : Set install_yamls default values cifmw_install_yamls_defaults={{ get_makefiles_env_output.makefiles_values | combine(cifmw_install_yamls_environment) }}, cacheable=True] *** 2025-12-12 16:24:53,920 p=31591 u=zuul n=ansible | Friday 12 December 2025 
16:24:53 +0000 (0:00:00.029) 0:01:01.465 ******* 2025-12-12 16:24:53,920 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.028) 0:01:01.464 ******* 2025-12-12 16:24:53,945 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:53,954 p=31591 u=zuul n=ansible | TASK [install_yamls : Show the env structure var=cifmw_install_yamls_environment] *** 2025-12-12 16:24:53,954 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.034) 0:01:01.500 ******* 2025-12-12 16:24:53,955 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.034) 0:01:01.498 ******* 2025-12-12 16:24:53,977 p=31591 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_environment: CHECKOUT_FROM_OPENSTACK_REF: 'true' OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm 2025-12-12 16:24:53,987 p=31591 u=zuul n=ansible | TASK [install_yamls : Show the env structure defaults var=cifmw_install_yamls_defaults] *** 2025-12-12 16:24:53,987 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.032) 0:01:01.532 ******* 2025-12-12 16:24:53,987 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.032) 0:01:01.531 ******* 2025-12-12 16:24:54,015 p=31591 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 
BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_CLEANUP: 'true' BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: '' BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest DATAPLANE_ANSIBLE_SECRET: 
dataplane-ansible-ssh-private-key-secret DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 767c3ed056cbaa3b9dfedb8c6f825bf0 HEAT_BRANCH: main 
HEAT_COMMIT_HASH: '' HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: 'true' INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE: quay.io/metal3-io/ironic IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '1234567842' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: 
octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12345678' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/playbooks/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: osp-secret SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: test/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' tripleo_deploy: 'export REGISTRY_USER:' 2025-12-12 16:24:54,022 p=31591 u=zuul n=ansible | TASK [install_yamls : Generate make targets install_yamls_path={{ cifmw_install_yamls_repo }}, output_directory={{ cifmw_install_yamls_tasks_out }}] *** 2025-12-12 16:24:54,023 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.035) 0:01:01.568 ******* 2025-12-12 16:24:54,023 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.035) 0:01:01.566 ******* 2025-12-12 16:24:54,328 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:54,337 p=31591 u=zuul n=ansible | TASK [install_yamls : Debug generate_make module var=cifmw_generate_makes] ***** 2025-12-12 16:24:54,337 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.314) 0:01:01.882 ******* 2025-12-12 16:24:54,337 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.314) 0:01:01.881 ******* 2025-12-12 16:24:54,360 p=31591 u=zuul n=ansible | ok: [localhost] => cifmw_generate_makes: changed: false debug: 
/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/Makefile: - all - help - cleanup - deploy_cleanup - wait - crc_storage - crc_storage_cleanup - crc_storage_release - crc_storage_with_retries - crc_storage_cleanup_with_retries - operator_namespace - namespace - namespace_cleanup - input - input_cleanup - crc_bmo_setup - crc_bmo_cleanup - openstack_prep - openstack - openstack_wait - openstack_init - openstack_cleanup - openstack_repo - openstack_deploy_prep - openstack_deploy - openstack_wait_deploy - openstack_deploy_cleanup - openstack_update_run - update_services - update_system - openstack_patch_version - edpm_deploy_generate_keys - edpm_patch_ansible_runner_image - edpm_deploy_prep - edpm_deploy_cleanup - edpm_deploy - edpm_deploy_baremetal_prep - edpm_deploy_baremetal - edpm_wait_deploy_baremetal - edpm_wait_deploy - edpm_register_dns - edpm_nova_discover_hosts - openstack_crds - openstack_crds_cleanup - edpm_deploy_networker_prep - edpm_deploy_networker_cleanup - edpm_deploy_networker - infra_prep - infra - infra_cleanup - dns_deploy_prep - dns_deploy - dns_deploy_cleanup - netconfig_deploy_prep - netconfig_deploy - netconfig_deploy_cleanup - memcached_deploy_prep - memcached_deploy - memcached_deploy_cleanup - keystone_prep - keystone - keystone_cleanup - keystone_deploy_prep - keystone_deploy - keystone_deploy_cleanup - barbican_prep - barbican - barbican_cleanup - barbican_deploy_prep - barbican_deploy - barbican_deploy_validate - barbican_deploy_cleanup - mariadb - mariadb_cleanup - mariadb_deploy_prep - mariadb_deploy - mariadb_deploy_cleanup - placement_prep - placement - placement_cleanup - placement_deploy_prep - placement_deploy - placement_deploy_cleanup - glance_prep - glance - glance_cleanup - glance_deploy_prep - glance_deploy - glance_deploy_cleanup - ovn_prep - ovn - ovn_cleanup - ovn_deploy_prep - ovn_deploy - ovn_deploy_cleanup - neutron_prep - neutron - neutron_cleanup - neutron_deploy_prep - neutron_deploy - neutron_deploy_cleanup - cinder_prep - cinder - cinder_cleanup - cinder_deploy_prep - cinder_deploy - cinder_deploy_cleanup - rabbitmq_prep - rabbitmq - rabbitmq_cleanup - rabbitmq_deploy_prep - rabbitmq_deploy - rabbitmq_deploy_cleanup - ironic_prep - ironic - ironic_cleanup - ironic_deploy_prep - ironic_deploy - ironic_deploy_cleanup - octavia_prep - octavia - octavia_cleanup - octavia_deploy_prep - octavia_deploy - octavia_deploy_cleanup - designate_prep - designate - designate_cleanup - designate_deploy_prep - designate_deploy - designate_deploy_cleanup - nova_prep - nova - nova_cleanup - nova_deploy_prep - nova_deploy - nova_deploy_cleanup - mariadb_kuttl_run - mariadb_kuttl - kuttl_db_prep - kuttl_db_cleanup - kuttl_common_prep - kuttl_common_cleanup - keystone_kuttl_run - keystone_kuttl - barbican_kuttl_run - barbican_kuttl - placement_kuttl_run - placement_kuttl - cinder_kuttl_run - cinder_kuttl - neutron_kuttl_run - neutron_kuttl - octavia_kuttl_run - octavia_kuttl - designate_kuttl - designate_kuttl_run - ovn_kuttl_run - ovn_kuttl - infra_kuttl_run - infra_kuttl - ironic_kuttl_run - ironic_kuttl - ironic_kuttl_crc - heat_kuttl_run - heat_kuttl - heat_kuttl_crc - ansibleee_kuttl_run - ansibleee_kuttl_cleanup - ansibleee_kuttl_prep - ansibleee_kuttl - glance_kuttl_run - glance_kuttl - manila_kuttl_run - manila_kuttl - swift_kuttl_run - swift_kuttl - horizon_kuttl_run - horizon_kuttl - openstack_kuttl_run - openstack_kuttl - mariadb_chainsaw_run - mariadb_chainsaw - horizon_prep - horizon - horizon_cleanup - horizon_deploy_prep - 
horizon_deploy - horizon_deploy_cleanup - heat_prep - heat - heat_cleanup - heat_deploy_prep - heat_deploy - heat_deploy_cleanup - ansibleee_prep - ansibleee - ansibleee_cleanup - baremetal_prep - baremetal - baremetal_cleanup - ceph_help - ceph - ceph_cleanup - rook_prep - rook - rook_deploy_prep - rook_deploy - rook_crc_disk - rook_cleanup - lvms - nmstate - nncp - nncp_cleanup - netattach - netattach_cleanup - metallb - metallb_config - metallb_config_cleanup - metallb_cleanup - loki - loki_cleanup - loki_deploy - loki_deploy_cleanup - netobserv - netobserv_cleanup - netobserv_deploy - netobserv_deploy_cleanup - manila_prep - manila - manila_cleanup - manila_deploy_prep - manila_deploy - manila_deploy_cleanup - telemetry_prep - telemetry - telemetry_cleanup - telemetry_deploy_prep - telemetry_deploy - telemetry_deploy_cleanup - telemetry_kuttl_run - telemetry_kuttl - swift_prep - swift - swift_cleanup - swift_deploy_prep - swift_deploy - swift_deploy_cleanup - certmanager - certmanager_cleanup - validate_marketplace - redis_deploy_prep - redis_deploy - redis_deploy_cleanup - set_slower_etcd_profile /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/Makefile: - help - download_tools - nfs - nfs_cleanup - crc - crc_cleanup - crc_scrub - crc_attach_default_interface - crc_attach_default_interface_cleanup - ipv6_lab_network - ipv6_lab_network_cleanup - ipv6_lab_nat64_router - ipv6_lab_nat64_router_cleanup - ipv6_lab_sno - ipv6_lab_sno_cleanup - ipv6_lab - ipv6_lab_cleanup - attach_default_interface - attach_default_interface_cleanup - network_isolation_bridge - network_isolation_bridge_cleanup - edpm_baremetal_compute - edpm_compute - edpm_compute_bootc - edpm_ansible_runner - edpm_computes_bgp - edpm_compute_repos - edpm_compute_cleanup - edpm_networker - edpm_networker_cleanup - edpm_deploy_instance - tripleo_deploy - standalone_deploy - standalone_sync - standalone - standalone_cleanup - standalone_snapshot - standalone_revert - cifmw_prepare - cifmw_cleanup - bmaas_network - bmaas_network_cleanup - bmaas_route_crc_and_crc_bmaas_networks - bmaas_route_crc_and_crc_bmaas_networks_cleanup - bmaas_crc_attach_network - bmaas_crc_attach_network_cleanup - bmaas_crc_baremetal_bridge - bmaas_crc_baremetal_bridge_cleanup - bmaas_baremetal_net_nad - bmaas_baremetal_net_nad_cleanup - bmaas_metallb - bmaas_metallb_cleanup - bmaas_virtual_bms - bmaas_virtual_bms_cleanup - bmaas_sushy_emulator - bmaas_sushy_emulator_cleanup - bmaas_sushy_emulator_wait - bmaas_generate_nodes_yaml - bmaas - bmaas_cleanup failed: false success: true 2025-12-12 16:24:54,368 p=31591 u=zuul n=ansible | TASK [install_yamls : Create the install_yamls parameters file dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml, content={{ { 'cifmw_install_yamls_environment': cifmw_install_yamls_environment, 'cifmw_install_yamls_defaults': cifmw_install_yamls_defaults } | to_nice_yaml }}, mode=0644] *** 2025-12-12 16:24:54,368 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.030) 0:01:01.913 ******* 2025-12-12 16:24:54,368 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.030) 0:01:01.912 ******* 2025-12-12 16:24:54,758 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:54,766 p=31591 u=zuul n=ansible | TASK [install_yamls : Create empty cifmw_install_yamls_environment if needed cifmw_install_yamls_environment={}] *** 2025-12-12 16:24:54,766 p=31591 u=zuul n=ansible | Friday 12 
December 2025 16:24:54 +0000 (0:00:00.397) 0:01:02.311 ******* 2025-12-12 16:24:54,766 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.397) 0:01:02.310 ******* 2025-12-12 16:24:54,784 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:54,800 p=31591 u=zuul n=ansible | TASK [discover_latest_image : Get latest image url={{ cifmw_discover_latest_image_base_url }}, image_prefix={{ cifmw_discover_latest_image_qcow_prefix }}, images_file={{ cifmw_discover_latest_image_images_file }}] *** 2025-12-12 16:24:54,800 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.034) 0:01:02.345 ******* 2025-12-12 16:24:54,800 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.034) 0:01:02.344 ******* 2025-12-12 16:24:55,338 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:55,344 p=31591 u=zuul n=ansible | TASK [discover_latest_image : Export facts accordingly cifmw_discovered_image_name={{ discovered_image['data']['image_name'] }}, cifmw_discovered_image_url={{ discovered_image['data']['image_url'] }}, cifmw_discovered_hash={{ discovered_image['data']['hash'] }}, cifmw_discovered_hash_algorithm={{ discovered_image['data']['hash_algorithm'] }}, cacheable=True] *** 2025-12-12 16:24:55,345 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.544) 0:01:02.890 ******* 2025-12-12 16:24:55,345 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.544) 0:01:02.888 ******* 2025-12-12 16:24:55,366 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:55,382 p=31591 u=zuul n=ansible | TASK [Create artifacts with custom params mode=0644, dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/custom-params.yml, content={{ ci_framework_params | to_nice_yaml }}] *** 2025-12-12 16:24:55,382 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.037) 0:01:02.927 ******* 2025-12-12 16:24:55,383 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.037) 0:01:02.926 ******* 2025-12-12 16:24:55,817 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | localhost : ok=43 changed=23 unreachable=0 failed=0 skipped=40 rescued=0 ignored=0 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.458) 0:01:03.386 ******* 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | =============================================================================== 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | ci_setup : Install needed packages ------------------------------------- 30.38s 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | repo_setup : Initialize python venv and install requirements ------------ 8.61s 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | ci_setup : Install openshift client ------------------------------------- 5.34s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_ca : Update ca bundle ------------------------------------------- 1.58s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Make sure git-core package is installed -------------------- 1.54s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Get repo-setup repository ---------------------------------- 1.42s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ci_setup : Manage 
directories ------------------------------------------- 1.10s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | Gathering Facts --------------------------------------------------------- 1.06s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Install repo-setup package --------------------------------- 0.82s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_ca : Ensure target directory exists ----------------------------- 0.75s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Dump full hash in delorean.repo.md5 file ------------------- 0.69s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Run repo-setup --------------------------------------------- 0.66s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_yamls : Get environment structure ------------------------------- 0.64s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_yamls : Ensure directories exist -------------------------------- 0.58s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Ensure directories are present ----------------------------- 0.56s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | discover_latest_image : Get latest image -------------------------------- 0.54s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Remove existing repos from /etc/yum.repos.d directory ------ 0.49s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Run repo-setup-get-hash ------------------------------------ 0.49s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Cleanup existing metadata ---------------------------------- 0.46s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | Create artifacts with custom params ------------------------------------- 0.46s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.459) 0:01:03.386 ******* 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | =============================================================================== 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ci_setup --------------------------------------------------------------- 38.29s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup ------------------------------------------------------------- 17.66s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_yamls ----------------------------------------------------------- 2.61s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_ca -------------------------------------------------------------- 2.47s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | gather_facts ------------------------------------------------------------ 1.06s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | discover_latest_image --------------------------------------------------- 0.58s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ansible.builtin.copy ---------------------------------------------------- 0.46s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ansible.builtin.include_role -------------------------------------------- 0.11s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ansible.builtin.set_fact ------------------------------------------------ 0.11s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | total ------------------------------------------------------------------ 63.35s 2025-12-12 16:24:57,434 p=32453 u=zuul n=ansible | PLAY [Run pre_infra hooks] 
***************************************************** 2025-12-12 16:24:57,470 p=32453 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-12-12 16:24:57,470 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.054) 0:00:00.054 ******* 2025-12-12 16:24:57,470 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.053) 0:00:00.053 ******* 2025-12-12 16:24:57,552 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:57,564 p=32453 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-12-12 16:24:57,564 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.094) 0:00:00.149 ******* 2025-12-12 16:24:57,564 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.094) 0:00:00.148 ******* 2025-12-12 16:24:57,622 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:57,633 p=32453 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_infra _raw_params={{ hook.type }}.yml] *** 2025-12-12 16:24:57,634 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.069) 0:00:00.218 ******* 2025-12-12 16:24:57,634 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.069) 0:00:00.217 ******* 2025-12-12 16:24:57,694 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:57,741 p=32453 u=zuul n=ansible | PLAY [Prepare host virtualization] ********************************************* 2025-12-12 16:24:57,775 p=32453 u=zuul n=ansible | TASK [Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] ****** 2025-12-12 16:24:57,775 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.141) 0:00:00.360 ******* 2025-12-12 16:24:57,775 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.141) 0:00:00.359 ******* 2025-12-12 16:24:57,906 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:57,925 p=32453 u=zuul n=ansible | TASK [Ensure libvirt is present/configured name=libvirt_manager] *************** 2025-12-12 16:24:57,925 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.150) 0:00:00.510 ******* 2025-12-12 16:24:57,925 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.150) 0:00:00.509 ******* 2025-12-12 16:24:57,945 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:57,955 p=32453 u=zuul n=ansible | TASK [Prepare OpenShift provisioner node name=openshift_provisioner_node] ****** 2025-12-12 16:24:57,955 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.029) 0:00:00.539 ******* 2025-12-12 16:24:57,955 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.029) 0:00:00.539 ******* 2025-12-12 16:24:57,973 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,010 p=32453 u=zuul n=ansible | PLAY [Prepare the platform] **************************************************** 2025-12-12 16:24:58,036 p=32453 u=zuul n=ansible | TASK [Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] ****** 2025-12-12 16:24:58,036 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.081) 0:00:00.621
******* 2025-12-12 16:24:58,036 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.081) 0:00:00.620 ******* 2025-12-12 16:24:58,079 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:58,094 p=32453 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Environment Definition file existence path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2025-12-12 16:24:58,094 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.057) 0:00:00.679 ******* 2025-12-12 16:24:58,094 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.057) 0:00:00.678 ******* 2025-12-12 16:24:58,388 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:58,401 p=32453 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Definition file existence that=['_net_env_def_stat.stat.exists'], msg=Ensure that the Networking Environment Definition file exists in {{ cifmw_networking_mapper_networking_env_def_path }}, quiet=True] *** 2025-12-12 16:24:58,402 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.307) 0:00:00.986 ******* 2025-12-12 16:24:58,402 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.307) 0:00:00.985 ******* 2025-12-12 16:24:58,431 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,443 p=32453 u=zuul n=ansible | TASK [networking_mapper : Load the Networking Definition from file path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2025-12-12 16:24:58,444 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.042) 0:00:01.028 ******* 2025-12-12 16:24:58,444 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.042) 0:00:01.027 ******* 2025-12-12 16:24:58,466 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,479 p=32453 u=zuul n=ansible | TASK [networking_mapper : Set cifmw_networking_env_definition is present cifmw_networking_env_definition={{ _net_env_def_slurp['content'] | b64decode | from_yaml }}, cacheable=True] *** 2025-12-12 16:24:58,479 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.035) 0:00:01.063 ******* 2025-12-12 16:24:58,479 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.035) 0:00:01.062 ******* 2025-12-12 16:24:58,513 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,535 p=32453 u=zuul n=ansible | TASK [Deploy OCP using Hive name=hive] ***************************************** 2025-12-12 16:24:58,535 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.056) 0:00:01.120 ******* 2025-12-12 16:24:58,535 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.056) 0:00:01.119 ******* 2025-12-12 16:24:58,557 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,568 p=32453 u=zuul n=ansible | TASK [Prepare CRC name=rhol_crc] *********************************************** 2025-12-12 16:24:58,568 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.032) 0:00:01.152 ******* 2025-12-12 16:24:58,568 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.032) 0:00:01.152 ******* 2025-12-12 16:24:58,599 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,610 p=32453 u=zuul n=ansible | TASK [Deploy OpenShift cluster using dev-scripts name=devscripts] ************** 2025-12-12 16:24:58,610 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.042) 0:00:01.195
******* 2025-12-12 16:24:58,610 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.042) 0:00:01.194 ******* 2025-12-12 16:24:58,632 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,644 p=32453 u=zuul n=ansible | TASK [openshift_login : Ensure output directory exists path={{ cifmw_openshift_login_basedir }}/artifacts, state=directory, mode=0755] *** 2025-12-12 16:24:58,644 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.034) 0:00:01.229 ******* 2025-12-12 16:24:58,644 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.034) 0:00:01.228 ******* 2025-12-12 16:24:59,049 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:59,061 p=32453 u=zuul n=ansible | TASK [openshift_login : OpenShift login _raw_params=login.yml] ***************** 2025-12-12 16:24:59,061 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.416) 0:00:01.645 ******* 2025-12-12 16:24:59,061 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.416) 0:00:01.645 ******* 2025-12-12 16:24:59,095 p=32453 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/login.yml for localhost 2025-12-12 16:24:59,108 p=32453 u=zuul n=ansible | TASK [openshift_login : Check if the password file is present path={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2025-12-12 16:24:59,109 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.047) 0:00:01.693 ******* 2025-12-12 16:24:59,109 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.047) 0:00:01.692 ******* 2025-12-12 16:24:59,132 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,141 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch user password content src={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2025-12-12 16:24:59,141 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:01.726 ******* 2025-12-12 16:24:59,141 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:01.725 ******* 2025-12-12 16:24:59,164 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,173 p=32453 u=zuul n=ansible | TASK [openshift_login : Set user password as a fact cifmw_openshift_login_password={{ cifmw_openshift_login_password_file_slurp.content | b64decode }}, cacheable=True] *** 2025-12-12 16:24:59,174 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:01.758 ******* 2025-12-12 16:24:59,174 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:01.757 ******* 2025-12-12 16:24:59,193 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,203 p=32453 u=zuul n=ansible | TASK [openshift_login : Set role variables cifmw_openshift_login_kubeconfig={{ cifmw_openshift_login_kubeconfig | default(cifmw_openshift_kubeconfig) | default( ansible_env.KUBECONFIG if 'KUBECONFIG' in ansible_env else cifmw_openshift_login_kubeconfig_default_path ) | trim }}, cifmw_openshift_login_user={{ cifmw_openshift_login_user | default(cifmw_openshift_user) | default(omit) }}, cifmw_openshift_login_password={{ cifmw_openshift_login_password | default(cifmw_openshift_password) | default(omit) }}, cifmw_openshift_login_api={{ cifmw_openshift_login_api | default(cifmw_openshift_api) | default(omit) }}, cifmw_openshift_login_cert_login={{ 
cifmw_openshift_login_cert_login | default(false)}}, cifmw_openshift_login_provided_token={{ cifmw_openshift_provided_token | default(omit) }}, cacheable=True] *** 2025-12-12 16:24:59,203 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.029) 0:00:01.788 ******* 2025-12-12 16:24:59,203 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.029) 0:00:01.787 ******* 2025-12-12 16:24:59,230 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:59,270 p=32453 u=zuul n=ansible | TASK [openshift_login : Check if kubeconfig exists path={{ cifmw_openshift_login_kubeconfig }}] *** 2025-12-12 16:24:59,270 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.066) 0:00:01.854 ******* 2025-12-12 16:24:59,270 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.066) 0:00:01.853 ******* 2025-12-12 16:24:59,440 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:59,453 p=32453 u=zuul n=ansible | TASK [openshift_login : Assert that enough data is provided to log in to OpenShift that=cifmw_openshift_login_kubeconfig_stat.stat.exists or (cifmw_openshift_login_provided_token is defined and cifmw_openshift_login_provided_token != '') or ( (cifmw_openshift_login_user is defined) and (cifmw_openshift_login_password is defined) and (cifmw_openshift_login_api is defined) ), msg=If an existing kubeconfig is not provided user/pwd or provided/initial token and API URL must be given] *** 2025-12-12 16:24:59,453 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.183) 0:00:02.038 ******* 2025-12-12 16:24:59,453 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.183) 0:00:02.037 ******* 2025-12-12 16:24:59,477 p=32453 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2025-12-12 16:24:59,488 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch kubeconfig content src={{ cifmw_openshift_login_kubeconfig }}] *** 2025-12-12 16:24:59,489 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.073 ******* 2025-12-12 16:24:59,489 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.072 ******* 2025-12-12 16:24:59,511 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,523 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch x509 key based users cifmw_openshift_login_key_based_users={{ ( cifmw_openshift_login_kubeconfig_content_b64.content | b64decode | from_yaml ). 
users | default([]) | selectattr('user.client-certificate-data', 'defined') | map(attribute="name") | map("split", "/") | map("first") }}, cacheable=True] *** 2025-12-12 16:24:59,524 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.034) 0:00:02.108 ******* 2025-12-12 16:24:59,524 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.034) 0:00:02.107 ******* 2025-12-12 16:24:59,546 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,559 p=32453 u=zuul n=ansible | TASK [openshift_login : Assign key based user if not provided and available cifmw_openshift_login_user={{ (cifmw_openshift_login_assume_cert_system_user | ternary('system:', '')) + (cifmw_openshift_login_key_based_users | map('replace', 'system:', '') | unique | first) }}, cifmw_openshift_login_cert_login=True, cacheable=True] *** 2025-12-12 16:24:59,559 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.144 ******* 2025-12-12 16:24:59,559 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.143 ******* 2025-12-12 16:24:59,580 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,591 p=32453 u=zuul n=ansible | TASK [openshift_login : Set the retry count cifmw_openshift_login_retries_cnt={{ 0 if cifmw_openshift_login_retries_cnt is undefined else cifmw_openshift_login_retries_cnt|int + 1 }}] *** 2025-12-12 16:24:59,591 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:02.176 ******* 2025-12-12 16:24:59,591 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:02.175 ******* 2025-12-12 16:24:59,614 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:59,627 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch token _raw_params=try_login.yml] ***************** 2025-12-12 16:24:59,627 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.212 ******* 2025-12-12 16:24:59,627 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.211 ******* 2025-12-12 16:24:59,659 p=32453 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/try_login.yml for localhost 2025-12-12 16:24:59,677 p=32453 u=zuul n=ansible | TASK [openshift_login : Try get OpenShift access token _raw_params=oc whoami -t] *** 2025-12-12 16:24:59,677 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.049) 0:00:02.262 ******* 2025-12-12 16:24:59,677 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.049) 0:00:02.261 ******* 2025-12-12 16:24:59,695 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,712 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift token output_dir={{ cifmw_openshift_login_basedir }}/artifacts, script=oc login {%- if cifmw_openshift_login_provided_token is not defined %} {%- if cifmw_openshift_login_user is defined %} -u {{ cifmw_openshift_login_user }} {%- endif %} {%- if cifmw_openshift_login_password is defined %} -p {{ cifmw_openshift_login_password }} {%- endif %} {% else %} --token={{ cifmw_openshift_login_provided_token }} {%- endif %} {%- if cifmw_openshift_login_skip_tls_verify|bool %} --insecure-skip-tls-verify=true {%- endif %} {%- if cifmw_openshift_login_api is defined %} {{ cifmw_openshift_login_api }} {%- endif %}] *** 2025-12-12 16:24:59,712 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 
(0:00:00.035) 0:00:02.297 ******* 2025-12-12 16:24:59,713 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.296 ******* 2025-12-12 16:24:59,778 p=32453 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_000_fetch_openshift.log 2025-12-12 16:25:00,188 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:00,198 p=32453 u=zuul n=ansible | TASK [openshift_login : Ensure kubeconfig is provided that=cifmw_openshift_login_kubeconfig != ""] *** 2025-12-12 16:25:00,198 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.486) 0:00:02.783 ******* 2025-12-12 16:25:00,199 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.486) 0:00:02.782 ******* 2025-12-12 16:25:00,219 p=32453 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2025-12-12 16:25:00,230 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch new OpenShift access token _raw_params=oc whoami -t] *** 2025-12-12 16:25:00,230 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.031) 0:00:02.815 ******* 2025-12-12 16:25:00,230 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.031) 0:00:02.814 ******* 2025-12-12 16:25:00,701 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:00,711 p=32453 u=zuul n=ansible | TASK [openshift_login : Set new OpenShift token cifmw_openshift_login_token={{ (not cifmw_openshift_login_new_token_out.skipped | default(false)) | ternary(cifmw_openshift_login_new_token_out.stdout, cifmw_openshift_login_whoami_out.stdout) }}, cacheable=True] *** 2025-12-12 16:25:00,712 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.481) 0:00:03.296 ******* 2025-12-12 16:25:00,712 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.481) 0:00:03.295 ******* 2025-12-12 16:25:00,738 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:00,748 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift API URL _raw_params=oc whoami --show-server=true] *** 2025-12-12 16:25:00,748 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.036) 0:00:03.333 ******* 2025-12-12 16:25:00,748 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.036) 0:00:03.332 ******* 2025-12-12 16:25:01,060 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:01,073 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift kubeconfig context _raw_params=oc whoami -c] *** 2025-12-12 16:25:01,074 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.325) 0:00:03.658 ******* 2025-12-12 16:25:01,074 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.325) 0:00:03.657 ******* 2025-12-12 16:25:01,379 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:01,389 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift current user _raw_params=oc whoami] **** 2025-12-12 16:25:01,389 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.315) 0:00:03.974 ******* 2025-12-12 16:25:01,390 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.315) 0:00:03.973 ******* 2025-12-12 16:25:01,668 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:01,732 p=32453 u=zuul n=ansible | TASK [openshift_login : Set OpenShift user, context and API facts cifmw_openshift_login_api={{ cifmw_openshift_login_api_out.stdout }}, 
cifmw_openshift_login_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_login_user={{ _oauth_user }}, cifmw_openshift_kubeconfig={{ cifmw_openshift_login_kubeconfig }}, cifmw_openshift_api={{ cifmw_openshift_login_api_out.stdout }}, cifmw_openshift_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_user={{ _oauth_user }}, cifmw_openshift_token={{ cifmw_openshift_login_token | default(omit) }}, cifmw_install_yamls_environment={{ ( cifmw_install_yamls_environment | combine({'KUBECONFIG': cifmw_openshift_login_kubeconfig}) ) if cifmw_install_yamls_environment is defined else omit }}, cacheable=True] *** 2025-12-12 16:25:01,733 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.343) 0:00:04.317 ******* 2025-12-12 16:25:01,733 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.343) 0:00:04.316 ******* 2025-12-12 16:25:01,786 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:01,797 p=32453 u=zuul n=ansible | TASK [openshift_login : Create the openshift_login parameters file dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/openshift-login-params.yml, content={{ cifmw_openshift_login_params_content | from_yaml | to_nice_yaml }}, mode=0600] *** 2025-12-12 16:25:01,798 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.064) 0:00:04.382 ******* 2025-12-12 16:25:01,798 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.064) 0:00:04.381 ******* 2025-12-12 16:25:02,365 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:02,376 p=32453 u=zuul n=ansible | TASK [openshift_login : Read the install yamls parameters file path={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml] *** 2025-12-12 16:25:02,376 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:02 +0000 (0:00:00.578) 0:00:04.960 ******* 2025-12-12 16:25:02,376 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:02 +0000 (0:00:00.578) 0:00:04.960 ******* 2025-12-12 16:25:02,690 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:02,703 p=32453 u=zuul n=ansible | TASK [openshift_login : Append the KUBECONFIG to the install yamls parameters content={{ cifmw_openshift_login_install_yamls_artifacts_slurp['content'] | b64decode | from_yaml | combine( { 'cifmw_install_yamls_environment': { 'KUBECONFIG': cifmw_openshift_login_kubeconfig } }, recursive=true) | to_nice_yaml }}, dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml, mode=0600] *** 2025-12-12 16:25:02,704 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:02 +0000 (0:00:00.327) 0:00:05.288 ******* 2025-12-12 16:25:02,704 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:02 +0000 (0:00:00.327) 0:00:05.287 ******* 2025-12-12 16:25:03,147 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:03,169 p=32453 u=zuul n=ansible | TASK [openshift_setup : Ensure output directory exists path={{ cifmw_openshift_setup_basedir }}/artifacts, state=directory, mode=0755] *** 2025-12-12 16:25:03,169 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.465) 0:00:05.753 ******* 2025-12-12 16:25:03,169 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.465) 0:00:05.753 ******* 2025-12-12 16:25:03,374 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:03,389 p=32453 u=zuul n=ansible | 
TASK [openshift_setup : Fetch namespaces to create cifmw_openshift_setup_namespaces={{ (( ([cifmw_install_yamls_defaults['NAMESPACE']] + ([cifmw_install_yamls_defaults['OPERATOR_NAMESPACE']] if 'OPERATOR_NAMESPACE' is in cifmw_install_yamls_defaults else []) ) if cifmw_install_yamls_defaults is defined else [] ) + cifmw_openshift_setup_create_namespaces) | unique }}] *** 2025-12-12 16:25:03,389 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.220) 0:00:05.974 ******* 2025-12-12 16:25:03,389 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.220) 0:00:05.973 ******* 2025-12-12 16:25:03,437 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:03,452 p=32453 u=zuul n=ansible | TASK [openshift_setup : Create required namespaces kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ item }}, kind=Namespace, state=present] *** 2025-12-12 16:25:03,452 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.062) 0:00:06.036 ******* 2025-12-12 16:25:03,452 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.062) 0:00:06.035 ******* 2025-12-12 16:25:04,407 p=32453 u=zuul n=ansible | changed: [localhost] => (item=openstack) 2025-12-12 16:25:05,092 p=32453 u=zuul n=ansible | changed: [localhost] => (item=openstack-operators) 2025-12-12 16:25:05,110 p=32453 u=zuul n=ansible | TASK [openshift_setup : Get internal OpenShift registry route kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, kind=Route, name=default-route, namespace=openshift-image-registry] *** 2025-12-12 16:25:05,110 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:05 +0000 (0:00:01.658) 0:00:07.695 ******* 2025-12-12 16:25:05,110 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:05 +0000 (0:00:01.658) 0:00:07.694 ******* 2025-12-12 16:25:06,268 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:06,280 p=32453 u=zuul n=ansible | TASK [openshift_setup : Allow anonymous image-pulls in CRC registry for targeted namespaces state=present, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'kind': 'RoleBinding', 'apiVersion': 'rbac.authorization.k8s.io/v1', 'metadata': {'name': 'system:image-puller', 'namespace': '{{ item }}'}, 'subjects': [{'kind': 'User', 'name': 'system:anonymous'}, {'kind': 'User', 'name': 'system:unauthenticated'}], 'roleRef': {'kind': 'ClusterRole', 'name': 'system:image-puller'}}] *** 2025-12-12 16:25:06,281 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:06 +0000 (0:00:01.170) 0:00:08.865 ******* 2025-12-12 16:25:06,281 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:06 +0000 (0:00:01.170) 0:00:08.864 ******* 2025-12-12 16:25:07,027 p=32453 u=zuul n=ansible | changed: [localhost] => (item=openstack) 2025-12-12 16:25:07,763 p=32453 u=zuul n=ansible | changed: [localhost] => (item=openstack-operators) 2025-12-12 16:25:07,781 p=32453 u=zuul n=ansible | TASK [openshift_setup : Wait for the image registry to be ready kind=Deployment, name=image-registry, namespace=openshift-image-registry, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, wait=True, wait_sleep=10, 
wait_timeout=600, wait_condition={'type': 'Available', 'status': 'True'}] *** 2025-12-12 16:25:07,781 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:07 +0000 (0:00:01.500) 0:00:10.366 ******* 2025-12-12 16:25:07,782 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:07 +0000 (0:00:01.500) 0:00:10.365 ******* 2025-12-12 16:25:08,718 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:08,729 p=32453 u=zuul n=ansible | TASK [openshift_setup : Login into OpenShift internal registry output_dir={{ cifmw_openshift_setup_basedir }}/artifacts, script=podman login -u {{ cifmw_openshift_user }} -p {{ cifmw_openshift_token }} {%- if cifmw_openshift_setup_skip_internal_registry_tls_verify|bool %} --tls-verify=false {%- endif %} {{ cifmw_openshift_setup_registry_default_route.resources[0].spec.host }}] *** 2025-12-12 16:25:08,729 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:08 +0000 (0:00:00.947) 0:00:11.313 ******* 2025-12-12 16:25:08,729 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:08 +0000 (0:00:00.947) 0:00:11.313 ******* 2025-12-12 16:25:08,783 p=32453 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_001_login_into_openshift_internal.log 2025-12-12 16:25:08,970 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:08,980 p=32453 u=zuul n=ansible | TASK [Ensure we have custom CA installed on host role=install_ca] ************** 2025-12-12 16:25:08,980 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:08 +0000 (0:00:00.250) 0:00:11.564 ******* 2025-12-12 16:25:08,980 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:08 +0000 (0:00:00.250) 0:00:11.563 ******* 2025-12-12 16:25:09,011 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,024 p=32453 u=zuul n=ansible | TASK [openshift_setup : Update ca bundle _raw_params=update-ca-trust extract] *** 2025-12-12 16:25:09,024 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.044) 0:00:11.609 ******* 2025-12-12 16:25:09,024 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.044) 0:00:11.608 ******* 2025-12-12 16:25:09,049 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,060 p=32453 u=zuul n=ansible | TASK [openshift_setup : Slurp CAs file src={{ cifmw_openshift_setup_ca_bundle_path }}] *** 2025-12-12 16:25:09,060 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.036) 0:00:11.645 ******* 2025-12-12 16:25:09,060 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.036) 0:00:11.644 ******* 2025-12-12 16:25:09,114 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,124 p=32453 u=zuul n=ansible | TASK [openshift_setup : Create config map with registry CAs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'v1', 'kind': 'ConfigMap', 'metadata': {'namespace': 'openshift-config', 'name': 'registry-cas'}, 'data': '{{ _config_map_data | items2dict }}'}] *** 2025-12-12 16:25:09,124 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.064) 0:00:11.709 ******* 2025-12-12 16:25:09,125 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.064) 0:00:11.708 ******* 2025-12-12 16:25:09,147 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,156 p=32453 u=zuul n=ansible | TASK [openshift_setup : Install Red Hat CA for pulling images 
from internal registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'additionalTrustedCA': {'name': 'registry-cas'}}}] *** 2025-12-12 16:25:09,157 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.032) 0:00:11.741 ******* 2025-12-12 16:25:09,157 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.032) 0:00:11.740 ******* 2025-12-12 16:25:09,178 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,188 p=32453 u=zuul n=ansible | TASK [openshift_setup : Add insecure registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'registrySources': {'insecureRegistries': ['{{ cifmw_update_containers_registry }}'], 'allowedRegistries': '{{ all_registries }}'}}}] *** 2025-12-12 16:25:09,188 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.031) 0:00:11.772 ******* 2025-12-12 16:25:09,188 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.031) 0:00:11.771 ******* 2025-12-12 16:25:09,208 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,218 p=32453 u=zuul n=ansible | TASK [openshift_setup : Create a ICSP with repository digest mirrors kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'operator.openshift.io/v1alpha1', 'kind': 'ImageContentSourcePolicy', 'metadata': {'name': 'registry-digest-mirrors'}, 'spec': {'repositoryDigestMirrors': '{{ cifmw_openshift_setup_digest_mirrors }}'}}] *** 2025-12-12 16:25:09,218 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.030) 0:00:11.802 ******* 2025-12-12 16:25:09,218 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.030) 0:00:11.801 ******* 2025-12-12 16:25:09,243 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,256 p=32453 u=zuul n=ansible | TASK [openshift_setup : Gather network.operator info kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=operator.openshift.io/v1, kind=Network, name=cluster] *** 2025-12-12 16:25:09,257 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.038) 0:00:11.841 ******* 2025-12-12 16:25:09,257 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.038) 0:00:11.840 ******* 2025-12-12 16:25:10,059 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:10,073 p=32453 u=zuul n=ansible | TASK [openshift_setup : Patch network operator api_version=operator.openshift.io/v1, kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Network, name=cluster, persist_config=True, patch=[{'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/routingViaHost', 'value': True, 'op': 'replace'}, {'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/ipForwarding', 'value': 'Global', 'op': 'replace'}]] *** 2025-12-12 16:25:10,073 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:10 +0000 
(0:00:00.816) 0:00:12.658 ******* 2025-12-12 16:25:10,073 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:10 +0000 (0:00:00.816) 0:00:12.657 ******* 2025-12-12 16:25:11,085 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:11,097 p=32453 u=zuul n=ansible | TASK [openshift_setup : Patch samples registry configuration kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=samples.operator.openshift.io/v1, kind=Config, name=cluster, patch=[{'op': 'replace', 'path': '/spec/samplesRegistry', 'value': 'registry.redhat.io'}]] *** 2025-12-12 16:25:11,097 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:01.023) 0:00:13.682 ******* 2025-12-12 16:25:11,097 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:01.023) 0:00:13.681 ******* 2025-12-12 16:25:11,815 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:11,828 p=32453 u=zuul n=ansible | TASK [openshift_setup : Delete the pods from openshift-marketplace namespace kind=Pod, state=absent, delete_all=True, kubeconfig={{ cifmw_openshift_kubeconfig }}, namespace=openshift-marketplace] *** 2025-12-12 16:25:11,828 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.731) 0:00:14.413 ******* 2025-12-12 16:25:11,829 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.731) 0:00:14.412 ******* 2025-12-12 16:25:11,844 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:11,865 p=32453 u=zuul n=ansible | TASK [openshift_setup : Wait for openshift-marketplace pods to be running _raw_params=oc wait pod --all --for=condition=Ready -n openshift-marketplace --timeout=1m] *** 2025-12-12 16:25:11,865 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.036) 0:00:14.450 ******* 2025-12-12 16:25:11,865 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.036) 0:00:14.449 ******* 2025-12-12 16:25:11,896 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:11,944 p=32453 u=zuul n=ansible | TASK [Deploy Observability operator. 
name=openshift_obs] *********************** 2025-12-12 16:25:11,944 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.078) 0:00:14.528 ******* 2025-12-12 16:25:11,944 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.078) 0:00:14.527 ******* 2025-12-12 16:25:11,976 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:11,989 p=32453 u=zuul n=ansible | TASK [Deploy Metal3 BMHs name=deploy_bmh] ************************************** 2025-12-12 16:25:11,989 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.045) 0:00:14.574 ******* 2025-12-12 16:25:11,989 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.045) 0:00:14.573 ******* 2025-12-12 16:25:12,020 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,034 p=32453 u=zuul n=ansible | TASK [Install certmanager operator role name=cert_manager] ********************* 2025-12-12 16:25:12,034 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.044) 0:00:14.618 ******* 2025-12-12 16:25:12,034 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.044) 0:00:14.618 ******* 2025-12-12 16:25:12,067 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,078 p=32453 u=zuul n=ansible | TASK [Configure hosts networking using nmstate name=ci_nmstate] **************** 2025-12-12 16:25:12,079 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.044) 0:00:14.663 ******* 2025-12-12 16:25:12,079 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.044) 0:00:14.662 ******* 2025-12-12 16:25:12,099 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,111 p=32453 u=zuul n=ansible | TASK [Configure multus networks name=ci_multus] ******************************** 2025-12-12 16:25:12,111 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.032) 0:00:14.696 ******* 2025-12-12 16:25:12,112 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.032) 0:00:14.695 ******* 2025-12-12 16:25:12,137 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,150 p=32453 u=zuul n=ansible | TASK [Deploy Sushy Emulator service pod name=sushy_emulator] ******************* 2025-12-12 16:25:12,150 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.038) 0:00:14.734 ******* 2025-12-12 16:25:12,150 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.038) 0:00:14.734 ******* 2025-12-12 16:25:12,175 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,191 p=32453 u=zuul n=ansible | TASK [Setup Libvirt on controller name=libvirt_manager] ************************ 2025-12-12 16:25:12,191 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.041) 0:00:14.776 ******* 2025-12-12 16:25:12,191 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.041) 0:00:14.775 ******* 2025-12-12 16:25:12,223 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,234 p=32453 u=zuul n=ansible | TASK [Prepare container package builder name=pkg_build] ************************ 2025-12-12 16:25:12,235 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.043) 0:00:14.819 ******* 2025-12-12 16:25:12,235 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.043) 0:00:14.818 ******* 2025-12-12 16:25:12,262 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 
16:25:12,274 p=32453 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-12-12 16:25:12,275 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.039) 0:00:14.859 ******* 2025-12-12 16:25:12,275 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.039) 0:00:14.858 ******* 2025-12-12 16:25:12,333 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:12,348 p=32453 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-12-12 16:25:12,348 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.073) 0:00:14.933 ******* 2025-12-12 16:25:12,348 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.073) 0:00:14.932 ******* 2025-12-12 16:25:12,420 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:12,431 p=32453 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_infra _raw_params={{ hook.type }}.yml] *** 2025-12-12 16:25:12,431 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.083) 0:00:15.016 ******* 2025-12-12 16:25:12,431 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.082) 0:00:15.015 ******* 2025-12-12 16:25:12,526 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,601 p=32453 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2025-12-12 16:25:12,601 p=32453 u=zuul n=ansible | localhost : ok=35 changed=12 unreachable=0 failed=0 skipped=34 rescued=0 ignored=0 2025-12-12 16:25:12,601 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.170) 0:00:15.186 ******* 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | =============================================================================== 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Create required namespaces ---------------------------- 1.66s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Allow anonymous image-pulls in CRC registry for targeted namespaces --- 1.50s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Get internal OpenShift registry route ----------------- 1.17s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Patch network operator -------------------------------- 1.02s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Wait for the image registry to be ready --------------- 0.95s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Gather network.operator info -------------------------- 0.82s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Patch samples registry configuration ------------------ 0.73s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Create the openshift_login parameters file ------------ 0.58s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch OpenShift token --------------------------------- 0.49s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch new OpenShift access token ---------------------- 0.48s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Append the 
KUBECONFIG to the install yamls parameters --- 0.47s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Ensure output directory exists ------------------------ 0.42s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch OpenShift current user -------------------------- 0.34s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Read the install yamls parameters file ---------------- 0.33s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch OpenShift API URL ------------------------------- 0.33s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch OpenShift kubeconfig context -------------------- 0.32s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | networking_mapper : Check for Networking Environment Definition file existence --- 0.31s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Login into OpenShift internal registry ---------------- 0.25s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Ensure output directory exists ------------------------ 0.22s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Check if kubeconfig exists ---------------------------- 0.18s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.171) 0:00:15.186 ******* 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | =============================================================================== 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup --------------------------------------------------------- 8.77s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login --------------------------------------------------------- 4.52s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | run_hook ---------------------------------------------------------------- 0.63s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | ansible.builtin.include_role -------------------------------------------- 0.55s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | networking_mapper ------------------------------------------------------- 0.44s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | ansible.builtin.include_vars -------------------------------------------- 0.21s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | total ------------------------------------------------------------------ 15.13s 2025-12-12 16:25:31,617 p=33051 u=zuul n=ansible | Starting galaxy collection install process 2025-12-12 16:25:31,637 p=33051 u=zuul n=ansible | Process install dependency map 2025-12-12 16:25:45,294 p=33051 u=zuul n=ansible | Starting collection install process 2025-12-12 16:25:45,294 p=33051 u=zuul n=ansible | Installing 'cifmw.general:1.0.0+b9f05e2b' to '/home/zuul/.ansible/collections/ansible_collections/cifmw/general' 2025-12-12 16:25:45,929 p=33051 u=zuul n=ansible | Created collection for cifmw.general:1.0.0+b9f05e2b at /home/zuul/.ansible/collections/ansible_collections/cifmw/general 2025-12-12 16:25:45,929 p=33051 u=zuul n=ansible | cifmw.general:1.0.0+b9f05e2b was installed successfully 2025-12-12 16:25:45,929 p=33051 u=zuul n=ansible | Installing 'containers.podman:1.16.2' to '/home/zuul/.ansible/collections/ansible_collections/containers/podman' 2025-12-12 16:25:46,006 p=33051 u=zuul n=ansible | Created collection for containers.podman:1.16.2 at 
/home/zuul/.ansible/collections/ansible_collections/containers/podman 2025-12-12 16:25:46,006 p=33051 u=zuul n=ansible | containers.podman:1.16.2 was installed successfully 2025-12-12 16:25:46,006 p=33051 u=zuul n=ansible | Installing 'community.general:10.0.1' to '/home/zuul/.ansible/collections/ansible_collections/community/general' 2025-12-12 16:25:46,994 p=33051 u=zuul n=ansible | Created collection for community.general:10.0.1 at /home/zuul/.ansible/collections/ansible_collections/community/general 2025-12-12 16:25:46,994 p=33051 u=zuul n=ansible | community.general:10.0.1 was installed successfully 2025-12-12 16:25:46,995 p=33051 u=zuul n=ansible | Installing 'ansible.posix:1.6.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/posix' 2025-12-12 16:25:47,068 p=33051 u=zuul n=ansible | Created collection for ansible.posix:1.6.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/posix 2025-12-12 16:25:47,068 p=33051 u=zuul n=ansible | ansible.posix:1.6.2 was installed successfully 2025-12-12 16:25:47,068 p=33051 u=zuul n=ansible | Installing 'ansible.utils:5.1.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/utils' 2025-12-12 16:25:47,197 p=33051 u=zuul n=ansible | Created collection for ansible.utils:5.1.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/utils 2025-12-12 16:25:47,198 p=33051 u=zuul n=ansible | ansible.utils:5.1.2 was installed successfully 2025-12-12 16:25:47,198 p=33051 u=zuul n=ansible | Installing 'community.libvirt:1.3.0' to '/home/zuul/.ansible/collections/ansible_collections/community/libvirt' 2025-12-12 16:25:47,229 p=33051 u=zuul n=ansible | Created collection for community.libvirt:1.3.0 at /home/zuul/.ansible/collections/ansible_collections/community/libvirt 2025-12-12 16:25:47,229 p=33051 u=zuul n=ansible | community.libvirt:1.3.0 was installed successfully 2025-12-12 16:25:47,229 p=33051 u=zuul n=ansible | Installing 'community.crypto:2.22.3' to '/home/zuul/.ansible/collections/ansible_collections/community/crypto' 2025-12-12 16:25:47,430 p=33051 u=zuul n=ansible | Created collection for community.crypto:2.22.3 at /home/zuul/.ansible/collections/ansible_collections/community/crypto 2025-12-12 16:25:47,430 p=33051 u=zuul n=ansible | community.crypto:2.22.3 was installed successfully 2025-12-12 16:25:47,431 p=33051 u=zuul n=ansible | Installing 'kubernetes.core:5.0.0' to '/home/zuul/.ansible/collections/ansible_collections/kubernetes/core' 2025-12-12 16:25:47,614 p=33051 u=zuul n=ansible | Created collection for kubernetes.core:5.0.0 at /home/zuul/.ansible/collections/ansible_collections/kubernetes/core 2025-12-12 16:25:47,614 p=33051 u=zuul n=ansible | kubernetes.core:5.0.0 was installed successfully 2025-12-12 16:25:47,614 p=33051 u=zuul n=ansible | Installing 'ansible.netcommon:7.1.0' to '/home/zuul/.ansible/collections/ansible_collections/ansible/netcommon' 2025-12-12 16:25:47,708 p=33051 u=zuul n=ansible | Created collection for ansible.netcommon:7.1.0 at /home/zuul/.ansible/collections/ansible_collections/ansible/netcommon 2025-12-12 16:25:47,708 p=33051 u=zuul n=ansible | ansible.netcommon:7.1.0 was installed successfully 2025-12-12 16:25:47,708 p=33051 u=zuul n=ansible | Installing 'openstack.config_template:2.1.1' to '/home/zuul/.ansible/collections/ansible_collections/openstack/config_template' 2025-12-12 16:25:47,736 p=33051 u=zuul n=ansible | Created collection for openstack.config_template:2.1.1 at /home/zuul/.ansible/collections/ansible_collections/openstack/config_template 2025-12-12 
16:25:47,736 p=33051 u=zuul n=ansible | openstack.config_template:2.1.1 was installed successfully 2025-12-12 16:25:47,736 p=33051 u=zuul n=ansible | Installing 'junipernetworks.junos:9.1.0' to '/home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos' 2025-12-12 16:25:48,052 p=33051 u=zuul n=ansible | Created collection for junipernetworks.junos:9.1.0 at /home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos 2025-12-12 16:25:48,052 p=33051 u=zuul n=ansible | junipernetworks.junos:9.1.0 was installed successfully 2025-12-12 16:25:48,052 p=33051 u=zuul n=ansible | Installing 'cisco.ios:9.0.3' to '/home/zuul/.ansible/collections/ansible_collections/cisco/ios' 2025-12-12 16:25:48,400 p=33051 u=zuul n=ansible | Created collection for cisco.ios:9.0.3 at /home/zuul/.ansible/collections/ansible_collections/cisco/ios 2025-12-12 16:25:48,400 p=33051 u=zuul n=ansible | cisco.ios:9.0.3 was installed successfully 2025-12-12 16:25:48,400 p=33051 u=zuul n=ansible | Installing 'mellanox.onyx:1.0.0' to '/home/zuul/.ansible/collections/ansible_collections/mellanox/onyx' 2025-12-12 16:25:48,448 p=33051 u=zuul n=ansible | Created collection for mellanox.onyx:1.0.0 at /home/zuul/.ansible/collections/ansible_collections/mellanox/onyx 2025-12-12 16:25:48,449 p=33051 u=zuul n=ansible | mellanox.onyx:1.0.0 was installed successfully 2025-12-12 16:25:48,449 p=33051 u=zuul n=ansible | Installing 'community.okd:4.0.0' to '/home/zuul/.ansible/collections/ansible_collections/community/okd' 2025-12-12 16:25:48,493 p=33051 u=zuul n=ansible | Created collection for community.okd:4.0.0 at /home/zuul/.ansible/collections/ansible_collections/community/okd 2025-12-12 16:25:48,493 p=33051 u=zuul n=ansible | community.okd:4.0.0 was installed successfully 2025-12-12 16:25:48,493 p=33051 u=zuul n=ansible | Installing '@NAMESPACE@.@NAME@:3.1.4' to '/home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@' 2025-12-12 16:25:48,624 p=33051 u=zuul n=ansible | Created collection for @NAMESPACE@.@NAME@:3.1.4 at /home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@ 2025-12-12 16:25:48,625 p=33051 u=zuul n=ansible | @NAMESPACE@.@NAME@:3.1.4 was installed successfully home/zuul/zuul-output/logs/ci-framework-data/0000755000175000017500000000000015117043112020356 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/logs/0000755000175000017500000000000015117043102021321 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/logs/openstack-must-gather/0000755000175000017500000000000015117043062025553 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/logs/openstack-must-gather/must-gather.logs0000644000175000017500000000617215117043022030703 0ustar zuulzuul[must-gather ] OUT 2025-12-12T16:40:38.139377922Z Using must-gather plug-in image: quay.io/openstack-k8s-operators/openstack-must-gather:latest When opening a support case, bugzilla, or issue please include the following summary data along with any other requested information: ClusterID: ClientVersion: 4.20.6 ClusterVersion: Stable at "4.20.1" ClusterOperators: clusteroperator/machine-config is degraded because Failed to resync 4.20.1 because: error during syncRequiredMachineConfigPools: [context deadline exceeded, error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"")] clusteroperator/cloud-credential is missing clusteroperator/cluster-autoscaler is missing clusteroperator/insights is missing clusteroperator/monitoring is missing clusteroperator/storage is missing [must-gather ] OUT 2025-12-12T16:40:38.18604624Z namespace/openshift-must-gather-2sjxj created [must-gather ] OUT 2025-12-12T16:40:38.195476451Z clusterrolebinding.rbac.authorization.k8s.io/must-gather-k5chq created [must-gather ] OUT 2025-12-12T16:40:38.21981086Z pod for plug-in image quay.io/openstack-k8s-operators/openstack-must-gather:latest created [must-gather-v4h5l] OUT 2025-12-12T16:40:48.231999094Z gather logs unavailable: Get "https://192.168.126.11:10250/containerLogs/openshift-must-gather-2sjxj/must-gather-v4h5l/gather?follow=true×tamps=true": remote error: tls: internal error [must-gather-v4h5l] OUT 2025-12-12T16:40:48.23223174Z waiting for gather to complete [must-gather-v4h5l] OUT 2025-12-12T16:42:58.238364847Z downloading gather output [must-gather-v4h5l] OUT 2025-12-12T16:42:58.802953469Z gather output not downloaded: [Get "https://192.168.126.11:10250/containerLogs/openshift-must-gather-2sjxj/must-gather-v4h5l/gather?timestamps=true": remote error: tls: internal error, No available strategies to copy.] [must-gather-v4h5l] OUT 2025-12-12T16:42:58.80298743Z [must-gather ] OUT 2025-12-12T16:42:58.80745501Z namespace/openshift-must-gather-2sjxj deleted Reprinting Cluster State: When opening a support case, bugzilla, or issue please include the following summary data along with any other requested information: ClusterID: ClientVersion: 4.20.6 ClusterVersion: Stable at "4.20.1" ClusterOperators: clusteroperator/machine-config is degraded because Failed to resync 4.20.1 because: error during syncRequiredMachineConfigPools: [context deadline exceeded, error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"")] clusteroperator/cloud-credential is missing clusteroperator/cluster-autoscaler is missing clusteroperator/insights is missing clusteroperator/monitoring is missing clusteroperator/storage is missing home/zuul/zuul-output/logs/ci-framework-data/logs/openstack-must-gather/timestamp0000644000175000017500000000015715117043022027500 0ustar zuulzuul2025-12-12 16:40:38.200794262 +0000 UTC m=+0.170457892 2025-12-12 16:42:58.80377238 +0000 UTC m=+140.773436080 home/zuul/zuul-output/logs/ci-framework-data/logs/openstack-must-gather/event-filter.html0000644000175000017500000000641015117043022031042 0ustar zuulzuul Events
[event-filter.html] Empty events table; column headers only: Time, Namespace, Component, RelatedObject, Reason, Message
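The openstack-must-gather output above was collected with the plug-in image named in the log. A minimal sketch of an equivalent manual invocation follows; the exact command line used by the ci-framework is not recorded here, and the --dest-dir path is an assumption based on the archive layout (both --image and --dest-dir are standard oc adm must-gather flags).

# Hedged sketch: reproduce the must-gather collection recorded above.
# Image taken from the log; destination directory is assumed, not recorded.
oc adm must-gather \
    --image=quay.io/openstack-k8s-operators/openstack-must-gather:latest \
    --dest-dir=/home/zuul/ci-framework-data/logs/openstack-must-gather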
././@LongLink0000644000000000000000000000017600000000000011606 Kustar rootrootquay-io-openstack-k8s-operators-openstack-must-gather-sha256-854a802357b4f565a366fce3bf29b20c1b768ec4ab7e822ef52dfc2fef000d2chome/zuul/zuul-output/logs/ci-framework-data/logs/openstack-must-gather/latest0000777000175000017500000000000015117043023047527 2quay-io-openstack-k8s-operators-openstack-must-gather-sha256-854a802357b4f565a366fce3bf29b20c1b768ecustar zuulzuul././@LongLink0000644000000000000000000000030700000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/openstack-must-gather/quay-io-openstack-k8s-operators-openstack-must-gather-sha256-854a802357b4f565a366fce3bf29b20c1b768ec4ab7e822ef52dfc2fef000d2c/home/zuul/zuul-output/logs/ci-framework-data/logs/openstack-must-gather/quay-io-openstack-k8s-operat0000755000175000017500000000000015117043062033040 5ustar zuulzuul././@LongLink0000644000000000000000000000032200000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/openstack-must-gather/quay-io-openstack-k8s-operators-openstack-must-gather-sha256-854a802357b4f565a366fce3bf29b20c1b768ec4ab7e822ef52dfc2fef000d2c/gather.logshome/zuul/zuul-output/logs/ci-framework-data/logs/openstack-must-gather/quay-io-openstack-k8s-operat0000644000175000017500000000000015117043022033024 0ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/logs/crc/0000755000175000017500000000000015117043042022073 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/0000755000175000017500000000000015117043042025562 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/0000755000175000017500000000000015117043044026531 5ustar zuulzuul././@LongLink0000644000000000000000000000027700000000000011611 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-575994946d-wff8v_22a6a238-12c9-43ae-afbc-f9595d46e727/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000755000175000017500000000000015117043043033043 5ustar zuulzuul././@LongLink0000644000000000000000000000032700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-575994946d-wff8v_22a6a238-12c9-43ae-afbc-f9595d46e727/kube-apiserver-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000755000175000017500000000000015117043062033044 5ustar zuulzuul././@LongLink0000644000000000000000000000033400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-575994946d-wff8v_22a6a238-12c9-43ae-afbc-f9595d46e727/kube-apiserver-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000644000175000017500000100707015117043043033052 0ustar zuulzuul2025-12-12T16:16:45.475020434+00:00 stderr F I1212 16:16:45.474824 1 cmd.go:253] Using service-serving-cert provided certificates 2025-12-12T16:16:45.475020434+00:00 stderr F I1212 16:16:45.474983 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 
2025-12-12T16:16:45.476391117+00:00 stderr F I1212 16:16:45.476251 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:45.502805202+00:00 stderr F I1212 16:16:45.502751 1 builder.go:304] kube-apiserver-operator version v0.0.0-unknown-c3d9642-c3d9642 2025-12-12T16:16:46.216102266+00:00 stderr F I1212 16:16:46.215068 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:46.216102266+00:00 stderr F W1212 16:16:46.215751 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:46.216102266+00:00 stderr F W1212 16:16:46.215817 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:46.216102266+00:00 stderr F W1212 16:16:46.215821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:46.216102266+00:00 stderr F W1212 16:16:46.215823 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:46.216102266+00:00 stderr F W1212 16:16:46.215826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:46.216102266+00:00 stderr F W1212 16:16:46.215829 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 2025-12-12T16:16:46.222313318+00:00 stderr F I1212 16:16:46.222275 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:16:46.222924383+00:00 stderr F I1212 16:16:46.222908 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:46.223360253+00:00 stderr F I1212 16:16:46.223340 1 leaderelection.go:257] attempting to acquire leader lease openshift-kube-apiserver-operator/kube-apiserver-operator-lock... 
2025-12-12T16:16:46.223643370+00:00 stderr F I1212 16:16:46.223624 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:46.223724092+00:00 stderr F I1212 16:16:46.223699 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:46.223786674+00:00 stderr F I1212 16:16:46.223774 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:46.223938867+00:00 stderr F I1212 16:16:46.223920 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:46.224140702+00:00 stderr F I1212 16:16:46.224122 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:46.224234335+00:00 stderr F I1212 16:16:46.224218 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:46.224321767+00:00 stderr F I1212 16:16:46.224275 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:46.224357298+00:00 stderr F I1212 16:16:46.224345 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:46.248694882+00:00 stderr F I1212 16:16:46.246578 1 leaderelection.go:271] successfully acquired lease openshift-kube-apiserver-operator/kube-apiserver-operator-lock 2025-12-12T16:16:46.248694882+00:00 stderr F I1212 16:16:46.248114 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator-lock", UID:"d2120d60-887e-43f5-bf5a-5e3e6811b9a9", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"37213", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' kube-apiserver-operator-575994946d-wff8v_39fc3fac-452d-49f2-89ce-9587266d04bc became leader 2025-12-12T16:16:46.256782419+00:00 stderr F I1212 16:16:46.256748 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:46.263854502+00:00 stderr F I1212 16:16:46.263707 1 starter.go:164] FeatureGates initialized: knownFeatureGates=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud 
DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:16:46.263969315+00:00 stderr F I1212 16:16:46.263927 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", 
"ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:46.290220646+00:00 stderr F E1212 16:16:46.286949 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:46.290560674+00:00 stderr F E1212 16:16:46.290533 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:46.293475885+00:00 stderr F E1212 16:16:46.292794 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:46.297223617+00:00 stderr F E1212 16:16:46.296236 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:46.301203984+00:00 stderr F E1212 16:16:46.301089 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:46.305198771+00:00 stderr F E1212 16:16:46.302850 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:46.310497061+00:00 stderr F E1212 16:16:46.309551 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:46.319309266+00:00 stderr F E1212 16:16:46.319276 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:46.323211221+00:00 stderr F E1212 16:16:46.320813 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:46.323211221+00:00 stderr F E1212 16:16:46.322035 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:46.327249800+00:00 stderr F E1212 16:16:46.323264 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:46.327249800+00:00 stderr F E1212 16:16:46.324484 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 
2025-12-12T16:16:46.327249800+00:00 stderr F I1212 16:16:46.324537 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:46.327249800+00:00 stderr F I1212 16:16:46.324599 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:46.327249800+00:00 stderr F I1212 16:16:46.324691 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:46.394782238+00:00 stderr F E1212 16:16:46.394733 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:46.404610188+00:00 stderr F I1212 16:16:46.399843 1 certrotationcontroller.go:148] Setting monthPeriod to 720h0m0s, yearPeriod to 8760h0m0s, tenMonthPeriod to 7008h0m0s 2025-12-12T16:16:46.457848958+00:00 stderr F I1212 16:16:46.449038 1 base_controller.go:76] Waiting for caches to sync for SCCReconcileController 2025-12-12T16:16:46.457848958+00:00 stderr F I1212 16:16:46.449863 1 base_controller.go:76] Waiting for caches to sync for MissingStaticPodController 2025-12-12T16:16:46.457848958+00:00 stderr F I1212 16:16:46.449882 1 base_controller.go:76] Waiting for caches to sync for kube-apiserver 2025-12-12T16:16:46.457848958+00:00 stderr F I1212 16:16:46.450007 1 base_controller.go:76] Waiting for caches to sync for KubeAPIServerStaticResources-StaticResources 2025-12-12T16:16:46.457848958+00:00 stderr F I1212 16:16:46.450020 1 base_controller.go:76] Waiting for caches to sync for TargetConfigController 2025-12-12T16:16:46.457848958+00:00 stderr F I1212 16:16:46.450039 1 base_controller.go:76] Waiting for caches to sync for NodeKubeconfigController 2025-12-12T16:16:46.457848958+00:00 stderr F I1212 16:16:46.450049 1 base_controller.go:76] Waiting for caches to sync for ConfigObserver 2025-12-12T16:16:46.457848958+00:00 stderr F I1212 16:16:46.450113 1 base_controller.go:76] Waiting for caches to sync for StatusSyncer_kube-apiserver 2025-12-12T16:16:46.457848958+00:00 stderr F I1212 16:16:46.450127 1 certrotationcontroller.go:919] Starting CertRotation 2025-12-12T16:16:46.457848958+00:00 stderr F I1212 16:16:46.450131 1 certrotationcontroller.go:884] Waiting for CertRotation 2025-12-12T16:16:46.457848958+00:00 stderr F I1212 16:16:46.450145 1 base_controller.go:76] Waiting for caches to sync for openshift-kube-apiserver-EncryptionCondition 2025-12-12T16:16:46.457848958+00:00 stderr F I1212 16:16:46.450158 1 base_controller.go:76] Waiting for caches to sync for CertRotationTimeUpgradeableController 2025-12-12T16:16:46.460040532+00:00 stderr F I1212 16:16:46.460008 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:46.460447812+00:00 stderr F I1212 16:16:46.460430 1 base_controller.go:76] Waiting for caches to sync for EventWatchController 2025-12-12T16:16:46.469738158+00:00 stderr F I1212 16:16:46.463552 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:46.469791200+00:00 stderr F I1212 16:16:46.468385 1 termination_observer.go:145] Starting TerminationObserver 2025-12-12T16:16:46.469820150+00:00 stderr F I1212 16:16:46.468447 1 base_controller.go:76] Waiting for caches to sync for RevisionController 2025-12-12T16:16:46.470007925+00:00 stderr F I1212 
16:16:46.468587 1 base_controller.go:76] Waiting for caches to sync for Installer 2025-12-12T16:16:46.470037766+00:00 stderr F I1212 16:16:46.468599 1 base_controller.go:76] Waiting for caches to sync for kube-apiserver-InstallerState 2025-12-12T16:16:46.470060696+00:00 stderr F I1212 16:16:46.468606 1 base_controller.go:76] Waiting for caches to sync for kube-apiserver-StaticPodState 2025-12-12T16:16:46.470085157+00:00 stderr F I1212 16:16:46.468613 1 base_controller.go:76] Waiting for caches to sync for PruneController 2025-12-12T16:16:46.470120508+00:00 stderr F I1212 16:16:46.468621 1 base_controller.go:76] Waiting for caches to sync for kube-apiserver-StartupMonitorPodCondition 2025-12-12T16:16:46.470143238+00:00 stderr F I1212 16:16:46.468629 1 base_controller.go:76] Waiting for caches to sync for kube-apiserver-StaticPodStateFallback 2025-12-12T16:16:46.470165339+00:00 stderr F I1212 16:16:46.468637 1 base_controller.go:76] Waiting for caches to sync for kube-apiserver-Node 2025-12-12T16:16:46.470205130+00:00 stderr F I1212 16:16:46.468706 1 base_controller.go:76] Waiting for caches to sync for BackingResourceController-StaticResources 2025-12-12T16:16:46.470231841+00:00 stderr F I1212 16:16:46.468712 1 base_controller.go:76] Waiting for caches to sync for cluster-kube-apiserver-operator-UnsupportedConfigOverrides 2025-12-12T16:16:46.470253991+00:00 stderr F I1212 16:16:46.468720 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer 2025-12-12T16:16:46.470276372+00:00 stderr F I1212 16:16:46.468726 1 base_controller.go:76] Waiting for caches to sync for GuardController 2025-12-12T16:16:46.470299472+00:00 stderr F I1212 16:16:46.468788 1 base_controller.go:76] Waiting for caches to sync for openshift-kube-apiserver-EncryptionKey 2025-12-12T16:16:46.470323823+00:00 stderr F I1212 16:16:46.468802 1 base_controller.go:76] Waiting for caches to sync for openshift-kube-apiserver-EncryptionState 2025-12-12T16:16:46.470345333+00:00 stderr F I1212 16:16:46.468808 1 base_controller.go:76] Waiting for caches to sync for openshift-kube-apiserver-EncryptionPrune 2025-12-12T16:16:46.470367184+00:00 stderr F I1212 16:16:46.468815 1 base_controller.go:76] Waiting for caches to sync for openshift-kube-apiserver-EncryptionMigration 2025-12-12T16:16:46.470390864+00:00 stderr F I1212 16:16:46.468937 1 base_controller.go:76] Waiting for caches to sync for BoundSATokenSignerController 2025-12-12T16:16:46.470413015+00:00 stderr F I1212 16:16:46.468948 1 base_controller.go:76] Waiting for caches to sync for auditPolicyController 2025-12-12T16:16:46.470435756+00:00 stderr F I1212 16:16:46.468957 1 base_controller.go:76] Waiting for caches to sync for kube-apiserver-RemoveStaleConditions 2025-12-12T16:16:46.470457916+00:00 stderr F I1212 16:16:46.468966 1 base_controller.go:76] Waiting for caches to sync for ConnectivityCheckController 2025-12-12T16:16:46.470479437+00:00 stderr F I1212 16:16:46.468973 1 base_controller.go:76] Waiting for caches to sync for KubeletVersionSkewController 2025-12-12T16:16:46.470501657+00:00 stderr F I1212 16:16:46.468980 1 base_controller.go:76] Waiting for caches to sync for WorkerLatencyProfile 2025-12-12T16:16:46.470523608+00:00 stderr F I1212 16:16:46.468986 1 base_controller.go:76] Waiting for caches to sync for webhookSupportabilityController 2025-12-12T16:16:46.470545408+00:00 stderr F I1212 16:16:46.468994 1 base_controller.go:76] Waiting for caches to sync for ServiceAccountIssuerController 2025-12-12T16:16:46.470567329+00:00 stderr F I1212 
16:16:46.469001 1 base_controller.go:76] Waiting for caches to sync for PodSecurityReadinessController 2025-12-12T16:16:46.470599140+00:00 stderr F I1212 16:16:46.470588 1 base_controller.go:82] Caches are synced for PodSecurityReadinessController 2025-12-12T16:16:46.470628870+00:00 stderr F I1212 16:16:46.470619 1 base_controller.go:119] Starting #1 worker of PodSecurityReadinessController controller ... 2025-12-12T16:16:46.470682002+00:00 stderr F I1212 16:16:46.469008 1 base_controller.go:76] Waiting for caches to sync for highCPUUsageAlertController 2025-12-12T16:16:46.521293267+00:00 stderr F I1212 16:16:46.521244 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:46.559655514+00:00 stderr F I1212 16:16:46.559600 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:46.573068951+00:00 stderr F I1212 16:16:46.572772 1 base_controller.go:82] Caches are synced for highCPUUsageAlertController 2025-12-12T16:16:46.573068951+00:00 stderr F I1212 16:16:46.572826 1 base_controller.go:119] Starting #1 worker of highCPUUsageAlertController controller ... 2025-12-12T16:16:46.573068951+00:00 stderr F I1212 16:16:46.572954 1 base_controller.go:82] Caches are synced for BackingResourceController-StaticResources 2025-12-12T16:16:46.573068951+00:00 stderr F I1212 16:16:46.572961 1 base_controller.go:119] Starting #1 worker of BackingResourceController-StaticResources controller ... 2025-12-12T16:16:46.573068951+00:00 stderr F I1212 16:16:46.572997 1 base_controller.go:82] Caches are synced for cluster-kube-apiserver-operator-UnsupportedConfigOverrides 2025-12-12T16:16:46.573068951+00:00 stderr F I1212 16:16:46.573003 1 base_controller.go:119] Starting #1 worker of cluster-kube-apiserver-operator-UnsupportedConfigOverrides controller ... 2025-12-12T16:16:46.573573774+00:00 stderr F I1212 16:16:46.573537 1 base_controller.go:82] Caches are synced for LoggingSyncer 2025-12-12T16:16:46.573573774+00:00 stderr F I1212 16:16:46.573565 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ... 2025-12-12T16:16:46.573801419+00:00 stderr F I1212 16:16:46.573780 1 base_controller.go:82] Caches are synced for ServiceAccountIssuerController 2025-12-12T16:16:46.573836900+00:00 stderr F I1212 16:16:46.573825 1 base_controller.go:119] Starting #1 worker of ServiceAccountIssuerController controller ... 2025-12-12T16:16:46.576628778+00:00 stderr F I1212 16:16:46.576586 1 base_controller.go:82] Caches are synced for kube-apiserver-RemoveStaleConditions 2025-12-12T16:16:46.576628778+00:00 stderr F I1212 16:16:46.576612 1 base_controller.go:119] Starting #1 worker of kube-apiserver-RemoveStaleConditions controller ... 2025-12-12T16:16:46.668847380+00:00 stderr F I1212 16:16:46.668292 1 base_controller.go:82] Caches are synced for SCCReconcileController 2025-12-12T16:16:46.668847380+00:00 stderr F I1212 16:16:46.668343 1 base_controller.go:119] Starting #1 worker of SCCReconcileController controller ... 
2025-12-12T16:16:46.684237355+00:00 stderr F I1212 16:16:46.683779 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:46.707402921+00:00 stderr F I1212 16:16:46.706029 1 reflector.go:430] "Caches populated" type="*v1.Network" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:46.750397371+00:00 stderr F I1212 16:16:46.750341 1 base_controller.go:82] Caches are synced for CertRotationTimeUpgradeableController 2025-12-12T16:16:46.750556284+00:00 stderr F I1212 16:16:46.750544 1 base_controller.go:119] Starting #1 worker of CertRotationTimeUpgradeableController controller ... 2025-12-12T16:16:46.750613656+00:00 stderr F I1212 16:16:46.750508 1 servicehostname.go:46] syncing servicenetwork hostnames: [10.217.4.1 kubernetes kubernetes.default kubernetes.default.svc kubernetes.default.svc.cluster.local openshift openshift.default openshift.default.svc openshift.default.svc.cluster.local] 2025-12-12T16:16:46.750647297+00:00 stderr F I1212 16:16:46.750638 1 externalloadbalancer.go:27] syncing external loadbalancer hostnames: api.crc.testing 2025-12-12T16:16:46.750694768+00:00 stderr F I1212 16:16:46.750686 1 internalloadbalancer.go:27] syncing internal loadbalancer hostnames: api-int.crc.testing 2025-12-12T16:16:46.750717318+00:00 stderr F I1212 16:16:46.750709 1 certrotationcontroller.go:902] Finished waiting for CertRotation 2025-12-12T16:16:46.750771110+00:00 stderr F I1212 16:16:46.750762 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:16:46.754148272+00:00 stderr F I1212 16:16:46.754120 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:16:46.754228844+00:00 stderr F I1212 16:16:46.754195 1 servicehostname.go:46] syncing servicenetwork hostnames: [10.217.4.1 kubernetes kubernetes.default kubernetes.default.svc kubernetes.default.svc.cluster.local openshift openshift.default openshift.default.svc openshift.default.svc.cluster.local] 2025-12-12T16:16:46.754391738+00:00 stderr F I1212 16:16:46.754254 1 externalloadbalancer.go:27] syncing external loadbalancer hostnames: api.crc.testing 2025-12-12T16:16:46.754391738+00:00 stderr F I1212 16:16:46.754269 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:16:46.754391738+00:00 stderr F I1212 16:16:46.754282 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:16:46.754391738+00:00 stderr F I1212 16:16:46.754274 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:16:46.754391738+00:00 stderr F I1212 16:16:46.754300 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:16:46.754391738+00:00 stderr F I1212 16:16:46.754294 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:16:46.754391738+00:00 stderr F I1212 16:16:46.754321 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:16:46.754391738+00:00 stderr F I1212 16:16:46.754338 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:16:46.754391738+00:00 stderr F I1212 16:16:46.754339 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:16:46.754391738+00:00 stderr F I1212 16:16:46.754354 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 
2025-12-12T16:16:46.754391738+00:00 stderr F I1212 16:16:46.754357 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:16:46.862468547+00:00 stderr F E1212 16:16:46.862094 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" kube-node-lease="(MISSING)" 2025-12-12T16:16:46.862571579+00:00 stderr F E1212 16:16:46.862559 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift="(MISSING)" 2025-12-12T16:16:46.862677172+00:00 stderr F E1212 16:16:46.862664 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-apiserver-operator="(MISSING)" 2025-12-12T16:16:46.862760134+00:00 stderr F E1212 16:16:46.862748 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-cloud-network-config-controller="(MISSING)" 2025-12-12T16:16:46.862841696+00:00 stderr F E1212 16:16:46.862831 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-cluster-samples-operator="(MISSING)" 2025-12-12T16:16:46.862920358+00:00 stderr F E1212 16:16:46.862910 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-cluster-storage-operator="(MISSING)" 2025-12-12T16:16:46.863005680+00:00 stderr F E1212 16:16:46.862995 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-config-managed="(MISSING)" 2025-12-12T16:16:46.863087062+00:00 stderr F E1212 16:16:46.863076 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-config-operator="(MISSING)" 2025-12-12T16:16:46.863170534+00:00 stderr F E1212 16:16:46.863160 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-console="(MISSING)" 2025-12-12T16:16:46.863274426+00:00 stderr F E1212 16:16:46.863262 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-console-operator="(MISSING)" 2025-12-12T16:16:46.863355988+00:00 stderr F E1212 16:16:46.863345 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-console-user-settings="(MISSING)" 2025-12-12T16:16:46.863423800+00:00 stderr F E1212 16:16:46.863413 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-controller-manager="(MISSING)" 2025-12-12T16:16:46.863506232+00:00 stderr F E1212 16:16:46.863494 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the 
namespace is violating because no appropriate labels or annotations were found" openshift-controller-manager-operator="(MISSING)" 2025-12-12T16:16:46.863584564+00:00 stderr F E1212 16:16:46.863574 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-dns-operator="(MISSING)" 2025-12-12T16:16:46.863656876+00:00 stderr F E1212 16:16:46.863646 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-host-network="(MISSING)" 2025-12-12T16:16:46.863714157+00:00 stderr F E1212 16:16:46.863704 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-ingress-canary="(MISSING)" 2025-12-12T16:16:46.863796329+00:00 stderr F E1212 16:16:46.863785 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-ingress-operator="(MISSING)" 2025-12-12T16:16:46.863885681+00:00 stderr F E1212 16:16:46.863875 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-kube-controller-manager-operator="(MISSING)" 2025-12-12T16:16:46.863952583+00:00 stderr F E1212 16:16:46.863942 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-kube-storage-version-migrator="(MISSING)" 2025-12-12T16:16:46.864030985+00:00 stderr F E1212 16:16:46.864020 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-kube-storage-version-migrator-operator="(MISSING)" 2025-12-12T16:16:46.864195949+00:00 stderr F E1212 16:16:46.864170 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-network-console="(MISSING)" 2025-12-12T16:16:46.864275181+00:00 stderr F E1212 16:16:46.864264 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-network-diagnostics="(MISSING)" 2025-12-12T16:16:46.864338442+00:00 stderr F E1212 16:16:46.864328 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-node="(MISSING)" 2025-12-12T16:16:46.864406334+00:00 stderr F E1212 16:16:46.864397 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-route-controller-manager="(MISSING)" 2025-12-12T16:16:46.864466276+00:00 stderr F E1212 16:16:46.864456 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-service-ca="(MISSING)" 2025-12-12T16:16:46.864550448+00:00 stderr F E1212 16:16:46.864538 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to 
determine if the namespace is violating because no appropriate labels or annotations were found" openshift-service-ca-operator="(MISSING)" 2025-12-12T16:16:46.864675031+00:00 stderr F E1212 16:16:46.864655 1 podsecurityreadinesscontroller.go:88] "namespace:" err="unable to determine if the namespace is violating because no appropriate labels or annotations were found" openshift-user-workload-monitoring="(MISSING)" 2025-12-12T16:16:46.872055771+00:00 stderr F I1212 16:16:46.866778 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:46.930033756+00:00 stderr F I1212 16:16:46.929560 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:46.952334171+00:00 stderr F I1212 16:16:46.950839 1 base_controller.go:82] Caches are synced for StatusSyncer_kube-apiserver 2025-12-12T16:16:46.952334171+00:00 stderr F I1212 16:16:46.950859 1 base_controller.go:119] Starting #1 worker of StatusSyncer_kube-apiserver controller ... 2025-12-12T16:16:46.992594504+00:00 stderr F I1212 16:16:46.981468 1 status_controller.go:230] clusteroperator/kube-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:46Z","message":"NodeControllerDegraded: The master nodes not ready: node \"crc\" not ready since 2025-11-03 09:40:44 +0000 UTC because NodeStatusUnknown (Kubelet stopped posting node status.)","reason":"NodeController_MasterNodesReady","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:29Z","message":"NodeInstallerProgressing: 1 node is at revision 11","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T08:08:37Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 11","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:28Z","message":"KubeletMinorVersionUpgradeable: Kubelet and API server minor versions are synced.","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:29Z","message":"PodSecurityInconclusiveEvaluationConditionsDetected: Could not evaluate violations for namespaces: [kube-node-lease openshift openshift-apiserver-operator openshift-cloud-network-config-controller openshift-cluster-samples-operator openshift-cluster-storage-operator openshift-config-managed openshift-config-operator openshift-console openshift-console-operator openshift-console-user-settings openshift-controller-manager openshift-controller-manager-operator openshift-dns-operator openshift-host-network openshift-ingress-canary openshift-ingress-operator openshift-kube-controller-manager-operator openshift-kube-storage-version-migrator openshift-kube-storage-version-migrator-operator openshift-network-console openshift-network-diagnostics openshift-node openshift-route-controller-manager openshift-service-ca openshift-service-ca-operator openshift-user-workload-monitoring]","reason":"PodSecurityInconclusive_PSViolationDecisionInconclusive","status":"True","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:47.073414187+00:00 stderr F I1212 16:16:47.070849 1 reflector.go:430] "Caches populated" type="*v1.Image" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:47.093517128+00:00 stderr F I1212 16:16:47.092134 1 reflector.go:430] "Caches populated" type="*v1.CustomResourceDefinition" 
reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:47.140997007+00:00 stderr F I1212 16:16:47.140945 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:47.172042325+00:00 stderr F I1212 16:16:47.171346 1 base_controller.go:82] Caches are synced for ConnectivityCheckController 2025-12-12T16:16:47.172042325+00:00 stderr F I1212 16:16:47.171383 1 base_controller.go:119] Starting #1 worker of ConnectivityCheckController controller ... 2025-12-12T16:16:47.250725066+00:00 stderr F I1212 16:16:47.250677 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:47.273337648+00:00 stderr F I1212 16:16:47.272826 1 status_controller.go:230] clusteroperator/kube-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:47Z","message":"NodeControllerDegraded: The master nodes not ready: node \"crc\" not ready since 2025-11-03 09:40:44 +0000 UTC because NodeStatusUnknown (Kubelet stopped posting node status.)","reason":"NodeController_MasterNodesReady","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:29Z","message":"NodeInstallerProgressing: 1 node is at revision 11","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T08:08:37Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 11","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:28Z","message":"KubeletMinorVersionUpgradeable: Kubelet and API server minor versions are synced.","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:29Z","message":"PodSecurityInconclusiveEvaluationConditionsDetected: Could not evaluate violations for namespaces: [kube-node-lease openshift openshift-apiserver-operator openshift-cloud-network-config-controller openshift-cluster-samples-operator openshift-cluster-storage-operator openshift-config-managed openshift-config-operator openshift-console openshift-console-operator openshift-console-user-settings openshift-controller-manager openshift-controller-manager-operator openshift-dns-operator openshift-host-network openshift-ingress-canary openshift-ingress-operator openshift-kube-controller-manager-operator openshift-kube-storage-version-migrator openshift-kube-storage-version-migrator-operator openshift-network-console openshift-network-diagnostics openshift-node openshift-route-controller-manager openshift-service-ca openshift-service-ca-operator openshift-user-workload-monitoring]","reason":"PodSecurityInconclusive_PSViolationDecisionInconclusive","status":"True","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:47.273621595+00:00 stderr F I1212 16:16:47.273432 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-apiserver changed: Degraded changed from False to True ("NodeControllerDegraded: The master nodes not ready: node \"crc\" not ready since 2025-11-03 09:40:44 +0000 UTC because NodeStatusUnknown (Kubelet stopped posting node status.)") 2025-12-12T16:16:47.449485199+00:00 stderr F I1212 16:16:47.447429 1 request.go:752] "Waited before 
sending request" delay="1.003237714s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/events?limit=500&resourceVersion=0" 2025-12-12T16:16:47.466432302+00:00 stderr F E1212 16:16:47.464606 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_kube-apiserver reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"kube-apiserver\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:16:47.475589226+00:00 stderr F I1212 16:16:47.474086 1 reflector.go:430] "Caches populated" type="*v1.Event" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:47.482224358+00:00 stderr F I1212 16:16:47.478798 1 base_controller.go:82] Caches are synced for EventWatchController 2025-12-12T16:16:47.482224358+00:00 stderr F I1212 16:16:47.478822 1 base_controller.go:119] Starting #1 worker of EventWatchController controller ... 2025-12-12T16:16:47.655997730+00:00 stderr F I1212 16:16:47.652201 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:47.851583146+00:00 stderr F I1212 16:16:47.849878 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:47.873227824+00:00 stderr F I1212 16:16:47.872112 1 base_controller.go:82] Caches are synced for kube-apiserver-InstallerState 2025-12-12T16:16:47.873227824+00:00 stderr F I1212 16:16:47.872140 1 base_controller.go:119] Starting #1 worker of kube-apiserver-InstallerState controller ... 2025-12-12T16:16:47.873227824+00:00 stderr F I1212 16:16:47.872162 1 base_controller.go:82] Caches are synced for kube-apiserver-StaticPodState 2025-12-12T16:16:47.873227824+00:00 stderr F I1212 16:16:47.872168 1 base_controller.go:119] Starting #1 worker of kube-apiserver-StaticPodState controller ... 2025-12-12T16:16:47.873227824+00:00 stderr F I1212 16:16:47.872855 1 base_controller.go:82] Caches are synced for kube-apiserver-StartupMonitorPodCondition 2025-12-12T16:16:47.873227824+00:00 stderr F I1212 16:16:47.872866 1 base_controller.go:119] Starting #1 worker of kube-apiserver-StartupMonitorPodCondition controller ... 2025-12-12T16:16:47.873227824+00:00 stderr F I1212 16:16:47.872885 1 base_controller.go:82] Caches are synced for kube-apiserver-StaticPodStateFallback 2025-12-12T16:16:47.873227824+00:00 stderr F I1212 16:16:47.872890 1 base_controller.go:119] Starting #1 worker of kube-apiserver-StaticPodStateFallback controller ... 2025-12-12T16:16:48.048074293+00:00 stderr F I1212 16:16:48.047982 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:48.075649656+00:00 stderr F I1212 16:16:48.072463 1 base_controller.go:82] Caches are synced for webhookSupportabilityController 2025-12-12T16:16:48.075649656+00:00 stderr F I1212 16:16:48.072507 1 base_controller.go:119] Starting #1 worker of webhookSupportabilityController controller ... 
2025-12-12T16:16:48.149923069+00:00 stderr F W1212 16:16:48.146033 1 degraded_webhook.go:147] failed to connect to webhook "controlplanemachineset.machine.openshift.io" via service "control-plane-machine-set-operator.openshift-machine-api.svc:9443": dial tcp: lookup control-plane-machine-set-operator.openshift-machine-api.svc on 10.217.4.10:53: read udp 10.217.0.20:37133->10.217.4.10:53: read: connection refused 2025-12-12T16:16:48.279260507+00:00 stderr F I1212 16:16:48.273687 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:48.283700435+00:00 stderr F I1212 16:16:48.281728 1 base_controller.go:82] Caches are synced for PruneController 2025-12-12T16:16:48.283700435+00:00 stderr F I1212 16:16:48.281757 1 base_controller.go:119] Starting #1 worker of PruneController controller ... 2025-12-12T16:16:48.283700435+00:00 stderr F I1212 16:16:48.282464 1 base_controller.go:82] Caches are synced for auditPolicyController 2025-12-12T16:16:48.283700435+00:00 stderr F I1212 16:16:48.282512 1 base_controller.go:119] Starting #1 worker of auditPolicyController controller ... 2025-12-12T16:16:48.286293309+00:00 stderr F I1212 16:16:48.285404 1 base_controller.go:82] Caches are synced for WorkerLatencyProfile 2025-12-12T16:16:48.286293309+00:00 stderr F I1212 16:16:48.285442 1 base_controller.go:119] Starting #1 worker of WorkerLatencyProfile controller ... 2025-12-12T16:16:48.352537836+00:00 stderr F I1212 16:16:48.351032 1 base_controller.go:82] Caches are synced for MissingStaticPodController 2025-12-12T16:16:48.352537836+00:00 stderr F I1212 16:16:48.351082 1 base_controller.go:119] Starting #1 worker of MissingStaticPodController controller ... 2025-12-12T16:16:48.462459920+00:00 stderr F I1212 16:16:48.460992 1 request.go:752] "Waited before sending request" delay="2.013205802s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/secrets?limit=500&resourceVersion=0" 2025-12-12T16:16:48.492282698+00:00 stderr F I1212 16:16:48.489862 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:48.555532122+00:00 stderr F I1212 16:16:48.552712 1 base_controller.go:82] Caches are synced for openshift-kube-apiserver-EncryptionCondition 2025-12-12T16:16:48.555532122+00:00 stderr F I1212 16:16:48.552744 1 base_controller.go:119] Starting #1 worker of openshift-kube-apiserver-EncryptionCondition controller ... 2025-12-12T16:16:48.572409864+00:00 stderr F I1212 16:16:48.570099 1 base_controller.go:82] Caches are synced for RevisionController 2025-12-12T16:16:48.572409864+00:00 stderr F I1212 16:16:48.570135 1 base_controller.go:119] Starting #1 worker of RevisionController controller ... 2025-12-12T16:16:48.577161330+00:00 stderr F I1212 16:16:48.576386 1 base_controller.go:82] Caches are synced for Installer 2025-12-12T16:16:48.577161330+00:00 stderr F I1212 16:16:48.576433 1 base_controller.go:119] Starting #1 worker of Installer controller ... 2025-12-12T16:16:48.577375125+00:00 stderr F I1212 16:16:48.577241 1 base_controller.go:82] Caches are synced for openshift-kube-apiserver-EncryptionKey 2025-12-12T16:16:48.577375125+00:00 stderr F I1212 16:16:48.577261 1 base_controller.go:119] Starting #1 worker of openshift-kube-apiserver-EncryptionKey controller ... 
2025-12-12T16:16:48.577375125+00:00 stderr F I1212 16:16:48.577328 1 base_controller.go:82] Caches are synced for openshift-kube-apiserver-EncryptionMigration 2025-12-12T16:16:48.577375125+00:00 stderr F I1212 16:16:48.577365 1 base_controller.go:119] Starting #1 worker of openshift-kube-apiserver-EncryptionMigration controller ... 2025-12-12T16:16:48.577731204+00:00 stderr F I1212 16:16:48.577411 1 base_controller.go:82] Caches are synced for openshift-kube-apiserver-EncryptionState 2025-12-12T16:16:48.577731204+00:00 stderr F I1212 16:16:48.577420 1 base_controller.go:119] Starting #1 worker of openshift-kube-apiserver-EncryptionState controller ... 2025-12-12T16:16:48.577731204+00:00 stderr F I1212 16:16:48.577432 1 base_controller.go:82] Caches are synced for openshift-kube-apiserver-EncryptionPrune 2025-12-12T16:16:48.577731204+00:00 stderr F I1212 16:16:48.577436 1 base_controller.go:119] Starting #1 worker of openshift-kube-apiserver-EncryptionPrune controller ... 2025-12-12T16:16:48.652115300+00:00 stderr F I1212 16:16:48.652041 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:48.653637797+00:00 stderr F I1212 16:16:48.653588 1 base_controller.go:82] Caches are synced for KubeAPIServerStaticResources-StaticResources 2025-12-12T16:16:48.653637797+00:00 stderr F I1212 16:16:48.653630 1 base_controller.go:119] Starting #1 worker of KubeAPIServerStaticResources-StaticResources controller ... 2025-12-12T16:16:48.849467488+00:00 stderr F I1212 16:16:48.849398 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:48.850104844+00:00 stderr F I1212 16:16:48.850067 1 base_controller.go:82] Caches are synced for TargetConfigController 2025-12-12T16:16:48.850121954+00:00 stderr F I1212 16:16:48.850103 1 base_controller.go:119] Starting #1 worker of TargetConfigController controller ... 2025-12-12T16:16:49.052118706+00:00 stderr F I1212 16:16:49.050001 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:49.052118706+00:00 stderr F I1212 16:16:49.050998 1 base_controller.go:82] Caches are synced for NodeKubeconfigController 2025-12-12T16:16:49.052118706+00:00 stderr F I1212 16:16:49.051009 1 base_controller.go:119] Starting #1 worker of NodeKubeconfigController controller ... 2025-12-12T16:16:49.054105795+00:00 stderr F I1212 16:16:49.054022 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:49.054105795+00:00 stderr F I1212 16:16:49.054055 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:16:49.056366660+00:00 stderr F I1212 16:16:49.056328 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:49.056366660+00:00 stderr F I1212 16:16:49.056358 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:16:49.056419031+00:00 stderr F I1212 16:16:49.056388 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:49.056419031+00:00 stderr F I1212 16:16:49.056396 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 
2025-12-12T16:16:49.056427701+00:00 stderr F I1212 16:16:49.056422 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:49.056435561+00:00 stderr F I1212 16:16:49.056426 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:16:49.056443322+00:00 stderr F I1212 16:16:49.056438 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:49.056450902+00:00 stderr F I1212 16:16:49.056442 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:16:49.056463092+00:00 stderr F I1212 16:16:49.056455 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:49.056470322+00:00 stderr F I1212 16:16:49.056460 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:16:49.056505193+00:00 stderr F I1212 16:16:49.056485 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:49.056505193+00:00 stderr F I1212 16:16:49.056493 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:16:49.056573275+00:00 stderr F I1212 16:16:49.056540 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:49.056573275+00:00 stderr F I1212 16:16:49.056557 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:16:49.058609004+00:00 stderr F I1212 16:16:49.058577 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:49.058609004+00:00 stderr F I1212 16:16:49.058596 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:16:49.058625125+00:00 stderr F I1212 16:16:49.058616 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:49.058625125+00:00 stderr F I1212 16:16:49.058621 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:16:49.058653446+00:00 stderr F I1212 16:16:49.058634 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:49.058653446+00:00 stderr F I1212 16:16:49.058643 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:16:49.058664196+00:00 stderr F I1212 16:16:49.058657 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:49.058671676+00:00 stderr F I1212 16:16:49.058661 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 
2025-12-12T16:16:49.060163752+00:00 stderr F I1212 16:16:49.060127 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SignerUpdateRequired' "kube-apiserver-to-kubelet-signer" in "openshift-kube-apiserver-operator" requires a new signing cert/key pair: past its refresh time 2025-12-02 07:34:11 +0000 UTC 2025-12-12T16:16:49.060163752+00:00 stderr F I1212 16:16:49.060154 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SignerUpdateRequired' "kube-control-plane-signer" in "openshift-kube-apiserver-operator" requires a new signing cert/key pair: past its refresh time 2025-12-02 07:34:10 +0000 UTC 2025-12-12T16:16:49.060266785+00:00 stderr F I1212 16:16:49.060164 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SignerUpdateRequired' "kube-control-plane-signer" in "openshift-kube-apiserver-operator" requires a new signing cert/key pair: past its refresh time 2025-12-02 07:34:10 +0000 UTC 2025-12-12T16:16:49.060836079+00:00 stderr F I1212 16:16:49.060770 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SignerUpdateRequired' "kube-control-plane-signer" in "openshift-kube-apiserver-operator" requires a new signing cert/key pair: past its refresh time 2025-12-02 07:34:10 +0000 UTC 2025-12-12T16:16:49.071292454+00:00 stderr F I1212 16:16:49.071060 1 base_controller.go:82] Caches are synced for BoundSATokenSignerController 2025-12-12T16:16:49.071292454+00:00 stderr F I1212 16:16:49.071094 1 base_controller.go:119] Starting #1 worker of BoundSATokenSignerController controller ... 2025-12-12T16:16:49.150619751+00:00 stderr F I1212 16:16:49.150546 1 base_controller.go:82] Caches are synced for ConfigObserver 2025-12-12T16:16:49.150619751+00:00 stderr F I1212 16:16:49.150575 1 base_controller.go:119] Starting #1 worker of ConfigObserver controller ... 2025-12-12T16:16:49.150619751+00:00 stderr F I1212 16:16:49.150608 1 base_controller.go:82] Caches are synced for kube-apiserver 2025-12-12T16:16:49.150681342+00:00 stderr F I1212 16:16:49.150616 1 base_controller.go:119] Starting #1 worker of kube-apiserver controller ... 
2025-12-12T16:16:49.155854229+00:00 stderr F I1212 16:16:49.154455 1 annotations.go:76] Updating "auth.openshift.io/certificate-not-before" annotation for kube-apiserver-to-kubelet-signer/openshift-kube-apiserver-operator, diff: string( 2025-12-12T16:16:49.155854229+00:00 stderr F - "2025-11-02T07:34:11Z", 2025-12-12T16:16:49.155854229+00:00 stderr F + "2025-12-12T16:16:48Z", 2025-12-12T16:16:49.155854229+00:00 stderr F ) 2025-12-12T16:16:49.155854229+00:00 stderr F I1212 16:16:49.154494 1 annotations.go:82] Updating "auth.openshift.io/certificate-not-after" annotation for kube-apiserver-to-kubelet-signer/openshift-kube-apiserver-operator, diff: string( 2025-12-12T16:16:49.155854229+00:00 stderr F - "2026-11-02T07:34:11Z", 2025-12-12T16:16:49.155854229+00:00 stderr F + "2026-12-12T16:16:49Z", 2025-12-12T16:16:49.155854229+00:00 stderr F ) 2025-12-12T16:16:49.155854229+00:00 stderr F I1212 16:16:49.154510 1 annotations.go:88] Updating "certificates.openshift.io/refresh-period" annotation for kube-apiserver-to-kubelet-signer/openshift-kube-apiserver-operator, diff: string( 2025-12-12T16:16:49.155854229+00:00 stderr F - "", 2025-12-12T16:16:49.155854229+00:00 stderr F + "720h0m0s", 2025-12-12T16:16:49.155854229+00:00 stderr F ) 2025-12-12T16:16:49.186113567+00:00 stderr F I1212 16:16:49.183750 1 annotations.go:76] Updating "auth.openshift.io/certificate-not-before" annotation for kube-control-plane-signer/openshift-kube-apiserver-operator, diff: string( 2025-12-12T16:16:49.186113567+00:00 stderr F - "2025-11-02T07:34:10Z", 2025-12-12T16:16:49.186113567+00:00 stderr F + "2025-12-12T16:16:48Z", 2025-12-12T16:16:49.186113567+00:00 stderr F ) 2025-12-12T16:16:49.186113567+00:00 stderr F I1212 16:16:49.183797 1 annotations.go:82] Updating "auth.openshift.io/certificate-not-after" annotation for kube-control-plane-signer/openshift-kube-apiserver-operator, diff: string( 2025-12-12T16:16:49.186113567+00:00 stderr F - "2026-11-02T07:34:10Z", 2025-12-12T16:16:49.186113567+00:00 stderr F + "2026-02-10T16:16:49Z", 2025-12-12T16:16:49.186113567+00:00 stderr F ) 2025-12-12T16:16:49.186113567+00:00 stderr F I1212 16:16:49.183828 1 annotations.go:88] Updating "certificates.openshift.io/refresh-period" annotation for kube-control-plane-signer/openshift-kube-apiserver-operator, diff: string( 2025-12-12T16:16:49.186113567+00:00 stderr F - "", 2025-12-12T16:16:49.186113567+00:00 stderr F + "720h0m0s", 2025-12-12T16:16:49.186113567+00:00 stderr F ) 2025-12-12T16:16:49.190615247+00:00 stderr F W1212 16:16:49.190540 1 degraded_webhook.go:147] failed to connect to webhook "controlplanemachineset.machine.openshift.io" via service "control-plane-machine-set-operator.openshift-machine-api.svc:9443": dial tcp: lookup control-plane-machine-set-operator.openshift-machine-api.svc on 10.217.4.10:53: read udp 10.217.0.20:46234->10.217.4.10:53: read: connection refused 2025-12-12T16:16:49.244628676+00:00 stderr F I1212 16:16:49.244546 1 annotations.go:76] Updating "auth.openshift.io/certificate-not-before" annotation for kube-control-plane-signer/openshift-kube-apiserver-operator, diff: string( 2025-12-12T16:16:49.244628676+00:00 stderr F - "2025-11-02T07:34:10Z", 2025-12-12T16:16:49.244628676+00:00 stderr F + "2025-12-12T16:16:48Z", 2025-12-12T16:16:49.244628676+00:00 stderr F ) 2025-12-12T16:16:49.244748419+00:00 stderr F I1212 16:16:49.244734 1 annotations.go:82] Updating "auth.openshift.io/certificate-not-after" annotation for kube-control-plane-signer/openshift-kube-apiserver-operator, diff: string( 
2025-12-12T16:16:49.244748419+00:00 stderr F - "2026-11-02T07:34:10Z", 2025-12-12T16:16:49.244748419+00:00 stderr F + "2026-02-10T16:16:49Z", 2025-12-12T16:16:49.244748419+00:00 stderr F ) 2025-12-12T16:16:49.244793970+00:00 stderr F I1212 16:16:49.244781 1 annotations.go:88] Updating "certificates.openshift.io/refresh-period" annotation for kube-control-plane-signer/openshift-kube-apiserver-operator, diff: string( 2025-12-12T16:16:49.244793970+00:00 stderr F - "", 2025-12-12T16:16:49.244793970+00:00 stderr F + "720h0m0s", 2025-12-12T16:16:49.244793970+00:00 stderr F ) 2025-12-12T16:16:49.247496286+00:00 stderr F I1212 16:16:49.247468 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:49.270942778+00:00 stderr F I1212 16:16:49.270861 1 base_controller.go:82] Caches are synced for KubeletVersionSkewController 2025-12-12T16:16:49.270942778+00:00 stderr F I1212 16:16:49.270901 1 base_controller.go:119] Starting #1 worker of KubeletVersionSkewController controller ... 2025-12-12T16:16:49.275307435+00:00 stderr F I1212 16:16:49.272284 1 base_controller.go:82] Caches are synced for kube-apiserver-Node 2025-12-12T16:16:49.275307435+00:00 stderr F I1212 16:16:49.272299 1 base_controller.go:119] Starting #1 worker of kube-apiserver-Node controller ... 2025-12-12T16:16:49.275307435+00:00 stderr F I1212 16:16:49.272620 1 base_controller.go:82] Caches are synced for GuardController 2025-12-12T16:16:49.275307435+00:00 stderr F I1212 16:16:49.272661 1 base_controller.go:119] Starting #1 worker of GuardController controller ... 2025-12-12T16:16:49.313146319+00:00 stderr F I1212 16:16:49.313037 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'MasterNodesReadyChanged' All master nodes are ready 2025-12-12T16:16:49.342848454+00:00 stderr F I1212 16:16:49.342784 1 status_controller.go:230] clusteroperator/kube-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:29Z","message":"NodeInstallerProgressing: 1 node is at revision 11","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T08:08:37Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 11","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:28Z","message":"KubeletMinorVersionUpgradeable: Kubelet and API server minor versions are synced.","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:29Z","message":"PodSecurityInconclusiveEvaluationConditionsDetected: Could not evaluate violations for namespaces: [kube-node-lease openshift openshift-apiserver-operator openshift-cloud-network-config-controller openshift-cluster-samples-operator openshift-cluster-storage-operator openshift-config-managed openshift-config-operator openshift-console openshift-console-operator openshift-console-user-settings openshift-controller-manager openshift-controller-manager-operator openshift-dns-operator openshift-host-network openshift-ingress-canary openshift-ingress-operator openshift-kube-controller-manager-operator 
openshift-kube-storage-version-migrator openshift-kube-storage-version-migrator-operator openshift-network-console openshift-network-diagnostics openshift-node openshift-route-controller-manager openshift-service-ca openshift-service-ca-operator openshift-user-workload-monitoring]","reason":"PodSecurityInconclusive_PSViolationDecisionInconclusive","status":"True","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:49.371796490+00:00 stderr F I1212 16:16:49.370765 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-apiserver changed: Degraded changed from True to False ("NodeControllerDegraded: All master nodes are ready") 2025-12-12T16:16:49.567387465+00:00 stderr F I1212 16:16:49.566453 1 annotations.go:76] Updating "auth.openshift.io/certificate-not-before" annotation for kube-control-plane-signer/openshift-kube-apiserver-operator, diff: string( 2025-12-12T16:16:49.567387465+00:00 stderr F - "2025-11-02T07:34:10Z", 2025-12-12T16:16:49.567387465+00:00 stderr F + "2025-12-12T16:16:48Z", 2025-12-12T16:16:49.567387465+00:00 stderr F ) 2025-12-12T16:16:49.567387465+00:00 stderr F I1212 16:16:49.567340 1 annotations.go:82] Updating "auth.openshift.io/certificate-not-after" annotation for kube-control-plane-signer/openshift-kube-apiserver-operator, diff: string( 2025-12-12T16:16:49.567387465+00:00 stderr F - "2026-11-02T07:34:10Z", 2025-12-12T16:16:49.567387465+00:00 stderr F + "2026-02-10T16:16:49Z", 2025-12-12T16:16:49.567387465+00:00 stderr F ) 2025-12-12T16:16:49.567387465+00:00 stderr F I1212 16:16:49.567351 1 annotations.go:88] Updating "certificates.openshift.io/refresh-period" annotation for kube-control-plane-signer/openshift-kube-apiserver-operator, diff: string( 2025-12-12T16:16:49.567387465+00:00 stderr F - "", 2025-12-12T16:16:49.567387465+00:00 stderr F + "720h0m0s", 2025-12-12T16:16:49.567387465+00:00 stderr F ) 2025-12-12T16:16:49.647403619+00:00 stderr F I1212 16:16:49.643694 1 request.go:752] "Waited before sending request" delay="1.77151593s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dopenshift-kube-apiserver" 2025-12-12T16:16:50.644790249+00:00 stderr F I1212 16:16:50.643990 1 request.go:752] "Waited before sending request" delay="2.066748798s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc" 2025-12-12T16:16:51.202553937+00:00 stderr F W1212 16:16:51.201574 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.20:46610->10.217.4.10:53: read: connection refused 2025-12-12T16:16:51.464992294+00:00 stderr F I1212 16:16:51.463167 1 signer.go:123] Updated secret openshift-kube-apiserver-operator/kube-apiserver-to-kubelet-signer 2025-12-12T16:16:51.472159119+00:00 stderr F I1212 16:16:51.471157 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", 
UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretUpdated' Updated Secret/kube-apiserver-to-kubelet-signer -n openshift-kube-apiserver-operator because it changed 2025-12-12T16:16:51.651455286+00:00 stderr F I1212 16:16:51.650357 1 signer.go:123] Updated secret openshift-kube-apiserver-operator/kube-control-plane-signer 2025-12-12T16:16:51.651455286+00:00 stderr F I1212 16:16:51.650751 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretUpdated' Updated Secret/kube-control-plane-signer -n openshift-kube-apiserver-operator because it changed 2025-12-12T16:16:51.651455286+00:00 stderr F I1212 16:16:51.650771 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CABundleUpdateRequired' "kube-control-plane-signer-ca" in "openshift-kube-apiserver-operator" requires a new cert: signer update openshift-config-managed/kube-scheduler-client-cert-key 2025-12-12T16:16:51.651455286+00:00 stderr F I1212 16:16:51.651219 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CABundleUpdateRequired' "kube-control-plane-signer-ca" in "openshift-kube-apiserver-operator" requires a new cert: signer update openshift-kube-apiserver/check-endpoints-client-cert-key 2025-12-12T16:16:51.843240979+00:00 stderr F I1212 16:16:51.843053 1 request.go:752] "Waited before sending request" delay="2.59806062s" reason="client-side throttling, not priority and fairness" verb="PUT" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver-operator/secrets/kube-control-plane-signer" 2025-12-12T16:16:52.209889320+00:00 stderr F W1212 16:16:52.209028 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.20:33130->10.217.4.10:53: read: connection refused 2025-12-12T16:16:52.845854077+00:00 stderr F I1212 16:16:52.844973 1 request.go:752] "Waited before sending request" delay="2.795209623s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc" 2025-12-12T16:16:53.259152656+00:00 stderr F I1212 16:16:53.257953 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'PodCreated' Created Pod/revision-pruner-11-crc -n openshift-kube-apiserver because it was missing 2025-12-12T16:16:54.047502614+00:00 stderr F I1212 16:16:54.046574 1 request.go:752] "Waited before sending request" delay="2.783544487s" reason="client-side throttling, not priority and fairness" verb="GET" 
URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/secrets/node-kubeconfigs" 2025-12-12T16:16:54.253859322+00:00 stderr F I1212 16:16:54.252609 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'TargetUpdateRequired' "kube-scheduler-client-cert-key" in "openshift-config-managed" requires a new target cert/key pair: issuer "openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209", not in ca bundle: 2025-12-12T16:16:54.274078525+00:00 stderr F E1212 16:16:54.272606 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.20:33536->10.217.4.10:53: read: connection refused 2025-12-12T16:16:54.314754738+00:00 stderr F W1212 16:16:54.311770 1 degraded_webhook.go:147] failed to connect to webhook "multus-validating-config.k8s.io" via service "multus-admission-controller.openshift-multus.svc:443": dial tcp: lookup multus-admission-controller.openshift-multus.svc on 10.217.4.10:53: read udp 10.217.0.20:43452->10.217.4.10:53: read: connection refused 2025-12-12T16:16:54.474367855+00:00 stderr F I1212 16:16:54.473771 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'TargetUpdateRequired' "check-endpoints-client-cert-key" in "openshift-kube-apiserver" requires a new target cert/key pair: issuer "openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209", not in ca bundle: 2025-12-12T16:16:54.598427574+00:00 stderr F I1212 16:16:54.596313 1 annotations.go:76] Updating "auth.openshift.io/certificate-not-before" annotation for kube-scheduler-client-cert-key/openshift-config-managed, diff: string( 2025-12-12T16:16:54.598427574+00:00 stderr F - "2025-12-12T16:16:51Z", 2025-12-12T16:16:54.598427574+00:00 stderr F + "2025-12-12T16:16:53Z", 2025-12-12T16:16:54.598427574+00:00 stderr F ) 2025-12-12T16:16:54.906546997+00:00 stderr F I1212 16:16:54.906449 1 annotations.go:76] Updating "auth.openshift.io/certificate-not-before" annotation for check-endpoints-client-cert-key/openshift-kube-apiserver, diff: string( 2025-12-12T16:16:54.906546997+00:00 stderr F - "2025-12-12T16:16:51Z", 2025-12-12T16:16:54.906546997+00:00 stderr F + "2025-12-12T16:16:53Z", 2025-12-12T16:16:54.906546997+00:00 stderr F ) 2025-12-12T16:16:55.053174586+00:00 stderr F I1212 16:16:55.052431 1 request.go:752] "Waited before sending request" delay="1.793503077s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dopenshift-kube-apiserver" 2025-12-12T16:16:55.613662700+00:00 stderr F W1212 16:16:55.610149 1 degraded_webhook.go:147] failed to connect to webhook "multus-validating-config.k8s.io" via service "multus-admission-controller.openshift-multus.svc:443": dial tcp: lookup multus-admission-controller.openshift-multus.svc on 10.217.4.10:53: read udp 10.217.0.20:49899->10.217.4.10:53: read: connection refused 2025-12-12T16:16:55.863338336+00:00 stderr F I1212 16:16:55.863199 1 targetconfigcontroller.go:419] Updated client CA bundle configmap 
openshift-kube-apiserver/client-ca 2025-12-12T16:16:55.867731293+00:00 stderr F I1212 16:16:55.866251 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/client-ca -n openshift-kube-apiserver because it changed 2025-12-12T16:16:55.921445705+00:00 stderr F I1212 16:16:55.919375 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.919299992 +0000 UTC))" 2025-12-12T16:16:55.921445705+00:00 stderr F I1212 16:16:55.920309 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.919400655 +0000 UTC))" 2025-12-12T16:16:55.921445705+00:00 stderr F I1212 16:16:55.920328 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.920318877 +0000 UTC))" 2025-12-12T16:16:55.921445705+00:00 stderr F I1212 16:16:55.920366 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.920356808 +0000 UTC))" 2025-12-12T16:16:55.921445705+00:00 stderr F I1212 16:16:55.920383 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.920372529 +0000 UTC))" 2025-12-12T16:16:55.921445705+00:00 stderr F I1212 16:16:55.920399 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.920389169 +0000 UTC))" 2025-12-12T16:16:55.921445705+00:00 stderr F I1212 16:16:55.920433 1 tlsconfig.go:181] "Loaded client CA" index=6 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.920403169 +0000 UTC))" 2025-12-12T16:16:55.921445705+00:00 stderr F I1212 16:16:55.920451 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.92044057 +0000 UTC))" 2025-12-12T16:16:55.921445705+00:00 stderr F I1212 16:16:55.920466 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.920456031 +0000 UTC))" 2025-12-12T16:16:55.921445705+00:00 stderr F I1212 16:16:55.920485 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.920475341 +0000 UTC))" 2025-12-12T16:16:55.921445705+00:00 stderr F I1212 16:16:55.920795 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-kube-apiserver-operator.svc\" [serving] validServingFor=[metrics.openshift-kube-apiserver-operator.svc,metrics.openshift-kube-apiserver-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:13 +0000 UTC to 2027-11-02 07:52:14 +0000 UTC (now=2025-12-12 16:16:55.920776708 +0000 UTC))" 2025-12-12T16:16:55.921445705+00:00 stderr F I1212 16:16:55.920983 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556206\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2028-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:16:55.920966913 +0000 UTC))" 2025-12-12T16:16:56.245260641+00:00 stderr F I1212 16:16:56.243530 1 request.go:752] "Waited before sending request" delay="1.647029861s" reason="client-side throttling, not priority and fairness" verb="PUT" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/kube-scheduler-client-cert-key" 2025-12-12T16:16:56.257339365+00:00 stderr F I1212 16:16:56.254629 1 target.go:150] Updated secret openshift-config-managed/kube-scheduler-client-cert-key 2025-12-12T16:16:56.260413710+00:00 stderr F I1212 16:16:56.260349 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", 
Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretUpdated' Updated Secret/kube-scheduler-client-cert-key -n openshift-config-managed because it changed 2025-12-12T16:16:56.654146992+00:00 stderr F I1212 16:16:56.651983 1 target.go:150] Updated secret openshift-kube-apiserver/check-endpoints-client-cert-key 2025-12-12T16:16:56.655897035+00:00 stderr F I1212 16:16:56.655841 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretUpdated' Updated Secret/check-endpoints-client-cert-key -n openshift-kube-apiserver because it changed 2025-12-12T16:16:57.243999763+00:00 stderr F I1212 16:16:57.243931 1 request.go:752] "Waited before sending request" delay="1.795047264s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc" 2025-12-12T16:16:58.445316543+00:00 stderr F I1212 16:16:58.444483 1 request.go:752] "Waited before sending request" delay="2.353101059s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/secrets/node-kubeconfigs" 2025-12-12T16:16:58.485300479+00:00 stderr P I1212 16:16:58.485165 1 core.go:352] ConfigMap "openshift-config-managed/kube-apiserver-client-ca" changes: {"data":{"ca-bundle.crt":"-----BEGIN CERTIFICATE-----\nMIIDMDCCAhigAwIBAgIIIzF/30wVgUkwDQYJKoZIhvcNAQELBQAwNjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSAwHgYDVQQDExdhZG1pbi1rdWJlY29uZmlnLXNpZ25lcjAe\nFw0yNTExMDIwNzM0MDdaFw0zNTEwMzEwNzM0MDdaMDYxEjAQBgNVBAsTCW9wZW5z\naGlmdDEgMB4GA1UEAxMXYWRtaW4ta3ViZWNvbmZpZy1zaWduZXIwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDEaFvaxE/Ah0Q+4T67KuL6N5MoncMcfqtm\njKd8txx/b3t8o2WCAMF0IKDNMDDobraupmimcAQwOWen0WJzp3DqjVAIKabrG/DZ\nXqsx3xVHxhSvFOKEFQbiFu6HL0FvXs1bsMkm5YAcM/voHkGHefR+5YEgpgTuhZ6a\n9muG9cxUjlZ/BmMP3UwsgmRfxQ7TG3Ixf/mp++cLxi114b8ld8S4XtVuG//82BzB\nvk3J6+7tnRjli/AHSm0fx7ZvgRPY1b1IGSvGUMc6Qrc+nim/Ufd017TeFlkwKIRP\nPnUGuz0S/5Rz9XMoWJ/OHi/vB0eQs3pyqHBDPgTYCt1NZUO9nN7tAgMBAAGjQjBA\nMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBT9Y6Mp\nr1Yg0NUI8hFWKTlX1CJd6zANBgkqhkiG9w0BAQsFAAOCAQEAWD6f4r0mm7D7+uVn\nCs3w3zq0JIVBUtBTXYuSbgpnpmSVALRCSnRyguNx37SHU2qXVF7+hsaWYuO/B6iR\nZ5uZ6fkLEEFI5YN7q1HBEPBRaBFtJ7fSOBl9iSsHI11DX53+wRhJR319P3fZ18eq\nGwTdUHTy+L9ec1NjaJvOz2eJEVB3O2A9ySh+Rhdv75mFqTbNvxyf5fjw7OHDd5ti\nWPCT1UzyXUXpE8ET6HA59gQO3Ix/VPzZTpNWX1FAXDYpYFkK1t9Ifzjdqf3/P+uP\nvwMtUNixJg8RYhfRNZ4RbfULWU9Y0DpadRVX5WppGBTRNAAgmNGBYPPR7HuxVGx1\nReJ2Bg==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIIHuh6I9HTH+QwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM1WhcNMjYx\nMDI4MDgxNzM2WjAmMSQwIgYDVQQDDBtrdWJlLWNzci1zaWduZXJfQDE3NjIwNzE0\nNTUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb91UVWMxGg+Q7+o+5\ntt2z/Oq+7Mc1SNGJ4oF0Y99WM1Y7tIo5NOKWiIXE16rGuzY4+D9O53fh14ngzJm3\nWh3GjCGvr8/2W7J1BblXwuKwDuRl+OfJvyPtETUeME42Y9V6XP/B60iaaEYhm5t7\nTDf7TmmjEpqeWix43KqHnpcW9Zr8tM+tHLrGwcHnb+z3LvGkQA7mbXsaHiuryCVx\nudxQYNKWgtAkw3OOtuVyJ2gGD7iYVni1jg7nc9ZhQOYBoYRbAw3zh36CY50dZA89\nhDKYsVVourd9xfAdwSmrcgtsVo1X0ucCpEEUYKEz3/udgk+Dgf2hy3flIMcg9kcI\nS8xzAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0G\nA1UdDgQWBBSGMLijow+n1t5asMgXimhNhoMUvDAfBgNVHSMEGDAWgBQwDwrhoAwS\n6q2GA5CnoIQZVVim5jANBgkqhkiG9w0BAQsFAAOCAQEAhDT7ncsDiLG3QyVtffPp\nTjWscZowuHbVwUjwpZt5OxQFonSJxNCnqrj4MlIqqxraH5nnsrW5FqWyWWeMmXpz\nbFkiVhPFCVGjmRN9V1LXjHDc4lufe3ixq+MvgtU1YL/WJBmUxxw5dPifTT15IApJ\n6stLJ0QtHBNeEIIroUFpDB+O7OJYZ85ed6o6gT4n/v9nxBaLsZNpO2TzaWfI0Bst\nFEoPsfPKgBvwg9+2GijlP/VyKmP2gFdFm25PWeROU1VZzPrEhaOliO+/YXHt32YU\nJkxTF/smrLzxRRbb507cuvWEilzud93YbHmAhAj1h0CpDdzjxYDr5zRYJSP7BV+6\nUA==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDiTCCAnGgAwIBAgIIOsA0z3vOTSgwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM0WhcNMjcx\nMDIzMDgxNzM1WjBSMVAwTgYDVQQDDEdvcGVuc2hpZnQta3ViZS1jb250cm9sbGVy\nLW1hbmFnZXItb3BlcmF0b3JfY3NyLXNpZ25lci1zaWduZXJAMTc2MjA3MTQ1NTCC\nASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPY8aATxrjWpRKfnFU7BgmD1\nkNEst9RVY3fzGsARc5tF3x+zhl9BbsoJ9NAncCj+KnkAIYk4pJMU6BHIWIakpvqV\nLsnlPM45VU8ocwOWb5Z7g88YdqHGRWeZqZt/rPXmH/846iVGDstB0YQWgKKKK97X\nvjKMsq9ALSVj8gRWai7B7MVP/bZ4FgeqsYq6zIH9XKdPO8ev20qffrob4nmLGHdJ\nikAliwIy6nkVYrATKOS8t56votMD7xuFQ8rM6uQ0YVejA5rE/Tmq4/NsFpfFfkCP\nMU0vO2XwqCBV81XKD00SmW9MXIxbTq5lU9YjjE1sDFREtN4uZL4nEDa2+wnvIxEC\nAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O\nBBYEFDAPCuGgDBLqrYYDkKeghBlVWKbmMB8GA1UdIwQYMBaAFDAPCuGgDBLqrYYD\nkKeghBlVWKbmMA0GCSqGSIb3DQEBCwUAA4IBAQDS31/o3/4/9DBLFVcLvIwOQCK1\n0ZwJUa/8iK5GaMlMH6cNfvVZBrb4SwTVx0WXI5wrIXrlvYc+PtXL0MJeyIJmMpoU\nRyQAJZsh8cckeQjghV2Pf7wMfEbHudKTp8uoQDUBntkfNhJa4pPxmNWuhOrlvdB5\nEF/6IGviKAdSy0jcNpscvD3W0oSpCYRW0Ki/25LaFvIqP2Xy/cNJlWhzJWqZbK6k\nR9I4knhvIv/JYmppOVXw1rEvP+8Pn8UF2oSfFXcN5W+j4YIIhrAUnjAbaflpyX8k\nAUEKdtgVNNe7RlW9nQrhO8GqFJItitBbNtVuSkw9XIlQ0gc40E7mgB7Mjnbu\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDQjCCAiqgAwIBAgIICN23rtJ7PrYwDQYJKoZIhvcNAQELBQAwPzESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSkwJwYDVQQDEyBrdWJlLWFwaXNlcnZlci10by1rdWJlbGV0\nLXNpZ25lcjAeFw0yNTExMDIwNzM0MTFaFw0yNjExMDIwNzM0MTFaMD8xEjAQBgNV\nBAsTCW9wZW5zaGlmdDEpMCcGA1UEAxMga3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxl\ndC1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDzRlKEKNEy\nCyd+PPBkdzQZd3BeUb0b2qi1h8fbUiSNENrOpafKxxAHcr3a4KWB6sKhi28r14mF\nxcJ2Yb92/jLpkS15p629AUrGXxKuL8QtkBsY3dH0CqKMBedO6oxodva9Avc+3DMI\nvvYBJFy+4on/0JbM54fduvDmcEmhBtgRItK3Z87VbhemVPj7uDi9EV381uRMlmq4\ncgtD5mfS1yeRu0ut5IIr7/PN1G+93slLGQkHveqWlsFrDYd8Qm5PqirRBYy+18RC\nmEuNirFX3yPrEGwMvRlJyFia0RKuK69bFL2vduI5Wu7h/6VKP0/vEpEYqI6bJYoV\nbUjA2vqrV/1VAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTAD\nAQH/MB0GA1UdDgQWBBRNj9dsSIhfwKZ491Q/XZYt2lRWLzANBgkqhkiG9w0BAQsF\nAAOCAQEANmt7a5N+zch83D5+YeC4VQoFvpkHPKEYYT0EM/ykjch3453RmQWQEwkj\nVvstV1U16YpnEI61l1s347RHi64SwtlwV6tiNCpopDF2u3Bb+eiJqrlIC69EFnZE\n1426AVmZZq3sWu3eKB0HgT5u6B1rErSTl3c4hK4SiDsWWlVktBSN0BS4cD+urSAF\nc673/wLKCjq2I+9i3Wv2K7Ton3w5oaETE7lgQyImbKOVhJhFrPGu9fKXaeWlyXGY\nj7tz68vNTvecRynKrmzUJ9BBMfAXTrCowitzjBjanFitgXK4DnQMkb+8lv2Txb/n\nkB7RzcFDyIVd3g5XWBujR3fkQFWsNQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDlTCCAn2gAwIBAgIIJ6CFEe7+79cwDQYJKoZIhvcNAQELBQAwWDFWMFQGA1UE\nAwxNb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtYXBpc2Vy\ndmVyLXRvLWt1YmVsZXQtc2lnbmVyQDE3NjU1NTYyMDkwHhcNMjUxMjEyMTYxNjQ4\nWhcNMjYxMjEyMTYxNjQ5WjBYMVYwVAYDVQQDDE1vcGVuc2hpZnQta3ViZS1hcGlz\nZXJ2ZXItb3BlcmF0b3Jfa3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxldC1zaWduZXJA\nMTc2NTU1NjIwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFdeL5w\n1lJs3DvHYhLuNjrkSOLA4aXRwP85WxI5EZ4JKmSU1IxWlKucnS96ghUWuJudciUT\nGeDw1fYjUOvv+YnMhcM5avzLMmc/4JwRwmPsBNqeS6NSZsJYasHBQqT5lihBnZmU\nxRCSGpOAJqL/tWMsC3MxHG41AQunmbpk4RSZdPjfiJ3U8Gty9rnppq9GfZ9n+LxL\niIkodGqPe95J2csyNuLpDmOlhA5x7/miPLT7Wtp/hN/s2DSgpKQEWIHEsEVlfU+q\nH+GO/W1yWO+dDthJ2/yGn4ZQVe2riuw5uUcxjY27wkobhZ25/9brhqAKxweioGLf\nB32mvwrIarjuWKsCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQF\nMAMBAf8wHQYDVR0OBBYEFAl1BlYxjAq7rEsUpr186nCtx13/MB8GA1UdIwQYMBaA\nFAl1BlYxjAq7rEsUpr186nCtx13/MA0GCSqGSIb3DQEBCwUAA4IBAQCu3dv4kIwz\nu5DkLCS1z/I9QcxrEZmJs72LUWpri/eyvFxK5LrC4bU0d3LZXdjQUxLhrk1A8qD6\ne7QoqzRf9QdNRUiU3ilpwtANd6NwmIzd2PrcalBdoglrtDpBz5VAZ0j26YJAcpyn\nXbMpaDDOgz247mp4Ts1wHVjQa0H0bFAtuKbab+R+EFWCVc77MSRXVDiQndg/gtdZ\nEekPhH4kNhFT2c74uf32ICivfylG79g0sGNUVO0SPLup4psWmGf2pxI+HGftIT8p\ni0idY7Ij5JlIyQ8ypGbD7AqA1bRwcT6o8Al2iQw349XO694SJUNs10QGwuVKdHqL\nJC64gPwOl26J\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDNDCCAhygAwIBAgIILvKlXd2YBKIwDQYJKoZIhvcNAQELBQAwODESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVy\nMB4XDTI1MTEwMjA3MzQxMFoXDTI2MTEwMjA3MzQxMFowODESMBAGA1UECxMJb3Bl\nbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyMIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsOnFEdxnqqE7l+5NxN82TBBycC2F\n5cUboCcUVr211lubqmjCusnJzzz+6rKpO8uFqqGeu4C7rGpudOaK52IZhG0WP8b7\n0raQhnM4DV8t2SHDV4GhFUiuNE+b4FJrZ6jiljQo2g9ZeeCZgdmmBrIFHBXDFzEc\nA1RPScqOtvBbbH064Zd267gOmVPJnWmxDXo6X/RGYCm1YUS6FQ2WWpl707ComvgZ\nAvWGSA4H1sZirMQ+ug3bctkLb+SiXUzf+tLnGIHPeqDNfMrNUhxBl6dDhlMbUIzY\nrVxgDD8y3i7eg5i5HG8yntl8epgs2gn47wfavfjLqATBlciJPZY6Qv3BFQIDAQAB\no0IwQDAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\nWi6wmZ50AVdwxaiO3WjTvp82+sEwDQYJKoZIhvcNAQELBQADggEBAA4siMWwOGL5\nCiCxbiJsuKMbCLlFR6JaD43FWnUCZDoe7np7W76Oh4py4Zht7XlZKotcXrrfRYVO\ndVht66PCbSujy375p/B6c3isG4h/1cNSGDm1uhAkHXGZ88S2wSjKT5YJ/HUAkvyj\nadQgZeO7Q60YBSDE/67Ldq1zqvBrMF2k8pF49p1AdAtf4OSDzIaGGPUQJTFExA0E\n03xMlOPNhYZ8MgFT2XE6nRT74lCAfK9krAsZLtFuAtp/14t013PD0FqTTQRUmuSj\nO6pJKDTH8tZ3ieXxSzRR+j4p5hkHaehgQVyUbwiw8WVXkd6NcWR6yQcSeqIsTSCD\neMDdTmmKyuo=\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhzCCAm+gAwIBAgIID5n6gpWYc8YwDQYJKoZIhvcNAQELBQAwUTFPME0GA1UE\nAwxGb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtY29udHJv\nbC1wbGFuZS1zaWduZXJAMTc2NTU1NjIwOTAeFw0yNTEyMTIxNjE2NDhaFw0yNjAy\nMTAxNjE2NDlaMFExTzBNBgNVBAMMRm9wZW5zaGlmdC1rdWJlLWFwaXNlcnZlci1v\ncGVyYXRvcl9rdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyQDE3NjU1NTYyMDkwggEi\nMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtQLJD7hc4OcHS2GPLdn9xsKkM\nAqHIcDr2UgXYWTVShY348nGviBJGCBWyKqQPpM6u4zIbS4xHMeQ3fnVCRvti8Ggf\n7zKigxyoxwcx+f4z9L5fCO6RbScmwmaYchePaAMT/7sNBt8NIKyFfep/bExW2UPw\nrb0qUqAbmBse+Azrl0V+UHN 2025-12-12T16:16:58.485364211+00:00 stderr F exaG3VmsOEfCJORAVgIo8SNIAG8jHe6+r3BtnwTsC\n59znIuoxDrBL9cq82ZZGQ++jVx4AE6JM0Lj/UBPVsJ06+X/829a1PiHuqlY2oN/p\nm+xCLSQgi59nrDifJLQXNBjahDW8ccWnbkvkUPjO8/OzS2nJb6uXihez5m2xAgMB\nAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBR9PxK41FQkWasZMK1vVKm6UNhzHzAfBgNVHSMEGDAWgBR9PxK41FQkWasZMK1v\nVKm6UNhzHzANBgkqhkiG9w0BAQsFAAOCAQEAfHCzxMKk00cOsThQkkL2trlY5tl2\n9wXEd/62Fh8EoOhNmCIpyPYLWMDnD2GB2BS7J5S+zcqby2+7s8Etub0gpvbN2Ocq\nHss3f+WcAFm7t9hiQrJ4gPYSkwEQGCwJ3ueGIEmPyyrTQPTmzNYudSdXt1WSrTpO\nO1sKWdQro5M0V4U9Z6MWGnG4nIZljqHWgVkZXkluh6Rvshoen8rhUNa6VV3aMHcZ\n94dtvZRSye9RsOZwZygsG/HU2+GcnKKYvqkIo8FZVAYTyu3rlOlT9dmmZpxwukb6\nADjin/tgzt7r0FiU+Z9uYqI3SoFog9pv+mlqpuf3zLm+Q3DKwBpxaxn5nQ==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDSDCCAjCgAwIBAgIIV//3Qv2OxM4wDQYJKoZIhvcNAQELBQAwQjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSwwKgYDVQQDEyNrdWJlbGV0LWJvb3RzdHJhcC1rdWJlY29u\nZmlnLXNpZ25lcjAeFw0yNTExMDIwNzM0MDhaFw0zNTEwMzEwNzM0MDhaMEIxEjAQ\nBgNVBAsTCW9wZW5zaGlmdDEsMCoGA1UEAxMja3ViZWxldC1ib290c3RyYXAta3Vi\nZWNvbmZpZy1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc\nZBn9WZ5wLPck6+g3f4p0NBGyE6LaXnWavXx9m0sVdTVQooknndmKufeYkXGZ5Lb+\nfbMAp/6swgSJ1DjdBj06rCqJEZfdZl3uZoD/Th4ha2Phl12bXaNYLiuOu5BOZ3UW\n08y1Wab9Y9zc0o4Z71pHH4o9TH3QPNT6BqAz4kkgD6t1r/R7E7lrZbx+7e+0JBAW\nRgufaFOX1AYU5B4+pSM21eJY7oP1P9I4DMeeJW39opCCHAuUQgHpOV1YPtRqEPJ4\n9matas8qm5qIMIPbGEGFckSJqgny9YCfHaLezJtZMIHJgz5LW4H91gQCGvfSbLtH\nYxYO/PcTXQCnDYNwqf29AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBSCg7Y8QkzypoNAqYVPNK5YWMVIXzANBgkqhkiG\n9w0BAQsFAAOCAQEAsP1NR5ZgC7F5FvoU2CXla4FufdeYTk4bjjNoFJMqnvI5gcY6\nJDxP1Rl2YBHCpRxngtBFxw3Xoe8UnBKzZWsS5wLUYRloprhGVBSLM0vqJJOvP7M0\njt3SLuB7h0dG2GO9yQ4y10+xVWxP5Os9wcbQcRgTQKL3gHmCq4aQN1cqJSxyJ/ut\nlbfYlM/xBcfLMY5Leeas6y2FPCFIEONh1U9FJZlF3YkhPp+XD7aePtC4tJqsokMc\nP80IwPn54aDT9akRPsOteB6C+xSAz2TlfWaJ/l/x9yXK+HJrRhMartqyN511SeEd\nDpNcMW9qPTjJzBj+N3f0ZfvbTmhVSvV65ZEtAw==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhTCCAm2gAwIBAgIIfksp5LDEMMgwDQYJKoZIhvcNAQELBQAwUDFOMEwGA1UE\nAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX25vZGUtc3lzdGVt\nLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MB4XDTI1MTEwMjA3NTEyN1oXDTI4MTEw\nMTA3NTEyOFowUDFOMEwGA1UEAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9w\nZXJhdG9yX25vZGUtc3lzdGVtLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA402A+B9GaYmzBVLJEyrGvytCY0Vb\nvvIKHW2kxl9IFN3+LNZHW/mKeJfz/hBTm2bs+6uRCDlMSyONDlVUWVsuE+q0+F42\n0n3VyxWRSrDZ2ur5oNxmoBSsHRM+PxccQ6X3JTZyO397LHNOzxAs/Es+St8A8sbY\nGLc1lNqeOLvwAOT5d2PrFlYCAfXYs/UVIaio846jidKKN1f8Z6W5pgdAHuTXbyBQ\nLDQh6s43TBPhww1KszmcwURjEBDCT6KlhsM/quMd9XlMU0ZEAMf6XxsqvW8ia8C8\nF+RNAaGkwmiS4qZ+hJ4KIUnWM84j+bsyNBqlHFKi1e7LsKRyjnQ288FqIQIDAQAB\no2MwYTAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\n9O+dX0K7dzD13bshuR70sHbUAeAwHwYDVR0jBBgwFoAU9O+dX0K7dzD13bshuR70\nsHbUAeAwDQYJKoZIhvcNAQELBQADggEBADPRGSn1U/YwBUkpU7vTzsxqLaVJW21o\n6hV/W2IjjGNGqp6c2kSH/3ZGSEjNwIJqKRFpC2gmTPgAqnC4nDosOHx5F5HXTmrU\n1l2Ivcm1Ep+t/zBgNHjBi3yommx8n2iTTdakpQaq7/u1s0I4UiRqXydjoGXp7H1C\naAsmRlK8ovgEAWzItjeMBzy65wqiStPBK+XAIddqznHCxrRyH5xk3HcnyMG4GDWl\nrogdK8yTGCuZVCvGfe9Hwm8tyYrxDRNvRLTc0ssTonAwnR/7IzaVVc9Pp0svCynJ\n6VX3FGhgWwDVWeajj8yrXeR42az/Rr1TAAOZtJMW+4hIkaU0/+msvgw=\n-----END CERTIFICATE-----\n"},"metadata":{"managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ca-bundle.crt":{}},"f:metadata":{"f:annotations":{".":{},"f:openshift.io/owning-component":{}}}},"manager":"cluster-kube-apiserver-operator","operation":"Update","time":"2025-12-12T16:16:55Z"}],"resourceVersion":null,"uid":"e7bc0b2c-2af6-488e-bf6f-25875798350a"}} 2025-12-12T16:16:58.487671497+00:00 stderr F I1212 16:16:58.487619 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/kube-apiserver-client-ca -n openshift-config-managed: 2025-12-12T16:16:58.487671497+00:00 stderr F cause by changes in data.ca-bundle.crt 2025-12-12T16:16:59.643382043+00:00 stderr F I1212 16:16:59.643312 1 request.go:752] "Waited before sending request" delay="1.97628958s" reason="client-side 
throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa" 2025-12-12T16:17:00.647335563+00:00 stderr F I1212 16:17:00.646323 1 request.go:752] "Waited before sending request" delay="1.998153133s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dinstaller" 2025-12-12T16:17:01.655350993+00:00 stderr F I1212 16:17:01.655262 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretUpdated' Updated Secret/webhook-authenticator -n openshift-kube-apiserver because it changed 2025-12-12T16:17:01.657218229+00:00 stderr F I1212 16:17:01.657153 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'StartingNewRevision' new revision 12 triggered by "optional secret/webhook-authenticator has changed" 2025-12-12T16:17:01.843691191+00:00 stderr F I1212 16:17:01.843610 1 request.go:752] "Waited before sending request" delay="1.596962739s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/secrets/localhost-recovery-client-token" 2025-12-12T16:17:03.044065098+00:00 stderr F I1212 16:17:03.043950 1 request.go:752] "Waited before sending request" delay="1.389447573s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/secrets/node-kubeconfigs" 2025-12-12T16:17:04.443258918+00:00 stderr F I1212 16:17:04.443156 1 request.go:752] "Waited before sending request" delay="1.193456627s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/secrets/node-kubeconfigs" 2025-12-12T16:17:04.653293846+00:00 stderr F I1212 16:17:04.651545 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapCreated' Created ConfigMap/kube-apiserver-pod-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:05.456759932+00:00 stderr F I1212 16:17:05.455901 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapCreated' Created ConfigMap/config-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:05.843642137+00:00 stderr F I1212 16:17:05.843164 1 request.go:752] "Waited before sending request" delay="1.190873165s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/configmaps/kube-apiserver-audit-policies" 2025-12-12T16:17:06.843809426+00:00 stderr F I1212 16:17:06.843719 1 request.go:752] "Waited before 
sending request" delay="1.393419059s" reason="client-side throttling, not priority and fairness" verb="POST" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/configmaps" 2025-12-12T16:17:06.856286980+00:00 stderr F I1212 16:17:06.854460 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapCreated' Created ConfigMap/kube-apiserver-cert-syncer-kubeconfig-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:08.043895045+00:00 stderr F I1212 16:17:08.043764 1 request.go:752] "Waited before sending request" delay="1.396857112s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/services/apiserver" 2025-12-12T16:17:08.256682880+00:00 stderr F I1212 16:17:08.256542 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapCreated' Created ConfigMap/oauth-metadata-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:09.043858338+00:00 stderr F I1212 16:17:09.043779 1 request.go:752] "Waited before sending request" delay="1.3963327s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/secrets/localhost-recovery-client-token" 2025-12-12T16:17:09.651724639+00:00 stderr F I1212 16:17:09.651633 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapCreated' Created ConfigMap/bound-sa-token-signing-certs-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:10.044006886+00:00 stderr F I1212 16:17:10.043899 1 request.go:752] "Waited before sending request" delay="1.392500187s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/configmaps/kube-apiserver-audit-policies" 2025-12-12T16:17:11.050017817+00:00 stderr F I1212 16:17:11.049195 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapCreated' Created ConfigMap/etcd-serving-ca-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:11.243965922+00:00 stderr F I1212 16:17:11.243874 1 request.go:752] "Waited before sending request" delay="1.396764011s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods/revision-pruner-11-crc" 2025-12-12T16:17:12.443353224+00:00 stderr F I1212 16:17:12.443252 1 request.go:752] "Waited before sending request" delay="1.394241989s" reason="client-side throttling, not priority and fairness" verb="POST" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/configmaps" 2025-12-12T16:17:12.448430458+00:00 
stderr F I1212 16:17:12.448367 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapCreated' Created ConfigMap/kube-apiserver-server-ca-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:13.644262882+00:00 stderr F I1212 16:17:13.643415 1 request.go:752] "Waited before sending request" delay="1.376857094s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/localhost-recovery-client" 2025-12-12T16:17:13.854751232+00:00 stderr F I1212 16:17:13.854641 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapCreated' Created ConfigMap/kubelet-serving-ca-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:14.644125173+00:00 stderr F I1212 16:17:14.643999 1 request.go:752] "Waited before sending request" delay="1.383220139s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/localhost-recovery-client" 2025-12-12T16:17:15.252015013+00:00 stderr F I1212 16:17:15.251884 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapCreated' Created ConfigMap/sa-token-signing-certs-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:15.843406901+00:00 stderr F I1212 16:17:15.843282 1 request.go:752] "Waited before sending request" delay="1.387252578s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/secrets/node-kubeconfigs" 2025-12-12T16:17:16.649658925+00:00 stderr F I1212 16:17:16.649569 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapCreated' Created ConfigMap/kube-apiserver-audit-policies-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:16.844306342+00:00 stderr F I1212 16:17:16.843344 1 request.go:752] "Waited before sending request" delay="1.389237547s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods/revision-pruner-11-crc" 2025-12-12T16:17:18.044243118+00:00 stderr F I1212 16:17:18.044088 1 request.go:752] "Waited before sending request" delay="1.394906749s" reason="client-side throttling, not priority and fairness" verb="POST" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/secrets" 2025-12-12T16:17:18.050130787+00:00 stderr F I1212 16:17:18.050061 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", 
APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretCreated' Created Secret/etcd-client-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:19.243664741+00:00 stderr F I1212 16:17:19.243563 1 request.go:752] "Waited before sending request" delay="1.356468378s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver" 2025-12-12T16:17:20.455079267+00:00 stderr F I1212 16:17:20.454713 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretCreated' Created Secret/localhost-recovery-serving-certkey-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:21.249902451+00:00 stderr F I1212 16:17:21.249840 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretCreated' Created Secret/localhost-recovery-client-token-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:22.255350027+00:00 stderr F I1212 16:17:22.255258 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretCreated' Created Secret/webhook-authenticator-12 -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:23.251044345+00:00 stderr F I1212 16:17:23.250370 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RevisionTriggered' new revision 12 triggered by "optional secret/webhook-authenticator has changed" 2025-12-12T16:17:24.443246940+00:00 stderr F I1212 16:17:24.443139 1 request.go:752] "Waited before sending request" delay="1.192844024s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods/revision-pruner-11-crc" 2025-12-12T16:17:25.443932090+00:00 stderr F I1212 16:17:25.443870 1 request.go:752] "Waited before sending request" delay="1.597011848s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc" 2025-12-12T16:17:26.865815175+00:00 stderr F I1212 16:17:26.863985 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'PodCreated' Created Pod/revision-pruner-12-crc -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:28.043761972+00:00 stderr F I1212 16:17:28.043647 1 request.go:752] "Waited before sending request" delay="1.177213616s" reason="client-side throttling, not priority and fairness" verb="GET" 
URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dopenshift-kube-apiserver" 2025-12-12T16:17:30.849202032+00:00 stderr F I1212 16:17:30.848171 1 installer_controller.go:562] node crc with revision 11 is the oldest and needs new revision 12 2025-12-12T16:17:30.849286535+00:00 stderr F I1212 16:17:30.849208 1 installer_controller.go:570] "crc" moving to (v1.NodeStatus) { 2025-12-12T16:17:30.849286535+00:00 stderr F NodeName: (string) (len=3) "crc", 2025-12-12T16:17:30.849286535+00:00 stderr F CurrentRevision: (int32) 11, 2025-12-12T16:17:30.849286535+00:00 stderr F TargetRevision: (int32) 12, 2025-12-12T16:17:30.849286535+00:00 stderr F LastFailedRevision: (int32) 11, 2025-12-12T16:17:30.849286535+00:00 stderr F LastFailedTime: (*v1.Time)(0xc006f612d8)(2025-11-03 08:56:07 +0000 UTC), 2025-12-12T16:17:30.849286535+00:00 stderr F LastFailedReason: (string) (len=15) "InstallerFailed", 2025-12-12T16:17:30.849286535+00:00 stderr F LastFailedCount: (int) 1, 2025-12-12T16:17:30.849286535+00:00 stderr F LastFallbackCount: (int) 0, 2025-12-12T16:17:30.849286535+00:00 stderr F LastFailedRevisionErrors: ([]string) (len=1 cap=1) { 2025-12-12T16:17:30.849286535+00:00 stderr F (string) (len=73) "installer: The container could not be located when the pod was terminated" 2025-12-12T16:17:30.849286535+00:00 stderr F } 2025-12-12T16:17:30.849286535+00:00 stderr F } 2025-12-12T16:17:30.899195257+00:00 stderr F I1212 16:17:30.899029 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'NodeTargetRevisionChanged' Updating node "crc" from revision 11 to 12 because node crc with revision 11 is the oldest 2025-12-12T16:17:30.906309711+00:00 stderr F I1212 16:17:30.904122 1 status_controller.go:230] clusteroperator/kube-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:17:30Z","message":"NodeInstallerProgressing: 1 node is at revision 11; 0 nodes have achieved new revision 12","reason":"NodeInstaller","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-11-02T08:08:37Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 11; 0 nodes have achieved new revision 12","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:28Z","message":"KubeletMinorVersionUpgradeable: Kubelet and API server minor versions are synced.","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:29Z","message":"PodSecurityInconclusiveEvaluationConditionsDetected: Could not evaluate violations for namespaces: [kube-node-lease openshift openshift-apiserver-operator openshift-cloud-network-config-controller openshift-cluster-samples-operator openshift-cluster-storage-operator openshift-config-managed openshift-config-operator openshift-console openshift-console-operator openshift-console-user-settings openshift-controller-manager openshift-controller-manager-operator openshift-dns-operator openshift-host-network openshift-ingress-canary openshift-ingress-operator openshift-kube-controller-manager-operator openshift-kube-storage-version-migrator 
openshift-kube-storage-version-migrator-operator openshift-network-console openshift-network-diagnostics openshift-node openshift-route-controller-manager openshift-service-ca openshift-service-ca-operator openshift-user-workload-monitoring]","reason":"PodSecurityInconclusive_PSViolationDecisionInconclusive","status":"True","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:30.930072182+00:00 stderr F I1212 16:17:30.927485 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-apiserver changed: Progressing changed from False to True ("NodeInstallerProgressing: 1 node is at revision 11; 0 nodes have achieved new revision 12"),Available message changed from "StaticPodsAvailable: 1 nodes are active; 1 node is at revision 11" to "StaticPodsAvailable: 1 nodes are active; 1 node is at revision 11; 0 nodes have achieved new revision 12" 2025-12-12T16:17:32.044275120+00:00 stderr F I1212 16:17:32.043396 1 request.go:752] "Waited before sending request" delay="1.126650994s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver" 2025-12-12T16:17:33.047239395+00:00 stderr F I1212 16:17:33.046506 1 request.go:752] "Waited before sending request" delay="1.973654735s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc" 2025-12-12T16:17:34.244059873+00:00 stderr F I1212 16:17:34.243044 1 request.go:752] "Waited before sending request" delay="1.189448937s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc" 2025-12-12T16:17:34.860233212+00:00 stderr F I1212 16:17:34.850320 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'PodCreated' Created Pod/installer-12-crc -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:35.247070941+00:00 stderr F I1212 16:17:35.246306 1 request.go:752] "Waited before sending request" delay="1.15191954s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dinstaller" 2025-12-12T16:17:35.854414107+00:00 stderr F I1212 16:17:35.853844 1 installer_controller.go:550] "crc" is in transition to 12, but has not made progress because installer is not finished, but in Pending phase 2025-12-12T16:17:38.456321197+00:00 stderr F I1212 16:17:38.450273 1 installer_controller.go:550] "crc" is in transition to 12, but has not made progress because installer is not finished, but in Running phase 2025-12-12T16:17:39.046754448+00:00 stderr F I1212 16:17:39.046675 1 installer_controller.go:550] "crc" is in transition to 12, but has not made progress because installer is not finished, but in Running phase 2025-12-12T16:17:45.830407407+00:00 stderr F I1212 16:17:45.828989 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", 
Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapCreated' Created ConfigMap/user-client-ca -n openshift-kube-apiserver because it was missing 2025-12-12T16:17:46.297276819+00:00 stderr F I1212 16:17:46.297209 1 targetconfigcontroller.go:419] Updated client CA bundle configmap openshift-kube-apiserver/client-ca 2025-12-12T16:17:46.306717053+00:00 stderr F I1212 16:17:46.298192 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/client-ca -n openshift-kube-apiserver because it changed 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.317982 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.31793829 +0000 UTC))" 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.318019 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.318007402 +0000 UTC))" 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.318035 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.318025132 +0000 UTC))" 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.318062 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.318039742 +0000 UTC))" 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.318079 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.318067783 +0000 UTC))" 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.318097 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 
certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.318084253 +0000 UTC))" 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.318115 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.318102044 +0000 UTC))" 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.318133 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.318120194 +0000 UTC))" 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.318150 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.318138155 +0000 UTC))" 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.318169 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.318157245 +0000 UTC))" 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.318232 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.318216777 +0000 UTC))" 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.318488 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-kube-apiserver-operator.svc\" [serving] validServingFor=[metrics.openshift-kube-apiserver-operator.svc,metrics.openshift-kube-apiserver-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:13 +0000 UTC to 2027-11-02 07:52:14 +0000 UTC (now=2025-12-12 16:17:46.318471383 +0000 UTC))" 2025-12-12T16:17:46.324338088+00:00 stderr F I1212 16:17:46.318669 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556206\" [serving] 
validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2028-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:17:46.318652568 +0000 UTC))" 2025-12-12T16:17:46.692762856+00:00 stderr F I1212 16:17:46.692215 1 installer_controller.go:550] "crc" is in transition to 12, but has not made progress because installer is not finished, but in Running phase 2025-12-12T16:17:47.107052628+00:00 stderr P I1212 16:17:47.102705 1 core.go:352] ConfigMap "openshift-config-managed/kube-apiserver-client-ca" changes: {"data":{"ca-bundle.crt":"-----BEGIN CERTIFICATE-----\nMIIDMDCCAhigAwIBAgIIIzF/30wVgUkwDQYJKoZIhvcNAQELBQAwNjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSAwHgYDVQQDExdhZG1pbi1rdWJlY29uZmlnLXNpZ25lcjAe\nFw0yNTExMDIwNzM0MDdaFw0zNTEwMzEwNzM0MDdaMDYxEjAQBgNVBAsTCW9wZW5z\naGlmdDEgMB4GA1UEAxMXYWRtaW4ta3ViZWNvbmZpZy1zaWduZXIwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDEaFvaxE/Ah0Q+4T67KuL6N5MoncMcfqtm\njKd8txx/b3t8o2WCAMF0IKDNMDDobraupmimcAQwOWen0WJzp3DqjVAIKabrG/DZ\nXqsx3xVHxhSvFOKEFQbiFu6HL0FvXs1bsMkm5YAcM/voHkGHefR+5YEgpgTuhZ6a\n9muG9cxUjlZ/BmMP3UwsgmRfxQ7TG3Ixf/mp++cLxi114b8ld8S4XtVuG//82BzB\nvk3J6+7tnRjli/AHSm0fx7ZvgRPY1b1IGSvGUMc6Qrc+nim/Ufd017TeFlkwKIRP\nPnUGuz0S/5Rz9XMoWJ/OHi/vB0eQs3pyqHBDPgTYCt1NZUO9nN7tAgMBAAGjQjBA\nMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBT9Y6Mp\nr1Yg0NUI8hFWKTlX1CJd6zANBgkqhkiG9w0BAQsFAAOCAQEAWD6f4r0mm7D7+uVn\nCs3w3zq0JIVBUtBTXYuSbgpnpmSVALRCSnRyguNx37SHU2qXVF7+hsaWYuO/B6iR\nZ5uZ6fkLEEFI5YN7q1HBEPBRaBFtJ7fSOBl9iSsHI11DX53+wRhJR319P3fZ18eq\nGwTdUHTy+L9ec1NjaJvOz2eJEVB3O2A9ySh+Rhdv75mFqTbNvxyf5fjw7OHDd5ti\nWPCT1UzyXUXpE8ET6HA59gQO3Ix/VPzZTpNWX1FAXDYpYFkK1t9Ifzjdqf3/P+uP\nvwMtUNixJg8RYhfRNZ4RbfULWU9Y0DpadRVX5WppGBTRNAAgmNGBYPPR7HuxVGx1\nReJ2Bg==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIIHuh6I9HTH+QwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM1WhcNMjYx\nMDI4MDgxNzM2WjAmMSQwIgYDVQQDDBtrdWJlLWNzci1zaWduZXJfQDE3NjIwNzE0\nNTUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb91UVWMxGg+Q7+o+5\ntt2z/Oq+7Mc1SNGJ4oF0Y99WM1Y7tIo5NOKWiIXE16rGuzY4+D9O53fh14ngzJm3\nWh3GjCGvr8/2W7J1BblXwuKwDuRl+OfJvyPtETUeME42Y9V6XP/B60iaaEYhm5t7\nTDf7TmmjEpqeWix43KqHnpcW9Zr8tM+tHLrGwcHnb+z3LvGkQA7mbXsaHiuryCVx\nudxQYNKWgtAkw3OOtuVyJ2gGD7iYVni1jg7nc9ZhQOYBoYRbAw3zh36CY50dZA89\nhDKYsVVourd9xfAdwSmrcgtsVo1X0ucCpEEUYKEz3/udgk+Dgf2hy3flIMcg9kcI\nS8xzAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0G\nA1UdDgQWBBSGMLijow+n1t5asMgXimhNhoMUvDAfBgNVHSMEGDAWgBQwDwrhoAwS\n6q2GA5CnoIQZVVim5jANBgkqhkiG9w0BAQsFAAOCAQEAhDT7ncsDiLG3QyVtffPp\nTjWscZowuHbVwUjwpZt5OxQFonSJxNCnqrj4MlIqqxraH5nnsrW5FqWyWWeMmXpz\nbFkiVhPFCVGjmRN9V1LXjHDc4lufe3ixq+MvgtU1YL/WJBmUxxw5dPifTT15IApJ\n6stLJ0QtHBNeEIIroUFpDB+O7OJYZ85ed6o6gT4n/v9nxBaLsZNpO2TzaWfI0Bst\nFEoPsfPKgBvwg9+2GijlP/VyKmP2gFdFm25PWeROU1VZzPrEhaOliO+/YXHt32YU\nJkxTF/smrLzxRRbb507cuvWEilzud93YbHmAhAj1h0CpDdzjxYDr5zRYJSP7BV+6\nUA==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDiTCCAnGgAwIBAgIIOsA0z3vOTSgwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM0WhcNMjcx\nMDIzMDgxNzM1WjBSMVAwTgYDVQQDDEdvcGVuc2hpZnQta3ViZS1jb250cm9sbGVy\nLW1hbmFnZXItb3BlcmF0b3JfY3NyLXNpZ25lci1zaWduZXJAMTc2MjA3MTQ1NTCC\nASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPY8aATxrjWpRKfnFU7BgmD1\nkNEst9RVY3fzGsARc5tF3x+zhl9BbsoJ9NAncCj+KnkAIYk4pJMU6BHIWIakpvqV\nLsnlPM45VU8ocwOWb5Z7g88YdqHGRWeZqZt/rPXmH/846iVGDstB0YQWgKKKK97X\nvjKMsq9ALSVj8gRWai7B7MVP/bZ4FgeqsYq6zIH9XKdPO8ev20qffrob4nmLGHdJ\nikAliwIy6nkVYrATKOS8t56votMD7xuFQ8rM6uQ0YVejA5rE/Tmq4/NsFpfFfkCP\nMU0vO2XwqCBV81XKD00SmW9MXIxbTq5lU9YjjE1sDFREtN4uZL4nEDa2+wnvIxEC\nAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O\nBBYEFDAPCuGgDBLqrYYDkKeghBlVWKbmMB8GA1UdIwQYMBaAFDAPCuGgDBLqrYYD\nkKeghBlVWKbmMA0GCSqGSIb3DQEBCwUAA4IBAQDS31/o3/4/9DBLFVcLvIwOQCK1\n0ZwJUa/8iK5GaMlMH6cNfvVZBrb4SwTVx0WXI5wrIXrlvYc+PtXL0MJeyIJmMpoU\nRyQAJZsh8cckeQjghV2Pf7wMfEbHudKTp8uoQDUBntkfNhJa4pPxmNWuhOrlvdB5\nEF/6IGviKAdSy0jcNpscvD3W0oSpCYRW0Ki/25LaFvIqP2Xy/cNJlWhzJWqZbK6k\nR9I4knhvIv/JYmppOVXw1rEvP+8Pn8UF2oSfFXcN5W+j4YIIhrAUnjAbaflpyX8k\nAUEKdtgVNNe7RlW9nQrhO8GqFJItitBbNtVuSkw9XIlQ0gc40E7mgB7Mjnbu\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDQjCCAiqgAwIBAgIICN23rtJ7PrYwDQYJKoZIhvcNAQELBQAwPzESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSkwJwYDVQQDEyBrdWJlLWFwaXNlcnZlci10by1rdWJlbGV0\nLXNpZ25lcjAeFw0yNTExMDIwNzM0MTFaFw0yNjExMDIwNzM0MTFaMD8xEjAQBgNV\nBAsTCW9wZW5zaGlmdDEpMCcGA1UEAxMga3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxl\ndC1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDzRlKEKNEy\nCyd+PPBkdzQZd3BeUb0b2qi1h8fbUiSNENrOpafKxxAHcr3a4KWB6sKhi28r14mF\nxcJ2Yb92/jLpkS15p629AUrGXxKuL8QtkBsY3dH0CqKMBedO6oxodva9Avc+3DMI\nvvYBJFy+4on/0JbM54fduvDmcEmhBtgRItK3Z87VbhemVPj7uDi9EV381uRMlmq4\ncgtD5mfS1yeRu0ut5IIr7/PN1G+93slLGQkHveqWlsFrDYd8Qm5PqirRBYy+18RC\nmEuNirFX3yPrEGwMvRlJyFia0RKuK69bFL2vduI5Wu7h/6VKP0/vEpEYqI6bJYoV\nbUjA2vqrV/1VAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTAD\nAQH/MB0GA1UdDgQWBBRNj9dsSIhfwKZ491Q/XZYt2lRWLzANBgkqhkiG9w0BAQsF\nAAOCAQEANmt7a5N+zch83D5+YeC4VQoFvpkHPKEYYT0EM/ykjch3453RmQWQEwkj\nVvstV1U16YpnEI61l1s347RHi64SwtlwV6tiNCpopDF2u3Bb+eiJqrlIC69EFnZE\n1426AVmZZq3sWu3eKB0HgT5u6B1rErSTl3c4hK4SiDsWWlVktBSN0BS4cD+urSAF\nc673/wLKCjq2I+9i3Wv2K7Ton3w5oaETE7lgQyImbKOVhJhFrPGu9fKXaeWlyXGY\nj7tz68vNTvecRynKrmzUJ9BBMfAXTrCowitzjBjanFitgXK4DnQMkb+8lv2Txb/n\nkB7RzcFDyIVd3g5XWBujR3fkQFWsNQ==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDlTCCAn2gAwIBAgIIJ6CFEe7+79cwDQYJKoZIhvcNAQELBQAwWDFWMFQGA1UE\nAwxNb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtYXBpc2Vy\ndmVyLXRvLWt1YmVsZXQtc2lnbmVyQDE3NjU1NTYyMDkwHhcNMjUxMjEyMTYxNjQ4\nWhcNMjYxMjEyMTYxNjQ5WjBYMVYwVAYDVQQDDE1vcGVuc2hpZnQta3ViZS1hcGlz\nZXJ2ZXItb3BlcmF0b3Jfa3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxldC1zaWduZXJA\nMTc2NTU1NjIwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFdeL5w\n1lJs3DvHYhLuNjrkSOLA4aXRwP85WxI5EZ4JKmSU1IxWlKucnS96ghUWuJudciUT\nGeDw1fYjUOvv+YnMhcM5avzLMmc/4JwRwmPsBNqeS6NSZsJYasHBQqT5lihBnZmU\nxRCSGpOAJqL/tWMsC3MxHG41AQunmbpk4RSZdPjfiJ3U8Gty9rnppq9GfZ9n+LxL\niIkodGqPe95J2csyNuLpDmOlhA5x7/miPLT7Wtp/hN/s2DSgpKQEWIHEsEVlfU+q\nH+GO/W1yWO+dDthJ2/yGn4ZQVe2riuw5uUcxjY27wkobhZ25/9brhqAKxweioGLf\nB32mvwrIarjuWKsCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQF\nMAMBAf8wHQYDVR0OBBYEFAl1BlYxjAq7rEsUpr186nCtx13/MB8GA1UdIwQYMBaA\nFAl1BlYxjAq7rEsUpr186nCtx13/MA0GCSqGSIb3DQEBCwUAA4IBAQCu3dv4kIwz\nu5DkLCS1z/I9QcxrEZmJs72LUWpri/eyvFxK5LrC4bU0d3LZXdjQUxLhrk1A8qD6\ne7QoqzRf9QdNRUiU3ilpwtANd6NwmIzd2PrcalBdoglrtDpBz5VAZ0j26YJAcpyn\nXbMpaDDOgz247mp4Ts1wHVjQa0H0bFAtuKbab+R+EFWCVc77MSRXVDiQndg/gtdZ\nEekPhH4kNhFT2c74uf32ICivfylG79g0sGNUVO0SPLup4psWmGf2pxI+HGftIT8p\ni0idY7Ij5JlIyQ8ypGbD7AqA1bRwcT6o8Al2iQw349XO694SJUNs10QGwuVKdHqL\nJC64gPwOl26J\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDNDCCAhygAwIBAgIILvKlXd2YBKIwDQYJKoZIhvcNAQELBQAwODESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVy\nMB4XDTI1MTEwMjA3MzQxMFoXDTI2MTEwMjA3MzQxMFowODESMBAGA1UECxMJb3Bl\nbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyMIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsOnFEdxnqqE7l+5NxN82TBBycC2F\n5cUboCcUVr211lubqmjCusnJzzz+6rKpO8uFqqGeu4C7rGpudOaK52IZhG0WP8b7\n0raQhnM4DV8t2SHDV4GhFUiuNE+b4FJrZ6jiljQo2g9ZeeCZgdmmBrIFHBXDFzEc\nA1RPScqOtvBbbH064Zd267gOmVPJnWmxDXo6X/RGYCm1YUS6FQ2WWpl707ComvgZ\nAvWGSA4H1sZirMQ+ug3bctkLb+SiXUzf+tLnGIHPeqDNfMrNUhxBl6dDhlMbUIzY\nrVxgDD8y3i7eg5i5HG8yntl8epgs2gn47wfavfjLqATBlciJPZY6Qv3BFQIDAQAB\no0IwQDAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\nWi6wmZ50AVdwxaiO3WjTvp82+sEwDQYJKoZIhvcNAQELBQADggEBAA4siMWwOGL5\nCiCxbiJsuKMbCLlFR6JaD43FWnUCZDoe7np7W76Oh4py4Zht7XlZKotcXrrfRYVO\ndVht66PCbSujy375p/B6c3isG4h/1cNSGDm1uhAkHXGZ88S2wSjKT5YJ/HUAkvyj\nadQgZeO7Q60YBSDE/67Ldq1zqvBrMF2k8pF49p1AdAtf4OSDzIaGGPUQJTFExA0E\n03xMlOPNhYZ8MgFT2XE6nRT74lCAfK9krAsZLtFuAtp/14t013PD0FqTTQRUmuSj\nO6pJKDTH8tZ3ieXxSzRR+j4p5hkHaehgQVyUbwiw8WVXkd6NcWR6yQcSeqIsTSCD\neMDdTmmKyuo=\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhzCCAm+gAwIBAgIID5n6gpWYc8YwDQYJKoZIhvcNAQELBQAwUTFPME0GA1UE\nAwxGb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtY29udHJv\nbC1wbGFuZS1zaWduZXJAMTc2NTU1NjIwOTAeFw0yNTEyMTIxNjE2NDhaFw0yNjAy\nMTAxNjE2NDlaMFExTzBNBgNVBAMMRm9wZW5zaGlmdC1rdWJlLWFwaXNlcnZlci1v\ncGVyYXRvcl9rdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyQDE3NjU1NTYyMDkwggEi\nMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtQLJD7hc4OcHS2GPLdn9xsKkM\nAqHIcDr2UgXYWTVShY348nGviBJGCBWyKqQPpM6u4zIbS4xHMeQ3fnVCRvti8Ggf\n7zKigxyoxwcx+f4z9L5fCO6RbScmwmaYchePaAMT/7sNBt8NIKyFfep/bExW2UPw\nrb0qUqAbmBse+Azrl0V+UHN 2025-12-12T16:17:47.107137520+00:00 stderr F 
exaG3VmsOEfCJORAVgIo8SNIAG8jHe6+r3BtnwTsC\n59znIuoxDrBL9cq82ZZGQ++jVx4AE6JM0Lj/UBPVsJ06+X/829a1PiHuqlY2oN/p\nm+xCLSQgi59nrDifJLQXNBjahDW8ccWnbkvkUPjO8/OzS2nJb6uXihez5m2xAgMB\nAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBR9PxK41FQkWasZMK1vVKm6UNhzHzAfBgNVHSMEGDAWgBR9PxK41FQkWasZMK1v\nVKm6UNhzHzANBgkqhkiG9w0BAQsFAAOCAQEAfHCzxMKk00cOsThQkkL2trlY5tl2\n9wXEd/62Fh8EoOhNmCIpyPYLWMDnD2GB2BS7J5S+zcqby2+7s8Etub0gpvbN2Ocq\nHss3f+WcAFm7t9hiQrJ4gPYSkwEQGCwJ3ueGIEmPyyrTQPTmzNYudSdXt1WSrTpO\nO1sKWdQro5M0V4U9Z6MWGnG4nIZljqHWgVkZXkluh6Rvshoen8rhUNa6VV3aMHcZ\n94dtvZRSye9RsOZwZygsG/HU2+GcnKKYvqkIo8FZVAYTyu3rlOlT9dmmZpxwukb6\nADjin/tgzt7r0FiU+Z9uYqI3SoFog9pv+mlqpuf3zLm+Q3DKwBpxaxn5nQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIFWzCCA0OgAwIBAgIUQLy5hoffN9SGXMo67f/i/5XEwyEwDQYJKoZIhvcNAQEL\nBQAwPTESMBAGA1UECwwJb3BlbnNoaWZ0MScwJQYDVQQDDB5hZG1pbi1rdWJlY29u\nZmlnLXNpZ25lci1jdXN0b20wHhcNMjUxMjEyMTYxNzQyWhcNMzUxMjEwMTYxNzQy\nWjA9MRIwEAYDVQQLDAlvcGVuc2hpZnQxJzAlBgNVBAMMHmFkbWluLWt1YmVjb25m\naWctc2lnbmVyLWN1c3RvbTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB\nAJRtnlRieMPhCvStW7Uo/SGT7vUyHxVn8BYwaDaVxTUKO0kZRGBZ0gHPv8jnIZ0u\n9xNf88H2LEduWCso2Q+si1eaGR24oL/nxabCArbO4hHFtwsF4LFbA2v4iknUaW5d\nQWb4bl072Tx3O0Wly+wiVv7H93PVCyuLWmBeH80F+Cwt80s3RbtDthOHKjYK/a5W\naFRjDBzvmoWoK+/GxYjovUmgZCpLvElE9a2TmH1hExyn1ST15L8ESyah0e0vSZFW\n2odSnLavfmUw2HbS4lpy0peAGSa3R52E+rvF16qGuAGHIja09i/0GN/TM/aTrFRu\noyDcrAWgxNwU/ungVw8jX1+ReTC0ZkohS78CfvE5jhM6AcRCbMisGAFibd7VHbA3\nUot5wB7w/fAs7YPatGUdRNVYE4vQxfTYTRktYiSLSsPOEuHaqA6lBHLklssMGGAt\nPrFydiB76a4MaZrHgBNLVUpVT5txScHOmqTrU4m/Ix+SxgARUfhnN4QizhWNMhi1\nKxmHi9OQye+tTxpBLBbvagZFoQOkjMfHPABQkaORgdBwKn73V2psrwZpoZh5jDWZ\ny9RB3N/K+vsQvCmhjnX4rKHhcX/tLVUDcvN1XNLbPFJUoVpFqtSZb6yaQlHze+zL\nNSw9SUfOHN5+VuDW9O0ho/cDIV2w1e05pPZe7+SiSwEZAgMBAAGjUzBRMB0GA1Ud\nDgQWBBSLpZUyMU6+pBaqwQ1fQgx26OkbpjAfBgNVHSMEGDAWgBSLpZUyMU6+pBaq\nwQ1fQgx26OkbpjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4ICAQB8\nS/trc5KgMr4wUI8dKDSGKElBdqbMWIY0xPt1ANYj36eoA9Fj8k2qX+vxqSRw5NIU\nPuLASD2jk5vM7Chn29XLnEO0uZlFrdX6ywCGHqU00X6AKPBOaflmdiUo5/GvY0ll\nK6Rk+Y+jueATGDVseDzArN6DW22ZJ0M/7uwmJjW4VRmK0gPmz0PXBGrNXuR/e00n\nrXGvq3CqUw+g9lnR1JI6DSRk3iu5SgK4syO+B6P7Z7Cxuj2YDyYJEv4uTPeti0E9\nVn0UH3h0/GzDFu/ghy4bEhb/CX74FcoBx0geoZu3YYhSjL4gmRLnTwmL5Yg4d0R5\nKsFg3Ht5VBh757nG+aMBJb+aheFuu0z62XOQlpJC4+qx1e4cYU+jInKsnZYSptT7\nMmtGFjud4bmlSyDgJ0s+gfu+7PBeoowJi9sQSxlA4HFStbc/9Qh8eUxeZG1nGIYa\n+ACyCDzlVAt2+EOB8l+mxTFoBD1UcYG7E+2wepOpkz92zUqS5vlQOeZg1KlfHOcz\nAzrBCHw3+xYQN62WaNX97Beewz2y05I/CvSZfIW0U4CReywukh1V+66m5WWvA2Qo\nqXKlRZjy/rcLHXG557FT6EnQDoh2R+FEko6O1tJ4urkAD1/E1fY1U7qdkr9uvMlo\nvOrzaWzKi110hAolc+gWzOnIcG3nf1y7EMIVbBbHMw==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDSDCCAjCgAwIBAgIIV//3Qv2OxM4wDQYJKoZIhvcNAQELBQAwQjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSwwKgYDVQQDEyNrdWJlbGV0LWJvb3RzdHJhcC1rdWJlY29u\nZmlnLXNpZ25lcjAeFw0yNTExMDIwNzM0MDhaFw0zNTEwMzEwNzM0MDhaMEIxEjAQ\nBgNVBAsTCW9wZW5zaGlmdDEsMCoGA1UEAxMja3ViZWxldC1ib290c3RyYXAta3Vi\nZWNvbmZpZy1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc\nZBn9WZ5wLPck6+g3f4p0NBGyE6LaXnWavXx9m0sVdTVQooknndmKufeYkXGZ5Lb+\nfbMAp/6swgSJ1DjdBj06rCqJEZfdZl3uZoD/Th4ha2Phl12bXaNYLiuOu5BOZ3UW\n08y1Wab9Y9zc0o4Z71pHH4o9TH3QPNT6BqAz4kkgD6t1r/R7E7lrZbx+7e+0JBAW\nRgufaFOX1AYU5B4+pSM21eJY7oP1P9I4DMeeJW39opCCHAuUQgHpOV1YPtRqEPJ4\n9matas8qm5qIMIPbGEGFckSJqgny9YCfHaLezJtZMIHJgz5LW4H91gQCGvfSbLtH\nYxYO/PcTXQCnDYNwqf29AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBSCg7Y8QkzypoNAqYVPNK5YWMVIXzANBgkqhkiG\n9w0BAQsFAAOCAQEAsP1NR5ZgC7F5FvoU2CXla4FufdeYTk4bjjNoFJMqnvI5gcY6\nJDxP1Rl2YBHCpRxngtBFxw3Xoe8UnBKzZWsS5wLUYRloprhGVBSLM0vqJJOvP7M0\njt3SLuB7h0dG2GO9yQ4y10+xVWxP5Os9wcbQcRgTQKL3gHmCq4aQN1cqJSxyJ/ut\nlbfYlM/xBcfLMY5Leeas6y2FPCFIEONh1U9FJZlF3YkhPp+XD7aePtC4tJqsokMc\nP80IwPn54aDT9akRPsOteB6C+xSAz2TlfWaJ/l/x9yXK+HJrRhMartqyN511SeEd\nDpNcMW9qPTjJzBj+N3f0ZfvbTmhVSvV65ZEtAw==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhTCCAm2gAwIBAgIIfksp5LDEMMgwDQYJKoZIhvcNAQELBQAwUDFOMEwGA1UE\nAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX25vZGUtc3lzdGVt\nLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MB4XDTI1MTEwMjA3NTEyN1oXDTI4MTEw\nMTA3NTEyOFowUDFOMEwGA1UEAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9w\nZXJhdG9yX25vZGUtc3lzdGVtLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA402A+B9GaYmzBVLJEyrGvytCY0Vb\nvvIKHW2kxl9IFN3+LNZHW/mKeJfz/hBTm2bs+6uRCDlMSyONDlVUWVsuE+q0+F42\n0n3VyxWRSrDZ2ur5oNxmoBSsHRM+PxccQ6X3JTZyO397LHNOzxAs/Es+St8A8sbY\nGLc1lNqeOLvwAOT5d2PrFlYCAfXYs/UVIaio846jidKKN1f8Z6W5pgdAHuTXbyBQ\nLDQh6s43TBPhww1KszmcwURjEBDCT6KlhsM/quMd9XlMU0ZEAMf6XxsqvW8ia8C8\nF+RNAaGkwmiS4qZ+hJ4KIUnWM84j+bsyNBqlHFKi1e7LsKRyjnQ288FqIQIDAQAB\no2MwYTAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\n9O+dX0K7dzD13bshuR70sHbUAeAwHwYDVR0jBBgwFoAU9O+dX0K7dzD13bshuR70\nsHbUAeAwDQYJKoZIhvcNAQELBQADggEBADPRGSn1U/YwBUkpU7vTzsxqLaVJW21o\n6hV/W2IjjGNGqp6c2kSH/3ZGSEjNwIJqKRFpC2gmTPgAqnC4nDosOHx5F5HXTmrU\n1l2Ivcm1Ep+t/zBgNHjBi3yommx8n2iTTdakpQaq7/u1s0I4UiRqXydjoGXp7H1C\naAsmRlK8ovgEAWzItjeMBzy65wqiStPBK+XAIddqznHCxrRyH5xk3HcnyMG4GDWl\nrogdK8yTGCuZVCvGfe9Hwm8tyYrxDRNvRLTc0ssTonAwnR/7IzaVVc9Pp0svCynJ\n6VX3FGhgWwDVWeajj8yrXeR42az/Rr1TAAOZtJMW+4hIkaU0/+msvgw=\n-----END CERTIFICATE-----\n"},"metadata":{"managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ca-bundle.crt":{}},"f:metadata":{"f:annotations":{".":{},"f:openshift.io/owning-component":{}}}},"manager":"cluster-kube-apiserver-operator","operation":"Update","time":"2025-12-12T16:17:46Z"}],"resourceVersion":null,"uid":"e7bc0b2c-2af6-488e-bf6f-25875798350a"}} 2025-12-12T16:17:47.109143760+00:00 stderr F I1212 16:17:47.108252 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/kube-apiserver-client-ca -n openshift-config-managed: 2025-12-12T16:17:47.109143760+00:00 stderr F cause by changes in data.ca-bundle.crt 2025-12-12T16:17:47.487371011+00:00 stderr F I1212 16:17:47.487287 1 request.go:752] "Waited before sending request" delay="1.187916108s" reason="client-side 
throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/secrets/node-kubeconfigs" 2025-12-12T16:17:48.888203605+00:00 stderr F I1212 16:17:48.887037 1 request.go:752] "Waited before sending request" delay="1.014301998s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc" 2025-12-12T16:17:50.091581506+00:00 stderr F I1212 16:17:50.090362 1 installer_controller.go:550] "crc" is in transition to 12, but has not made progress because installer is not finished, but in Running phase 2025-12-12T16:17:51.097601746+00:00 stderr F I1212 16:17:51.096620 1 targetconfigcontroller.go:419] Updated client CA bundle configmap openshift-kube-apiserver/client-ca 2025-12-12T16:17:51.098998051+00:00 stderr F I1212 16:17:51.098316 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/client-ca -n openshift-kube-apiserver because it changed 2025-12-12T16:17:52.287999697+00:00 stderr F I1212 16:17:52.287903 1 request.go:752] "Waited before sending request" delay="1.188119674s" reason="client-side throttling, not priority and fairness" verb="PUT" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/kube-apiserver-client-ca" 2025-12-12T16:17:52.302290490+00:00 stderr P I1212 16:17:52.301372 1 core.go:352] ConfigMap "openshift-config-managed/kube-apiserver-client-ca" changes: {"data":{"ca-bundle.crt":"-----BEGIN CERTIFICATE-----\nMIIFWzCCA0OgAwIBAgIUQLy5hoffN9SGXMo67f/i/5XEwyEwDQYJKoZIhvcNAQEL\nBQAwPTESMBAGA1UECwwJb3BlbnNoaWZ0MScwJQYDVQQDDB5hZG1pbi1rdWJlY29u\nZmlnLXNpZ25lci1jdXN0b20wHhcNMjUxMjEyMTYxNzQyWhcNMzUxMjEwMTYxNzQy\nWjA9MRIwEAYDVQQLDAlvcGVuc2hpZnQxJzAlBgNVBAMMHmFkbWluLWt1YmVjb25m\naWctc2lnbmVyLWN1c3RvbTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB\nAJRtnlRieMPhCvStW7Uo/SGT7vUyHxVn8BYwaDaVxTUKO0kZRGBZ0gHPv8jnIZ0u\n9xNf88H2LEduWCso2Q+si1eaGR24oL/nxabCArbO4hHFtwsF4LFbA2v4iknUaW5d\nQWb4bl072Tx3O0Wly+wiVv7H93PVCyuLWmBeH80F+Cwt80s3RbtDthOHKjYK/a5W\naFRjDBzvmoWoK+/GxYjovUmgZCpLvElE9a2TmH1hExyn1ST15L8ESyah0e0vSZFW\n2odSnLavfmUw2HbS4lpy0peAGSa3R52E+rvF16qGuAGHIja09i/0GN/TM/aTrFRu\noyDcrAWgxNwU/ungVw8jX1+ReTC0ZkohS78CfvE5jhM6AcRCbMisGAFibd7VHbA3\nUot5wB7w/fAs7YPatGUdRNVYE4vQxfTYTRktYiSLSsPOEuHaqA6lBHLklssMGGAt\nPrFydiB76a4MaZrHgBNLVUpVT5txScHOmqTrU4m/Ix+SxgARUfhnN4QizhWNMhi1\nKxmHi9OQye+tTxpBLBbvagZFoQOkjMfHPABQkaORgdBwKn73V2psrwZpoZh5jDWZ\ny9RB3N/K+vsQvCmhjnX4rKHhcX/tLVUDcvN1XNLbPFJUoVpFqtSZb6yaQlHze+zL\nNSw9SUfOHN5+VuDW9O0ho/cDIV2w1e05pPZe7+SiSwEZAgMBAAGjUzBRMB0GA1Ud\nDgQWBBSLpZUyMU6+pBaqwQ1fQgx26OkbpjAfBgNVHSMEGDAWgBSLpZUyMU6+pBaq\nwQ1fQgx26OkbpjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4ICAQB8\nS/trc5KgMr4wUI8dKDSGKElBdqbMWIY0xPt1ANYj36eoA9Fj8k2qX+vxqSRw5NIU\nPuLASD2jk5vM7Chn29XLnEO0uZlFrdX6ywCGHqU00X6AKPBOaflmdiUo5/GvY0ll\nK6Rk+Y+jueATGDVseDzArN6DW22ZJ0M/7uwmJjW4VRmK0gPmz0PXBGrNXuR/e00n\nrXGvq3CqUw+g9lnR1JI6DSRk3iu5SgK4syO+B6P7Z7Cxuj2YDyYJEv4uTPeti0E9\nVn0UH3h0/GzDFu/ghy4bEhb/CX74FcoBx0geoZu3YYhSjL4gmRLnTwmL5Yg4d0R5\nKsFg3Ht5VBh757nG+aMBJb+aheFuu0z62XOQlpJC4+qx1e4cYU+jInKsnZYSptT7\nMmtGFjud4bmlSyDgJ0s+gfu+7PBeoowJi9sQSxlA4HFStbc/9Qh8eUxeZG1nGIYa\n+ACyCDzlVAt2+EOB8l+mxTFoBD1UcYG7E+2wepOpkz92zUqS5vlQOeZg1KlfHOcz\nAzrBCHw3+xYQN62WaNX97Beewz2y05I/CvSZfIW0U4CReywukh1V+66m5WW
vA2Qo\nqXKlRZjy/rcLHXG557FT6EnQDoh2R+FEko6O1tJ4urkAD1/E1fY1U7qdkr9uvMlo\nvOrzaWzKi110hAolc+gWzOnIcG3nf1y7EMIVbBbHMw==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIIHuh6I9HTH+QwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM1WhcNMjYx\nMDI4MDgxNzM2WjAmMSQwIgYDVQQDDBtrdWJlLWNzci1zaWduZXJfQDE3NjIwNzE0\nNTUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb91UVWMxGg+Q7+o+5\ntt2z/Oq+7Mc1SNGJ4oF0Y99WM1Y7tIo5NOKWiIXE16rGuzY4+D9O53fh14ngzJm3\nWh3GjCGvr8/2W7J1BblXwuKwDuRl+OfJvyPtETUeME42Y9V6XP/B60iaaEYhm5t7\nTDf7TmmjEpqeWix43KqHnpcW9Zr8tM+tHLrGwcHnb+z3LvGkQA7mbXsaHiuryCVx\nudxQYNKWgtAkw3OOtuVyJ2gGD7iYVni1jg7nc9ZhQOYBoYRbAw3zh36CY50dZA89\nhDKYsVVourd9xfAdwSmrcgtsVo1X0ucCpEEUYKEz3/udgk+Dgf2hy3flIMcg9kcI\nS8xzAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0G\nA1UdDgQWBBSGMLijow+n1t5asMgXimhNhoMUvDAfBgNVHSMEGDAWgBQwDwrhoAwS\n6q2GA5CnoIQZVVim5jANBgkqhkiG9w0BAQsFAAOCAQEAhDT7ncsDiLG3QyVtffPp\nTjWscZowuHbVwUjwpZt5OxQFonSJxNCnqrj4MlIqqxraH5nnsrW5FqWyWWeMmXpz\nbFkiVhPFCVGjmRN9V1LXjHDc4lufe3ixq+MvgtU1YL/WJBmUxxw5dPifTT15IApJ\n6stLJ0QtHBNeEIIroUFpDB+O7OJYZ85ed6o6gT4n/v9nxBaLsZNpO2TzaWfI0Bst\nFEoPsfPKgBvwg9+2GijlP/VyKmP2gFdFm25PWeROU1VZzPrEhaOliO+/YXHt32YU\nJkxTF/smrLzxRRbb507cuvWEilzud93YbHmAhAj1h0CpDdzjxYDr5zRYJSP7BV+6\nUA==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDiTCCAnGgAwIBAgIIOsA0z3vOTSgwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM0WhcNMjcx\nMDIzMDgxNzM1WjBSMVAwTgYDVQQDDEdvcGVuc2hpZnQta3ViZS1jb250cm9sbGVy\nLW1hbmFnZXItb3BlcmF0b3JfY3NyLXNpZ25lci1zaWduZXJAMTc2MjA3MTQ1NTCC\nASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPY8aATxrjWpRKfnFU7BgmD1\nkNEst9RVY3fzGsARc5tF3x+zhl9BbsoJ9NAncCj+KnkAIYk4pJMU6BHIWIakpvqV\nLsnlPM45VU8ocwOWb5Z7g88YdqHGRWeZqZt/rPXmH/846iVGDstB0YQWgKKKK97X\nvjKMsq9ALSVj8gRWai7B7MVP/bZ4FgeqsYq6zIH9XKdPO8ev20qffrob4nmLGHdJ\nikAliwIy6nkVYrATKOS8t56votMD7xuFQ8rM6uQ0YVejA5rE/Tmq4/NsFpfFfkCP\nMU0vO2XwqCBV81XKD00SmW9MXIxbTq5lU9YjjE1sDFREtN4uZL4nEDa2+wnvIxEC\nAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O\nBBYEFDAPCuGgDBLqrYYDkKeghBlVWKbmMB8GA1UdIwQYMBaAFDAPCuGgDBLqrYYD\nkKeghBlVWKbmMA0GCSqGSIb3DQEBCwUAA4IBAQDS31/o3/4/9DBLFVcLvIwOQCK1\n0ZwJUa/8iK5GaMlMH6cNfvVZBrb4SwTVx0WXI5wrIXrlvYc+PtXL0MJeyIJmMpoU\nRyQAJZsh8cckeQjghV2Pf7wMfEbHudKTp8uoQDUBntkfNhJa4pPxmNWuhOrlvdB5\nEF/6IGviKAdSy0jcNpscvD3W0oSpCYRW0Ki/25LaFvIqP2Xy/cNJlWhzJWqZbK6k\nR9I4knhvIv/JYmppOVXw1rEvP+8Pn8UF2oSfFXcN5W+j4YIIhrAUnjAbaflpyX8k\nAUEKdtgVNNe7RlW9nQrhO8GqFJItitBbNtVuSkw9XIlQ0gc40E7mgB7Mjnbu\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDQjCCAiqgAwIBAgIICN23rtJ7PrYwDQYJKoZIhvcNAQELBQAwPzESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSkwJwYDVQQDEyBrdWJlLWFwaXNlcnZlci10by1rdWJlbGV0\nLXNpZ25lcjAeFw0yNTExMDIwNzM0MTFaFw0yNjExMDIwNzM0MTFaMD8xEjAQBgNV\nBAsTCW9wZW5zaGlmdDEpMCcGA1UEAxMga3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxl\ndC1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDzRlKEKNEy\nCyd+PPBkdzQZd3BeUb0b2qi1h8fbUiSNENrOpafKxxAHcr3a4KWB6sKhi28r14mF\nxcJ2Yb92/jLpkS15p629AUrGXxKuL8QtkBsY3dH0CqKMBedO6oxodva9Avc+3DMI\nvvYBJFy+4on/0JbM54fduvDmcEmhBtgRItK3Z87VbhemVPj7uDi9EV381uRMlmq4\ncgtD5mfS1yeRu0ut5IIr7/PN1G+93slLGQkHveqWlsFrDYd8Qm5PqirRBYy+18RC\nmEuNirFX3yPrEGwMvRlJyFia0RKuK69bFL2vduI5Wu7h/6VKP0/vEpEYqI6bJYoV\nbUjA2vqrV/1VAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTAD\nAQH/MB0GA1UdDgQWBBRNj9dsSIhfwKZ491Q/XZYt2lRWLzANBgkqhkiG9w0BAQsF\nAAOCAQEANmt7a5N+zch83D5+YeC4VQoFvpkHPKEYYT0EM/ykjch3453RmQWQEwkj\nVvstV1U16YpnEI61l1s347RHi64SwtlwV6tiNCpopDF2u3Bb+eiJqrlIC69EFnZE\n1426AVmZZq3sWu3eKB0HgT5u6B1rErSTl3c4hK4SiDsWWlVktBSN0BS4cD+urSAF\nc673/wLKCjq2I+9i3Wv2K7Ton3w5oaETE7lgQyImbKOVhJhFrPGu9fKXaeWlyXGY\nj7tz68vNTvecRynKrmzUJ9BBMfAXTrCowitzjBjanFitgXK4DnQMkb+8lv2Txb/n\nkB7RzcFDyIVd3g5XWBujR3fkQFWsNQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDlTCCAn2gAwIBAgIIJ6CFEe7+79cwDQYJKoZIhvcNAQELBQAwWDFWMFQGA1UE\nAwxNb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtYXBpc2Vy\ndmVyLXRvLWt1YmVsZXQtc2lnbmVyQDE3NjU1NTYyMDkwHhcNMjUxMjEyMTYxNjQ4\nWhcNMjYxMjEyMTYxNjQ5WjBYMVYwVAYDVQQDDE1vcGVuc2hpZnQta3ViZS1hcGlz\nZXJ2ZXItb3BlcmF0b3Jfa3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxldC1zaWduZXJA\nMTc2NTU1NjIwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFdeL5w\n1lJs3DvHYhLuNjrkSOLA4aXRwP85WxI5EZ4JKmSU1IxWlKucnS96ghUWuJudciUT\nGeDw1fYjUOvv+YnMhcM5avzLMmc/4JwRwmPsBNqeS6NSZsJYasHBQqT5lihBnZmU\nxRCSGpOAJqL/tWMsC3MxHG41AQunmbpk4RSZdPjfiJ3U8Gty9rnppq9GfZ9n+LxL\niIkodGqPe95J2csyNuLpDmOlhA5x7/miPLT7Wtp/hN/s2DSgpKQEWIHEsEVlfU+q\nH+GO/W1yWO+dDthJ2/yGn4ZQVe2riuw5uUcxjY27wkobhZ25/9brhqAKxweioGLf\nB32mvwrIarjuWKsCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQF\nMAMBAf8wHQYDVR0OBBYEFAl1BlYxjAq7rEsUpr186nCtx13/MB8GA1UdIwQYMBaA\nFAl1BlYxjAq7rEsUpr186nCtx13/MA0GCSqGSIb3DQEBCwUAA4IBAQCu3dv4kIwz\nu5DkLCS1z/I9QcxrEZmJs72LUWpri/eyvFxK5LrC4bU0d3LZXdjQUxLhrk1A8qD6\ne7QoqzRf9QdNRUiU3ilpwtANd6NwmIzd2PrcalBdoglrtDpBz5VAZ0j26YJAcpyn\nXbMpaDDOgz247mp4Ts1wHVjQa0H0bFAtuKbab+R+EFWCVc77MSRXVDiQndg/gtdZ\nEekPhH4kNhFT2c74uf32ICivfylG79g0sGNUVO0SPLup4psWmGf2pxI+HGftIT8p\ni0idY7Ij5JlIyQ8ypGbD7AqA1bRwcT6o8Al2iQw349XO694SJUNs10QGwuVKdHqL\nJC64gPwOl26J\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDNDCCAhygAwIBAgIILvKlXd2YBKIwDQYJKoZIhvcNAQELBQAwODESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVy\nMB4XDTI1MTEwMjA3MzQxMFoXDTI2MTEwMjA3MzQxMFowODESMBAGA1UECxMJb3Bl\nbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyMIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsOnFEdxnqqE7l+5NxN82TBBycC2F\n5cUboCcUVr211lubqmjCusnJzzz+6rKpO8uFqqGeu4C7rGpudOaK52IZhG0WP8b7\n0raQhnM4DV8t2SHDV4GhFUiuNE+b4FJrZ6jiljQo2g9ZeeCZgdmmBrIFHBXDFzEc\nA1RPScqOtvBbbH064Zd267gOmVPJnWmxDXo6X/RGYCm1YUS6FQ2WWpl707ComvgZ\nAvWGSA4H1sZirMQ+ug3bctkLb+SiXUzf+tLnGIHPeqDNfMrNUhxBl6dDhlMbUIzY\nrVxgDD8y3i7eg5i5HG8yntl8epgs2gn47wfavfjLqATBlciJPZY6Qv3BFQIDAQAB\no0IwQDAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\nWi6wmZ50AVdwxaiO3WjTvp82+sEwDQYJKoZIhvcNAQELBQADggEBAA4siMWwOGL5\nCiCxbiJsuKMbCLlFR6JaD43FWnUCZDoe7np7W76Oh4py4Zht7XlZKotcXrrfRYVO\ndVht66PCbSujy375p/B6c3isG4h/1cNSGDm1uhAkHXGZ88S2wSjKT5YJ/HUAkvyj\nadQgZeO7Q60YBSDE/67Ldq1zqvBrMF2k8pF49p1AdAtf4OSDzIaGGPUQJ 
2025-12-12T16:17:52.302425463+00:00 stderr F TFExA0E\n03xMlOPNhYZ8MgFT2XE6nRT74lCAfK9krAsZLtFuAtp/14t013PD0FqTTQRUmuSj\nO6pJKDTH8tZ3ieXxSzRR+j4p5hkHaehgQVyUbwiw8WVXkd6NcWR6yQcSeqIsTSCD\neMDdTmmKyuo=\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhzCCAm+gAwIBAgIID5n6gpWYc8YwDQYJKoZIhvcNAQELBQAwUTFPME0GA1UE\nAwxGb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtY29udHJv\nbC1wbGFuZS1zaWduZXJAMTc2NTU1NjIwOTAeFw0yNTEyMTIxNjE2NDhaFw0yNjAy\nMTAxNjE2NDlaMFExTzBNBgNVBAMMRm9wZW5zaGlmdC1rdWJlLWFwaXNlcnZlci1v\ncGVyYXRvcl9rdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyQDE3NjU1NTYyMDkwggEi\nMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtQLJD7hc4OcHS2GPLdn9xsKkM\nAqHIcDr2UgXYWTVShY348nGviBJGCBWyKqQPpM6u4zIbS4xHMeQ3fnVCRvti8Ggf\n7zKigxyoxwcx+f4z9L5fCO6RbScmwmaYchePaAMT/7sNBt8NIKyFfep/bExW2UPw\nrb0qUqAbmBse+Azrl0V+UHNexaG3VmsOEfCJORAVgIo8SNIAG8jHe6+r3BtnwTsC\n59znIuoxDrBL9cq82ZZGQ++jVx4AE6JM0Lj/UBPVsJ06+X/829a1PiHuqlY2oN/p\nm+xCLSQgi59nrDifJLQXNBjahDW8ccWnbkvkUPjO8/OzS2nJb6uXihez5m2xAgMB\nAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBR9PxK41FQkWasZMK1vVKm6UNhzHzAfBgNVHSMEGDAWgBR9PxK41FQkWasZMK1v\nVKm6UNhzHzANBgkqhkiG9w0BAQsFAAOCAQEAfHCzxMKk00cOsThQkkL2trlY5tl2\n9wXEd/62Fh8EoOhNmCIpyPYLWMDnD2GB2BS7J5S+zcqby2+7s8Etub0gpvbN2Ocq\nHss3f+WcAFm7t9hiQrJ4gPYSkwEQGCwJ3ueGIEmPyyrTQPTmzNYudSdXt1WSrTpO\nO1sKWdQro5M0V4U9Z6MWGnG4nIZljqHWgVkZXkluh6Rvshoen8rhUNa6VV3aMHcZ\n94dtvZRSye9RsOZwZygsG/HU2+GcnKKYvqkIo8FZVAYTyu3rlOlT9dmmZpxwukb6\nADjin/tgzt7r0FiU+Z9uYqI3SoFog9pv+mlqpuf3zLm+Q3DKwBpxaxn5nQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDSDCCAjCgAwIBAgIIV//3Qv2OxM4wDQYJKoZIhvcNAQELBQAwQjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSwwKgYDVQQDEyNrdWJlbGV0LWJvb3RzdHJhcC1rdWJlY29u\nZmlnLXNpZ25lcjAeFw0yNTExMDIwNzM0MDhaFw0zNTEwMzEwNzM0MDhaMEIxEjAQ\nBgNVBAsTCW9wZW5zaGlmdDEsMCoGA1UEAxMja3ViZWxldC1ib290c3RyYXAta3Vi\nZWNvbmZpZy1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc\nZBn9WZ5wLPck6+g3f4p0NBGyE6LaXnWavXx9m0sVdTVQooknndmKufeYkXGZ5Lb+\nfbMAp/6swgSJ1DjdBj06rCqJEZfdZl3uZoD/Th4ha2Phl12bXaNYLiuOu5BOZ3UW\n08y1Wab9Y9zc0o4Z71pHH4o9TH3QPNT6BqAz4kkgD6t1r/R7E7lrZbx+7e+0JBAW\nRgufaFOX1AYU5B4+pSM21eJY7oP1P9I4DMeeJW39opCCHAuUQgHpOV1YPtRqEPJ4\n9matas8qm5qIMIPbGEGFckSJqgny9YCfHaLezJtZMIHJgz5LW4H91gQCGvfSbLtH\nYxYO/PcTXQCnDYNwqf29AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBSCg7Y8QkzypoNAqYVPNK5YWMVIXzANBgkqhkiG\n9w0BAQsFAAOCAQEAsP1NR5ZgC7F5FvoU2CXla4FufdeYTk4bjjNoFJMqnvI5gcY6\nJDxP1Rl2YBHCpRxngtBFxw3Xoe8UnBKzZWsS5wLUYRloprhGVBSLM0vqJJOvP7M0\njt3SLuB7h0dG2GO9yQ4y10+xVWxP5Os9wcbQcRgTQKL3gHmCq4aQN1cqJSxyJ/ut\nlbfYlM/xBcfLMY5Leeas6y2FPCFIEONh1U9FJZlF3YkhPp+XD7aePtC4tJqsokMc\nP80IwPn54aDT9akRPsOteB6C+xSAz2TlfWaJ/l/x9yXK+HJrRhMartqyN511SeEd\nDpNcMW9qPTjJzBj+N3f0ZfvbTmhVSvV65ZEtAw==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDhTCCAm2gAwIBAgIIfksp5LDEMMgwDQYJKoZIhvcNAQELBQAwUDFOMEwGA1UE\nAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX25vZGUtc3lzdGVt\nLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MB4XDTI1MTEwMjA3NTEyN1oXDTI4MTEw\nMTA3NTEyOFowUDFOMEwGA1UEAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9w\nZXJhdG9yX25vZGUtc3lzdGVtLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA402A+B9GaYmzBVLJEyrGvytCY0Vb\nvvIKHW2kxl9IFN3+LNZHW/mKeJfz/hBTm2bs+6uRCDlMSyONDlVUWVsuE+q0+F42\n0n3VyxWRSrDZ2ur5oNxmoBSsHRM+PxccQ6X3JTZyO397LHNOzxAs/Es+St8A8sbY\nGLc1lNqeOLvwAOT5d2PrFlYCAfXYs/UVIaio846jidKKN1f8Z6W5pgdAHuTXbyBQ\nLDQh6s43TBPhww1KszmcwURjEBDCT6KlhsM/quMd9XlMU0ZEAMf6XxsqvW8ia8C8\nF+RNAaGkwmiS4qZ+hJ4KIUnWM84j+bsyNBqlHFKi1e7LsKRyjnQ288FqIQIDAQAB\no2MwYTAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\n9O+dX0K7dzD13bshuR70sHbUAeAwHwYDVR0jBBgwFoAU9O+dX0K7dzD13bshuR70\nsHbUAeAwDQYJKoZIhvcNAQELBQADggEBADPRGSn1U/YwBUkpU7vTzsxqLaVJW21o\n6hV/W2IjjGNGqp6c2kSH/3ZGSEjNwIJqKRFpC2gmTPgAqnC4nDosOHx5F5HXTmrU\n1l2Ivcm1Ep+t/zBgNHjBi3yommx8n2iTTdakpQaq7/u1s0I4UiRqXydjoGXp7H1C\naAsmRlK8ovgEAWzItjeMBzy65wqiStPBK+XAIddqznHCxrRyH5xk3HcnyMG4GDWl\nrogdK8yTGCuZVCvGfe9Hwm8tyYrxDRNvRLTc0ssTonAwnR/7IzaVVc9Pp0svCynJ\n6VX3FGhgWwDVWeajj8yrXeR42az/Rr1TAAOZtJMW+4hIkaU0/+msvgw=\n-----END CERTIFICATE-----\n"},"metadata":{"managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ca-bundle.crt":{}},"f:metadata":{"f:annotations":{".":{},"f:openshift.io/owning-component":{}}}},"manager":"cluster-kube-apiserver-operator","operation":"Update","time":"2025-12-12T16:17:51Z"}],"resourceVersion":null,"uid":"e7bc0b2c-2af6-488e-bf6f-25875798350a"}} 2025-12-12T16:17:52.313239141+00:00 stderr F I1212 16:17:52.313143 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/kube-apiserver-client-ca -n openshift-config-managed: 2025-12-12T16:17:52.313239141+00:00 stderr F cause by changes in data.ca-bundle.crt 2025-12-12T16:17:53.691910405+00:00 stderr F I1212 16:17:53.691838 1 installer_controller.go:550] "crc" is in transition to 12, but has not made progress because installer is not finished, but in Running phase 2025-12-12T16:18:13.927511263+00:00 stderr F I1212 16:18:13.926720 1 termination_observer.go:236] Observed event "TerminationPreShutdownHooksFinished" for API server pod "kube-apiserver-crc" (last termination at 2025-12-12 16:16:21 +0000 UTC) at 2025-12-12 16:18:13 +0000 UTC 2025-12-12T16:18:15.961021786+00:00 stderr F I1212 16:18:15.960436 1 termination_observer.go:236] Observed event "TerminationGracefulTerminationFinished" for API server pod "kube-apiserver-crc" (last termination at 2025-12-12 16:16:21 +0000 UTC) at 2025-12-12 16:18:15 +0000 UTC 2025-12-12T16:18:46.268897690+00:00 stderr F E1212 16:18:46.267893 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-apiserver-operator/leases/kube-apiserver-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:46.270347536+00:00 stderr F E1212 16:18:46.270299 1 leaderelection.go:436] error retrieving resource lock openshift-kube-apiserver-operator/kube-apiserver-operator-lock: Get 
"https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-apiserver-operator/leases/kube-apiserver-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.581448457+00:00 stderr F E1212 16:18:46.581360 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-apiserver-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.595404502+00:00 stderr F E1212 16:18:46.595301 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-apiserver-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.612596857+00:00 stderr F E1212 16:18:46.612482 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-apiserver-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.640646961+00:00 stderr F E1212 16:18:46.640557 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-apiserver-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.688503114+00:00 stderr F E1212 16:18:46.688433 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-apiserver-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.775557746+00:00 stderr F E1212 16:18:46.775450 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-apiserver-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.943847567+00:00 stderr F E1212 16:18:46.943750 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-apiserver-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:47.271975089+00:00 stderr F E1212 16:18:47.271891 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources 
reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-apiserver-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:47.874972596+00:00 stderr F E1212 16:18:47.874913 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.880494503+00:00 stderr F E1212 16:18:47.880444 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.884276126+00:00 stderr F E1212 16:18:47.884217 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.893458633+00:00 stderr F E1212 16:18:47.893382 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.900275872+00:00 stderr F E1212 16:18:47.900208 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.921556338+00:00 stderr F E1212 16:18:47.921483 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.922611344+00:00 stderr F E1212 16:18:47.922583 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.928167231+00:00 stderr 
F E1212 16:18:47.928123 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-apiserver-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:47.982918195+00:00 stderr F E1212 16:18:47.982734 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.177763432+00:00 stderr F E1212 16:18:48.177692 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.379900709+00:00 stderr F E1212 16:18:48.379819 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.577444193+00:00 stderr F E1212 16:18:48.576895 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.780601976+00:00 stderr F E1212 16:18:48.780514 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.980624511+00:00 stderr F E1212 16:18:48.980550 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.376664172+00:00 stderr F E1212 16:18:49.376624 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-InstallerState 
reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.579155439+00:00 stderr F E1212 16:18:49.579096 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.980535372+00:00 stderr F E1212 16:18:49.980476 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.376201494+00:00 stderr F I1212 16:18:50.376145 1 request.go:752] "Waited before sending request" delay="1.164686335s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa" 2025-12-12T16:18:50.382435618+00:00 stderr F E1212 16:18:50.382385 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-apiserver-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:50.580841383+00:00 stderr F E1212 16:18:50.579878 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.776907361+00:00 stderr F E1212 16:18:50.776828 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.191380867+00:00 stderr F E1212 16:18:51.191304 1 base_controller.go:279] "Unhandled Error" err="KubeAPIServerStaticResources-StaticResources reconciliation failed: [\"assets/kube-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/svc.yaml\" (string): Get 
\"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/services/apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:check-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrole-node-reader.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:check-endpoints-node-reader\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrole-crd-reader.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:check-endpoints-crd-reader\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrolebinding-auth-delegator.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:kube-apiserver-check-endpoints-auth-delegator\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrolebinding-node-reader.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:kube-apiserver-check-endpoints-node-reader\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrolebinding-crd-reader.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:kube-apiserver-check-endpoints-crd-reader\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-rolebinding-kube-system.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:controller:kube-apiserver-check-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-apiserver/rolebindings/system:openshift:controller:check-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/delegated-incluster-authentication-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/authentication-reader-for-authenticated-users\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/localhost-recovery-client-crb.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-apiserver-recovery\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/localhost-recovery-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/localhost-recovery-client\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/apiserver.openshift.io_apirequestcount.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/apirequestcounts.apiserver.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/storage-version-migration-flowschema.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/migration.k8s.io/v1alpha1/storageversionmigrations/flowcontrol-flowschema-storage-version-migration\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/storage-version-migration-prioritylevelconfiguration.yaml\" (string): Get \"https://10.217.4.1:443/apis/migration.k8s.io/v1alpha1/storageversionmigrations/flowcontrol-prioritylevel-storage-version-migration\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/storage-version-migration-flowschema-v1beta3.yaml\" (string): Get \"https://10.217.4.1:443/apis/migration.k8s.io/v1alpha1/storageversionmigrations/flowcontrol-flowschema-storage-version-migration-v1beta3\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/storage-version-migration-prioritylevelconfiguration-v1beta3.yaml\" (string): Get \"https://10.217.4.1:443/apis/migration.k8s.io/v1alpha1/storageversionmigrations/flowcontrol-prioritylevel-storage-version-migration-v1beta3\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/api-usage.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/api-usage\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/audit-errors.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/audit-errors\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/kube-apiserver-requests.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/kube-apiserver-requests\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/kube-apiserver-slos-basic.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/kube-apiserver-slos-basic\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/podsecurity-violations.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/podsecurity\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/kube-apiserver-slos-extended.yaml\" (string): Delete \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/kube-apiserver-slos-extended\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeAPIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=KubeAPIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:51.379552979+00:00 stderr F E1212 16:18:51.379499 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.580818985+00:00 stderr F E1212 16:18:51.580755 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-AuditPolicy\": Patch 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.177566248+00:00 stderr F E1212 16:18:52.177444 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.379866420+00:00 stderr F E1212 16:18:52.378917 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.580272344+00:00 stderr F E1212 16:18:52.580203 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.983714298+00:00 stderr F E1212 16:18:52.983642 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.180470783+00:00 stderr F E1212 16:18:53.180417 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.597620586+00:00 stderr F E1212 16:18:53.597529 1 base_controller.go:279] "Unhandled Error" err="KubeAPIServerStaticResources-StaticResources reconciliation failed: [\"assets/kube-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/services/apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:check-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrole-node-reader.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:check-endpoints-node-reader\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrole-crd-reader.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:check-endpoints-crd-reader\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"assets/kube-apiserver/check-endpoints-clusterrolebinding-auth-delegator.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:kube-apiserver-check-endpoints-auth-delegator\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrolebinding-node-reader.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:kube-apiserver-check-endpoints-node-reader\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrolebinding-crd-reader.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:kube-apiserver-check-endpoints-crd-reader\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-rolebinding-kube-system.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:controller:kube-apiserver-check-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-apiserver/rolebindings/system:openshift:controller:check-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/delegated-incluster-authentication-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/authentication-reader-for-authenticated-users\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/localhost-recovery-client-crb.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-apiserver-recovery\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/localhost-recovery-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/localhost-recovery-client\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/apiserver.openshift.io_apirequestcount.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/apirequestcounts.apiserver.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/storage-version-migration-flowschema.yaml\" (string): Get \"https://10.217.4.1:443/apis/migration.k8s.io/v1alpha1/storageversionmigrations/flowcontrol-flowschema-storage-version-migration\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/storage-version-migration-prioritylevelconfiguration.yaml\" (string): Get \"https://10.217.4.1:443/apis/migration.k8s.io/v1alpha1/storageversionmigrations/flowcontrol-prioritylevel-storage-version-migration\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/storage-version-migration-flowschema-v1beta3.yaml\" (string): Get \"https://10.217.4.1:443/apis/migration.k8s.io/v1alpha1/storageversionmigrations/flowcontrol-flowschema-storage-version-migration-v1beta3\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/storage-version-migration-prioritylevelconfiguration-v1beta3.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/migration.k8s.io/v1alpha1/storageversionmigrations/flowcontrol-prioritylevel-storage-version-migration-v1beta3\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/api-usage.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/api-usage\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/audit-errors.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/audit-errors\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/kube-apiserver-requests.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/kube-apiserver-requests\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/kube-apiserver-slos-basic.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/kube-apiserver-slos-basic\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/podsecurity-violations.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/podsecurity\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/kube-apiserver-slos-extended.yaml\" (string): Delete \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/kube-apiserver-slos-extended\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeAPIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=KubeAPIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:53.789532340+00:00 stderr F E1212 16:18:53.789422 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-apiserver-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:53.981055805+00:00 stderr F E1212 16:18:53.980989 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.377444765+00:00 stderr F E1212 16:18:54.377377 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-InstallerState reconciliation failed: Get 
\"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.980121204+00:00 stderr F E1212 16:18:54.980057 1 base_controller.go:279] "Unhandled Error" err="kube-apiserver-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.180141199+00:00 stderr F E1212 16:18:55.180095 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.578914848+00:00 stderr F E1212 16:18:55.578833 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.803056859+00:00 stderr F E1212 16:18:55.802956 1 base_controller.go:279] "Unhandled Error" err="KubeAPIServerStaticResources-StaticResources reconciliation failed: [\"assets/kube-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/services/apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:check-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrole-node-reader.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:check-endpoints-node-reader\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrole-crd-reader.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:check-endpoints-crd-reader\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrolebinding-auth-delegator.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:kube-apiserver-check-endpoints-auth-delegator\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrolebinding-node-reader.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:kube-apiserver-check-endpoints-node-reader\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-clusterrolebinding-crd-reader.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:kube-apiserver-check-endpoints-crd-reader\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-rolebinding-kube-system.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:controller:kube-apiserver-check-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/check-endpoints-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-apiserver/rolebindings/system:openshift:controller:check-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/delegated-incluster-authentication-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/authentication-reader-for-authenticated-users\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/localhost-recovery-client-crb.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-apiserver-recovery\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/localhost-recovery-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/localhost-recovery-client\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/apiserver.openshift.io_apirequestcount.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/apirequestcounts.apiserver.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/storage-version-migration-flowschema.yaml\" (string): Get \"https://10.217.4.1:443/apis/migration.k8s.io/v1alpha1/storageversionmigrations/flowcontrol-flowschema-storage-version-migration\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/storage-version-migration-prioritylevelconfiguration.yaml\" (string): Get \"https://10.217.4.1:443/apis/migration.k8s.io/v1alpha1/storageversionmigrations/flowcontrol-prioritylevel-storage-version-migration\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/storage-version-migration-flowschema-v1beta3.yaml\" (string): Get \"https://10.217.4.1:443/apis/migration.k8s.io/v1alpha1/storageversionmigrations/flowcontrol-flowschema-storage-version-migration-v1beta3\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-apiserver/storage-version-migration-prioritylevelconfiguration-v1beta3.yaml\" (string): Get \"https://10.217.4.1:443/apis/migration.k8s.io/v1alpha1/storageversionmigrations/flowcontrol-prioritylevel-storage-version-migration-v1beta3\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/api-usage.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/api-usage\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/audit-errors.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/audit-errors\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/kube-apiserver-requests.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/kube-apiserver-requests\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/kube-apiserver-slos-basic.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/kube-apiserver-slos-basic\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/podsecurity-violations.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/podsecurity\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/alerts/kube-apiserver-slos-extended.yaml\" (string): Delete \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-kube-apiserver/prometheusrules/kube-apiserver-slos-extended\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeAPIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=KubeAPIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:56.181359022+00:00 stderr F E1212 16:18:56.181085 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeapiservers/cluster/status?fieldManager=kube-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:56.824097692+00:00 stderr F I1212 16:18:56.824031 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:57.620251376+00:00 stderr F I1212 16:18:57.619561 1 helpers.go:264] lister was stale at resourceVersion=38595, live get showed resourceVersion=39219 2025-12-12T16:18:57.840809868+00:00 stderr F E1212 16:18:57.840742 1 base_controller.go:279] "Unhandled Error" err="KubeAPIServerStaticResources-StaticResources reconciliation failed: \"assets/kube-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:19:25.296751231+00:00 stderr F I1212 16:19:25.295999 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:25.296895505+00:00 stderr F I1212 16:19:25.296865 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:25.298477414+00:00 stderr F I1212 16:19:25.298434 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:26.932282847+00:00 stderr F I1212 16:19:26.931675 1 reflector.go:430] "Caches populated" type="*v1.PodDisruptionBudget" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:27.398895452+00:00 stderr F I1212 16:19:27.398833 1 reflector.go:430] "Caches populated" type="*v1alpha1.StorageVersionMigration" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:33.800864904+00:00 stderr F I1212 16:19:33.800010 1 reflector.go:430] "Caches populated" type="*v1.MutatingWebhookConfiguration" 
reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:35.922002111+00:00 stderr F I1212 16:19:35.921921 1 reflector.go:430] "Caches populated" type="*v1.Network" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:35.922495643+00:00 stderr F I1212 16:19:35.922421 1 servicehostname.go:46] syncing servicenetwork hostnames: [10.217.4.1 kubernetes kubernetes.default kubernetes.default.svc kubernetes.default.svc.cluster.local openshift openshift.default openshift.default.svc openshift.default.svc.cluster.local] 2025-12-12T16:19:37.761324952+00:00 stderr F I1212 16:19:37.761245 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:39.501171996+00:00 stderr F I1212 16:19:39.500577 1 reflector.go:430] "Caches populated" type="*v1.OAuth" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:42.917070602+00:00 stderr F I1212 16:19:42.915739 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:46.093426584+00:00 stderr F I1212 16:19:46.093074 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.607196220+00:00 stderr F I1212 16:19:52.606210 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:55.063278417+00:00 stderr F I1212 16:19:55.062915 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:55.662893362+00:00 stderr F I1212 16:19:55.662501 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:57.523065747+00:00 stderr F I1212 16:19:57.523005 1 reflector.go:430] "Caches populated" type="*v1.Image" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:58.166967214+00:00 stderr F I1212 16:19:58.166457 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:01.013096374+00:00 stderr F I1212 16:20:01.012497 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:02.021271378+00:00 stderr F I1212 16:20:02.021222 1 reflector.go:430] "Caches populated" type="*v1.Scheduler" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:02.805690412+00:00 stderr F I1212 16:20:02.805626 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:03.031022770+00:00 stderr F I1212 16:20:03.030928 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:04.814531211+00:00 stderr F I1212 16:20:04.813646 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:08.123537173+00:00 stderr F I1212 16:20:08.123442 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:08.124320653+00:00 stderr F I1212 16:20:08.123853 1 externalloadbalancer.go:27] syncing 
external loadbalancer hostnames: api.crc.testing 2025-12-12T16:20:09.174197683+00:00 stderr F I1212 16:20:09.174114 1 reflector.go:430] "Caches populated" type="*v1.CustomResourceDefinition" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:09.192760669+00:00 stderr F W1212 16:20:09.192705 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:09.538837498+00:00 stderr F I1212 16:20:09.538751 1 reflector.go:430] "Caches populated" type="*v1.APIServer" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:10.084416676+00:00 stderr F I1212 16:20:10.084323 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:10.196670844+00:00 stderr F W1212 16:20:10.196584 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:10.303567768+00:00 stderr F I1212 16:20:10.303472 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:11.721693715+00:00 stderr F I1212 16:20:11.721615 1 reflector.go:430] "Caches populated" type="*v1.Authentication" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:11.730500056+00:00 stderr F I1212 16:20:11.730438 1 reflector.go:430] "Caches populated" type="*v1.ValidatingWebhookConfiguration" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:11.838783115+00:00 stderr F I1212 16:20:11.838702 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:12.200067146+00:00 stderr F E1212 16:20:12.200006 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:12.244030320+00:00 stderr F I1212 16:20:12.243966 1 helpers.go:188] lister was stale at resourceVersion=38595, live get showed resourceVersion=39228 2025-12-12T16:20:12.262397701+00:00 stderr F E1212 16:20:12.262324 1 base_controller.go:279] "Unhandled Error" err="webhookSupportabilityController reconciliation failed: KubeAPIServer.operator.openshift.io \"cluster\" is invalid: status.nodeStatuses[0].currentRevision: Invalid value: \"integer\": must only increase" 2025-12-12T16:20:12.272517075+00:00 stderr F W1212 16:20:12.272432 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:12.440419941+00:00 stderr F I1212 16:20:12.440284 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:12.661494431+00:00 stderr F I1212 16:20:12.661080 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=kubeapiservers" 
reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:12.665646416+00:00 stderr F I1212 16:20:12.665555 1 status_controller.go:230] clusteroperator/kube-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready\nKubeAPIServerStaticResourcesDegraded: \"assets/kube-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeAPIServerStaticResourcesDegraded: \nTargetConfigControllerDegraded: \"configmap/config\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/configmaps/config\": dial tcp 10.217.4.1:443: connect: connection refused\nTargetConfigControllerDegraded: \"configmap/kube-apiserver-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/configmaps/kube-apiserver-pod\": dial tcp 10.217.4.1:443: connect: connection refused","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:17:30Z","message":"NodeInstallerProgressing: 1 node is at revision 11; 0 nodes have achieved new revision 12","reason":"NodeInstaller","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-11-02T08:08:37Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 11; 0 nodes have achieved new revision 12","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:28Z","message":"KubeletMinorVersionUpgradeable: Kubelet and API server minor versions are synced.","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:29Z","message":"PodSecurityInconclusiveEvaluationConditionsDetected: Could not evaluate violations for namespaces: [kube-node-lease openshift openshift-apiserver-operator openshift-cloud-network-config-controller openshift-cluster-samples-operator openshift-cluster-storage-operator openshift-config-managed openshift-config-operator openshift-console openshift-console-operator openshift-console-user-settings openshift-controller-manager openshift-controller-manager-operator openshift-dns-operator openshift-host-network openshift-ingress-canary openshift-ingress-operator openshift-kube-controller-manager-operator openshift-kube-storage-version-migrator openshift-kube-storage-version-migrator-operator openshift-network-console openshift-network-diagnostics openshift-node openshift-route-controller-manager openshift-service-ca openshift-service-ca-operator openshift-user-workload-monitoring]","reason":"PodSecurityInconclusive_PSViolationDecisionInconclusive","status":"True","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:12.677291198+00:00 stderr F I1212 16:20:12.677168 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-apiserver changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready" to "NodeControllerDegraded: All master nodes are ready\nKubeAPIServerStaticResourcesDegraded: \"assets/kube-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeAPIServerStaticResourcesDegraded: 
\nTargetConfigControllerDegraded: \"configmap/config\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/configmaps/config\": dial tcp 10.217.4.1:443: connect: connection refused\nTargetConfigControllerDegraded: \"configmap/kube-apiserver-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/configmaps/kube-apiserver-pod\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:20:13.055949986+00:00 stderr F I1212 16:20:13.052489 1 status_controller.go:230] clusteroperator/kube-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready\nKubeAPIServerStaticResourcesDegraded: \"assets/kube-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeAPIServerStaticResourcesDegraded: ","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:17:30Z","message":"NodeInstallerProgressing: 1 node is at revision 11; 0 nodes have achieved new revision 12","reason":"NodeInstaller","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-11-02T08:08:37Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 11; 0 nodes have achieved new revision 12","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:28Z","message":"KubeletMinorVersionUpgradeable: Kubelet and API server minor versions are synced.","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:29Z","message":"PodSecurityInconclusiveEvaluationConditionsDetected: Could not evaluate violations for namespaces: [kube-node-lease openshift openshift-apiserver-operator openshift-cloud-network-config-controller openshift-cluster-samples-operator openshift-cluster-storage-operator openshift-config-managed openshift-config-operator openshift-console openshift-console-operator openshift-console-user-settings openshift-controller-manager openshift-controller-manager-operator openshift-dns-operator openshift-host-network openshift-ingress-canary openshift-ingress-operator openshift-kube-controller-manager-operator openshift-kube-storage-version-migrator openshift-kube-storage-version-migrator-operator openshift-network-console openshift-network-diagnostics openshift-node openshift-route-controller-manager openshift-service-ca openshift-service-ca-operator openshift-user-workload-monitoring]","reason":"PodSecurityInconclusive_PSViolationDecisionInconclusive","status":"True","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:13.078306267+00:00 stderr F I1212 16:20:13.078223 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-apiserver changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready\nKubeAPIServerStaticResourcesDegraded: \"assets/kube-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeAPIServerStaticResourcesDegraded: \nTargetConfigControllerDegraded: \"configmap/config\": Get 
\"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/configmaps/config\": dial tcp 10.217.4.1:443: connect: connection refused\nTargetConfigControllerDegraded: \"configmap/kube-apiserver-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/configmaps/kube-apiserver-pod\": dial tcp 10.217.4.1:443: connect: connection refused" to "NodeControllerDegraded: All master nodes are ready\nKubeAPIServerStaticResourcesDegraded: \"assets/kube-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeAPIServerStaticResourcesDegraded: " 2025-12-12T16:20:13.089588580+00:00 stderr F I1212 16:20:13.089526 1 reflector.go:430] "Caches populated" type="*v1.SecurityContextConstraints" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:13.276190296+00:00 stderr F W1212 16:20:13.276106 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:13.825643801+00:00 stderr F I1212 16:20:13.825562 1 request.go:752] "Waited before sending request" delay="1.159320638s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/installer-sa" 2025-12-12T16:20:14.825739741+00:00 stderr F I1212 16:20:14.825632 1 request.go:752] "Waited before sending request" delay="1.594189826s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/configmaps/kube-apiserver-audit-policies" 2025-12-12T16:20:15.279037153+00:00 stderr F E1212 16:20:15.278990 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:15.412763601+00:00 stderr F W1212 16:20:15.412375 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:15.826207971+00:00 stderr F I1212 16:20:15.826135 1 request.go:752] "Waited before sending request" delay="1.777257594s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/kube-system/secrets?resourceVersion=38741" 2025-12-12T16:20:15.831800242+00:00 stderr F I1212 16:20:15.831745 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:16.190056217+00:00 stderr F I1212 16:20:16.189978 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:16.294882299+00:00 stderr F I1212 16:20:16.294820 1 status_controller.go:230] clusteroperator/kube-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:17:30Z","message":"NodeInstallerProgressing: 1 node is at revision 11; 0 nodes have achieved new 
revision 12","reason":"NodeInstaller","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-11-02T08:08:37Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 11; 0 nodes have achieved new revision 12","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:28Z","message":"KubeletMinorVersionUpgradeable: Kubelet and API server minor versions are synced.","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:29Z","message":"PodSecurityInconclusiveEvaluationConditionsDetected: Could not evaluate violations for namespaces: [kube-node-lease openshift openshift-apiserver-operator openshift-cloud-network-config-controller openshift-cluster-samples-operator openshift-cluster-storage-operator openshift-config-managed openshift-config-operator openshift-console openshift-console-operator openshift-console-user-settings openshift-controller-manager openshift-controller-manager-operator openshift-dns-operator openshift-host-network openshift-ingress-canary openshift-ingress-operator openshift-kube-controller-manager-operator openshift-kube-storage-version-migrator openshift-kube-storage-version-migrator-operator openshift-network-console openshift-network-diagnostics openshift-node openshift-route-controller-manager openshift-service-ca openshift-service-ca-operator openshift-user-workload-monitoring]","reason":"PodSecurityInconclusive_PSViolationDecisionInconclusive","status":"True","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:16.303936436+00:00 stderr F I1212 16:20:16.303813 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-apiserver changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready\nKubeAPIServerStaticResourcesDegraded: \"assets/kube-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeAPIServerStaticResourcesDegraded: " to "NodeControllerDegraded: All master nodes are ready" 2025-12-12T16:20:16.415870717+00:00 stderr F W1212 16:20:16.415791 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:16.581088235+00:00 stderr F I1212 16:20:16.581020 1 reflector.go:430] "Caches populated" type="*v1.KubeAPIServer" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:17.025793130+00:00 stderr F I1212 16:20:17.025718 1 request.go:752] "Waited before sending request" delay="1.676655397s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/configmaps/kube-apiserver-audit-policies" 2025-12-12T16:20:17.272638758+00:00 stderr F I1212 16:20:17.272560 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:17.658104556+00:00 stderr F I1212 16:20:17.658045 1 status_controller.go:230] clusteroperator/kube-apiserver diff 
{"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:17Z","message":"NodeInstallerProgressing: 1 node is at revision 12","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T08:08:37Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 12","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:28Z","message":"KubeletMinorVersionUpgradeable: Kubelet and API server minor versions are synced.","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:29Z","message":"PodSecurityInconclusiveEvaluationConditionsDetected: Could not evaluate violations for namespaces: [kube-node-lease openshift openshift-apiserver-operator openshift-cloud-network-config-controller openshift-cluster-samples-operator openshift-cluster-storage-operator openshift-config-managed openshift-config-operator openshift-console openshift-console-operator openshift-console-user-settings openshift-controller-manager openshift-controller-manager-operator openshift-dns-operator openshift-host-network openshift-ingress-canary openshift-ingress-operator openshift-kube-controller-manager-operator openshift-kube-storage-version-migrator openshift-kube-storage-version-migrator-operator openshift-network-console openshift-network-diagnostics openshift-node openshift-route-controller-manager openshift-service-ca openshift-service-ca-operator openshift-user-workload-monitoring]","reason":"PodSecurityInconclusive_PSViolationDecisionInconclusive","status":"True","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:17.669033200+00:00 stderr F I1212 16:20:17.668967 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator", UID:"3a8705c5-b62b-40a4-8e43-30f0569fa490", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-apiserver changed: Progressing changed from True to False ("NodeInstallerProgressing: 1 node is at revision 12"),Available message changed from "StaticPodsAvailable: 1 nodes are active; 1 node is at revision 11; 0 nodes have achieved new revision 12" to "StaticPodsAvailable: 1 nodes are active; 1 node is at revision 12" 2025-12-12T16:20:18.226260991+00:00 stderr F I1212 16:20:18.226199 1 request.go:752] "Waited before sending request" delay="1.587653632s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/secrets/node-kubeconfigs" 2025-12-12T16:20:18.419124134+00:00 stderr F E1212 16:20:18.419078 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:18.630441130+00:00 stderr F I1212 16:20:18.630373 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:19.426894407+00:00 stderr F I1212 16:20:19.426794 1 request.go:752] "Waited before sending request" delay="1.759228851s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc" 
2025-12-12T16:20:20.626784353+00:00 stderr F I1212 16:20:20.625681 1 request.go:752] "Waited before sending request" delay="1.776025502s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/configmaps?resourceVersion=38785" 2025-12-12T16:20:20.637459681+00:00 stderr F I1212 16:20:20.637362 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:20.828905428+00:00 stderr F I1212 16:20:20.828808 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:21.626331340+00:00 stderr F I1212 16:20:21.626247 1 request.go:752] "Waited before sending request" delay="1.963613672s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/serviceaccounts/localhost-recovery-client" 2025-12-12T16:20:22.229268038+00:00 stderr F I1212 16:20:22.229170 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:22.248440620+00:00 stderr F W1212 16:20:22.248366 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:23.252134411+00:00 stderr F W1212 16:20:23.252078 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:23.827756324+00:00 stderr F I1212 16:20:23.827647 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:24.145781008+00:00 stderr F I1212 16:20:24.145680 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:24.306392750+00:00 stderr F I1212 16:20:24.306314 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:24.836227303+00:00 stderr F I1212 16:20:24.832647 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:24.986009064+00:00 stderr F I1212 16:20:24.985942 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:25.255038299+00:00 stderr F E1212 16:20:25.254931 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:25.267646436+00:00 stderr F W1212 16:20:25.267487 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:26.271708046+00:00 stderr F W1212 16:20:26.271609 1 degraded_webhook.go:147] failed to connect to webhook 
"monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:28.276342217+00:00 stderr F E1212 16:20:28.275790 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:20:28.413556662+00:00 stderr F I1212 16:20:28.413477 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:28.423581704+00:00 stderr F I1212 16:20:28.423483 1 termination_observer.go:130] Observed termination of API server pod "kube-apiserver-crc" at 2025-12-12 16:18:35 +0000 UTC 2025-12-12T16:20:28.425835180+00:00 stderr F I1212 16:20:28.425749 1 reflector.go:430] "Caches populated" type="monitoring.coreos.com/v1, Resource=prometheusrules" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:34.031126245+00:00 stderr F I1212 16:20:34.030327 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:34.506230217+00:00 stderr F I1212 16:20:34.506154 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:36.103367179+00:00 stderr F I1212 16:20:36.101823 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:39.101887109+00:00 stderr F I1212 16:20:39.101304 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:39.101989762+00:00 stderr F I1212 16:20:39.101965 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:39.103699677+00:00 stderr F I1212 16:20:39.103671 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:39.924232819+00:00 stderr F I1212 16:20:39.923715 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:24:57.301323569+00:00 stderr F I1212 16:24:57.300373 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:25:50.106204930+00:00 stderr F I1212 16:25:50.105392 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:26:38.932390484+00:00 stderr F W1212 16:26:38.931620 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:26:39.945296803+00:00 stderr F W1212 16:26:39.943918 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:26:41.952991655+00:00 stderr F E1212 16:26:41.951908 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup 
cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:26:50.477475103+00:00 stderr F I1212 16:26:50.476730 1 reflector.go:430] "Caches populated" type="*v1.Event" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:26:56.097241357+00:00 stderr F W1212 16:26:56.096206 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:26:57.104851003+00:00 stderr F W1212 16:26:57.104551 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:26:59.125198378+00:00 stderr F E1212 16:26:59.124486 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:26:59.170384082+00:00 stderr F W1212 16:26:59.169598 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:00.175476130+00:00 stderr F W1212 16:27:00.175391 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:02.181696174+00:00 stderr F E1212 16:27:02.179883 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:02.211287073+00:00 stderr F W1212 16:27:02.208845 1 degraded_webhook.go:147] failed to connect to webhook "alertmanagerconfigs.monitoring.rhobs" via service "obo-prometheus-operator-admission-webhook-service.openshift-operators.svc:443": dial tcp 10.217.4.232:443: connect: connection refused 2025-12-12T16:27:03.217229752+00:00 stderr F W1212 16:27:03.213426 1 degraded_webhook.go:147] failed to connect to webhook "alertmanagerconfigs.monitoring.rhobs" via service "obo-prometheus-operator-admission-webhook-service.openshift-operators.svc:443": dial tcp 10.217.4.232:443: connect: connection refused 2025-12-12T16:27:05.233393878+00:00 stderr F E1212 16:27:05.231149 1 degraded_webhook.go:68] alertmanagerconfigs.monitoring.rhobs: dial tcp 10.217.4.232:443: connect: connection refused 2025-12-12T16:27:05.239059492+00:00 stderr F W1212 16:27:05.238988 1 degraded_webhook.go:147] failed to connect to webhook "prometheusrules.monitoring.rhobs" via service "obo-prometheus-operator-admission-webhook-service.openshift-operators.svc:443": dial tcp 10.217.4.232:443: connect: connection refused 2025-12-12T16:27:06.261071147+00:00 stderr F W1212 16:27:06.260097 1 degraded_webhook.go:147] failed to connect to webhook "prometheusrules.monitoring.rhobs" via service "obo-prometheus-operator-admission-webhook-service.openshift-operators.svc:443": dial tcp 10.217.4.232:443: connect: connection refused 2025-12-12T16:27:08.268961775+00:00 stderr F E1212 16:27:08.268215 
1 degraded_webhook.go:68] prometheusrules.monitoring.rhobs: dial tcp 10.217.4.232:443: connect: connection refused 2025-12-12T16:27:08.282214191+00:00 stderr F W1212 16:27:08.282132 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:09.296898560+00:00 stderr F W1212 16:27:09.295358 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:11.302536450+00:00 stderr F E1212 16:27:11.301589 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:11.308102981+00:00 stderr F W1212 16:27:11.307582 1 degraded_webhook.go:147] failed to connect to webhook "elastic-ent-validation-v1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:12.314666365+00:00 stderr F W1212 16:27:12.314550 1 degraded_webhook.go:147] failed to connect to webhook "elastic-ent-validation-v1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:14.319814243+00:00 stderr F E1212 16:27:14.318768 1 degraded_webhook.go:68] elastic-ent-validation-v1.k8s.elastic.co: dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:14.324244985+00:00 stderr F W1212 16:27:14.324145 1 degraded_webhook.go:147] failed to connect to webhook "elastic-es-validation-v1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:15.329293302+00:00 stderr F W1212 16:27:15.328151 1 degraded_webhook.go:147] failed to connect to webhook "elastic-es-validation-v1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:17.335611208+00:00 stderr F E1212 16:27:17.335529 1 degraded_webhook.go:68] elastic-es-validation-v1.k8s.elastic.co: dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:17.338824299+00:00 stderr F W1212 16:27:17.338509 1 degraded_webhook.go:147] failed to connect to webhook "elastic-es-validation-v1beta1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:18.345632711+00:00 stderr F W1212 16:27:18.345573 1 degraded_webhook.go:147] failed to connect to webhook "elastic-es-validation-v1beta1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:20.352317536+00:00 stderr F E1212 16:27:20.351306 1 degraded_webhook.go:68] elastic-es-validation-v1beta1.k8s.elastic.co: dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:20.355696582+00:00 stderr F W1212 16:27:20.355615 1 degraded_webhook.go:147] failed to connect to webhook "elastic-ems-validation-v1alpha1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 
10.217.4.51:443: connect: connection refused 2025-12-12T16:27:21.360237456+00:00 stderr F W1212 16:27:21.360133 1 degraded_webhook.go:147] failed to connect to webhook "elastic-ems-validation-v1alpha1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:23.364940421+00:00 stderr F E1212 16:27:23.364787 1 degraded_webhook.go:68] elastic-ems-validation-v1alpha1.k8s.elastic.co: dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:23.368087781+00:00 stderr F W1212 16:27:23.367758 1 degraded_webhook.go:147] failed to connect to webhook "elastic-scp-validation-v1alpha1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:24.372073831+00:00 stderr F W1212 16:27:24.371994 1 degraded_webhook.go:147] failed to connect to webhook "elastic-scp-validation-v1alpha1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:26.379347502+00:00 stderr F E1212 16:27:26.377350 1 degraded_webhook.go:68] elastic-scp-validation-v1alpha1.k8s.elastic.co: dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:26.382438981+00:00 stderr F W1212 16:27:26.380792 1 degraded_webhook.go:147] failed to connect to webhook "elastic-agent-validation-v1alpha1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:27.385005713+00:00 stderr F W1212 16:27:27.384935 1 degraded_webhook.go:147] failed to connect to webhook "elastic-agent-validation-v1alpha1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:29.390787157+00:00 stderr F E1212 16:27:29.390038 1 degraded_webhook.go:68] elastic-agent-validation-v1alpha1.k8s.elastic.co: dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:29.471399048+00:00 stderr F W1212 16:27:29.402644 1 degraded_webhook.go:147] failed to connect to webhook "elastic-ent-validation-v1beta1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:30.424248212+00:00 stderr F W1212 16:27:30.422890 1 degraded_webhook.go:147] failed to connect to webhook "elastic-ent-validation-v1beta1.k8s.elastic.co" via service "elastic-operator-service.service-telemetry.svc:443": dial tcp 10.217.4.51:443: connect: connection refused 2025-12-12T16:27:32.576354789+00:00 stderr F W1212 16:27:32.573670 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:33.587003128+00:00 stderr F W1212 16:27:33.586718 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:35.605223145+00:00 stderr F E1212 16:27:35.599252 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 
10.217.4.10:53: no such host 2025-12-12T16:27:35.768288282+00:00 stderr F W1212 16:27:35.764645 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:36.771169594+00:00 stderr F W1212 16:27:36.769009 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:38.775155102+00:00 stderr F E1212 16:27:38.775079 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:38.945680077+00:00 stderr F W1212 16:27:38.945603 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:39.950298843+00:00 stderr F W1212 16:27:39.949247 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:41.955031909+00:00 stderr F E1212 16:27:41.954962 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:42.016919836+00:00 stderr F W1212 16:27:42.016829 1 degraded_webhook.go:147] failed to connect to webhook "webhook.cert-manager.io" via service "cert-manager-webhook.cert-manager.svc:443": dial tcp 10.217.5.200:443: connect: connection refused 2025-12-12T16:27:43.021646077+00:00 stderr F W1212 16:27:43.021536 1 degraded_webhook.go:147] failed to connect to webhook "webhook.cert-manager.io" via service "cert-manager-webhook.cert-manager.svc:443": dial tcp 10.217.5.200:443: connect: connection refused 2025-12-12T16:27:45.171249539+00:00 stderr F W1212 16:27:45.168574 1 degraded_webhook.go:147] failed to connect to webhook "webhook.cert-manager.io" via service "cert-manager-webhook.cert-manager.svc:443": dial tcp 10.217.5.200:443: connect: connection refused 2025-12-12T16:27:46.189386308+00:00 stderr F W1212 16:27:46.188344 1 degraded_webhook.go:147] failed to connect to webhook "webhook.cert-manager.io" via service "cert-manager-webhook.cert-manager.svc:443": dial tcp 10.217.5.200:443: connect: connection refused 2025-12-12T16:27:48.197952212+00:00 stderr F W1212 16:27:48.197839 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:49.202237889+00:00 stderr F W1212 16:27:49.202103 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 
2025-12-12T16:27:51.208587478+00:00 stderr F E1212 16:27:51.207557 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:51.259640310+00:00 stderr F W1212 16:27:51.258402 1 degraded_webhook.go:147] failed to connect to webhook "webhook.cert-manager.io" via service "cert-manager-webhook.cert-manager.svc:443": dial tcp 10.217.5.200:443: connect: connection refused 2025-12-12T16:27:52.275464369+00:00 stderr F W1212 16:27:52.274461 1 degraded_webhook.go:147] failed to connect to webhook "webhook.cert-manager.io" via service "cert-manager-webhook.cert-manager.svc:443": dial tcp 10.217.5.200:443: connect: connection refused 2025-12-12T16:27:54.313711145+00:00 stderr F W1212 16:27:54.313599 1 degraded_webhook.go:147] failed to connect to webhook "webhook.cert-manager.io" via service "cert-manager-webhook.cert-manager.svc:443": dial tcp 10.217.5.200:443: connect: connection refused 2025-12-12T16:27:55.318118185+00:00 stderr F W1212 16:27:55.318009 1 degraded_webhook.go:147] failed to connect to webhook "webhook.cert-manager.io" via service "cert-manager-webhook.cert-manager.svc:443": dial tcp 10.217.5.200:443: connect: connection refused 2025-12-12T16:27:57.332736393+00:00 stderr F W1212 16:27:57.331939 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:27:58.338754074+00:00 stderr F W1212 16:27:58.338646 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:28:00.353145885+00:00 stderr F E1212 16:28:00.352010 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:28:09.223468380+00:00 stderr F W1212 16:28:09.222926 1 degraded_webhook.go:147] failed to connect to webhook "webhook.cert-manager.io" via service "cert-manager-webhook.cert-manager.svc:443": dial tcp 10.217.5.200:443: connect: connection refused 2025-12-12T16:28:10.227913031+00:00 stderr F W1212 16:28:10.227547 1 degraded_webhook.go:147] failed to connect to webhook "webhook.cert-manager.io" via service "cert-manager-webhook.cert-manager.svc:443": dial tcp 10.217.5.200:443: connect: connection refused 2025-12-12T16:28:12.250262064+00:00 stderr F W1212 16:28:12.247944 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:28:13.254227732+00:00 stderr F W1212 16:28:13.254091 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:28:15.261264358+00:00 stderr F E1212 16:28:15.258892 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup 
cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:28:15.478353882+00:00 stderr F W1212 16:28:15.477508 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:28:16.482008644+00:00 stderr F W1212 16:28:16.480800 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:28:18.487247103+00:00 stderr F E1212 16:28:18.486720 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:29:23.777018059+00:00 stderr F W1212 16:29:23.776608 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:29:24.781773509+00:00 stderr F W1212 16:29:24.781157 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:29:26.786685364+00:00 stderr F E1212 16:29:26.786586 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:29:33.841187402+00:00 stderr F W1212 16:29:33.837886 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:29:34.849244076+00:00 stderr F W1212 16:29:34.848246 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:29:35.923401126+00:00 stderr F I1212 16:29:35.923270 1 servicehostname.go:46] syncing servicenetwork hostnames: [10.217.4.1 kubernetes kubernetes.default kubernetes.default.svc kubernetes.default.svc.cluster.local openshift openshift.default openshift.default.svc openshift.default.svc.cluster.local] 2025-12-12T16:29:36.852849648+00:00 stderr F E1212 16:29:36.852806 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:30:08.125328883+00:00 stderr F I1212 16:30:08.124521 1 externalloadbalancer.go:27] syncing external loadbalancer hostnames: api.crc.testing 2025-12-12T16:30:09.268522134+00:00 stderr F W1212 16:30:09.268423 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 
10.217.4.10:53: no such host 2025-12-12T16:30:10.274031034+00:00 stderr F W1212 16:30:10.273934 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:30:12.279639693+00:00 stderr F E1212 16:30:12.279032 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:30:12.391936326+00:00 stderr F W1212 16:30:12.391858 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:30:13.396264541+00:00 stderr F W1212 16:30:13.396199 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:30:15.400736924+00:00 stderr F E1212 16:30:15.400658 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:30:22.259354154+00:00 stderr F W1212 16:30:22.258750 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:30:23.263519225+00:00 stderr F W1212 16:30:23.263433 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:30:25.268312726+00:00 stderr F E1212 16:30:25.267904 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:30:25.492480532+00:00 stderr F W1212 16:30:25.492090 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:30:26.495723489+00:00 stderr F W1212 16:30:26.495635 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:30:28.500531281+00:00 stderr F E1212 16:30:28.499929 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:32:19.305020933+00:00 stderr F I1212 16:32:19.304028 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:35:46.110139260+00:00 stderr F I1212 16:35:46.109113 1 warnings.go:110] 
"Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:39:10.308296589+00:00 stderr F I1212 16:39:10.307198 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:39:18.649335329+00:00 stderr F I1212 16:39:18.648484 1 reflector.go:430] "Caches populated" type="*v1.Event" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:39:33.847880818+00:00 stderr F W1212 16:39:33.846867 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:39:34.854808778+00:00 stderr F W1212 16:39:34.854653 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:39:35.924363912+00:00 stderr F I1212 16:39:35.924272 1 servicehostname.go:46] syncing servicenetwork hostnames: [10.217.4.1 kubernetes kubernetes.default kubernetes.default.svc kubernetes.default.svc.cluster.local openshift openshift.default openshift.default.svc openshift.default.svc.cluster.local] 2025-12-12T16:39:36.859424225+00:00 stderr F E1212 16:39:36.859354 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:40:08.128091567+00:00 stderr F I1212 16:40:08.125788 1 externalloadbalancer.go:27] syncing external loadbalancer hostnames: api.crc.testing 2025-12-12T16:40:09.299859697+00:00 stderr F W1212 16:40:09.299096 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:40:10.304161121+00:00 stderr F W1212 16:40:10.304058 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:40:12.308813460+00:00 stderr F E1212 16:40:12.308352 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:40:12.349208625+00:00 stderr F W1212 16:40:12.349109 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:40:13.353472267+00:00 stderr F W1212 16:40:13.353241 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:40:15.360472064+00:00 stderr F E1212 16:40:15.360381 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup 
cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:40:22.268062871+00:00 stderr F W1212 16:40:22.267318 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:40:23.275643746+00:00 stderr F W1212 16:40:23.274399 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:40:25.286562471+00:00 stderr F E1212 16:40:25.285616 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:40:25.436001516+00:00 stderr F W1212 16:40:25.435892 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:40:26.441092459+00:00 stderr F W1212 16:40:26.440969 1 degraded_webhook.go:147] failed to connect to webhook "monitoringconfigmaps.openshift.io" via service "cluster-monitoring-operator.openshift-monitoring.svc:8443": dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host 2025-12-12T16:40:28.450347912+00:00 stderr F E1212 16:40:28.450266 1 degraded_webhook.go:68] monitoringconfigmaps.openshift.io: dial tcp: lookup cluster-monitoring-operator.openshift-monitoring.svc on 10.217.4.10:53: no such host
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097/kube-rbac-proxy/0.log
2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093832 1 flags.go:64] FLAG: --add-dir-header="false" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093949 1 flags.go:64] FLAG: --allow-paths="[]" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093958 1 flags.go:64] FLAG: --alsologtostderr="false"
2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093962 1 flags.go:64] FLAG: --auth-header-fields-enabled="false" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093965 1 flags.go:64] FLAG: --auth-header-groups-field-name="x-remote-groups" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093971 1 flags.go:64] FLAG: --auth-header-groups-field-separator="|" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093974 1 flags.go:64] FLAG: --auth-header-user-field-name="x-remote-user" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093978 1 flags.go:64] FLAG: --auth-token-audiences="[]" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093984 1 flags.go:64] FLAG: --client-ca-file="" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093986 1 flags.go:64] FLAG: --config-file="/etc/kube-rbac-proxy/config-file.yaml" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093989 1 flags.go:64] FLAG: --help="false" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093992 1 flags.go:64] FLAG: --http2-disable="false" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093995 1 flags.go:64] FLAG: --http2-max-concurrent-streams="100" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.093998 1 flags.go:64] FLAG: --http2-max-size="262144" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.094002 1 flags.go:64] FLAG: --ignore-paths="[]" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.094005 1 flags.go:64] FLAG: --insecure-listen-address="" 2025-12-12T16:16:41.094021275+00:00 stderr F I1212 16:16:41.094008 1 flags.go:64] FLAG: --kube-api-burst="0" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094018 1 flags.go:64] FLAG: --kube-api-qps="0" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094026 1 flags.go:64] FLAG: --kubeconfig="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094029 1 flags.go:64] FLAG: --log-backtrace-at="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094031 1 flags.go:64] FLAG: --log-dir="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094034 1 flags.go:64] FLAG: --log-file="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094037 1 flags.go:64] FLAG: --log-file-max-size="0" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094042 1 flags.go:64] FLAG: --log-flush-frequency="5s" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094045 1 flags.go:64] FLAG: --logtostderr="true" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094048 1 flags.go:64] FLAG: --oidc-ca-file="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094051 1 flags.go:64] FLAG: --oidc-clientID="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094053 1 flags.go:64] FLAG: --oidc-groups-claim="groups" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094056 1 flags.go:64] FLAG: --oidc-groups-prefix="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094059 1 flags.go:64] FLAG: --oidc-issuer="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094062 1 flags.go:64] FLAG: --oidc-sign-alg="[RS256]" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094068 1 flags.go:64] FLAG: --oidc-username-claim="email" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094071 1 flags.go:64] FLAG: --oidc-username-prefix="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094074 1 flags.go:64] FLAG: --one-output="false" 
2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094077 1 flags.go:64] FLAG: --proxy-endpoints-port="0" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094079 1 flags.go:64] FLAG: --secure-listen-address="0.0.0.0:9192" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094082 1 flags.go:64] FLAG: --skip-headers="false" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094085 1 flags.go:64] FLAG: --skip-log-headers="false" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094087 1 flags.go:64] FLAG: --stderrthreshold="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094090 1 flags.go:64] FLAG: --tls-cert-file="/etc/tls/private/tls.crt" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094093 1 flags.go:64] FLAG: --tls-cipher-suites="[TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305]" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094100 1 flags.go:64] FLAG: --tls-min-version="VersionTLS12" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094104 1 flags.go:64] FLAG: --tls-private-key-file="/etc/tls/private/tls.key" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094107 1 flags.go:64] FLAG: --tls-reload-interval="1m0s" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094111 1 flags.go:64] FLAG: --upstream="http://127.0.0.1:9191/" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094113 1 flags.go:64] FLAG: --upstream-ca-file="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094116 1 flags.go:64] FLAG: --upstream-client-cert-file="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094119 1 flags.go:64] FLAG: --upstream-client-key-file="" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094122 1 flags.go:64] FLAG: --upstream-force-h2c="false" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094125 1 flags.go:64] FLAG: --v="3" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094127 1 flags.go:64] FLAG: --version="false" 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094132 1 flags.go:64] FLAG: --vmodule="" 2025-12-12T16:16:41.094175529+00:00 stderr F W1212 16:16:41.094140 1 deprecated.go:66] 2025-12-12T16:16:41.094175529+00:00 stderr F ==== Removed Flag Warning ====================== 2025-12-12T16:16:41.094175529+00:00 stderr F 2025-12-12T16:16:41.094175529+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more. 
2025-12-12T16:16:41.094175529+00:00 stderr F 2025-12-12T16:16:41.094175529+00:00 stderr F =============================================== 2025-12-12T16:16:41.094175529+00:00 stderr F 2025-12-12T16:16:41.094175529+00:00 stderr F I1212 16:16:41.094152 1 kube-rbac-proxy.go:532] Reading config file: /etc/kube-rbac-proxy/config-file.yaml 2025-12-12T16:16:41.095265496+00:00 stderr F I1212 16:16:41.095030 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:41.095265496+00:00 stderr F I1212 16:16:41.095052 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:41.095265496+00:00 stderr F I1212 16:16:41.095058 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:41.095265496+00:00 stderr F I1212 16:16:41.095063 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:41.095693626+00:00 stderr F I1212 16:16:41.095323 1 kube-rbac-proxy.go:235] Valid token audiences: 2025-12-12T16:16:41.097753356+00:00 stderr F I1212 16:16:41.096602 1 kube-rbac-proxy.go:349] Reading certificate files 2025-12-12T16:16:41.097753356+00:00 stderr F I1212 16:16:41.097285 1 kube-rbac-proxy.go:397] Starting TCP socket on 0.0.0.0:9192 2025-12-12T16:16:41.099195692+00:00 stderr F I1212 16:16:41.097952 1 kube-rbac-proxy.go:404] Listening securely on 0.0.0.0:9192
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097/machine-approver-controller/0.log
2025-12-12T16:16:41.680888173+00:00 stderr F W1212 16:16:41.680671 1 client_config.go:667] Neither --kubeconfig nor --master was specified. Using the inClusterConfig. This might not work. 2025-12-12T16:16:41.681854147+00:00 stderr F W1212 16:16:41.681071 1 client_config.go:667] Neither --kubeconfig nor --master was specified. Using the inClusterConfig. This might not work.
2025-12-12T16:16:41.682617895+00:00 stderr F I1212 16:16:41.682565 1 main.go:153] setting up manager 2025-12-12T16:16:41.683417245+00:00 stderr F I1212 16:16:41.683378 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:41.683417245+00:00 stderr F I1212 16:16:41.683402 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:41.683417245+00:00 stderr F I1212 16:16:41.683407 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:41.683417245+00:00 stderr F I1212 16:16:41.683412 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:41.683430795+00:00 stderr F I1212 16:16:41.683416 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:41.707033932+00:00 stderr F I1212 16:16:41.706512 1 main.go:178] registering components 2025-12-12T16:16:41.707033932+00:00 stderr F I1212 16:16:41.706539 1 main.go:180] setting up scheme 2025-12-12T16:16:41.707199396+00:00 stderr F I1212 16:16:41.707144 1 main.go:218] setting up controllers 2025-12-12T16:16:41.707233286+00:00 stderr F I1212 16:16:41.707206 1 config.go:33] using default as failed to load config /var/run/configmaps/config/config.yaml: open /var/run/configmaps/config/config.yaml: no such file or directory 2025-12-12T16:16:41.707242947+00:00 stderr F I1212 16:16:41.707229 1 config.go:23] machine approver config: {NodeClientCert:{Disabled:false}} 2025-12-12T16:16:41.707876192+00:00 stderr F I1212 16:16:41.707835 1 main.go:244] starting the cmd 2025-12-12T16:16:41.708451046+00:00 stderr F I1212 16:16:41.708392 1 server.go:208] "Starting metrics server" logger="controller-runtime.metrics" 2025-12-12T16:16:41.709657216+00:00 stderr F I1212 16:16:41.709044 1 server.go:247] "Serving metrics server" logger="controller-runtime.metrics" bindAddress="127.0.0.1:9191" secure=false 2025-12-12T16:16:41.716363879+00:00 stderr F I1212 16:16:41.715796 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.CertificateSigningRequest" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:16:41.808897869+00:00 stderr F I1212 16:16:41.808814 1 leaderelection.go:257] attempting to acquire leader lease openshift-cluster-machine-approver/cluster-machine-approver-leader... 
2025-12-12T16:16:41.816586176+00:00 stderr F I1212 16:16:41.816472 1 leaderelection.go:271] successfully acquired lease openshift-cluster-machine-approver/cluster-machine-approver-leader 2025-12-12T16:16:41.816697619+00:00 stderr F I1212 16:16:41.816653 1 recorder.go:104] "crc_3b6be58d-ff49-4653-b1f0-d3e82d438077 became leader" logger="events" type="Normal" object={"kind":"Lease","namespace":"openshift-cluster-machine-approver","name":"cluster-machine-approver-leader","uid":"44ce3b93-568b-4943-9fa8-6c5f9ecbf413","apiVersion":"coordination.k8s.io/v1","resourceVersion":"36853"} reason="LeaderElection" 2025-12-12T16:16:41.816949315+00:00 stderr F I1212 16:16:41.816866 1 status.go:100] Starting cluster operator status controller 2025-12-12T16:16:41.816949315+00:00 stderr F I1212 16:16:41.816939 1 controller.go:246] "Starting EventSource" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" source="kind source: *v1.ConfigMap" 2025-12-12T16:16:41.817069608+00:00 stderr F I1212 16:16:41.817022 1 controller.go:246] "Starting EventSource" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" source="kind source: *v1.CertificateSigningRequest" 2025-12-12T16:16:41.819586660+00:00 stderr F I1212 16:16:41.819537 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="github.com/openshift/cluster-machine-approver/status.go:102" 2025-12-12T16:16:41.963373060+00:00 stderr F I1212 16:16:41.963228 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.ConfigMap" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:16:42.018261160+00:00 stderr F I1212 16:16:42.018029 1 controller.go:186] "Starting Controller" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" 2025-12-12T16:16:42.018261160+00:00 stderr F I1212 16:16:42.018091 1 controller.go:195] "Starting workers" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" worker count=10 2025-12-12T16:16:42.018261160+00:00 stderr F I1212 16:16:42.018163 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:16:42.048608571+00:00 stderr F I1212 16:16:42.048523 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:16:42.052984298+00:00 stderr F I1212 16:16:42.052903 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:16:42.054612348+00:00 stderr F I1212 16:16:42.054561 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:16:42.054612348+00:00 stderr F I1212 16:16:42.054595 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:16:42.054952236+00:00 stderr F E1212 16:16:42.054920 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:16:42.054952236+00:00 stderr F I1212 16:16:42.054936 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:16:42.060109482+00:00 stderr F I1212 16:16:42.060058 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:16:42.060155073+00:00 stderr F E1212 16:16:42.060145 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine 
for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="a27180bb-6287-4f3b-93f2-492b4eb21356" 2025-12-12T16:16:42.065612416+00:00 stderr F I1212 16:16:42.065566 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:16:42.080845268+00:00 stderr F I1212 16:16:42.080757 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:16:42.083848441+00:00 stderr F I1212 16:16:42.083808 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:16:42.085214205+00:00 stderr F I1212 16:16:42.085162 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:16:42.085214205+00:00 stderr F I1212 16:16:42.085196 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:16:42.085214205+00:00 stderr F E1212 16:16:42.085205 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:16:42.085234415+00:00 stderr F I1212 16:16:42.085214 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:16:42.088783952+00:00 stderr F I1212 16:16:42.088750 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:16:42.088841473+00:00 stderr F E1212 16:16:42.088816 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="499a0733-9180-4f99-96eb-9cefd978793d" 2025-12-12T16:16:42.099418541+00:00 stderr F I1212 16:16:42.099335 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:16:42.118896907+00:00 stderr F I1212 16:16:42.118793 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:16:42.122864104+00:00 stderr F I1212 16:16:42.122822 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:16:42.124558135+00:00 stderr F I1212 16:16:42.124529 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:16:42.124599286+00:00 stderr F I1212 16:16:42.124589 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:16:42.124626837+00:00 stderr F E1212 16:16:42.124617 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:16:42.124651078+00:00 stderr F I1212 16:16:42.124642 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:16:42.127740753+00:00 stderr F I1212 16:16:42.127712 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:16:42.127823685+00:00 stderr F E1212 16:16:42.127809 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="4d351849-6463-435d-9657-e76875d3e16c" 2025-12-12T16:16:42.148235483+00:00 stderr F I1212 16:16:42.148160 1 controller.go:165] Reconciling CSR: 
csr-vts4j 2025-12-12T16:16:42.169799880+00:00 stderr F I1212 16:16:42.169724 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:16:42.180040320+00:00 stderr F I1212 16:16:42.174714 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:16:42.180040320+00:00 stderr F I1212 16:16:42.176076 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:16:42.180040320+00:00 stderr F I1212 16:16:42.176107 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:16:42.180040320+00:00 stderr F E1212 16:16:42.176118 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:16:42.180040320+00:00 stderr F I1212 16:16:42.176128 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:16:42.180040320+00:00 stderr F I1212 16:16:42.179648 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:16:42.180040320+00:00 stderr F E1212 16:16:42.179717 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="8f617e59-dbcf-4950-a1ca-4aec8f1a7a31" 2025-12-12T16:16:42.220531737+00:00 stderr F I1212 16:16:42.220463 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:16:42.235917043+00:00 stderr F I1212 16:16:42.235631 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:16:42.240073254+00:00 stderr F I1212 16:16:42.240009 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:16:42.240966966+00:00 stderr F I1212 16:16:42.240920 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:16:42.240966966+00:00 stderr F I1212 16:16:42.240940 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:16:42.240966966+00:00 stderr F E1212 16:16:42.240947 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:16:42.240966966+00:00 stderr F I1212 16:16:42.240954 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:16:42.243202941+00:00 stderr F I1212 16:16:42.243153 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:16:42.243341964+00:00 stderr F E1212 16:16:42.243313 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="f8050da8-a98d-457d-8a91-737ac5885506" 2025-12-12T16:16:42.324018814+00:00 stderr F I1212 16:16:42.323935 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:16:42.337223536+00:00 stderr F I1212 16:16:42.337133 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:16:42.339155283+00:00 stderr F I1212 16:16:42.339119 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:16:42.340369043+00:00 stderr F I1212 16:16:42.340317 1 
csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:16:42.340369043+00:00 stderr F I1212 16:16:42.340353 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:16:42.340386814+00:00 stderr F E1212 16:16:42.340364 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:16:42.340386814+00:00 stderr F I1212 16:16:42.340376 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:16:42.344038523+00:00 stderr F I1212 16:16:42.343998 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:16:42.344113665+00:00 stderr F E1212 16:16:42.344079 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="8b6a3ed9-1497-4a24-b90c-64346356290c" 2025-12-12T16:16:42.507900793+00:00 stderr F I1212 16:16:42.506327 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:16:42.531221223+00:00 stderr F I1212 16:16:42.531151 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:16:42.535048936+00:00 stderr F I1212 16:16:42.534987 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:16:42.536225355+00:00 stderr F I1212 16:16:42.536191 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:16:42.536268356+00:00 stderr F I1212 16:16:42.536256 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:16:42.536301487+00:00 stderr F E1212 16:16:42.536290 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:16:42.536346378+00:00 stderr F I1212 16:16:42.536320 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:16:42.541027752+00:00 stderr F I1212 16:16:42.540049 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:16:42.541027752+00:00 stderr F E1212 16:16:42.540121 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="2d497760-e910-4658-99f1-6e19431aa148" 2025-12-12T16:16:42.861001274+00:00 stderr F I1212 16:16:42.860917 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:16:42.881303770+00:00 stderr F I1212 16:16:42.881214 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:16:42.884977929+00:00 stderr F I1212 16:16:42.884934 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:16:42.889008708+00:00 stderr F I1212 16:16:42.888962 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:16:42.889008708+00:00 stderr F I1212 16:16:42.888998 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:16:42.889033448+00:00 stderr F E1212 16:16:42.889012 1 csr_check.go:376] csr-vts4j: Serving Cert: No target 
machine for node "crc" 2025-12-12T16:16:42.889033448+00:00 stderr F I1212 16:16:42.889023 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:16:42.894421360+00:00 stderr F I1212 16:16:42.894366 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:16:42.894458371+00:00 stderr F E1212 16:16:42.894437 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="18b45851-30c6-4cdd-adfa-2d01049fa0f2" 2025-12-12T16:16:43.536696191+00:00 stderr F I1212 16:16:43.536636 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:16:43.576668417+00:00 stderr F I1212 16:16:43.575904 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:16:43.584936668+00:00 stderr F I1212 16:16:43.584592 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:16:43.600340185+00:00 stderr F I1212 16:16:43.599077 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:16:43.600340185+00:00 stderr F I1212 16:16:43.599105 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:16:43.600340185+00:00 stderr F E1212 16:16:43.599113 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:16:43.600340185+00:00 stderr F I1212 16:16:43.599121 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:16:43.609621331+00:00 stderr F I1212 16:16:43.608675 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:16:43.609621331+00:00 stderr F E1212 16:16:43.608735 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="e17f56f2-496f-43c0-bd10-b7deec84b696" 2025-12-12T16:16:44.891636701+00:00 stderr F I1212 16:16:44.891218 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:16:44.927260191+00:00 stderr F I1212 16:16:44.926453 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:16:44.931392762+00:00 stderr F I1212 16:16:44.931346 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:16:44.954791603+00:00 stderr F I1212 16:16:44.954728 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:16:44.954891315+00:00 stderr F I1212 16:16:44.954877 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:16:44.954925606+00:00 stderr F E1212 16:16:44.954914 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:16:44.954975637+00:00 stderr F I1212 16:16:44.954963 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:16:44.958659867+00:00 stderr F I1212 16:16:44.958625 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:16:44.958798481+00:00 stderr 
F E1212 16:16:44.958767 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="c7183305-d88b-4d0d-bb69-826e8611d0c3" 2025-12-12T16:16:47.520945823+00:00 stderr F I1212 16:16:47.520886 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:16:47.569617772+00:00 stderr F I1212 16:16:47.569043 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:16:47.577003552+00:00 stderr F I1212 16:16:47.576748 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:16:47.578447387+00:00 stderr F I1212 16:16:47.578397 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:16:47.578447387+00:00 stderr F I1212 16:16:47.578419 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:16:47.578447387+00:00 stderr F E1212 16:16:47.578427 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:16:47.578447387+00:00 stderr F I1212 16:16:47.578435 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:16:47.597805640+00:00 stderr F I1212 16:16:47.597090 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:16:47.597805640+00:00 stderr F E1212 16:16:47.597189 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="847faccf-70af-43cb-b91b-a0961d0d58aa" 2025-12-12T16:16:52.722239239+00:00 stderr F I1212 16:16:52.718407 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:16:52.771660286+00:00 stderr F I1212 16:16:52.771592 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:16:52.786266872+00:00 stderr F I1212 16:16:52.782696 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:16:52.790477955+00:00 stderr F I1212 16:16:52.787575 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:16:52.790477955+00:00 stderr F I1212 16:16:52.787647 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:16:52.790477955+00:00 stderr F E1212 16:16:52.787660 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:16:52.790477955+00:00 stderr F I1212 16:16:52.787691 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:16:52.800220633+00:00 stderr F I1212 16:16:52.799552 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:16:52.800220633+00:00 stderr F E1212 16:16:52.799666 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" 
namespace="" name="csr-vts4j" reconcileID="48d401e4-cc45-4fb1-a5f3-151f741c5ec2" 2025-12-12T16:17:03.039980338+00:00 stderr F I1212 16:17:03.039911 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:17:03.061444622+00:00 stderr F I1212 16:17:03.061400 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:17:03.065370348+00:00 stderr F I1212 16:17:03.065171 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:17:03.067719575+00:00 stderr F I1212 16:17:03.067353 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:17:03.067719575+00:00 stderr F I1212 16:17:03.067397 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:17:03.067719575+00:00 stderr F E1212 16:17:03.067410 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:17:03.067719575+00:00 stderr F I1212 16:17:03.067419 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:17:03.077688149+00:00 stderr F I1212 16:17:03.077633 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:17:03.077841833+00:00 stderr F E1212 16:17:03.077817 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="bef18343-bc44-42c8-96d6-8b95cefa1eb5" 2025-12-12T16:17:23.558067054+00:00 stderr F I1212 16:17:23.557986 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:17:23.576713639+00:00 stderr F I1212 16:17:23.576648 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:17:23.580189048+00:00 stderr F I1212 16:17:23.580154 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:17:23.581852596+00:00 stderr F I1212 16:17:23.581823 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:17:23.581893607+00:00 stderr F I1212 16:17:23.581882 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:17:23.581926158+00:00 stderr F E1212 16:17:23.581915 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:17:23.581954909+00:00 stderr F I1212 16:17:23.581943 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:17:23.586692435+00:00 stderr F I1212 16:17:23.586629 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:17:23.586791448+00:00 stderr F E1212 16:17:23.586745 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="373a2ea0-5a3b-4ead-a525-eadca671f217" 2025-12-12T16:18:04.548048047+00:00 stderr F I1212 16:18:04.547949 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:18:04.564350460+00:00 stderr F I1212 16:18:04.564269 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 
2025-12-12T16:18:04.566509643+00:00 stderr F I1212 16:18:04.566459 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:18:04.568599125+00:00 stderr F I1212 16:18:04.568512 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:18:04.568599125+00:00 stderr F I1212 16:18:04.568549 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:18:04.568599125+00:00 stderr F E1212 16:18:04.568567 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:18:04.568599125+00:00 stderr F I1212 16:18:04.568577 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:18:04.571715492+00:00 stderr F I1212 16:18:04.571638 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:18:04.571761053+00:00 stderr F E1212 16:18:04.571721 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="3b967857-86c1-40b2-9689-be53a069eae0" 2025-12-12T16:18:55.852233535+00:00 stderr F E1212 16:18:55.848207 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-cluster-machine-approver/leases/cluster-machine-approver-leader?timeout=53.5s": dial tcp 10.217.4.1:443: i/o timeout, falling back to slow path 2025-12-12T16:18:58.098880929+00:00 stderr F I1212 16:18:58.098807 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.ConfigMap" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:18:58.158145634+00:00 stderr F I1212 16:18:58.158076 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.CertificateSigningRequest" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:18:58.158922703+00:00 stderr F I1212 16:18:58.158461 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:18:58.173661268+00:00 stderr F I1212 16:18:58.173571 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:18:58.176536089+00:00 stderr F I1212 16:18:58.176493 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:18:58.178167869+00:00 stderr F I1212 16:18:58.178110 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:18:58.178167869+00:00 stderr F I1212 16:18:58.178135 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:18:58.178167869+00:00 stderr F E1212 16:18:58.178144 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:18:58.178167869+00:00 stderr F I1212 16:18:58.178153 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:18:58.180672531+00:00 stderr F I1212 16:18:58.180602 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:18:58.180719882+00:00 stderr F E1212 16:18:58.180676 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for 
node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="b4211bf7-0add-4ff8-9743-784ba5119c0e" 2025-12-12T16:18:58.466271911+00:00 stderr F I1212 16:18:58.465946 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="github.com/openshift/cluster-machine-approver/status.go:102" 2025-12-12T16:19:26.492929936+00:00 stderr F I1212 16:19:26.492842 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:19:26.516335434+00:00 stderr F I1212 16:19:26.516268 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:19:26.520210221+00:00 stderr F I1212 16:19:26.520135 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:19:26.521554195+00:00 stderr F I1212 16:19:26.521450 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:19:26.521554195+00:00 stderr F I1212 16:19:26.521478 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:19:26.521554195+00:00 stderr F E1212 16:19:26.521488 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:19:26.521554195+00:00 stderr F I1212 16:19:26.521495 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:19:26.525657748+00:00 stderr F I1212 16:19:26.525626 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:19:26.525734460+00:00 stderr F E1212 16:19:26.525711 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="df9e9a6d-56dd-424b-8ebf-6ad2ad4cad7d" 2025-12-12T16:24:54.206543401+00:00 stderr F I1212 16:24:54.206452 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:24:54.229005211+00:00 stderr F I1212 16:24:54.228939 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:24:54.231971799+00:00 stderr F I1212 16:24:54.231904 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:24:54.233592222+00:00 stderr F I1212 16:24:54.233517 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:24:54.233592222+00:00 stderr F I1212 16:24:54.233570 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:24:54.233592222+00:00 stderr F E1212 16:24:54.233584 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:24:54.233619742+00:00 stderr F I1212 16:24:54.233595 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:24:54.237367791+00:00 stderr F I1212 16:24:54.237307 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:24:54.237466783+00:00 stderr F E1212 16:24:54.237420 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" 
CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="745a19e4-f373-4791-b9d3-991542e12e35" 2025-12-12T16:31:31.567608815+00:00 stderr F I1212 16:31:31.567511 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:31.590151399+00:00 stderr F I1212 16:31:31.590073 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:31:31.592818885+00:00 stderr F I1212 16:31:31.592773 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:31.594637770+00:00 stderr F I1212 16:31:31.594368 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:31.594637770+00:00 stderr F I1212 16:31:31.594388 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:31.594637770+00:00 stderr F E1212 16:31:31.594396 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:31.594637770+00:00 stderr F I1212 16:31:31.594404 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:31:31.597684944+00:00 stderr F I1212 16:31:31.597662 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:31.597804787+00:00 stderr F E1212 16:31:31.597788 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="749e7d46-e22a-49c8-b3b5-a87d2a1fcfe6" 2025-12-12T16:31:31.603335683+00:00 stderr F I1212 16:31:31.603149 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:31.618640280+00:00 stderr F I1212 16:31:31.618568 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:31:31.621330446+00:00 stderr F I1212 16:31:31.621311 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:31.623356846+00:00 stderr F I1212 16:31:31.622994 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:31.623389127+00:00 stderr F I1212 16:31:31.623379 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:31.623414457+00:00 stderr F E1212 16:31:31.623404 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:31.623438768+00:00 stderr F I1212 16:31:31.623429 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:31:31.625852447+00:00 stderr F I1212 16:31:31.625835 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:31.625950749+00:00 stderr F E1212 16:31:31.625937 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="913ac5ad-61fe-4d99-8c50-4c4d10f98cfc" 2025-12-12T16:31:31.636441047+00:00 stderr F I1212 16:31:31.636365 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:31.653982049+00:00 stderr F I1212 16:31:31.653926 1 csr_check.go:173] csr-9cx9t: CSR does not appear to 
be client csr 2025-12-12T16:31:31.656319996+00:00 stderr F I1212 16:31:31.656290 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:31.659636078+00:00 stderr F I1212 16:31:31.659575 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:31.659636078+00:00 stderr F I1212 16:31:31.659621 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:31.659654918+00:00 stderr F E1212 16:31:31.659637 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:31.659663068+00:00 stderr F I1212 16:31:31.659651 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:31:31.662869177+00:00 stderr F I1212 16:31:31.662836 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:31.662944149+00:00 stderr F E1212 16:31:31.662916 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="6066cd8b-2180-460d-a648-87c215df0149" 2025-12-12T16:31:31.683432623+00:00 stderr F I1212 16:31:31.683363 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:31.704137982+00:00 stderr F I1212 16:31:31.704046 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:31:31.707266429+00:00 stderr F I1212 16:31:31.707209 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:31.709507694+00:00 stderr F I1212 16:31:31.709215 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:31.709507694+00:00 stderr F I1212 16:31:31.709256 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:31.709507694+00:00 stderr F E1212 16:31:31.709269 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:31.709507694+00:00 stderr F I1212 16:31:31.709281 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:31:31.712365284+00:00 stderr F I1212 16:31:31.712331 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:31.712506608+00:00 stderr F E1212 16:31:31.712451 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="a8834047-7fbb-40e7-9abe-596b27b43855" 2025-12-12T16:31:31.753001814+00:00 stderr F I1212 16:31:31.752907 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:31.770340470+00:00 stderr F I1212 16:31:31.770277 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:31:31.777170628+00:00 stderr F I1212 16:31:31.777055 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:31.779858964+00:00 stderr F I1212 16:31:31.779755 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:31.779858964+00:00 
stderr F I1212 16:31:31.779800 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:31.779858964+00:00 stderr F E1212 16:31:31.779815 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:31.779858964+00:00 stderr F I1212 16:31:31.779831 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:31:31.785317688+00:00 stderr F I1212 16:31:31.785244 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:31.785393310+00:00 stderr F E1212 16:31:31.785358 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="2add6988-d934-4876-b730-712dd919e9ff" 2025-12-12T16:31:31.866487754+00:00 stderr F I1212 16:31:31.866386 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:31.894119403+00:00 stderr F I1212 16:31:31.894003 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:31:31.897256381+00:00 stderr F I1212 16:31:31.897164 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:31.898949112+00:00 stderr F I1212 16:31:31.898742 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:31.898949112+00:00 stderr F I1212 16:31:31.898790 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:31.898949112+00:00 stderr F E1212 16:31:31.898804 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:31.898949112+00:00 stderr F I1212 16:31:31.898817 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:31:31.902282064+00:00 stderr F I1212 16:31:31.902245 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:31.902375306+00:00 stderr F E1212 16:31:31.902350 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="1210e010-ed5e-4ac2-8aee-b242c815d446" 2025-12-12T16:31:32.062962135+00:00 stderr F I1212 16:31:32.062856 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:32.083449189+00:00 stderr F I1212 16:31:32.083353 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:31:32.086248488+00:00 stderr F I1212 16:31:32.086164 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:32.087796686+00:00 stderr F I1212 16:31:32.087492 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:32.087796686+00:00 stderr F I1212 16:31:32.087520 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:32.087796686+00:00 stderr F E1212 16:31:32.087529 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:32.087796686+00:00 stderr F I1212 16:31:32.087538 1 csr_check.go:221] Could not use Machine 
for serving cert authorization: unable to find machine for node 2025-12-12T16:31:32.090896432+00:00 stderr F I1212 16:31:32.090849 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:32.090983914+00:00 stderr F E1212 16:31:32.090950 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="2ba0eebf-4ab4-4477-9543-45301083379d" 2025-12-12T16:31:32.411536116+00:00 stderr F I1212 16:31:32.411395 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:32.428442752+00:00 stderr F I1212 16:31:32.428353 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:31:32.432280926+00:00 stderr F I1212 16:31:32.432236 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:32.434019419+00:00 stderr F I1212 16:31:32.433599 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:32.434019419+00:00 stderr F I1212 16:31:32.433639 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:32.434019419+00:00 stderr F E1212 16:31:32.433650 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:32.434019419+00:00 stderr F I1212 16:31:32.433658 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:31:32.437104665+00:00 stderr F I1212 16:31:32.437056 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:32.437238018+00:00 stderr F E1212 16:31:32.437157 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="b184ee28-ce88-4be4-8667-36580ae9337d" 2025-12-12T16:31:33.078617179+00:00 stderr F I1212 16:31:33.077790 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:33.099093862+00:00 stderr F I1212 16:31:33.098978 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:31:33.102128427+00:00 stderr F I1212 16:31:33.102060 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:33.104295030+00:00 stderr F I1212 16:31:33.103668 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:33.104295030+00:00 stderr F I1212 16:31:33.103701 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:33.104295030+00:00 stderr F E1212 16:31:33.103714 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:33.104295030+00:00 stderr F I1212 16:31:33.103728 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:31:33.106873014+00:00 stderr F I1212 16:31:33.106821 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:33.106961496+00:00 stderr F E1212 16:31:33.106922 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted 
all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="2d59117b-b903-4371-972b-c8b2bb7ddbe1" 2025-12-12T16:31:34.387903912+00:00 stderr F I1212 16:31:34.387755 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:34.406782336+00:00 stderr F I1212 16:31:34.406638 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:31:34.409098393+00:00 stderr F I1212 16:31:34.409005 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:34.410836526+00:00 stderr F I1212 16:31:34.410768 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:34.410836526+00:00 stderr F I1212 16:31:34.410795 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:34.410836526+00:00 stderr F E1212 16:31:34.410804 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:34.410836526+00:00 stderr F I1212 16:31:34.410814 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:31:34.414301351+00:00 stderr F I1212 16:31:34.414241 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:34.414352402+00:00 stderr F E1212 16:31:34.414322 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="81e1a18c-82d0-48af-a9b9-3e5cbd2a049b" 2025-12-12T16:31:36.975141179+00:00 stderr F I1212 16:31:36.975042 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:36.991416019+00:00 stderr F I1212 16:31:36.991329 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:31:36.993205143+00:00 stderr F I1212 16:31:36.993153 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:36.994449534+00:00 stderr F I1212 16:31:36.994388 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:36.994449534+00:00 stderr F I1212 16:31:36.994424 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:36.994449534+00:00 stderr F E1212 16:31:36.994436 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:36.994496785+00:00 stderr F I1212 16:31:36.994445 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:31:36.996902914+00:00 stderr F I1212 16:31:36.996824 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:36.996945355+00:00 stderr F E1212 16:31:36.996923 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="93549c97-4853-4dfc-8972-8d2374b3d60f" 2025-12-12T16:31:42.118716031+00:00 stderr F I1212 
16:31:42.117826 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:42.145379907+00:00 stderr F I1212 16:31:42.145267 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:31:42.149277263+00:00 stderr F I1212 16:31:42.149223 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:42.151040026+00:00 stderr F I1212 16:31:42.150965 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:42.151040026+00:00 stderr F I1212 16:31:42.151009 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:42.151040026+00:00 stderr F E1212 16:31:42.151022 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:42.151069927+00:00 stderr F I1212 16:31:42.151032 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:31:42.156028279+00:00 stderr F I1212 16:31:42.155941 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:42.156116191+00:00 stderr F E1212 16:31:42.156021 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="da8b16cd-ebf9-4854-a8b3-e0e3e86b0819" 2025-12-12T16:31:52.396931641+00:00 stderr F I1212 16:31:52.396814 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:31:52.420217523+00:00 stderr F I1212 16:31:52.420099 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:31:52.422908259+00:00 stderr F I1212 16:31:52.422869 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:31:52.425160405+00:00 stderr F I1212 16:31:52.425100 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:31:52.425160405+00:00 stderr F I1212 16:31:52.425134 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:31:52.425160405+00:00 stderr F E1212 16:31:52.425145 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:31:52.425160405+00:00 stderr F I1212 16:31:52.425153 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:31:52.429461751+00:00 stderr F I1212 16:31:52.429378 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:31:52.429513212+00:00 stderr F E1212 16:31:52.429446 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="5a5ea726-7fc3-4e45-a48a-80ef88083ccb" 2025-12-12T16:32:12.910671164+00:00 stderr F I1212 16:32:12.909794 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:32:12.937168396+00:00 stderr F I1212 16:32:12.937063 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:32:12.941198875+00:00 stderr F I1212 16:32:12.941111 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 
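The loop recorded above shows the cluster-machine-approver repeatedly failing to authorize the kubelet serving-cert CSRs (csr-vts4j, csr-9cx9t): it cannot fetch the current serving certificate from the kubelet (remote error: tls: internal error), falls back to Machine-API authorization, and finds no Machine object for the node "crc", so the requests remain pending. A minimal sketch, not part of the captured log, of how one could list those pending requests; it assumes the Python 'kubernetes' client and a kubeconfig for this same cluster (both assumptions):

from kubernetes import client, config

# Assumption: a local kubeconfig points at the CRC cluster this log came from.
config.load_kube_config()
certs = client.CertificatesV1Api()

for csr in certs.list_certificate_signing_request().items:
    status = csr.status
    conditions = [c.type for c in (status.conditions if status and status.conditions else [])]
    # A CSR with no Approved/Denied condition is still pending, which is what the
    # machine-approver keeps reporting for csr-vts4j and csr-9cx9t above.
    print(csr.metadata.name, csr.spec.signer_name, conditions or ["Pending"])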
2025-12-12T16:32:12.942459506+00:00 stderr F I1212 16:32:12.942405 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:32:12.942459506+00:00 stderr F I1212 16:32:12.942444 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:32:12.942478447+00:00 stderr F E1212 16:32:12.942456 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:32:12.942478447+00:00 stderr F I1212 16:32:12.942467 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:32:12.945642684+00:00 stderr F I1212 16:32:12.945595 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:32:12.945693926+00:00 stderr F E1212 16:32:12.945667 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="6d2ed010-8282-47b3-9027-9784a4f15c62" 2025-12-12T16:32:53.912107114+00:00 stderr F I1212 16:32:53.912018 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:32:53.931221362+00:00 stderr F I1212 16:32:53.931132 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:32:53.935156430+00:00 stderr F I1212 16:32:53.935102 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:32:53.936993086+00:00 stderr F I1212 16:32:53.936953 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:32:53.936993086+00:00 stderr F I1212 16:32:53.936985 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:32:53.937019157+00:00 stderr F E1212 16:32:53.936998 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:32:53.937026877+00:00 stderr F I1212 16:32:53.937014 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:32:53.940953285+00:00 stderr F I1212 16:32:53.940936 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:32:53.941070108+00:00 stderr F E1212 16:32:53.941046 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="7f7b6473-29a3-4b67-88ce-717626545aef" 2025-12-12T16:34:15.862269626+00:00 stderr F I1212 16:34:15.862117 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:34:15.885534238+00:00 stderr F I1212 16:34:15.885370 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:34:15.889039495+00:00 stderr F I1212 16:34:15.888926 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:34:15.890566753+00:00 stderr F I1212 16:34:15.890507 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:34:15.890566753+00:00 stderr F I1212 16:34:15.890552 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:34:15.890642645+00:00 stderr F E1212 
16:34:15.890570 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:34:15.890642645+00:00 stderr F I1212 16:34:15.890586 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:34:15.894906192+00:00 stderr F I1212 16:34:15.894833 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:34:15.895000614+00:00 stderr F E1212 16:34:15.894948 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="6a70ea3a-f26c-429a-b4a5-4adcb66e777a" 2025-12-12T16:35:49.598409154+00:00 stderr F I1212 16:35:49.597529 1 controller.go:165] Reconciling CSR: csr-vts4j 2025-12-12T16:35:49.618190971+00:00 stderr F I1212 16:35:49.618101 1 csr_check.go:173] csr-vts4j: CSR does not appear to be client csr 2025-12-12T16:35:49.619924184+00:00 stderr F I1212 16:35:49.619884 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:35:49.621358270+00:00 stderr F I1212 16:35:49.621309 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:35:49.621358270+00:00 stderr F I1212 16:35:49.621331 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:35:49.621358270+00:00 stderr F E1212 16:35:49.621339 1 csr_check.go:376] csr-vts4j: Serving Cert: No target machine for node "crc" 2025-12-12T16:35:49.621358270+00:00 stderr F I1212 16:35:49.621348 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:35:49.623521715+00:00 stderr F I1212 16:35:49.623479 1 controller.go:286] csr-vts4j: CSR not authorized 2025-12-12T16:35:49.623579286+00:00 stderr F E1212 16:35:49.623547 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-vts4j" namespace="" name="csr-vts4j" reconcileID="0422d48c-b5fe-4b07-afe4-4737b15f068d" 2025-12-12T16:36:59.736024137+00:00 stderr F I1212 16:36:59.735897 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:36:59.764815650+00:00 stderr F I1212 16:36:59.764696 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:36:59.768214716+00:00 stderr F I1212 16:36:59.768141 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:36:59.770457692+00:00 stderr F I1212 16:36:59.770367 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:36:59.770457692+00:00 stderr F I1212 16:36:59.770413 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:36:59.770457692+00:00 stderr F E1212 16:36:59.770433 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:36:59.770538424+00:00 stderr F I1212 16:36:59.770453 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:36:59.774455533+00:00 stderr F I1212 16:36:59.774398 1 controller.go:286] 
csr-9cx9t: CSR not authorized 2025-12-12T16:36:59.774530945+00:00 stderr F E1212 16:36:59.774501 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="3408fd6c-bd06-4424-9b5a-13d19232256c" 2025-12-12T16:42:27.455934924+00:00 stderr F I1212 16:42:27.455385 1 controller.go:165] Reconciling CSR: csr-9cx9t 2025-12-12T16:42:27.477449655+00:00 stderr F I1212 16:42:27.477354 1 csr_check.go:173] csr-9cx9t: CSR does not appear to be client csr 2025-12-12T16:42:27.481233210+00:00 stderr F I1212 16:42:27.481144 1 csr_check.go:563] retrieving serving cert from crc (192.168.126.11:10250) 2025-12-12T16:42:27.483071026+00:00 stderr F I1212 16:42:27.483003 1 csr_check.go:198] Failed to retrieve current serving cert: remote error: tls: internal error 2025-12-12T16:42:27.483071026+00:00 stderr F I1212 16:42:27.483058 1 csr_check.go:218] Falling back to machine-api authorization for crc 2025-12-12T16:42:27.483088766+00:00 stderr F E1212 16:42:27.483076 1 csr_check.go:376] csr-9cx9t: Serving Cert: No target machine for node "crc" 2025-12-12T16:42:27.483131457+00:00 stderr F I1212 16:42:27.483095 1 csr_check.go:221] Could not use Machine for serving cert authorization: unable to find machine for node 2025-12-12T16:42:27.487489907+00:00 stderr F I1212 16:42:27.487449 1 controller.go:286] csr-9cx9t: CSR not authorized 2025-12-12T16:42:27.487560469+00:00 stderr F E1212 16:42:27.487534 1 controller.go:353] "Reconciler error" err="could not reconcile CSR: could not authorize CSR: exhausted all authorization methods: unable to find machine for node" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" CertificateSigningRequest="csr-9cx9t" namespace="" name="csr-9cx9t" reconcileID="b073dff5-d815-49f9-9996-fcc29170c4aa"
[log file: home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/openshift-apiserver/0.log]
2025-12-12T16:16:48.793921642+00:00 stdout F Copying system trust bundle 2025-12-12T16:16:49.482190815+00:00 stderr F W1212 16:16:49.481160 1 feature_gate.go:352] Setting GA feature gate 
RouteExternalCertificate=true. It will be removed in a future release. 2025-12-12T16:16:49.482190815+00:00 stderr F W1212 16:16:49.481228 1 feature_gate.go:350] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 2025-12-12T16:16:49.482190815+00:00 stderr F W1212 16:16:49.481235 1 feature_gate.go:352] Setting GA feature gate ServiceAccountTokenNodeBinding=true. It will be removed in a future release. 2025-12-12T16:16:49.482565144+00:00 stderr F I1212 16:16:49.482485 1 feature_gate.go:385] feature gates: {map[DynamicResourceAllocation:false EventedPLEG:false ImageVolume:true KMSv1:true MaxUnavailableStatefulSet:false MutatingAdmissionPolicy:false NodeSwap:false ProcMountType:true RouteExternalCertificate:true SELinuxMount:false ServiceAccountTokenNodeBinding:true TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:true UserNamespacesSupport:true VolumeAttributesClass:false]} 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483495 1 config.go:124] Ignoring unknown FeatureGate "AlibabaPlatform" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483512 1 config.go:124] Ignoring unknown FeatureGate "BuildCSIVolumes" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483516 1 config.go:124] Ignoring unknown FeatureGate "AWSClusterHostedDNSInstall" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483519 1 config.go:124] Ignoring unknown FeatureGate "SigstoreImageVerificationPKI" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483522 1 config.go:124] Ignoring unknown FeatureGate "VolumeGroupSnapshot" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483524 1 config.go:124] Ignoring unknown FeatureGate "AzureWorkloadIdentity" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483526 1 config.go:124] Ignoring unknown FeatureGate "ConsolePluginContentSecurityPolicy" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483529 1 config.go:124] Ignoring unknown FeatureGate "NetworkDiagnosticsConfig" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483531 1 config.go:124] Ignoring unknown FeatureGate "AzureMultiDisk" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483533 1 config.go:124] Ignoring unknown FeatureGate "GCPCustomAPIEndpoints" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483536 1 config.go:124] Ignoring unknown FeatureGate "NewOLMOwnSingleNamespace" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483538 1 config.go:124] Ignoring unknown FeatureGate "VSphereHostVMGroupZonal" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483541 1 config.go:124] Ignoring unknown FeatureGate "MachineConfigNodes" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483543 1 config.go:124] Ignoring unknown FeatureGate "SigstoreImageVerification" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483545 1 config.go:124] Ignoring unknown FeatureGate "UpgradeStatus" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483547 1 config.go:124] Ignoring unknown FeatureGate "BootImageSkewEnforcement" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483550 1 config.go:124] Ignoring unknown FeatureGate "ClusterAPIInstall" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483552 1 config.go:124] Ignoring unknown FeatureGate "DyanmicServiceEndpointIBMCloud" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483554 1 config.go:124] Ignoring unknown FeatureGate 
"ExternalOIDCWithUIDAndExtraClaimMappings" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483556 1 config.go:124] Ignoring unknown FeatureGate "ClusterAPIInstallIBMCloud" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483559 1 config.go:124] Ignoring unknown FeatureGate "IngressControllerDynamicConfigurationManager" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483561 1 config.go:124] Ignoring unknown FeatureGate "OpenShiftPodSecurityAdmission" 2025-12-12T16:16:49.483568599+00:00 stderr F W1212 16:16:49.483563 1 config.go:124] Ignoring unknown FeatureGate "CPMSMachineNamePrefix" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483566 1 config.go:124] Ignoring unknown FeatureGate "ManagedBootImages" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483569 1 config.go:124] Ignoring unknown FeatureGate "MetricsCollectionProfiles" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483572 1 config.go:124] Ignoring unknown FeatureGate "NetworkLiveMigration" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483576 1 config.go:124] Ignoring unknown FeatureGate "NewOLM" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483578 1 config.go:124] Ignoring unknown FeatureGate "VSphereMultiDisk" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483580 1 config.go:124] Ignoring unknown FeatureGate "ImageStreamImportMode" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483582 1 config.go:124] Ignoring unknown FeatureGate "MachineAPIOperatorDisableMachineHealthCheckController" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483586 1 config.go:124] Ignoring unknown FeatureGate "ManagedBootImagesAWS" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483588 1 config.go:124] Ignoring unknown FeatureGate "AzureClusterHostedDNSInstall" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483590 1 config.go:124] Ignoring unknown FeatureGate "ExternalOIDC" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483592 1 config.go:124] Ignoring unknown FeatureGate "GCPCustomAPIEndpointsInstall" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483595 1 config.go:124] Ignoring unknown FeatureGate "MinimumKubeletVersion" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483597 1 config.go:124] Ignoring unknown FeatureGate "NewOLMCatalogdAPIV1Metas" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483599 1 config.go:124] Ignoring unknown FeatureGate "IngressControllerLBSubnetsAWS" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483601 1 config.go:124] Ignoring unknown FeatureGate "RouteAdvertisements" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483604 1 config.go:124] Ignoring unknown FeatureGate "DualReplica" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483606 1 config.go:124] Ignoring unknown FeatureGate "ClusterVersionOperatorConfiguration" 2025-12-12T16:16:49.483624820+00:00 stderr F W1212 16:16:49.483608 1 config.go:124] Ignoring unknown FeatureGate "ManagedBootImagesAzure" 2025-12-12T16:16:49.483638490+00:00 stderr F W1212 16:16:49.483612 1 config.go:124] Ignoring unknown FeatureGate "MultiArchInstallAzure" 2025-12-12T16:16:49.483638490+00:00 stderr F W1212 16:16:49.483626 1 config.go:124] Ignoring unknown FeatureGate "NewOLMWebhookProviderOpenshiftServiceCA" 2025-12-12T16:16:49.483638490+00:00 stderr F W1212 16:16:49.483628 1 config.go:124] Ignoring unknown FeatureGate "PreconfiguredUDNAddresses" 
2025-12-12T16:16:49.483638490+00:00 stderr F W1212 16:16:49.483630 1 config.go:124] Ignoring unknown FeatureGate "PinnedImages" 2025-12-12T16:16:49.483638490+00:00 stderr F W1212 16:16:49.483632 1 config.go:124] Ignoring unknown FeatureGate "AWSServiceLBNetworkSecurityGroup" 2025-12-12T16:16:49.483638490+00:00 stderr F W1212 16:16:49.483635 1 config.go:124] Ignoring unknown FeatureGate "AutomatedEtcdBackup" 2025-12-12T16:16:49.483648771+00:00 stderr F W1212 16:16:49.483637 1 config.go:124] Ignoring unknown FeatureGate "AzureDedicatedHosts" 2025-12-12T16:16:49.483648771+00:00 stderr F W1212 16:16:49.483640 1 config.go:124] Ignoring unknown FeatureGate "ExternalSnapshotMetadata" 2025-12-12T16:16:49.483648771+00:00 stderr F W1212 16:16:49.483642 1 config.go:124] Ignoring unknown FeatureGate "GCPClusterHostedDNS" 2025-12-12T16:16:49.483648771+00:00 stderr F W1212 16:16:49.483644 1 config.go:124] Ignoring unknown FeatureGate "GCPClusterHostedDNSInstall" 2025-12-12T16:16:49.483663551+00:00 stderr F W1212 16:16:49.483646 1 config.go:124] Ignoring unknown FeatureGate "Example" 2025-12-12T16:16:49.483663551+00:00 stderr F W1212 16:16:49.483649 1 config.go:124] Ignoring unknown FeatureGate "SetEIPForNLBIngressController" 2025-12-12T16:16:49.483663551+00:00 stderr F W1212 16:16:49.483651 1 config.go:124] Ignoring unknown FeatureGate "AdditionalRoutingCapabilities" 2025-12-12T16:16:49.483663551+00:00 stderr F W1212 16:16:49.483654 1 config.go:124] Ignoring unknown FeatureGate "VSphereMultiNetworks" 2025-12-12T16:16:49.483663551+00:00 stderr F W1212 16:16:49.483656 1 config.go:124] Ignoring unknown FeatureGate "ImageModeStatusReporting" 2025-12-12T16:16:49.483663551+00:00 stderr F W1212 16:16:49.483658 1 config.go:124] Ignoring unknown FeatureGate "KMSEncryptionProvider" 2025-12-12T16:16:49.483672451+00:00 stderr F W1212 16:16:49.483661 1 config.go:124] Ignoring unknown FeatureGate "MultiDiskSetup" 2025-12-12T16:16:49.483672451+00:00 stderr F W1212 16:16:49.483664 1 config.go:124] Ignoring unknown FeatureGate "AWSClusterHostedDNS" 2025-12-12T16:16:49.483672451+00:00 stderr F W1212 16:16:49.483667 1 config.go:124] Ignoring unknown FeatureGate "ClusterMonitoringConfig" 2025-12-12T16:16:49.483672451+00:00 stderr F W1212 16:16:49.483669 1 config.go:124] Ignoring unknown FeatureGate "InsightsOnDemandDataGather" 2025-12-12T16:16:49.483682431+00:00 stderr F W1212 16:16:49.483671 1 config.go:124] Ignoring unknown FeatureGate "OVNObservability" 2025-12-12T16:16:49.483682431+00:00 stderr F W1212 16:16:49.483674 1 config.go:124] Ignoring unknown FeatureGate "GatewayAPI" 2025-12-12T16:16:49.483682431+00:00 stderr F W1212 16:16:49.483676 1 config.go:124] Ignoring unknown FeatureGate "NutanixMultiSubnets" 2025-12-12T16:16:49.483682431+00:00 stderr F W1212 16:16:49.483678 1 config.go:124] Ignoring unknown FeatureGate "SignatureStores" 2025-12-12T16:16:49.483691442+00:00 stderr F W1212 16:16:49.483681 1 config.go:124] Ignoring unknown FeatureGate "AdminNetworkPolicy" 2025-12-12T16:16:49.483691442+00:00 stderr F W1212 16:16:49.483684 1 config.go:124] Ignoring unknown FeatureGate "HighlyAvailableArbiter" 2025-12-12T16:16:49.483691442+00:00 stderr F W1212 16:16:49.483686 1 config.go:124] Ignoring unknown FeatureGate "BootcNodeManagement" 2025-12-12T16:16:49.483699442+00:00 stderr F W1212 16:16:49.483688 1 config.go:124] Ignoring unknown FeatureGate "NoRegistryClusterOperations" 2025-12-12T16:16:49.483699442+00:00 stderr F W1212 16:16:49.483692 1 config.go:124] Ignoring unknown FeatureGate "AWSDedicatedHosts" 
2025-12-12T16:16:49.483699442+00:00 stderr F W1212 16:16:49.483695 1 config.go:124] Ignoring unknown FeatureGate "DNSNameResolver" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483707 1 config.go:124] Ignoring unknown FeatureGate "EtcdBackendQuota" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483714 1 config.go:124] Ignoring unknown FeatureGate "Example2" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483716 1 config.go:124] Ignoring unknown FeatureGate "MachineAPIMigration" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483719 1 config.go:124] Ignoring unknown FeatureGate "VSphereConfigurableMaxAllowedBlockVolumesPerNode" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483721 1 config.go:124] Ignoring unknown FeatureGate "VSphereMixedNodeEnv" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483723 1 config.go:124] Ignoring unknown FeatureGate "GatewayAPIController" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483726 1 config.go:124] Ignoring unknown FeatureGate "StoragePerformantSecurityPolicy" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483728 1 config.go:124] Ignoring unknown FeatureGate "InsightsConfigAPI" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483730 1 config.go:124] Ignoring unknown FeatureGate "ManagedBootImagesvSphere" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483732 1 config.go:124] Ignoring unknown FeatureGate "MixedCPUsAllocation" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483735 1 config.go:124] Ignoring unknown FeatureGate "NewOLMPreflightPermissionChecks" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483737 1 config.go:124] Ignoring unknown FeatureGate "ShortCertRotation" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483739 1 config.go:124] Ignoring unknown FeatureGate "NetworkSegmentation" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483741 1 config.go:124] Ignoring unknown FeatureGate "InsightsConfig" 2025-12-12T16:16:49.484171593+00:00 stderr F W1212 16:16:49.483744 1 config.go:124] Ignoring unknown FeatureGate "IrreconcilableMachineConfig" 2025-12-12T16:16:49.488846128+00:00 stderr F I1212 16:16:49.488222 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:50.522334570+00:00 stderr F I1212 16:16:50.522263 1 requestheader_controller.go:255] Loaded a new request header values for RequestHeaderAuthRequestController 2025-12-12T16:16:50.555239183+00:00 stderr F I1212 16:16:50.554742 1 audit.go:340] Using audit backend: ignoreErrors 2025-12-12T16:16:50.565221957+00:00 stderr F I1212 16:16:50.564202 1 plugins.go:83] "Registered admission plugin" plugin="NamespaceLifecycle" 2025-12-12T16:16:50.565221957+00:00 stderr F I1212 16:16:50.564231 1 plugins.go:83] "Registered admission plugin" plugin="ValidatingAdmissionWebhook" 2025-12-12T16:16:50.565221957+00:00 stderr F I1212 16:16:50.564236 1 plugins.go:83] "Registered admission plugin" plugin="MutatingAdmissionWebhook" 2025-12-12T16:16:50.565221957+00:00 stderr F I1212 16:16:50.564241 1 plugins.go:83] "Registered admission plugin" plugin="ValidatingAdmissionPolicy" 2025-12-12T16:16:50.565221957+00:00 stderr F I1212 16:16:50.564245 1 plugins.go:83] "Registered admission plugin" plugin="MutatingAdmissionPolicy" 2025-12-12T16:16:50.570359062+00:00 stderr F I1212 16:16:50.570306 1 admission.go:48] Admission 
plugin "project.openshift.io/ProjectRequestLimit" is not configured so it will be disabled. 2025-12-12T16:16:50.610832000+00:00 stderr F I1212 16:16:50.610742 1 plugins.go:157] Loaded 5 mutating admission controller(s) successfully in the following order: NamespaceLifecycle,build.openshift.io/BuildConfigSecretInjector,image.openshift.io/ImageLimitRange,image.openshift.io/ImagePolicy,MutatingAdmissionWebhook. 2025-12-12T16:16:50.610832000+00:00 stderr F I1212 16:16:50.610785 1 plugins.go:160] Loaded 9 validating admission controller(s) successfully in the following order: OwnerReferencesPermissionEnforcement,build.openshift.io/BuildConfigSecretInjector,build.openshift.io/BuildByStrategy,image.openshift.io/ImageLimitRange,image.openshift.io/ImagePolicy,quota.openshift.io/ClusterResourceQuota,route.openshift.io/RequiredRouteAnnotations,ValidatingAdmissionWebhook,ResourceQuota. 2025-12-12T16:16:50.611912687+00:00 stderr F I1212 16:16:50.611835 1 maxinflight.go:139] "Initialized nonMutatingChan" len=3000 2025-12-12T16:16:50.611912687+00:00 stderr F I1212 16:16:50.611858 1 maxinflight.go:145] "Initialized mutatingChan" len=1500 2025-12-12T16:16:50.611912687+00:00 stderr F I1212 16:16:50.611897 1 maxinflight.go:116] "Set denominator for readonly requests" limit=3000 2025-12-12T16:16:50.611912687+00:00 stderr F I1212 16:16:50.611903 1 maxinflight.go:120] "Set denominator for mutating requests" limit=1500 2025-12-12T16:16:50.624689549+00:00 stderr F I1212 16:16:50.624612 1 handler.go:288] Adding GroupVersion quota.openshift.io v1 to ResourceManager 2025-12-12T16:16:50.626428671+00:00 stderr F I1212 16:16:50.624950 1 maxinflight.go:139] "Initialized nonMutatingChan" len=3000 2025-12-12T16:16:50.626428671+00:00 stderr F I1212 16:16:50.624971 1 maxinflight.go:145] "Initialized mutatingChan" len=1500 2025-12-12T16:16:50.928020824+00:00 stderr F I1212 16:16:50.927652 1 store.go:1663] "Monitoring resource count at path" resource="builds.build.openshift.io" path="//builds" 2025-12-12T16:16:50.934631176+00:00 stderr F I1212 16:16:50.934555 1 store.go:1663] "Monitoring resource count at path" resource="buildconfigs.build.openshift.io" path="//buildconfigs" 2025-12-12T16:16:50.938563642+00:00 stderr F I1212 16:16:50.938488 1 cacher.go:469] cacher (builds.build.openshift.io): initialized 2025-12-12T16:16:50.938596952+00:00 stderr F I1212 16:16:50.938569 1 reflector.go:430] "Caches populated" type="*build.Build" reflector="storage/cacher.go:/builds" 2025-12-12T16:16:50.938622883+00:00 stderr F I1212 16:16:50.938586 1 cacher.go:469] cacher (buildconfigs.build.openshift.io): initialized 2025-12-12T16:16:50.938664314+00:00 stderr F I1212 16:16:50.938642 1 reflector.go:430] "Caches populated" type="*build.BuildConfig" reflector="storage/cacher.go:/buildconfigs" 2025-12-12T16:16:50.940012577+00:00 stderr F I1212 16:16:50.939738 1 handler.go:288] Adding GroupVersion build.openshift.io v1 to ResourceManager 2025-12-12T16:16:50.940012577+00:00 stderr F I1212 16:16:50.939875 1 maxinflight.go:139] "Initialized nonMutatingChan" len=3000 2025-12-12T16:16:50.940012577+00:00 stderr F I1212 16:16:50.939888 1 maxinflight.go:145] "Initialized mutatingChan" len=1500 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.960816 1 apiserver.go:156] reading image import ca path: /var/run/configmaps/image-import-ca, incoming err: 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.960843 1 apiserver.go:161] skipping dir or symlink: /var/run/configmaps/image-import-ca 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 
16:16:50.960879 1 apiserver.go:156] reading image import ca path: /var/run/configmaps/image-import-ca/..2025_12_12_16_16_42.2314801924, incoming err: 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.960883 1 apiserver.go:161] skipping dir or symlink: /var/run/configmaps/image-import-ca/..2025_12_12_16_16_42.2314801924 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.960896 1 apiserver.go:156] reading image import ca path: /var/run/configmaps/image-import-ca/..2025_12_12_16_16_42.2314801924/default-route-openshift-image-registry.apps-crc.testing, incoming err: 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.960957 1 apiserver.go:156] reading image import ca path: /var/run/configmaps/image-import-ca/..2025_12_12_16_16_42.2314801924/image-registry.openshift-image-registry.svc..5000, incoming err: 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.960998 1 apiserver.go:156] reading image import ca path: /var/run/configmaps/image-import-ca/..2025_12_12_16_16_42.2314801924/image-registry.openshift-image-registry.svc.cluster.local..5000, incoming err: 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.961044 1 apiserver.go:156] reading image import ca path: /var/run/configmaps/image-import-ca/..data, incoming err: 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.961049 1 apiserver.go:161] skipping dir or symlink: /var/run/configmaps/image-import-ca/..data 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.961061 1 apiserver.go:156] reading image import ca path: /var/run/configmaps/image-import-ca/default-route-openshift-image-registry.apps-crc.testing, incoming err: 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.961064 1 apiserver.go:161] skipping dir or symlink: /var/run/configmaps/image-import-ca/default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.961076 1 apiserver.go:156] reading image import ca path: /var/run/configmaps/image-import-ca/image-registry.openshift-image-registry.svc..5000, incoming err: 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.961079 1 apiserver.go:161] skipping dir or symlink: /var/run/configmaps/image-import-ca/image-registry.openshift-image-registry.svc..5000 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.961090 1 apiserver.go:156] reading image import ca path: /var/run/configmaps/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000, incoming err: 2025-12-12T16:16:50.963393408+00:00 stderr F I1212 16:16:50.961094 1 apiserver.go:161] skipping dir or symlink: /var/run/configmaps/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:16:50.972831618+00:00 stderr F I1212 16:16:50.971009 1 store.go:1663] "Monitoring resource count at path" resource="images.image.openshift.io" path="//images" 2025-12-12T16:16:50.981985172+00:00 stderr F I1212 16:16:50.981914 1 store.go:1663] "Monitoring resource count at path" resource="imagestreams.image.openshift.io" path="//imagestreams" 2025-12-12T16:16:50.992058498+00:00 stderr F I1212 16:16:50.991724 1 handler.go:288] Adding GroupVersion image.openshift.io v1 to ResourceManager 2025-12-12T16:16:50.992058498+00:00 stderr F W1212 16:16:50.991761 1 genericapiserver.go:792] Skipping API image.openshift.io/1.0 because it has no resources. 2025-12-12T16:16:50.992058498+00:00 stderr F W1212 16:16:50.991775 1 genericapiserver.go:792] Skipping API image.openshift.io/pre012 because it has no resources. 
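The startup entries above show the openshift-apiserver registering its aggregated API groups (build.openshift.io, image.openshift.io, and so on) and initializing a storage cacher per resource. A minimal sketch, not part of the captured log, of querying one of those registered groups through the generic dynamic client; it assumes the Python 'kubernetes' client and access to the same cluster (both assumptions):

from kubernetes import client, config

config.load_kube_config()  # assumption: kubeconfig for the CRC cluster in this log
custom = client.CustomObjectsApi()

# 'builds' in build.openshift.io/v1 is one of the resources the apiserver above
# reports monitoring at path //builds; list it across all namespaces.
builds = custom.list_cluster_custom_object("build.openshift.io", "v1", "builds")
for item in builds.get("items", []):
    meta = item["metadata"]
    print(meta.get("namespace"), meta["name"], item.get("status", {}).get("phase"))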
2025-12-12T16:16:50.993048482+00:00 stderr F I1212 16:16:50.993001 1 maxinflight.go:139] "Initialized nonMutatingChan" len=3000 2025-12-12T16:16:50.993048482+00:00 stderr F I1212 16:16:50.993037 1 maxinflight.go:145] "Initialized mutatingChan" len=1500 2025-12-12T16:16:51.016148926+00:00 stderr F I1212 16:16:51.016066 1 store.go:1663] "Monitoring resource count at path" resource="routes.route.openshift.io" path="//routes" 2025-12-12T16:16:51.134872924+00:00 stderr F I1212 16:16:51.134259 1 cacher.go:469] cacher (routes.route.openshift.io): initialized 2025-12-12T16:16:51.134872924+00:00 stderr F I1212 16:16:51.134318 1 reflector.go:430] "Caches populated" type="*route.Route" reflector="storage/cacher.go:/routes" 2025-12-12T16:16:51.134872924+00:00 stderr F I1212 16:16:51.134801 1 handler.go:288] Adding GroupVersion route.openshift.io v1 to ResourceManager 2025-12-12T16:16:51.135166592+00:00 stderr F I1212 16:16:51.135105 1 maxinflight.go:139] "Initialized nonMutatingChan" len=3000 2025-12-12T16:16:51.135166592+00:00 stderr F I1212 16:16:51.135137 1 maxinflight.go:145] "Initialized mutatingChan" len=1500 2025-12-12T16:16:51.138552714+00:00 stderr F I1212 16:16:51.137565 1 cacher.go:469] cacher (imagestreams.image.openshift.io): initialized 2025-12-12T16:16:51.138552714+00:00 stderr F I1212 16:16:51.137607 1 reflector.go:430] "Caches populated" type="*image.ImageStream" reflector="storage/cacher.go:/imagestreams" 2025-12-12T16:16:51.146665802+00:00 stderr F I1212 16:16:51.146534 1 store.go:1663] "Monitoring resource count at path" resource="rangeallocations.security.openshift.io" path="//rangeallocations" 2025-12-12T16:16:51.153281044+00:00 stderr F I1212 16:16:51.150487 1 cacher.go:469] cacher (rangeallocations.security.openshift.io): initialized 2025-12-12T16:16:51.153281044+00:00 stderr F I1212 16:16:51.150544 1 reflector.go:430] "Caches populated" type="*security.RangeAllocation" reflector="storage/cacher.go:/rangeallocations" 2025-12-12T16:16:51.153281044+00:00 stderr F I1212 16:16:51.152309 1 handler.go:288] Adding GroupVersion security.openshift.io v1 to ResourceManager 2025-12-12T16:16:51.153281044+00:00 stderr F I1212 16:16:51.152417 1 maxinflight.go:139] "Initialized nonMutatingChan" len=3000 2025-12-12T16:16:51.153281044+00:00 stderr F I1212 16:16:51.152429 1 maxinflight.go:145] "Initialized mutatingChan" len=1500 2025-12-12T16:16:51.175402774+00:00 stderr F I1212 16:16:51.175327 1 cacher.go:469] cacher (images.image.openshift.io): initialized 2025-12-12T16:16:51.175442385+00:00 stderr F I1212 16:16:51.175392 1 reflector.go:430] "Caches populated" type="*image.Image" reflector="storage/cacher.go:/images" 2025-12-12T16:16:51.243198299+00:00 stderr F I1212 16:16:51.241283 1 store.go:1663] "Monitoring resource count at path" resource="templates.template.openshift.io" path="//templates" 2025-12-12T16:16:51.256558125+00:00 stderr F I1212 16:16:51.256488 1 cacher.go:469] cacher (templates.template.openshift.io): initialized 2025-12-12T16:16:51.256652108+00:00 stderr F I1212 16:16:51.256609 1 reflector.go:430] "Caches populated" type="*template.Template" reflector="storage/cacher.go:/templates" 2025-12-12T16:16:51.256879653+00:00 stderr F I1212 16:16:51.256818 1 store.go:1663] "Monitoring resource count at path" resource="templateinstances.template.openshift.io" path="//templateinstances" 2025-12-12T16:16:51.260150493+00:00 stderr F I1212 16:16:51.260075 1 cacher.go:469] cacher (templateinstances.template.openshift.io): initialized 2025-12-12T16:16:51.260150493+00:00 stderr F I1212 
16:16:51.260136 1 reflector.go:430] "Caches populated" type="*template.TemplateInstance" reflector="storage/cacher.go:/templateinstances" 2025-12-12T16:16:51.332735405+00:00 stderr F I1212 16:16:51.332318 1 store.go:1663] "Monitoring resource count at path" resource="brokertemplateinstances.template.openshift.io" path="//brokertemplateinstances" 2025-12-12T16:16:51.336784114+00:00 stderr F I1212 16:16:51.336704 1 cacher.go:469] cacher (brokertemplateinstances.template.openshift.io): initialized 2025-12-12T16:16:51.336784114+00:00 stderr F I1212 16:16:51.336761 1 reflector.go:430] "Caches populated" type="*template.BrokerTemplateInstance" reflector="storage/cacher.go:/brokertemplateinstances" 2025-12-12T16:16:51.337684516+00:00 stderr F I1212 16:16:51.337403 1 handler.go:288] Adding GroupVersion template.openshift.io v1 to ResourceManager 2025-12-12T16:16:51.337684516+00:00 stderr F I1212 16:16:51.337613 1 maxinflight.go:139] "Initialized nonMutatingChan" len=3000 2025-12-12T16:16:51.337684516+00:00 stderr F I1212 16:16:51.337631 1 maxinflight.go:145] "Initialized mutatingChan" len=1500 2025-12-12T16:16:51.352537449+00:00 stderr F I1212 16:16:51.351909 1 store.go:1663] "Monitoring resource count at path" resource="deploymentconfigs.apps.openshift.io" path="//deploymentconfigs" 2025-12-12T16:16:51.354407934+00:00 stderr F I1212 16:16:51.354375 1 cacher.go:469] cacher (deploymentconfigs.apps.openshift.io): initialized 2025-12-12T16:16:51.354462136+00:00 stderr F I1212 16:16:51.354426 1 reflector.go:430] "Caches populated" type="*apps.DeploymentConfig" reflector="storage/cacher.go:/deploymentconfigs" 2025-12-12T16:16:51.362299677+00:00 stderr F I1212 16:16:51.362227 1 handler.go:288] Adding GroupVersion apps.openshift.io v1 to ResourceManager 2025-12-12T16:16:51.362432800+00:00 stderr F I1212 16:16:51.362391 1 maxinflight.go:139] "Initialized nonMutatingChan" len=3000 2025-12-12T16:16:51.362432800+00:00 stderr F I1212 16:16:51.362424 1 maxinflight.go:145] "Initialized mutatingChan" len=1500 2025-12-12T16:16:51.387427890+00:00 stderr F I1212 16:16:51.387351 1 handler.go:288] Adding GroupVersion authorization.openshift.io v1 to ResourceManager 2025-12-12T16:16:51.387687397+00:00 stderr F I1212 16:16:51.387662 1 maxinflight.go:139] "Initialized nonMutatingChan" len=3000 2025-12-12T16:16:51.387741008+00:00 stderr F I1212 16:16:51.387729 1 maxinflight.go:145] "Initialized mutatingChan" len=1500 2025-12-12T16:16:51.395039806+00:00 stderr F I1212 16:16:51.394219 1 handler.go:288] Adding GroupVersion project.openshift.io v1 to ResourceManager 2025-12-12T16:16:51.395039806+00:00 stderr F I1212 16:16:51.394330 1 maxinflight.go:139] "Initialized nonMutatingChan" len=3000 2025-12-12T16:16:51.395039806+00:00 stderr F I1212 16:16:51.394342 1 maxinflight.go:145] "Initialized mutatingChan" len=1500 2025-12-12T16:16:51.864468187+00:00 stderr F I1212 16:16:51.864219 1 server.go:50] Starting master on 0.0.0.0:8443 (v0.0.0-master+$Format:%H$) 2025-12-12T16:16:51.864601780+00:00 stderr F I1212 16:16:51.864580 1 genericapiserver.go:551] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:16:51.868127456+00:00 stderr F I1212 16:16:51.868086 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:51.868159957+00:00 stderr F I1212 16:16:51.868132 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:51.868251950+00:00 stderr F I1212 16:16:51.868228 1 
configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:51.868306011+00:00 stderr F I1212 16:16:51.868294 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:51.868347712+00:00 stderr F I1212 16:16:51.868239 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:51.868386403+00:00 stderr F I1212 16:16:51.868372 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:51.868427244+00:00 stderr F I1212 16:16:51.868395 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"api.openshift-apiserver.svc\" [serving] validServingFor=[api.openshift-apiserver.svc,api.openshift-apiserver.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:16:51.868343432 +0000 UTC))" 2025-12-12T16:16:51.868652259+00:00 stderr F I1212 16:16:51.868614 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556210\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556209\" (2025-12-12 15:16:49 +0000 UTC to 2028-12-12 15:16:49 +0000 UTC (now=2025-12-12 16:16:51.868593068 +0000 UTC))" 2025-12-12T16:16:51.868665240+00:00 stderr F I1212 16:16:51.868642 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:16:51.868692940+00:00 stderr F I1212 16:16:51.868674 1 genericapiserver.go:706] [graceful-termination] waiting for shutdown to be initiated 2025-12-12T16:16:51.869046269+00:00 stderr F I1212 16:16:51.868982 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:51.869098990+00:00 stderr F I1212 16:16:51.869075 1 openshift_apiserver.go:603] Using default project node label selector: 2025-12-12T16:16:51.869148621+00:00 stderr F I1212 16:16:51.869124 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:51.869646974+00:00 stderr F I1212 16:16:51.869599 1 clusterquotamapping.go:127] Starting ClusterQuotaMappingController controller 2025-12-12T16:16:51.873611930+00:00 stderr F I1212 16:16:51.873568 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.873862207+00:00 stderr F I1212 16:16:51.873828 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.874142813+00:00 stderr F I1212 16:16:51.874110 1 reflector.go:430] "Caches populated" type="*v1.MutatingWebhookConfiguration" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.874274147+00:00 stderr F I1212 16:16:51.874242 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.883282206+00:00 stderr F I1212 16:16:51.883207 1 reflector.go:430] "Caches populated" 
type="*v1alpha1.ImageContentSourcePolicy" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.883591574+00:00 stderr F I1212 16:16:51.883562 1 reflector.go:430] "Caches populated" type="*v1.ImageTagMirrorSet" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.884506036+00:00 stderr F I1212 16:16:51.884463 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.884815594+00:00 stderr F I1212 16:16:51.884788 1 reflector.go:430] "Caches populated" type="*v1.ValidatingWebhookConfiguration" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.885149862+00:00 stderr F I1212 16:16:51.885123 1 reflector.go:430] "Caches populated" type="*v1.LimitRange" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.885309596+00:00 stderr F I1212 16:16:51.885282 1 reflector.go:430] "Caches populated" type="*v1.SecurityContextConstraints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.885799178+00:00 stderr F I1212 16:16:51.885749 1 reflector.go:430] "Caches populated" type="*v1.Route" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.887987811+00:00 stderr F I1212 16:16:51.886472 1 reflector.go:430] "Caches populated" type="*v1.ClusterRoleBinding" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.887987811+00:00 stderr F I1212 16:16:51.887446 1 reflector.go:430] "Caches populated" type="*v1.ClusterResourceQuota" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.887987811+00:00 stderr F I1212 16:16:51.887836 1 reflector.go:430] "Caches populated" type="*v1.ImageDigestMirrorSet" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.888042343+00:00 stderr F I1212 16:16:51.886171 1 reflector.go:430] "Caches populated" type="*v1.Ingress" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.899508553+00:00 stderr F I1212 16:16:51.896433 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.899508553+00:00 stderr F I1212 16:16:51.896854 1 reflector.go:430] "Caches populated" type="*v1.ResourceQuota" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.899508553+00:00 stderr F I1212 16:16:51.897736 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.902259660+00:00 stderr F I1212 16:16:51.902015 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.922503904+00:00 stderr F I1212 16:16:51.922435 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.925722283+00:00 stderr F I1212 16:16:51.925552 1 reflector.go:430] "Caches populated" type="*v1.ClusterRole" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.953111591+00:00 stderr F I1212 16:16:51.953031 1 reflector.go:430] "Caches populated" type="*v1.ImageStream" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.968241171+00:00 stderr F I1212 16:16:51.967650 1 
reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.970529537+00:00 stderr F I1212 16:16:51.970487 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:51.970929456+00:00 stderr F I1212 16:16:51.970900 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:51.970989398+00:00 stderr F I1212 16:16:51.970978 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:51.971206203+00:00 stderr F I1212 16:16:51.971191 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:51.971142231 +0000 UTC))" 2025-12-12T16:16:51.971456229+00:00 stderr F I1212 16:16:51.971443 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"api.openshift-apiserver.svc\" [serving] validServingFor=[api.openshift-apiserver.svc,api.openshift-apiserver.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:16:51.971424998 +0000 UTC))" 2025-12-12T16:16:51.971644614+00:00 stderr F I1212 16:16:51.971634 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556210\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556209\" (2025-12-12 15:16:49 +0000 UTC to 2028-12-12 15:16:49 +0000 UTC (now=2025-12-12 16:16:51.971616613 +0000 UTC))" 2025-12-12T16:16:51.972642738+00:00 stderr F I1212 16:16:51.972598 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:51.972561936 +0000 UTC))" 2025-12-12T16:16:51.972662949+00:00 stderr F I1212 16:16:51.972634 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:51.972620548 +0000 UTC))" 2025-12-12T16:16:51.972662949+00:00 stderr F I1212 16:16:51.972652 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:51.972645178 +0000 UTC))" 2025-12-12T16:16:51.972672849+00:00 
stderr F I1212 16:16:51.972664 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:51.972657109 +0000 UTC))" 2025-12-12T16:16:51.972695009+00:00 stderr F I1212 16:16:51.972676 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:51.972668569 +0000 UTC))" 2025-12-12T16:16:51.972702280+00:00 stderr F I1212 16:16:51.972692 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:51.972683139 +0000 UTC))" 2025-12-12T16:16:51.972740611+00:00 stderr F I1212 16:16:51.972708 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:51.97269758 +0000 UTC))" 2025-12-12T16:16:51.972740611+00:00 stderr F I1212 16:16:51.972726 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:51.97271823 +0000 UTC))" 2025-12-12T16:16:51.972934065+00:00 stderr F I1212 16:16:51.972909 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"api.openshift-apiserver.svc\" [serving] validServingFor=[api.openshift-apiserver.svc,api.openshift-apiserver.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:16:51.972896754 +0000 UTC))" 2025-12-12T16:16:51.973119240+00:00 stderr F I1212 16:16:51.973092 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556210\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556209\" (2025-12-12 15:16:49 +0000 UTC to 2028-12-12 15:16:49 +0000 UTC (now=2025-12-12 16:16:51.973076639 +0000 UTC))" 2025-12-12T16:16:52.027995990+00:00 stderr F I1212 16:16:52.027901 1 reflector.go:430] "Caches populated" type="*etcd.ImageLayers" 
reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:55.903367433+00:00 stderr F I1212 16:16:55.901887 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.901833826 +0000 UTC))" 2025-12-12T16:16:55.903367433+00:00 stderr F I1212 16:16:55.901941 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.901913938 +0000 UTC))" 2025-12-12T16:16:55.903367433+00:00 stderr F I1212 16:16:55.901956 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.901948939 +0000 UTC))" 2025-12-12T16:16:55.903367433+00:00 stderr F I1212 16:16:55.901970 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.901960489 +0000 UTC))" 2025-12-12T16:16:55.903367433+00:00 stderr F I1212 16:16:55.901987 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.90197764 +0000 UTC))" 2025-12-12T16:16:55.903367433+00:00 stderr F I1212 16:16:55.902012 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.90199347 +0000 UTC))" 2025-12-12T16:16:55.903367433+00:00 stderr F I1212 16:16:55.902028 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.90201705 +0000 UTC))" 2025-12-12T16:16:55.903367433+00:00 stderr F I1212 16:16:55.902043 1 tlsconfig.go:181] "Loaded client CA" 
index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.902032821 +0000 UTC))" 2025-12-12T16:16:55.903367433+00:00 stderr F I1212 16:16:55.902056 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.902047281 +0000 UTC))" 2025-12-12T16:16:55.903367433+00:00 stderr F I1212 16:16:55.902075 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.902063372 +0000 UTC))" 2025-12-12T16:16:55.921336852+00:00 stderr F I1212 16:16:55.919217 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"api.openshift-apiserver.svc\" [serving] validServingFor=[api.openshift-apiserver.svc,api.openshift-apiserver.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:16:55.913346617 +0000 UTC))" 2025-12-12T16:16:55.921336852+00:00 stderr F I1212 16:16:55.919530 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556210\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556209\" (2025-12-12 15:16:49 +0000 UTC to 2028-12-12 15:16:49 +0000 UTC (now=2025-12-12 16:16:55.919494847 +0000 UTC))" 2025-12-12T16:17:03.025960776+00:00 stderr F E1212 16:17:03.024748 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:3bc55cb1bafdd281a784ec7950c8e95914079522f152f642e8172869e83b4585\": unexpected end of JSON input" 2025-12-12T16:17:03.036661287+00:00 stderr F I1212 16:17:03.036595 1 trace.go:236] Trace[573901152]: "Create" accept:application/json, */*,audit-id:7764faf7-175f-406d-a810-a9fc59678b53,client:10.217.0.10,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:cluster-samples-operator/v0.0.0 (linux/amd64) kubernetes/$Format,verb:POST (12-Dec-2025 16:17:01.190) (total time: 1846ms): 2025-12-12T16:17:03.036661287+00:00 stderr F Trace[573901152]: [1.846202774s] [1.846202774s] END 2025-12-12T16:17:03.618460082+00:00 stderr F E1212 16:17:03.618395 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:52f9b4df3f7833876ee502a6bff2539491db07e060b213b6a0a8fda0c4a881c1\": unexpected end of JSON input" 2025-12-12T16:17:03.625122784+00:00 stderr F I1212 
16:17:03.625025 1 trace.go:236] Trace[1168823619]: "Create" accept:application/json, */*,audit-id:055557ad-21aa-4253-a90b-16a4d614fd77,client:10.217.0.10,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:cluster-samples-operator/v0.0.0 (linux/amd64) kubernetes/$Format,verb:POST (12-Dec-2025 16:17:03.072) (total time: 552ms): 2025-12-12T16:17:03.625122784+00:00 stderr F Trace[1168823619]: [552.604092ms] [552.604092ms] END 2025-12-12T16:17:46.319268323+00:00 stderr F I1212 16:17:46.319205 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.319129449 +0000 UTC))" 2025-12-12T16:17:46.319387086+00:00 stderr F I1212 16:17:46.319375 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.319356745 +0000 UTC))" 2025-12-12T16:17:46.319433207+00:00 stderr F I1212 16:17:46.319423 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.319402836 +0000 UTC))" 2025-12-12T16:17:46.319490818+00:00 stderr F I1212 16:17:46.319478 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.319468298 +0000 UTC))" 2025-12-12T16:17:46.319534109+00:00 stderr F I1212 16:17:46.319515 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.319505359 +0000 UTC))" 2025-12-12T16:17:46.319576400+00:00 stderr F I1212 16:17:46.319566 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.31955445 +0000 UTC))" 2025-12-12T16:17:46.319624292+00:00 stderr F I1212 16:17:46.319604 1 tlsconfig.go:181] "Loaded 
client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.319590221 +0000 UTC))" 2025-12-12T16:17:46.319677063+00:00 stderr F I1212 16:17:46.319657 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.319644492 +0000 UTC))" 2025-12-12T16:17:46.319724064+00:00 stderr F I1212 16:17:46.319714 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.319692223 +0000 UTC))" 2025-12-12T16:17:46.319769415+00:00 stderr F I1212 16:17:46.319760 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.319748705 +0000 UTC))" 2025-12-12T16:17:46.319814226+00:00 stderr F I1212 16:17:46.319796 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.319784626 +0000 UTC))" 2025-12-12T16:17:46.320082273+00:00 stderr F I1212 16:17:46.320068 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"api.openshift-apiserver.svc\" [serving] validServingFor=[api.openshift-apiserver.svc,api.openshift-apiserver.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:17:46.320051162 +0000 UTC))" 2025-12-12T16:17:46.320352430+00:00 stderr F I1212 16:17:46.320334 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556210\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556209\" (2025-12-12 15:16:49 +0000 UTC to 2028-12-12 15:16:49 +0000 UTC (now=2025-12-12 16:17:46.320317609 +0000 UTC))" 2025-12-12T16:18:30.564230336+00:00 stderr F E1212 16:18:30.563390 1 webhook.go:269] Failed to make webhook authorizer request: Post 
"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.564230336+00:00 stderr F E1212 16:18:30.563591 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:30.572082840+00:00 stderr F E1212 16:18:30.572011 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.572114541+00:00 stderr F E1212 16:18:30.572088 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:30.583393550+00:00 stderr F E1212 16:18:30.582087 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.584501587+00:00 stderr F E1212 16:18:30.584341 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:30.596243468+00:00 stderr F E1212 16:18:30.596160 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.596971206+00:00 stderr F E1212 16:18:30.596269 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:30.633774466+00:00 stderr F E1212 16:18:30.633680 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.633774466+00:00 stderr F E1212 16:18:30.633749 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:30.639149708+00:00 stderr F E1212 16:18:30.639087 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.639225910+00:00 stderr F E1212 16:18:30.639147 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:30.644712586+00:00 stderr F E1212 16:18:30.644440 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.644712586+00:00 stderr F E1212 16:18:30.644484 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: 
connection refused" 2025-12-12T16:18:30.648417038+00:00 stderr F E1212 16:18:30.648361 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.648417038+00:00 stderr F E1212 16:18:30.648397 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:30.651978126+00:00 stderr F E1212 16:18:30.651912 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.651978126+00:00 stderr F E1212 16:18:30.651950 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.517231037+00:00 stderr F E1212 16:18:31.514355 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.517231037+00:00 stderr F E1212 16:18:31.514434 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.521820321+00:00 stderr F E1212 16:18:31.521463 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.521820321+00:00 stderr F E1212 16:18:31.521560 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.534115345+00:00 stderr F E1212 16:18:31.534046 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.534188486+00:00 stderr F E1212 16:18:31.534145 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.539244031+00:00 stderr F E1212 16:18:31.539163 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.539291843+00:00 stderr F E1212 16:18:31.539266 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.544670416+00:00 stderr F E1212 16:18:31.544639 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.544767898+00:00 stderr F E1212 16:18:31.544754 1 errors.go:83] 
"Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.550377577+00:00 stderr F E1212 16:18:31.550337 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.550418648+00:00 stderr F E1212 16:18:31.550400 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.551260608+00:00 stderr F E1212 16:18:31.551227 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.551346611+00:00 stderr F E1212 16:18:31.551332 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.556260972+00:00 stderr F E1212 16:18:31.556171 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.556348484+00:00 stderr F E1212 16:18:31.556320 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.556444487+00:00 stderr F E1212 16:18:31.556421 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.556535209+00:00 stderr F E1212 16:18:31.556455 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.562071836+00:00 stderr F E1212 16:18:31.562041 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.562158168+00:00 stderr F E1212 16:18:31.562145 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.570853793+00:00 stderr F E1212 16:18:31.570802 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.570898814+00:00 stderr F E1212 16:18:31.570876 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.571007077+00:00 stderr F E1212 16:18:31.570970 1 webhook.go:269] Failed to make webhook authorizer request: Post 
"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.571025047+00:00 stderr F E1212 16:18:31.571016 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.576327788+00:00 stderr F E1212 16:18:31.576255 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.576374859+00:00 stderr F E1212 16:18:31.576322 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.576374859+00:00 stderr F E1212 16:18:31.576340 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.576472832+00:00 stderr F E1212 16:18:31.576427 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.580694686+00:00 stderr F E1212 16:18:31.580633 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.580731847+00:00 stderr F E1212 16:18:31.580692 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.584761197+00:00 stderr F E1212 16:18:31.584712 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.584761197+00:00 stderr F E1212 16:18:31.584753 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.604583857+00:00 stderr F E1212 16:18:31.604509 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.604621158+00:00 stderr F E1212 16:18:31.604606 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.619898855+00:00 stderr F E1212 16:18:31.619846 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.620038109+00:00 stderr F E1212 16:18:31.620023 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: 
connection refused" 2025-12-12T16:18:33.358095388+00:00 stderr F E1212 16:18:33.358023 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:33.358257362+00:00 stderr F E1212 16:18:33.358231 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:34.628300381+00:00 stderr F E1212 16:18:34.628209 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:34.628385083+00:00 stderr F E1212 16:18:34.628365 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:34.678234186+00:00 stderr F E1212 16:18:34.675388 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:34.678234186+00:00 stderr F E1212 16:18:34.675568 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:35.579501368+00:00 stderr F E1212 16:18:35.579421 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:35.579727443+00:00 stderr F E1212 16:18:35.579681 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:35.878284024+00:00 stderr F E1212 16:18:35.878171 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:35.878611883+00:00 stderr F E1212 16:18:35.878401 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:38.529820767+00:00 stderr F E1212 16:18:38.529748 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:38.530136005+00:00 stderr F E1212 16:18:38.530109 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.273062649+00:00 stderr F E1212 16:18:52.272989 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:52.273329726+00:00 stderr F E1212 16:18:52.273284 1 errors.go:83] 
"Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.626718852+00:00 stderr F E1212 16:18:52.626608 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:52.626718852+00:00 stderr F E1212 16:18:52.626696 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.127850152+00:00 stderr F E1212 16:18:53.127775 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:53.127909013+00:00 stderr F E1212 16:18:53.127856 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:56.683840925+00:00 stderr F E1212 16:18:56.683775 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:56.683893596+00:00 stderr F E1212 16:18:56.683869 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:19:29.547292295+00:00 stderr F I1212 16:19:29.545456 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:29.891611130+00:00 stderr F I1212 16:19:29.891520 1 reflector.go:430] "Caches populated" type="*v1.ResourceQuota" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:30.650302638+00:00 stderr F I1212 16:19:30.650227 1 reflector.go:430] "Caches populated" type="*v1.ImageDigestMirrorSet" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:34.158694447+00:00 stderr F I1212 16:19:34.158597 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:39.700821649+00:00 stderr F I1212 16:19:39.700710 1 reflector.go:430] "Caches populated" type="*v1.Ingress" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:45.486877754+00:00 stderr F I1212 16:19:45.486799 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:59.909249009+00:00 stderr F I1212 16:19:59.908790 1 reflector.go:430] "Caches populated" type="*v1.ClusterRoleBinding" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:00.223450088+00:00 stderr F I1212 16:20:00.223333 1 reflector.go:430] "Caches populated" type="*v1alpha1.ImageContentSourcePolicy" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:09.308827543+00:00 stderr F I1212 16:20:09.308733 1 reflector.go:430] "Caches populated" type="*v1.ImageTagMirrorSet" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 
2025-12-12T16:20:12.844056255+00:00 stderr F I1212 16:20:12.843970 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:16.631990773+00:00 stderr F I1212 16:20:16.631926 1 reflector.go:430] "Caches populated" type="*v1.LimitRange" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:17.675726828+00:00 stderr F I1212 16:20:17.674211 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:22.898095332+00:00 stderr F I1212 16:20:22.898034 1 reflector.go:430] "Caches populated" type="*v1.SecurityContextConstraints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:23.249973086+00:00 stderr F I1212 16:20:23.249886 1 reflector.go:430] "Caches populated" type="*v1.ClusterResourceQuota" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:23.383580911+00:00 stderr F I1212 16:20:23.383497 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:24.007165438+00:00 stderr F I1212 16:20:24.007070 1 reflector.go:430] "Caches populated" type="*v1.ClusterRole" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:24.824042898+00:00 stderr F I1212 16:20:24.823965 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:30.186715517+00:00 stderr F I1212 16:20:30.186218 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:31.114092282+00:00 stderr F I1212 16:20:31.113985 1 reflector.go:430] "Caches populated" type="*v1.MutatingWebhookConfiguration" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:36.805246349+00:00 stderr F I1212 16:20:36.805165 1 reflector.go:430] "Caches populated" type="*v1.ValidatingWebhookConfiguration" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:41.093660323+00:00 stderr F I1212 16:20:41.093043 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:25:13.399159039+00:00 stderr F E1212 16:25:13.399079 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:4f35566977c35306a8f2102841ceb7fa10a6d9ac47c079131caed5655140f9b2\": unexpected end of JSON input" 2025-12-12T16:25:13.499070083+00:00 stderr F E1212 16:25:13.498969 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:df8858f0c01ae1657a14234a94f6785cbb2fba7f12c9d0325f427a3f1284481b\": unexpected end of JSON input" 2025-12-12T16:25:13.502898923+00:00 stderr F E1212 16:25:13.502851 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:9b5d2fc574a13613f18fa983ac2901593c1e812836e918095bc3d15b6cc4ba57\": unexpected end of JSON input" 2025-12-12T16:25:13.507634298+00:00 stderr F E1212 16:25:13.507603 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:57ab1f0ad24e02143978fc79c5219a02c4d6a5a27225ee5454c85a47839b6ddc\": unexpected end of JSON input" 2025-12-12T16:25:13.514909859+00:00 stderr F I1212 16:25:13.514241 1 trace.go:236] Trace[1206177249]: "Create" accept:application/json, 
*/*,audit-id:09df26ce-82e5-4923-98e9-98b2ad7a9644,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:25:12.099) (total time: 1414ms): 2025-12-12T16:25:13.514909859+00:00 stderr F Trace[1206177249]: [1.414949637s] [1.414949637s] END 2025-12-12T16:25:13.538783206+00:00 stderr F E1212 16:25:13.538631 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:55dc61c31ea50a8f7a45e993a9b3220097974948b5cd1ab3f317e7702e8cb6fc\": unexpected end of JSON input" 2025-12-12T16:25:13.914887742+00:00 stderr F E1212 16:25:13.914814 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:5dfcc5b000a1fab4be66bbd43e4db44b61176e2bcba9c24f6fe887dea9b7fd49\": unexpected end of JSON input" 2025-12-12T16:25:13.918765824+00:00 stderr F E1212 16:25:13.918721 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:55a832a2dd32c4ab288b2c76e1c531bd6df07651010f7b9f8f983dff5ee584ab\": unexpected end of JSON input" 2025-12-12T16:25:13.921702451+00:00 stderr F E1212 16:25:13.921656 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:43920d10408205a379519e16530d5181db65a79df3c2725b3cbad26798d09037\": unexpected end of JSON input" 2025-12-12T16:25:13.929590438+00:00 stderr F I1212 16:25:13.929519 1 trace.go:236] Trace[2021574230]: "Create" accept:application/json, */*,audit-id:9dbcdd4a-a219-462a-834b-c130c9b25caa,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:25:12.340) (total time: 1588ms): 2025-12-12T16:25:13.929590438+00:00 stderr F Trace[2021574230]: [1.588970347s] [1.588970347s] END 2025-12-12T16:25:14.702108434+00:00 stderr F E1212 16:25:14.702011 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:70a21b3f93c05843ce9d07f125b1464436caf01680bb733754a2a5df5bc3b11b\": unexpected end of JSON input" 2025-12-12T16:25:14.706580251+00:00 stderr F E1212 16:25:14.706520 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:7201e059b92acc55fe9fe1cc390d44e92f0e2af297fbe52b3f1bb56327f59624\": unexpected end of JSON input" 2025-12-12T16:25:14.710267408+00:00 stderr F E1212 16:25:14.710168 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:ec784f172735873a5893504e07c57dcbd56b7b049a395c5629c6058dbfac21a3\": unexpected end of JSON input" 2025-12-12T16:25:14.717285902+00:00 stderr F I1212 16:25:14.717244 1 trace.go:236] Trace[1935129334]: "Create" accept:application/json, */*,audit-id:73f1557a-f417-4f51-b263-b385d17ca8d2,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) 
kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:25:13.061) (total time: 1655ms): 2025-12-12T16:25:14.717285902+00:00 stderr F Trace[1935129334]: [1.655214586s] [1.655214586s] END 2025-12-12T16:25:15.126809586+00:00 stderr F E1212 16:25:15.126717 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:d186c94f8843f854d77b2b05d10efb0d272f88a4bf4f1d8ebe304428b9396392\": unexpected end of JSON input" 2025-12-12T16:25:15.131594382+00:00 stderr F E1212 16:25:15.131530 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:e37aeaeb0159194a9855350e13e399470f39ce340d6381069933742990741fb8\": unexpected end of JSON input" 2025-12-12T16:25:15.135297219+00:00 stderr F E1212 16:25:15.135246 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:f89a54e6d1340be8ddd84a602cb4f1f27c1983417f655941645bf11809d49f18\": unexpected end of JSON input" 2025-12-12T16:25:15.138723989+00:00 stderr F E1212 16:25:15.138693 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:739fac452e78a21a16b66e0451b85590b9e48ec7a1ed3887fbb9ed85cf564275\": unexpected end of JSON input" 2025-12-12T16:25:15.142159169+00:00 stderr F E1212 16:25:15.142108 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:0eea1d20aaa26041edf26b925fb204d839e5b93122190191893a0299b2e1b589\": unexpected end of JSON input" 2025-12-12T16:25:15.144991094+00:00 stderr F E1212 16:25:15.144950 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:3b94ccfa422b8ba0014302a3cfc6916b69f0f5a9dfd757b6704049834d4ff0ae\": unexpected end of JSON input" 2025-12-12T16:25:15.147999543+00:00 stderr F E1212 16:25:15.147946 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:46a4e73ddb085d1f36b39903ea13ba307bb958789707e9afde048764b3e3cae2\": unexpected end of JSON input" 2025-12-12T16:25:15.150839007+00:00 stderr F E1212 16:25:15.150790 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:bcb0e15cc9d2d3449f0b1acac7b0275035a80e1b3b835391b5464f7bf4553b89\": unexpected end of JSON input" 2025-12-12T16:25:15.157950054+00:00 stderr F I1212 16:25:15.157916 1 trace.go:236] Trace[159251919]: "Create" accept:application/json, */*,audit-id:3c629b65-cbd3-4b87-95d2-55e661c55396,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:25:11.987) (total time: 3170ms): 2025-12-12T16:25:15.157950054+00:00 stderr F Trace[159251919]: [3.170769004s] [3.170769004s] END 2025-12-12T16:25:15.924752450+00:00 stderr F E1212 16:25:15.924594 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:e851770fd181ef49193111f7afcdbf872ad23f3a8234e0e07a742c4ca2882c3d\": unexpected end of JSON input" 2025-12-12T16:25:15.928109538+00:00 stderr F E1212 16:25:15.928067 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:ce5c0becf829aca80734b4caf3ab6b76cb00f7d78f4e39fb136636a764dea7f6\": unexpected end of JSON input" 2025-12-12T16:25:15.931274971+00:00 stderr F E1212 16:25:15.931238 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for 
\"sha256:3f00540ce2a3a01d2a147a7d73825fe78697be213a050bd09edae36266d6bc40\": unexpected end of JSON input" 2025-12-12T16:25:15.933797938+00:00 stderr F E1212 16:25:15.933762 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:868224c3b7c309b9e04003af70a5563af8e4c662f0c53f2a7606e0573c9fad85\": unexpected end of JSON input" 2025-12-12T16:25:15.936129029+00:00 stderr F E1212 16:25:15.936089 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:0669a28577b41bb05c67492ef18a1d48a299ac54d1500df8f9f8f760ce4be24b\": unexpected end of JSON input" 2025-12-12T16:25:15.939538498+00:00 stderr F E1212 16:25:15.939501 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:9036a59a8275f9c205ef5fc674f38c0495275a1a7912029f9a784406bb00b1f5\": unexpected end of JSON input" 2025-12-12T16:25:15.942320832+00:00 stderr F E1212 16:25:15.942277 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:425e2c7c355bea32be238aa2c7bdd363b6ab3709412bdf095efe28a8f6c07d84\": unexpected end of JSON input" 2025-12-12T16:25:15.944978381+00:00 stderr F E1212 16:25:15.944951 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:67fee4b64b269f5666a1051d806635b675903ef56d07b7cc019d3d59ff1aa97c\": unexpected end of JSON input" 2025-12-12T16:25:15.947773265+00:00 stderr F E1212 16:25:15.947711 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:b85cbdbc289752c91ac7f468cffef916fe9ab01865f3e32cfcc44ccdd633b168\": unexpected end of JSON input" 2025-12-12T16:25:15.950667871+00:00 stderr F E1212 16:25:15.950610 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:663eb81388ae8f824e7920c272f6d2e2274cf6c140d61416607261cdce9d50e2\": unexpected end of JSON input" 2025-12-12T16:25:15.958073565+00:00 stderr F I1212 16:25:15.958006 1 trace.go:236] Trace[491248223]: "Create" accept:application/json, */*,audit-id:283ba4ed-38ff-485a-a3e0-877dab422ce5,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:25:11.985) (total time: 3972ms): 2025-12-12T16:25:15.958073565+00:00 stderr F Trace[491248223]: [3.972116347s] [3.972116347s] END 2025-12-12T16:25:16.184570403+00:00 stderr F E1212 16:25:16.184526 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:5fb3543c0d42146f0506c1ea4d09575131da6a2f27885729b7cfce13a0fa90e3\": unexpected end of JSON input" 2025-12-12T16:25:16.188616609+00:00 stderr F E1212 16:25:16.188587 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:1d68b58a73f4cf15fcd886ab39fddf18be923b52b24cb8ec3ab1da2d3e9bd5f6\": unexpected end of JSON input" 2025-12-12T16:25:16.191990808+00:00 stderr F E1212 16:25:16.191944 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:7de877b0e748cdb47cb702400f3ddaa3c3744a022887e2213c2bb27775ab4b25\": unexpected end of JSON input" 2025-12-12T16:25:16.195472759+00:00 stderr F E1212 16:25:16.195420 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:af9c08644ca057d83ef4b7d8de1489f01c5a52ff8670133b8a09162831b7fb34\": unexpected end of JSON input" 2025-12-12T16:25:16.198580791+00:00 stderr F 
E1212 16:25:16.198526 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:b053401886c06581d3c296855525cc13e0613100a596ed007bb69d5f8e972346\": unexpected end of JSON input" 2025-12-12T16:25:16.202033081+00:00 stderr F E1212 16:25:16.202002 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:61555b923dabe4ff734279ed1bdb9eb6d450c760e1cc04463cf88608ac8d1338\": unexpected end of JSON input" 2025-12-12T16:25:16.205372079+00:00 stderr F E1212 16:25:16.205299 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:9ab26cb4005e9b60fd6349950957bbd0120efba216036da53c547c6f1c9e5e7f\": unexpected end of JSON input" 2025-12-12T16:25:16.208600444+00:00 stderr F E1212 16:25:16.208558 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:2254dc2f421f496b504aafbbd8ea37e660652c4b6b4f9a0681664b10873be7fe\": unexpected end of JSON input" 2025-12-12T16:25:16.211470999+00:00 stderr F E1212 16:25:16.211433 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:e4b1599ba6e88f6df7c4e67d6397371d61b6829d926411184e9855e71e840b8c\": unexpected end of JSON input" 2025-12-12T16:25:16.218510104+00:00 stderr F I1212 16:25:16.218445 1 trace.go:236] Trace[130000193]: "Create" accept:application/json, */*,audit-id:4fd073a7-c174-4245-b1f3-9076cadc0816,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:25:12.662) (total time: 3555ms): 2025-12-12T16:25:16.218510104+00:00 stderr F Trace[130000193]: [3.55560413s] [3.55560413s] END 2025-12-12T16:25:16.351630550+00:00 stderr F E1212 16:25:16.351553 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:431753c8a6a8541fdc0edd3385b2c765925d244fdd2347d2baa61303789696be\": unexpected end of JSON input" 2025-12-12T16:25:16.355808520+00:00 stderr F E1212 16:25:16.355742 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:64acf3403b5c2c85f7a28f326c63f1312b568db059c66d90b34e3c59fde3a74b\": unexpected end of JSON input" 2025-12-12T16:25:16.359496796+00:00 stderr F E1212 16:25:16.359427 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:74051f86b00fb102e34276f03a310c16bc57b9c2a001a56ba66359e15ee48ba6\": unexpected end of JSON input" 2025-12-12T16:25:16.362379132+00:00 stderr F E1212 16:25:16.362335 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:33d4dff40514e91d86b42e90b24b09a5ca770d9f67657c936363d348cd33d188\": unexpected end of JSON input" 2025-12-12T16:25:16.365775881+00:00 stderr F E1212 16:25:16.365731 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:7711108ef60ef6f0536bfa26914af2afaf6455ce6e4c4abd391e31a2d95d0178\": unexpected end of JSON input" 2025-12-12T16:25:16.369728435+00:00 stderr F E1212 16:25:16.369674 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:b163564be6ed5b80816e61a4ee31e42f42dbbf345253daac10ecc9fadf31baa3\": unexpected end of JSON input" 2025-12-12T16:25:16.373675989+00:00 stderr F E1212 16:25:16.373359 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for 
\"sha256:920ff7e5efc777cb523669c425fd7b553176c9f4b34a85ceddcb548c2ac5f78a\": unexpected end of JSON input" 2025-12-12T16:25:16.376905354+00:00 stderr F E1212 16:25:16.376857 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:32a5e806bd88b40568d46864fd313541498e38fabfc5afb5f3bdfe052c4b4c5f\": unexpected end of JSON input" 2025-12-12T16:25:16.379965504+00:00 stderr F E1212 16:25:16.379914 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:229ee7b88c5f700c95d557d0b37b8f78dbb6b125b188c3bf050cfdb32aec7962\": unexpected end of JSON input" 2025-12-12T16:25:16.382711256+00:00 stderr F E1212 16:25:16.382680 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:78bf175cecb15524b2ef81bff8cc11acdf7c0f74c08417f0e443483912e4878a\": unexpected end of JSON input" 2025-12-12T16:25:16.389467194+00:00 stderr F I1212 16:25:16.389422 1 trace.go:236] Trace[1850590690]: "Create" accept:application/json, */*,audit-id:c603824d-c98b-40fe-b890-ff0e5a64d415,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:25:12.140) (total time: 4248ms): 2025-12-12T16:25:16.389467194+00:00 stderr F Trace[1850590690]: [4.248825714s] [4.248825714s] END 2025-12-12T16:25:16.591435117+00:00 stderr F E1212 16:25:16.591357 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:7bcc365e0ba823ed020ee6e6c3e0c23be5871c8dea3f7f1a65029002c83f9e55\": unexpected end of JSON input" 2025-12-12T16:25:16.595059432+00:00 stderr F E1212 16:25:16.595007 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:6a9e81b2eea2f32f2750909b6aa037c2c2e68be3bc9daf3c7a3163c9e1df379f\": unexpected end of JSON input" 2025-12-12T16:25:16.598058321+00:00 stderr F E1212 16:25:16.598005 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:00cf28cf9a6c427962f922855a6cc32692c760764ce2ce7411cf605dd510367f\": unexpected end of JSON input" 2025-12-12T16:25:16.601311487+00:00 stderr F E1212 16:25:16.601257 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:2cee344e4cfcfdc9a117fd82baa6f2d5daa7eeed450e02cd5d5554b424410439\": unexpected end of JSON input" 2025-12-12T16:25:16.604345256+00:00 stderr F E1212 16:25:16.604305 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:aa02a20c2edf83a009746b45a0fd2e0b4a2b224fdef1581046f6afef38c0bee2\": unexpected end of JSON input" 2025-12-12T16:25:16.607366646+00:00 stderr F E1212 16:25:16.607316 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:59b88fb0c467ca43bf3c1af6bfd8777577638dd8079f995cdb20b6f4e20ce0b6\": unexpected end of JSON input" 2025-12-12T16:25:16.610259862+00:00 stderr F E1212 16:25:16.610218 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:603d10af5e3476add5b5726fdef893033869ae89824ee43949a46c9f004ef65d\": unexpected end of JSON input" 2025-12-12T16:25:16.613324872+00:00 stderr F E1212 16:25:16.613130 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:eed7e29bf583e4f01e170bb9f22f2a78098bf15243269b670c307caa6813b783\": unexpected end of JSON input" 2025-12-12T16:25:16.616002912+00:00 stderr 
F E1212 16:25:16.615954 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:b80a514f136f738736d6bf654dc3258c13b04a819e001dd8a39ef2f7475fd9d9\": unexpected end of JSON input" 2025-12-12T16:25:16.619172696+00:00 stderr F E1212 16:25:16.619107 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:7ef75cdbc399425105060771cb8e700198cc0bddcfb60bf4311bf87ea62fd440\": unexpected end of JSON input" 2025-12-12T16:25:16.627387451+00:00 stderr F I1212 16:25:16.627267 1 trace.go:236] Trace[1145593826]: "Create" accept:application/json, */*,audit-id:9b2cf014-d42e-409b-bb14-bba307272191,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:25:12.739) (total time: 3887ms): 2025-12-12T16:25:16.627387451+00:00 stderr F Trace[1145593826]: [3.88730724s] [3.88730724s] END 2025-12-12T16:25:16.884122933+00:00 stderr F E1212 16:25:16.884031 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:a8e4081414cfa644e212ded354dfee12706e63afb19a27c0c0ae2c8c64e56ca6\": unexpected end of JSON input" 2025-12-12T16:25:16.887646736+00:00 stderr F E1212 16:25:16.887606 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:38c7e4f7dea04bb536f05d78e0107ebc2a3607cf030db7f5c249f13ce1f52d59\": unexpected end of JSON input" 2025-12-12T16:25:16.889040122+00:00 stderr F E1212 16:25:16.888995 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:496e23be70520863bce6f7cdc54d280aca2c133d06e992795c4dcbde1a9dd1ab\": unexpected end of JSON input" 2025-12-12T16:25:16.891634300+00:00 stderr F E1212 16:25:16.891529 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:d2f17aaf2f871fda5620466d69ac67b9c355c0bae5912a1dbef9a51ca8813e50\": unexpected end of JSON input" 2025-12-12T16:25:16.893319125+00:00 stderr F E1212 16:25:16.893254 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:022488b1bf697b7dd8c393171a3247bef4ea545a9ab828501e72168f2aac9415\": unexpected end of JSON input" 2025-12-12T16:25:16.895338288+00:00 stderr F E1212 16:25:16.895311 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:e4be2fb7216f432632819b2441df42a5a0063f7f473c2923ca6912b2d64b7494\": unexpected end of JSON input" 2025-12-12T16:25:16.896135549+00:00 stderr F E1212 16:25:16.896117 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:7164a06e9ba98a3ce9991bd7019512488efe30895175bb463e255f00eb9421fd\": unexpected end of JSON input" 2025-12-12T16:25:16.899256230+00:00 stderr F E1212 16:25:16.899240 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:14de89e89efc97aee3b50141108b7833708c3a93ad90bf89940025ab5267ba86\": unexpected end of JSON input" 2025-12-12T16:25:16.900310878+00:00 stderr F E1212 16:25:16.900294 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:81684e422367a075ac113e69ea11d8721416ce4bedea035e25313c5e726fd7d1\": unexpected end of JSON input" 2025-12-12T16:25:16.901946251+00:00 stderr F E1212 16:25:16.901926 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for 
\"sha256:f438230ed2c2e609d0d7dbc430ccf1e9bad2660e6410187fd6e9b14a2952e70b\": unexpected end of JSON input" 2025-12-12T16:25:16.904223541+00:00 stderr F E1212 16:25:16.904171 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:b838fa18dab68d43a19f0c329c3643850691b8f9915823c4f8d25685eb293a11\": unexpected end of JSON input" 2025-12-12T16:25:16.905337700+00:00 stderr F E1212 16:25:16.905318 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:f953734d89252219c3dcd8f703ba8b58c9c8a0f5dfa9425c9e56ec0834f7d288\": unexpected end of JSON input" 2025-12-12T16:25:16.907340093+00:00 stderr F E1212 16:25:16.907320 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:8a5b580b76c2fc2dfe55d13bb0dd53e8c71d718fc1a3773264b1710f49060222\": unexpected end of JSON input" 2025-12-12T16:25:16.908733959+00:00 stderr F E1212 16:25:16.908715 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:e4223a60b887ec24cad7dd70fdb6c3f2c107fb7118331be6f45d626219cfe7f3\": unexpected end of JSON input" 2025-12-12T16:25:16.910853085+00:00 stderr F E1212 16:25:16.910826 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:2f59ad75b66a3169b0b03032afb09aa3cfa531dbd844e3d3a562246e7d09c282\": unexpected end of JSON input" 2025-12-12T16:25:16.914567343+00:00 stderr F E1212 16:25:16.914542 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:9d759db3bb650e5367216ce261779c5a58693fc7ae10f21cd264011562bd746d\": unexpected end of JSON input" 2025-12-12T16:25:16.918596028+00:00 stderr F I1212 16:25:16.916956 1 trace.go:236] Trace[493748285]: "Create" accept:application/json, */*,audit-id:aafb099b-ca55-4bac-854d-df5f7ad0a3a9,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:25:13.578) (total time: 3338ms): 2025-12-12T16:25:16.918596028+00:00 stderr F Trace[493748285]: [3.338270162s] [3.338270162s] END 2025-12-12T16:25:16.920329204+00:00 stderr F E1212 16:25:16.920302 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:bf5e518dba2aa935829d9db88d933a264e54ffbfa80041b41287fd70c1c35ba5\": unexpected end of JSON input" 2025-12-12T16:25:16.926040084+00:00 stderr F E1212 16:25:16.925937 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:f7ca08a8dda3610fcc10cc1fe5f5d0b9f8fc7a283b01975d0fe2c1e77ae06193\": unexpected end of JSON input" 2025-12-12T16:25:16.943157513+00:00 stderr F I1212 16:25:16.942873 1 trace.go:236] Trace[660678704]: "Create" accept:application/json, */*,audit-id:ed23a65f-cc51-4665-99a3-2dd76b79c353,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:25:12.980) (total time: 3962ms): 2025-12-12T16:25:16.943157513+00:00 stderr F Trace[660678704]: [3.962052783s] [3.962052783s] END 2025-12-12T16:25:18.356851397+00:00 stderr F E1212 
16:25:18.356761 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:2ae058ee7239213fb495491112be8cc7e6d6661864fd399deb27f23f50f05eb4\": unexpected end of JSON input" 2025-12-12T16:25:18.360858353+00:00 stderr F E1212 16:25:18.360796 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:db3f5192237bfdab2355304f17916e09bc29d6d529fdec48b09a08290ae35905\": unexpected end of JSON input" 2025-12-12T16:25:18.364850967+00:00 stderr F E1212 16:25:18.364803 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:b4cb02a4e7cb915b6890d592ed5b4ab67bcef19bf855029c95231f51dd071352\": unexpected end of JSON input" 2025-12-12T16:25:18.368794561+00:00 stderr F E1212 16:25:18.368750 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:fa9556628c15b8eb22cafccb737b3fbcecfd681a5c2cfea3302dd771c644a7db\": unexpected end of JSON input" 2025-12-12T16:25:18.373956257+00:00 stderr F E1212 16:25:18.373922 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:a0a6db2dcdb3d49e36bd0665e3e00f242a690391700e42cab14e86b154152bfd\": unexpected end of JSON input" 2025-12-12T16:25:18.378218668+00:00 stderr F E1212 16:25:18.378163 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:e90172ca0f09acf5db1721bd7df304dffd184e00145072132cb71c7f0797adf6\": unexpected end of JSON input" 2025-12-12T16:25:18.382657295+00:00 stderr F E1212 16:25:18.382599 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:421d1f6a10e263677b7687ccea8e4a59058e2e3c80585505eec9a9c2e6f9f40e\": unexpected end of JSON input" 2025-12-12T16:25:18.385881350+00:00 stderr F E1212 16:25:18.385829 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:6c009f430da02bdcff618a7dcd085d7d22547263eeebfb8d6377a4cf6f58769d\": unexpected end of JSON input" 2025-12-12T16:25:18.390660095+00:00 stderr F E1212 16:25:18.390593 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:dc84fed0f6f40975a2277c126438c8aa15c70eeac75981dbaa4b6b853eff61a6\": unexpected end of JSON input" 2025-12-12T16:25:18.394468465+00:00 stderr F E1212 16:25:18.394393 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:78af15475eac13d2ff439b33a9c3bdd39147858a824c420e8042fd5f35adce15\": unexpected end of JSON input" 2025-12-12T16:25:18.398375298+00:00 stderr F E1212 16:25:18.398310 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:06bbbf9272d5c5161f444388593e9bd8db793d8a2d95a50b429b3c0301fafcdd\": unexpected end of JSON input" 2025-12-12T16:25:18.401348486+00:00 stderr F E1212 16:25:18.401298 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:caba895933209aa9a4f3121f9ec8e5e8013398ab4f72bd3ff255227aad8d2c3e\": unexpected end of JSON input" 2025-12-12T16:25:18.404791646+00:00 stderr F E1212 16:25:18.404720 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:dbe9905fe2b20ed30b0e2d64543016fa9c145eeb5a678f720ba9d2055f0c9f88\": unexpected end of JSON input" 2025-12-12T16:25:18.415975820+00:00 stderr F I1212 16:25:18.414419 1 trace.go:236] Trace[850564133]: "Create" accept:application/json, 
*/*,audit-id:1ad17afa-28e5-4e41-8845-dbcd40553cdf,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:25:12.703) (total time: 5711ms): 2025-12-12T16:25:18.415975820+00:00 stderr F Trace[850564133]: [5.711255777s] [5.711255777s] END 2025-12-12T16:28:03.107015101+00:00 stderr F E1212 16:28:03.106921 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:7d0f3dfa01d60f720089fa3a7adf5ce417c12be54872c5973a267e029ba3561b\": unexpected end of JSON input" 2025-12-12T16:28:03.113825384+00:00 stderr F I1212 16:28:03.113298 1 trace.go:236] Trace[167663692]: "Create" accept:application/json, */*,audit-id:944dcae0-7b8c-4f73-85ca-56e9a77b06f7,client:38.102.83.97,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:service-telemetry,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/service-telemetry/imagestreamimports,user-agent:oc/4.20.0 (linux/amd64) kubernetes/0963a01,verb:POST (12-Dec-2025 16:28:02.296) (total time: 816ms): 2025-12-12T16:28:03.113825384+00:00 stderr F Trace[167663692]: [816.970267ms] [816.970267ms] END 2025-12-12T16:28:04.342219253+00:00 stderr F E1212 16:28:04.342020 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:919dbd673de74ca9084a6e2a8e3d3bc6e703857821cde1c1849f1863b2f11fdc\": unexpected end of JSON input" 2025-12-12T16:28:04.348460121+00:00 stderr F I1212 16:28:04.348420 1 trace.go:236] Trace[715784407]: "Create" accept:application/json, */*,audit-id:c5143c74-d986-47aa-bf13-e1121d94abe8,client:38.102.83.97,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:service-telemetry,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/service-telemetry/imagestreamimports,user-agent:oc/4.20.0 (linux/amd64) kubernetes/0963a01,verb:POST (12-Dec-2025 16:28:03.845) (total time: 502ms): 2025-12-12T16:28:04.348460121+00:00 stderr F Trace[715784407]: [502.760054ms] [502.760054ms] END 2025-12-12T16:28:11.412248505+00:00 stderr F E1212 16:28:11.411862 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:3f7a5a6e548e23a777e21f467552523aae63a157da293498ac1921c3be8c9f8a\": unexpected end of JSON input" 2025-12-12T16:28:11.417629451+00:00 stderr F I1212 16:28:11.417445 1 trace.go:236] Trace[454935207]: "Create" accept:application/json, */*,audit-id:fa21660c-dae2-4369-a8fd-c950062f73f1,client:38.102.83.97,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:service-telemetry,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/service-telemetry/imagestreamimports,user-agent:oc/4.20.0 (linux/amd64) kubernetes/0963a01,verb:POST (12-Dec-2025 16:28:10.734) (total time: 682ms): 2025-12-12T16:28:11.417629451+00:00 stderr F Trace[454935207]: [682.545964ms] [682.545964ms] END 2025-12-12T16:28:16.370727567+00:00 stderr F I1212 16:28:16.370613 1 controller.go:667] quota admission added evaluator for: buildconfigs.build.openshift.io 2025-12-12T16:28:16.370879631+00:00 stderr F I1212 16:28:16.370845 1 controller.go:667] quota admission added evaluator for: 
buildconfigs.build.openshift.io 2025-12-12T16:28:16.378799412+00:00 stderr F I1212 16:28:16.378555 1 trace.go:236] Trace[625167422]: "Create" accept:application/json,audit-id:f7b61516-0304-4a5d-8538-a79e0e1489c9,client:38.102.83.97,api-group:build.openshift.io,api-version:v1,name:,subresource:,namespace:service-telemetry,protocol:HTTP/2.0,resource:buildconfigs,scope:resource,url:/apis/build.openshift.io/v1/namespaces/service-telemetry/buildconfigs,user-agent:OpenAPI-Generator/31.0.0/python,verb:POST (12-Dec-2025 16:28:15.746) (total time: 631ms): 2025-12-12T16:28:16.378799412+00:00 stderr F Trace[625167422]: [631.519703ms] [631.519703ms] END 2025-12-12T16:28:18.052512100+00:00 stderr F I1212 16:28:18.052429 1 controller.go:667] quota admission added evaluator for: builds.build.openshift.io 2025-12-12T16:28:18.052778587+00:00 stderr F I1212 16:28:18.052727 1 controller.go:667] quota admission added evaluator for: builds.build.openshift.io 2025-12-12T16:28:24.482387402+00:00 stderr F W1212 16:28:24.482277 1 rest.go:320] error streaming binary content with build pod service-telemetry/service-telemetry-framework-index-1-build: &errors.StatusError{ErrStatus:v1.Status{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ListMeta:v1.ListMeta{SelfLink:"", ResourceVersion:"", Continue:"", RemainingItemCount:(*int64)(nil)}, Status:"Failure", Message:"error dialing backend: remote error: tls: internal error", Reason:"", Details:(*v1.StatusDetails)(nil), Code:500}} 2025-12-12T16:28:49.674399750+00:00 stderr F W1212 16:28:49.674312 1 rest.go:320] error streaming binary content with build pod service-telemetry/service-telemetry-framework-index-2-build: &errors.StatusError{ErrStatus:v1.Status{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ListMeta:v1.ListMeta{SelfLink:"", ResourceVersion:"", Continue:"", RemainingItemCount:(*int64)(nil)}, Status:"Failure", Message:"error dialing backend: remote error: tls: internal error", Reason:"", Details:(*v1.StatusDetails)(nil), Code:500}} 2025-12-12T16:29:05.801293561+00:00 stderr F W1212 16:29:05.801142 1 rest.go:320] error streaming binary content with build pod service-telemetry/service-telemetry-framework-index-3-build: &errors.StatusError{ErrStatus:v1.Status{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ListMeta:v1.ListMeta{SelfLink:"", ResourceVersion:"", Continue:"", RemainingItemCount:(*int64)(nil)}, Status:"Failure", Message:"error dialing backend: remote error: tls: internal error", Reason:"", Details:(*v1.StatusDetails)(nil), Code:500}} 2025-12-12T16:29:20.975495751+00:00 stderr F W1212 16:29:20.975371 1 rest.go:320] error streaming binary content with build pod service-telemetry/service-telemetry-framework-index-4-build: &errors.StatusError{ErrStatus:v1.Status{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ListMeta:v1.ListMeta{SelfLink:"", ResourceVersion:"", Continue:"", RemainingItemCount:(*int64)(nil)}, Status:"Failure", Message:"error dialing backend: remote error: tls: internal error", Reason:"", Details:(*v1.StatusDetails)(nil), Code:500}} 2025-12-12T16:38:37.963584518+00:00 stderr F E1212 16:38:37.962682 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:52f9b4df3f7833876ee502a6bff2539491db07e060b213b6a0a8fda0c4a881c1\": unexpected end of JSON input" 2025-12-12T16:38:37.971881037+00:00 stderr F I1212 16:38:37.971826 1 trace.go:236] Trace[1329758252]: "Create" accept:application/json, 
*/*,audit-id:008ef911-ada9-4253-ab60-74a8a951d163,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:38:37.334) (total time: 637ms): 2025-12-12T16:38:37.971881037+00:00 stderr F Trace[1329758252]: [637.352174ms] [637.352174ms] END 2025-12-12T16:42:22.995901534+00:00 stderr F E1212 16:42:22.995821 1 strategy.go:60] "Unhandled Error" err="unable to parse manifest for \"sha256:3bc55cb1bafdd281a784ec7950c8e95914079522f152f642e8172869e83b4585\": unexpected end of JSON input" 2025-12-12T16:42:23.002605722+00:00 stderr F I1212 16:42:23.002547 1 trace.go:236] Trace[37625764]: "Create" accept:application/json, */*,audit-id:e2b03711-06ec-4519-92a8-1589175471d2,client:10.217.0.69,api-group:image.openshift.io,api-version:v1,name:,subresource:,namespace:openshift,protocol:HTTP/2.0,resource:imagestreamimports,scope:resource,url:/apis/image.openshift.io/v1/namespaces/openshift/imagestreamimports,user-agent:openshift-controller-manager/v0.0.0 (linux/amd64) kubernetes/$Format/system:serviceaccount:openshift-infra:image-import-controller,verb:POST (12-Dec-2025 16:42:22.334) (total time: 668ms): 2025-12-12T16:42:23.002605722+00:00 stderr F Trace[37625764]: [668.24243ms] [668.24243ms] END
[tar archive entries: home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/openshift-apiserver-check-endpoints/ (directory) and openshift-apiserver-check-endpoints/0.log, whose contents follow]
2025-12-12T16:16:49.660047917+00:00 stderr F W1212 16:16:49.659383 1 cmd.go:257] Using insecure, self-signed certificates 2025-12-12T16:16:49.660564400+00:00 stderr F I1212 16:16:49.660452 1 crypto.go:594] Generating new CA for check-endpoints-signer@1765556209 cert, and key in /tmp/serving-cert-3469422996/serving-signer.crt, /tmp/serving-cert-3469422996/serving-signer.key 2025-12-12T16:16:49.660564400+00:00 stderr F Validity period of the certificate for "check-endpoints-signer@1765556209" is unset, resetting to 43800h0m0s!
2025-12-12T16:16:50.104307854+00:00 stderr F I1212 16:16:50.103166 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:50.104307854+00:00 stderr F I1212 16:16:50.103469 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:50.104307854+00:00 stderr F I1212 16:16:50.103475 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:50.104307854+00:00 stderr F I1212 16:16:50.103478 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:50.104307854+00:00 stderr F I1212 16:16:50.103482 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:50.104307854+00:00 stderr F I1212 16:16:50.104289 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:50.134158232+00:00 stderr F I1212 16:16:50.134082 1 builder.go:304] check-endpoints version v0.0.0-unknown-c3d9642-c3d9642 2025-12-12T16:16:50.135920846+00:00 stderr F I1212 16:16:50.135568 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/tmp/serving-cert-3469422996/tls.crt::/tmp/serving-cert-3469422996/tls.key" 2025-12-12T16:16:50.928759192+00:00 stderr F I1212 16:16:50.927972 1 requestheader_controller.go:255] Loaded a new request header values for RequestHeaderAuthRequestController 2025-12-12T16:16:50.931669153+00:00 stderr F I1212 16:16:50.930233 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400 2025-12-12T16:16:50.931669153+00:00 stderr F I1212 16:16:50.930250 1 maxinflight.go:145] "Initialized mutatingChan" len=200 2025-12-12T16:16:50.931669153+00:00 stderr F I1212 16:16:50.930275 1 maxinflight.go:116] "Set denominator for readonly requests" limit=400 2025-12-12T16:16:50.931669153+00:00 stderr F I1212 16:16:50.930280 1 maxinflight.go:120] "Set denominator for mutating requests" limit=200 2025-12-12T16:16:50.934319998+00:00 stderr F I1212 16:16:50.934235 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:50.934319998+00:00 stderr F W1212 16:16:50.934254 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:50.934319998+00:00 stderr F W1212 16:16:50.934258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:50.934319998+00:00 stderr F W1212 16:16:50.934264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:50.934319998+00:00 stderr F W1212 16:16:50.934266 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:50.934319998+00:00 stderr F W1212 16:16:50.934269 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:50.934319998+00:00 stderr F W1212 16:16:50.934271 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 
2025-12-12T16:16:50.934841501+00:00 stderr F I1212 16:16:50.934425 1 genericapiserver.go:546] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:16:50.938541621+00:00 stderr F I1212 16:16:50.938287 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-3469422996/tls.crt::/tmp/serving-cert-3469422996/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"check-endpoints-signer@1765556209\" (2025-12-12 16:16:49 +0000 UTC to 2025-12-12 16:16:50 +0000 UTC (now=2025-12-12 16:16:50.938265424 +0000 UTC))" 2025-12-12T16:16:50.938541621+00:00 stderr F I1212 16:16:50.938497 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556210\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556210\" (2025-12-12 15:16:50 +0000 UTC to 2028-12-12 15:16:50 +0000 UTC (now=2025-12-12 16:16:50.93847904 +0000 UTC))" 2025-12-12T16:16:50.938541621+00:00 stderr F I1212 16:16:50.938511 1 secure_serving.go:211] Serving securely on [::]:17698 2025-12-12T16:16:50.938603943+00:00 stderr F I1212 16:16:50.938541 1 genericapiserver.go:696] [graceful-termination] waiting for shutdown to be initiated 2025-12-12T16:16:50.938603943+00:00 stderr F I1212 16:16:50.938557 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:50.938603943+00:00 stderr F I1212 16:16:50.938581 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:50.938614813+00:00 stderr F I1212 16:16:50.938604 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/tmp/serving-cert-3469422996/tls.crt::/tmp/serving-cert-3469422996/tls.key" 2025-12-12T16:16:50.939330310+00:00 stderr F I1212 16:16:50.938750 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:50.939330310+00:00 stderr F I1212 16:16:50.938852 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:50.939330310+00:00 stderr F I1212 16:16:50.938862 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:50.939330310+00:00 stderr F I1212 16:16:50.938874 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:50.939330310+00:00 stderr F I1212 16:16:50.938885 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:50.953272651+00:00 stderr F I1212 16:16:50.953139 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:50.954388828+00:00 stderr F I1212 16:16:50.953952 1 base_controller.go:76] Waiting for caches to sync for CheckEndpointsTimeToStart 2025-12-12T16:16:50.954388828+00:00 stderr F I1212 16:16:50.954199 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:50.955008923+00:00 stderr F I1212 16:16:50.954762 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" 
reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:51.039271830+00:00 stderr F I1212 16:16:51.038760 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:51.040124591+00:00 stderr F I1212 16:16:51.040068 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:51.040210883+00:00 stderr F I1212 16:16:51.040170 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:51.040497740+00:00 stderr F I1212 16:16:51.040447 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:51.040414598 +0000 UTC))" 2025-12-12T16:16:51.040724746+00:00 stderr F I1212 16:16:51.040704 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-3469422996/tls.crt::/tmp/serving-cert-3469422996/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"check-endpoints-signer@1765556209\" (2025-12-12 16:16:49 +0000 UTC to 2025-12-12 16:16:50 +0000 UTC (now=2025-12-12 16:16:51.040685805 +0000 UTC))" 2025-12-12T16:16:51.040933571+00:00 stderr F I1212 16:16:51.040908 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556210\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556210\" (2025-12-12 15:16:50 +0000 UTC to 2028-12-12 15:16:50 +0000 UTC (now=2025-12-12 16:16:51.04089032 +0000 UTC))" 2025-12-12T16:16:51.041264719+00:00 stderr F I1212 16:16:51.041207 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:51.041145786 +0000 UTC))" 2025-12-12T16:16:51.041264719+00:00 stderr F I1212 16:16:51.041241 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:51.041227488 +0000 UTC))" 2025-12-12T16:16:51.041264719+00:00 stderr F I1212 16:16:51.041258 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:51.041247549 +0000 UTC))" 2025-12-12T16:16:51.041367902+00:00 stderr F I1212 16:16:51.041287 1 tlsconfig.go:181] "Loaded client CA" index=3 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:51.041263599 +0000 UTC))" 2025-12-12T16:16:51.041367902+00:00 stderr F I1212 16:16:51.041309 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:51.04129793 +0000 UTC))" 2025-12-12T16:16:51.041367902+00:00 stderr F I1212 16:16:51.041346 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:51.041331281 +0000 UTC))" 2025-12-12T16:16:51.041381282+00:00 stderr F I1212 16:16:51.041373 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:51.041358071 +0000 UTC))" 2025-12-12T16:16:51.041407203+00:00 stderr F I1212 16:16:51.041391 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:51.041380112 +0000 UTC))" 2025-12-12T16:16:51.041672429+00:00 stderr F I1212 16:16:51.041634 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-3469422996/tls.crt::/tmp/serving-cert-3469422996/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"check-endpoints-signer@1765556209\" (2025-12-12 16:16:49 +0000 UTC to 2025-12-12 16:16:50 +0000 UTC (now=2025-12-12 16:16:51.041616678 +0000 UTC))" 2025-12-12T16:16:51.041932965+00:00 stderr F I1212 16:16:51.041861 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556210\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556210\" (2025-12-12 15:16:50 +0000 UTC to 2028-12-12 15:16:50 +0000 UTC (now=2025-12-12 16:16:51.041844463 +0000 UTC))" 2025-12-12T16:16:51.127572546+00:00 stderr F I1212 16:16:51.127460 1 reflector.go:430] "Caches populated" type="*v1.CustomResourceDefinition" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:51.155398846+00:00 stderr F I1212 16:16:51.154299 1 base_controller.go:82] Caches 
are synced for CheckEndpointsTimeToStart 2025-12-12T16:16:51.155398846+00:00 stderr F I1212 16:16:51.154365 1 base_controller.go:119] Starting #1 worker of CheckEndpointsTimeToStart controller ... 2025-12-12T16:16:51.155398846+00:00 stderr F I1212 16:16:51.154511 1 base_controller.go:76] Waiting for caches to sync for CheckEndpointsStop 2025-12-12T16:16:51.155398846+00:00 stderr F I1212 16:16:51.154519 1 base_controller.go:82] Caches are synced for CheckEndpointsStop 2025-12-12T16:16:51.155398846+00:00 stderr F I1212 16:16:51.154524 1 base_controller.go:119] Starting #1 worker of CheckEndpointsStop controller ... 2025-12-12T16:16:51.155398846+00:00 stderr F I1212 16:16:51.155248 1 base_controller.go:181] Shutting down CheckEndpointsTimeToStart ... 2025-12-12T16:16:51.155398846+00:00 stderr F I1212 16:16:51.155273 1 base_controller.go:123] Shutting down worker of CheckEndpointsTimeToStart controller ... 2025-12-12T16:16:51.155398846+00:00 stderr F I1212 16:16:51.155280 1 base_controller.go:113] All CheckEndpointsTimeToStart workers have been terminated 2025-12-12T16:16:51.155398846+00:00 stderr F I1212 16:16:51.155298 1 base_controller.go:76] Waiting for caches to sync for check-endpoints 2025-12-12T16:16:51.159800773+00:00 stderr F I1212 16:16:51.159706 1 reflector.go:430] "Caches populated" type="*v1alpha1.PodNetworkConnectivityCheck" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:51.160029069+00:00 stderr F I1212 16:16:51.159981 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:51.256037643+00:00 stderr F I1212 16:16:51.255943 1 base_controller.go:82] Caches are synced for check-endpoints 2025-12-12T16:16:51.256037643+00:00 stderr F I1212 16:16:51.255990 1 base_controller.go:119] Starting #1 worker of check-endpoints controller ... 
2025-12-12T16:16:55.911055151+00:00 stderr F I1212 16:16:55.910359 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.910305603 +0000 UTC))" 2025-12-12T16:16:55.911055151+00:00 stderr F I1212 16:16:55.911039 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.91101608 +0000 UTC))" 2025-12-12T16:16:55.911088262+00:00 stderr F I1212 16:16:55.911058 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.911047021 +0000 UTC))" 2025-12-12T16:16:55.911095982+00:00 stderr F I1212 16:16:55.911077 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.911064891 +0000 UTC))" 2025-12-12T16:16:55.911201425+00:00 stderr F I1212 16:16:55.911107 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.911094552 +0000 UTC))" 2025-12-12T16:16:55.911201425+00:00 stderr F I1212 16:16:55.911133 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.911121423 +0000 UTC))" 2025-12-12T16:16:55.911201425+00:00 stderr F I1212 16:16:55.911152 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.911140483 +0000 UTC))" 2025-12-12T16:16:55.911201425+00:00 stderr F I1212 16:16:55.911170 1 tlsconfig.go:181] "Loaded client CA" index=7 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.911157304 +0000 UTC))" 2025-12-12T16:16:55.911215435+00:00 stderr F I1212 16:16:55.911207 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.911194595 +0000 UTC))" 2025-12-12T16:16:55.911237846+00:00 stderr F I1212 16:16:55.911230 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.911218565 +0000 UTC))" 2025-12-12T16:16:55.915259234+00:00 stderr F I1212 16:16:55.911480 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-3469422996/tls.crt::/tmp/serving-cert-3469422996/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"check-endpoints-signer@1765556209\" (2025-12-12 16:16:49 +0000 UTC to 2025-12-12 16:16:50 +0000 UTC (now=2025-12-12 16:16:55.911453581 +0000 UTC))" 2025-12-12T16:16:55.915259234+00:00 stderr F I1212 16:16:55.911671 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556210\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556210\" (2025-12-12 15:16:50 +0000 UTC to 2028-12-12 15:16:50 +0000 UTC (now=2025-12-12 16:16:55.911654266 +0000 UTC))" 2025-12-12T16:17:46.317197232+00:00 stderr F I1212 16:17:46.316932 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.316872474 +0000 UTC))" 2025-12-12T16:17:46.317197232+00:00 stderr F I1212 16:17:46.317150 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.31713449 +0000 UTC))" 2025-12-12T16:17:46.317197232+00:00 stderr F I1212 16:17:46.317170 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 
2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.317158081 +0000 UTC))" 2025-12-12T16:17:46.317268273+00:00 stderr F I1212 16:17:46.317209 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.317194712 +0000 UTC))" 2025-12-12T16:17:46.317268273+00:00 stderr F I1212 16:17:46.317230 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.317216732 +0000 UTC))" 2025-12-12T16:17:46.317268273+00:00 stderr F I1212 16:17:46.317248 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.317235783 +0000 UTC))" 2025-12-12T16:17:46.317295504+00:00 stderr F I1212 16:17:46.317266 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.317254033 +0000 UTC))" 2025-12-12T16:17:46.317295504+00:00 stderr F I1212 16:17:46.317287 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.317272593 +0000 UTC))" 2025-12-12T16:17:46.317513669+00:00 stderr F I1212 16:17:46.317304 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.317292794 +0000 UTC))" 2025-12-12T16:17:46.317513669+00:00 stderr F I1212 16:17:46.317329 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.317318495 +0000 
UTC))" 2025-12-12T16:17:46.317513669+00:00 stderr F I1212 16:17:46.317348 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.317335835 +0000 UTC))" 2025-12-12T16:17:46.319570800+00:00 stderr F I1212 16:17:46.317572 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-3469422996/tls.crt::/tmp/serving-cert-3469422996/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"check-endpoints-signer@1765556209\" (2025-12-12 16:16:49 +0000 UTC to 2025-12-12 16:16:50 +0000 UTC (now=2025-12-12 16:17:46.31755503 +0000 UTC))" 2025-12-12T16:17:46.319570800+00:00 stderr F I1212 16:17:46.317751 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556210\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556210\" (2025-12-12 15:16:50 +0000 UTC to 2028-12-12 15:16:50 +0000 UTC (now=2025-12-12 16:17:46.317734545 +0000 UTC))" 2025-12-12T16:19:31.992760515+00:00 stderr F I1212 16:19:31.991874 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:38.657645747+00:00 stderr F I1212 16:19:38.657100 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:42.393025024+00:00 stderr F I1212 16:19:42.392310 1 reflector.go:430] "Caches populated" type="*v1.CustomResourceDefinition" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:10.624223959+00:00 stderr F I1212 16:20:10.623732 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:13.212151508+00:00 stderr F I1212 16:20:13.211574 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:21.275712416+00:00 stderr F I1212 16:20:21.274809 1 reflector.go:430] "Caches populated" type="*v1alpha1.PodNetworkConnectivityCheck" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" ././@LongLink0000644000000000000000000000027000000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/fix-audit-permissions/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_ap0000755000175000017500000000000015117043062033127 5ustar zuulzuul././@LongLink0000644000000000000000000000027500000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/fix-audit-permissions/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_ap0000644000175000017500000000000015117043043033116 0ustar zuulzuul././@LongLink0000644000000000000000000000021000000000000011574 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000755000175000017500000000000015117043044032767 5ustar zuulzuul././@LongLink0000644000000000000000000000022400000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-readyz/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000755000175000017500000000000015117043062032767 5ustar zuulzuul././@LongLink0000644000000000000000000000023100000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-readyz/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000644000175000017500000000036015117043044032770 0ustar zuulzuul2025-12-12T16:15:06.105011010+00:00 stderr F I1212 16:15:06.104703 1 readyz.go:175] Listening on 0.0.0.0:9980 2025-12-12T16:15:17.927459470+00:00 stderr F I1212 16:15:17.927290 1 etcdcli_pool.go:70] creating a new cached client ././@LongLink0000644000000000000000000000023500000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-ensure-env-vars/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000755000175000017500000000000015117043062032767 5ustar zuulzuul././@LongLink0000644000000000000000000000024200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-ensure-env-vars/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000644000175000017500000000000015117043044032757 0ustar zuulzuul././@LongLink0000644000000000000000000000022000000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcdctl/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000755000175000017500000000000015117043062032767 5ustar zuulzuul././@LongLink0000644000000000000000000000022500000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcdctl/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000644000175000017500000000000015117043044032757 0ustar zuulzuul././@LongLink0000644000000000000000000000022500000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-metrics/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000755000175000017500000000000015117043062032767 5ustar zuulzuul././@LongLink0000644000000000000000000000023200000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-metrics/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000644000175000017500000004305415117043044032777 0ustar zuulzuul2025-12-12T16:15:05.905851853+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.903989Z","caller":"etcdmain/grpc_proxy.go:237","msg":"gRPC proxy server TLS","tls-info":"cert = /etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-serving-metrics-crc.crt, key = /etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-serving-metrics-crc.key, client-cert=, client-key=, trusted-ca = /etc/kubernetes/static-pod-certs/configmaps/etcd-all-bundles/metrics-ca-bundle.crt, client-cert-auth = false, crl-file = "} 2025-12-12T16:15:05.907206407+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.907138Z","caller":"etcdmain/grpc_proxy.go:460","msg":"listening for gRPC proxy client requests","address":"127.0.0.1:9977"} 2025-12-12T16:15:05.907513313+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.907469Z","caller":"etcdmain/grpc_proxy.go:430","msg":"gRPC proxy client TLS","tls-info":"cert = /etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-peer-crc.crt, key = /etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-peer-crc.key, client-cert=, client-key=, trusted-ca = /etc/kubernetes/static-pod-certs/configmaps/etcd-all-bundles/server-ca-bundle.crt, client-cert-auth = false, crl-file = "} 2025-12-12T16:15:05.908106103+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.907547Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1] Channel created"} 2025-12-12T16:15:05.908359368+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.908324Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1] original dial target is: \"etcd-endpoints://0xc0000165a0/192.168.126.11:9978\""} 2025-12-12T16:15:05.908395929+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.908378Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1] parsed dial target is: {URL:{Scheme:etcd-endpoints Opaque: User: Host:0xc0000165a0 Path:/192.168.126.11:9978 RawPath: OmitHost:false ForceQuery:false RawQuery: Fragment: RawFragment:}}"} 2025-12-12T16:15:05.908402799+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.908393Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1] Channel authority set to \"192.168.126.11:9978\""} 2025-12-12T16:15:05.908791466+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.908753Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1] Resolver state updated: {\n \"Addresses\": [\n {\n \"Addr\": \"192.168.126.11:9978\",\n \"ServerName\": \"192.168.126.11:9978\",\n \"Attributes\": null,\n \"BalancerAttributes\": null,\n \"Metadata\": null\n }\n ],\n \"Endpoints\": [\n {\n \"Addresses\": [\n {\n \"Addr\": \"192.168.126.11:9978\",\n \"ServerName\": \"192.168.126.11:9978\",\n \"Attributes\": null,\n \"BalancerAttributes\": null,\n \"Metadata\": null\n }\n ],\n \"Attributes\": null\n }\n ],\n \"ServiceConfig\": {\n \"Config\": {\n \"Config\": null,\n \"LB\": \"round_robin\",\n \"Methods\": {}\n },\n \"Err\": null\n },\n \"Attributes\": null\n} (service config updated; resolver returned new addresses)"} 2025-12-12T16:15:05.908861997+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.908810Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1] Channel switches to new LB policy 
\"round_robin\""} 2025-12-12T16:15:05.908926358+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.908903Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: got new ClientConn state: {{[{Addr: \"192.168.126.11:9978\", ServerName: \"192.168.126.11:9978\", }] [{[{Addr: \"192.168.126.11:9978\", ServerName: \"192.168.126.11:9978\", }] }] 0xc000494840 } }"} 2025-12-12T16:15:05.908965699+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.908930Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel created"} 2025-12-12T16:15:05.908996879+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.908969Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[roundrobin] roundrobinPicker: Build called with info: {map[]}"} 2025-12-12T16:15:05.908996879+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.908990Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1] Channel Connectivity change to CONNECTING"} 2025-12-12T16:15:05.909128872+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.909030Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to CONNECTING"} 2025-12-12T16:15:05.909169712+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.909121Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel picks a new address \"192.168.126.11:9978\" to connect"} 2025-12-12T16:15:05.909305845+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.909216Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, CONNECTING"} 2025-12-12T16:15:05.909754383+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.909708Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] Creating new client transport to \"{Addr: \\\"192.168.126.11:9978\\\", ServerName: \\\"192.168.126.11:9978\\\", }\": connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:05.909784953+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:05.909753Z","caller":"zapgrpc/zapgrpc.go:191","msg":"[core] [Channel #1 SubChannel #2] grpc: addrConn.createTransport failed to connect to {Addr: \"192.168.126.11:9978\", ServerName: \"192.168.126.11:9978\", }. 
Err: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:05.909828324+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.909795Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to TRANSIENT_FAILURE, last error: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:05.909864165+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.909838Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, TRANSIENT_FAILURE"} 2025-12-12T16:15:05.909871995+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.909858Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1] Channel Connectivity change to TRANSIENT_FAILURE"} 2025-12-12T16:15:05.910547427+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.910507Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Server #3] Server created"} 2025-12-12T16:15:05.910672919+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.910641Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Server #3 ListenSocket #4] ListenSocket created"} 2025-12-12T16:15:05.911148077+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.911099Z","caller":"etcdmain/grpc_proxy.go:614","msg":"gRPC proxy listening for metrics","address":"https://0.0.0.0:9979"} 2025-12-12T16:15:05.911148077+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.911116Z","caller":"etcdmain/grpc_proxy.go:287","msg":"started gRPC proxy","address":"127.0.0.1:9977"} 2025-12-12T16:15:05.911148077+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.911124Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"} 2025-12-12T16:15:05.911148077+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.911130Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"} 2025-12-12T16:15:05.911427292+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:05.911393Z","caller":"etcdmain/grpc_proxy.go:277","msg":"gRPC proxy server metrics URL serving"} 2025-12-12T16:15:06.910971159+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:06.910843Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to IDLE, last error: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:06.910971159+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:06.910925Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, IDLE"} 2025-12-12T16:15:06.911270574+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:06.910999Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to CONNECTING"} 2025-12-12T16:15:06.911270574+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:06.911034Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel picks a new address \"192.168.126.11:9978\" to connect"} 2025-12-12T16:15:06.911270574+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:06.911174Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, CONNECTING"} 2025-12-12T16:15:06.911516079+00:00 stderr F 
{"level":"info","ts":"2025-12-12T16:15:06.911435Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] Creating new client transport to \"{Addr: \\\"192.168.126.11:9978\\\", ServerName: \\\"192.168.126.11:9978\\\", }\": connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:06.911546719+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:06.911520Z","caller":"zapgrpc/zapgrpc.go:191","msg":"[core] [Channel #1 SubChannel #2] grpc: addrConn.createTransport failed to connect to {Addr: \"192.168.126.11:9978\", ServerName: \"192.168.126.11:9978\", }. Err: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:06.911577780+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:06.911553Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to TRANSIENT_FAILURE, last error: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:06.911633041+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:06.911601Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, TRANSIENT_FAILURE"} 2025-12-12T16:15:08.824553931+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:08.823656Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to IDLE, last error: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:08.824553931+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:08.824501Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, IDLE"} 2025-12-12T16:15:08.824606633+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:08.824534Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to CONNECTING"} 2025-12-12T16:15:08.824606633+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:08.824563Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel picks a new address \"192.168.126.11:9978\" to connect"} 2025-12-12T16:15:08.824746356+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:08.824677Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, CONNECTING"} 2025-12-12T16:15:08.824818818+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:08.824779Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] Creating new client transport to \"{Addr: \\\"192.168.126.11:9978\\\", ServerName: \\\"192.168.126.11:9978\\\", }\": connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:08.824858919+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:08.824825Z","caller":"zapgrpc/zapgrpc.go:191","msg":"[core] [Channel #1 SubChannel #2] grpc: addrConn.createTransport failed to connect to {Addr: \"192.168.126.11:9978\", ServerName: \"192.168.126.11:9978\", }. 
Err: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:08.824858919+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:08.824847Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to TRANSIENT_FAILURE, last error: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:08.824887339+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:08.824865Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, TRANSIENT_FAILURE"} 2025-12-12T16:15:11.342370878+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:11.342258Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to IDLE, last error: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:11.342370878+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:11.342324Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, IDLE"} 2025-12-12T16:15:11.342418469+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:11.342352Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to CONNECTING"} 2025-12-12T16:15:11.342418469+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:11.342379Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel picks a new address \"192.168.126.11:9978\" to connect"} 2025-12-12T16:15:11.342519931+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:11.342468Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, CONNECTING"} 2025-12-12T16:15:11.342734487+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:11.342682Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] Creating new client transport to \"{Addr: \\\"192.168.126.11:9978\\\", ServerName: \\\"192.168.126.11:9978\\\", }\": connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:11.342743517+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:11.342728Z","caller":"zapgrpc/zapgrpc.go:191","msg":"[core] [Channel #1 SubChannel #2] grpc: addrConn.createTransport failed to connect to {Addr: \"192.168.126.11:9978\", ServerName: \"192.168.126.11:9978\", }. 
Err: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:11.342776868+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:11.342756Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to TRANSIENT_FAILURE, last error: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:11.342808748+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:11.342789Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, TRANSIENT_FAILURE"} 2025-12-12T16:15:15.470109340+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:15.470003Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to IDLE, last error: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:9978: connect: connection refused\""} 2025-12-12T16:15:15.470109340+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:15.470068Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, IDLE"} 2025-12-12T16:15:15.470157871+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:15.470107Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to CONNECTING"} 2025-12-12T16:15:15.470157871+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:15.470140Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel picks a new address \"192.168.126.11:9978\" to connect"} 2025-12-12T16:15:15.470272784+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:15.470234Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, CONNECTING"} 2025-12-12T16:15:15.476026152+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:15.475933Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1 SubChannel #2] Subchannel Connectivity change to READY"} 2025-12-12T16:15:15.476026152+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:15.475997Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[balancer] base.baseBalancer: handle SubConn state change: 0xc00021b740, READY"} 2025-12-12T16:15:15.476064073+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:15.476030Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[roundrobin] roundrobinPicker: Build called with info: {map[SubConn(id:2):{{Addr: \"192.168.126.11:9978\", ServerName: \"192.168.126.11:9978\", }}]}"} 2025-12-12T16:15:15.476190916+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:15.476071Z","caller":"zapgrpc/zapgrpc.go:174","msg":"[core] [Channel #1] Channel Connectivity change to READY"} ././@LongLink0000644000000000000000000000021600000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/setup/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000755000175000017500000000000015117043062032767 5ustar zuulzuul././@LongLink0000644000000000000000000000022300000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/setup/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000644000175000017500000000023415117043044032770 0ustar zuulzuul2025-12-12T16:15:01.387051190+00:00 stdout P Fixing etcd log permissions. 2025-12-12T16:15:01.392353406+00:00 stdout P Fixing etcd auto backup permissions. ././@LongLink0000644000000000000000000000022100000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-rev/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000755000175000017500000000000015117043062032767 5ustar zuulzuul././@LongLink0000644000000000000000000000022600000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-rev/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000644000175000017500000000017415117043044032773 0ustar zuulzuul2025-12-12T16:15:16.656010269+00:00 stderr F I1212 16:15:16.655811 1 etcdcli_pool.go:70] creating a new cached client ././@LongLink0000644000000000000000000000023400000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-resources-copy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000755000175000017500000000000015117043062032767 5ustar zuulzuul././@LongLink0000644000000000000000000000024100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-resources-copy/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000644000175000017500000000000015117043044032757 0ustar zuulzuul././@LongLink0000644000000000000000000000021500000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000755000175000017500000000000015117043062032767 5ustar zuulzuul././@LongLink0000644000000000000000000000022200000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-cr0000644000175000017500000007051515117043044033001 0ustar zuulzuul2025-12-12T16:15:10.019116959+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:10.018671Z","logger":"etcd-client","caller":"v3@v3.5.21/retry_interceptor.go:63","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00002a3c0/192.168.126.11:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 192.168.126.11:2379: connect: connection refused\""} 2025-12-12T16:15:10.019116959+00:00 stderr F Error: context deadline exceeded 2025-12-12T16:15:10.246495187+00:00 
stderr F dataDir is present on crc 2025-12-12T16:15:12.250719331+00:00 stderr P failed to create etcd client, but the server is already initialized as member "crc" before, starting as etcd member: context deadline exceeded 2025-12-12T16:15:12.252743670+00:00 stdout P Waiting for ports 2379, 2380 and 9978 to be released. 2025-12-12T16:15:12.262360872+00:00 stderr F 2025-12-12T16:15:12.262360872+00:00 stderr F real 0m0.010s 2025-12-12T16:15:12.262360872+00:00 stderr F user 0m0.001s 2025-12-12T16:15:12.262360872+00:00 stderr F sys 0m0.008s 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_QUOTA_BACKEND_BYTES=8589934592 2025-12-12T16:15:12.264626096+00:00 stdout F ALL_ETCD_ENDPOINTS=https://192.168.126.11:2379 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_TLS_MIN_VERSION=TLS1.2 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_IMAGE=quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9e388ee2b3562b6267447cbcc4b95ca7a61bf361840d36a682480da671b83612 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_STATIC_POD_VERSION=2 2025-12-12T16:15:12.264626096+00:00 stdout F ETCDCTL_ENDPOINTS=https://192.168.126.11:2379 2025-12-12T16:15:12.264626096+00:00 stdout F ETCDCTL_KEY=/etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-peer-crc.key 2025-12-12T16:15:12.264626096+00:00 stdout F ETCDCTL_API=3 2025-12-12T16:15:12.264626096+00:00 stdout F ETCDCTL_CACERT=/etc/kubernetes/static-pod-certs/configmaps/etcd-all-bundles/server-ca-bundle.crt 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_HEARTBEAT_INTERVAL=100 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_NAME=crc 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_SOCKET_REUSE_ADDRESS=true 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_EXPERIMENTAL_WARNING_APPLY_DURATION=200ms 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_EXPERIMENTAL_MAX_LEARNERS=1 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_DATA_DIR=/var/lib/etcd 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_ELECTION_TIMEOUT=1000 2025-12-12T16:15:12.264626096+00:00 stdout F ETCDCTL_CERT=/etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-peer-crc.crt 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_INITIAL_CLUSTER_STATE=existing 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_INITIAL_CLUSTER= 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_CIPHER_SUITES=TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_EXPERIMENTAL_WATCH_PROGRESS_NOTIFY_INTERVAL=5s 2025-12-12T16:15:12.264626096+00:00 stdout F ETCD_ENABLE_PPROF=true 2025-12-12T16:15:12.265100828+00:00 stderr F + exec nice -n -19 ionice -c2 -n0 etcd --logger=zap --log-level=info --experimental-initial-corrupt-check=true --snapshot-count=10000 --initial-advertise-peer-urls=https://192.168.126.11:2380 --cert-file=/etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-serving-crc.crt --key-file=/etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-serving-crc.key --trusted-ca-file=/etc/kubernetes/static-pod-certs/configmaps/etcd-all-bundles/server-ca-bundle.crt --client-cert-auth=true --peer-cert-file=/etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-peer-crc.crt --peer-key-file=/etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-peer-crc.key 
--peer-trusted-ca-file=/etc/kubernetes/static-pod-certs/configmaps/etcd-all-bundles/server-ca-bundle.crt --peer-client-cert-auth=true --advertise-client-urls=https://192.168.126.11:2379 --listen-client-urls=https://0.0.0.0:2379 --listen-peer-urls=https://0.0.0.0:2380 --metrics=extensive --listen-metrics-urls=https://0.0.0.0:9978 2025-12-12T16:15:12.283230755+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.282798Z","caller":"flags/flag.go:113","msg":"recognized and used environment variable","variable-name":"ETCD_CIPHER_SUITES","variable-value":"TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256"} 2025-12-12T16:15:12.283230755+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283204Z","caller":"flags/flag.go:113","msg":"recognized and used environment variable","variable-name":"ETCD_DATA_DIR","variable-value":"/var/lib/etcd"} 2025-12-12T16:15:12.283230755+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283216Z","caller":"flags/flag.go:113","msg":"recognized and used environment variable","variable-name":"ETCD_ELECTION_TIMEOUT","variable-value":"1000"} 2025-12-12T16:15:12.283278086+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283223Z","caller":"flags/flag.go:113","msg":"recognized and used environment variable","variable-name":"ETCD_ENABLE_PPROF","variable-value":"true"} 2025-12-12T16:15:12.283278086+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283242Z","caller":"flags/flag.go:113","msg":"recognized and used environment variable","variable-name":"ETCD_EXPERIMENTAL_MAX_LEARNERS","variable-value":"1"} 2025-12-12T16:15:12.283278086+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283254Z","caller":"flags/flag.go:113","msg":"recognized and used environment variable","variable-name":"ETCD_EXPERIMENTAL_WARNING_APPLY_DURATION","variable-value":"200ms"} 2025-12-12T16:15:12.283278086+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283261Z","caller":"flags/flag.go:113","msg":"recognized and used environment variable","variable-name":"ETCD_EXPERIMENTAL_WATCH_PROGRESS_NOTIFY_INTERVAL","variable-value":"5s"} 2025-12-12T16:15:12.283278086+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283270Z","caller":"flags/flag.go:113","msg":"recognized and used environment variable","variable-name":"ETCD_HEARTBEAT_INTERVAL","variable-value":"100"} 2025-12-12T16:15:12.283287756+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283278Z","caller":"flags/flag.go:113","msg":"recognized and used environment variable","variable-name":"ETCD_INITIAL_CLUSTER_STATE","variable-value":"existing"} 2025-12-12T16:15:12.283318327+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283293Z","caller":"flags/flag.go:113","msg":"recognized and used environment variable","variable-name":"ETCD_NAME","variable-value":"crc"} 2025-12-12T16:15:12.283342267+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283325Z","caller":"flags/flag.go:113","msg":"recognized and used environment variable","variable-name":"ETCD_QUOTA_BACKEND_BYTES","variable-value":"8589934592"} 2025-12-12T16:15:12.283342267+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283336Z","caller":"flags/flag.go:113","msg":"recognized and used environment 
variable","variable-name":"ETCD_SOCKET_REUSE_ADDRESS","variable-value":"true"} 2025-12-12T16:15:12.283349768+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283343Z","caller":"flags/flag.go:113","msg":"recognized and used environment variable","variable-name":"ETCD_TLS_MIN_VERSION","variable-value":"TLS1.2"} 2025-12-12T16:15:12.283372438+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:12.283353Z","caller":"flags/flag.go:93","msg":"unrecognized environment variable","environment-variable":"ETCD_IMAGE=quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9e388ee2b3562b6267447cbcc4b95ca7a61bf361840d36a682480da671b83612"} 2025-12-12T16:15:12.283372438+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:12.283360Z","caller":"flags/flag.go:93","msg":"unrecognized environment variable","environment-variable":"ETCD_STATIC_POD_VERSION=2"} 2025-12-12T16:15:12.283391659+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:12.283370Z","caller":"flags/flag.go:93","msg":"unrecognized environment variable","environment-variable":"ETCD_INITIAL_CLUSTER="} 2025-12-12T16:15:12.283446090+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:12.283397Z","caller":"embed/config.go:694","msg":"Running http and grpc server on single port. This is not recommended for production."} 2025-12-12T16:15:12.283446090+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283423Z","caller":"etcdmain/etcd.go:73","msg":"Running: ","args":["etcd","--logger=zap","--log-level=info","--experimental-initial-corrupt-check=true","--snapshot-count=10000","--initial-advertise-peer-urls=https://192.168.126.11:2380","--cert-file=/etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-serving-crc.crt","--key-file=/etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-serving-crc.key","--trusted-ca-file=/etc/kubernetes/static-pod-certs/configmaps/etcd-all-bundles/server-ca-bundle.crt","--client-cert-auth=true","--peer-cert-file=/etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-peer-crc.crt","--peer-key-file=/etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-peer-crc.key","--peer-trusted-ca-file=/etc/kubernetes/static-pod-certs/configmaps/etcd-all-bundles/server-ca-bundle.crt","--peer-client-cert-auth=true","--advertise-client-urls=https://192.168.126.11:2379","--listen-client-urls=https://0.0.0.0:2379","--listen-peer-urls=https://0.0.0.0:2380","--metrics=extensive","--listen-metrics-urls=https://0.0.0.0:9978"]} 2025-12-12T16:15:12.283509221+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:12.283479Z","caller":"etcdmain/etcd.go:446","msg":"found invalid file under data directory","filename":"revision.json","data-dir":"/var/lib/etcd"} 2025-12-12T16:15:12.283509221+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283491Z","caller":"etcdmain/etcd.go:116","msg":"server has been already initialized","data-dir":"/var/lib/etcd","dir-type":"member"} 2025-12-12T16:15:12.283509221+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:12.283500Z","caller":"embed/config.go:694","msg":"Running http and grpc server on single port. 
This is not recommended for production."} 2025-12-12T16:15:12.283517962+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283506Z","caller":"embed/etcd.go:134","msg":"configuring socket options","reuse-address":true,"reuse-port":false} 2025-12-12T16:15:12.283517962+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283512Z","caller":"embed/etcd.go:140","msg":"configuring peer listeners","listen-peer-urls":["https://0.0.0.0:2380"]} 2025-12-12T16:15:12.283557223+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.283534Z","caller":"embed/etcd.go:531","msg":"starting with peer TLS","tls-info":"cert = /etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-peer-crc.crt, key = /etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-peer-crc.key, client-cert=, client-key=, trusted-ca = /etc/kubernetes/static-pod-certs/configmaps/etcd-all-bundles/server-ca-bundle.crt, client-cert-auth = true, crl-file = ","cipher-suites":["TLS_AES_128_GCM_SHA256","TLS_AES_256_GCM_SHA384","TLS_CHACHA20_POLY1305_SHA256","TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256","TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256","TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384","TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384","TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256","TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256"]} 2025-12-12T16:15:12.284150927+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.284118Z","caller":"embed/etcd.go:148","msg":"configuring client listeners","listen-client-urls":["https://0.0.0.0:2379"]} 2025-12-12T16:15:12.284150927+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.284138Z","caller":"embed/etcd.go:657","msg":"pprof is enabled","path":"/debug/pprof"} 2025-12-12T16:15:12.284297770+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.284251Z","caller":"embed/etcd.go:325","msg":"starting an etcd server","etcd-version":"3.5.21","git-sha":"df6e08fa","go-version":"go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime","go-os":"linux","go-arch":"amd64","max-cpu-set":12,"max-cpu-available":12,"member-initialized":true,"name":"crc","data-dir":"/var/lib/etcd","wal-dir":"","wal-dir-dedicated":"","member-dir":"/var/lib/etcd/member","force-new-cluster":false,"heartbeat-interval":"100ms","election-timeout":"1s","initial-election-tick-advance":true,"snapshot-count":10000,"max-wals":5,"max-snapshots":5,"snapshot-catchup-entries":5000,"initial-advertise-peer-urls":["https://192.168.126.11:2380"],"listen-peer-urls":["https://0.0.0.0:2380"],"advertise-client-urls":["https://192.168.126.11:2379"],"listen-client-urls":["https://0.0.0.0:2379"],"listen-metrics-urls":["https://0.0.0.0:9978"],"cors":["*"],"host-whitelist":["*"],"initial-cluster":"","initial-cluster-state":"existing","initial-cluster-token":"","quota-backend-bytes":8589934592,"max-request-bytes":1572864,"max-concurrent-streams":4294967295,"pre-vote":true,"initial-corrupt-check":true,"corrupt-check-time-interval":"0s","compact-check-time-enabled":false,"compact-check-time-interval":"1m0s","auto-compaction-mode":"periodic","auto-compaction-retention":"0s","auto-compaction-interval":"0s","discovery-url":"","discovery-proxy":"","downgrade-check-interval":"5s","max-learners":1} 2025-12-12T16:15:12.284618368+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:12.284570Z","caller":"fileutil/fileutil.go:53","msg":"check file permission","error":"directory \"/var/lib/etcd/member/snap\" exist, but the permission is \"drwxr-xr-x\". 
The recommended permission is \"-rwx------\" to prevent possible unprivileged access to the data"} 2025-12-12T16:15:12.299915427+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.299810Z","caller":"etcdserver/backend.go:81","msg":"opened backend db","path":"/var/lib/etcd/member/snap/db","took":"15.157795ms"} 2025-12-12T16:15:12.599226907+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.594032Z","caller":"etcdserver/server.go:516","msg":"recovered v2 store from snapshot","snapshot-index":30003,"snapshot-size":"8.9 kB"} 2025-12-12T16:15:12.599305619+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.599245Z","caller":"etcdserver/server.go:529","msg":"recovered v3 backend from snapshot","backend-size-bytes":63799296,"backend-size":"64 MB","backend-size-in-use-bytes":60313600,"backend-size-in-use":"60 MB"} 2025-12-12T16:15:12.876494236+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.876298Z","caller":"etcdserver/raft.go:542","msg":"restarting local member","cluster-id":"37a6ceb54a88a89a","local-member-id":"d44fc94b15474c4c","commit-index":39602} 2025-12-12T16:15:12.877005338+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.876931Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"d44fc94b15474c4c switched to configuration voters=(15298667783517588556)"} 2025-12-12T16:15:12.877024129+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.877000Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"d44fc94b15474c4c became follower at term 9"} 2025-12-12T16:15:12.877039529+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.877016Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"newRaft d44fc94b15474c4c [peers: [d44fc94b15474c4c], term: 9, commit: 39602, applied: 30003, lastindex: 39602, lastterm: 9]"} 2025-12-12T16:15:12.877246204+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.877160Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"} 2025-12-12T16:15:12.877246204+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.877212Z","caller":"membership/cluster.go:280","msg":"recovered/added member from store","cluster-id":"37a6ceb54a88a89a","local-member-id":"d44fc94b15474c4c","recovered-remote-peer-id":"d44fc94b15474c4c","recovered-remote-peer-urls":["https://192.168.126.11:2380"],"recovered-remote-peer-is-learner":false} 2025-12-12T16:15:12.877246204+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.877225Z","caller":"membership/cluster.go:290","msg":"set cluster version from store","cluster-version":"3.5"} 2025-12-12T16:15:12.877590233+00:00 stderr F {"level":"warn","ts":"2025-12-12T16:15:12.877523Z","caller":"auth/store.go:1241","msg":"simple token is not cryptographically signed"} 2025-12-12T16:15:12.877675795+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.877613Z","caller":"mvcc/kvstore.go:348","msg":"restored last compact revision","meta-bucket-name":"meta","meta-bucket-name-key":"finishedCompactRev","restored-compact-revision":33058} 2025-12-12T16:15:12.916591022+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.916432Z","caller":"mvcc/kvstore.go:425","msg":"kvstore restored","current-rev":35816} 2025-12-12T16:15:12.916591022+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.916541Z","caller":"etcdserver/server.go:637","msg":"restore consistentIndex","index":39602} 2025-12-12T16:15:12.916715555+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.916670Z","caller":"etcdserver/quota.go:117","msg":"enabled backend 
quota","quota-name":"v3-applier","quota-size-bytes":8589934592,"quota-size":"8.6 GB"} 2025-12-12T16:15:12.917397852+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.917324Z","caller":"etcdserver/corrupt.go:96","msg":"starting initial corruption check","local-member-id":"d44fc94b15474c4c","timeout":"27s"} 2025-12-12T16:15:12.933882589+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.933747Z","caller":"etcdserver/corrupt.go:177","msg":"initial corruption checking passed; no corruption","local-member-id":"d44fc94b15474c4c"} 2025-12-12T16:15:12.933882589+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.933833Z","caller":"etcdserver/server.go:875","msg":"starting etcd server","local-member-id":"d44fc94b15474c4c","local-server-version":"3.5.21","cluster-id":"37a6ceb54a88a89a","cluster-version":"3.5"} 2025-12-12T16:15:12.934381881+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.934164Z","caller":"etcdserver/server.go:768","msg":"started as single-node; fast-forwarding election ticks","local-member-id":"d44fc94b15474c4c","forward-ticks":9,"forward-duration":"900ms","election-ticks":10,"election-timeout":"1s"} 2025-12-12T16:15:12.934466583+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.934335Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/etcd/member/snap","suffix":"snap.db","max":5,"interval":"30s"} 2025-12-12T16:15:12.934528594+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.934451Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/etcd/member/snap","suffix":"snap","max":5,"interval":"30s"} 2025-12-12T16:15:12.934548405+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.934477Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/etcd/member/wal","suffix":"wal","max":5,"interval":"30s"} 2025-12-12T16:15:12.935250682+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.935108Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"} 2025-12-12T16:15:12.936354418+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.936245Z","caller":"embed/etcd.go:765","msg":"starting with client TLS","tls-info":"cert = /etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-serving-crc.crt, key = /etc/kubernetes/static-pod-certs/secrets/etcd-all-certs/etcd-serving-crc.key, client-cert=, client-key=, trusted-ca = /etc/kubernetes/static-pod-certs/configmaps/etcd-all-bundles/server-ca-bundle.crt, client-cert-auth = true, crl-file = ","cipher-suites":["TLS_AES_128_GCM_SHA256","TLS_AES_256_GCM_SHA384","TLS_CHACHA20_POLY1305_SHA256","TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256","TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256","TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384","TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384","TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256","TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256"]} 2025-12-12T16:15:12.936583974+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.936461Z","caller":"embed/etcd.go:636","msg":"serving peer traffic","address":"[::]:2380"} 2025-12-12T16:15:12.936583974+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.936539Z","caller":"embed/etcd.go:606","msg":"cmux::serve","address":"[::]:2380"} 2025-12-12T16:15:12.937042255+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.936952Z","caller":"embed/etcd.go:294","msg":"now serving 
peer/client/metrics","local-member-id":"d44fc94b15474c4c","initial-advertise-peer-urls":["https://192.168.126.11:2380"],"listen-peer-urls":["https://0.0.0.0:2380"],"advertise-client-urls":["https://192.168.126.11:2379"],"listen-client-urls":["https://0.0.0.0:2379"],"listen-metrics-urls":["https://0.0.0.0:9978"]} 2025-12-12T16:15:12.937141047+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:12.937080Z","caller":"embed/etcd.go:911","msg":"serving metrics","address":"https://0.0.0.0:9978"} 2025-12-12T16:15:13.678064458+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.677863Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"d44fc94b15474c4c is starting a new election at term 9"} 2025-12-12T16:15:13.678064458+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.677974Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"d44fc94b15474c4c became pre-candidate at term 9"} 2025-12-12T16:15:13.678064458+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.678042Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"d44fc94b15474c4c received MsgPreVoteResp from d44fc94b15474c4c at term 9"} 2025-12-12T16:15:13.678116899+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.678064Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"d44fc94b15474c4c became candidate at term 10"} 2025-12-12T16:15:13.678116899+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.678074Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"d44fc94b15474c4c received MsgVoteResp from d44fc94b15474c4c at term 10"} 2025-12-12T16:15:13.678116899+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.678087Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"d44fc94b15474c4c became leader at term 10"} 2025-12-12T16:15:13.678116899+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.678097Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: d44fc94b15474c4c elected leader d44fc94b15474c4c at term 10"} 2025-12-12T16:15:13.678708013+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.678643Z","caller":"etcdserver/server.go:2153","msg":"published local member to cluster through raft","local-member-id":"d44fc94b15474c4c","local-member-attributes":"{Name:crc ClientURLs:[https://192.168.126.11:2379]}","request-path":"/0/members/d44fc94b15474c4c/attributes","cluster-id":"37a6ceb54a88a89a","publish-timeout":"27s"} 2025-12-12T16:15:13.678761175+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.678671Z","caller":"embed/serve.go:124","msg":"ready to serve client requests"} 2025-12-12T16:15:13.678969440+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.678887Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"} 2025-12-12T16:15:13.678969440+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.678951Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"} 2025-12-12T16:15:13.680092717+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.680028Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"} 2025-12-12T16:15:13.681116301+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:13.681052Z","caller":"embed/serve.go:275","msg":"serving client traffic securely","traffic":"grpc+http","address":"[::]:2379"} 2025-12-12T16:15:20.357819809+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:20.357196Z","caller":"etcdserver/server.go:1485","msg":"triggering 
snapshot","local-member-id":"d44fc94b15474c4c","local-member-applied-index":40004,"local-member-snapshot-index":30003,"local-member-snapshot-count":10000} 2025-12-12T16:15:20.358081286+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:20.358042Z","caller":"etcdserver/server.go:2548","msg":"saved snapshot","snapshot-index":40004} 2025-12-12T16:15:20.358127077+00:00 stderr F {"level":"info","ts":"2025-12-12T16:15:20.358101Z","caller":"etcdserver/server.go:2578","msg":"compacted Raft logs","compact-index":35004} 2025-12-12T16:26:43.260288092+00:00 stderr F {"level":"info","ts":"2025-12-12T16:26:43.258099Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":40129} 2025-12-12T16:26:43.484170358+00:00 stderr F {"level":"info","ts":"2025-12-12T16:26:43.484049Z","caller":"mvcc/kvstore_compaction.go:71","msg":"finished scheduled compaction","compact-revision":40129,"took":"221.026934ms","hash":1105480092,"current-db-size-bytes":86675456,"current-db-size":"87 MB","current-db-size-in-use-bytes":46112768,"current-db-size-in-use":"46 MB"} 2025-12-12T16:26:43.484170358+00:00 stderr F {"level":"info","ts":"2025-12-12T16:26:43.484118Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":1105480092,"revision":40129,"compact-revision":33058} 2025-12-12T16:26:56.480461028+00:00 stderr F {"level":"info","ts":"2025-12-12T16:26:56.479902Z","caller":"wal/wal.go:788","msg":"created a new WAL segment","path":"/var/lib/etcd/member/wal/0000000000000003-000000000000b2a3.wal"} 2025-12-12T16:31:43.261911613+00:00 stderr F {"level":"info","ts":"2025-12-12T16:31:43.261647Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":41075} 2025-12-12T16:31:43.406909798+00:00 stderr F {"level":"info","ts":"2025-12-12T16:31:43.406756Z","caller":"mvcc/kvstore_compaction.go:71","msg":"finished scheduled compaction","compact-revision":41075,"took":"139.562412ms","hash":3490726872,"current-db-size-bytes":111894528,"current-db-size":"112 MB","current-db-size-in-use-bytes":106135552,"current-db-size-in-use":"106 MB"} 2025-12-12T16:31:43.406909798+00:00 stderr F {"level":"info","ts":"2025-12-12T16:31:43.406840Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3490726872,"revision":41075,"compact-revision":40129} 2025-12-12T16:36:43.266221366+00:00 stderr F {"level":"info","ts":"2025-12-12T16:36:43.265166Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":44407} 2025-12-12T16:36:43.482747266+00:00 stderr F {"level":"info","ts":"2025-12-12T16:36:43.482635Z","caller":"mvcc/kvstore_compaction.go:71","msg":"finished scheduled compaction","compact-revision":44407,"took":"207.909484ms","hash":1717688947,"current-db-size-bytes":112570368,"current-db-size":"113 MB","current-db-size-in-use-bytes":53338112,"current-db-size-in-use":"53 MB"} 2025-12-12T16:36:43.482747266+00:00 stderr F {"level":"info","ts":"2025-12-12T16:36:43.482694Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":1717688947,"revision":44407,"compact-revision":41075} 2025-12-12T16:37:24.412417407+00:00 stderr F {"level":"info","ts":"2025-12-12T16:37:24.412265Z","caller":"etcdserver/server.go:1485","msg":"triggering snapshot","local-member-id":"d44fc94b15474c4c","local-member-applied-index":50005,"local-member-snapshot-index":40004,"local-member-snapshot-count":10000} 2025-12-12T16:37:24.414442857+00:00 stderr F {"level":"info","ts":"2025-12-12T16:37:24.414344Z","caller":"etcdserver/server.go:2548","msg":"saved snapshot","snapshot-index":50005} 2025-12-12T16:37:24.414520619+00:00 stderr F 
{"level":"info","ts":"2025-12-12T16:37:24.414479Z","caller":"etcdserver/server.go:2578","msg":"compacted Raft logs","compact-index":45005} 2025-12-12T16:41:43.268058824+00:00 stderr F {"level":"info","ts":"2025-12-12T16:41:43.267914Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":45125} 2025-12-12T16:41:43.421285794+00:00 stderr F {"level":"info","ts":"2025-12-12T16:41:43.417797Z","caller":"mvcc/kvstore_compaction.go:71","msg":"finished scheduled compaction","compact-revision":45125,"took":"145.041344ms","hash":3764047441,"current-db-size-bytes":112570368,"current-db-size":"113 MB","current-db-size-in-use-bytes":53886976,"current-db-size-in-use":"54 MB"} 2025-12-12T16:41:43.421285794+00:00 stderr F {"level":"info","ts":"2025-12-12T16:41:43.417837Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3764047441,"revision":45125,"compact-revision":44407} ././@LongLink0000644000000000000000000000023600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_214aeed8-f6a2-4251-b4d0-c81fd217c7c2/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000755000175000017500000000000015117043043033043 5ustar zuulzuul././@LongLink0000644000000000000000000000025000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_214aeed8-f6a2-4251-b4d0-c81fd217c7c2/installer/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000755000175000017500000000000015117043062033044 5ustar zuulzuul././@LongLink0000644000000000000000000000025500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_214aeed8-f6a2-4251-b4d0-c81fd217c7c2/installer/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000644000175000017500000017270515117043043033061 0ustar zuulzuul2025-12-12T16:17:36.598332599+00:00 stderr F I1212 16:17:36.597830 1 cmd.go:95] &{ true {false} installer true map[cert-configmaps:0xc00013aa00 cert-dir:0xc00013abe0 cert-secrets:0xc00013a960 configmaps:0xc00013a500 namespace:0xc00013a320 optional-cert-configmaps:0xc00013ab40 optional-cert-secrets:0xc00013aaa0 optional-configmaps:0xc00013a640 optional-secrets:0xc00013a5a0 pod:0xc00013a3c0 pod-manifest-dir:0xc00013a780 resource-dir:0xc00013a6e0 revision:0xc00013a280 secrets:0xc00013a460 v:0xc00079e460] [0xc00079e460 0xc00013a280 0xc00013a320 0xc00013a3c0 0xc00013a6e0 0xc00013a780 0xc00013a500 0xc00013a640 0xc00013a460 0xc00013a5a0 0xc00013abe0 0xc00013aa00 0xc00013ab40 0xc00013a960 0xc00013aaa0] [] map[cert-configmaps:0xc00013aa00 cert-dir:0xc00013abe0 cert-secrets:0xc00013a960 configmaps:0xc00013a500 help:0xc00079e820 kubeconfig:0xc00013a1e0 log-flush-frequency:0xc00079e3c0 namespace:0xc00013a320 optional-cert-configmaps:0xc00013ab40 optional-cert-secrets:0xc00013aaa0 optional-configmaps:0xc00013a640 optional-secrets:0xc00013a5a0 pod:0xc00013a3c0 pod-manifest-dir:0xc00013a780 pod-manifests-lock-file:0xc00013a8c0 resource-dir:0xc00013a6e0 revision:0xc00013a280 secrets:0xc00013a460 timeout-duration:0xc00013a820 v:0xc00079e460 vmodule:0xc00079e500] [0xc00013a1e0 0xc00013a280 0xc00013a320 0xc00013a3c0 0xc00013a460 0xc00013a500 0xc00013a5a0 0xc00013a640 0xc00013a6e0 0xc00013a780 0xc00013a820 0xc00013a8c0 0xc00013a960 
0xc00013aa00 0xc00013aaa0 0xc00013ab40 0xc00013abe0 0xc00079e3c0 0xc00079e460 0xc00079e500 0xc00079e820] [0xc00013aa00 0xc00013abe0 0xc00013a960 0xc00013a500 0xc00079e820 0xc00013a1e0 0xc00079e3c0 0xc00013a320 0xc00013ab40 0xc00013aaa0 0xc00013a640 0xc00013a5a0 0xc00013a3c0 0xc00013a780 0xc00013a8c0 0xc00013a6e0 0xc00013a280 0xc00013a460 0xc00013a820 0xc00079e460 0xc00079e500] map[104:0xc00079e820 118:0xc00079e460] [] -1 0 0xc0006a7170 true 0xae3c00 []} 2025-12-12T16:17:36.598767482+00:00 stderr F I1212 16:17:36.598357 1 cmd.go:96] (*installerpod.InstallOptions)(0xc0003a9040)({ 2025-12-12T16:17:36.598767482+00:00 stderr F KubeConfig: (string) "", 2025-12-12T16:17:36.598767482+00:00 stderr F KubeClient: (kubernetes.Interface) , 2025-12-12T16:17:36.598767482+00:00 stderr F Revision: (string) (len=2) "12", 2025-12-12T16:17:36.598767482+00:00 stderr F NodeName: (string) "", 2025-12-12T16:17:36.598767482+00:00 stderr F Namespace: (string) (len=24) "openshift-kube-apiserver", 2025-12-12T16:17:36.598767482+00:00 stderr F Clock: (clock.RealClock) { 2025-12-12T16:17:36.598767482+00:00 stderr F }, 2025-12-12T16:17:36.598767482+00:00 stderr F PodConfigMapNamePrefix: (string) (len=18) "kube-apiserver-pod", 2025-12-12T16:17:36.598767482+00:00 stderr F SecretNamePrefixes: ([]string) (len=3 cap=4) { 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=11) "etcd-client", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=34) "localhost-recovery-serving-certkey", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=31) "localhost-recovery-client-token" 2025-12-12T16:17:36.598767482+00:00 stderr F }, 2025-12-12T16:17:36.598767482+00:00 stderr F OptionalSecretNamePrefixes: ([]string) (len=2 cap=2) { 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=17) "encryption-config", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=21) "webhook-authenticator" 2025-12-12T16:17:36.598767482+00:00 stderr F }, 2025-12-12T16:17:36.598767482+00:00 stderr F ConfigMapNamePrefixes: ([]string) (len=8 cap=8) { 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=18) "kube-apiserver-pod", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=6) "config", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=37) "kube-apiserver-cert-syncer-kubeconfig", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=28) "bound-sa-token-signing-certs", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=15) "etcd-serving-ca", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=18) "kubelet-serving-ca", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=22) "sa-token-signing-certs", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=29) "kube-apiserver-audit-policies" 2025-12-12T16:17:36.598767482+00:00 stderr F }, 2025-12-12T16:17:36.598767482+00:00 stderr F OptionalConfigMapNamePrefixes: ([]string) (len=4 cap=4) { 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=14) "oauth-metadata", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=12) "cloud-config", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=24) "kube-apiserver-server-ca", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=11) "auth-config" 2025-12-12T16:17:36.598767482+00:00 stderr F }, 2025-12-12T16:17:36.598767482+00:00 stderr F CertSecretNames: ([]string) (len=10 cap=16) { 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=17) "aggregator-client", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=30) "localhost-serving-cert-certkey", 
2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=31) "service-network-serving-certkey", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=37) "external-loadbalancer-serving-certkey", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=37) "internal-loadbalancer-serving-certkey", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=33) "bound-service-account-signing-key", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=40) "control-plane-node-admin-client-cert-key", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=31) "check-endpoints-client-cert-key", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=14) "kubelet-client", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=16) "node-kubeconfigs" 2025-12-12T16:17:36.598767482+00:00 stderr F }, 2025-12-12T16:17:36.598767482+00:00 stderr F OptionalCertSecretNamePrefixes: ([]string) (len=11 cap=16) { 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=17) "user-serving-cert", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=21) "user-serving-cert-000", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=21) "user-serving-cert-001", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=21) "user-serving-cert-002", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=21) "user-serving-cert-003", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=21) "user-serving-cert-004", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=21) "user-serving-cert-005", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=21) "user-serving-cert-006", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=21) "user-serving-cert-007", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=21) "user-serving-cert-008", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=21) "user-serving-cert-009" 2025-12-12T16:17:36.598767482+00:00 stderr F }, 2025-12-12T16:17:36.598767482+00:00 stderr F CertConfigMapNamePrefixes: ([]string) (len=4 cap=4) { 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=20) "aggregator-client-ca", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=9) "client-ca", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=29) "control-plane-node-kubeconfig", 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=26) "check-endpoints-kubeconfig" 2025-12-12T16:17:36.598767482+00:00 stderr F }, 2025-12-12T16:17:36.598767482+00:00 stderr F OptionalCertConfigMapNamePrefixes: ([]string) (len=1 cap=1) { 2025-12-12T16:17:36.598767482+00:00 stderr F (string) (len=17) "trusted-ca-bundle" 2025-12-12T16:17:36.598767482+00:00 stderr F }, 2025-12-12T16:17:36.598767482+00:00 stderr F CertDir: (string) (len=57) "/etc/kubernetes/static-pod-resources/kube-apiserver-certs", 2025-12-12T16:17:36.598767482+00:00 stderr F ResourceDir: (string) (len=36) "/etc/kubernetes/static-pod-resources", 2025-12-12T16:17:36.598767482+00:00 stderr F PodManifestDir: (string) (len=25) "/etc/kubernetes/manifests", 2025-12-12T16:17:36.598767482+00:00 stderr F Timeout: (time.Duration) 2m0s, 2025-12-12T16:17:36.598767482+00:00 stderr F StaticPodManifestsLockFile: (string) "", 2025-12-12T16:17:36.598767482+00:00 stderr F PodMutationFns: ([]installerpod.PodMutationFunc) , 2025-12-12T16:17:36.598767482+00:00 stderr F KubeletVersion: (string) "" 2025-12-12T16:17:36.598767482+00:00 stderr F }) 2025-12-12T16:17:36.598819063+00:00 stderr F I1212 16:17:36.598802 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" 
enabled=false 2025-12-12T16:17:36.598819063+00:00 stderr F I1212 16:17:36.598813 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:17:36.598827414+00:00 stderr F I1212 16:17:36.598817 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:17:36.598827414+00:00 stderr F I1212 16:17:36.598822 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:17:36.598834994+00:00 stderr F I1212 16:17:36.598825 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:17:36.599421331+00:00 stderr F I1212 16:17:36.599371 1 cmd.go:413] Getting controller reference for node crc 2025-12-12T16:17:36.610108497+00:00 stderr F I1212 16:17:36.610019 1 cmd.go:426] Waiting for installer revisions to settle for node crc 2025-12-12T16:17:36.612020172+00:00 stderr F I1212 16:17:36.611965 1 cmd.go:518] Waiting additional period after revisions have settled for node crc 2025-12-12T16:18:06.614135076+00:00 stderr F I1212 16:18:06.614055 1 cmd.go:524] Getting installer pods for node crc 2025-12-12T16:18:06.621130309+00:00 stderr F I1212 16:18:06.621059 1 cmd.go:542] Latest installer revision for node crc is: 12 2025-12-12T16:18:06.621130309+00:00 stderr F I1212 16:18:06.621090 1 cmd.go:431] Querying kubelet version for node crc 2025-12-12T16:18:06.624889512+00:00 stderr F I1212 16:18:06.624854 1 cmd.go:444] Got kubelet version 1.33.5 on target node crc 2025-12-12T16:18:06.624889512+00:00 stderr F I1212 16:18:06.624873 1 cmd.go:293] Creating target resource directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12" ... 2025-12-12T16:18:06.625533858+00:00 stderr F I1212 16:18:06.625507 1 cmd.go:221] Creating target resource directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12" ... 2025-12-12T16:18:06.625533858+00:00 stderr F I1212 16:18:06.625523 1 cmd.go:229] Getting secrets ... 2025-12-12T16:18:06.628724237+00:00 stderr F I1212 16:18:06.628681 1 copy.go:32] Got secret openshift-kube-apiserver/etcd-client-12 2025-12-12T16:18:06.632753737+00:00 stderr F I1212 16:18:06.632298 1 copy.go:32] Got secret openshift-kube-apiserver/localhost-recovery-client-token-12 2025-12-12T16:18:06.635420512+00:00 stderr F I1212 16:18:06.635379 1 copy.go:32] Got secret openshift-kube-apiserver/localhost-recovery-serving-certkey-12 2025-12-12T16:18:06.638616232+00:00 stderr F I1212 16:18:06.638581 1 copy.go:24] Failed to get secret openshift-kube-apiserver/encryption-config-12: secrets "encryption-config-12" not found 2025-12-12T16:18:06.641534184+00:00 stderr F I1212 16:18:06.641495 1 copy.go:32] Got secret openshift-kube-apiserver/webhook-authenticator-12 2025-12-12T16:18:06.641590445+00:00 stderr F I1212 16:18:06.641580 1 cmd.go:242] Getting config maps ... 
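The five secret lookups above and the NotFound for encryption-config-12 match the installer's SecretNamePrefixes / OptionalSecretNamePrefixes dump earlier in this log: optional prefixes are allowed to be missing. A minimal sketch for reproducing those revision-12 lookups from outside the installer, assuming cluster-admin access with oc (resource names are copied from the log; nothing here is part of the installer itself):

  # Revisioned inputs for installer revision 12
  oc -n openshift-kube-apiserver get secret \
      etcd-client-12 localhost-recovery-client-token-12 \
      localhost-recovery-serving-certkey-12 webhook-authenticator-12
  # encryption-config-12 is an optional prefix, so NotFound is expected here too
  oc -n openshift-kube-apiserver get secret encryption-config-12 || true
  oc -n openshift-kube-apiserver get configmap kube-apiserver-pod-12 config-12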
2025-12-12T16:18:06.644510167+00:00 stderr F I1212 16:18:06.644450 1 copy.go:60] Got configMap openshift-kube-apiserver/bound-sa-token-signing-certs-12 2025-12-12T16:18:06.647692996+00:00 stderr F I1212 16:18:06.647648 1 copy.go:60] Got configMap openshift-kube-apiserver/config-12 2025-12-12T16:18:06.650849904+00:00 stderr F I1212 16:18:06.650803 1 copy.go:60] Got configMap openshift-kube-apiserver/etcd-serving-ca-12 2025-12-12T16:18:06.817171836+00:00 stderr F I1212 16:18:06.817096 1 copy.go:60] Got configMap openshift-kube-apiserver/kube-apiserver-audit-policies-12 2025-12-12T16:18:07.021033546+00:00 stderr F I1212 16:18:07.020951 1 copy.go:60] Got configMap openshift-kube-apiserver/kube-apiserver-cert-syncer-kubeconfig-12 2025-12-12T16:18:07.218204831+00:00 stderr F I1212 16:18:07.218110 1 copy.go:60] Got configMap openshift-kube-apiserver/kube-apiserver-pod-12 2025-12-12T16:18:07.418210575+00:00 stderr F I1212 16:18:07.418151 1 copy.go:60] Got configMap openshift-kube-apiserver/kubelet-serving-ca-12 2025-12-12T16:18:07.623905471+00:00 stderr F I1212 16:18:07.618010 1 copy.go:60] Got configMap openshift-kube-apiserver/sa-token-signing-certs-12 2025-12-12T16:18:07.817670351+00:00 stderr F I1212 16:18:07.817575 1 copy.go:52] Failed to get config map openshift-kube-apiserver/auth-config-12: configmaps "auth-config-12" not found 2025-12-12T16:18:08.018052855+00:00 stderr F I1212 16:18:08.017994 1 copy.go:52] Failed to get config map openshift-kube-apiserver/cloud-config-12: configmaps "cloud-config-12" not found 2025-12-12T16:18:08.219798082+00:00 stderr F I1212 16:18:08.218815 1 copy.go:60] Got configMap openshift-kube-apiserver/kube-apiserver-server-ca-12 2025-12-12T16:18:08.419320635+00:00 stderr F I1212 16:18:08.418582 1 copy.go:60] Got configMap openshift-kube-apiserver/oauth-metadata-12 2025-12-12T16:18:08.419320635+00:00 stderr F I1212 16:18:08.419279 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/etcd-client" ... 2025-12-12T16:18:08.419637662+00:00 stderr F I1212 16:18:08.419605 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/etcd-client/tls.crt" ... 2025-12-12T16:18:08.419782276+00:00 stderr F I1212 16:18:08.419760 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/etcd-client/tls.key" ... 2025-12-12T16:18:08.419900619+00:00 stderr F I1212 16:18:08.419881 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/localhost-recovery-client-token" ... 2025-12-12T16:18:08.419965371+00:00 stderr F I1212 16:18:08.419946 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/localhost-recovery-client-token/ca.crt" ... 2025-12-12T16:18:08.420086464+00:00 stderr F I1212 16:18:08.420065 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/localhost-recovery-client-token/namespace" ... 2025-12-12T16:18:08.420192286+00:00 stderr F I1212 16:18:08.420159 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/localhost-recovery-client-token/service-ca.crt" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.420297 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/localhost-recovery-client-token/token" ... 
2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.420400 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/localhost-recovery-serving-certkey" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.420460 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/localhost-recovery-serving-certkey/tls.crt" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.420559 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/localhost-recovery-serving-certkey/tls.key" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.420655 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/webhook-authenticator" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.420744 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/secrets/webhook-authenticator/kubeConfig" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.420827 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/bound-sa-token-signing-certs" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.420913 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/bound-sa-token-signing-certs/service-account-001.pub" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.421004 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/config" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.421055 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/config/config.yaml" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.421137 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/etcd-serving-ca" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.421372 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/etcd-serving-ca/ca-bundle.crt" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.421466 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kube-apiserver-audit-policies" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.421512 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kube-apiserver-audit-policies/policy.yaml" ... 2025-12-12T16:18:08.421618401+00:00 stderr F I1212 16:18:08.421583 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kube-apiserver-cert-syncer-kubeconfig" ... 2025-12-12T16:18:08.421697623+00:00 stderr F I1212 16:18:08.421625 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kube-apiserver-cert-syncer-kubeconfig/kubeconfig" ... 2025-12-12T16:18:08.421733694+00:00 stderr F I1212 16:18:08.421706 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kube-apiserver-pod" ... 2025-12-12T16:18:08.421782066+00:00 stderr F I1212 16:18:08.421757 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kube-apiserver-pod/forceRedeploymentReason" ... 
2025-12-12T16:18:08.421844977+00:00 stderr F I1212 16:18:08.421825 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kube-apiserver-pod/kube-apiserver-startup-monitor-pod.yaml" ... 2025-12-12T16:18:08.421926329+00:00 stderr F I1212 16:18:08.421908 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kube-apiserver-pod/pod.yaml" ... 2025-12-12T16:18:08.422007441+00:00 stderr F I1212 16:18:08.421989 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kube-apiserver-pod/version" ... 2025-12-12T16:18:08.422084043+00:00 stderr F I1212 16:18:08.422066 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kubelet-serving-ca" ... 2025-12-12T16:18:08.422171845+00:00 stderr F I1212 16:18:08.422154 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kubelet-serving-ca/ca-bundle.crt" ... 2025-12-12T16:18:08.422278048+00:00 stderr F I1212 16:18:08.422259 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/sa-token-signing-certs" ... 2025-12-12T16:18:08.422333339+00:00 stderr F I1212 16:18:08.422315 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/sa-token-signing-certs/service-account-001.pub" ... 2025-12-12T16:18:08.422415301+00:00 stderr F I1212 16:18:08.422397 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/sa-token-signing-certs/service-account-002.pub" ... 2025-12-12T16:18:08.422495963+00:00 stderr F I1212 16:18:08.422477 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kube-apiserver-server-ca" ... 2025-12-12T16:18:08.422562985+00:00 stderr F I1212 16:18:08.422544 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/kube-apiserver-server-ca/ca-bundle.crt" ... 2025-12-12T16:18:08.422653587+00:00 stderr F I1212 16:18:08.422631 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/oauth-metadata" ... 2025-12-12T16:18:08.422708888+00:00 stderr F I1212 16:18:08.422691 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/configmaps/oauth-metadata/oauthMetadata" ... 2025-12-12T16:18:08.422801151+00:00 stderr F I1212 16:18:08.422778 1 cmd.go:221] Creating target resource directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs" ... 2025-12-12T16:18:08.422801151+00:00 stderr F I1212 16:18:08.422794 1 cmd.go:229] Getting secrets ... 
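The "Creating directory" / "Writing ..." pairs above lay the revision-12 inputs out as one file per data key under /etc/kubernetes/static-pod-resources/kube-apiserver-pod-12. A hedged sketch for inspecting that tree on the node, assuming oc debug node/crc is usable in this environment:

  # Each secret/configMap key written above becomes one file in the revision tree
  oc debug node/crc -- chroot /host \
      ls -R /etc/kubernetes/static-pod-resources/kube-apiserver-pod-12
  # e.g. secrets/etcd-client/tls.crt, secrets/etcd-client/tls.key,
  #      configmaps/config/config.yaml, configmaps/oauth-metadata/oauthMetadata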
2025-12-12T16:18:08.619368020+00:00 stderr F I1212 16:18:08.619301 1 copy.go:32] Got secret openshift-kube-apiserver/aggregator-client 2025-12-12T16:18:08.820359209+00:00 stderr F I1212 16:18:08.818550 1 copy.go:32] Got secret openshift-kube-apiserver/bound-service-account-signing-key 2025-12-12T16:18:09.017848732+00:00 stderr F I1212 16:18:09.017763 1 copy.go:32] Got secret openshift-kube-apiserver/check-endpoints-client-cert-key 2025-12-12T16:18:09.217907198+00:00 stderr F I1212 16:18:09.217814 1 copy.go:32] Got secret openshift-kube-apiserver/control-plane-node-admin-client-cert-key 2025-12-12T16:18:09.419017440+00:00 stderr F I1212 16:18:09.418944 1 copy.go:32] Got secret openshift-kube-apiserver/external-loadbalancer-serving-certkey 2025-12-12T16:18:09.623161547+00:00 stderr F I1212 16:18:09.623098 1 copy.go:32] Got secret openshift-kube-apiserver/internal-loadbalancer-serving-certkey 2025-12-12T16:18:09.817368118+00:00 stderr F I1212 16:18:09.817279 1 copy.go:32] Got secret openshift-kube-apiserver/kubelet-client 2025-12-12T16:18:10.017773513+00:00 stderr F I1212 16:18:10.017675 1 copy.go:32] Got secret openshift-kube-apiserver/localhost-serving-cert-certkey 2025-12-12T16:18:10.218865285+00:00 stderr F I1212 16:18:10.218791 1 copy.go:32] Got secret openshift-kube-apiserver/node-kubeconfigs 2025-12-12T16:18:10.417650439+00:00 stderr F I1212 16:18:10.417578 1 copy.go:32] Got secret openshift-kube-apiserver/service-network-serving-certkey 2025-12-12T16:18:10.616924026+00:00 stderr F I1212 16:18:10.616860 1 copy.go:24] Failed to get secret openshift-kube-apiserver/user-serving-cert: secrets "user-serving-cert" not found 2025-12-12T16:18:10.816753366+00:00 stderr F I1212 16:18:10.816680 1 copy.go:24] Failed to get secret openshift-kube-apiserver/user-serving-cert-000: secrets "user-serving-cert-000" not found 2025-12-12T16:18:11.018849293+00:00 stderr F I1212 16:18:11.018753 1 copy.go:24] Failed to get secret openshift-kube-apiserver/user-serving-cert-001: secrets "user-serving-cert-001" not found 2025-12-12T16:18:11.217591146+00:00 stderr F I1212 16:18:11.217532 1 copy.go:24] Failed to get secret openshift-kube-apiserver/user-serving-cert-002: secrets "user-serving-cert-002" not found 2025-12-12T16:18:11.418654088+00:00 stderr F I1212 16:18:11.418590 1 copy.go:24] Failed to get secret openshift-kube-apiserver/user-serving-cert-003: secrets "user-serving-cert-003" not found 2025-12-12T16:18:11.618574540+00:00 stderr F I1212 16:18:11.618492 1 copy.go:24] Failed to get secret openshift-kube-apiserver/user-serving-cert-004: secrets "user-serving-cert-004" not found 2025-12-12T16:18:11.817956138+00:00 stderr F I1212 16:18:11.817880 1 copy.go:24] Failed to get secret openshift-kube-apiserver/user-serving-cert-005: secrets "user-serving-cert-005" not found 2025-12-12T16:18:12.019601914+00:00 stderr F I1212 16:18:12.019500 1 copy.go:24] Failed to get secret openshift-kube-apiserver/user-serving-cert-006: secrets "user-serving-cert-006" not found 2025-12-12T16:18:12.218758387+00:00 stderr F I1212 16:18:12.218641 1 copy.go:24] Failed to get secret openshift-kube-apiserver/user-serving-cert-007: secrets "user-serving-cert-007" not found 2025-12-12T16:18:12.418071065+00:00 stderr F I1212 16:18:12.417979 1 copy.go:24] Failed to get secret openshift-kube-apiserver/user-serving-cert-008: secrets "user-serving-cert-008" not found 2025-12-12T16:18:12.619056104+00:00 stderr F I1212 16:18:12.618930 1 copy.go:24] Failed to get secret openshift-kube-apiserver/user-serving-cert-009: secrets 
"user-serving-cert-009" not found 2025-12-12T16:18:12.619056104+00:00 stderr F I1212 16:18:12.618985 1 cmd.go:242] Getting config maps ... 2025-12-12T16:18:12.818250939+00:00 stderr F I1212 16:18:12.818132 1 copy.go:60] Got configMap openshift-kube-apiserver/aggregator-client-ca 2025-12-12T16:18:13.019890914+00:00 stderr F I1212 16:18:13.019043 1 copy.go:60] Got configMap openshift-kube-apiserver/check-endpoints-kubeconfig 2025-12-12T16:18:13.218066883+00:00 stderr F I1212 16:18:13.217953 1 copy.go:60] Got configMap openshift-kube-apiserver/client-ca 2025-12-12T16:18:13.419414311+00:00 stderr F I1212 16:18:13.419276 1 copy.go:60] Got configMap openshift-kube-apiserver/control-plane-node-kubeconfig 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.697747 1 copy.go:60] Got configMap openshift-kube-apiserver/trusted-ca-bundle 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.698388 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/aggregator-client" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.698746 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/aggregator-client/tls.crt" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.699126 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/aggregator-client/tls.key" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.699336 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/bound-service-account-signing-key" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.699365 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/bound-service-account-signing-key/service-account.key" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.699514 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/bound-service-account-signing-key/service-account.pub" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.699636 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/check-endpoints-client-cert-key" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.699652 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/check-endpoints-client-cert-key/tls.crt" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.699795 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/check-endpoints-client-cert-key/tls.key" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.699957 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/control-plane-node-admin-client-cert-key" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.699973 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/control-plane-node-admin-client-cert-key/tls.crt" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.700083 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/control-plane-node-admin-client-cert-key/tls.key" ... 
2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.700167 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/external-loadbalancer-serving-certkey" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.700214 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/external-loadbalancer-serving-certkey/tls.crt" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.700374 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/external-loadbalancer-serving-certkey/tls.key" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.700555 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/internal-loadbalancer-serving-certkey" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.700568 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/internal-loadbalancer-serving-certkey/tls.key" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.700747 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/internal-loadbalancer-serving-certkey/tls.crt" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.701011 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/kubelet-client" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.701049 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/kubelet-client/tls.crt" ... 2025-12-12T16:18:13.796684448+00:00 stderr F I1212 16:18:13.796456 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/kubelet-client/tls.key" ... 2025-12-12T16:18:13.796797841+00:00 stderr F I1212 16:18:13.796766 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/localhost-serving-cert-certkey" ... 2025-12-12T16:18:13.796807111+00:00 stderr F I1212 16:18:13.796795 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/localhost-serving-cert-certkey/tls.key" ... 2025-12-12T16:18:13.797009996+00:00 stderr F I1212 16:18:13.796978 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/localhost-serving-cert-certkey/tls.crt" ... 2025-12-12T16:18:13.797332314+00:00 stderr F I1212 16:18:13.797301 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/node-kubeconfigs" ... 2025-12-12T16:18:13.797332314+00:00 stderr F I1212 16:18:13.797326 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/node-kubeconfigs/lb-ext.kubeconfig" ... 2025-12-12T16:18:13.797517059+00:00 stderr F I1212 16:18:13.797486 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/node-kubeconfigs/lb-int.kubeconfig" ... 2025-12-12T16:18:13.797651112+00:00 stderr F I1212 16:18:13.797621 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/node-kubeconfigs/localhost-recovery.kubeconfig" ... 
2025-12-12T16:18:13.798319729+00:00 stderr F I1212 16:18:13.798283 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/node-kubeconfigs/localhost.kubeconfig" ... 2025-12-12T16:18:13.798473093+00:00 stderr F I1212 16:18:13.798435 1 cmd.go:261] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/service-network-serving-certkey" ... 2025-12-12T16:18:13.798473093+00:00 stderr F I1212 16:18:13.798459 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/service-network-serving-certkey/tls.crt" ... 2025-12-12T16:18:13.798664097+00:00 stderr F I1212 16:18:13.798631 1 cmd.go:639] Writing secret manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/service-network-serving-certkey/tls.key" ... 2025-12-12T16:18:13.803533468+00:00 stderr F I1212 16:18:13.803461 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/configmaps/aggregator-client-ca" ... 2025-12-12T16:18:13.803533468+00:00 stderr F I1212 16:18:13.803512 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/configmaps/aggregator-client-ca/ca-bundle.crt" ... 2025-12-12T16:18:13.805368003+00:00 stderr F I1212 16:18:13.805310 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/configmaps/check-endpoints-kubeconfig" ... 2025-12-12T16:18:13.805385933+00:00 stderr F I1212 16:18:13.805362 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/configmaps/check-endpoints-kubeconfig/kubeconfig" ... 2025-12-12T16:18:13.805615009+00:00 stderr F I1212 16:18:13.805582 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/configmaps/client-ca" ... 2025-12-12T16:18:13.805615009+00:00 stderr F I1212 16:18:13.805608 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/configmaps/client-ca/ca-bundle.crt" ... 2025-12-12T16:18:13.806123932+00:00 stderr F I1212 16:18:13.806095 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/configmaps/control-plane-node-kubeconfig" ... 2025-12-12T16:18:13.806133262+00:00 stderr F I1212 16:18:13.806126 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/configmaps/control-plane-node-kubeconfig/kubeconfig" ... 2025-12-12T16:18:13.806282996+00:00 stderr F I1212 16:18:13.806254 1 cmd.go:277] Creating directory "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/configmaps/trusted-ca-bundle" ... 2025-12-12T16:18:13.806399199+00:00 stderr F I1212 16:18:13.806373 1 cmd.go:629] Writing config file "/etc/kubernetes/static-pod-resources/kube-apiserver-certs/configmaps/trusted-ca-bundle/ca-bundle.crt" ... 2025-12-12T16:18:13.806683586+00:00 stderr F I1212 16:18:13.806647 1 cmd.go:335] Getting pod configmaps/kube-apiserver-pod-12 -n openshift-kube-apiserver 2025-12-12T16:18:13.816882748+00:00 stderr F I1212 16:18:13.816814 1 cmd.go:351] Creating directory for static pod manifest "/etc/kubernetes/manifests" ... 
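Unlike the revision-suffixed tree, the kube-apiserver-certs directory written above is shared across revisions and is later refreshed by the cert-syncer container defined in the pod manifest that follows. A hedged sketch for checking the node-kubeconfigs files the installer just wrote, again assuming oc debug node/crc works here:

  # Cert dir is not revision-suffixed; the cert-syncer keeps it up to date at runtime
  oc debug node/crc -- chroot /host \
      ls /etc/kubernetes/static-pod-resources/kube-apiserver-certs/secrets/node-kubeconfigs
  # per the log: lb-ext.kubeconfig lb-int.kubeconfig localhost-recovery.kubeconfig localhost.kubeconfig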
2025-12-12T16:18:13.816882748+00:00 stderr F I1212 16:18:13.816855 1 cmd.go:379] Writing a pod under "kube-apiserver-pod.yaml" key 2025-12-12T16:18:13.816882748+00:00 stderr P {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver","namespace":"openshift-kube-apiserver","creationTimestamp":null,"labels":{"apiserver":"true","app":"openshift-kube-apiserver","revision":"12"},"annotations":{"kubectl.kubernetes.io/default-container":"kube-apiserver","target.workload.openshift.io/management":"{\"effect\": \"PreferredDuringScheduling\"}"}},"spec":{"volumes":[{"name":"resource-dir","hostPath":{"path":"/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12"}},{"name":"cert-dir","hostPath":{"path":"/etc/kubernetes/static-pod-resources/kube-apiserver-certs"}},{"name":"audit-dir","hostPath":{"path":"/var/log/kube-apiserver"}},{"name":"tmp-dir","emptyDir":{}},{"name":"ca-bundle-dir","emptyDir":{}}],"initContainers":[{"name":"setup","image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8a46fa8feeea5d04fd602559027f8bacc97e12bbf8e33793dca08e812e1f8825","command":["/usr/bin/timeout","100","/bin/bash","-ec"],"args":["echo \"Fixing audit permissions ...\"\nchmod 0700 /var/log/kube-apiserver \u0026\u0026 touch /var/log/kube-apiserver/audit.log \u0026\u0026 chmod 0600 /var/log/kube-apiserver/*\n\nLOCK=/var/log/kube-apiserver/.lock\necho \"Acquiring exclusive lock ${LOCK} ...\"\n\n# Waiting for 15s max for old kube-apiserver's watch-termination process to exit and remove the lock.\n# Two cases:\n# 1. if kubelet does not start the old and new in parallel (i.e. works as expected), the flock will always succeed without any time.\n# 2. if kubelet does overlap old and new pods for up to 130s, the flock will wait and immediate return when the old finishes.\n#\n# NOTE: We can increase 15s for a bigger expected overlap. But a higher value means less noise about the broken kubelet behaviour, i.e. we hide a bug.\n# NOTE: Do not tweak these timings without considering the livenessProbe initialDelaySeconds\nexec {LOCK_FD}\u003e${LOCK} \u0026\u0026 flock --verbose -w 15 \"${LOCK_FD}\" || {\n echo \"$(date -Iseconds -u) kubelet did not terminate old kube-apiserver before new one\" \u003e\u003e /var/log/kube-apiserver/lock.log\n echo -n \": WARNING: kubelet did not terminate old kube-apiserver before new one.\"\n\n # We failed to acquire exclusive lock, which means there is old kube-apiserver running in system.\n # Since we utilize SO_REUSEPORT, we need to make sure the old kube-apiserver stopped listening.\n #\n # NOTE: This is a fallback for broken kubelet, if you observe this please report a bug.\n echo -n \"Waiting for port 6443 to be released due to likely bug in kubelet or CRI-O \"\n while [ -n \"$(ss -Htan state listening '( sport = 6443 or sport = 6080 )')\" ]; do\n echo -n \".\"\n sleep 1\n (( tries += 1 ))\n if [[ \"${tries}\" -gt 10 ]]; then\n echo \"Timed out waiting for port :6443 and :6080 to be released, this is likely a bug in kubelet or CRI-O\"\n exit 1\n fi\n done\n # This is to make sure the server has terminated independently from the lock.\n # After the port has been freed (requests can be pending and need 60s max).\n sleep 65\n}\n# We cannot hold the lock from the init container to the main container. We release it here. 
There is no risk, at this point we know we are safe.\nflock -u \"${LOCK_FD}\"\n"],"resources":{"requests":{"cpu":"5m","memory":"50Mi"}},"volumeMounts":[{"name":"audit-dir","mountPath":"/var/log/kube-apiserver"},{"name":"tmp-dir","mountPath":"/tmp"}],"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"privileged":true,"readOnlyRootFilesystem":true}}],"containers":[{"name":"kube-apiserver","image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8a46fa8feeea5d04fd602559027f8bacc97e12bbf8e33793dca08e812e1f8825","command":["/bin/bash","-ec"],"args":["LOCK=/var/log/kube-apiserver/.lock\n# We should be able to acquire the lock immediatelly. If not, it means the init container has not released it yet and kubelet or CRI-O started container prematurely.\nexec {LOCK_FD}\u003e${LOCK} \u0026\u0026 flock --verbose -w 30 \"${LOCK_FD}\" || {\n echo \"Failed to acquire lock for kube-apiserver. Please check setup container for details. This is likely kubelet or CRI-O bug.\"\n exit 1\n}\nif [ -f /etc/kubernetes/static-pod-certs/configmaps/trusted-ca-bundle/ca-bundle.crt ]; then\n echo \"Copying system trust bundle ...\"\n cp -f /etc/kubernetes/static-pod-certs/configmaps/trusted-ca-bundle/ca-bundle.crt /etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem\nfi\n\nexec watch-termination --termination-touch-file=/var/log/kube-apiserver/.terminating --termination-log-file=/var/log/kube-apiserver/termination.log --graceful-termination-duration=15s --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/kube-apiserver-cert-syncer-kubeconfig/kubeconfig -- hyperkube kube-apiserver --openshift-config=/etc/kubernetes/static-pod-resources/configmaps/config/config.yaml --advertise-address=${HOST_IP} -v=2 --permit-address-sharing\n"],"ports":[{"containerPort":6443}],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}},{"name":"POD_NAMESPACE","valueFrom":{"fieldRef":{"fieldPath":"metadata.namespace"}}},{"name":"STATIC_POD_VERSION","value":"12"},{"name":"HOST_IP","valueFrom":{"fieldRef":{"fieldPath":"status.hostIP"}}},{"name":"GOGC","value":"100"}],"resources":{"requests":{"cpu":"265m","memory":"1Gi"}},"volumeMounts":[{"name":"resource-dir","mountPath":"/etc/kubernetes/static-pod-resources"},{"name":"cert-dir","mountPath":"/etc/kubernetes/static-pod-certs"},{"name":"audit-dir","mountPath":"/var/log/kube-apiserver"},{"name":"tmp-dir","mountPath":"/tmp"},{"name":"ca-bundle-dir","mountPath":"/etc/pki/ca-trust/extracted/pem"}],"livenessProbe":{"httpGet":{"path":"livez?exclude=etcd","port":6443,"scheme":"HTTPS"},"timeoutSeconds":10,"periodSeconds":10,"successThreshold":1,"failureThreshold":3},"readinessProbe":{"httpGet":{"path":"readyz","port":6443,"scheme":"HTTPS"},"timeoutSeconds":10,"periodSeconds":5,"successThreshold":1,"failureThreshold":3},"startupProbe":{"httpGet":{"path":"livez","port":6443,"scheme":"HTTPS"},"timeoutSeconds":10,"periodSeconds":5,"successThreshold":1,"failureThreshold":30},"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"privileged":true,"readOnlyRootFilesystem":true}},{"name":"kube-apiserver-cert-syncer","image":"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:68c07ee2fb6450c7b3b35bfdfc158dc475aaa0bcf9fba28b5e310d7e03355c04","command":["cluster-kube-apiserver-operator","cert-syncer"],"args":["--kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/kube-apiserver-cert-syncer-kubeconfig/kubeconfig","--namespace=$(POD_NAMESPACE)","--destinat
ion-dir=/etc/kubernetes/static-pod-certs"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}},{"name":"POD_NAMESPACE","valueFrom":{"fieldRef":{"fieldPath":"metadata.namespace"}}}],"resources":{"requests":{"cpu":"5m","memory":"50Mi"}},"volumeMounts":[{"name":"resource-dir","mountPath":"/etc/kubernetes/static-pod-resources"},{"name":"cert-dir","mountPath":"/etc/kubernetes/static-pod-certs"},{"name":"tmp-dir","mountPath":"/tmp"}],"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"readOnlyRootFilesystem":true}},{"name":"kube-apiserver-cert-regeneration-controller","image":"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:68c07ee2fb6450c7b3b35bfdfc158dc475aaa0bcf9fba28b5e310d7e03355c04","command":["cluster-kube-apiserver-operator","cert-regeneration-controller"],"args":["--kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/kube-apiserver-cert-syncer-kubeconfig/kubeconfig","--namespace=$(POD_NAMESPACE)","-v=2"],"env":[{"name":"POD_NAMESPACE","valueFrom":{"fieldRef":{"fieldPath":"metadata.namespace"}}},{"name":"OPERATOR_IMAGE_VERSION","value":"4.20.1"}],"resources":{"requests":{"cpu":"5m","memory":"50Mi"}},"volumeMounts":[{"name":"resource-dir"," 2025-12-12T16:18:13.816920509+00:00 stderr F mountPath":"/etc/kubernetes/static-pod-resources"},{"name":"tmp-dir","mountPath":"/tmp"}],"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"readOnlyRootFilesystem":true}},{"name":"kube-apiserver-insecure-readyz","image":"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:68c07ee2fb6450c7b3b35bfdfc158dc475aaa0bcf9fba28b5e310d7e03355c04","command":["cluster-kube-apiserver-operator","insecure-readyz"],"args":["--insecure-port=6080","--delegate-url=https://localhost:6443/readyz"],"ports":[{"containerPort":6080}],"resources":{"requests":{"cpu":"5m","memory":"50Mi"}},"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"readOnlyRootFilesystem":true}},{"name":"kube-apiserver-check-endpoints","image":"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:68c07ee2fb6450c7b3b35bfdfc158dc475aaa0bcf9fba28b5e310d7e03355c04","command":["cluster-kube-apiserver-operator","check-endpoints"],"args":["--kubeconfig","/etc/kubernetes/static-pod-certs/configmaps/check-endpoints-kubeconfig/kubeconfig","--listen","0.0.0.0:17697","--namespace","$(POD_NAMESPACE)","--v","2"],"ports":[{"name":"check-endpoints","hostPort":17697,"containerPort":17697,"protocol":"TCP"}],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}},{"name":"POD_NAMESPACE","valueFrom":{"fieldRef":{"fieldPath":"metadata.namespace"}}}],"resources":{"requests":{"cpu":"10m","memory":"50Mi"}},"volumeMounts":[{"name":"resource-dir","mountPath":"/etc/kubernetes/static-pod-resources"},{"name":"cert-dir","mountPath":"/etc/kubernetes/static-pod-certs"},{"name":"tmp-dir","mountPath":"/tmp"}],"livenessProbe":{"httpGet":{"path":"healthz","port":17697,"scheme":"HTTPS"},"initialDelaySeconds":10,"timeoutSeconds":10},"readinessProbe":{"httpGet":{"path":"healthz","port":17697,"scheme":"HTTPS"},"initialDelaySeconds":10,"timeoutSeconds":10},"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"readOnlyRootFilesystem":true}}],"terminationGracePeriodSeconds":15,"hostNetwork":true,"tolerations":[{"operator":"Exists"}],"priorityClassName":"system-node-critical"},"status":{}
} 2025-12-12T16:18:13.819908232+00:00 stderr F I1212 16:18:13.819854 1 cmd.go:610] Writing pod manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/kube-apiserver-pod.yaml" ... 2025-12-12T16:18:13.820136958+00:00 stderr F I1212 16:18:13.820100 1 cmd.go:617] Removed existing static pod manifest "/etc/kubernetes/manifests/kube-apiserver-pod.yaml" ... 2025-12-12T16:18:13.820136958+00:00 stderr F I1212 16:18:13.820112 1 cmd.go:621] Writing static pod manifest "/etc/kubernetes/manifests/kube-apiserver-pod.yaml" ... 2025-12-12T16:18:13.820136958+00:00 stderr P {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver","namespace":"openshift-kube-apiserver","creationTimestamp":null,"labels":{"apiserver":"true","app":"openshift-kube-apiserver","revision":"12"},"annotations":{"kubectl.kubernetes.io/default-container":"kube-apiserver","target.workload.openshift.io/management":"{\"effect\": \"PreferredDuringScheduling\"}"}},"spec":{"volumes":[{"name":"resource-dir","hostPath":{"path":"/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12"}},{"name":"cert-dir","hostPath":{"path":"/etc/kubernetes/static-pod-resources/kube-apiserver-certs"}},{"name":"audit-dir","hostPath":{"path":"/var/log/kube-apiserver"}},{"name":"tmp-dir","emptyDir":{}},{"name":"ca-bundle-dir","emptyDir":{}}],"initContainers":[{"name":"setup","image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8a46fa8feeea5d04fd602559027f8bacc97e12bbf8e33793dca08e812e1f8825","command":["/usr/bin/timeout","100","/bin/bash","-ec"],"args":["echo \"Fixing audit permissions ...\"\nchmod 0700 /var/log/kube-apiserver \u0026\u0026 touch /var/log/kube-apiserver/audit.log \u0026\u0026 chmod 0600 /var/log/kube-apiserver/*\n\nLOCK=/var/log/kube-apiserver/.lock\necho \"Acquiring exclusive lock ${LOCK} ...\"\n\n# Waiting for 15s max for old kube-apiserver's watch-termination process to exit and remove the lock.\n# Two cases:\n# 1. if kubelet does not start the old and new in parallel (i.e. works as expected), the flock will always succeed without any time.\n# 2. if kubelet does overlap old and new pods for up to 130s, the flock will wait and immediate return when the old finishes.\n#\n# NOTE: We can increase 15s for a bigger expected overlap. But a higher value means less noise about the broken kubelet behaviour, i.e. 
we hide a bug.\n# NOTE: Do not tweak these timings without considering the livenessProbe initialDelaySeconds\nexec {LOCK_FD}\u003e${LOCK} \u0026\u0026 flock --verbose -w 15 \"${LOCK_FD}\" || {\n echo \"$(date -Iseconds -u) kubelet did not terminate old kube-apiserver before new one\" \u003e\u003e /var/log/kube-apiserver/lock.log\n echo -n \": WARNING: kubelet did not terminate old kube-apiserver before new one.\"\n\n # We failed to acquire exclusive lock, which means there is old kube-apiserver running in system.\n # Since we utilize SO_REUSEPORT, we need to make sure the old kube-apiserver stopped listening.\n #\n # NOTE: This is a fallback for broken kubelet, if you observe this please report a bug.\n echo -n \"Waiting for port 6443 to be released due to likely bug in kubelet or CRI-O \"\n while [ -n \"$(ss -Htan state listening '( sport = 6443 or sport = 6080 )')\" ]; do\n echo -n \".\"\n sleep 1\n (( tries += 1 ))\n if [[ \"${tries}\" -gt 10 ]]; then\n echo \"Timed out waiting for port :6443 and :6080 to be released, this is likely a bug in kubelet or CRI-O\"\n exit 1\n fi\n done\n # This is to make sure the server has terminated independently from the lock.\n # After the port has been freed (requests can be pending and need 60s max).\n sleep 65\n}\n# We cannot hold the lock from the init container to the main container. We release it here. There is no risk, at this point we know we are safe.\nflock -u \"${LOCK_FD}\"\n"],"resources":{"requests":{"cpu":"5m","memory":"50Mi"}},"volumeMounts":[{"name":"audit-dir","mountPath":"/var/log/kube-apiserver"},{"name":"tmp-dir","mountPath":"/tmp"}],"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"privileged":true,"readOnlyRootFilesystem":true}}],"containers":[{"name":"kube-apiserver","image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8a46fa8feeea5d04fd602559027f8bacc97e12bbf8e33793dca08e812e1f8825","command":["/bin/bash","-ec"],"args":["LOCK=/var/log/kube-apiserver/.lock\n# We should be able to acquire the lock immediatelly. If not, it means the init container has not released it yet and kubelet or CRI-O started container prematurely.\nexec {LOCK_FD}\u003e${LOCK} \u0026\u0026 flock --verbose -w 30 \"${LOCK_FD}\" || {\n echo \"Failed to acquire lock for kube-apiserver. Please check setup container for details. 
This is likely kubelet or CRI-O bug.\"\n exit 1\n}\nif [ -f /etc/kubernetes/static-pod-certs/configmaps/trusted-ca-bundle/ca-bundle.crt ]; then\n echo \"Copying system trust bundle ...\"\n cp -f /etc/kubernetes/static-pod-certs/configmaps/trusted-ca-bundle/ca-bundle.crt /etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem\nfi\n\nexec watch-termination --termination-touch-file=/var/log/kube-apiserver/.terminating --termination-log-file=/var/log/kube-apiserver/termination.log --graceful-termination-duration=15s --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/kube-apiserver-cert-syncer-kubeconfig/kubeconfig -- hyperkube kube-apiserver --openshift-config=/etc/kubernetes/static-pod-resources/configmaps/config/config.yaml --advertise-address=${HOST_IP} -v=2 --permit-address-sharing\n"],"ports":[{"containerPort":6443}],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}},{"name":"POD_NAMESPACE","valueFrom":{"fieldRef":{"fieldPath":"metadata.namespace"}}},{"name":"STATIC_POD_VERSION","value":"12"},{"name":"HOST_IP","valueFrom":{"fieldRef":{"fieldPath":"status.hostIP"}}},{"name":"GOGC","value":"100"}],"resources":{"requests":{"cpu":"265m","memory":"1Gi"}},"volumeMounts":[{"name":"resource-dir","mountPath":"/etc/kubernetes/static-pod-resources"},{"name":"cert-dir","mountPath":"/etc/kubernetes/static-pod-certs"},{"name":"audit-dir","mountPath":"/var/log/kube-apiserver"},{"name":"tmp-dir","mountPath":"/tmp"},{"name":"ca-bundle-dir","mountPath":"/etc/pki/ca-trust/extracted/pem"}],"livenessProbe":{"httpGet":{"path":"livez?exclude=etcd","port":6443,"scheme":"HTTPS"},"timeoutSeconds":10,"periodSeconds":10,"successThreshold":1,"failureThreshold":3},"readinessProbe":{"httpGet":{"path":"readyz","port":6443,"scheme":"HTTPS"},"timeoutSeconds":10,"periodSeconds":5,"successThreshold":1,"failureThreshold":3},"startupProbe":{"httpGet":{"path":"livez","port":6443,"scheme":"HTTPS"},"timeoutSeconds":10,"periodSeconds":5,"successThreshold":1,"failureThreshold":30},"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"privileged":true,"readOnlyRootFilesystem":true}},{"name":"kube-apiserver-cert-syncer","image":"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:68c07ee2fb6450c7b3b35bfdfc158dc475aaa0bcf9fba28b5e310d7e03355c04","command":["cluster-kube-apiserver-operator","cert-syncer"],"args":["--kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/kube-apiserver-cert-syncer-kubeconfig/kubeconfig","--namespace=$(POD_NAMESPACE)","--destination-dir=/etc/kubernetes/static-pod-certs"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}},{"name":"POD_NAMESPACE","valueFrom":{"fieldRef":{"fieldPath":"metadata.namespace"}}}],"resources":{"requests":{"cpu":"5m","memory":"50Mi"}},"volumeMounts":[{"name":"resource-dir","mountPath":"/etc/kubernetes/static-pod-resources"},{"name":"cert-dir","mountPath":"/etc/kubernetes/static-pod-certs"},{"name":"tmp-dir","mountPath":"/tmp"}],"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"readOnlyRootFilesystem":true}},{"name":"kube-apiserver-cert-regeneration-controller","image":"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:68c07ee2fb6450c7b3b35bfdfc158dc475aaa0bcf9fba28b5e310d7e03355c04","command":["cluster-kube-apiserver-operator","cert-regeneration-controller"],"args":["--kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/kube-apiserver-cert-syncer-kubeconfig/kubeconfig",
"--namespace=$(POD_NAMESPACE)","-v=2"],"env":[{"name":"POD_NAMESPACE","valueFrom":{"fieldRef":{"fieldPath":"metadata.namespace"}}},{"name":"OPERATOR_IMAGE_VERSION","value":"4.20.1"}],"resources":{"requests":{"cpu":"5m","memory 2025-12-12T16:18:13.820155439+00:00 stderr F ":"50Mi"}},"volumeMounts":[{"name":"resource-dir","mountPath":"/etc/kubernetes/static-pod-resources"},{"name":"tmp-dir","mountPath":"/tmp"}],"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"readOnlyRootFilesystem":true}},{"name":"kube-apiserver-insecure-readyz","image":"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:68c07ee2fb6450c7b3b35bfdfc158dc475aaa0bcf9fba28b5e310d7e03355c04","command":["cluster-kube-apiserver-operator","insecure-readyz"],"args":["--insecure-port=6080","--delegate-url=https://localhost:6443/readyz"],"ports":[{"containerPort":6080}],"resources":{"requests":{"cpu":"5m","memory":"50Mi"}},"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"readOnlyRootFilesystem":true}},{"name":"kube-apiserver-check-endpoints","image":"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:68c07ee2fb6450c7b3b35bfdfc158dc475aaa0bcf9fba28b5e310d7e03355c04","command":["cluster-kube-apiserver-operator","check-endpoints"],"args":["--kubeconfig","/etc/kubernetes/static-pod-certs/configmaps/check-endpoints-kubeconfig/kubeconfig","--listen","0.0.0.0:17697","--namespace","$(POD_NAMESPACE)","--v","2"],"ports":[{"name":"check-endpoints","hostPort":17697,"containerPort":17697,"protocol":"TCP"}],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}},{"name":"POD_NAMESPACE","valueFrom":{"fieldRef":{"fieldPath":"metadata.namespace"}}}],"resources":{"requests":{"cpu":"10m","memory":"50Mi"}},"volumeMounts":[{"name":"resource-dir","mountPath":"/etc/kubernetes/static-pod-resources"},{"name":"cert-dir","mountPath":"/etc/kubernetes/static-pod-certs"},{"name":"tmp-dir","mountPath":"/tmp"}],"livenessProbe":{"httpGet":{"path":"healthz","port":17697,"scheme":"HTTPS"},"initialDelaySeconds":10,"timeoutSeconds":10},"readinessProbe":{"httpGet":{"path":"healthz","port":17697,"scheme":"HTTPS"},"initialDelaySeconds":10,"timeoutSeconds":10},"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"readOnlyRootFilesystem":true}}],"terminationGracePeriodSeconds":15,"hostNetwork":true,"tolerations":[{"operator":"Exists"}],"priorityClassName":"system-node-critical"},"status":{}} 2025-12-12T16:18:13.821245036+00:00 stderr F I1212 16:18:13.820608 1 cmd.go:379] Writing a pod under "kube-apiserver-startup-monitor-pod.yaml" key 2025-12-12T16:18:13.821245036+00:00 stderr F 
{"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-startup-monitor","namespace":"openshift-kube-apiserver","creationTimestamp":null,"labels":{"revision":"12"}},"spec":{"volumes":[{"name":"resource-dir","hostPath":{"path":"/etc/kubernetes/static-pod-resources"}},{"name":"manifests","hostPath":{"path":"/etc/kubernetes/manifests"}},{"name":"pod-resource-dir","hostPath":{"path":"/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12"}},{"name":"var-lock","hostPath":{"path":"/var/lock"}},{"name":"var-log","hostPath":{"path":"/var/log/kube-apiserver"}}],"containers":[{"name":"startup-monitor","image":"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:68c07ee2fb6450c7b3b35bfdfc158dc475aaa0bcf9fba28b5e310d7e03355c04","command":["cluster-kube-apiserver-operator","startup-monitor"],"args":["-v=2","--fallback-timeout-duration=300s","--target-name=kube-apiserver","--manifests-dir=/etc/kubernetes/manifests","--resource-dir=/etc/kubernetes/static-pod-resources","--installer-lock-file=/var/lock/kube-apiserver-installer.lock","--revision=12","--node-name=crc","--kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/kube-apiserver-cert-syncer-kubeconfig/kubeconfig","--log-file-path=/var/log/kube-apiserver/startup.log"],"resources":{"requests":{"cpu":"5m","memory":"50Mi"}},"volumeMounts":[{"name":"manifests","mountPath":"/etc/kubernetes/manifests"},{"name":"resource-dir","mountPath":"/etc/kubernetes/static-pod-resources"},{"name":"pod-resource-dir","readOnly":true,"mountPath":"/etc/kubernetes/static-pod-resources/secrets","subPath":"secrets"},{"name":"pod-resource-dir","readOnly":true,"mountPath":"/etc/kubernetes/static-pod-resources/configmaps","subPath":"configmaps"},{"name":"var-lock","mountPath":"/var/lock"},{"name":"var-log","mountPath":"/var/log/kube-apiserver"}],"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"privileged":true}}],"terminationGracePeriodSeconds":5,"hostNetwork":true,"tolerations":[{"operator":"Exists"}],"priorityClassName":"system-node-critical"},"status":{}} 2025-12-12T16:18:13.821451461+00:00 stderr F I1212 16:18:13.821416 1 cmd.go:610] Writing pod manifest "/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12/kube-apiserver-startup-monitor-pod.yaml" ... 2025-12-12T16:18:13.821876621+00:00 stderr F I1212 16:18:13.821843 1 cmd.go:621] Writing static pod manifest "/etc/kubernetes/manifests/kube-apiserver-startup-monitor-pod.yaml" ... 
2025-12-12T16:18:13.821876621+00:00 stderr F {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-startup-monitor","namespace":"openshift-kube-apiserver","creationTimestamp":null,"labels":{"revision":"12"}},"spec":{"volumes":[{"name":"resource-dir","hostPath":{"path":"/etc/kubernetes/static-pod-resources"}},{"name":"manifests","hostPath":{"path":"/etc/kubernetes/manifests"}},{"name":"pod-resource-dir","hostPath":{"path":"/etc/kubernetes/static-pod-resources/kube-apiserver-pod-12"}},{"name":"var-lock","hostPath":{"path":"/var/lock"}},{"name":"var-log","hostPath":{"path":"/var/log/kube-apiserver"}}],"containers":[{"name":"startup-monitor","image":"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:68c07ee2fb6450c7b3b35bfdfc158dc475aaa0bcf9fba28b5e310d7e03355c04","command":["cluster-kube-apiserver-operator","startup-monitor"],"args":["-v=2","--fallback-timeout-duration=300s","--target-name=kube-apiserver","--manifests-dir=/etc/kubernetes/manifests","--resource-dir=/etc/kubernetes/static-pod-resources","--installer-lock-file=/var/lock/kube-apiserver-installer.lock","--revision=12","--node-name=crc","--kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/kube-apiserver-cert-syncer-kubeconfig/kubeconfig","--log-file-path=/var/log/kube-apiserver/startup.log"],"resources":{"requests":{"cpu":"5m","memory":"50Mi"}},"volumeMounts":[{"name":"manifests","mountPath":"/etc/kubernetes/manifests"},{"name":"resource-dir","mountPath":"/etc/kubernetes/static-pod-resources"},{"name":"pod-resource-dir","readOnly":true,"mountPath":"/etc/kubernetes/static-pod-resources/secrets","subPath":"secrets"},{"name":"pod-resource-dir","readOnly":true,"mountPath":"/etc/kubernetes/static-pod-resources/configmaps","subPath":"configmaps"},{"name":"var-lock","mountPath":"/var/lock"},{"name":"var-log","mountPath":"/var/log/kube-apiserver"}],"terminationMessagePolicy":"FallbackToLogsOnError","imagePullPolicy":"IfNotPresent","securityContext":{"privileged":true}}],"terminationGracePeriodSeconds":5,"hostNetwork":true,"tolerations":[{"operator":"Exists"}],"priorityClassName":"system-node-critical"},"status":{}} 2025-12-12T16:18:14.016443371+00:00 stderr F W1212 16:18:14.016171 1 recorder.go:207] Error creating event &Event{ObjectMeta:{installer-12-crc.188084184c377e2d.d66109ff openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:installer-12-crc,UID:214aeed8-f6a2-4251-b4d0-c81fd217c7c2,APIVersion:v1,ResourceVersion:,FieldPath:,},Reason:StaticPodInstallerCompleted,Message:Successfully installed revision 12,Source:EventSource{Component:static-pod-installer,Host:,},FirstTimestamp:2025-12-12 16:18:13.822021165 +0000 UTC m=+37.823615759,LastTimestamp:2025-12-12 16:18:13.822021165 +0000 UTC m=+37.823615759,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:,ReportingInstance:,}: Post "https://10.217.4.1:443/api/v1/namespaces/openshift-kube-apiserver/events?timeout=14s": dial tcp 10.217.4.1:443: connect: connection refused ././@LongLink0000644000000000000000000000027700000000000011611 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015117043043032775 5ustar 
zuulzuul././@LongLink0000644000000000000000000000031700000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015117043062032776 5ustar zuulzuul././@LongLink0000644000000000000000000000032400000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/kube-rbac-proxy/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000644000175000017500000000227415117043043033004 0ustar zuulzuul2025-12-12T16:16:47.094948913+00:00 stderr F W1212 16:16:47.094673 1 deprecated.go:66] 2025-12-12T16:16:47.094948913+00:00 stderr F ==== Removed Flag Warning ====================== 2025-12-12T16:16:47.094948913+00:00 stderr F 2025-12-12T16:16:47.094948913+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more. 2025-12-12T16:16:47.094948913+00:00 stderr F 2025-12-12T16:16:47.094948913+00:00 stderr F =============================================== 2025-12-12T16:16:47.094948913+00:00 stderr F 2025-12-12T16:16:47.094948913+00:00 stderr F I1212 16:16:47.094863 1 kube-rbac-proxy.go:532] Reading config file: /etc/kube-rbac-proxy/config-file.yaml 2025-12-12T16:16:47.095678821+00:00 stderr F I1212 16:16:47.095623 1 kube-rbac-proxy.go:235] Valid token audiences: 2025-12-12T16:16:47.097137916+00:00 stderr F I1212 16:16:47.096849 1 kube-rbac-proxy.go:349] Reading certificate files 2025-12-12T16:16:47.106876594+00:00 stderr F I1212 16:16:47.106764 1 kube-rbac-proxy.go:397] Starting TCP socket on 0.0.0.0:9001 2025-12-12T16:16:47.108335230+00:00 stderr F I1212 16:16:47.108294 1 kube-rbac-proxy.go:404] Listening securely on 0.0.0.0:9001 ././@LongLink0000644000000000000000000000032700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/machine-config-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015117043063032777 5ustar zuulzuul././@LongLink0000644000000000000000000000033400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/machine-config-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000644000175000017500000254516515117043063033023 0ustar zuulzuul2025-12-12T16:16:45.284337278+00:00 stderr F I1212 16:16:45.283349 1 start.go:52] Version: 4.20.1 (Raw: 89b561f0, Hash: f587a1bfbaba518cc1d49ad6300e29eeb9c38cec) 2025-12-12T16:16:45.287227439+00:00 stderr F I1212 16:16:45.287104 1 metrics.go:92] Registering Prometheus metrics 2025-12-12T16:16:45.287910076+00:00 stderr F I1212 16:16:45.287691 1 metrics.go:99] Starting metrics listener on 127.0.0.1:8797 2025-12-12T16:16:45.289383312+00:00 stderr F I1212 16:16:45.289294 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of 
clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 2025-12-12T16:16:45.447046531+00:00 stderr F I1212 16:16:45.445149 1 leaderelection.go:257] attempting to acquire leader lease openshift-machine-config-operator/machine-config... 2025-12-12T16:16:45.470395031+00:00 stderr F I1212 16:16:45.468412 1 leaderelection.go:271] successfully acquired lease openshift-machine-config-operator/machine-config 2025-12-12T16:16:45.533653115+00:00 stderr F I1212 16:16:45.532738 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:45.600686392+00:00 stderr F I1212 16:16:45.599510 1 featuregates.go:112] FeatureGates initialized: enabled=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks], disabled=[AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:16:45.600686392+00:00 stderr F I1212 16:16:45.600012 1 event.go:377] Event(v1.ObjectReference{Kind:"Node", Namespace:"openshift-machine-config-operator", Name:"crc", UID:"23216ff3-032e-49af-af7e-1d23d5907b59", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", 
"AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:45.871468092+00:00 stderr F I1212 16:16:45.870382 1 operator.go:415] Change observed to kube-apiserver-server-ca 2025-12-12T16:16:45.896334329+00:00 stderr F I1212 16:16:45.896277 1 operator.go:395] Starting MachineConfigOperator 2025-12-12T16:16:45.901093275+00:00 stderr F E1212 16:16:45.898454 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:45.905630266+00:00 stderr F E1212 16:16:45.904634 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:45.916280906+00:00 stderr F E1212 16:16:45.915699 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:45.939005141+00:00 stderr F E1212 16:16:45.938107 1 sync.go:528] Merging registry secrets failed with: failed to 
merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:46.001036455+00:00 stderr F E1212 16:16:45.978603 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:46.001036455+00:00 stderr F E1212 16:16:45.979095 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:46.141263939+00:00 stderr F E1212 16:16:46.139873 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:46.210097089+00:00 stderr F E1212 16:16:46.208441 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:46.467130735+00:00 stderr F E1212 16:16:46.466513 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:46.537226696+00:00 stderr F E1212 16:16:46.537058 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:47.025737643+00:00 stderr F E1212 16:16:47.024982 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:47.091146360+00:00 stderr F E1212 16:16:47.091035 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:47.276279910+00:00 stderr F E1212 16:16:47.272430 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:47.368326577+00:00 stderr F E1212 16:16:47.362571 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:47.742115553+00:00 stderr F E1212 16:16:47.732385 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:47.742115553+00:00 stderr F E1212 16:16:47.739435 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:47.742115553+00:00 stderr F E1212 16:16:47.739688 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:16:47.751479952+00:00 stderr F E1212 16:16:47.751419 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:47.761398254+00:00 stderr F E1212 16:16:47.759823 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:47.777496977+00:00 stderr F E1212 16:16:47.774698 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:47.797141006+00:00 stderr F E1212 16:16:47.796259 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:47.836464356+00:00 stderr F E1212 16:16:47.834612 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain 
JSON null 2025-12-12T16:16:47.840517475+00:00 stderr F E1212 16:16:47.837969 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:48.000511021+00:00 stderr F E1212 16:16:48.000343 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:48.182304810+00:00 stderr F E1212 16:16:48.181507 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:48.322876152+00:00 stderr F E1212 16:16:48.322806 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:48.392053711+00:00 stderr F E1212 16:16:48.390421 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:48.489044439+00:00 stderr F E1212 16:16:48.488626 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:48.671957415+00:00 stderr F E1212 16:16:48.671915 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:48.882199488+00:00 stderr F E1212 16:16:48.880500 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:48.933683885+00:00 stderr F E1212 16:16:48.933511 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.042701266+00:00 stderr F E1212 16:16:49.042158 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.178522742+00:00 stderr F E1212 16:16:49.178452 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.179733002+00:00 stderr F E1212 16:16:49.178707 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:16:49.373250845+00:00 stderr F E1212 16:16:49.371855 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.378521184+00:00 stderr F E1212 16:16:49.378469 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.390016275+00:00 stderr F E1212 16:16:49.389961 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.413325954+00:00 stderr F E1212 16:16:49.413268 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.457293867+00:00 stderr F E1212 16:16:49.456691 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.540877188+00:00 stderr F E1212 16:16:49.537951 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.563779627+00:00 stderr F 
E1212 16:16:49.563719 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.575771310+00:00 stderr F E1212 16:16:49.575718 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.643427001+00:00 stderr F E1212 16:16:49.641976 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.699944851+00:00 stderr F E1212 16:16:49.699879 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.707766002+00:00 stderr F E1212 16:16:49.707706 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.726246233+00:00 stderr F E1212 16:16:49.724339 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.824413690+00:00 stderr F E1212 16:16:49.823973 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:49.871220483+00:00 stderr F E1212 16:16:49.867487 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:50.323862484+00:00 stderr F E1212 16:16:50.323110 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:50.586242450+00:00 stderr F E1212 16:16:50.583872 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:50.586242450+00:00 stderr F E1212 16:16:50.584045 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:16:50.984312929+00:00 stderr F E1212 16:16:50.983847 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:50.990092930+00:00 stderr F E1212 16:16:50.990051 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:51.000852712+00:00 stderr F E1212 16:16:51.000788 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:51.021923157+00:00 stderr F E1212 16:16:51.021853 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:51.063105542+00:00 stderr F E1212 16:16:51.063017 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:51.144314755+00:00 stderr F E1212 16:16:51.144223 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:51.305556491+00:00 stderr F E1212 16:16:51.305508 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:51.462575495+00:00 stderr F E1212 16:16:51.462500 1 sync.go:528] Merging registry 
secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:51.462952304+00:00 stderr F E1212 16:16:51.462932 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:51.627733937+00:00 stderr F E1212 16:16:51.627669 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:51.659948804+00:00 stderr F E1212 16:16:51.658198 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:51.780238301+00:00 stderr F E1212 16:16:51.776850 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:52.254008738+00:00 stderr F E1212 16:16:52.251336 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:52.777453257+00:00 stderr F E1212 16:16:52.775975 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:52.796223435+00:00 stderr F E1212 16:16:52.794475 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:53.300452625+00:00 stderr F E1212 16:16:53.299914 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:53.300452625+00:00 stderr F E1212 16:16:53.300081 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:16:53.466626322+00:00 stderr F E1212 16:16:53.465598 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:53.472689830+00:00 stderr F E1212 16:16:53.472631 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:53.481889174+00:00 stderr F E1212 16:16:53.481613 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:53.484094618+00:00 stderr F E1212 16:16:53.484063 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:53.533319610+00:00 stderr F E1212 16:16:53.529518 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:53.611339015+00:00 stderr F E1212 16:16:53.610859 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:53.780243659+00:00 stderr F E1212 16:16:53.773355 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:54.058280547+00:00 stderr F E1212 16:16:54.057910 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:54.098236822+00:00 stderr F E1212 16:16:54.095413 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull 
secret: do**********ig bytes contain JSON null 2025-12-12T16:16:54.214890430+00:00 stderr F E1212 16:16:54.212766 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:54.955944753+00:00 stderr F E1212 16:16:54.954217 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:54.992816873+00:00 stderr F E1212 16:16:54.989152 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:55.045214262+00:00 stderr F E1212 16:16:55.045113 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:55.070255194+00:00 stderr F E1212 16:16:55.067272 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:55.379612236+00:00 stderr F E1212 16:16:55.377495 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:55.834081962+00:00 stderr F E1212 16:16:55.832505 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:55.834081962+00:00 stderr F E1212 16:16:55.833321 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:16:55.856264143+00:00 stderr F E1212 16:16:55.854085 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:55.863233824+00:00 stderr F E1212 16:16:55.863007 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:55.879224704+00:00 stderr F E1212 16:16:55.876151 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:55.879224704+00:00 stderr F E1212 16:16:55.878820 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:55.907938055+00:00 stderr F E1212 16:16:55.907872 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:55.913457250+00:00 stderr F E1212 16:16:55.912405 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:55.992830798+00:00 stderr F E1212 16:16:55.992032 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.255609983+00:00 stderr F E1212 16:16:56.255538 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.315636109+00:00 stderr F E1212 16:16:56.313657 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.396132034+00:00 stderr F E1212 16:16:56.390382 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 
2025-12-12T16:16:56.592761104+00:00 stderr F E1212 16:16:56.592643 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.601515587+00:00 stderr F E1212 16:16:56.601428 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.612230119+00:00 stderr F E1212 16:16:56.612135 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.625073812+00:00 stderr F E1212 16:16:56.624973 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.634273647+00:00 stderr F E1212 16:16:56.634103 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.696241120+00:00 stderr F E1212 16:16:56.692279 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.696241120+00:00 stderr F E1212 16:16:56.692492 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:16:56.707069864+00:00 stderr F E1212 16:16:56.706802 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.716388692+00:00 stderr F E1212 16:16:56.715650 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.718892463+00:00 stderr F E1212 16:16:56.718536 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.722551992+00:00 stderr F E1212 16:16:56.722511 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.728873457+00:00 stderr F E1212 16:16:56.728816 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.742332485+00:00 stderr F E1212 16:16:56.741951 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.784233848+00:00 stderr F E1212 16:16:56.781680 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.803569040+00:00 stderr F E1212 16:16:56.802931 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.810133391+00:00 stderr F E1212 16:16:56.810079 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.812999281+00:00 stderr F E1212 16:16:56.812565 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.824713447+00:00 stderr F E1212 16:16:56.822980 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.851331316+00:00 stderr F E1212 
16:16:56.850330 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.882391855+00:00 stderr F E1212 16:16:56.882197 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.903036629+00:00 stderr F E1212 16:16:56.902698 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.914367835+00:00 stderr F E1212 16:16:56.914289 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.934806674+00:00 stderr F E1212 16:16:56.934714 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:56.935059601+00:00 stderr F E1212 16:16:56.934949 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:16:57.894763681+00:00 stderr F E1212 16:16:57.894316 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:57.900752168+00:00 stderr F E1212 16:16:57.900684 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:57.911920070+00:00 stderr F E1212 16:16:57.911858 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:57.932919513+00:00 stderr F E1212 16:16:57.932856 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:57.974387005+00:00 stderr F E1212 16:16:57.973849 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:58.055604058+00:00 stderr F E1212 16:16:58.055546 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:58.216801844+00:00 stderr F E1212 16:16:58.216734 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:58.250968508+00:00 stderr F E1212 16:16:58.250906 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:58.510002742+00:00 stderr F E1212 16:16:58.509606 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:58.538734654+00:00 stderr F E1212 16:16:58.538561 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:58.549148948+00:00 stderr F E1212 16:16:58.549066 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:59.517272614+00:00 stderr F E1212 16:16:59.513670 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:16:59.729575887+00:00 stderr F E1212 16:16:59.729505 1 sync.go:528] Merging registry 
secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:00.749782314+00:00 stderr F E1212 16:17:00.748553 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:01.103062429+00:00 stderr F E1212 16:17:01.101822 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:02.217789295+00:00 stderr F E1212 16:17:02.217719 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:02.219275731+00:00 stderr F E1212 16:17:02.218040 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:17:02.324222703+00:00 stderr F E1212 16:17:02.324131 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:02.330404604+00:00 stderr F E1212 16:17:02.330350 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:02.341402723+00:00 stderr F E1212 16:17:02.341362 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:02.362511788+00:00 stderr F E1212 16:17:02.362441 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:02.403622242+00:00 stderr F E1212 16:17:02.403546 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:02.426877720+00:00 stderr F E1212 16:17:02.426817 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:02.484850235+00:00 stderr F E1212 16:17:02.484798 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:02.805952515+00:00 stderr F E1212 16:17:02.805899 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:02.975459863+00:00 stderr F E1212 16:17:02.975382 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.027610536+00:00 stderr F E1212 16:17:03.026107 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.086009442+00:00 stderr F E1212 16:17:03.085956 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.250645302+00:00 stderr F E1212 16:17:03.250572 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.448066241+00:00 stderr F E1212 16:17:03.448004 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.676982269+00:00 stderr F E1212 16:17:03.673928 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull 
secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.677367719+00:00 stderr F E1212 16:17:03.677344 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.696131717+00:00 stderr F E1212 16:17:03.696063 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.696445895+00:00 stderr F E1212 16:17:03.696427 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:17:03.698079144+00:00 stderr F E1212 16:17:03.698056 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.707701169+00:00 stderr F E1212 16:17:03.703961 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.714946546+00:00 stderr F E1212 16:17:03.714866 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.735909898+00:00 stderr F E1212 16:17:03.735816 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.777030142+00:00 stderr F E1212 16:17:03.776933 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.859331351+00:00 stderr F E1212 16:17:03.858247 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:03.892512471+00:00 stderr F E1212 16:17:03.892418 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:04.020193239+00:00 stderr F E1212 16:17:04.020094 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:04.655269204+00:00 stderr F E1212 16:17:04.653462 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:04.663904945+00:00 stderr F E1212 16:17:04.661682 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:04.681625027+00:00 stderr F E1212 16:17:04.681577 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:04.724033883+00:00 stderr F E1212 16:17:04.723963 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:04.769937043+00:00 stderr F E1212 16:17:04.769889 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:04.793110229+00:00 stderr F E1212 16:17:04.792202 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:05.451319109+00:00 stderr F E1212 16:17:05.450614 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 
2025-12-12T16:17:06.352321926+00:00 stderr F E1212 16:17:06.352228 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:06.352452410+00:00 stderr F E1212 16:17:06.352411 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:17:06.378514756+00:00 stderr F E1212 16:17:06.378466 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:06.385252460+00:00 stderr F E1212 16:17:06.384790 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:06.395564102+00:00 stderr F E1212 16:17:06.395527 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:06.416531774+00:00 stderr F E1212 16:17:06.416470 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:06.434507303+00:00 stderr F E1212 16:17:06.433591 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:06.458468418+00:00 stderr F E1212 16:17:06.458370 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:06.620372671+00:00 stderr F E1212 16:17:06.620313 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:06.720119086+00:00 stderr F E1212 16:17:06.719950 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:06.857243504+00:00 stderr F E1212 16:17:06.854503 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:06.862392119+00:00 stderr F E1212 16:17:06.860939 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:06.941845889+00:00 stderr F E1212 16:17:06.941780 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:08.252060727+00:00 stderr F E1212 16:17:08.249370 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:09.650212392+00:00 stderr F E1212 16:17:09.650127 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:09.808028075+00:00 stderr F E1212 16:17:09.807920 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:11.049833782+00:00 stderr F E1212 16:17:11.049228 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:11.866233854+00:00 stderr F E1212 16:17:11.865214 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:11.866233854+00:00 stderr F E1212 
16:17:11.865393 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:17:12.062812523+00:00 stderr F E1212 16:17:12.062748 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:12.068677127+00:00 stderr F E1212 16:17:12.068618 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:12.079951862+00:00 stderr F E1212 16:17:12.079866 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:12.100895663+00:00 stderr F E1212 16:17:12.100844 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:12.142462238+00:00 stderr F E1212 16:17:12.142377 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:12.223897886+00:00 stderr F E1212 16:17:12.223806 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:12.385238315+00:00 stderr F E1212 16:17:12.385151 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:12.448125650+00:00 stderr F E1212 16:17:12.448044 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:12.706430706+00:00 stderr F E1212 16:17:12.706355 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:13.852781983+00:00 stderr F E1212 16:17:13.851087 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:13.987396280+00:00 stderr F E1212 16:17:13.987302 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:15.251632404+00:00 stderr F E1212 16:17:15.250490 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:15.525929331+00:00 stderr F E1212 16:17:15.525573 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:16.127560839+00:00 stderr F E1212 16:17:16.127486 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:16.651343846+00:00 stderr F E1212 16:17:16.649978 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:17.416667733+00:00 stderr F E1212 16:17:17.416156 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:17.416667733+00:00 stderr F E1212 16:17:17.416359 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:17:19.108853393+00:00 stderr F E1212 16:17:19.108392 1 sync.go:528] Merging registry secrets failed 
with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:19.115439972+00:00 stderr F E1212 16:17:19.115375 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:19.126626313+00:00 stderr F E1212 16:17:19.126556 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:19.151777985+00:00 stderr F E1212 16:17:19.151677 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:19.193056699+00:00 stderr F E1212 16:17:19.192987 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:19.274873966+00:00 stderr F E1212 16:17:19.274723 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:19.436683709+00:00 stderr F E1212 16:17:19.436576 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:19.758640966+00:00 stderr F E1212 16:17:19.758112 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:20.402023225+00:00 stderr F E1212 16:17:20.401909 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:21.683753548+00:00 stderr F E1212 16:17:21.683631 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:23.251709424+00:00 stderr F E1212 16:17:23.251628 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:24.245261760+00:00 stderr F E1212 16:17:24.245158 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:26.335349776+00:00 stderr F E1212 16:17:26.334618 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:26.352430036+00:00 stderr F E1212 16:17:26.352369 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:26.423846345+00:00 stderr F E1212 16:17:26.423659 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:30.221500443+00:00 stderr F E1212 16:17:30.220712 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:30.221677248+00:00 stderr F E1212 16:17:30.221634 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:17:30.930765882+00:00 stderr F E1212 16:17:30.927726 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:30.934455108+00:00 stderr F E1212 16:17:30.934394 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: 
do**********ig bytes contain JSON null 2025-12-12T16:17:30.946248487+00:00 stderr F E1212 16:17:30.946057 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:30.967832186+00:00 stderr F E1212 16:17:30.967307 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:31.009045448+00:00 stderr F E1212 16:17:31.008712 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:31.090115274+00:00 stderr F E1212 16:17:31.090048 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:31.251600147+00:00 stderr F E1212 16:17:31.251527 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:31.579785303+00:00 stderr F E1212 16:17:31.573760 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:32.217096499+00:00 stderr F E1212 16:17:32.216518 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:33.499949194+00:00 stderr F E1212 16:17:33.499046 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:33.672360131+00:00 stderr F E1212 16:17:33.670593 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:36.061871328+00:00 stderr F E1212 16:17:36.061270 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:38.920569717+00:00 stderr F E1212 16:17:38.919859 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:44.878045732+00:00 stderr F E1212 16:17:44.875982 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:45.816546914+00:00 stderr F E1212 16:17:45.815399 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:45.825289630+00:00 stderr F E1212 16:17:45.825169 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:45.826246834+00:00 stderr F E1212 16:17:45.825400 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:17:46.296535401+00:00 stderr F E1212 16:17:46.295077 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:46.306685802+00:00 stderr F E1212 16:17:46.301526 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:46.317384896+00:00 stderr F E1212 16:17:46.314127 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 
2025-12-12T16:17:46.317384896+00:00 stderr F E1212 16:17:46.314857 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:46.356544694+00:00 stderr F E1212 16:17:46.356474 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:46.382036145+00:00 stderr F E1212 16:17:46.381685 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:46.413605625+00:00 stderr F E1212 16:17:46.413250 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:46.437888965+00:00 stderr F E1212 16:17:46.437760 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:46.505387644+00:00 stderr F E1212 16:17:46.504211 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:47.079390134+00:00 stderr F E1212 16:17:47.079325 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:47.092035887+00:00 stderr F E1212 16:17:47.091971 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:47.102265460+00:00 stderr F E1212 16:17:47.101314 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:49.512671874+00:00 stderr F E1212 16:17:49.511939 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:49.642024552+00:00 stderr F E1212 16:17:49.641966 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:51.098115159+00:00 stderr F E1212 16:17:51.096768 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:52.311937988+00:00 stderr F E1212 16:17:52.311859 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:17:52.312583994+00:00 stderr F E1212 16:17:52.312039 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:18:00.829283640+00:00 stderr F E1212 16:18:00.825492 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:00.833368111+00:00 stderr F E1212 16:18:00.833290 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:00.844419554+00:00 stderr F E1212 16:18:00.844284 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:00.866672804+00:00 stderr F E1212 16:18:00.865970 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:00.908962430+00:00 stderr F E1212 
16:18:00.907841 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:00.933224490+00:00 stderr F E1212 16:18:00.933034 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:00.979737959+00:00 stderr F E1212 16:18:00.979672 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:00.989079830+00:00 stderr F E1212 16:18:00.988997 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:00.995229212+00:00 stderr F E1212 16:18:00.995137 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:01.067718084+00:00 stderr F E1212 16:18:01.066886 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:01.126481897+00:00 stderr F E1212 16:18:01.126415 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:01.630963639+00:00 stderr F E1212 16:18:01.630642 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:06.340416869+00:00 stderr F E1212 16:18:06.339630 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:06.363737596+00:00 stderr F E1212 16:18:06.361208 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:06.382494969+00:00 stderr F E1212 16:18:06.382437 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:06.398959647+00:00 stderr F E1212 16:18:06.398880 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:06.399254354+00:00 stderr F E1212 16:18:06.399222 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:18:06.470263159+00:00 stderr F E1212 16:18:06.469685 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:06.484218644+00:00 stderr F E1212 16:18:06.480039 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:06.503983383+00:00 stderr F E1212 16:18:06.503889 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:06.527639718+00:00 stderr F E1212 16:18:06.526356 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:06.567573535+00:00 stderr F E1212 16:18:06.567486 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:06.649525371+00:00 stderr F E1212 16:18:06.649447 1 sync.go:528] Merging registry 
secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:06.810793558+00:00 stderr F E1212 16:18:06.810727 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:07.132035070+00:00 stderr F E1212 16:18:07.131934 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:07.533596048+00:00 stderr F E1212 16:18:07.533530 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:07.774061333+00:00 stderr F E1212 16:18:07.773526 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:10.336450882+00:00 stderr F E1212 16:18:10.335998 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:15.458804000+00:00 stderr F E1212 16:18:15.458226 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:25.701606680+00:00 stderr F E1212 16:18:25.701069 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:18:45.701411100+00:00 stderr F E1212 16:18:45.700906 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-machine-config-operator/leases/machine-config": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:45.701986214+00:00 stderr F E1212 16:18:45.701951 1 leaderelection.go:436] error retrieving resource lock openshift-machine-config-operator/machine-config: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-machine-config-operator/leases/machine-config": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.184097373+00:00 stderr F E1212 16:18:46.183363 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:27.144911355+00:00 stderr F E1212 16:19:27.144520 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:27.423127861+00:00 stderr F E1212 16:19:27.423066 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:27.423293905+00:00 stderr F E1212 16:19:27.423257 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:19:31.427142753+00:00 stderr F E1212 16:19:31.426709 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:31.427734758+00:00 stderr F E1212 16:19:31.427717 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:31.433852152+00:00 stderr F E1212 16:19:31.433807 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:31.455035934+00:00 stderr F E1212 
16:19:31.454964 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:31.495935061+00:00 stderr F E1212 16:19:31.495890 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:31.576886033+00:00 stderr F E1212 16:19:31.576827 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:31.737949837+00:00 stderr F E1212 16:19:31.737901 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:32.059450489+00:00 stderr F E1212 16:19:32.059390 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:32.701067510+00:00 stderr F E1212 16:19:32.700991 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:33.983973921+00:00 stderr F E1212 16:19:33.983067 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:34.100582408+00:00 stderr F E1212 16:19:34.100524 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:36.399002508+00:00 stderr F E1212 16:19:36.398920 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:36.544971443+00:00 stderr F E1212 16:19:36.544836 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:39.525961369+00:00 stderr F E1212 16:19:39.525892 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:40.700010007+00:00 stderr F E1212 16:19:40.699944 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:44.280498395+00:00 stderr F E1212 16:19:44.280418 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:44.280877065+00:00 stderr F E1212 16:19:44.280571 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:19:44.282622638+00:00 stderr F E1212 16:19:44.282601 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:44.289603904+00:00 stderr F E1212 16:19:44.289437 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:44.300645071+00:00 stderr F E1212 16:19:44.300587 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:44.322299275+00:00 stderr F E1212 16:19:44.322061 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:44.363172001+00:00 stderr F E1212 16:19:44.363071 1 sync.go:528] Merging registry 
secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:44.444285218+00:00 stderr F E1212 16:19:44.444120 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:44.606514301+00:00 stderr F E1212 16:19:44.605491 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:44.927370786+00:00 stderr F E1212 16:19:44.927112 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:45.568818001+00:00 stderr F E1212 16:19:45.568739 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:46.852252046+00:00 stderr F E1212 16:19:46.850521 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:49.206980918+00:00 stderr F E1212 16:19:49.206415 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:49.416227942+00:00 stderr F E1212 16:19:49.412524 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:49.621014644+00:00 stderr F E1212 16:19:49.620925 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:52.059353724+00:00 stderr F E1212 16:19:52.059276 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:57.415788074+00:00 stderr F E1212 16:19:57.415721 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.258501833+00:00 stderr F E1212 16:19:58.257428 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.258501833+00:00 stderr F E1212 16:19:58.257710 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:19:58.259809166+00:00 stderr F E1212 16:19:58.259517 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.265503798+00:00 stderr F E1212 16:19:58.265459 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.277041418+00:00 stderr F E1212 16:19:58.276977 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.298304762+00:00 stderr F E1212 16:19:58.298236 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.339254900+00:00 stderr F E1212 16:19:58.339198 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.420443689+00:00 stderr F E1212 16:19:58.420355 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull 
secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.563353507+00:00 stderr F E1212 16:19:58.563283 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.563908211+00:00 stderr F E1212 16:19:58.563870 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.581971744+00:00 stderr F E1212 16:19:58.581894 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.844805914+00:00 stderr F E1212 16:19:58.844750 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.845267175+00:00 stderr F E1212 16:19:58.845245 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:58.845642695+00:00 stderr F E1212 16:19:58.845608 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:19:59.864137116+00:00 stderr F E1212 16:19:59.863789 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:00.395717153+00:00 stderr F E1212 16:20:00.395246 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:03.410265552+00:00 stderr F E1212 16:20:03.409779 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:03.970341464+00:00 stderr F E1212 16:20:03.970290 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:03.970570430+00:00 stderr F E1212 16:20:03.970552 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:20:05.002260234+00:00 stderr F E1212 16:20:05.001747 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:05.008464970+00:00 stderr F E1212 16:20:05.008383 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:05.019546268+00:00 stderr F E1212 16:20:05.019498 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:05.040650708+00:00 stderr F E1212 16:20:05.040600 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:05.082338885+00:00 stderr F E1212 16:20:05.082246 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:05.164031816+00:00 stderr F E1212 16:20:05.163960 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:05.325577532+00:00 stderr F E1212 16:20:05.325455 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 
2025-12-12T16:20:05.646799317+00:00 stderr F E1212 16:20:05.646723 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:06.288086788+00:00 stderr F E1212 16:20:06.288016 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:07.569523512+00:00 stderr F E1212 16:20:07.569434 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:07.869494594+00:00 stderr F E1212 16:20:07.869419 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:07.870351266+00:00 stderr F E1212 16:20:07.869864 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:08.671670136+00:00 stderr F E1212 16:20:08.671556 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:10.131645722+00:00 stderr F E1212 16:20:10.131555 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:10.871230171+00:00 stderr F E1212 16:20:10.871103 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:10.871767145+00:00 stderr F E1212 16:20:10.871726 1 sync.go:528] Merging registry secrets failed with: failed to merge global pull secret: do**********ig bytes contain JSON null 2025-12-12T16:20:10.871880628+00:00 stderr F E1212 16:20:10.871847 1 operator.go:467] "Unhandled Error" err="failed to merge global pull secret: do**********ig bytes contain JSON null" 2025-12-12T16:20:16.915722707+00:00 stderr F E1212 16:20:16.914890 1 sync.go:2231] Failed to stamp bootimages configmap: failed to grab rendered MC rendered-master-842a93c7bb3e86c26c29ba8a7f596b70, error: machineconfig.machineconfiguration.openshift.io "rendered-master-842a93c7bb3e86c26c29ba8a7f596b70" not found 2025-12-12T16:20:23.472204446+00:00 stderr F E1212 16:20:23.471504 1 sync.go:2231] Failed to stamp bootimages configmap: failed to grab rendered MC rendered-master-842a93c7bb3e86c26c29ba8a7f596b70, error: machineconfig.machineconfiguration.openshift.io "rendered-master-842a93c7bb3e86c26c29ba8a7f596b70" not found 2025-12-12T16:20:23.472204446+00:00 stderr F E1212 16:20:23.471593 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:24.471484276+00:00 stderr F E1212 16:20:24.471415 1 sync.go:2231] Failed to stamp bootimages configmap: failed to grab rendered MC rendered-master-842a93c7bb3e86c26c29ba8a7f596b70, error: machineconfig.machineconfiguration.openshift.io "rendered-master-842a93c7bb3e86c26c29ba8a7f596b70" not found 2025-12-12T16:20:24.471536437+00:00 stderr F E1212 16:20:24.471483 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:25.470961910+00:00 stderr F E1212 16:20:25.470832 1 sync.go:2231] Failed to stamp bootimages configmap: failed to grab rendered MC rendered-master-842a93c7bb3e86c26c29ba8a7f596b70, error: machineconfig.machineconfiguration.openshift.io "rendered-master-842a93c7bb3e86c26c29ba8a7f596b70" not found 2025-12-12T16:20:25.470961910+00:00 stderr F E1212 16:20:25.470913 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:26.471065541+00:00 stderr F E1212 16:20:26.470982 1 sync.go:2231] Failed to stamp bootimages configmap: failed to grab rendered MC rendered-master-842a93c7bb3e86c26c29ba8a7f596b70, error: machineconfig.machineconfiguration.openshift.io "rendered-master-842a93c7bb3e86c26c29ba8a7f596b70" not found 2025-12-12T16:20:26.471106422+00:00 stderr F E1212 16:20:26.471055 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:27.471796367+00:00 stderr F E1212 16:20:27.471731 1 sync.go:2231] Failed to stamp bootimages configmap: failed to grab rendered MC rendered-master-842a93c7bb3e86c26c29ba8a7f596b70, error: machineconfig.machineconfiguration.openshift.io "rendered-master-842a93c7bb3e86c26c29ba8a7f596b70" not found 2025-12-12T16:20:27.471843589+00:00 stderr F E1212 16:20:27.471795 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:28.471786704+00:00 stderr F E1212 16:20:28.471473 1 sync.go:2231] Failed to stamp bootimages configmap: failed to grab rendered MC rendered-master-842a93c7bb3e86c26c29ba8a7f596b70, error: machineconfig.machineconfiguration.openshift.io "rendered-master-842a93c7bb3e86c26c29ba8a7f596b70" not found 2025-12-12T16:20:28.471839986+00:00 stderr F E1212 16:20:28.471797 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:29.471735972+00:00 stderr F E1212 16:20:29.471664 1 sync.go:2231] Failed to stamp bootimages configmap: failed to grab rendered MC rendered-master-842a93c7bb3e86c26c29ba8a7f596b70, error: machineconfig.machineconfiguration.openshift.io "rendered-master-842a93c7bb3e86c26c29ba8a7f596b70" not found 2025-12-12T16:20:29.471735972+00:00 stderr F E1212 16:20:29.471721 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:30.471537708+00:00 stderr F E1212 16:20:30.470925 1 sync.go:2231] Failed to stamp bootimages configmap: failed to grab rendered MC rendered-master-842a93c7bb3e86c26c29ba8a7f596b70, error: machineconfig.machineconfiguration.openshift.io "rendered-master-842a93c7bb3e86c26c29ba8a7f596b70" not found 2025-12-12T16:20:30.471585429+00:00 stderr F E1212 16:20:30.471531 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:31.471658070+00:00 stderr F E1212 16:20:31.471459 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:32.483051719+00:00 stderr F E1212 16:20:32.480070 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:33.472174384+00:00 stderr F E1212 16:20:33.471532 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:34.471558798+00:00 stderr F E1212 16:20:34.471342 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:35.012579487+00:00 stderr F I1212 16:20:35.011822 1 operator.go:415] Change observed to kube-apiserver-server-ca 2025-12-12T16:20:35.471226068+00:00 stderr F E1212 16:20:35.471158 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:36.471916506+00:00 stderr F E1212 16:20:36.471120 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:37.471680730+00:00 stderr F E1212 16:20:37.471583 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:38.471475383+00:00 stderr F E1212 16:20:38.471368 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:39.472073739+00:00 stderr F E1212 16:20:39.471540 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:40.471236827+00:00 stderr F E1212 16:20:40.470822 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:41.471812062+00:00 stderr F E1212 16:20:41.470921 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:42.471888233+00:00 stderr F E1212 16:20:42.471057 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:43.473961227+00:00 stderr F E1212 16:20:43.471324 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:44.471465142+00:00 stderr F E1212 16:20:44.471219 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:45.472382186+00:00 stderr F E1212 16:20:45.471830 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:46.472672802+00:00 stderr F E1212 16:20:46.471562 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:47.472843397+00:00 stderr F E1212 16:20:47.471691 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:48.471860661+00:00 stderr F E1212 16:20:48.471645 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:49.471812439+00:00 stderr F E1212 16:20:49.471176 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:50.471146661+00:00 stderr F E1212 16:20:50.471091 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:51.470832423+00:00 stderr F E1212 16:20:51.470732 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:52.471577152+00:00 stderr F E1212 16:20:52.471480 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:53.472472695+00:00 stderr F E1212 16:20:53.471490 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:54.471635013+00:00 stderr F E1212 16:20:54.471576 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:55.471822718+00:00 stderr F E1212 16:20:55.471766 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:56.471626472+00:00 stderr F E1212 16:20:56.471545 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:57.472681999+00:00 stderr F E1212 16:20:57.472602 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:58.470902212+00:00 stderr F E1212 16:20:58.470820 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:20:59.471314983+00:00 stderr F E1212 16:20:59.471265 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:21:00.471052725+00:00 stderr F E1212 16:21:00.470979 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:21:01.472123103+00:00 stderr F E1212 16:21:01.471565 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:21:02.546609537+00:00 stderr F E1212 16:21:02.546515 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:21:03.471292692+00:00 stderr F E1212 16:21:03.471225 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:21:04.471402443+00:00 stderr F E1212 16:21:04.471347 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:21:05.471576608+00:00 stderr F E1212 16:21:05.471481 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:21:06.471638990+00:00 stderr F E1212 16:21:06.471402 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:21:07.471949326+00:00 stderr F E1212 16:21:07.471808 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:21:08.471134255+00:00 stderr F E1212 16:21:08.471047 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:21:09.471502115+00:00 stderr F E1212 16:21:09.471417 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")"
2025-12-12T16:21:10.471063453+00:00 stderr F E1212 16:21:10.470943 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")"
2025-12-12T16:21:11.472129429+00:00 stderr F E1212 16:21:11.471656 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")"
[identical "Error syncing Required MachineConfigPools" entries repeat once per second from 16:21:12 through 16:23:42, each reporting the same status: MachineConfigPool master degraded, 1 node unavailable, content mismatch for /var/lib/kubelet/config.json against rendered-master-d582710c680b4cd4536e11249c7e09e9]
2025-12-12T16:23:43.470888687+00:00 stderr F E1212 16:23:43.470816 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:44.471013701+00:00 stderr F E1212 16:23:44.470935 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:45.471630051+00:00 stderr F E1212 16:23:45.471524 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:46.471579614+00:00 stderr F E1212 16:23:46.471450 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:47.471683332+00:00 stderr F E1212 16:23:47.471584 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:48.471817240+00:00 stderr F E1212 16:23:48.471723 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:49.472693126+00:00 stderr F E1212 16:23:49.472103 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:50.471790929+00:00 stderr F E1212 16:23:50.471685 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:51.471752254+00:00 stderr F E1212 16:23:51.471647 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:52.471406029+00:00 stderr F E1212 16:23:52.471294 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:53.471632140+00:00 stderr F E1212 16:23:53.471535 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:54.471685677+00:00 stderr F E1212 16:23:54.471607 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:55.471565458+00:00 stderr F E1212 16:23:55.471463 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:56.471654295+00:00 stderr F E1212 16:23:56.471547 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:57.472428190+00:00 stderr F E1212 16:23:57.471698 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:58.471399429+00:00 stderr F E1212 16:23:58.471315 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:23:59.471745522+00:00 stderr F E1212 16:23:59.471624 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:00.471757208+00:00 stderr F E1212 16:24:00.471655 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:01.471174738+00:00 stderr F E1212 16:24:01.471041 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:02.471380268+00:00 stderr F E1212 16:24:02.471242 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:03.471901136+00:00 stderr F E1212 16:24:03.471789 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:04.471666175+00:00 stderr F E1212 16:24:04.471574 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:05.471601049+00:00 stderr F E1212 16:24:05.471452 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:06.472131646+00:00 stderr F E1212 16:24:06.471450 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:07.471580437+00:00 stderr F E1212 16:24:07.471467 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:08.470933897+00:00 stderr F E1212 16:24:08.470824 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:09.471790733+00:00 stderr F E1212 16:24:09.471616 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:10.472678029+00:00 stderr F E1212 16:24:10.471763 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:11.471712860+00:00 stderr F E1212 16:24:11.471574 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:12.470909855+00:00 stderr F E1212 16:24:12.470803 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:13.471071164+00:00 stderr F E1212 16:24:13.470998 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:14.471264854+00:00 stderr F E1212 16:24:14.471161 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:15.471979597+00:00 stderr F E1212 16:24:15.471895 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:16.471467989+00:00 stderr F E1212 16:24:16.471394 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:17.472825727+00:00 stderr F E1212 16:24:17.471897 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:18.471636223+00:00 stderr F E1212 16:24:18.471575 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:19.471820272+00:00 stderr F E1212 16:24:19.471759 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:20.471436897+00:00 stderr F E1212 16:24:20.471350 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:21.471673948+00:00 stderr F E1212 16:24:21.471596 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:22.471329485+00:00 stderr F E1212 16:24:22.471231 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:23.471457543+00:00 stderr F E1212 16:24:23.471341 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:24.470972495+00:00 stderr F E1212 16:24:24.470890 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:25.470846897+00:00 stderr F E1212 16:24:25.470779 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:26.472387490+00:00 stderr F E1212 16:24:26.471524 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:27.471637427+00:00 stderr F E1212 16:24:27.471568 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:28.471278792+00:00 stderr F E1212 16:24:28.471220 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:29.471919943+00:00 stderr F E1212 16:24:29.471837 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:30.471631091+00:00 stderr F E1212 16:24:30.471576 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:31.471869721+00:00 stderr F E1212 16:24:31.471782 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:32.470996415+00:00 stderr F E1212 16:24:32.470904 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:33.471095812+00:00 stderr F E1212 16:24:33.471008 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:34.471467747+00:00 stderr F E1212 16:24:34.471395 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:35.472252251+00:00 stderr F E1212 16:24:35.471315 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:36.471639380+00:00 stderr F E1212 16:24:36.471577 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:37.471008150+00:00 stderr F E1212 16:24:37.470942 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:38.471044415+00:00 stderr F E1212 16:24:38.470960 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:39.471278576+00:00 stderr F E1212 16:24:39.471128 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:40.471369504+00:00 stderr F E1212 16:24:40.471309 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:41.471588134+00:00 stderr F E1212 16:24:41.471527 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:42.471709111+00:00 stderr F E1212 16:24:42.471650 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:43.472671050+00:00 stderr F E1212 16:24:43.471840 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:44.470904892+00:00 stderr F E1212 16:24:44.470841 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:45.471264025+00:00 stderr F E1212 16:24:45.471193 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:46.471612388+00:00 stderr F E1212 16:24:46.471535 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:47.471210724+00:00 stderr F E1212 16:24:47.471122 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:48.471345552+00:00 stderr F E1212 16:24:48.471264 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:49.471559472+00:00 stderr F E1212 16:24:49.471464 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:50.471917591+00:00 stderr F E1212 16:24:50.471830 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:51.472527647+00:00 stderr F E1212 16:24:51.471500 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:52.471489870+00:00 stderr F E1212 16:24:52.471404 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:53.471700054+00:00 stderr F E1212 16:24:53.471623 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:54.471809907+00:00 stderr F E1212 16:24:54.471735 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:55.471074607+00:00 stderr F E1212 16:24:55.470985 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:56.471392315+00:00 stderr F E1212 16:24:56.471331 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:57.471599530+00:00 stderr F E1212 16:24:57.471513 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:58.471595060+00:00 stderr F E1212 16:24:58.471529 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:24:59.471431195+00:00 stderr F E1212 16:24:59.471364 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:00.471771993+00:00 stderr F E1212 16:25:00.471071 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:01.471664120+00:00 stderr F E1212 16:25:01.471587 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:02.471397353+00:00 stderr F E1212 16:25:02.471316 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:03.472368137+00:00 stderr F E1212 16:25:03.471557 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:04.471279598+00:00 stderr F E1212 16:25:04.471044 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:05.470888798+00:00 stderr F E1212 16:25:05.470805 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:06.471028573+00:00 stderr F E1212 16:25:06.470931 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:07.471758763+00:00 stderr F E1212 16:25:07.471669 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:08.472036180+00:00 stderr F E1212 16:25:08.471596 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:09.471944849+00:00 stderr F E1212 16:25:09.471386 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:10.471737804+00:00 stderr F E1212 16:25:10.471650 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:11.472035542+00:00 stderr F E1212 16:25:11.471946 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:12.471660423+00:00 stderr F E1212 16:25:12.471599 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:13.471033856+00:00 stderr F E1212 16:25:13.470954 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:14.471451227+00:00 stderr F E1212 16:25:14.471380 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:15.471400955+00:00 stderr F E1212 16:25:15.471316 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:16.471839327+00:00 stderr F E1212 16:25:16.471732 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:17.471890038+00:00 stderr F E1212 16:25:17.471813 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:18.471282412+00:00 stderr F E1212 16:25:18.471170 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:19.471378696+00:00 stderr F E1212 16:25:19.471313 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:20.472517195+00:00 stderr F E1212 16:25:20.471608 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:21.471850007+00:00 stderr F E1212 16:25:21.471761 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:22.470955344+00:00 stderr F E1212 16:25:22.470888 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:23.471143998+00:00 stderr F E1212 16:25:23.471055 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:24.472337790+00:00 stderr F E1212 16:25:24.472261 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:25.471450408+00:00 stderr F E1212 16:25:25.471379 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:26.471069900+00:00 stderr F E1212 16:25:26.470943 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:27.472668913+00:00 stderr F E1212 16:25:27.471740 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:25:28.472020288+00:00 stderr F E1212 16:25:28.471442 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:27:49.471121465+00:00 stderr F E1212 16:27:49.470993 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:27:50.472429457+00:00 stderr F E1212 16:27:50.471853 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:27:51.472955329+00:00 stderr F E1212 16:27:51.471369 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:27:52.471524631+00:00 stderr F E1212 16:27:52.471423 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:27:53.470976146+00:00 stderr F E1212 16:27:53.470804 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:27:54.471139139+00:00 stderr F E1212 16:27:54.471039 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:27:55.471482986+00:00 stderr F E1212 16:27:55.471364 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:27:56.472402369+00:00 stderr F E1212 16:27:56.471548 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:27:57.471888034+00:00 stderr F E1212 16:27:57.471806 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:27:58.471695349+00:00 stderr F E1212 16:27:58.471604 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:27:59.471642305+00:00 stderr F E1212 16:27:59.471559 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:00.472330902+00:00 stderr F E1212 16:28:00.471469 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:01.471051508+00:00 stderr F E1212 16:28:01.470934 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:02.472443271+00:00 stderr F E1212 16:28:02.471519 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:03.471525817+00:00 stderr F E1212 16:28:03.471438 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:04.474980073+00:00 stderr F E1212 16:28:04.474781 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:05.471850533+00:00 stderr F E1212 16:28:05.471738 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:06.471596934+00:00 stderr F E1212 16:28:06.471524 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:07.471846959+00:00 stderr F E1212 16:28:07.471745 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:08.477244044+00:00 stderr F E1212 16:28:08.475519 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:09.472279727+00:00 stderr F E1212 16:28:09.472151 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:10.472497931+00:00 stderr F E1212 16:28:10.471544 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:11.471865954+00:00 stderr F E1212 16:28:11.471773 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:12.470932539+00:00 stderr F E1212 16:28:12.470828 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:13.471629974+00:00 stderr F E1212 16:28:13.471509 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:14.471857739+00:00 stderr F E1212 16:28:14.471746 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:15.471162830+00:00 stderr F E1212 16:28:15.471054 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:16.471473397+00:00 stderr F E1212 16:28:16.471371 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:17.471726671+00:00 stderr F E1212 16:28:17.471598 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:18.472762146+00:00 stderr F E1212 16:28:18.471610 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:19.471768410+00:00 stderr F E1212 16:28:19.471682 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:20.472306831+00:00 stderr F E1212 16:28:20.471514 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:21.471708435+00:00 stderr F E1212 16:28:21.471476 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:22.471811357+00:00 stderr F E1212 16:28:22.471733 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:23.471777156+00:00 stderr F E1212 16:28:23.471681 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:24.472518262+00:00 stderr F E1212 16:28:24.471614 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:25.471240739+00:00 stderr F E1212 16:28:25.471145 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:26.471650408+00:00 stderr F E1212 16:28:26.471592 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:27.472124248+00:00 stderr F E1212 16:28:27.471257 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:28.471410229+00:00 stderr F E1212 16:28:28.471252 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:29.470914955+00:00 stderr F E1212 16:28:29.470840 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:30.472937525+00:00 stderr F E1212 16:28:30.472017 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:31.471237500+00:00 stderr F E1212 16:28:31.470876 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:32.471768413+00:00 stderr F E1212 16:28:32.471604 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:33.471892275+00:00 stderr F E1212 16:28:33.471761 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:34.471368340+00:00 stderr F E1212 16:28:34.471285 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:35.472746034+00:00 stderr F E1212 16:28:35.471711 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:36.471038840+00:00 stderr F E1212 16:28:36.470921 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:37.471129982+00:00 stderr F E1212 16:28:37.471049 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:38.471416947+00:00 stderr F E1212 16:28:38.471326 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:39.472553365+00:00 stderr F E1212 16:28:39.471680 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:40.471139718+00:00 stderr F E1212 16:28:40.470974 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:41.472262306+00:00 stderr F E1212 16:28:41.471010 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:42.471785812+00:00 stderr F E1212 16:28:42.471693 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:43.472058118+00:00 stderr F E1212 16:28:43.471657 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:44.472497607+00:00 stderr F E1212 16:28:44.471743 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:45.470984307+00:00 stderr F E1212 16:28:45.470914 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:46.471566821+00:00 stderr F E1212 16:28:46.471497 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:47.471744755+00:00 stderr F E1212 16:28:47.471663 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:48.471556379+00:00 stderr F E1212 16:28:48.471487 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:49.472217173+00:00 stderr F E1212 16:28:49.471611 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:50.471142875+00:00 stderr F E1212 16:28:50.471025 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:51.471790690+00:00 stderr F E1212 16:28:51.471712 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:52.471139582+00:00 stderr F E1212 16:28:52.471040 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:53.471367576+00:00 stderr F E1212 16:28:53.470955 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:54.471397515+00:00 stderr F E1212 16:28:54.470888 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:55.471624569+00:00 stderr F E1212 16:28:55.471501 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:56.471669168+00:00 stderr F E1212 16:28:56.471568 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:57.471087043+00:00 stderr F E1212 16:28:57.470982 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:58.471164863+00:00 stderr F E1212 16:28:58.471089 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:28:59.471578623+00:00 stderr F E1212 16:28:59.471465 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:00.472511774+00:00 stderr F E1212 16:29:00.471731 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:01.471396295+00:00 stderr F E1212 16:29:01.471269 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:02.471353805+00:00 stderr F E1212 16:29:02.471245 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:03.471750993+00:00 stderr F E1212 16:29:03.470977 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:04.471211948+00:00 stderr F E1212 16:29:04.471103 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:05.473757001+00:00 stderr F E1212 16:29:05.473198 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:06.472269803+00:00 stderr F E1212 16:29:06.471634 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:07.472853655+00:00 stderr F E1212 16:29:07.472754 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:08.471454209+00:00 stderr F E1212 16:29:08.471375 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:09.471663344+00:00 stderr F E1212 16:29:09.471575 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:10.471787754+00:00 stderr F E1212 16:29:10.470848 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:11.471596600+00:00 stderr F E1212 16:29:11.471441 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:12.471401396+00:00 stderr F E1212 16:29:12.471320 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:13.471267084+00:00 stderr F E1212 16:29:13.471074 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:14.471379287+00:00 stderr F E1212 16:29:14.471246 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:15.471658815+00:00 stderr F E1212 16:29:15.471529 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:16.472127328+00:00 stderr F E1212 16:29:16.471491 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:17.471741118+00:00 stderr F E1212 16:29:17.471607 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:18.470892547+00:00 stderr F E1212 16:29:18.470791 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:19.471716909+00:00 stderr F E1212 16:29:19.471605 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:20.473072674+00:00 stderr F E1212 16:29:20.471732 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:21.471800952+00:00 stderr F E1212 16:29:21.471691 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:22.471501115+00:00 stderr F E1212 16:29:22.471392 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:23.472174193+00:00 stderr F E1212 16:29:23.471431 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:24.472391610+00:00 stderr F E1212 16:29:24.471506 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:25.471579109+00:00 stderr F E1212 16:29:25.471469 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:26.471695983+00:00 stderr F E1212 16:29:26.471594 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:27.471778476+00:00 stderr F E1212 16:29:27.471653 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:28.472722490+00:00 stderr F E1212 16:29:28.471617 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:29.471625413+00:00 stderr F E1212 16:29:29.471521 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:30.472144537+00:00 stderr F E1212 16:29:30.471057 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:31.471172043+00:00 stderr F E1212 16:29:31.471059 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:32.471780238+00:00 stderr F E1212 16:29:32.471679 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:33.471817700+00:00 stderr F E1212 16:29:33.471715 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:34.471962945+00:00 stderr F E1212 16:29:34.471142 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:35.471357350+00:00 stderr F E1212 16:29:35.471254 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:36.471718970+00:00 stderr F E1212 16:29:36.471594 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:37.470964452+00:00 stderr F E1212 16:29:37.470809 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:38.471893696+00:00 stderr F E1212 16:29:38.471790 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:39.471361822+00:00 stderr F E1212 16:29:39.471002 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:40.472294456+00:00 stderr F E1212 16:29:40.471416 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:41.471398913+00:00 stderr F E1212 16:29:41.471309 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:42.471962367+00:00 stderr F E1212 16:29:42.471843 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:43.471603509+00:00 stderr F E1212 16:29:43.471524 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:44.471427886+00:00 stderr F E1212 16:29:44.471357 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:45.472432272+00:00 stderr F E1212 16:29:45.471759 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:46.471122269+00:00 stderr F E1212 16:29:46.471018 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:47.471575592+00:00 stderr F E1212 16:29:47.471512 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:48.471780858+00:00 stderr F E1212 16:29:48.471684 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:49.471883020+00:00 stderr F E1212 16:29:49.471781 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:50.471756377+00:00 stderr F E1212 16:29:50.471646 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:51.471525442+00:00 stderr F E1212 16:29:51.471449 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:52.471621565+00:00 stderr F E1212 16:29:52.470919 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:53.471747658+00:00 stderr F E1212 16:29:53.471661 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:54.471703457+00:00 stderr F E1212 16:29:54.471039 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:55.471572804+00:00 stderr F E1212 16:29:55.471491 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:56.471533554+00:00 stderr F E1212 16:29:56.471441 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:57.471298018+00:00 stderr F E1212 16:29:57.471202 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:58.471668168+00:00 stderr F E1212 16:29:58.471583 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:29:59.471825924+00:00 stderr F E1212 16:29:59.471086 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:00.471689941+00:00 stderr F E1212 16:30:00.470959 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:01.471874946+00:00 stderr F E1212 16:30:01.471354 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:02.471887537+00:00 stderr F E1212 16:30:02.471745 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:03.472206536+00:00 stderr F E1212 16:30:03.471573 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:04.472879183+00:00 stderr F E1212 16:30:04.471631 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:05.471538640+00:00 stderr F E1212 16:30:05.471455 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:06.471991883+00:00 stderr F E1212 16:30:06.471870 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:07.471467460+00:00 stderr F E1212 16:30:07.471341 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:08.472163627+00:00 stderr F E1212 16:30:08.471481 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:09.471714517+00:00 stderr F E1212 16:30:09.471622 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:10.471798560+00:00 stderr F E1212 16:30:10.471683 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:11.471575779+00:00 stderr F E1212 16:30:11.471472 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:12.472411285+00:00 stderr F E1212 16:30:12.471702 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:13.471233312+00:00 stderr F E1212 16:30:13.471142 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:14.471701610+00:00 stderr F E1212 16:30:14.471603 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:15.471740326+00:00 stderr F E1212 16:30:15.471640 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:16.471436114+00:00 stderr F E1212 16:30:16.471234 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:17.471840551+00:00 stderr F E1212 16:30:17.471687 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:18.470953444+00:00 stderr F E1212 16:30:18.470854 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:19.471085873+00:00 stderr F E1212 16:30:19.471013 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:20.470971556+00:00 stderr F E1212 16:30:20.470888 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:21.471778432+00:00 stderr F E1212 16:30:21.471645 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:22.472253970+00:00 stderr F E1212 16:30:22.471612 1 sync.go:1753] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:22.473135492+00:00 stderr F I1212 16:30:22.473043 1 event.go:377] Event(v1.ObjectReference{Kind:"", Namespace:"openshift-machine-config-operator", Name:"machine-config", UID:"7f2fd96d-8d64-472c-934f-96c0625ce7a9", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'OperatorDegraded: RequiredPoolsFailed' Failed to resync 4.20.1 because: error during syncRequiredMachineConfigPools: [context deadline exceeded, error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"")] 2025-12-12T16:30:28.018998669+00:00 stderr F E1212 16:30:28.018666 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:29.019025135+00:00 stderr F E1212 16:30:29.018890 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:30.018239112+00:00 stderr F E1212 16:30:30.017996 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:31.018975067+00:00 stderr F E1212 16:30:31.018852 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:32.018567643+00:00 stderr F E1212 16:30:32.017936 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:33.018118158+00:00 stderr F E1212 16:30:33.018009 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:34.018443582+00:00 stderr F E1212 16:30:34.018311 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:35.018382487+00:00 stderr F E1212 16:30:35.018304 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:36.018119467+00:00 stderr F E1212 16:30:36.018017 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:37.018116492+00:00 stderr F E1212 16:30:37.018011 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:38.018443587+00:00 stderr F E1212 16:30:38.018352 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:39.018642698+00:00 stderr F E1212 16:30:39.018552 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:40.019612267+00:00 stderr F E1212 16:30:40.018814 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:41.018516196+00:00 stderr F E1212 16:30:41.018397 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:42.018255826+00:00 stderr F E1212 16:30:42.018106 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:43.018961939+00:00 stderr F E1212 16:30:43.018858 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:44.018407970+00:00 stderr F E1212 16:30:44.018299 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:45.018404037+00:00 stderr F E1212 16:30:45.018322 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:46.018856414+00:00 stderr F E1212 16:30:46.018760 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:47.019497675+00:00 stderr F E1212 16:30:47.018876 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:48.018236719+00:00 stderr F E1212 16:30:48.018111 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:49.018389189+00:00 stderr F E1212 16:30:49.018278 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:50.018513378+00:00 stderr F E1212 16:30:50.018417 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:51.018921714+00:00 stderr F E1212 16:30:51.018833 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:52.018143070+00:00 stderr F E1212 16:30:52.018010 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:53.018234358+00:00 stderr F E1212 16:30:53.018046 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:54.018629544+00:00 stderr F E1212 16:30:54.018253 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:55.018820434+00:00 stderr F E1212 16:30:55.018377 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:56.018876912+00:00 stderr F E1212 16:30:56.018790 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:57.018251022+00:00 stderr F E1212 16:30:57.018069 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:58.018646147+00:00 stderr F E1212 16:30:58.018565 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:30:59.018633623+00:00 stderr F E1212 16:30:59.018516 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:00.018600488+00:00 stderr F E1212 16:31:00.018480 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:01.019101167+00:00 stderr F E1212 16:31:01.017976 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:02.018752444+00:00 stderr F E1212 16:31:02.018623 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:03.018444193+00:00 stderr F E1212 16:31:03.018337 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:04.018561643+00:00 stderr F E1212 16:31:04.018425 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:05.018046787+00:00 stderr F E1212 16:31:05.017955 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:06.019676025+00:00 stderr F E1212 16:31:06.018909 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:07.018783310+00:00 stderr F E1212 16:31:07.018117 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:08.019416671+00:00 stderr F E1212 16:31:08.018316 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:09.019069257+00:00 stderr F E1212 16:31:09.018943 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:10.018557471+00:00 stderr F E1212 16:31:10.018444 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:11.018941827+00:00 stderr F E1212 16:31:11.018554 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:12.018918431+00:00 stderr F E1212 16:31:12.018349 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:13.017939623+00:00 stderr F E1212 16:31:13.017819 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:14.018660756+00:00 stderr F E1212 16:31:14.018523 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:15.018681152+00:00 stderr F E1212 16:31:15.018589 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:16.018473116+00:00 stderr F E1212 16:31:16.018302 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:17.018755992+00:00 stderr F E1212 16:31:17.018609 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:18.018787582+00:00 stderr F E1212 16:31:18.018669 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:19.018800611+00:00 stderr F E1212 16:31:19.018720 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:20.018801088+00:00 stderr F E1212 16:31:20.018661 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:21.020055218+00:00 stderr F E1212 16:31:21.018919 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:22.018094028+00:00 stderr F E1212 16:31:22.017977 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:23.018703891+00:00 stderr F E1212 16:31:23.018580 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:24.018272899+00:00 stderr F E1212 16:31:24.018170 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:25.018662538+00:00 stderr F E1212 16:31:25.018565 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:26.018986034+00:00 stderr F E1212 16:31:26.018870 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:27.018037148+00:00 stderr F E1212 16:31:27.017903 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:28.019518463+00:00 stderr F E1212 16:31:28.018522 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:29.018330773+00:00 stderr F E1212 16:31:29.017988 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:30.018780551+00:00 stderr F E1212 16:31:30.018678 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:31.019464217+00:00 stderr F E1212 16:31:31.018165 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:32.019249400+00:00 stderr F E1212 16:31:32.019047 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:33.018014029+00:00 stderr F E1212 16:31:33.017934 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:34.018385356+00:00 stderr F E1212 16:31:34.018233 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:35.018403235+00:00 stderr F E1212 16:31:35.018247 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:36.019611053+00:00 stderr F E1212 16:31:36.018538 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:37.018017742+00:00 stderr F E1212 16:31:37.017894 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:38.018906193+00:00 stderr F E1212 16:31:38.018799 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:39.019566778+00:00 stderr F E1212 16:31:39.018528 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:40.018969802+00:00 stderr F E1212 16:31:40.018847 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:41.018434857+00:00 stderr F E1212 16:31:41.018346 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:42.018061556+00:00 stderr F E1212 16:31:42.017950 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:43.019546853+00:00 stderr F E1212 16:31:43.018626 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:44.018567548+00:00 stderr F E1212 16:31:44.018469 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:45.019016177+00:00 stderr F E1212 16:31:45.018924 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:46.018606735+00:00 stderr F E1212 16:31:46.018477 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:47.018665996+00:00 stderr F E1212 16:31:47.018574 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:48.018114121+00:00 stderr F E1212 16:31:48.017991 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:49.018113010+00:00 stderr F E1212 16:31:49.018020 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:50.018655473+00:00 stderr F E1212 16:31:50.017965 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:51.018750914+00:00 stderr F E1212 16:31:51.018350 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:52.018575327+00:00 stderr F E1212 16:31:52.018463 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:53.018976186+00:00 stderr F E1212 16:31:53.018828 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:54.017973050+00:00 stderr F E1212 16:31:54.017870 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:55.018268596+00:00 stderr F E1212 16:31:55.018194 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:56.018786577+00:00 stderr F E1212 16:31:56.018129 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:57.018353485+00:00 stderr F E1212 16:31:57.018258 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:58.017927184+00:00 stderr F E1212 16:31:58.017867 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:31:59.018031284+00:00 stderr F E1212 16:31:59.017923 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:00.018172747+00:00 stderr F E1212 16:32:00.018058 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:01.019166060+00:00 stderr F E1212 16:32:01.018199 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:02.018885343+00:00 stderr F E1212 16:32:02.018769 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:03.018703216+00:00 stderr F E1212 16:32:03.018600 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:04.019011303+00:00 stderr F E1212 16:32:04.018893 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:05.018321875+00:00 stderr F E1212 16:32:05.018204 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:06.018728083+00:00 stderr F E1212 16:32:06.018623 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:07.019189293+00:00 stderr F E1212 16:32:07.017939 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:08.018696379+00:00 stderr F E1212 16:32:08.018611 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:09.018344260+00:00 stderr F E1212 16:32:09.018065 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:10.018991093+00:00 stderr F E1212 16:32:10.018897 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:11.018101290+00:00 stderr F E1212 16:32:11.017987 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:12.018124529+00:00 stderr F E1212 16:32:12.018010 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:13.018682590+00:00 stderr F E1212 16:32:13.018580 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:14.018690390+00:00 stderr F E1212 16:32:14.018550 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:15.019587560+00:00 stderr F E1212 16:32:15.018637 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:16.018439771+00:00 stderr F E1212 16:32:16.018325 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:17.018361227+00:00 stderr F E1212 16:32:17.018225 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:18.017993627+00:00 stderr F E1212 16:32:18.017905 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:19.018302223+00:00 stderr F E1212 16:32:19.018125 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:20.018395043+00:00 stderr F E1212 16:32:20.018024 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:21.018152949+00:00 stderr F E1212 16:32:21.017983 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:22.018287883+00:00 stderr F E1212 16:32:22.018151 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:23.018294803+00:00 stderr F E1212 16:32:23.018089 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:24.019219615+00:00 stderr F E1212 16:32:24.018518 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:25.018385163+00:00 stderr F E1212 16:32:25.018295 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:26.018371582+00:00 stderr F E1212 16:32:26.018255 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:27.019024029+00:00 stderr F E1212 16:32:27.018344 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:28.018530746+00:00 stderr F E1212 16:32:28.018426 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:29.018854194+00:00 stderr F E1212 16:32:29.018729 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:30.018786552+00:00 stderr F E1212 16:32:30.018668 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:31.018987537+00:00 stderr F E1212 16:32:31.018493 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:32.019046699+00:00 stderr F E1212 16:32:32.018797 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:33.018566297+00:00 stderr F E1212 16:32:33.018459 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:34.018990568+00:00 stderr F E1212 16:32:34.018854 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:35.018246498+00:00 stderr F E1212 16:32:35.018128 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:36.018196508+00:00 stderr F E1212 16:32:36.018057 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:37.018568727+00:00 stderr F E1212 16:32:37.018464 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:38.019040059+00:00 stderr F E1212 16:32:38.018050 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:39.018390153+00:00 stderr F E1212 16:32:39.018275 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:40.018686531+00:00 stderr F E1212 16:32:40.018566 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:41.018816055+00:00 stderr F E1212 16:32:41.018699 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:42.018004724+00:00 stderr F E1212 16:32:42.017912 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:43.018238271+00:00 stderr F E1212 16:32:43.018115 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:44.018387175+00:00 stderr F E1212 16:32:44.018258 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:45.018622891+00:00 stderr F E1212 16:32:45.018480 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:46.019285278+00:00 stderr F E1212 16:32:46.018366 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:47.018613972+00:00 stderr F E1212 16:32:47.018526 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:48.018856829+00:00 stderr F E1212 16:32:48.018679 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:49.019066314+00:00 stderr F E1212 16:32:49.018950 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:50.017975038+00:00 stderr F E1212 16:32:50.017883 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:51.018560953+00:00 stderr F E1212 16:32:51.018447 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:52.018671247+00:00 stderr F E1212 16:32:52.018542 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:53.018575034+00:00 stderr F E1212 16:32:53.018489 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:54.019411136+00:00 stderr F E1212 16:32:54.018494 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:55.018773911+00:00 stderr F E1212 16:32:55.018668 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:56.018433422+00:00 stderr F E1212 16:32:56.018310 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:57.018773512+00:00 stderr F E1212 16:32:57.018675 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:58.018276420+00:00 stderr F E1212 16:32:58.018135 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:32:59.019458471+00:00 stderr F E1212 16:32:59.018536 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:00.018219589+00:00 stderr F E1212 16:33:00.018123 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:01.018119667+00:00 stderr F E1212 16:33:01.018005 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:02.018357234+00:00 stderr F E1212 16:33:02.018270 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:03.018006574+00:00 stderr F E1212 16:33:03.017947 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:04.018151748+00:00 stderr F E1212 16:33:04.018082 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:05.019248746+00:00 stderr F E1212 16:33:05.018260 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:06.018559529+00:00 stderr F E1212 16:33:06.018485 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:07.018890166+00:00 stderr F E1212 16:33:07.018787 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:08.019040390+00:00 stderr F E1212 16:33:08.018933 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:09.018199189+00:00 stderr F E1212 16:33:09.018088 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:10.018438856+00:00 stderr F E1212 16:33:10.018330 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:11.018860546+00:00 stderr F E1212 16:33:11.018294 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:12.018139878+00:00 stderr F E1212 16:33:12.018014 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:13.019048611+00:00 stderr F E1212 16:33:13.018949 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:14.018306822+00:00 stderr F E1212 16:33:14.018217 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:15.018723772+00:00 stderr F E1212 16:33:15.018643 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:16.018376683+00:00 stderr F E1212 16:33:16.018322 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:17.018297722+00:00 stderr F E1212 16:33:17.018173 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:18.018401314+00:00 stderr F E1212 16:33:18.018317 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:19.019037720+00:00 stderr F E1212 16:33:19.018408 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:20.019228466+00:00 stderr F E1212 16:33:20.018629 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:21.018649640+00:00 stderr F E1212 16:33:21.018550 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:22.018597060+00:00 stderr F E1212 16:33:22.018494 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:23.018718644+00:00 stderr F E1212 16:33:23.018641 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:24.018789127+00:00 stderr F E1212 16:33:24.017976 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:25.018973221+00:00 stderr F E1212 16:33:25.018829 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:26.019855363+00:00 stderr F E1212 16:33:26.018853 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:27.018150382+00:00 stderr F E1212 16:33:27.018037 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:28.018446159+00:00 stderr F E1212 16:33:28.018340 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:29.018087930+00:00 stderr F E1212 16:33:29.017986 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:30.018455910+00:00 stderr F E1212 16:33:30.018394 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:31.018648996+00:00 stderr F E1212 16:33:31.018115 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:32.018719918+00:00 stderr F E1212 16:33:32.018643 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:33.018090053+00:00 stderr F E1212 16:33:33.018007 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:34.018253238+00:00 stderr F E1212 16:33:34.018100 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:35.018681160+00:00 stderr F E1212 16:33:35.018565 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:36.018296740+00:00 stderr F E1212 16:33:36.018161 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:37.018341232+00:00 stderr F E1212 16:33:37.018283 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:38.019133723+00:00 stderr F E1212 16:33:38.018553 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:39.018344163+00:00 stderr F E1212 16:33:39.017908 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:40.018551829+00:00 stderr F E1212 16:33:40.018475 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:41.018308953+00:00 stderr F E1212 16:33:41.018240 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:42.017923695+00:00 stderr F E1212 16:33:42.017840 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:43.018841368+00:00 stderr F E1212 16:33:43.018764 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:44.019280229+00:00 stderr F E1212 16:33:44.019126 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:45.018114651+00:00 stderr F E1212 16:33:45.018054 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:46.019974537+00:00 stderr F E1212 16:33:46.018879 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:47.020586963+00:00 stderr F E1212 16:33:47.020483 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:48.018905182+00:00 stderr F E1212 16:33:48.018839 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:49.018542923+00:00 stderr F E1212 16:33:49.018480 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:50.019853526+00:00 stderr F E1212 16:33:50.018790 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:51.018611365+00:00 stderr F E1212 16:33:51.018491 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:52.018087322+00:00 stderr F E1212 16:33:52.018032 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:53.018646205+00:00 stderr F E1212 16:33:53.018544 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:54.018715496+00:00 stderr F E1212 16:33:54.018664 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:55.018914542+00:00 stderr F E1212 16:33:55.018817 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:56.018064211+00:00 stderr F E1212 16:33:56.017989 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:57.018071090+00:00 stderr F E1212 16:33:57.017981 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:58.019546357+00:00 stderr F E1212 16:33:58.018602 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:33:59.018868581+00:00 stderr F E1212 16:33:59.018772 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:00.018165044+00:00 stderr F E1212 16:34:00.018091 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:01.018733128+00:00 stderr F E1212 16:34:01.018668 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:02.018452382+00:00 stderr F E1212 16:34:02.018354 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:04.132551154+00:00 stderr F E1212 16:34:04.132469 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:05.019088999+00:00 stderr F E1212 16:34:05.018479 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:06.018867604+00:00 stderr F E1212 16:34:06.018593 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:07.018170177+00:00 stderr F E1212 16:34:07.018096 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:08.018655599+00:00 stderr F E1212 16:34:08.018568 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:09.018902496+00:00 stderr F E1212 16:34:09.018835 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:10.018061766+00:00 stderr F E1212 16:34:10.017992 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:11.018448596+00:00 stderr F E1212 16:34:11.018388 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:12.018667083+00:00 stderr F E1212 16:34:12.018565 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:13.018842848+00:00 stderr F E1212 16:34:13.018476 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:14.019467784+00:00 stderr F E1212 16:34:14.018940 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:15.018158361+00:00 stderr F E1212 16:34:15.018081 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:16.018499490+00:00 stderr F E1212 16:34:16.018418 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:17.018325427+00:00 stderr F E1212 16:34:17.018163 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:18.019349073+00:00 stderr F E1212 16:34:18.018309 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:19.018529942+00:00 stderr F E1212 16:34:19.018444 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:20.019132278+00:00 stderr F E1212 16:34:20.018510 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:21.018428761+00:00 stderr F E1212 16:34:21.018342 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:22.018798270+00:00 stderr F E1212 16:34:22.018698 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:23.018024161+00:00 stderr F E1212 16:34:23.017932 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:24.018633465+00:00 stderr F E1212 16:34:24.018563 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:25.018709806+00:00 stderr F E1212 16:34:25.018648 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:26.019026173+00:00 stderr F E1212 16:34:26.018291 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:27.018951953+00:00 stderr F E1212 16:34:27.018826 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:28.018859811+00:00 stderr F E1212 16:34:28.018764 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:29.019906577+00:00 stderr F E1212 16:34:29.018744 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:30.018842211+00:00 stderr F E1212 16:34:30.018747 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:31.018650925+00:00 stderr F E1212 16:34:31.018550 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:32.018740873+00:00 stderr F E1212 16:34:32.018679 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:33.018314187+00:00 stderr F E1212 16:34:33.018114 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:34.018772444+00:00 stderr F E1212 16:34:34.018704 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:35.018888912+00:00 stderr F E1212 16:34:35.018789 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:36.019314467+00:00 stderr F E1212 16:34:36.018500 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:37.018456961+00:00 stderr F E1212 16:34:37.018371 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:38.018447216+00:00 stderr F E1212 16:34:38.018374 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:39.018943914+00:00 stderr F E1212 16:34:39.018856 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:40.018155159+00:00 stderr F E1212 16:34:40.018084 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:41.018493093+00:00 stderr F E1212 16:34:41.018404 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:42.018260943+00:00 stderr F E1212 16:34:42.018171 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:43.019417078+00:00 stderr F E1212 16:34:43.018143 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:44.018484779+00:00 stderr F E1212 16:34:44.018397 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:45.018576617+00:00 stderr F E1212 16:34:45.018501 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:46.019023414+00:00 stderr F E1212 16:34:46.018931 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:47.018125156+00:00 stderr F E1212 16:34:47.018050 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:48.018373428+00:00 stderr F E1212 16:34:48.018294 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:49.018966519+00:00 stderr F E1212 16:34:49.018890 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:50.019047647+00:00 stderr F E1212 16:34:50.018307 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:51.018820035+00:00 stderr F E1212 16:34:51.018763 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:52.018928403+00:00 stderr F E1212 16:34:52.018866 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:53.018749975+00:00 stderr F E1212 16:34:53.018668 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:54.018432242+00:00 stderr F E1212 16:34:54.018361 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:55.018057298+00:00 stderr F E1212 16:34:55.017973 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:56.018057124+00:00 stderr F E1212 16:34:56.017968 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:57.018021509+00:00 stderr F E1212 16:34:57.017932 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:58.019483090+00:00 stderr F E1212 16:34:58.018567 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:34:59.018792979+00:00 stderr F E1212 16:34:59.018698 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:00.018669512+00:00 stderr F E1212 16:35:00.018602 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:01.018800179+00:00 stderr F E1212 16:35:01.018719 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:02.018771545+00:00 stderr F E1212 16:35:02.018687 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:03.019717283+00:00 stderr F E1212 16:35:03.018724 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:04.018702232+00:00 stderr F E1212 16:35:04.018608 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:05.017949688+00:00 stderr F E1212 16:35:05.017878 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:06.018318153+00:00 stderr F E1212 16:35:06.018256 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:07.018624595+00:00 stderr F E1212 16:35:07.018565 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:08.017977124+00:00 stderr F E1212 16:35:08.017910 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:09.018065991+00:00 stderr F E1212 16:35:09.017996 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:10.019260997+00:00 stderr F E1212 16:35:10.018538 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:11.019199771+00:00 stderr F E1212 16:35:11.018632 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:12.018781585+00:00 stderr F E1212 16:35:12.018716 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:13.019039257+00:00 stderr F E1212 16:35:13.018935 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:14.019647607+00:00 stderr F E1212 16:35:14.019583 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:15.018819092+00:00 stderr F E1212 16:35:15.018742 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:16.018361414+00:00 stderr F E1212 16:35:16.018278 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:17.018468463+00:00 stderr F E1212 16:35:17.018403 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:18.019448803+00:00 stderr F E1212 16:35:18.018653 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:19.018952885+00:00 stderr F E1212 16:35:19.018875 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:20.018623792+00:00 stderr F E1212 16:35:20.018535 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:21.018119365+00:00 stderr F E1212 16:35:21.018060 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:22.018266714+00:00 stderr F E1212 16:35:22.018159 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:23.018059694+00:00 stderr F E1212 16:35:23.017969 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:24.019687810+00:00 stderr F E1212 16:35:24.018583 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:25.018886626+00:00 stderr F E1212 16:35:25.018818 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:26.018556523+00:00 stderr F E1212 16:35:26.018482 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:27.018878256+00:00 stderr F E1212 16:35:27.018804 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:28.018402250+00:00 stderr F E1212 16:35:28.018339 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:29.018613731+00:00 stderr F E1212 16:35:29.018550 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:30.018278718+00:00 stderr F E1212 16:35:30.018219 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:31.020033287+00:00 stderr F E1212 16:35:31.018931 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:32.018710560+00:00 stderr F E1212 16:35:32.018646 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:33.018802348+00:00 stderr F E1212 16:35:33.018727 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:34.018915266+00:00 stderr F E1212 16:35:34.018839 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:35.019087816+00:00 stderr F E1212 16:35:35.018993 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:36.018268061+00:00 stderr F E1212 16:35:36.018156 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:37.019283141+00:00 stderr F E1212 16:35:37.018592 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:38.018479997+00:00 stderr F E1212 16:35:38.018420 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:39.018248367+00:00 stderr F E1212 16:35:39.018160 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:40.018035287+00:00 stderr F E1212 16:35:40.017941 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:41.018947694+00:00 stderr F E1212 16:35:41.018854 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:42.018137789+00:00 stderr F E1212 16:35:42.018005 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:43.018330699+00:00 stderr F E1212 16:35:43.018157 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:44.018741282+00:00 stderr F E1212 16:35:44.018650 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:45.018676426+00:00 stderr F E1212 16:35:45.018548 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:46.019664577+00:00 stderr F E1212 16:35:46.018667 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:47.018104723+00:00 stderr F E1212 16:35:47.017991 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:48.018239911+00:00 stderr F E1212 16:35:48.018124 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:49.018909524+00:00 stderr F E1212 16:35:49.018829 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:50.018265243+00:00 stderr F E1212 16:35:50.018151 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:51.018830382+00:00 stderr F E1212 16:35:51.018737 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:52.019257807+00:00 stderr F E1212 16:35:52.018574 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:53.018670357+00:00 stderr F E1212 16:35:53.018560 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:54.018242522+00:00 stderr F E1212 16:35:54.018069 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:55.018716939+00:00 stderr F E1212 16:35:55.018601 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:56.019754870+00:00 stderr F E1212 16:35:56.018623 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:57.018017362+00:00 stderr F E1212 16:35:57.017910 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:58.018329365+00:00 stderr F E1212 16:35:58.018173 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:35:59.018518154+00:00 stderr F E1212 16:35:59.018049 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:36:00.018479479+00:00 stderr F E1212 16:36:00.017902 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:36:01.018679999+00:00 stderr F E1212 16:36:01.018613 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:36:02.018775876+00:00 stderr F E1212 16:36:02.018705 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:36:03.019885169+00:00 stderr F E1212 16:36:03.019478 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
[... identical "Error syncing Required MachineConfigPools" entries repeat once per second from 16:36:04 through 16:38:22 ...]
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:24.018755232+00:00 stderr F E1212 16:38:24.018160 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:25.018030929+00:00 stderr F E1212 16:38:25.017927 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:26.019013168+00:00 stderr F E1212 16:38:26.018883 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:27.018105991+00:00 stderr F E1212 16:38:27.017987 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:28.019047070+00:00 stderr F E1212 16:38:28.018058 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:29.018353647+00:00 stderr F E1212 16:38:29.018196 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:30.018449655+00:00 stderr F E1212 16:38:30.018377 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:31.018685076+00:00 stderr F E1212 16:38:31.018593 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:32.018479527+00:00 stderr F E1212 16:38:32.017917 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:33.018240515+00:00 stderr F E1212 16:38:33.018015 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:34.018467827+00:00 stderr F E1212 16:38:34.018383 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:35.018469652+00:00 stderr F E1212 16:38:35.018345 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:36.018730843+00:00 stderr F E1212 16:38:36.018598 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:37.018817221+00:00 stderr F E1212 16:38:37.018675 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:38.018578390+00:00 stderr F E1212 16:38:38.018463 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:39.018552845+00:00 stderr F E1212 16:38:39.018461 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:40.019839712+00:00 stderr F E1212 16:38:40.018639 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:41.018230626+00:00 stderr F E1212 16:38:41.018094 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:42.018710944+00:00 stderr F E1212 16:38:42.018605 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:43.017961570+00:00 stderr F E1212 16:38:43.017842 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:44.018110069+00:00 stderr F E1212 16:38:44.018023 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:45.018059314+00:00 stderr F E1212 16:38:45.017924 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:46.018545171+00:00 stderr F E1212 16:38:46.018426 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:47.019633834+00:00 stderr F E1212 16:38:47.018450 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:48.018528182+00:00 stderr F E1212 16:38:48.018410 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:49.018765273+00:00 stderr F E1212 16:38:49.018638 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:50.018371929+00:00 stderr F E1212 16:38:50.018285 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:51.018928308+00:00 stderr F E1212 16:38:51.018851 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:52.018013761+00:00 stderr F E1212 16:38:52.017917 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:53.018258743+00:00 stderr F E1212 16:38:53.018062 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:54.018434132+00:00 stderr F E1212 16:38:54.018301 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:55.019026662+00:00 stderr F E1212 16:38:55.018334 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:56.018847283+00:00 stderr F E1212 16:38:56.018283 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:57.018498710+00:00 stderr F E1212 16:38:57.018287 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:58.018094105+00:00 stderr F E1212 16:38:58.018033 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:38:59.018405178+00:00 stderr F E1212 16:38:59.018257 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:00.019051950+00:00 stderr F E1212 16:39:00.018314 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:01.018782978+00:00 stderr F E1212 16:39:01.018680 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:02.018406303+00:00 stderr F E1212 16:39:02.018315 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:03.018406028+00:00 stderr F E1212 16:39:03.018347 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:04.018720901+00:00 stderr F E1212 16:39:04.018641 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:05.018243544+00:00 stderr F E1212 16:39:05.018130 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:06.017894681+00:00 stderr F E1212 16:39:06.017840 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:07.019792394+00:00 stderr F E1212 16:39:07.018572 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:08.019205204+00:00 stderr F E1212 16:39:08.019119 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:09.019425326+00:00 stderr F E1212 16:39:09.019333 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:10.018689832+00:00 stderr F E1212 16:39:10.018079 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:11.019142579+00:00 stderr F E1212 16:39:11.018529 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:12.022830197+00:00 stderr F E1212 16:39:12.022741 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:13.018575495+00:00 stderr F E1212 16:39:13.018502 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:14.018744985+00:00 stderr F E1212 16:39:14.018680 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:15.018391421+00:00 stderr F E1212 16:39:15.018335 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:16.020517169+00:00 stderr F E1212 16:39:16.019125 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:17.018568245+00:00 stderr F E1212 16:39:17.018456 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:18.019176966+00:00 stderr F E1212 16:39:18.018902 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:19.019136579+00:00 stderr F E1212 16:39:19.018535 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:20.018898209+00:00 stderr F E1212 16:39:20.018802 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:21.018847404+00:00 stderr F E1212 16:39:21.018744 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:22.019072445+00:00 stderr F E1212 16:39:22.018987 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:23.018457034+00:00 stderr F E1212 16:39:23.018343 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:24.018430839+00:00 stderr F E1212 16:39:24.018327 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:25.018598169+00:00 stderr F E1212 16:39:25.018501 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:26.019637660+00:00 stderr F E1212 16:39:26.018430 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:27.018894507+00:00 stderr F E1212 16:39:27.018758 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:28.018707708+00:00 stderr F E1212 16:39:28.018617 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:29.018518629+00:00 stderr F E1212 16:39:29.018450 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:30.018800771+00:00 stderr F E1212 16:39:30.018680 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:31.018797256+00:00 stderr F E1212 16:39:31.018718 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:32.018214787+00:00 stderr F E1212 16:39:32.018083 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:33.019361662+00:00 stderr F E1212 16:39:33.018642 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:34.018864334+00:00 stderr F E1212 16:39:34.018780 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:35.018882311+00:00 stderr F E1212 16:39:35.018755 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:36.017920372+00:00 stderr F E1212 16:39:36.017837 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:37.018493791+00:00 stderr F E1212 16:39:37.018402 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:38.018024305+00:00 stderr F E1212 16:39:38.017926 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:39.018751839+00:00 stderr F E1212 16:39:39.018674 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:40.019464243+00:00 stderr F E1212 16:39:40.018756 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:41.018666816+00:00 stderr F E1212 16:39:41.018577 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:42.018751233+00:00 stderr F E1212 16:39:42.018623 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:43.018110682+00:00 stderr F E1212 16:39:43.017969 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:44.018638529+00:00 stderr F E1212 16:39:44.018510 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:45.018628335+00:00 stderr F E1212 16:39:45.018515 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:46.018273761+00:00 stderr F E1212 16:39:46.018172 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:47.019268642+00:00 stderr F E1212 16:39:47.018410 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:48.018700002+00:00 stderr F E1212 16:39:48.018607 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:49.018709367+00:00 stderr F E1212 16:39:49.018597 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:50.018591300+00:00 stderr F E1212 16:39:50.018492 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:51.019840655+00:00 stderr F E1212 16:39:51.018650 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:52.018625310+00:00 stderr F E1212 16:39:52.018553 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:53.018866272+00:00 stderr F E1212 16:39:53.018767 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:54.018839176+00:00 stderr F E1212 16:39:54.018770 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:55.018323508+00:00 stderr F E1212 16:39:55.017945 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:56.019064592+00:00 stderr F E1212 16:39:56.018405 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:57.018510054+00:00 stderr F E1212 16:39:57.018436 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:58.018664904+00:00 stderr F E1212 16:39:58.018570 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:39:59.018750670+00:00 stderr F E1212 16:39:59.018638 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:00.019581647+00:00 stderr F E1212 16:40:00.018726 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:01.019765257+00:00 stderr F E1212 16:40:01.018583 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:02.018516370+00:00 stderr F E1212 16:40:02.018419 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:03.018496746+00:00 stderr F E1212 16:40:03.018411 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:04.018591613+00:00 stderr F E1212 16:40:04.018500 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:05.019941373+00:00 stderr F E1212 16:40:05.018846 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:06.018825399+00:00 stderr F E1212 16:40:06.018741 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:07.019233155+00:00 stderr F E1212 16:40:07.018636 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:08.019225211+00:00 stderr F E1212 16:40:08.019057 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:09.018345254+00:00 stderr F E1212 16:40:09.018226 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:10.018615946+00:00 stderr F E1212 16:40:10.018495 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:11.020548141+00:00 stderr F E1212 16:40:11.019592 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:12.018174637+00:00 stderr F E1212 16:40:12.018045 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:13.018647894+00:00 stderr F E1212 16:40:13.018550 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:14.019615244+00:00 stderr F E1212 16:40:14.018649 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:15.018549823+00:00 stderr F E1212 16:40:15.018455 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:16.018442045+00:00 stderr F E1212 16:40:16.018294 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:17.018303647+00:00 stderr F E1212 16:40:17.018207 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:18.018409646+00:00 stderr F E1212 16:40:18.018270 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:19.019247783+00:00 stderr F E1212 16:40:19.018451 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:20.018323245+00:00 stderr F E1212 16:40:20.018161 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:21.019680624+00:00 stderr F E1212 16:40:21.018458 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:22.018803338+00:00 stderr F E1212 16:40:22.018610 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:23.019411389+00:00 stderr F E1212 16:40:23.018366 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:24.019367672+00:00 stderr F E1212 16:40:24.018387 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:25.018536087+00:00 stderr F E1212 16:40:25.018442 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:26.018921422+00:00 stderr F E1212 16:40:26.018025 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:27.018860575+00:00 stderr F E1212 16:40:27.018741 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:27.018928177+00:00 stderr F E1212 16:40:27.018860 1 sync.go:1753] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:32.589534859+00:00 stderr F E1212 16:40:32.588389 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:33.589216437+00:00 stderr F E1212 16:40:33.588572 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:34.588944305+00:00 stderr F E1212 16:40:34.588288 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:35.588377587+00:00 stderr F E1212 16:40:35.588271 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:36.588688520+00:00 stderr F E1212 16:40:36.588605 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:37.588881810+00:00 stderr F E1212 16:40:37.588806 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:38.588875835+00:00 stderr F E1212 16:40:38.588153 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:39.588646625+00:00 stderr F E1212 16:40:39.588537 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:40.589339738+00:00 stderr F E1212 16:40:40.588692 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:41.589531898+00:00 stderr F E1212 16:40:41.588515 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:42.588721123+00:00 stderr F E1212 16:40:42.588649 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:43.588574215+00:00 stderr F E1212 16:40:43.588479 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:44.588735235+00:00 stderr F E1212 16:40:44.588660 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:45.589333485+00:00 stderr F E1212 16:40:45.588615 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:46.588745686+00:00 stderr F E1212 16:40:46.588659 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:47.588679360+00:00 stderr F E1212 16:40:47.588588 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:48.588687255+00:00 stderr F E1212 16:40:48.588579 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:49.589022879+00:00 stderr F E1212 16:40:49.588404 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:50.588684546+00:00 stderr F E1212 16:40:50.588586 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:51.588755974+00:00 stderr F E1212 16:40:51.588650 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:52.588803790+00:00 stderr F E1212 16:40:52.588713 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:53.588898418+00:00 stderr F E1212 16:40:53.588812 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:54.589170020+00:00 stderr F E1212 16:40:54.588526 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:55.589077433+00:00 stderr F E1212 16:40:55.588976 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:56.588964336+00:00 stderr F E1212 16:40:56.588874 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:57.588621593+00:00 stderr F E1212 16:40:57.588539 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:58.588967267+00:00 stderr F E1212 16:40:58.588889 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:40:59.589358312+00:00 stderr F E1212 16:40:59.588626 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:00.588494735+00:00 stderr F E1212 16:41:00.588385 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:01.588940621+00:00 stderr F E1212 16:41:01.588864 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:02.588657058+00:00 stderr F E1212 16:41:02.588547 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:03.588856718+00:00 stderr F E1212 16:41:03.588723 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:04.589352946+00:00 stderr F E1212 16:41:04.588634 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:05.588510831+00:00 stderr F E1212 16:41:05.588433 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:06.589085899+00:00 stderr F E1212 16:41:06.589005 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:07.588668985+00:00 stderr F E1212 16:41:07.588591 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:08.588519917+00:00 stderr F E1212 16:41:08.588424 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:09.588958413+00:00 stderr F E1212 16:41:09.588839 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:10.589099232+00:00 stderr F E1212 16:41:10.588996 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:11.588912623+00:00 stderr F E1212 16:41:11.588113 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:12.588779655+00:00 stderr F E1212 16:41:12.588689 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:13.589248362+00:00 stderr F E1212 16:41:13.589134 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:14.588842057+00:00 stderr F E1212 16:41:14.588754 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:15.589035618+00:00 stderr F E1212 16:41:15.588936 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:16.588628233+00:00 stderr F E1212 16:41:16.588527 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:17.588762191+00:00 stderr F E1212 16:41:17.588673 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:18.588563412+00:00 stderr F E1212 16:41:18.588487 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:19.588572798+00:00 stderr F E1212 16:41:19.588496 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:20.588695656+00:00 stderr F E1212 16:41:20.588066 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:21.588713812+00:00 stderr F E1212 16:41:21.588631 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:22.589008475+00:00 stderr F E1212 16:41:22.588900 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:23.588900638+00:00 stderr F E1212 16:41:23.588816 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:24.588813611+00:00 stderr F E1212 16:41:24.588743 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:25.589071833+00:00 stderr F E1212 16:41:25.588965 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:26.588359541+00:00 stderr F E1212 16:41:26.588276 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:27.589158165+00:00 stderr F E1212 16:41:27.588577 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:28.588564436+00:00 stderr F E1212 16:41:28.588475 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:29.588868650+00:00 stderr F E1212 16:41:29.588743 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:30.590528767+00:00 stderr F E1212 16:41:30.589498 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:31.588875520+00:00 stderr F E1212 16:41:31.588787 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:32.589721657+00:00 stderr F E1212 16:41:32.588873 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:33.589086967+00:00 stderr F E1212 16:41:33.588983 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:34.588507128+00:00 stderr F E1212 16:41:34.588424 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:35.588458382+00:00 stderr F E1212 16:41:35.588316 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:36.589484383+00:00 stderr F E1212 16:41:36.588579 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:37.588887594+00:00 stderr F E1212 16:41:37.588790 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:38.588709664+00:00 stderr F E1212 16:41:38.588553 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:39.588761931+00:00 stderr F E1212 16:41:39.588633 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:40.588803527+00:00 stderr F E1212 16:41:40.588610 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:41.589229504+00:00 stderr F E1212 16:41:41.588646 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:42.588725496+00:00 stderr F E1212 16:41:42.588651 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:43.589142792+00:00 stderr F E1212 16:41:43.588525 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:44.588654185+00:00 stderr F E1212 16:41:44.588568 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:45.588267610+00:00 stderr F E1212 16:41:45.588166 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:46.588247305+00:00 stderr F E1212 16:41:46.588130 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:47.588879137+00:00 stderr F E1212 16:41:47.588808 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:48.588983835+00:00 stderr F E1212 16:41:48.588904 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:49.589260437+00:00 stderr F E1212 16:41:49.588567 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:50.589012986+00:00 stderr F E1212 16:41:50.588924 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:51.588644813+00:00 stderr F E1212 16:41:51.588567 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:52.588459254+00:00 stderr F E1212 16:41:52.588386 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:53.588231393+00:00 stderr F E1212 16:41:53.588143 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:54.588650439+00:00 stderr F E1212 16:41:54.588582 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:55.588488341+00:00 stderr F E1212 16:41:55.588148 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:56.588488286+00:00 stderr F E1212 16:41:56.588204 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:57.588395599+00:00 stderr F E1212 16:41:57.588268 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:58.588965599+00:00 stderr F E1212 16:41:58.588845 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:41:59.588588795+00:00 stderr F E1212 16:41:59.588505 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:00.589155474+00:00 stderr F E1212 16:42:00.589076 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:01.589118739+00:00 stderr F E1212 16:42:01.588561 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:02.588766406+00:00 stderr F E1212 16:42:02.588700 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:03.588731710+00:00 stderr F E1212 16:42:03.588664 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:04.588051238+00:00 stderr F E1212 16:42:04.587991 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:05.588227358+00:00 stderr F E1212 16:42:05.588093 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:06.588130042+00:00 stderr F E1212 16:42:06.588071 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:07.589071250+00:00 stderr F E1212 16:42:07.588541 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:08.588855020+00:00 stderr F E1212 16:42:08.588761 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:09.588592449+00:00 stderr F E1212 16:42:09.588515 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:10.588642385+00:00 stderr F E1212 16:42:10.588546 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:11.588563169+00:00 stderr F E1212 16:42:11.588463 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:12.588623906+00:00 stderr F E1212 16:42:12.588561 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:13.588735345+00:00 stderr F E1212 16:42:13.588668 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:14.588334859+00:00 stderr F E1212 16:42:14.587936 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:15.588860998+00:00 stderr F E1212 16:42:15.588581 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:16.588975517+00:00 stderr F E1212 16:42:16.588887 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:17.588660095+00:00 stderr F E1212 16:42:17.588589 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:18.588245369+00:00 stderr F E1212 16:42:18.588128 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:19.589032495+00:00 stderr F E1212 16:42:19.588931 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:20.588923098+00:00 stderr F E1212 16:42:20.588798 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:21.589047055+00:00 stderr F E1212 16:42:21.588475 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:22.589056251+00:00 stderr F E1212 16:42:22.588844 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:23.588617016+00:00 stderr F E1212 16:42:23.588524 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" 2025-12-12T16:42:24.588514039+00:00 stderr F E1212 16:42:24.588375 1 sync.go:1689] Error syncing Required MachineConfigPools: "error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: \"unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \\\"/var/lib/kubelet/config.json\\\"\")" ././@LongLink0000644000000000000000000000030000000000000011574 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-8fdcdbb66-mzfqh_952b1cf6-a983-4b00-bca6-24b95d6bff57/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-route-contro0000755000175000017500000000000015117043043033106 5ustar zuulzuul././@LongLink0000644000000000000000000000033100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-8fdcdbb66-mzfqh_952b1cf6-a983-4b00-bca6-24b95d6bff57/route-controller-manager/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-route-contro0000755000175000017500000000000015117043062033107 5ustar zuulzuul././@LongLink0000644000000000000000000000033600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-8fdcdbb66-mzfqh_952b1cf6-a983-4b00-bca6-24b95d6bff57/route-controller-manager/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-route-contro0000644000175000017500000005434615117043043033124 0ustar zuulzuul2025-12-12T16:20:48.851376666+00:00 stderr F I1212 16:20:48.851146 1 cmd.go:253] Using service-serving-cert provided certificates 2025-12-12T16:20:48.851376666+00:00 stderr F I1212 16:20:48.851351 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 
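The repeated sync errors above show the machine-config operator reporting the master MachineConfigPool as degraded because node crc's on-disk /var/lib/kubelet/config.json no longer matches rendered-master-d582710c680b4cd4536e11249c7e09e9. As a minimal, hedged sketch of how that state could be confirmed with standard oc tooling (the node name, file path, and rendered-config name are taken from the log itself; the commands are generic oc verbs and this output is not part of the captured job logs):

    # Hedged sketch, not part of the captured logs: inspect the degraded pool
    oc get machineconfigpool master -o yaml
    # Check the node's machine-config annotations (current/desired rendered config)
    oc describe node crc | grep machineconfiguration.openshift.io
    # Compare the on-disk kubelet pull-secret config with the rendered MachineConfig
    oc debug node/crc -- chroot /host cat /var/lib/kubelet/config.json
    oc get machineconfig rendered-master-d582710c680b4cd4536e11249c7e09e9 -o yaml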
2025-12-12T16:20:48.852040593+00:00 stderr F I1212 16:20:48.851905 1 observer_polling.go:159] Starting file observer 2025-12-12T16:20:48.852553117+00:00 stderr F I1212 16:20:48.852516 1 builder.go:304] route-controller-manager version 4.20.0-202510211040.p2.gbf2fa66.assembly.stream.el9-bf2fa66-bf2fa662f57f233d8541f94c4953e0dcd7a5ab20 2025-12-12T16:20:48.853347977+00:00 stderr F I1212 16:20:48.853316 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:20:49.550276137+00:00 stderr F I1212 16:20:49.549666 1 requestheader_controller.go:255] Loaded a new request header values for RequestHeaderAuthRequestController 2025-12-12T16:20:49.553895362+00:00 stderr F I1212 16:20:49.553822 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400 2025-12-12T16:20:49.553895362+00:00 stderr F I1212 16:20:49.553848 1 maxinflight.go:145] "Initialized mutatingChan" len=200 2025-12-12T16:20:49.553895362+00:00 stderr F I1212 16:20:49.553874 1 maxinflight.go:116] "Set denominator for readonly requests" limit=400 2025-12-12T16:20:49.553895362+00:00 stderr F I1212 16:20:49.553880 1 maxinflight.go:120] "Set denominator for mutating requests" limit=200 2025-12-12T16:20:49.557783214+00:00 stderr F I1212 16:20:49.557741 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:20:49.557783214+00:00 stderr F W1212 16:20:49.557774 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:20:49.557783214+00:00 stderr F W1212 16:20:49.557778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:20:49.557805934+00:00 stderr F W1212 16:20:49.557782 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:20:49.557805934+00:00 stderr F W1212 16:20:49.557785 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:20:49.557805934+00:00 stderr F W1212 16:20:49.557788 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:20:49.557805934+00:00 stderr F W1212 16:20:49.557791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 
2025-12-12T16:20:49.558153253+00:00 stderr F I1212 16:20:49.558129 1 genericapiserver.go:546] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:20:49.561363208+00:00 stderr F I1212 16:20:49.560916 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"route-controller-manager.openshift-route-controller-manager.svc\" [serving] validServingFor=[route-controller-manager.openshift-route-controller-manager.svc,route-controller-manager.openshift-route-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:22 +0000 UTC to 2027-11-02 07:52:23 +0000 UTC (now=2025-12-12 16:20:49.560880265 +0000 UTC))" 2025-12-12T16:20:49.561363208+00:00 stderr F I1212 16:20:49.561074 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556449\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556449\" (2025-12-12 15:20:48 +0000 UTC to 2028-12-12 15:20:48 +0000 UTC (now=2025-12-12 16:20:49.561051049 +0000 UTC))" 2025-12-12T16:20:49.561363208+00:00 stderr F I1212 16:20:49.561091 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:20:49.561363208+00:00 stderr F I1212 16:20:49.561117 1 genericapiserver.go:696] [graceful-termination] waiting for shutdown to be initiated 2025-12-12T16:20:49.561363208+00:00 stderr F I1212 16:20:49.561140 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:20:49.561363208+00:00 stderr F I1212 16:20:49.561298 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:20:49.561363208+00:00 stderr F I1212 16:20:49.561338 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:20:49.561463000+00:00 stderr F I1212 16:20:49.561442 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:20:49.561570003+00:00 stderr F I1212 16:20:49.561544 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:20:49.562001134+00:00 stderr F I1212 16:20:49.561970 1 leaderelection.go:257] attempting to acquire leader lease openshift-route-controller-manager/openshift-route-controllers... 
2025-12-12T16:20:49.562372054+00:00 stderr F I1212 16:20:49.562343 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:20:49.562387445+00:00 stderr F I1212 16:20:49.562371 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:20:49.562428676+00:00 stderr F I1212 16:20:49.562408 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:20:49.562428676+00:00 stderr F I1212 16:20:49.562420 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:20:49.568792052+00:00 stderr F I1212 16:20:49.568704 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:49.569167102+00:00 stderr F I1212 16:20:49.569130 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:49.570895778+00:00 stderr F I1212 16:20:49.570843 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:49.576253488+00:00 stderr F I1212 16:20:49.576190 1 leaderelection.go:271] successfully acquired lease openshift-route-controller-manager/openshift-route-controllers 2025-12-12T16:20:49.576731501+00:00 stderr F I1212 16:20:49.576590 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-route-controller-manager", Name:"openshift-route-controllers", UID:"d0c7a6f9-4328-40a1-9cc1-17373aec90ec", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"40037", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' route-controller-manager-8fdcdbb66-mzfqh_8332c60e-6754-4189-b1b2-5739357e900b became leader 2025-12-12T16:20:49.577743177+00:00 stderr F I1212 16:20:49.577675 1 controller_manager.go:36] Starting "openshift.io/ingress-ip" 2025-12-12T16:20:49.577743177+00:00 stderr F I1212 16:20:49.577692 1 controller_manager.go:46] Started "openshift.io/ingress-ip" 2025-12-12T16:20:49.577743177+00:00 stderr F I1212 16:20:49.577698 1 controller_manager.go:36] Starting "openshift.io/ingress-to-route" 2025-12-12T16:20:49.581214918+00:00 stderr F I1212 16:20:49.580763 1 ingress.go:262] ingress-to-route metrics registered with prometheus 2025-12-12T16:20:49.581214918+00:00 stderr F I1212 16:20:49.580783 1 controller_manager.go:46] Started "openshift.io/ingress-to-route" 2025-12-12T16:20:49.581214918+00:00 stderr F I1212 16:20:49.580790 1 controller_manager.go:48] Started Route Controllers 2025-12-12T16:20:49.581214918+00:00 stderr F I1212 16:20:49.581165 1 ingress.go:313] Starting controller 2025-12-12T16:20:49.583050057+00:00 stderr F I1212 16:20:49.583014 1 reflector.go:430] "Caches populated" type="*v1.IngressClass" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:49.583970901+00:00 stderr F I1212 16:20:49.583926 1 reflector.go:430] "Caches populated" type="*v1.Ingress" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:49.584528235+00:00 stderr F I1212 16:20:49.584499 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 
2025-12-12T16:20:49.595264077+00:00 stderr F I1212 16:20:49.593660 1 reflector.go:430] "Caches populated" type="*v1.Route" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:49.661897355+00:00 stderr F I1212 16:20:49.661821 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:20:49.669927185+00:00 stderr F I1212 16:20:49.669746 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:20:49.669971556+00:00 stderr F I1212 16:20:49.669843 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:20:49.671174768+00:00 stderr F I1212 16:20:49.670630 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:20:49.670592953 +0000 UTC))" 2025-12-12T16:20:49.671295121+00:00 stderr F I1212 16:20:49.671267 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:20:49.67124348 +0000 UTC))" 2025-12-12T16:20:49.671407724+00:00 stderr F I1212 16:20:49.671384 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:20:49.671281711 +0000 UTC))" 2025-12-12T16:20:49.671604039+00:00 stderr F I1212 16:20:49.671580 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:20:49.671564608 +0000 UTC))" 2025-12-12T16:20:49.671638490+00:00 stderr F I1212 16:20:49.671617 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:20:49.671591529 +0000 UTC))" 2025-12-12T16:20:49.671664151+00:00 stderr F I1212 16:20:49.671644 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 
08:17:35 +0000 UTC (now=2025-12-12 16:20:49.67163043 +0000 UTC))" 2025-12-12T16:20:49.671685811+00:00 stderr F I1212 16:20:49.671666 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:20:49.67165093 +0000 UTC))" 2025-12-12T16:20:49.671718632+00:00 stderr F I1212 16:20:49.671699 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:20:49.671679041 +0000 UTC))" 2025-12-12T16:20:49.671744313+00:00 stderr F I1212 16:20:49.671725 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:20:49.671710712 +0000 UTC))" 2025-12-12T16:20:49.671778594+00:00 stderr F I1212 16:20:49.671759 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:20:49.671740223 +0000 UTC))" 2025-12-12T16:20:49.672150184+00:00 stderr F I1212 16:20:49.672120 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"route-controller-manager.openshift-route-controller-manager.svc\" [serving] validServingFor=[route-controller-manager.openshift-route-controller-manager.svc,route-controller-manager.openshift-route-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:22 +0000 UTC to 2027-11-02 07:52:23 +0000 UTC (now=2025-12-12 16:20:49.672096592 +0000 UTC))" 2025-12-12T16:20:49.674335591+00:00 stderr F I1212 16:20:49.674303 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556449\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556449\" (2025-12-12 15:20:48 +0000 UTC to 2028-12-12 15:20:48 +0000 UTC (now=2025-12-12 16:20:49.674278159 +0000 UTC))" 2025-12-12T16:20:49.680810961+00:00 stderr F I1212 16:20:49.680738 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 
16:20:49.680680437 +0000 UTC))" 2025-12-12T16:20:49.680810961+00:00 stderr F I1212 16:20:49.680784 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:20:49.68077234 +0000 UTC))" 2025-12-12T16:20:49.680810961+00:00 stderr F I1212 16:20:49.680802 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:20:49.68079029 +0000 UTC))" 2025-12-12T16:20:49.680881913+00:00 stderr F I1212 16:20:49.680819 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:20:49.680809781 +0000 UTC))" 2025-12-12T16:20:49.680881913+00:00 stderr F I1212 16:20:49.680837 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:20:49.680825281 +0000 UTC))" 2025-12-12T16:20:49.680881913+00:00 stderr F I1212 16:20:49.680855 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:20:49.680844182 +0000 UTC))" 2025-12-12T16:20:49.680881913+00:00 stderr F I1212 16:20:49.680871 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:20:49.680859442 +0000 UTC))" 2025-12-12T16:20:49.680934664+00:00 stderr F I1212 16:20:49.680890 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:20:49.680882103 +0000 UTC))" 2025-12-12T16:20:49.680945054+00:00 stderr F I1212 16:20:49.680937 1 tlsconfig.go:181] 
"Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:20:49.680920574 +0000 UTC))" 2025-12-12T16:20:49.680977505+00:00 stderr F I1212 16:20:49.680955 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:20:49.680947334 +0000 UTC))" 2025-12-12T16:20:49.680986905+00:00 stderr F I1212 16:20:49.680975 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:20:49.680964545 +0000 UTC))" 2025-12-12T16:20:49.681224042+00:00 stderr F I1212 16:20:49.681196 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"route-controller-manager.openshift-route-controller-manager.svc\" [serving] validServingFor=[route-controller-manager.openshift-route-controller-manager.svc,route-controller-manager.openshift-route-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:22 +0000 UTC to 2027-11-02 07:52:23 +0000 UTC (now=2025-12-12 16:20:49.68116488 +0000 UTC))" 2025-12-12T16:20:49.681409426+00:00 stderr F I1212 16:20:49.681381 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556449\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556449\" (2025-12-12 15:20:48 +0000 UTC to 2028-12-12 15:20:48 +0000 UTC (now=2025-12-12 16:20:49.681365685 +0000 UTC))" 2025-12-12T16:20:49.718464918+00:00 stderr F I1212 16:20:49.718383 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" ././@LongLink0000644000000000000000000000031200000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043043033075 5ustar zuulzuul././@LongLink0000644000000000000000000000031700000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/pull/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043062033076 5ustar 
zuulzuul././@LongLink0000644000000000000000000000032400000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/pull/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000644000175000017500000000364415117043043033106 0ustar zuulzuul2025-12-12T16:26:53.168250631+00:00 stdout F skipping a dir without errors: / 2025-12-12T16:26:53.168250631+00:00 stdout F skipping a dir without errors: /bundle 2025-12-12T16:26:53.168473286+00:00 stdout F skipping all files in the dir: /dev 2025-12-12T16:26:53.168473286+00:00 stdout F skipping a dir without errors: /etc 2025-12-12T16:26:53.168473286+00:00 stdout F skipping a dir without errors: /manifests 2025-12-12T16:26:53.168716883+00:00 stdout F skipping a dir without errors: /metadata 2025-12-12T16:26:53.173288338+00:00 stdout F skipping all files in the dir: /proc 2025-12-12T16:26:53.173288338+00:00 stdout F skipping a dir without errors: /run 2025-12-12T16:26:53.173288338+00:00 stdout F skipping a dir without errors: /run/secrets 2025-12-12T16:26:53.173336039+00:00 stdout F skipping a dir without errors: /run/secrets/rhsm 2025-12-12T16:26:53.173336039+00:00 stdout F skipping a dir without errors: /run/secrets/rhsm/ca 2025-12-12T16:26:53.173383420+00:00 stdout F skipping a dir without errors: /run/secrets/rhsm/syspurpose 2025-12-12T16:26:53.173430082+00:00 stdout F skipping all files in the dir: /sys 2025-12-12T16:26:53.173457712+00:00 stdout F skipping a dir without errors: /util 2025-12-12T16:26:53.173497443+00:00 stdout F skipping a dir without errors: /var 2025-12-12T16:26:53.173527124+00:00 stdout F skipping a dir without errors: /var/run 2025-12-12T16:26:53.173560155+00:00 stdout F skipping a dir without errors: /var/run/secrets 2025-12-12T16:26:53.173591166+00:00 stdout F skipping a dir without errors: /var/run/secrets/kubernetes.io 2025-12-12T16:26:53.173646377+00:00 stdout F skipping a dir without errors: /var/run/secrets/kubernetes.io/serviceaccount 2025-12-12T16:26:53.173672898+00:00 stdout F skipping a dir without errors: /var/run/secrets/kubernetes.io/serviceaccount/..2025_12_12_16_26_49.2698376012 2025-12-12T16:26:53.173759890+00:00 stdout F &{metadata/annotations.yaml manifests/} ././@LongLink0000644000000000000000000000031700000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/util/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043062033076 5ustar zuulzuul././@LongLink0000644000000000000000000000032400000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/util/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000644000175000017500000000010715117043043033075 0ustar zuulzuul2025-12-12T16:26:50.011225994+00:00 stdout F '/bin/cpb' -> '/util/cpb' ././@LongLink0000644000000000000000000000032200000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/extract/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043062033076 5ustar zuulzuul././@LongLink0000644000000000000000000000032700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/extract/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000644000175000017500000000422515117043043033102 0ustar zuulzuul2025-12-12T16:26:54.359302830+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Using in-cluster kube client config" 2025-12-12T16:26:54.401820284+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Reading file" file=/bundle/manifests/agents.agent.k8s.elastic.co.crd.yaml 2025-12-12T16:26:54.408255807+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Reading file" file=/bundle/manifests/apmservers.apm.k8s.elastic.co.crd.yaml 2025-12-12T16:26:54.414763441+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Reading file" file=/bundle/manifests/beats.beat.k8s.elastic.co.crd.yaml 2025-12-12T16:26:54.434075169+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Reading file" file=/bundle/manifests/elasticmapsservers.maps.k8s.elastic.co.crd.yaml 2025-12-12T16:26:54.434075169+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Reading file" file=/bundle/manifests/elasticsearch-eck-operator-certified.clusterserviceversion.yaml 2025-12-12T16:26:54.448240347+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Reading file" file=/bundle/manifests/elasticsearchautoscalers.autoscaling.k8s.elastic.co.crd.yaml 2025-12-12T16:26:54.448240347+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Reading file" file=/bundle/manifests/elasticsearches.elasticsearch.k8s.elastic.co.crd.yaml 2025-12-12T16:26:54.462237351+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Reading file" file=/bundle/manifests/enterprisesearches.enterprisesearch.k8s.elastic.co.crd.yaml 2025-12-12T16:26:54.466218571+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Reading file" file=/bundle/manifests/kibanas.kibana.k8s.elastic.co.crd.yaml 2025-12-12T16:26:54.471543436+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Reading file" file=/bundle/manifests/logstashes.logstash.k8s.elastic.co.crd.yaml 2025-12-12T16:26:54.475326971+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Reading file" file=/bundle/manifests/stackconfigpolicies.stackconfigpolicy.k8s.elastic.co.crd.yaml 2025-12-12T16:26:54.475326971+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="Reading file" file=/bundle/metadata/annotations.yaml ././@LongLink0000644000000000000000000000026100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-oper0000755000175000017500000000000015117043043033063 5ustar zuulzuul././@LongLink0000644000000000000000000000030100000000000011575 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-oper0000755000175000017500000000000015117043063033065 5ustar zuulzuul././@LongLink0000644000000000000000000000030600000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/kube-rbac-proxy/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-oper0000644000175000017500000000202015117043043033057 0ustar zuulzuul2025-12-12T16:16:47.307087262+00:00 stderr F W1212 16:16:47.305272 1 deprecated.go:66] 2025-12-12T16:16:47.307087262+00:00 stderr F ==== Removed Flag Warning ====================== 2025-12-12T16:16:47.307087262+00:00 stderr F 2025-12-12T16:16:47.307087262+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more. 2025-12-12T16:16:47.307087262+00:00 stderr F 2025-12-12T16:16:47.307087262+00:00 stderr F =============================================== 2025-12-12T16:16:47.307087262+00:00 stderr F 2025-12-12T16:16:47.314743029+00:00 stderr F I1212 16:16:47.310648 1 kube-rbac-proxy.go:235] Valid token audiences: 2025-12-12T16:16:47.314743029+00:00 stderr F I1212 16:16:47.312686 1 kube-rbac-proxy.go:349] Reading certificate files 2025-12-12T16:16:47.314743029+00:00 stderr F I1212 16:16:47.313123 1 kube-rbac-proxy.go:397] Starting TCP socket on :9393 2025-12-12T16:16:47.314743029+00:00 stderr F I1212 16:16:47.313629 1 kube-rbac-proxy.go:404] Listening securely on :9393 ././@LongLink0000644000000000000000000000030200000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/ingress-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-oper0000755000175000017500000000000015117043062033064 5ustar zuulzuul././@LongLink0000644000000000000000000000030700000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/ingress-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-oper0000644000175000017500000013676715117043043033111 0ustar zuulzuul2025-12-12T16:16:46.224817139+00:00 stderr F 2025-12-12T16:16:46.217Z INFO operator.main ingress-operator/start.go:76 using operator namespace {"namespace": "openshift-ingress-operator"} 2025-12-12T16:16:46.288234907+00:00 stderr F 2025-12-12T16:16:46.285Z INFO operator.init.KubeAPIWarningLogger rest/warnings.go:144 v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice 2025-12-12T16:16:46.288573465+00:00 stderr F 2025-12-12T16:16:46.288Z INFO operator.main ingress-operator/start.go:76 registering Prometheus metrics for canary_controller 2025-12-12T16:16:46.288573465+00:00 stderr F 2025-12-12T16:16:46.288Z INFO operator.main ingress-operator/start.go:76 registering Prometheus metrics for ingress_controller 2025-12-12T16:16:46.289220661+00:00 stderr F 2025-12-12T16:16:46.288Z INFO operator.main ingress-operator/start.go:76 registering 
Prometheus metrics for route_metrics_controller 2025-12-12T16:16:46.289220661+00:00 stderr F 2025-12-12T16:16:46.288Z INFO operator.main ingress-operator/start.go:76 watching file {"filename": "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem"} 2025-12-12T16:16:46.290218866+00:00 stderr F 2025-12-12T16:16:46.289Z INFO operator.init runtime/asm_amd64.s:1700 starting metrics listener {"addr": "127.0.0.1:60000"} 2025-12-12T16:16:46.376239196+00:00 stderr F I1212 16:16:46.375499 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:46.382255173+00:00 stderr F I1212 16:16:46.381899 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-ingress-operator", Name:"ingress-operator", UID:"", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", 
"VolumeGroupSnapshot"}} 2025-12-12T16:16:46.382255173+00:00 stderr F 2025-12-12T16:16:46.381Z INFO operator.init ingress-operator/start.go:218 FeatureGates initialized {"knownFeatures": ["AWSClusterHostedDNS","AWSClusterHostedDNSInstall","AWSDedicatedHosts","AWSServiceLBNetworkSecurityGroup","AdditionalRoutingCapabilities","AdminNetworkPolicy","AlibabaPlatform","AutomatedEtcdBackup","AzureClusterHostedDNSInstall","AzureDedicatedHosts","AzureMultiDisk","AzureWorkloadIdentity","BootImageSkewEnforcement","BootcNodeManagement","BuildCSIVolumes","CPMSMachineNamePrefix","ClusterAPIInstall","ClusterAPIInstallIBMCloud","ClusterMonitoringConfig","ClusterVersionOperatorConfiguration","ConsolePluginContentSecurityPolicy","DNSNameResolver","DualReplica","DyanmicServiceEndpointIBMCloud","DynamicResourceAllocation","EtcdBackendQuota","EventedPLEG","Example","Example2","ExternalOIDC","ExternalOIDCWithUIDAndExtraClaimMappings","ExternalSnapshotMetadata","GCPClusterHostedDNS","GCPClusterHostedDNSInstall","GCPCustomAPIEndpoints","GCPCustomAPIEndpointsInstall","GatewayAPI","GatewayAPIController","HighlyAvailableArbiter","ImageModeStatusReporting","ImageStreamImportMode","ImageVolume","IngressControllerDynamicConfigurationManager","IngressControllerLBSubnetsAWS","InsightsConfig","InsightsConfigAPI","InsightsOnDemandDataGather","IrreconcilableMachineConfig","KMSEncryptionProvider","KMSv1","MachineAPIMigration","MachineAPIOperatorDisableMachineHealthCheckController","MachineConfigNodes","ManagedBootImages","ManagedBootImagesAWS","ManagedBootImagesAzure","ManagedBootImagesvSphere","MaxUnavailableStatefulSet","MetricsCollectionProfiles","MinimumKubeletVersion","MixedCPUsAllocation","MultiArchInstallAzure","MultiDiskSetup","MutatingAdmissionPolicy","NetworkDiagnosticsConfig","NetworkLiveMigration","NetworkSegmentation","NewOLM","NewOLMCatalogdAPIV1Metas","NewOLMOwnSingleNamespace","NewOLMPreflightPermissionChecks","NewOLMWebhookProviderOpenshiftServiceCA","NoRegistryClusterOperations","NodeSwap","NutanixMultiSubnets","OVNObservability","OpenShiftPodSecurityAdmission","PinnedImages","PreconfiguredUDNAddresses","ProcMountType","RouteAdvertisements","RouteExternalCertificate","SELinuxMount","ServiceAccountTokenNodeBinding","SetEIPForNLBIngressController","ShortCertRotation","SignatureStores","SigstoreImageVerification","SigstoreImageVerificationPKI","StoragePerformantSecurityPolicy","TranslateStreamCloseWebsocketRequests","UpgradeStatus","UserNamespacesPodSecurityStandards","UserNamespacesSupport","VSphereConfigurableMaxAllowedBlockVolumesPerNode","VSphereHostVMGroupZonal","VSphereMixedNodeEnv","VSphereMultiDisk","VSphereMultiNetworks","VolumeAttributesClass","VolumeGroupSnapshot"]} 2025-12-12T16:16:46.401252486+00:00 stderr F I1212 16:16:46.397777 1 base_controller.go:67] Waiting for caches to sync for spread-default-router-pods 2025-12-12T16:16:46.493495928+00:00 stderr F 2025-12-12T16:16:46.489Z INFO operator.init.controller-runtime.metrics manager/runnable_group.go:226 Starting metrics server 2025-12-12T16:16:46.493495928+00:00 stderr F 2025-12-12T16:16:46.490Z INFO operator.init.controller-runtime.metrics manager/runnable_group.go:226 Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-12-12T16:16:46.508752371+00:00 stderr F I1212 16:16:46.498361 1 base_controller.go:73] Caches are synced for spread-default-router-pods 2025-12-12T16:16:46.508752371+00:00 stderr F I1212 16:16:46.498390 1 base_controller.go:110] Starting #1 worker of spread-default-router-pods controller ... 
2025-12-12T16:16:47.504057021+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "ingress_controller", "source": "kind source: *v1.Infrastructure"} 2025-12-12T16:16:47.504126623+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "status_controller", "source": "kind source: *v1alpha1.Subscription"} 2025-12-12T16:16:47.504460791+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "ingress_controller", "source": "kind source: *v1.Pod"} 2025-12-12T16:16:47.504609134+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "ingress_controller", "source": "kind source: *v1.IngressController"} 2025-12-12T16:16:47.504731777+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "ingress_controller", "source": "kind source: *v1.Deployment"} 2025-12-12T16:16:47.504828720+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "ingress_controller", "source": "kind source: *v1.Service"} 2025-12-12T16:16:47.504913532+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "configurable_route_controller", "source": "kind source: *v1.RoleBinding"} 2025-12-12T16:16:47.505169718+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "status_controller", "source": "kind source: *v1alpha1.Subscription"} 2025-12-12T16:16:47.505323692+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "status_controller", "source": "kind source: *v1.CustomResourceDefinition"} 2025-12-12T16:16:47.505458105+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "configurable_route_controller", "source": "kind source: *v1.Ingress"} 2025-12-12T16:16:47.505623849+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "configurable_route_controller", "source": "kind source: *v1.Role"} 2025-12-12T16:16:47.505715601+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "status_controller", "source": "kind source: *v1.IngressController"} 2025-12-12T16:16:47.505878885+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "status_controller", "source": "kind source: *v1.ClusterOperator"} 2025-12-12T16:16:47.505959217+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "error_page_configmap_controller", "source": "kind source: *v1.ConfigMap"} 2025-12-12T16:16:47.506083620+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "error_page_configmap_controller", "source": "kind source: *v1.IngressController"} 2025-12-12T16:16:47.506172063+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "certificate_controller", "source": "kind source: *v1.IngressController"} 2025-12-12T16:16:47.506311506+00:00 
stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "error_page_configmap_controller", "source": "kind source: *v1.ConfigMap"} 2025-12-12T16:16:47.506351547+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "ingress_controller", "source": "kind source: *v1.DNSRecord"} 2025-12-12T16:16:47.506452119+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "ingress_controller", "source": "kind source: *v1.DNS"} 2025-12-12T16:16:47.506639384+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "clientca_configmap_controller", "source": "kind source: *v1.ConfigMap"} 2025-12-12T16:16:47.506722876+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "clientca_configmap_controller", "source": "kind source: *v1.IngressController"} 2025-12-12T16:16:47.506875340+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "dns_controller", "source": "kind source: *v1.Secret"} 2025-12-12T16:16:47.506963822+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "clientca_configmap_controller", "source": "kind source: *v1.ConfigMap"} 2025-12-12T16:16:47.507037334+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "ingress_controller", "source": "kind source: *v1.Ingress"} 2025-12-12T16:16:47.507065274+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "ingress_controller", "source": "kind source: *v1.Proxy"} 2025-12-12T16:16:47.507072925+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "certificate_publisher_controller", "source": "kind source: *v1.IngressController"} 2025-12-12T16:16:47.507151626+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "dns_controller", "source": "kind source: *v1.DNSRecord"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "dns_controller", "source": "kind source: *v1.DNS"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "crl", "source": "kind source: *v1.IngressController"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "dns_controller", "source": "kind source: *v1.Infrastructure"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "certificate_publisher_controller", "source": "informer source: 0xc000aca940"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "canary_controller", "source": "kind source: *v1.Service"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource 
{"controller": "canary_controller", "source": "kind source: *v1.IngressController"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.499Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "canary_controller", "source": "kind source: *v1.Route"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.500Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "monitoring_dashboard_controller", "source": "kind source: *v1.Infrastructure"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.500Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "canary_controller", "source": "kind source: *v1.DaemonSet"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.500Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "crl", "source": "informer source: 0xc000acadc0"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.500Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "crl", "source": "informer source: 0xc000acadc0"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.500Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "monitoring_dashboard_controller", "source": "kind source: *v1.ConfigMap"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.500Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "ingressclass_controller", "source": "kind source: *v1.IngressClass"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.500Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "ingressclass_controller", "source": "kind source: *v1.IngressController"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.500Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "route_metrics_controller", "source": "kind source: *v1.Route"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.500Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "route_metrics_controller", "source": "kind source: *v1.IngressController"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.500Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "gatewayapi_controller", "source": "kind source: *v1.CustomResourceDefinition"} 2025-12-12T16:16:47.512250431+00:00 stderr F 2025-12-12T16:16:47.500Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "gatewayapi_controller", "source": "kind source: *v1.FeatureGate"} 2025-12-12T16:16:47.521808864+00:00 stderr F 2025-12-12T16:16:47.518Z ERROR operator.init.controller-runtime.source.EventHandler wait/loop.go:53 failed to get informer from cache {"error": "failed to get restmapping: failed to get API group resources: unable to retrieve the complete list of server APIs: route.openshift.io/v1: the server is currently unable to handle the request"} 2025-12-12T16:16:47.533427748+00:00 stderr F 2025-12-12T16:16:47.523Z INFO operator.certificate_publisher_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default"} 2025-12-12T16:16:47.549529021+00:00 stderr F 2025-12-12T16:16:47.549Z ERROR operator.init.controller-runtime.source.EventHandler wait/loop.go:53 failed to get informer from cache {"error": "unable to retrieve the complete list of server APIs: route.openshift.io/v1: no matches for 
route.openshift.io/v1, Resource="} 2025-12-12T16:16:47.614021446+00:00 stderr F 2025-12-12T16:16:47.613Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default", "related": ""} 2025-12-12T16:16:47.614100708+00:00 stderr F 2025-12-12T16:16:47.614Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default", "related": ""} 2025-12-12T16:16:47.614581699+00:00 stderr F 2025-12-12T16:16:47.614Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default", "related": ""} 2025-12-12T16:16:47.614646851+00:00 stderr F 2025-12-12T16:16:47.614Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default", "related": ""} 2025-12-12T16:16:47.723349805+00:00 stderr F 2025-12-12T16:16:47.719Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "certificate_controller"} 2025-12-12T16:16:47.723349805+00:00 stderr F 2025-12-12T16:16:47.719Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "certificate_controller", "worker count": 1} 2025-12-12T16:16:47.723349805+00:00 stderr F 2025-12-12T16:16:47.720Z INFO operator.certificate_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:47.723349805+00:00 stderr F 2025-12-12T16:16:47.720Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "certificate_publisher_controller"} 2025-12-12T16:16:47.723349805+00:00 stderr F 2025-12-12T16:16:47.720Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "certificate_publisher_controller", "worker count": 1} 2025-12-12T16:16:47.723349805+00:00 stderr F 2025-12-12T16:16:47.720Z INFO operator.certificate_publisher_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:47.723349805+00:00 stderr F 2025-12-12T16:16:47.720Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "clientca_configmap_controller"} 2025-12-12T16:16:47.723349805+00:00 stderr F 2025-12-12T16:16:47.720Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "clientca_configmap_controller", "worker count": 1} 2025-12-12T16:16:47.937244877+00:00 stderr F 2025-12-12T16:16:47.933Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "monitoring_dashboard_controller"} 2025-12-12T16:16:47.937244877+00:00 stderr F 2025-12-12T16:16:47.933Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "monitoring_dashboard_controller", "worker count": 1} 2025-12-12T16:16:47.937244877+00:00 stderr F 2025-12-12T16:16:47.935Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingress {"name": "default", "related": ""} 2025-12-12T16:16:48.036900930+00:00 stderr F 2025-12-12T16:16:48.033Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "crl"} 2025-12-12T16:16:48.036900930+00:00 stderr F 2025-12-12T16:16:48.033Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "crl", "worker count": 1} 2025-12-12T16:16:48.036900930+00:00 stderr F 2025-12-12T16:16:48.033Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "gatewayapi_controller"} 2025-12-12T16:16:48.036900930+00:00 
stderr F 2025-12-12T16:16:48.033Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "gatewayapi_controller", "worker count": 1} 2025-12-12T16:16:48.036900930+00:00 stderr F 2025-12-12T16:16:48.033Z INFO operator.gatewayapi_controller controller/controller.go:119 reconciling {"request": {"name":"cluster"}} 2025-12-12T16:16:48.046393392+00:00 stderr F 2025-12-12T16:16:48.043Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "ingressclass_controller"} 2025-12-12T16:16:48.046393392+00:00 stderr F 2025-12-12T16:16:48.043Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "ingressclass_controller", "worker count": 1} 2025-12-12T16:16:48.046393392+00:00 stderr F 2025-12-12T16:16:48.043Z INFO operator.ingressclass_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.137452955+00:00 stderr F 2025-12-12T16:16:48.134Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "error_page_configmap_controller"} 2025-12-12T16:16:48.137452955+00:00 stderr F 2025-12-12T16:16:48.135Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "error_page_configmap_controller", "worker count": 1} 2025-12-12T16:16:48.137452955+00:00 stderr F 2025-12-12T16:16:48.135Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "configurable_route_controller"} 2025-12-12T16:16:48.137452955+00:00 stderr F 2025-12-12T16:16:48.135Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "configurable_route_controller", "worker count": 1} 2025-12-12T16:16:48.137452955+00:00 stderr F 2025-12-12T16:16:48.135Z INFO operator.configurable_route_controller controller/controller.go:119 reconciling {"request": {"name":"cluster"}} 2025-12-12T16:16:48.137452955+00:00 stderr F 2025-12-12T16:16:48.135Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "dns_controller"} 2025-12-12T16:16:48.137452955+00:00 stderr F 2025-12-12T16:16:48.135Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "dns_controller", "worker count": 1} 2025-12-12T16:16:48.137452955+00:00 stderr F 2025-12-12T16:16:48.135Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingress {"name": "default", "related": ""} 2025-12-12T16:16:48.139819183+00:00 stderr F 2025-12-12T16:16:48.139Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "ingress_controller"} 2025-12-12T16:16:48.139819183+00:00 stderr F 2025-12-12T16:16:48.139Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "ingress_controller", "worker count": 1} 2025-12-12T16:16:48.139855834+00:00 stderr F 2025-12-12T16:16:48.139Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.149818237+00:00 stderr F 2025-12-12T16:16:48.145Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "status_controller"} 2025-12-12T16:16:48.149818237+00:00 stderr F 2025-12-12T16:16:48.145Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "status_controller", "worker count": 1} 2025-12-12T16:16:48.149818237+00:00 stderr F 2025-12-12T16:16:48.145Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": 
{"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.277295089+00:00 stderr F 2025-12-12T16:16:48.276Z ERROR operator.init controller/controller.go:294 Reconciler error {"controller": "status_controller", "object": {"name":"default","namespace":"openshift-ingress-operator"}, "namespace": "openshift-ingress-operator", "name": "default", "reconcileID": "e2286664-d45d-4fac-8b60-2ece91709504", "error": "failed to get operator state: failed to list gateway classes: Index with name field:gatewayclassController does not exist"} 2025-12-12T16:16:48.289493247+00:00 stderr F 2025-12-12T16:16:48.287Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.330006046+00:00 stderr F 2025-12-12T16:16:48.326Z ERROR operator.init controller/controller.go:294 Reconciler error {"controller": "status_controller", "object": {"name":"default","namespace":"openshift-ingress-operator"}, "namespace": "openshift-ingress-operator", "name": "default", "reconcileID": "96f74f75-555c-4b60-98d3-d76a1688b2fb", "error": "failed to get operator state: failed to list gateway classes: Index with name field:gatewayclassController does not exist"} 2025-12-12T16:16:48.347895773+00:00 stderr F 2025-12-12T16:16:48.338Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.395360152+00:00 stderr F 2025-12-12T16:16:48.395Z ERROR operator.init controller/controller.go:294 Reconciler error {"controller": "status_controller", "object": {"name":"default","namespace":"openshift-ingress-operator"}, "namespace": "openshift-ingress-operator", "name": "default", "reconcileID": "8554e613-57f3-404a-ad49-c9bc7057cefa", "error": "failed to get operator state: failed to list gateway classes: Index with name field:gatewayclassController does not exist"} 2025-12-12T16:16:48.424302178+00:00 stderr F 2025-12-12T16:16:48.417Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.499291719+00:00 stderr F 2025-12-12T16:16:48.494Z ERROR operator.init controller/controller.go:294 Reconciler error {"controller": "status_controller", "object": {"name":"default","namespace":"openshift-ingress-operator"}, "namespace": "openshift-ingress-operator", "name": "default", "reconcileID": "f0026390-3d1a-419b-8807-adf1b53ce9a9", "error": "failed to get operator state: failed to list gateway classes: Index with name field:gatewayclassController does not exist"} 2025-12-12T16:16:48.539313276+00:00 stderr F 2025-12-12T16:16:48.535Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.584750505+00:00 stderr F 2025-12-12T16:16:48.583Z ERROR operator.init controller/controller.go:294 Reconciler error {"controller": "status_controller", "object": {"name":"default","namespace":"openshift-ingress-operator"}, "namespace": "openshift-ingress-operator", "name": "default", "reconcileID": "fae60d72-8518-47d0-b409-934188a5f1a7", "error": "failed to get operator state: failed to list gateway classes: Index with name field:gatewayclassController does not exist"} 2025-12-12T16:16:48.652570701+00:00 stderr F 2025-12-12T16:16:48.646Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": 
{"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.652570701+00:00 stderr F 2025-12-12T16:16:48.646Z INFO operator.ingressclass_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.652570701+00:00 stderr F 2025-12-12T16:16:48.647Z ERROR operator.ingress_controller controller/controller.go:119 got retryable error; requeueing {"after": "1m0s", "error": "IngressController is degraded: DeploymentReplicasAllAvailable=False (DeploymentReplicasNotAvailable: 0/1 of replicas are available)"} 2025-12-12T16:16:48.652570701+00:00 stderr F 2025-12-12T16:16:48.647Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.673856211+00:00 stderr F 2025-12-12T16:16:48.671Z ERROR operator.init controller/controller.go:294 Reconciler error {"controller": "status_controller", "object": {"name":"default","namespace":"openshift-ingress-operator"}, "namespace": "openshift-ingress-operator", "name": "default", "reconcileID": "a6296f7b-da40-4777-ac67-57754d33bea8", "error": "failed to get operator state: failed to list gateway classes: Index with name field:gatewayclassController does not exist"} 2025-12-12T16:16:48.673856211+00:00 stderr F 2025-12-12T16:16:48.671Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.706458367+00:00 stderr F 2025-12-12T16:16:48.704Z ERROR operator.init controller/controller.go:294 Reconciler error {"controller": "status_controller", "object": {"name":"default","namespace":"openshift-ingress-operator"}, "namespace": "openshift-ingress-operator", "name": "default", "reconcileID": "1845f246-3803-4ecc-bf4d-4a471087c401", "error": "failed to get operator state: failed to list gateway classes: Index with name field:gatewayclassController does not exist"} 2025-12-12T16:16:48.829398658+00:00 stderr F 2025-12-12T16:16:48.829Z INFO operator.certificate_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.834645247+00:00 stderr F 2025-12-12T16:16:48.832Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:48.861944103+00:00 stderr F 2025-12-12T16:16:48.860Z ERROR operator.init controller/controller.go:294 Reconciler error {"controller": "status_controller", "object": {"name":"default","namespace":"openshift-ingress-operator"}, "namespace": "openshift-ingress-operator", "name": "default", "reconcileID": "14a20915-1f10-4992-b69e-f10eb8d4b7a2", "error": "failed to get operator state: failed to list gateway classes: Index with name field:gatewayclassController does not exist"} 2025-12-12T16:16:48.921556878+00:00 stderr F 2025-12-12T16:16:48.919Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "gateway_labeler_controller", "source": "kind source: *v1.Gateway"} 2025-12-12T16:16:48.948888446+00:00 stderr F 2025-12-12T16:16:48.941Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "gatewayclass_controller", "source": "kind source: *v1.CustomResourceDefinition"} 2025-12-12T16:16:48.948888446+00:00 stderr F 2025-12-12T16:16:48.942Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource 
{"controller": "service_dns_controller", "source": "kind source: *v1.DNSRecord"} 2025-12-12T16:16:48.948888446+00:00 stderr F 2025-12-12T16:16:48.942Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "gatewayclass_controller", "source": "kind source: *v1.GatewayClass"} 2025-12-12T16:16:48.948888446+00:00 stderr F 2025-12-12T16:16:48.942Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "gatewayclass_controller", "source": "kind source: *v1alpha1.Subscription"} 2025-12-12T16:16:48.948888446+00:00 stderr F 2025-12-12T16:16:48.942Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "gatewayclass_controller", "source": "kind source: *v1alpha1.InstallPlan"} 2025-12-12T16:16:48.948888446+00:00 stderr F 2025-12-12T16:16:48.942Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "service_dns_controller", "source": "kind source: *v1.Gateway"} 2025-12-12T16:16:48.948888446+00:00 stderr F 2025-12-12T16:16:48.942Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "service_dns_controller", "source": "kind source: *v1.Service"} 2025-12-12T16:16:48.948888446+00:00 stderr F 2025-12-12T16:16:48.945Z INFO operator.init runtime/asm_amd64.s:1700 Starting EventSource {"controller": "gateway_labeler_controller", "source": "kind source: *v1.GatewayClass"} 2025-12-12T16:16:48.959640448+00:00 stderr F 2025-12-12T16:16:48.958Z ERROR operator.ingress_controller controller/controller.go:119 got retryable error; requeueing {"after": "1m0s", "error": "IngressController is degraded: DeploymentReplicasAllAvailable=False (DeploymentReplicasNotAvailable: 0/1 of replicas are available)"} 2025-12-12T16:16:49.060256695+00:00 stderr F 2025-12-12T16:16:49.059Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "gateway_labeler_controller"} 2025-12-12T16:16:49.060256695+00:00 stderr F 2025-12-12T16:16:49.059Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "gateway_labeler_controller", "worker count": 1} 2025-12-12T16:16:49.507401741+00:00 stderr F 2025-12-12T16:16:49.501Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:49.552419480+00:00 stderr F 2025-12-12T16:16:49.550Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "service_dns_controller"} 2025-12-12T16:16:49.552419480+00:00 stderr F 2025-12-12T16:16:49.550Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "service_dns_controller", "worker count": 1} 2025-12-12T16:16:49.552419480+00:00 stderr F 2025-12-12T16:16:49.550Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "gatewayclass_controller"} 2025-12-12T16:16:49.552419480+00:00 stderr F 2025-12-12T16:16:49.550Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "gatewayclass_controller", "worker count": 1} 2025-12-12T16:16:49.572610933+00:00 stderr F 2025-12-12T16:16:49.571Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:57.623380256+00:00 stderr F 2025-12-12T16:16:57.622Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "route_metrics_controller"} 2025-12-12T16:16:57.623380256+00:00 stderr F 2025-12-12T16:16:57.623Z INFO 
operator.init controller/controller.go:262 Starting workers {"controller": "route_metrics_controller", "worker count": 1} 2025-12-12T16:16:57.623418087+00:00 stderr F 2025-12-12T16:16:57.622Z INFO operator.route_metrics_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default"} 2025-12-12T16:16:57.623427697+00:00 stderr F 2025-12-12T16:16:57.623Z INFO operator.route_metrics_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:57.623603441+00:00 stderr F 2025-12-12T16:16:57.623Z INFO operator.route_metrics_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default"} 2025-12-12T16:16:57.623603441+00:00 stderr F 2025-12-12T16:16:57.623Z INFO operator.route_metrics_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default"} 2025-12-12T16:16:57.623619491+00:00 stderr F 2025-12-12T16:16:57.623Z INFO operator.route_metrics_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default"} 2025-12-12T16:16:57.623684113+00:00 stderr F 2025-12-12T16:16:57.623Z INFO operator.route_metrics_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default"} 2025-12-12T16:16:57.826536146+00:00 stderr F 2025-12-12T16:16:57.825Z INFO operator.route_metrics_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:16:58.124978292+00:00 stderr F 2025-12-12T16:16:58.123Z INFO operator.init controller/controller.go:262 Starting Controller {"controller": "canary_controller"} 2025-12-12T16:16:58.124978292+00:00 stderr F 2025-12-12T16:16:58.124Z INFO operator.init controller/controller.go:262 Starting workers {"controller": "canary_controller", "worker count": 1} 2025-12-12T16:16:58.540760143+00:00 stderr F 2025-12-12T16:16:58.540Z ERROR operator.canary_controller wait/backoff.go:233 error performing canary route check {"error": "error sending canary HTTP request to \"canary-openshift-ingress-canary.apps-crc.testing\": Get \"https://canary-openshift-ingress-canary.apps-crc.testing\": dial tcp 38.102.83.180:443: connect: connection refused"} 2025-12-12T16:17:03.408671120+00:00 stderr F 2025-12-12T16:17:03.406Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingress {"name": "default", "related": ""} 2025-12-12T16:17:03.408671120+00:00 stderr F 2025-12-12T16:17:03.407Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingress {"name": "default", "related": ""} 2025-12-12T16:17:03.408671120+00:00 stderr F 2025-12-12T16:17:03.407Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:17:03.485914966+00:00 stderr F 2025-12-12T16:17:03.485Z ERROR operator.ingress_controller controller/controller.go:119 got retryable error; requeueing {"after": "1m0s", "error": "IngressController is degraded: DeploymentReplicasAllAvailable=False (DeploymentReplicasNotAvailable: 0/1 of replicas are available)"} 2025-12-12T16:17:33.504658199+00:00 stderr F 2025-12-12T16:17:33.503Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingress {"name": "default", "related": ""} 2025-12-12T16:17:33.504658199+00:00 stderr F 2025-12-12T16:17:33.504Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingress {"name": "default", "related": ""} 
2025-12-12T16:17:33.504658199+00:00 stderr F 2025-12-12T16:17:33.504Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:17:33.633246109+00:00 stderr F 2025-12-12T16:17:33.630Z INFO operator.route_metrics_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:17:33.633246109+00:00 stderr F 2025-12-12T16:17:33.630Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:17:33.633246109+00:00 stderr F 2025-12-12T16:17:33.631Z INFO operator.certificate_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:17:33.633246109+00:00 stderr F 2025-12-12T16:17:33.631Z INFO operator.ingressclass_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:17:33.638237552+00:00 stderr F 2025-12-12T16:17:33.636Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:17:33.675869031+00:00 stderr F 2025-12-12T16:17:33.674Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:17:48.648980389+00:00 stderr F 2025-12-12T16:17:48.648Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:18:47.532300484+00:00 stderr F 2025-12-12T16:18:47.531Z ERROR operator.init wait/backoff.go:233 failed to fetch ingress config {"error": "Get \"https://10.217.4.1:443/apis/config.openshift.io/v1/ingresses/cluster\": dial tcp 10.217.4.1:443: connect: connection refused"} 2025-12-12T16:19:25.136275644+00:00 stderr F 2025-12-12T16:19:25.135Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:19:37.814216640+00:00 stderr F 2025-12-12T16:19:37.813Z INFO operator.gatewayapi_controller controller/controller.go:119 reconciling {"request": {"name":"cluster"}} 2025-12-12T16:19:46.393801775+00:00 stderr F 2025-12-12T16:19:46.391Z INFO operator.ingressclass_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:19:51.364498089+00:00 stderr F 2025-12-12T16:19:51.363Z INFO operator.gatewayapi_controller controller/controller.go:119 reconciling {"request": {"name":"cluster"}} 2025-12-12T16:19:51.617701976+00:00 stderr F 2025-12-12T16:19:51.617Z INFO operator.gatewayapi_controller controller/controller.go:119 reconciling {"request": {"name":"cluster"}} 2025-12-12T16:20:04.960291700+00:00 stderr F 2025-12-12T16:20:04.959Z INFO operator.certificate_publisher_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default"} 2025-12-12T16:20:04.960396663+00:00 stderr F 2025-12-12T16:20:04.960Z INFO operator.certificate_publisher_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default"} 2025-12-12T16:20:04.960546007+00:00 stderr F 2025-12-12T16:20:04.960Z INFO operator.certificate_publisher_controller controller/controller.go:119 Reconciling 
{"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:20:17.689258608+00:00 stderr F 2025-12-12T16:20:17.688Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingress {"name": "default", "related": ""} 2025-12-12T16:20:17.689258608+00:00 stderr F 2025-12-12T16:20:17.689Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingress {"name": "default", "related": ""} 2025-12-12T16:20:17.689342110+00:00 stderr F 2025-12-12T16:20:17.689Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:20:18.914668276+00:00 stderr F 2025-12-12T16:20:18.914Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default", "related": ""} 2025-12-12T16:20:18.914668276+00:00 stderr F 2025-12-12T16:20:18.914Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default", "related": ""} 2025-12-12T16:20:18.914772119+00:00 stderr F 2025-12-12T16:20:18.914Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:20:20.760233524+00:00 stderr F 2025-12-12T16:20:20.759Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingress {"name": "default", "related": ""} 2025-12-12T16:20:20.760233524+00:00 stderr F 2025-12-12T16:20:20.760Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingress {"name": "default", "related": ""} 2025-12-12T16:20:20.760233524+00:00 stderr F 2025-12-12T16:20:20.760Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:20:21.319063705+00:00 stderr F 2025-12-12T16:20:21.318Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default", "related": ""} 2025-12-12T16:20:21.319154167+00:00 stderr F 2025-12-12T16:20:21.319Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default", "related": ""} 2025-12-12T16:20:21.319231289+00:00 stderr F 2025-12-12T16:20:21.319Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:20:25.492964233+00:00 stderr F 2025-12-12T16:20:25.492Z INFO operator.route_metrics_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:20:25.493299491+00:00 stderr F 2025-12-12T16:20:25.493Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:20:25.493627279+00:00 stderr F 2025-12-12T16:20:25.493Z INFO operator.certificate_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:20:25.493763543+00:00 stderr F 2025-12-12T16:20:25.493Z INFO operator.ingressclass_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:20:25.493935577+00:00 stderr F 2025-12-12T16:20:25.493Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": 
{"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:20:35.242716534+00:00 stderr F 2025-12-12T16:20:35.241Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default", "related": ""} 2025-12-12T16:20:35.242788016+00:00 stderr F 2025-12-12T16:20:35.242Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default", "related": ""} 2025-12-12T16:20:35.242833237+00:00 stderr F 2025-12-12T16:20:35.242Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:20:35.374398978+00:00 stderr F 2025-12-12T16:20:35.373Z INFO operator.configurable_route_controller controller/controller.go:119 reconciling {"request": {"name":"cluster"}} 2025-12-12T16:20:35.374398978+00:00 stderr F 2025-12-12T16:20:35.373Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default", "related": ""} 2025-12-12T16:20:35.374398978+00:00 stderr F 2025-12-12T16:20:35.373Z INFO operator.ingress_controller handler/enqueue_mapped.go:140 queueing ingresscontroller {"name": "default", "related": ""} 2025-12-12T16:20:35.374398978+00:00 stderr F 2025-12-12T16:20:35.373Z INFO operator.ingress_controller controller/controller.go:119 reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} 2025-12-12T16:26:40.946790954+00:00 stderr F 2025-12-12T16:26:40.945Z INFO operator.status_controller controller/controller.go:119 Reconciling {"request": {"name":"default","namespace":"openshift-ingress-operator"}} ././@LongLink0000644000000000000000000000024100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_downloads-747b44746d-sm46g_f967d508-b683-4df4-9be0-3a7fb5afa7bb/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_down0000755000175000017500000000000015117043043033137 5ustar zuulzuul././@LongLink0000644000000000000000000000026100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_downloads-747b44746d-sm46g_f967d508-b683-4df4-9be0-3a7fb5afa7bb/download-server/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_down0000755000175000017500000000000015117043062033140 5ustar zuulzuul././@LongLink0000644000000000000000000000026600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_downloads-747b44746d-sm46g_f967d508-b683-4df4-9be0-3a7fb5afa7bb/download-server/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_down0000644000175000017500000010557415117043043033155 0ustar zuulzuul2025-12-12T16:16:59.488018840+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:16:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:17:02.882984235+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:17:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:17:09.476453470+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:17:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:17:12.888374658+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:17:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:17:19.474094092+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:17:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:17:22.881590345+00:00 stderr F ::ffff:10.217.0.2 - 
- [12/Dec/2025 16:17:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:17:29.473926324+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:17:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:17:32.882588881+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:17:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:17:39.481800119+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:17:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:17:42.881951353+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:17:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:17:49.474856579+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:17:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:17:52.881244973+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:17:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:17:59.474331161+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:17:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:18:02.881220059+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:18:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:18:09.473885756+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:18:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:18:12.880567239+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:18:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:18:19.475325779+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:18:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:18:22.880507074+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:18:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:18:29.474485625+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:18:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:18:32.881289401+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:18:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:18:39.476259986+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:18:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:18:42.880829486+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:18:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:18:49.473223200+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:18:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:18:52.882269220+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:18:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:18:59.474567639+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:18:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:19:02.881675011+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:19:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:19:09.473693553+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:19:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:19:12.881334909+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:19:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:19:19.473469535+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:19:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:19:22.882137327+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:19:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:19:29.473206645+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:19:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:19:32.880844863+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:19:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:19:39.473768218+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:19:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:19:42.881242483+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:19:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:19:49.473422808+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:19:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:19:52.880151333+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:19:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:19:59.473912438+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:19:59] "GET / HTTP/1.1" 200 - 
2025-12-12T16:20:02.881060154+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:20:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:20:09.474133594+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:20:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:20:12.881810573+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:20:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:20:19.475349944+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:20:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:20:22.881779932+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:20:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:20:29.473642069+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:20:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:20:32.881301015+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:20:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:20:39.473825195+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:20:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:20:42.882014921+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:20:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:20:49.473592595+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:20:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:20:52.883374243+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:20:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:20:59.473216463+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:20:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:21:02.881861381+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:21:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:21:09.472413238+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:21:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:21:12.880402677+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:21:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:21:19.474300980+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:21:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:21:22.882420054+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:21:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:21:29.473592706+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:21:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:21:32.881393611+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:21:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:21:39.473645503+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:21:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:21:42.881279254+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:21:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:21:49.473470129+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:21:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:21:52.880108082+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:21:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:21:59.473973668+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:21:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:22:03.093619592+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:22:03] "GET / HTTP/1.1" 200 - 2025-12-12T16:22:09.474593294+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:22:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:22:12.881228230+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:22:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:22:19.473509484+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:22:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:22:22.882026508+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:22:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:22:29.473687483+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:22:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:22:32.880777499+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:22:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:22:39.473562621+00:00 stderr F 
::ffff:10.217.0.2 - - [12/Dec/2025 16:22:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:22:42.880051535+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:22:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:22:49.474935244+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:22:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:22:52.880875192+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:22:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:22:59.474351296+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:22:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:23:02.881092493+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:23:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:23:09.473407888+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:23:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:23:12.880648150+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:23:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:23:19.473216947+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:23:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:23:22.881255690+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:23:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:23:29.474262653+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:23:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:23:32.880674072+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:23:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:23:39.474135925+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:23:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:23:42.881658852+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:23:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:23:49.474057420+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:23:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:23:52.881217220+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:23:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:23:59.473678691+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:23:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:24:02.881293411+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:24:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:24:09.475099295+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:24:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:24:12.880814238+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:24:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:24:19.474331635+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:24:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:24:22.880974641+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:24:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:24:29.474335303+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:24:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:24:32.881132783+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:24:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:24:39.474299961+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:24:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:24:42.881521712+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:24:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:24:49.474216382+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:24:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:24:52.880421317+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:24:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:24:59.472854013+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:24:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:25:02.882021156+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:25:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:25:09.473629093+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:25:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:25:12.881791533+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:25:12] "GET / 
HTTP/1.1" 200 - 2025-12-12T16:25:19.473722877+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:25:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:25:22.881969177+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:25:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:25:29.473641871+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:25:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:25:32.881227413+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:25:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:25:39.473799716+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:25:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:25:42.880496845+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:25:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:25:49.474827104+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:25:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:25:52.880782535+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:25:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:25:59.473845180+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:25:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:26:02.881982740+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:26:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:26:09.473451413+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:26:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:26:12.881740338+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:26:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:26:19.472496992+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:26:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:26:22.882290175+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:26:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:26:29.473459871+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:26:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:26:32.881528209+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:26:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:26:39.474487180+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:26:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:26:42.882279462+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:26:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:26:49.474118654+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:26:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:26:52.883263121+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:26:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:26:59.475792441+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:26:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:27:02.882024908+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:27:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:27:09.475136001+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:27:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:27:12.882264080+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:27:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:27:19.473956036+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:27:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:27:22.883264891+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:27:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:27:29.473656845+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:27:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:27:32.881350288+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:27:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:27:39.474020209+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:27:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:27:42.881309565+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:27:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:27:49.474820448+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:27:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:27:52.883486197+00:00 
stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:27:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:27:59.473769039+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:27:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:28:02.882298974+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:28:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:28:09.474436511+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:28:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:28:12.881761337+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:28:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:28:19.473845343+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:28:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:28:22.883256121+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:28:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:28:29.474274310+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:28:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:28:32.882415966+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:28:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:28:39.472995066+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:28:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:28:42.882320782+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:28:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:28:49.473152437+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:28:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:28:52.881315182+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:28:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:28:59.475501672+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:28:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:29:02.881893535+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:29:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:29:09.473252994+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:29:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:29:12.881006913+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:29:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:29:19.474648953+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:29:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:29:22.880980599+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:29:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:29:29.474422343+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:29:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:29:32.881731643+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:29:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:29:39.473429504+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:29:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:29:42.882450566+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:29:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:29:49.474009764+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:29:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:29:52.883673953+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:29:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:29:59.472607984+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:29:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:30:02.881648048+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:30:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:30:09.473090572+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:30:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:30:12.881444117+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:30:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:30:19.472988480+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:30:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:30:22.883293432+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:30:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:30:29.475650415+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 
16:30:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:30:32.882633575+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:30:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:30:39.473522974+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:30:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:30:42.881563959+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:30:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:30:49.474271400+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:30:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:30:52.880833528+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:30:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:30:59.474818072+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:30:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:31:02.882585881+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:31:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:31:09.475285547+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:31:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:31:12.882265165+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:31:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:31:19.473889080+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:31:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:31:22.881223241+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:31:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:31:29.474909880+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:31:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:31:32.881442720+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:31:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:31:39.474784362+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:31:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:31:42.882495663+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:31:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:31:49.473592300+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:31:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:31:52.882152292+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:31:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:31:59.475008981+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:31:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:32:02.882208970+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:32:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:32:09.474618458+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:32:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:32:12.880982034+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:32:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:32:19.474856749+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:32:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:32:22.880879188+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:32:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:32:29.474521311+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:32:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:32:32.882923427+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:32:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:32:39.474274946+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:32:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:32:42.880727074+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:32:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:32:49.473721216+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:32:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:32:52.882256607+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:32:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:32:59.474285846+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:32:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:33:02.881091784+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:33:02] "GET / HTTP/1.1" 200 - 
2025-12-12T16:33:09.473698892+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:33:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:33:12.881385681+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:33:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:33:19.473411705+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:33:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:33:22.880888080+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:33:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:33:29.475320647+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:33:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:33:32.881880869+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:33:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:33:39.473349264+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:33:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:33:42.880741977+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:33:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:33:49.473469402+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:33:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:33:52.880722409+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:33:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:33:59.473658806+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:33:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:34:02.880222718+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:34:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:34:09.472798509+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:34:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:34:12.881791993+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:34:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:34:19.473572094+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:34:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:34:22.882414321+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:34:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:34:29.475628196+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:34:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:34:32.881139190+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:34:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:34:39.474837399+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:34:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:34:42.881469282+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:34:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:34:49.473817377+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:34:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:34:52.881666050+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:34:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:34:59.473981046+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:34:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:35:02.881138941+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:35:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:35:09.474046338+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:35:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:35:12.881160552+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:35:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:35:19.474282465+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:35:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:35:22.880777314+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:35:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:35:29.473275675+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:35:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:35:32.881767945+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:35:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:35:39.474213723+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:35:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:35:42.880960577+00:00 stderr F 
::ffff:10.217.0.2 - - [12/Dec/2025 16:35:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:35:49.473892505+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:35:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:35:52.881840639+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:35:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:35:59.473959298+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:35:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:36:02.882538228+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:36:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:36:09.474965684+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:36:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:36:12.881119664+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:36:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:36:19.473620445+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:36:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:36:22.881943791+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:36:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:36:29.474263220+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:36:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:36:32.881091667+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:36:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:36:39.473422461+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:36:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:36:42.881151171+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:36:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:36:49.474246205+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:36:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:36:52.882515068+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:36:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:36:59.474051535+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:36:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:37:02.882128623+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:37:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:37:09.473880591+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:37:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:37:12.880528924+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:37:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:37:19.474217533+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:37:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:37:22.882288042+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:37:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:37:29.474936745+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:37:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:37:32.882123042+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:37:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:37:39.474855038+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:37:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:37:42.883146813+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:37:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:37:49.474323416+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:37:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:37:52.881258074+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:37:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:37:59.474002209+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:37:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:38:02.880611821+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:38:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:38:09.474352372+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:38:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:38:12.881143549+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:38:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:38:19.473674059+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:38:19] "GET / 
HTTP/1.1" 200 - 2025-12-12T16:38:22.880890503+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:38:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:38:29.474848247+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:38:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:38:32.881502630+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:38:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:38:39.474816638+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:38:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:38:42.880790314+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:38:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:38:49.474585076+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:38:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:38:52.881656871+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:38:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:38:59.473502853+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:38:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:39:02.882268338+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:39:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:39:09.473404732+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:39:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:39:12.881947022+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:39:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:39:19.473976257+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:39:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:39:22.881299918+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:39:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:39:29.474379982+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:39:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:39:32.887131520+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:39:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:39:39.473837164+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:39:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:39:42.881758977+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:39:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:39:49.474117389+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:39:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:39:52.883927511+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:39:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:39:59.473612279+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:39:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:40:02.880806676+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:40:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:40:09.474714300+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:40:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:40:12.881848317+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:40:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:40:19.474493692+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:40:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:40:22.881267748+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:40:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:40:29.474866733+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:40:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:40:32.883739912+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:40:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:40:39.473943353+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:40:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:40:42.881391427+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:40:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:40:49.475138808+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:40:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:40:52.881422522+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:40:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:40:59.472694520+00:00 
stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:40:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:41:02.880504111+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:41:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:41:09.473236036+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:41:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:41:12.882370412+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:41:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:41:19.473883706+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:41:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:41:22.881248448+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:41:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:41:29.473731057+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:41:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:41:32.883068208+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:41:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:41:39.474150051+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:41:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:41:42.888411315+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:41:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:41:49.473843387+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:41:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:41:52.882223584+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:41:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:41:59.473847412+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:41:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:42:02.881670245+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:42:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:42:09.474253887+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:42:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:42:12.880405147+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:42:12] "GET / HTTP/1.1" 200 - 2025-12-12T16:42:19.474168069+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:42:19] "GET / HTTP/1.1" 200 - 2025-12-12T16:42:22.882398522+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:42:22] "GET / HTTP/1.1" 200 - 2025-12-12T16:42:29.473909076+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:42:29] "GET / HTTP/1.1" 200 - 2025-12-12T16:42:32.881435211+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:42:32] "GET / HTTP/1.1" 200 - 2025-12-12T16:42:39.473230353+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:42:39] "GET / HTTP/1.1" 200 - 2025-12-12T16:42:42.881816655+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:42:42] "GET / HTTP/1.1" 200 - 2025-12-12T16:42:49.475669330+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:42:49] "GET / HTTP/1.1" 200 - 2025-12-12T16:42:52.881666757+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:42:52] "GET / HTTP/1.1" 200 - 2025-12-12T16:42:59.474246739+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:42:59] "GET / HTTP/1.1" 200 - 2025-12-12T16:43:02.882165333+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:43:02] "GET / HTTP/1.1" 200 - 2025-12-12T16:43:09.472955287+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:43:09] "GET / HTTP/1.1" 200 - 2025-12-12T16:43:12.881442076+00:00 stderr F ::ffff:10.217.0.2 - - [12/Dec/2025 16:43:12] "GET / HTTP/1.1" 200 - ././@LongLink0000644000000000000000000000027700000000000011611 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-846cbfc458-zf8cv_e0a1decf-4248-4f48-ba06-e9ec8fdbbea8/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-op0000755000175000017500000000000015117043043033062 5ustar 
zuulzuul././@LongLink0000644000000000000000000000033400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-846cbfc458-zf8cv_e0a1decf-4248-4f48-ba06-e9ec8fdbbea8/openshift-apiserver-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-op0000755000175000017500000000000015117043062033063 5ustar zuulzuul././@LongLink0000644000000000000000000000034100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-846cbfc458-zf8cv_e0a1decf-4248-4f48-ba06-e9ec8fdbbea8/openshift-apiserver-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-op0000644000175000017500000027177115117043043033103 0ustar zuulzuul2025-12-12T16:16:41.243008903+00:00 stderr F I1212 16:16:41.241902 1 cmd.go:253] Using service-serving-cert provided certificates 2025-12-12T16:16:41.243629208+00:00 stderr F I1212 16:16:41.243244 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 2025-12-12T16:16:41.244292014+00:00 stderr F I1212 16:16:41.244122 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:41.269823617+00:00 stderr F I1212 16:16:41.269750 1 builder.go:304] openshift-apiserver-operator version 4.20.0-202510211040.p2.g9cfa567.assembly.stream.el9-9cfa567-9cfa5679a8ac1e5a68eea32179d9e069da85dfcf 2025-12-12T16:16:41.515964987+00:00 stderr F I1212 16:16:41.515904 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:41.515964987+00:00 stderr F W1212 16:16:41.515937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:41.515964987+00:00 stderr F W1212 16:16:41.515943 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:41.515964987+00:00 stderr F W1212 16:16:41.515947 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:41.515964987+00:00 stderr F W1212 16:16:41.515949 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:41.515964987+00:00 stderr F W1212 16:16:41.515952 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:41.515964987+00:00 stderr F W1212 16:16:41.515955 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 
2025-12-12T16:16:41.518756925+00:00 stderr F I1212 16:16:41.518713 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:41.518978880+00:00 stderr F I1212 16:16:41.518908 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:41.519279618+00:00 stderr F I1212 16:16:41.519258 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:16:41.519363260+00:00 stderr F I1212 16:16:41.519283 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:41.521300277+00:00 stderr F I1212 16:16:41.519324 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:41.521437060+00:00 stderr F I1212 16:16:41.519648 1 leaderelection.go:257] attempting to acquire leader lease openshift-apiserver-operator/openshift-apiserver-operator-lock... 2025-12-12T16:16:41.521561403+00:00 stderr F I1212 16:16:41.519645 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:41.521714587+00:00 stderr F I1212 16:16:41.519889 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:41.521918662+00:00 stderr F I1212 16:16:41.521887 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:41.523103821+00:00 stderr F I1212 16:16:41.521823 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:41.523144262+00:00 stderr F I1212 16:16:41.521832 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:41.562235266+00:00 stderr F I1212 16:16:41.558319 1 leaderelection.go:271] successfully acquired lease openshift-apiserver-operator/openshift-apiserver-operator-lock 2025-12-12T16:16:41.562235266+00:00 stderr F I1212 16:16:41.560821 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-apiserver-operator", Name:"openshift-apiserver-operator-lock", UID:"650999d0-e638-4a73-8009-69404328cbf8", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"36849", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' openshift-apiserver-operator-846cbfc458-zf8cv_718ae0e0-6c22-41ba-b2be-ac7686535eee became leader 2025-12-12T16:16:41.577411027+00:00 stderr F I1212 16:16:41.576604 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:41.589242156+00:00 stderr F I1212 16:16:41.588258 1 starter.go:144] FeatureGates initialized: knownFeatureGates=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification 
StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:16:41.589242156+00:00 stderr F I1212 16:16:41.588289 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-apiserver-operator", Name:"openshift-apiserver-operator", UID:"f8199ef4-1467-44ed-9019-69c1f1737f70", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", 
"ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:41.624360803+00:00 stderr F I1212 16:16:41.624280 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:41.624706042+00:00 stderr F I1212 16:16:41.624671 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:41.624706042+00:00 stderr F I1212 16:16:41.624674 1 base_controller.go:76] Waiting for caches to sync for ConnectivityCheckController 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.628432 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.629496 1 base_controller.go:76] Waiting for caches to sync for ConfigObserver 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630022 1 base_controller.go:76] Waiting for caches to sync for openshift-apiserver-EncryptionCondition 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630051 1 base_controller.go:76] Waiting for caches to sync for SecretRevisionPruneController 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630068 1 base_controller.go:76] Waiting for caches to sync for RevisionController 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630059 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630081 1 base_controller.go:76] Waiting for caches to sync for openshift-apiserver-EncryptionKey 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630098 1 base_controller.go:76] Waiting for caches to sync for openshift-apiserver-EncryptionMigration 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630112 1 base_controller.go:76] Waiting for caches to sync for openshift-apiserver-RemoveStaleConditions 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630125 1 base_controller.go:76] Waiting for caches to sync for NamespaceFinalizerController_openshift-apiserver 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630151 1 base_controller.go:76] Waiting for caches to sync for OpenShiftAPIServer-WorkloadWorkloadController 
2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630165 1 base_controller.go:76] Waiting for caches to sync for auditPolicyController 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630195 1 base_controller.go:76] Waiting for caches to sync for openshift-apiserver-EncryptionPrune 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630224 1 base_controller.go:76] Waiting for caches to sync for openshift-apiserver-UnsupportedConfigOverrides 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630255 1 base_controller.go:76] Waiting for caches to sync for openshift-apiserver-EncryptionState 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630297 1 base_controller.go:76] Waiting for caches to sync for openshift-apiserver-APIService 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630325 1 base_controller.go:76] Waiting for caches to sync for APIServerStaticResources-StaticResources 2025-12-12T16:16:41.630675237+00:00 stderr F I1212 16:16:41.630379 1 base_controller.go:76] Waiting for caches to sync for StatusSyncer_openshift-apiserver 2025-12-12T16:16:41.631904247+00:00 stderr F I1212 16:16:41.631642 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:41.631904247+00:00 stderr F I1212 16:16:41.631779 1 base_controller.go:76] Waiting for caches to sync for openshift-apiserver 2025-12-12T16:16:41.646902764+00:00 stderr F I1212 16:16:41.646818 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:41.730393862+00:00 stderr F I1212 16:16:41.730311 1 base_controller.go:82] Caches are synced for openshift-apiserver-UnsupportedConfigOverrides 2025-12-12T16:16:41.730808092+00:00 stderr F I1212 16:16:41.730776 1 base_controller.go:119] Starting #1 worker of openshift-apiserver-UnsupportedConfigOverrides controller ... 2025-12-12T16:16:41.731346355+00:00 stderr F I1212 16:16:41.731321 1 base_controller.go:82] Caches are synced for StatusSyncer_openshift-apiserver 2025-12-12T16:16:41.731346355+00:00 stderr F I1212 16:16:41.731338 1 base_controller.go:119] Starting #1 worker of StatusSyncer_openshift-apiserver controller ... 2025-12-12T16:16:41.732959965+00:00 stderr F I1212 16:16:41.732932 1 base_controller.go:82] Caches are synced for APIServerStaticResources-StaticResources 2025-12-12T16:16:41.732959965+00:00 stderr F I1212 16:16:41.732944 1 base_controller.go:119] Starting #1 worker of APIServerStaticResources-StaticResources controller ... 2025-12-12T16:16:41.732975195+00:00 stderr F I1212 16:16:41.732959 1 base_controller.go:82] Caches are synced for LoggingSyncer 2025-12-12T16:16:41.732975195+00:00 stderr F I1212 16:16:41.732964 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ... 2025-12-12T16:16:41.733105708+00:00 stderr F I1212 16:16:41.733082 1 base_controller.go:82] Caches are synced for openshift-apiserver-RemoveStaleConditions 2025-12-12T16:16:41.733105708+00:00 stderr F I1212 16:16:41.733093 1 base_controller.go:119] Starting #1 worker of openshift-apiserver-RemoveStaleConditions controller ... 
2025-12-12T16:16:41.782998346+00:00 stderr F I1212 16:16:41.782943 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:41.803759513+00:00 stderr F I1212 16:16:41.803670 1 reflector.go:430] "Caches populated" type="*v1.CustomResourceDefinition" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:41.831220524+00:00 stderr F I1212 16:16:41.831116 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:41.993998068+00:00 stderr F I1212 16:16:41.993938 1 status_controller.go:229] clusteroperator/openshift-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:41Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver ()","reason":"APIServerDeployment_UnavailablePod","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:14Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-apiserver pods available on any node.\nAPIServicesAvailable: PreconditionNotReady","reason":"APIServerDeployment_NoPod::APIServices_PreconditionNotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:48Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:48Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:41.994148941+00:00 stderr F I1212 16:16:41.994098 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-apiserver-operator", Name:"openshift-apiserver-operator", UID:"f8199ef4-1467-44ed-9019-69c1f1737f70", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-apiserver changed: Degraded changed from False to True ("APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver ()") 2025-12-12T16:16:42.028508750+00:00 stderr F I1212 16:16:42.028135 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:42.187704047+00:00 stderr F E1212 16:16:42.187635 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_openshift-apiserver reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"openshift-apiserver\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:16:42.244365849+00:00 stderr F I1212 16:16:42.244274 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:42.431325164+00:00 stderr F I1212 16:16:42.430559 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:42.628259312+00:00 stderr F I1212 16:16:42.628144 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:42.829073495+00:00 stderr F I1212 16:16:42.827543 1 request.go:752] "Waited before sending request" delay="1.201331209s" reason="client-side throttling, not priority and 
fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver-operator/configmaps?limit=500&resourceVersion=0" 2025-12-12T16:16:42.846845698+00:00 stderr F I1212 16:16:42.846464 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.028584186+00:00 stderr F I1212 16:16:43.027482 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:43.028584186+00:00 stderr F I1212 16:16:43.027604 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.029302963+00:00 stderr F I1212 16:16:43.029237 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:43.241873563+00:00 stderr F I1212 16:16:43.241449 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.435702425+00:00 stderr F I1212 16:16:43.435322 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:43.436912485+00:00 stderr F I1212 16:16:43.435766 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.452859394+00:00 stderr F I1212 16:16:43.451943 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:43.641132441+00:00 stderr F I1212 16:16:43.632806 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.736256933+00:00 stderr F I1212 16:16:43.734407 1 base_controller.go:82] Caches are synced for openshift-apiserver-APIService 2025-12-12T16:16:43.736256933+00:00 stderr F I1212 16:16:43.735099 1 base_controller.go:119] Starting #1 worker of openshift-apiserver-APIService controller ... 2025-12-12T16:16:43.830667328+00:00 stderr F I1212 16:16:43.829243 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.833318753+00:00 stderr F I1212 16:16:43.831095 1 base_controller.go:82] Caches are synced for NamespaceFinalizerController_openshift-apiserver 2025-12-12T16:16:43.833318753+00:00 stderr F I1212 16:16:43.831140 1 base_controller.go:119] Starting #1 worker of NamespaceFinalizerController_openshift-apiserver controller ... 2025-12-12T16:16:43.927224525+00:00 stderr F I1212 16:16:43.924864 1 base_controller.go:82] Caches are synced for ConnectivityCheckController 2025-12-12T16:16:43.927224525+00:00 stderr F I1212 16:16:43.924907 1 base_controller.go:119] Starting #1 worker of ConnectivityCheckController controller ... 
2025-12-12T16:16:44.032924646+00:00 stderr F I1212 16:16:44.030217 1 request.go:752] "Waited before sending request" delay="2.401749547s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces?limit=500&resourceVersion=0" 2025-12-12T16:16:44.036800541+00:00 stderr F I1212 16:16:44.036680 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:44.249217167+00:00 stderr F I1212 16:16:44.248149 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:44.332117221+00:00 stderr F I1212 16:16:44.332006 1 base_controller.go:82] Caches are synced for auditPolicyController 2025-12-12T16:16:44.332117221+00:00 stderr F I1212 16:16:44.332044 1 base_controller.go:119] Starting #1 worker of auditPolicyController controller ... 2025-12-12T16:16:44.430968674+00:00 stderr F I1212 16:16:44.430885 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432061 1 base_controller.go:82] Caches are synced for ConfigObserver 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432093 1 base_controller.go:119] Starting #1 worker of ConfigObserver controller ... 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432402 1 base_controller.go:82] Caches are synced for openshift-apiserver-EncryptionState 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432429 1 base_controller.go:119] Starting #1 worker of openshift-apiserver-EncryptionState controller ... 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432465 1 base_controller.go:82] Caches are synced for openshift-apiserver-EncryptionCondition 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432473 1 base_controller.go:119] Starting #1 worker of openshift-apiserver-EncryptionCondition controller ... 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432491 1 base_controller.go:82] Caches are synced for SecretRevisionPruneController 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432496 1 base_controller.go:119] Starting #1 worker of SecretRevisionPruneController controller ... 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432627 1 base_controller.go:82] Caches are synced for RevisionController 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432632 1 base_controller.go:119] Starting #1 worker of RevisionController controller ... 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432650 1 base_controller.go:82] Caches are synced for openshift-apiserver-EncryptionKey 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432654 1 base_controller.go:119] Starting #1 worker of openshift-apiserver-EncryptionKey controller ... 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432694 1 base_controller.go:82] Caches are synced for openshift-apiserver-EncryptionMigration 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432699 1 base_controller.go:119] Starting #1 worker of openshift-apiserver-EncryptionMigration controller ... 
2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432718 1 base_controller.go:82] Caches are synced for OpenShiftAPIServer-WorkloadWorkloadController 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.432722 1 base_controller.go:119] Starting #1 worker of OpenShiftAPIServer-WorkloadWorkloadController controller ... 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.433239 1 base_controller.go:82] Caches are synced for openshift-apiserver-EncryptionPrune 2025-12-12T16:16:44.434223333+00:00 stderr F I1212 16:16:44.433246 1 base_controller.go:119] Starting #1 worker of openshift-apiserver-EncryptionPrune controller ... 2025-12-12T16:16:44.643773760+00:00 stderr F I1212 16:16:44.639969 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:44.736542114+00:00 stderr F I1212 16:16:44.732218 1 base_controller.go:82] Caches are synced for openshift-apiserver 2025-12-12T16:16:44.736542114+00:00 stderr F I1212 16:16:44.732251 1 base_controller.go:119] Starting #1 worker of openshift-apiserver controller ... 2025-12-12T16:16:47.720118806+00:00 stderr F I1212 16:16:47.719374 1 status_controller.go:229] clusteroperator/openshift-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:41Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver (3 containers are waiting in pending apiserver-9ddfb9f55-sg8rq pod)","reason":"APIServerDeployment_UnavailablePod","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:14Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-apiserver pods available on any node.\nAPIServicesAvailable: PreconditionNotReady","reason":"APIServerDeployment_NoPod::APIServices_PreconditionNotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:48Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:48Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:47.786559318+00:00 stderr F I1212 16:16:47.782105 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-apiserver-operator", Name:"openshift-apiserver-operator", UID:"f8199ef4-1467-44ed-9019-69c1f1737f70", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-apiserver changed: Degraded message changed from "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver ()" to "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver (3 containers are waiting in pending apiserver-9ddfb9f55-sg8rq pod)" 2025-12-12T16:16:50.568564998+00:00 stderr F I1212 16:16:50.567768 1 status_controller.go:229] clusteroperator/openshift-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:41Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver (2 containers are waiting in pending apiserver-9ddfb9f55-sg8rq pod)","reason":"APIServerDeployment_UnavailablePod","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:14Z","message":"All is 
well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-apiserver pods available on any node.\nAPIServicesAvailable: PreconditionNotReady","reason":"APIServerDeployment_NoPod::APIServices_PreconditionNotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:48Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:48Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:50.586403964+00:00 stderr F I1212 16:16:50.586315 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-apiserver-operator", Name:"openshift-apiserver-operator", UID:"f8199ef4-1467-44ed-9019-69c1f1737f70", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-apiserver changed: Degraded message changed from "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver (3 containers are waiting in pending apiserver-9ddfb9f55-sg8rq pod)" to "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver (2 containers are waiting in pending apiserver-9ddfb9f55-sg8rq pod)" 2025-12-12T16:16:53.281643866+00:00 stderr F I1212 16:16:53.272138 1 status_controller.go:229] clusteroperator/openshift-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:41Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver (container is not ready in apiserver-9ddfb9f55-sg8rq pod)","reason":"APIServerDeployment_UnavailablePod","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:14Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-apiserver pods available on any node.\nAPIServicesAvailable: PreconditionNotReady","reason":"APIServerDeployment_NoPod::APIServices_PreconditionNotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:48Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:48Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:53.304791641+00:00 stderr F I1212 16:16:53.303645 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-apiserver-operator", Name:"openshift-apiserver-operator", UID:"f8199ef4-1467-44ed-9019-69c1f1737f70", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-apiserver changed: Degraded message changed from "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver (2 containers are waiting in pending apiserver-9ddfb9f55-sg8rq pod)" to "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver (container is not ready in apiserver-9ddfb9f55-sg8rq pod)" 2025-12-12T16:16:54.155305406+00:00 stderr F E1212 16:16:54.142164 1 base_controller.go:279] "Unhandled Error" err=< 2025-12-12T16:16:54.155305406+00:00 stderr F openshift-apiserver-APIService 
reconciliation failed: apiservices.apiregistration.k8s.io/v1.apps.openshift.io: not available: endpoints for service/api in "openshift-apiserver" have no addresses with port name "https" 2025-12-12T16:16:54.155305406+00:00 stderr F apiservices.apiregistration.k8s.io/v1.authorization.openshift.io: not available: endpoints for service/api in "openshift-apiserver" have no addresses with port name "https" 2025-12-12T16:16:54.155305406+00:00 stderr F apiservices.apiregistration.k8s.io/v1.build.openshift.io: not available: endpoints for service/api in "openshift-apiserver" have no addresses with port name "https" 2025-12-12T16:16:54.155305406+00:00 stderr F > 2025-12-12T16:16:54.155305406+00:00 stderr F I1212 16:16:54.154679 1 status_controller.go:229] clusteroperator/openshift-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:41Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver (container is not ready in apiserver-9ddfb9f55-sg8rq pod)","reason":"APIServerDeployment_UnavailablePod","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:14Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-apiserver pods available on any node.\nAPIServicesAvailable: apiservices.apiregistration.k8s.io/v1.apps.openshift.io: not available: endpoints for service/api in \"openshift-apiserver\" have no addresses with port name \"https\"\nAPIServicesAvailable: apiservices.apiregistration.k8s.io/v1.authorization.openshift.io: not available: endpoints for service/api in \"openshift-apiserver\" have no addresses with port name \"https\"\nAPIServicesAvailable: apiservices.apiregistration.k8s.io/v1.build.openshift.io: not available: endpoints for service/api in \"openshift-apiserver\" have no addresses with port name \"https\"","reason":"APIServerDeployment_NoPod::APIServices_Error","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:48Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:48Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:54.222688741+00:00 stderr F I1212 16:16:54.221831 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-apiserver-operator", Name:"openshift-apiserver-operator", UID:"f8199ef4-1467-44ed-9019-69c1f1737f70", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-apiserver changed: Available message changed from "APIServerDeploymentAvailable: no apiserver.openshift-apiserver pods available on any node.\nAPIServicesAvailable: PreconditionNotReady" to "APIServerDeploymentAvailable: no apiserver.openshift-apiserver pods available on any node.\nAPIServicesAvailable: apiservices.apiregistration.k8s.io/v1.apps.openshift.io: not available: endpoints for service/api in \"openshift-apiserver\" have no addresses with port name \"https\"\nAPIServicesAvailable: apiservices.apiregistration.k8s.io/v1.authorization.openshift.io: not available: endpoints for service/api in \"openshift-apiserver\" have no addresses with port name \"https\"\nAPIServicesAvailable: apiservices.apiregistration.k8s.io/v1.build.openshift.io: not available: endpoints for service/api in \"openshift-apiserver\" have no 
addresses with port name \"https\"" 2025-12-12T16:16:55.814295719+00:00 stderr F I1212 16:16:55.811365 1 status_controller.go:229] clusteroperator/openshift-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:41Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-apiserver (container is not ready in apiserver-9ddfb9f55-sg8rq pod)","reason":"APIServerDeployment_UnavailablePod","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:14Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-apiserver pods available on any node.","reason":"APIServerDeployment_NoPod","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:48Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:48Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:55.829163362+00:00 stderr F I1212 16:16:55.827981 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-apiserver-operator", Name:"openshift-apiserver-operator", UID:"f8199ef4-1467-44ed-9019-69c1f1737f70", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-apiserver changed: Available message changed from "APIServerDeploymentAvailable: no apiserver.openshift-apiserver pods available on any node.\nAPIServicesAvailable: apiservices.apiregistration.k8s.io/v1.apps.openshift.io: not available: endpoints for service/api in \"openshift-apiserver\" have no addresses with port name \"https\"\nAPIServicesAvailable: apiservices.apiregistration.k8s.io/v1.authorization.openshift.io: not available: endpoints for service/api in \"openshift-apiserver\" have no addresses with port name \"https\"\nAPIServicesAvailable: apiservices.apiregistration.k8s.io/v1.build.openshift.io: not available: endpoints for service/api in \"openshift-apiserver\" have no addresses with port name \"https\"" to "APIServerDeploymentAvailable: no apiserver.openshift-apiserver pods available on any node." 
2025-12-12T16:16:55.912340513+00:00 stderr F I1212 16:16:55.910621 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.910539188 +0000 UTC))" 2025-12-12T16:16:55.912340513+00:00 stderr F I1212 16:16:55.910652 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.910642961 +0000 UTC))" 2025-12-12T16:16:55.912340513+00:00 stderr F I1212 16:16:55.910667 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.910656791 +0000 UTC))" 2025-12-12T16:16:55.912340513+00:00 stderr F I1212 16:16:55.910679 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.910672522 +0000 UTC))" 2025-12-12T16:16:55.912340513+00:00 stderr F I1212 16:16:55.910691 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.910683422 +0000 UTC))" 2025-12-12T16:16:55.912340513+00:00 stderr F I1212 16:16:55.910707 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.910695222 +0000 UTC))" 2025-12-12T16:16:55.912340513+00:00 stderr F I1212 16:16:55.910722 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.910711903 +0000 UTC))" 2025-12-12T16:16:55.912340513+00:00 stderr F I1212 16:16:55.910735 1 tlsconfig.go:181] "Loaded client CA" index=7 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.910727133 +0000 UTC))" 2025-12-12T16:16:55.912340513+00:00 stderr F I1212 16:16:55.910747 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.910739443 +0000 UTC))" 2025-12-12T16:16:55.912340513+00:00 stderr F I1212 16:16:55.910765 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.910753794 +0000 UTC))" 2025-12-12T16:16:55.912340513+00:00 stderr F I1212 16:16:55.910953 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-apiserver-operator.svc\" [serving] validServingFor=[metrics.openshift-apiserver-operator.svc,metrics.openshift-apiserver-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:09 +0000 UTC to 2027-11-02 07:52:10 +0000 UTC (now=2025-12-12 16:16:55.910941308 +0000 UTC))" 2025-12-12T16:16:55.912340513+00:00 stderr F I1212 16:16:55.911311 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556201\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556201\" (2025-12-12 15:16:41 +0000 UTC to 2028-12-12 15:16:41 +0000 UTC (now=2025-12-12 16:16:55.911297827 +0000 UTC))" 2025-12-12T16:16:57.881810305+00:00 stderr F I1212 16:16:57.881022 1 status_controller.go:229] clusteroperator/openshift-apiserver diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:57Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:14Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:16:57Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:48Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:48Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:57.896486833+00:00 stderr F I1212 16:16:57.896406 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-apiserver-operator", Name:"openshift-apiserver-operator", UID:"f8199ef4-1467-44ed-9019-69c1f1737f70", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for 
clusteroperator/openshift-apiserver changed: Degraded changed from True to False ("All is well"),Available changed from False to True ("All is well") 2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.314382 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.314335281 +0000 UTC))" 2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.321481 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.321410536 +0000 UTC))" 2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.321515 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.321502208 +0000 UTC))" 2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.321538 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.321525139 +0000 UTC))" 2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.321568 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.321551669 +0000 UTC))" 2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.321598 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.32158115 +0000 UTC))" 2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.321622 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.321606211 +0000 UTC))" 
2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.321645 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.321630511 +0000 UTC))" 2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.321694 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.321652662 +0000 UTC))" 2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.321728 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.321711943 +0000 UTC))" 2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.321754 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.321741274 +0000 UTC))" 2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.322090 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-apiserver-operator.svc\" [serving] validServingFor=[metrics.openshift-apiserver-operator.svc,metrics.openshift-apiserver-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:09 +0000 UTC to 2027-11-02 07:52:10 +0000 UTC (now=2025-12-12 16:17:46.322068562 +0000 UTC))" 2025-12-12T16:17:46.333428693+00:00 stderr F I1212 16:17:46.326555 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556201\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556201\" (2025-12-12 15:16:41 +0000 UTC to 2028-12-12 15:16:41 +0000 UTC (now=2025-12-12 16:17:46.326518242 +0000 UTC))" 2025-12-12T16:18:41.574759076+00:00 stderr F E1212 16:18:41.573815 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-apiserver-operator/leases/openshift-apiserver-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:41.575846763+00:00 stderr F E1212 16:18:41.575793 1 leaderelection.go:436] error retrieving resource lock openshift-apiserver-operator/openshift-apiserver-operator-lock: Get 
"https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-apiserver-operator/leases/openshift-apiserver-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:41.743361515+00:00 stderr F E1212 16:18:41.743272 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"v3.11.0/openshift-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/serviceaccounts/openshift-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-apiserver/poddisruptionbudgets/openshift-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:41.755383592+00:00 stderr F E1212 16:18:41.755300 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"v3.11.0/openshift-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/serviceaccounts/openshift-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-apiserver/poddisruptionbudgets/openshift-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:41.774925485+00:00 stderr F E1212 16:18:41.774837 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"v3.11.0/openshift-apiserver/ns.yaml\" (string): Get 
\"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/serviceaccounts/openshift-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-apiserver/poddisruptionbudgets/openshift-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:42.138744269+00:00 stderr F E1212 16:18:42.138665 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"v3.11.0/openshift-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/serviceaccounts/openshift-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-apiserver/poddisruptionbudgets/openshift-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:42.740540768+00:00 stderr F E1212 16:18:42.740460 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"v3.11.0/openshift-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/svc.yaml\" (string): Get 
\"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/serviceaccounts/openshift-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-apiserver/poddisruptionbudgets/openshift-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:43.339621050+00:00 stderr F E1212 16:18:43.339542 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"v3.11.0/openshift-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/serviceaccounts/openshift-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-apiserver/poddisruptionbudgets/openshift-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:43.743663979+00:00 stderr F E1212 16:18:43.743578 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:43.939150481+00:00 stderr F E1212 16:18:43.939083 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"v3.11.0/openshift-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"v3.11.0/openshift-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/serviceaccounts/openshift-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-apiserver/poddisruptionbudgets/openshift-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:44.136057749+00:00 stderr F E1212 16:18:44.135973 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:44.336587506+00:00 stderr F E1212 16:18:44.336002 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:44.739087768+00:00 stderr F E1212 16:18:44.738986 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:44.936348475+00:00 stderr F E1212 16:18:44.936272 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:45.338202490+00:00 stderr F E1212 16:18:45.338101 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:45.339792469+00:00 stderr F E1212 16:18:45.339734 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:45.536207875+00:00 stderr F E1212 16:18:45.536119 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 
10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:45.739429710+00:00 stderr F E1212 16:18:45.739345 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"v3.11.0/openshift-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/serviceaccounts/openshift-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-apiserver/poddisruptionbudgets/openshift-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:45.937887016+00:00 stderr F E1212 16:18:45.937821 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.136075846+00:00 stderr F E1212 16:18:46.136010 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.337701711+00:00 stderr F E1212 16:18:46.337458 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.535807109+00:00 stderr F E1212 16:18:46.535726 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.737795083+00:00 stderr F E1212 16:18:46.737691 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-AuditPolicy\": Patch 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.135654709+00:00 stderr F E1212 16:18:47.135547 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.140353425+00:00 stderr F E1212 16:18:47.140288 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.338064863+00:00 stderr F E1212 16:18:47.337983 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.735948379+00:00 stderr F E1212 16:18:47.735878 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.937457891+00:00 stderr F E1212 16:18:47.936912 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.140991383+00:00 stderr F E1212 16:18:48.140917 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"v3.11.0/openshift-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/serviceaccounts/openshift-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-apiserver/poddisruptionbudgets/openshift-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager 
\"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:48.337797118+00:00 stderr F E1212 16:18:48.337621 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.536977223+00:00 stderr F E1212 16:18:48.536887 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.940854408+00:00 stderr F E1212 16:18:48.940496 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.983125183+00:00 stderr F E1212 16:18:48.983044 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.427926500+00:00 stderr F E1212 16:18:49.427844 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"v3.11.0/openshift-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/serviceaccounts/openshift-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-apiserver/poddisruptionbudgets/openshift-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 
2025-12-12T16:18:49.820596508+00:00 stderr F E1212 16:18:49.820528 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.267959758+00:00 stderr F E1212 16:18:50.267900 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.739714981+00:00 stderr F E1212 16:18:50.739647 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.024444262+00:00 stderr F E1212 16:18:52.024347 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"v3.11.0/openshift-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver/serviceaccounts/openshift-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"v3.11.0/openshift-apiserver/pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-apiserver/poddisruptionbudgets/openshift-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:52.384656588+00:00 stderr F E1212 16:18:52.384440 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.541248959+00:00 stderr F E1212 16:18:52.541136 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: 
connect: connection refused" 2025-12-12T16:18:52.833800302+00:00 stderr F E1212 16:18:52.833712 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.340736668+00:00 stderr F E1212 16:18:54.340664 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:56.139519218+00:00 stderr F E1212 16:18:56.139461 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftapiservers/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:58.898507717+00:00 stderr F I1212 16:18:58.897238 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:24.379468874+00:00 stderr F I1212 16:19:24.378635 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:33.539929122+00:00 stderr F I1212 16:19:33.539337 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:34.274537706+00:00 stderr F I1212 16:19:34.274451 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:37.502618137+00:00 stderr F I1212 16:19:37.502542 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:37.856356168+00:00 stderr F I1212 16:19:37.855966 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:37.856463701+00:00 stderr F I1212 16:19:37.856435 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:37.857934618+00:00 stderr F I1212 16:19:37.857896 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:39.046203723+00:00 stderr F I1212 16:19:39.046089 1 reflector.go:430] "Caches populated" type="*v1alpha1.StorageVersionMigration" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:40.251008783+00:00 stderr F I1212 16:19:40.250662 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:41.805549134+00:00 stderr F I1212 16:19:41.805471 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:42.588489892+00:00 stderr F I1212 16:19:42.588104 1 reflector.go:430] "Caches 
populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:43.545071320+00:00 stderr F I1212 16:19:43.544702 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:45.826256855+00:00 stderr F I1212 16:19:45.826166 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:48.771384791+00:00 stderr F I1212 16:19:48.770680 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:53.924634558+00:00 stderr F I1212 16:19:53.924533 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:56.187220157+00:00 stderr F I1212 16:19:56.186575 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:56.338129906+00:00 stderr F I1212 16:19:56.338054 1 reflector.go:430] "Caches populated" type="*v1.DaemonSet" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:00.729193936+00:00 stderr F I1212 16:20:00.728587 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:01.510779330+00:00 stderr F I1212 16:20:01.510682 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:01.510836072+00:00 stderr F I1212 16:20:01.510820 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:01.511740084+00:00 stderr F I1212 16:20:01.511697 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:01.831507533+00:00 stderr F I1212 16:20:01.831440 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:07.311431282+00:00 stderr F I1212 16:20:07.310407 1 reflector.go:430] "Caches populated" type="*v1.CustomResourceDefinition" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:07.377903521+00:00 stderr F I1212 16:20:07.377805 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:08.235020762+00:00 stderr F I1212 16:20:08.234203 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:08.531995719+00:00 stderr F I1212 16:20:08.531907 1 reflector.go:430] "Caches populated" type="*v1.OpenShiftAPIServer" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:08.638950284+00:00 stderr F I1212 16:20:08.638868 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:10.540825435+00:00 stderr F I1212 16:20:10.540734 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:11.318599184+00:00 stderr F I1212 16:20:11.318520 1 reflector.go:430] "Caches populated" type="*v1.Image" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 
2025-12-12T16:20:11.372435746+00:00 stderr F I1212 16:20:11.372364 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:11.407553998+00:00 stderr F I1212 16:20:11.407476 1 reflector.go:430] "Caches populated" type="*v1.Project" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:11.732232269+00:00 stderr F I1212 16:20:11.732144 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:15.601893159+00:00 stderr F I1212 16:20:15.601306 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:15.659216639+00:00 stderr F I1212 16:20:15.659151 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:18.088816790+00:00 stderr F I1212 16:20:18.088760 1 reflector.go:430] "Caches populated" type="*v1.APIService" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:18.168445629+00:00 stderr F I1212 16:20:18.168368 1 reflector.go:430] "Caches populated" type="*v1.APIServer" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:18.545325962+00:00 stderr F I1212 16:20:18.545250 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:18.758431143+00:00 stderr F I1212 16:20:18.758329 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:21.099058081+00:00 stderr F I1212 16:20:21.098387 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:21.289972484+00:00 stderr F I1212 16:20:21.289859 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:22.095348876+00:00 stderr F I1212 16:20:22.095235 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:24.680096283+00:00 stderr F I1212 16:20:24.679993 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:25.862244965+00:00 stderr F I1212 16:20:25.861213 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=openshiftapiservers" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:26.603583789+00:00 stderr F I1212 16:20:26.603509 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:26.603646270+00:00 stderr F I1212 16:20:26.603636 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:26.604459190+00:00 stderr F I1212 16:20:26.604419 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:28.233845200+00:00 stderr F I1212 16:20:28.232879 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:28.978479766+00:00 stderr F I1212 16:20:28.978335 1 reflector.go:430] "Caches 
populated" type="*v1.Infrastructure" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:33.202758287+00:00 stderr F I1212 16:20:33.201975 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:36.902584942+00:00 stderr F I1212 16:20:36.901488 1 reflector.go:430] "Caches populated" type="*v1.Ingress" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:37.140246306+00:00 stderr F I1212 16:20:37.139410 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:38.260261134+00:00 stderr F I1212 16:20:38.259816 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:25:34.515996218+00:00 stderr F I1212 16:25:34.515314 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:26:43.862919276+00:00 stderr F I1212 16:26:43.862055 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:30:06.607708354+00:00 stderr F I1212 16:30:06.606637 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:32:57.866264861+00:00 stderr F I1212 16:32:57.865138 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:35:12.517478175+00:00 stderr F I1212 16:35:12.516879 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:36:51.610146620+00:00 stderr F I1212 16:36:51.609524 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:38:09.869435869+00:00 stderr F I1212 16:38:09.868550 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:43:07.520028360+00:00 stderr F I1212 16:43:07.519135 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" ././@LongLink0000644000000000000000000000023700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-858d87f86b-r7f8q_7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-mana0000755000175000017500000000000015117043043032745 5ustar zuulzuul././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-858d87f86b-r7f8q_7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc/cert-manager-controller/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-mana0000755000175000017500000000000015117043062032746 5ustar zuulzuul././@LongLink0000644000000000000000000000027400000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-858d87f86b-r7f8q_7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc/cert-manager-controller/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-mana0000644000175000017500000002226115117043043032752 0ustar zuulzuul2025-12-12T16:28:08.267299181+00:00 stderr F 
I1212 16:28:08.265380 1 controller.go:284] "configured acme dns01 nameservers" logger="cert-manager.controller.build-context" nameservers=["10.217.4.10:53"] 2025-12-12T16:28:08.267299181+00:00 stderr F I1212 16:28:08.266774 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:28:08.267299181+00:00 stderr F I1212 16:28:08.266796 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:28:08.267299181+00:00 stderr F I1212 16:28:08.266804 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:28:08.267299181+00:00 stderr F I1212 16:28:08.266808 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:28:08.273828056+00:00 stderr F I1212 16:28:08.273746 1 controller.go:88] "enabled controllers: [certificaterequests-approver certificaterequests-issuer-acme certificaterequests-issuer-ca certificaterequests-issuer-selfsigned certificaterequests-issuer-vault certificaterequests-issuer-venafi certificates-issuing certificates-key-manager certificates-metrics certificates-readiness certificates-request-manager certificates-revision-manager certificates-trigger challenges clusterissuers ingress-shim issuers orders]" logger="cert-manager.controller" 2025-12-12T16:28:08.273828056+00:00 stderr F I1212 16:28:08.273805 1 controller.go:437] "serving insecurely as tls certificate data not provided" logger="cert-manager.controller" 2025-12-12T16:28:08.273828056+00:00 stderr F I1212 16:28:08.273817 1 controller.go:101] "listening for insecure connections" logger="cert-manager.controller" address="0.0.0.0:9402" 2025-12-12T16:28:08.277024757+00:00 stderr F I1212 16:28:08.275077 1 controller.go:125] "starting metrics server" logger="cert-manager.controller" address="[::]:9402" 2025-12-12T16:28:08.277024757+00:00 stderr F I1212 16:28:08.275221 1 controller.go:176] "starting leader election" logger="cert-manager.controller" 2025-12-12T16:28:08.277024757+00:00 stderr F I1212 16:28:08.275533 1 controller.go:169] "starting healthz server" logger="cert-manager.controller" address="[::]:9403" 2025-12-12T16:28:08.279342096+00:00 stderr F I1212 16:28:08.279283 1 leaderelection.go:257] attempting to acquire leader lease kube-system/cert-manager-controller... 
2025-12-12T16:28:08.303472307+00:00 stderr F I1212 16:28:08.303408 1 leaderelection.go:271] successfully acquired lease kube-system/cert-manager-controller 2025-12-12T16:28:08.307069708+00:00 stderr F I1212 16:28:08.306964 1 controller.go:223] "skipping disabled controller" logger="cert-manager.controller" controller="certificatesigningrequests-issuer-ca" 2025-12-12T16:28:08.307069708+00:00 stderr F I1212 16:28:08.306992 1 controller.go:223] "skipping disabled controller" logger="cert-manager.controller" controller="certificatesigningrequests-issuer-venafi" 2025-12-12T16:28:08.307069708+00:00 stderr F I1212 16:28:08.307049 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificaterequests-issuer-selfsigned" 2025-12-12T16:28:08.328432188+00:00 stderr F I1212 16:28:08.328345 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificaterequests-issuer-ca" 2025-12-12T16:28:08.329117916+00:00 stderr F I1212 16:28:08.329083 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificates-issuing" 2025-12-12T16:28:08.349670956+00:00 stderr F I1212 16:28:08.348873 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificates-metrics" 2025-12-12T16:28:08.368229096+00:00 stderr F I1212 16:28:08.368017 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificates-readiness" 2025-12-12T16:28:08.383303767+00:00 stderr F I1212 16:28:08.375305 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificates-request-manager" 2025-12-12T16:28:08.383303767+00:00 stderr F I1212 16:28:08.375550 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificates-trigger" 2025-12-12T16:28:08.383303767+00:00 stderr F I1212 16:28:08.379796 1 controller.go:223] "skipping disabled controller" logger="cert-manager.controller" controller="certificatesigningrequests-issuer-selfsigned" 2025-12-12T16:28:08.383303767+00:00 stderr F I1212 16:28:08.379937 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificaterequests-issuer-acme" 2025-12-12T16:28:08.420226051+00:00 stderr F I1212 16:28:08.401778 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificaterequests-approver" 2025-12-12T16:28:08.420226051+00:00 stderr F I1212 16:28:08.412117 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificates-key-manager" 2025-12-12T16:28:08.420226051+00:00 stderr F I1212 16:28:08.414924 1 controller.go:223] "skipping disabled controller" logger="cert-manager.controller" controller="certificatesigningrequests-issuer-acme" 2025-12-12T16:28:08.478014384+00:00 stderr F I1212 16:28:08.477810 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificaterequests-issuer-vault" 2025-12-12T16:28:08.478014384+00:00 stderr F I1212 16:28:08.477906 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="issuers" 2025-12-12T16:28:08.490988432+00:00 stderr F I1212 16:28:08.489354 1 controller.go:223] "skipping disabled controller" logger="cert-manager.controller" controller="gateway-shim" 2025-12-12T16:28:08.513745218+00:00 stderr F I1212 16:28:08.509275 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="challenges" 2025-12-12T16:28:08.575386048+00:00 stderr F 
I1212 16:28:08.575271 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="ingress-shim" 2025-12-12T16:28:08.575386048+00:00 stderr F I1212 16:28:08.575338 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificaterequests-issuer-venafi" 2025-12-12T16:28:08.575386048+00:00 stderr F I1212 16:28:08.575309 1 controller.go:223] "skipping disabled controller" logger="cert-manager.controller" controller="certificatesigningrequests-issuer-vault" 2025-12-12T16:28:08.578751704+00:00 stderr F I1212 16:28:08.578700 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="clusterissuers" 2025-12-12T16:28:08.592526552+00:00 stderr F I1212 16:28:08.592422 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="certificates-revision-manager" 2025-12-12T16:28:08.597601031+00:00 stderr F I1212 16:28:08.597531 1 controller.go:240] "starting controller" logger="cert-manager.controller" controller="orders" 2025-12-12T16:28:08.605566052+00:00 stderr F I1212 16:28:08.605483 1 reflector.go:376] Caches populated for *v1.PartialObjectMetadata from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.610068646+00:00 stderr F I1212 16:28:08.610019 1 reflector.go:376] Caches populated for *v1.Issuer from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.610676262+00:00 stderr F I1212 16:28:08.610415 1 reflector.go:376] Caches populated for *v1.CertificateRequest from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.610708382+00:00 stderr F I1212 16:28:08.610628 1 reflector.go:376] Caches populated for *v1.Secret from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.610828095+00:00 stderr F I1212 16:28:08.610796 1 reflector.go:376] Caches populated for *v1.PartialObjectMetadata from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.610968249+00:00 stderr F I1212 16:28:08.610036 1 reflector.go:376] Caches populated for *v1.ClusterIssuer from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.611332138+00:00 stderr F I1212 16:28:08.611266 1 reflector.go:376] Caches populated for *v1.Certificate from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.611332138+00:00 stderr F I1212 16:28:08.611278 1 reflector.go:376] Caches populated for *v1.Ingress from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.611808850+00:00 stderr F I1212 16:28:08.611767 1 reflector.go:376] Caches populated for *v1.Order from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.624368318+00:00 stderr F I1212 16:28:08.620998 1 reflector.go:376] Caches populated for *v1.Challenge from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.630120904+00:00 stderr F I1212 16:28:08.630041 1 reflector.go:376] Caches populated for *v1.PartialObjectMetadata from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 ././@LongLink0000644000000000000000000000030100000000000011575 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015117043043033051 5ustar zuulzuul././@LongLink0000644000000000000000000000033000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/package-server-manager/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015117043062033052 5ustar zuulzuul././@LongLink0000644000000000000000000000033500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/package-server-manager/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000644000175000017500000001747515117043043033071 0ustar zuulzuul2025-12-12T16:16:50.885487316+00:00 stderr F 2025-12-12T16:16:50Z INFO setup starting manager 2025-12-12T16:16:50.885487316+00:00 stderr F 2025-12-12T16:16:50Z INFO controller-runtime.metrics Starting metrics server 2025-12-12T16:16:50.885487316+00:00 stderr F 2025-12-12T16:16:50Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":9090", "secure": false} 2025-12-12T16:16:50.885727592+00:00 stderr F 2025-12-12T16:16:50Z INFO starting server {"name": "pprof", "addr": "[::]:6060"} 2025-12-12T16:16:50.885727592+00:00 stderr F 2025-12-12T16:16:50Z INFO starting server {"name": "health probe", "addr": "[::]:8080"} 2025-12-12T16:16:50.885794353+00:00 stderr F I1212 16:16:50.885761 1 leaderelection.go:257] attempting to acquire leader lease openshift-operator-lifecycle-manager/packageserver-controller-lock... 
2025-12-12T16:16:50.936899541+00:00 stderr F I1212 16:16:50.934701 1 leaderelection.go:271] successfully acquired lease openshift-operator-lifecycle-manager/packageserver-controller-lock 2025-12-12T16:16:50.936899541+00:00 stderr F 2025-12-12T16:16:50Z INFO Starting EventSource {"controller": "clusterserviceversion", "controllerGroup": "operators.coreos.com", "controllerKind": "ClusterServiceVersion", "source": "kind source: *v1.Infrastructure"} 2025-12-12T16:16:50.937072825+00:00 stderr F 2025-12-12T16:16:50Z INFO Starting EventSource {"controller": "clusterserviceversion", "controllerGroup": "operators.coreos.com", "controllerKind": "ClusterServiceVersion", "source": "kind source: *v1alpha1.ClusterServiceVersion"} 2025-12-12T16:16:50.942097708+00:00 stderr F 2025-12-12T16:16:50Z DEBUG events package-server-manager-77f986bd66-mjzlp_93cbd889-58bd-4e3d-a4be-4adc9d46eeea became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"openshift-operator-lifecycle-manager","name":"packageserver-controller-lock","uid":"f3b3b1dc-8cb5-43d7-b516-3fe5374acd66","apiVersion":"coordination.k8s.io/v1","resourceVersion":"37610"}, "reason": "LeaderElection"} 2025-12-12T16:16:51.039742242+00:00 stderr F 2025-12-12T16:16:51Z INFO controllers.packageserver requeueing the packageserver deployment after encountering infrastructure event {"infrastructure": "cluster"} 2025-12-12T16:16:51.039742242+00:00 stderr F 2025-12-12T16:16:51Z INFO Starting Controller {"controller": "clusterserviceversion", "controllerGroup": "operators.coreos.com", "controllerKind": "ClusterServiceVersion"} 2025-12-12T16:16:51.039787463+00:00 stderr F 2025-12-12T16:16:51Z INFO Starting workers {"controller": "clusterserviceversion", "controllerGroup": "operators.coreos.com", "controllerKind": "ClusterServiceVersion", "worker count": 1} 2025-12-12T16:16:51.040048659+00:00 stderr F 2025-12-12T16:16:51Z INFO controllers.packageserver handling current request {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}, "request": "openshift-operator-lifecycle-manager/packageserver"} 2025-12-12T16:16:51.040048659+00:00 stderr F 2025-12-12T16:16:51Z INFO controllers.packageserver checking to see if required RBAC exists {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}} 2025-12-12T16:16:51.455228236+00:00 stderr F 2025-12-12T16:16:51Z INFO controllers.packageserver confimed required RBAC exists {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}} 2025-12-12T16:16:51.455228236+00:00 stderr F 2025-12-12T16:16:51Z INFO controllers.packageserver currently topology mode {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}, "highly available": false} 2025-12-12T16:16:51.472165059+00:00 stderr F 2025-12-12T16:16:51Z INFO controllers.packageserver reconciliation result {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}, "res": "unchanged"} 2025-12-12T16:16:51.472165059+00:00 stderr F 2025-12-12T16:16:51Z INFO controllers.packageserver finished request reconciliation {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}} 2025-12-12T16:18:50.994480050+00:00 stderr F E1212 16:18:50.993963 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-operator-lifecycle-manager/leases/packageserver-controller-lock?timeout=2m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling 
back to slow path 2025-12-12T16:18:50.998427887+00:00 stderr F E1212 16:18:50.998374 1 leaderelection.go:436] error retrieving resource lock openshift-operator-lifecycle-manager/packageserver-controller-lock: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-operator-lifecycle-manager/leases/packageserver-controller-lock?timeout=2m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:20:22.650089404+00:00 stderr F 2025-12-12T16:20:22Z INFO controllers.packageserver requeueing the packageserver deployment after encountering infrastructure event {"infrastructure": "cluster"} 2025-12-12T16:20:22.650089404+00:00 stderr F 2025-12-12T16:20:22Z INFO controllers.packageserver requeueing the packageserver deployment after encountering infrastructure event {"infrastructure": "cluster"} 2025-12-12T16:20:22.650364941+00:00 stderr F 2025-12-12T16:20:22Z INFO controllers.packageserver handling current request {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}, "request": "openshift-operator-lifecycle-manager/packageserver"} 2025-12-12T16:20:22.650364941+00:00 stderr F 2025-12-12T16:20:22Z INFO controllers.packageserver checking to see if required RBAC exists {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}} 2025-12-12T16:20:22.652038363+00:00 stderr F 2025-12-12T16:20:22Z INFO controllers.packageserver confimed required RBAC exists {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}} 2025-12-12T16:20:22.652251449+00:00 stderr F 2025-12-12T16:20:22Z INFO controllers.packageserver currently topology mode {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}, "highly available": false} 2025-12-12T16:20:22.665963993+00:00 stderr F 2025-12-12T16:20:22Z INFO controllers.packageserver reconciliation result {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}, "res": "unchanged"} 2025-12-12T16:20:22.665963993+00:00 stderr F 2025-12-12T16:20:22Z INFO controllers.packageserver finished request reconciliation {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}} 2025-12-12T16:20:25.206866800+00:00 stderr F 2025-12-12T16:20:25Z INFO controllers.packageserver handling current request {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}, "request": "openshift-operator-lifecycle-manager/packageserver"} 2025-12-12T16:20:25.206866800+00:00 stderr F 2025-12-12T16:20:25Z INFO controllers.packageserver checking to see if required RBAC exists {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}} 2025-12-12T16:20:25.207121316+00:00 stderr F 2025-12-12T16:20:25Z INFO controllers.packageserver confimed required RBAC exists {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}} 2025-12-12T16:20:25.207230679+00:00 stderr F 2025-12-12T16:20:25Z INFO controllers.packageserver currently topology mode {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}, "highly available": false} 2025-12-12T16:20:25.218401189+00:00 stderr F 2025-12-12T16:20:25Z INFO controllers.packageserver reconciliation result {"csv": {"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}, "res": "unchanged"} 2025-12-12T16:20:25.218401189+00:00 stderr F 2025-12-12T16:20:25Z INFO controllers.packageserver finished request reconciliation {"csv": 
{"name":"packageserver","namespace":"openshift-operator-lifecycle-manager"}} ././@LongLink0000644000000000000000000000032100000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015117043062033052 5ustar zuulzuul././@LongLink0000644000000000000000000000032600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/kube-rbac-proxy/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000644000175000017500000000203615117043043033054 0ustar zuulzuul2025-12-12T16:16:47.285526636+00:00 stderr F W1212 16:16:47.285096 1 deprecated.go:66] 2025-12-12T16:16:47.285526636+00:00 stderr F ==== Removed Flag Warning ====================== 2025-12-12T16:16:47.285526636+00:00 stderr F 2025-12-12T16:16:47.285526636+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more. 2025-12-12T16:16:47.285526636+00:00 stderr F 2025-12-12T16:16:47.285526636+00:00 stderr F =============================================== 2025-12-12T16:16:47.285526636+00:00 stderr F 2025-12-12T16:16:47.291292006+00:00 stderr F I1212 16:16:47.291245 1 kube-rbac-proxy.go:235] Valid token audiences: 2025-12-12T16:16:47.292808883+00:00 stderr F I1212 16:16:47.292782 1 kube-rbac-proxy.go:349] Reading certificate files 2025-12-12T16:16:47.293270635+00:00 stderr F I1212 16:16:47.293248 1 kube-rbac-proxy.go:397] Starting TCP socket on 0.0.0.0:8443 2025-12-12T16:16:47.293779087+00:00 stderr F I1212 16:16:47.293758 1 kube-rbac-proxy.go:404] Listening securely on 0.0.0.0:8443 ././@LongLink0000644000000000000000000000024200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_netwo0000755000175000017500000000000015117043043033213 5ustar zuulzuul././@LongLink0000644000000000000000000000027100000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/network-metrics-daemon/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_netwo0000755000175000017500000000000015117043062033214 5ustar zuulzuul././@LongLink0000644000000000000000000000027600000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/network-metrics-daemon/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_netwo0000644000175000017500000011574315117043043033230 0ustar zuulzuul2025-12-12T16:16:56.542313702+00:00 stderr F I1212 16:16:56.539676 1 main.go:45] Version:2f68fe12a9b9bb7676d1f0933b6be632cd4deff2 2025-12-12T16:16:56.542313702+00:00 stderr F I1212 16:16:56.541361 1 main.go:46] Starting with config{ :9091 crc} 2025-12-12T16:16:56.542313702+00:00 stderr F 
W1212 16:16:56.541863 1 client_config.go:618] Neither --kubeconfig nor --master was specified. Using the inClusterConfig. This might not work. 2025-12-12T16:16:56.548715298+00:00 stderr F I1212 16:16:56.548589 1 controller.go:42] Setting up event handlers 2025-12-12T16:16:56.549526368+00:00 stderr F I1212 16:16:56.548767 1 podmetrics.go:101] Serving network metrics 2025-12-12T16:16:56.549526368+00:00 stderr F I1212 16:16:56.548780 1 controller.go:101] Starting pod controller 2025-12-12T16:16:56.549526368+00:00 stderr F I1212 16:16:56.548784 1 controller.go:104] Waiting for informer caches to sync 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749641 1 controller.go:109] Starting workers 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749683 1 controller.go:114] Started workers 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749725 1 controller.go:192] Received pod 'csi-hostpathplugin-59hhc' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749811 1 controller.go:151] Successfully synced 'hostpath-provisioner/csi-hostpathplugin-59hhc' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749820 1 controller.go:192] Received pod 'openshift-apiserver-operator-846cbfc458-zf8cv' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749832 1 controller.go:151] Successfully synced 'openshift-apiserver-operator/openshift-apiserver-operator-846cbfc458-zf8cv' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749838 1 controller.go:192] Received pod 'apiserver-9ddfb9f55-sg8rq' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749847 1 controller.go:151] Successfully synced 'openshift-apiserver/apiserver-9ddfb9f55-sg8rq' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749851 1 controller.go:192] Received pod 'authentication-operator-7f5c659b84-6t92c' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749863 1 controller.go:151] Successfully synced 'openshift-authentication-operator/authentication-operator-7f5c659b84-6t92c' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749867 1 controller.go:192] Received pod 'oauth-openshift-66458b6674-brfdj' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749897 1 controller.go:151] Successfully synced 'openshift-authentication/oauth-openshift-66458b6674-brfdj' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749901 1 controller.go:192] Received pod 'cluster-samples-operator-6b564684c8-fzlkp' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749912 1 controller.go:151] Successfully synced 'openshift-cluster-samples-operator/cluster-samples-operator-6b564684c8-fzlkp' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749915 1 controller.go:192] Received pod 'openshift-config-operator-5777786469-49zmj' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749925 1 controller.go:151] Successfully synced 'openshift-config-operator/openshift-config-operator-5777786469-49zmj' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749929 1 controller.go:192] Received pod 'console-operator-67c89758df-5tw72' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749938 1 controller.go:151] Successfully synced 'openshift-console-operator/console-operator-67c89758df-5tw72' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749942 1 controller.go:192] Received pod 'console-64d44f6ddf-zhgm9' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749955 1 controller.go:151] Successfully synced 
'openshift-console/console-64d44f6ddf-zhgm9' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749958 1 controller.go:192] Received pod 'downloads-747b44746d-sm46g' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749969 1 controller.go:151] Successfully synced 'openshift-console/downloads-747b44746d-sm46g' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749972 1 controller.go:192] Received pod 'openshift-controller-manager-operator-686468bdd5-xknw6' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749981 1 controller.go:151] Successfully synced 'openshift-controller-manager-operator/openshift-controller-manager-operator-686468bdd5-xknw6' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749985 1 controller.go:192] Received pod 'controller-manager-65b6cccf98-flnsl' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749993 1 controller.go:151] Successfully synced 'openshift-controller-manager/controller-manager-65b6cccf98-flnsl' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.749997 1 controller.go:192] Received pod 'dns-operator-799b87ffcd-2w9hn' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.750008 1 controller.go:151] Successfully synced 'openshift-dns-operator/dns-operator-799b87ffcd-2w9hn' 2025-12-12T16:16:56.750039183+00:00 stderr F I1212 16:16:56.750019 1 controller.go:192] Received pod 'dns-default-rl44g' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750033 1 controller.go:151] Successfully synced 'openshift-dns/dns-default-rl44g' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750039 1 controller.go:192] Received pod 'etcd-operator-69b85846b6-mrrt5' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750055 1 controller.go:151] Successfully synced 'openshift-etcd-operator/etcd-operator-69b85846b6-mrrt5' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750058 1 controller.go:192] Received pod 'cluster-image-registry-operator-86c45576b9-sfm9v' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750067 1 controller.go:151] Successfully synced 'openshift-image-registry/cluster-image-registry-operator-86c45576b9-sfm9v' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750071 1 controller.go:192] Received pod 'image-registry-66587d64c8-jqtjf' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750079 1 controller.go:151] Successfully synced 'openshift-image-registry/image-registry-66587d64c8-jqtjf' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750083 1 controller.go:192] Received pod 'ingress-canary-tqcqf' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750094 1 controller.go:151] Successfully synced 'openshift-ingress-canary/ingress-canary-tqcqf' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750098 1 controller.go:192] Received pod 'ingress-operator-6b9cb4dbcf-5twrv' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750106 1 controller.go:151] Successfully synced 'openshift-ingress-operator/ingress-operator-6b9cb4dbcf-5twrv' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750111 1 controller.go:192] Received pod 'revision-pruner-11-crc' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750120 1 controller.go:151] Successfully synced 'openshift-kube-apiserver/revision-pruner-11-crc' 2025-12-12T16:16:56.750129596+00:00 stderr F I1212 16:16:56.750123 1 controller.go:192] Received pod 'kube-controller-manager-operator-69d5f845f8-nsdgk' 
2025-12-12T16:16:56.750141296+00:00 stderr F I1212 16:16:56.750136 1 controller.go:151] Successfully synced 'openshift-kube-controller-manager-operator/kube-controller-manager-operator-69d5f845f8-nsdgk' 2025-12-12T16:16:56.750148626+00:00 stderr F I1212 16:16:56.750140 1 controller.go:192] Received pod 'openshift-kube-scheduler-operator-54f497555d-dcs9d' 2025-12-12T16:16:56.750157416+00:00 stderr F I1212 16:16:56.750152 1 controller.go:151] Successfully synced 'openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-54f497555d-dcs9d' 2025-12-12T16:16:56.750168167+00:00 stderr F I1212 16:16:56.750155 1 controller.go:192] Received pod 'revision-pruner-6-crc' 2025-12-12T16:16:56.750218988+00:00 stderr F I1212 16:16:56.750166 1 controller.go:151] Successfully synced 'openshift-kube-scheduler/revision-pruner-6-crc' 2025-12-12T16:16:56.750218988+00:00 stderr F I1212 16:16:56.750154 1 controller.go:192] Received pod 'kube-apiserver-operator-575994946d-wff8v' 2025-12-12T16:16:56.750218988+00:00 stderr F I1212 16:16:56.750171 1 controller.go:192] Received pod 'kube-storage-version-migrator-operator-565b79b866-krgxf' 2025-12-12T16:16:56.750218988+00:00 stderr F I1212 16:16:56.750205 1 controller.go:151] Successfully synced 'openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-565b79b866-krgxf' 2025-12-12T16:16:56.750218988+00:00 stderr F I1212 16:16:56.750214 1 controller.go:192] Received pod 'migrator-866fcbc849-6mhsj' 2025-12-12T16:16:56.750231168+00:00 stderr F I1212 16:16:56.750226 1 controller.go:151] Successfully synced 'openshift-kube-storage-version-migrator/migrator-866fcbc849-6mhsj' 2025-12-12T16:16:56.750238318+00:00 stderr F I1212 16:16:56.750230 1 controller.go:192] Received pod 'control-plane-machine-set-operator-75ffdb6fcd-m8gw7' 2025-12-12T16:16:56.750245349+00:00 stderr F I1212 16:16:56.750240 1 controller.go:151] Successfully synced 'openshift-machine-api/control-plane-machine-set-operator-75ffdb6fcd-m8gw7' 2025-12-12T16:16:56.750252299+00:00 stderr F I1212 16:16:56.750244 1 controller.go:192] Received pod 'machine-api-operator-755bb95488-dmjfw' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750259 1 controller.go:151] Successfully synced 'openshift-machine-api/machine-api-operator-755bb95488-dmjfw' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750266 1 controller.go:192] Received pod 'machine-config-controller-f9cdd68f7-ndnxt' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750275 1 controller.go:151] Successfully synced 'openshift-kube-apiserver-operator/kube-apiserver-operator-575994946d-wff8v' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750285 1 controller.go:151] Successfully synced 'openshift-machine-config-operator/machine-config-controller-f9cdd68f7-ndnxt' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750288 1 controller.go:192] Received pod 'machine-config-operator-67c9d58cbb-bg744' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750292 1 controller.go:192] Received pod 'certified-operators-kxjp8' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750306 1 controller.go:151] Successfully synced 'openshift-machine-config-operator/machine-config-operator-67c9d58cbb-bg744' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750308 1 controller.go:151] Successfully synced 'openshift-marketplace/certified-operators-kxjp8' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750315 1 controller.go:192] Received pod 
'certified-operators-pvzzz' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750317 1 controller.go:192] Received pod 'community-operators-2gt6h' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750328 1 controller.go:151] Successfully synced 'openshift-marketplace/certified-operators-pvzzz' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750331 1 controller.go:151] Successfully synced 'openshift-marketplace/community-operators-2gt6h' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750335 1 controller.go:192] Received pod 'community-operators-p7s65' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750339 1 controller.go:192] Received pod 'marketplace-operator-547dbd544d-xpvsb' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750349 1 controller.go:151] Successfully synced 'openshift-marketplace/community-operators-p7s65' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750363 1 controller.go:192] Received pod 'redhat-marketplace-mgp9n' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750368 1 controller.go:151] Successfully synced 'openshift-marketplace/marketplace-operator-547dbd544d-xpvsb' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750375 1 controller.go:192] Received pod 'redhat-marketplace-s7x92' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750377 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-marketplace-mgp9n' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750384 1 controller.go:192] Received pod 'redhat-operators-2blsm' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750385 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-marketplace-s7x92' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750392 1 controller.go:192] Received pod 'redhat-operators-9ndfc' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750408 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-operators-2blsm' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750415 1 controller.go:192] Received pod 'multus-admission-controller-69db94689b-xks9x' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750429 1 controller.go:151] Successfully synced 'openshift-multus/multus-admission-controller-69db94689b-xks9x' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750433 1 controller.go:192] Received pod 'network-metrics-daemon-jhhcn' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750443 1 controller.go:151] Successfully synced 'openshift-multus/network-metrics-daemon-jhhcn' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750447 1 controller.go:192] Received pod 'networking-console-plugin-5ff7774fd9-nljh6' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750457 1 controller.go:151] Successfully synced 'openshift-network-console/networking-console-plugin-5ff7774fd9-nljh6' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750461 1 controller.go:192] Received pod 'network-check-source-5bb8f5cd97-xdvz5' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750473 1 controller.go:151] Successfully synced 'openshift-network-diagnostics/network-check-source-5bb8f5cd97-xdvz5' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750478 1 controller.go:192] Received pod 'network-check-target-fhkjl' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750488 1 controller.go:151] Successfully synced 
'openshift-network-diagnostics/network-check-target-fhkjl' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750493 1 controller.go:192] Received pod 'apiserver-8596bd845d-njgb5' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750502 1 controller.go:151] Successfully synced 'openshift-oauth-apiserver/apiserver-8596bd845d-njgb5' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750507 1 controller.go:192] Received pod 'catalog-operator-75ff9f647d-4v9cj' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750516 1 controller.go:151] Successfully synced 'openshift-operator-lifecycle-manager/catalog-operator-75ff9f647d-4v9cj' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750523 1 controller.go:192] Received pod 'collect-profiles-29425935-7hkrm' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750532 1 controller.go:151] Successfully synced 'openshift-operator-lifecycle-manager/collect-profiles-29425935-7hkrm' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750535 1 controller.go:192] Received pod 'olm-operator-5cdf44d969-kcw92' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750544 1 controller.go:151] Successfully synced 'openshift-operator-lifecycle-manager/olm-operator-5cdf44d969-kcw92' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750547 1 controller.go:192] Received pod 'package-server-manager-77f986bd66-mjzlp' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750558 1 controller.go:151] Successfully synced 'openshift-operator-lifecycle-manager/package-server-manager-77f986bd66-mjzlp' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750561 1 controller.go:192] Received pod 'packageserver-7d4fc7d867-lfwgk' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750573 1 controller.go:151] Successfully synced 'openshift-operator-lifecycle-manager/packageserver-7d4fc7d867-lfwgk' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750577 1 controller.go:192] Received pod 'route-controller-manager-776cdc94d6-zksq4' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750586 1 controller.go:151] Successfully synced 'openshift-route-controller-manager/route-controller-manager-776cdc94d6-zksq4' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750590 1 controller.go:192] Received pod 'service-ca-operator-5b9c976747-9wbcx' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750599 1 controller.go:151] Successfully synced 'openshift-service-ca-operator/service-ca-operator-5b9c976747-9wbcx' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750603 1 controller.go:192] Received pod 'service-ca-74545575db-gsm6t' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750612 1 controller.go:151] Successfully synced 'openshift-service-ca/service-ca-74545575db-gsm6t' 2025-12-12T16:16:56.752155555+00:00 stderr F I1212 16:16:56.750619 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-operators-9ndfc' 2025-12-12T16:17:31.068475163+00:00 stderr F I1212 16:17:31.067510 1 controller.go:192] Received pod 'revision-pruner-12-crc' 2025-12-12T16:17:31.068540245+00:00 stderr F I1212 16:17:31.068469 1 controller.go:151] Successfully synced 'openshift-kube-apiserver/revision-pruner-12-crc' 2025-12-12T16:17:31.587579607+00:00 stderr F I1212 16:17:31.582607 1 controller.go:192] Received pod 'controller-manager-69f958c846-qd8rg' 2025-12-12T16:17:31.587579607+00:00 stderr F I1212 16:17:31.587283 1 controller.go:151] Successfully 
synced 'openshift-controller-manager/controller-manager-69f958c846-qd8rg' 2025-12-12T16:17:31.816519516+00:00 stderr F I1212 16:17:31.811462 1 controller.go:192] Received pod 'route-controller-manager-f4599bd79-7rg9b' 2025-12-12T16:17:31.816519516+00:00 stderr F I1212 16:17:31.813419 1 controller.go:151] Successfully synced 'openshift-route-controller-manager/route-controller-manager-f4599bd79-7rg9b' 2025-12-12T16:17:31.903589394+00:00 stderr F I1212 16:17:31.900616 1 controller.go:151] Successfully synced 'openshift-multus/cni-sysctl-allowlist-ds-q8kdt' 2025-12-12T16:17:31.929251270+00:00 stderr F I1212 16:17:31.924874 1 controller.go:151] Successfully synced 'openshift-controller-manager/controller-manager-65b6cccf98-flnsl' 2025-12-12T16:17:31.945251269+00:00 stderr F I1212 16:17:31.940689 1 controller.go:151] Successfully synced 'openshift-route-controller-manager/route-controller-manager-776cdc94d6-zksq4' 2025-12-12T16:17:35.508903723+00:00 stderr F I1212 16:17:35.507870 1 controller.go:192] Received pod 'installer-12-crc' 2025-12-12T16:17:35.508903723+00:00 stderr F I1212 16:17:35.508629 1 controller.go:151] Successfully synced 'openshift-kube-apiserver/installer-12-crc' 2025-12-12T16:17:47.126680223+00:00 stderr F I1212 16:17:47.121277 1 controller.go:151] Successfully synced 'openshift-marketplace/certified-operators-kxjp8' 2025-12-12T16:17:47.384655311+00:00 stderr F I1212 16:17:47.384565 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-marketplace-mgp9n' 2025-12-12T16:17:47.692957443+00:00 stderr F I1212 16:17:47.692624 1 controller.go:151] Successfully synced 'openshift-marketplace/community-operators-p7s65' 2025-12-12T16:17:48.265279073+00:00 stderr F I1212 16:17:48.265198 1 controller.go:151] Successfully synced 'openshift-route-controller-manager/route-controller-manager-f4599bd79-7rg9b' 2025-12-12T16:17:48.541604634+00:00 stderr F I1212 16:17:48.541403 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-operators-2blsm' 2025-12-12T16:17:48.771370135+00:00 stderr F I1212 16:17:48.771318 1 controller.go:192] Received pod 'route-controller-manager-6b47f77689-5r77s' 2025-12-12T16:17:48.771407066+00:00 stderr F I1212 16:17:48.771398 1 controller.go:151] Successfully synced 'openshift-route-controller-manager/route-controller-manager-6b47f77689-5r77s' 2025-12-12T16:17:49.111848074+00:00 stderr F I1212 16:17:49.111337 1 controller.go:151] Successfully synced 'openshift-controller-manager/controller-manager-69f958c846-qd8rg' 2025-12-12T16:17:49.500955584+00:00 stderr F I1212 16:17:49.498062 1 controller.go:192] Received pod 'controller-manager-6445bd5bb7-qhd4b' 2025-12-12T16:17:49.500955584+00:00 stderr F I1212 16:17:49.498449 1 controller.go:151] Successfully synced 'openshift-controller-manager/controller-manager-6445bd5bb7-qhd4b' 2025-12-12T16:18:08.205290773+00:00 stderr F I1212 16:18:08.204321 1 controller.go:192] Received pod 'route-controller-manager-67bd47cff9-br6nz' 2025-12-12T16:18:08.205290773+00:00 stderr F I1212 16:18:08.205098 1 controller.go:151] Successfully synced 'openshift-route-controller-manager/route-controller-manager-67bd47cff9-br6nz' 2025-12-12T16:18:08.274782111+00:00 stderr F I1212 16:18:08.274712 1 controller.go:151] Successfully synced 'openshift-controller-manager/controller-manager-6445bd5bb7-qhd4b' 2025-12-12T16:18:08.287936616+00:00 stderr F I1212 16:18:08.287872 1 controller.go:151] Successfully synced 'openshift-route-controller-manager/route-controller-manager-6b47f77689-5r77s' 
2025-12-12T16:18:08.540288105+00:00 stderr F I1212 16:18:08.538889 1 controller.go:192] Received pod 'controller-manager-7fffb5779-6br5z' 2025-12-12T16:18:08.540288105+00:00 stderr F I1212 16:18:08.538940 1 controller.go:151] Successfully synced 'openshift-controller-manager/controller-manager-7fffb5779-6br5z' 2025-12-12T16:19:29.266949806+00:00 stderr F I1212 16:19:29.266273 1 controller.go:192] Received pod 'oauth-openshift-6567f5ffdb-jrpfr' 2025-12-12T16:19:29.266949806+00:00 stderr F I1212 16:19:29.266913 1 controller.go:151] Successfully synced 'openshift-authentication/oauth-openshift-6567f5ffdb-jrpfr' 2025-12-12T16:19:29.266949806+00:00 stderr F I1212 16:19:29.266924 1 controller.go:192] Received pod 'route-controller-manager-bf6bf5794-d5zzt' 2025-12-12T16:19:29.266949806+00:00 stderr F I1212 16:19:29.266941 1 controller.go:151] Successfully synced 'openshift-route-controller-manager/route-controller-manager-bf6bf5794-d5zzt' 2025-12-12T16:19:29.267019348+00:00 stderr F I1212 16:19:29.266951 1 controller.go:151] Successfully synced 'openshift-route-controller-manager/route-controller-manager-67bd47cff9-br6nz' 2025-12-12T16:19:29.267019348+00:00 stderr F I1212 16:19:29.266959 1 controller.go:151] Successfully synced 'openshift-controller-manager/controller-manager-7fffb5779-6br5z' 2025-12-12T16:19:29.267019348+00:00 stderr F I1212 16:19:29.266965 1 controller.go:151] Successfully synced 'openshift-authentication/oauth-openshift-66458b6674-brfdj' 2025-12-12T16:19:29.267019348+00:00 stderr F I1212 16:19:29.266448 1 controller.go:192] Received pod 'controller-manager-79d797b698-v4v6j' 2025-12-12T16:19:29.267019348+00:00 stderr F I1212 16:19:29.266995 1 controller.go:151] Successfully synced 'openshift-controller-manager/controller-manager-79d797b698-v4v6j' 2025-12-12T16:19:47.861834365+00:00 stderr F I1212 16:19:47.861290 1 controller.go:192] Received pod 'controller-manager-7b9f779b68-rhrzf' 2025-12-12T16:19:47.861834365+00:00 stderr F I1212 16:19:47.861813 1 controller.go:151] Successfully synced 'openshift-controller-manager/controller-manager-7b9f779b68-rhrzf' 2025-12-12T16:19:47.985363176+00:00 stderr F I1212 16:19:47.985146 1 controller.go:151] Successfully synced 'openshift-controller-manager/controller-manager-79d797b698-v4v6j' 2025-12-12T16:20:36.747389802+00:00 stderr F I1212 16:20:36.745980 1 controller.go:192] Received pod 'marketplace-operator-547dbd544d-4vhrb' 2025-12-12T16:20:36.747389802+00:00 stderr F I1212 16:20:36.746925 1 controller.go:151] Successfully synced 'openshift-marketplace/marketplace-operator-547dbd544d-4vhrb' 2025-12-12T16:20:37.350923502+00:00 stderr F I1212 16:20:37.349810 1 controller.go:151] Successfully synced 'openshift-marketplace/marketplace-operator-547dbd544d-xpvsb' 2025-12-12T16:20:37.369249293+00:00 stderr F I1212 16:20:37.368030 1 controller.go:151] Successfully synced 'openshift-marketplace/community-operators-2gt6h' 2025-12-12T16:20:37.386782233+00:00 stderr F I1212 16:20:37.385520 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-marketplace-s7x92' 2025-12-12T16:20:37.408117622+00:00 stderr F I1212 16:20:37.407041 1 controller.go:151] Successfully synced 'openshift-marketplace/certified-operators-pvzzz' 2025-12-12T16:20:37.427849080+00:00 stderr F I1212 16:20:37.423566 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-operators-9ndfc' 2025-12-12T16:20:39.154496109+00:00 stderr F I1212 16:20:39.154445 1 controller.go:192] Received pod 'redhat-operators-wqdb8' 2025-12-12T16:20:39.154540600+00:00 
stderr F I1212 16:20:39.154494 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-operators-wqdb8' 2025-12-12T16:20:40.114573092+00:00 stderr F I1212 16:20:40.114061 1 controller.go:192] Received pod 'certified-operators-psnw2' 2025-12-12T16:20:40.114573092+00:00 stderr F I1212 16:20:40.114552 1 controller.go:151] Successfully synced 'openshift-marketplace/certified-operators-psnw2' 2025-12-12T16:20:41.523863787+00:00 stderr F I1212 16:20:41.523007 1 controller.go:192] Received pod 'community-operators-6jgv5' 2025-12-12T16:20:41.523863787+00:00 stderr F I1212 16:20:41.523719 1 controller.go:151] Successfully synced 'openshift-marketplace/community-operators-6jgv5' 2025-12-12T16:20:42.571397163+00:00 stderr F I1212 16:20:42.570640 1 controller.go:192] Received pod 'redhat-marketplace-jkgqd' 2025-12-12T16:20:42.571433794+00:00 stderr F I1212 16:20:42.571395 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-marketplace-jkgqd' 2025-12-12T16:20:48.510962397+00:00 stderr F I1212 16:20:48.506091 1 controller.go:151] Successfully synced 'openshift-route-controller-manager/route-controller-manager-bf6bf5794-d5zzt' 2025-12-12T16:20:48.571883645+00:00 stderr F I1212 16:20:48.571829 1 controller.go:192] Received pod 'route-controller-manager-8fdcdbb66-mzfqh' 2025-12-12T16:20:48.571928096+00:00 stderr F I1212 16:20:48.571877 1 controller.go:151] Successfully synced 'openshift-route-controller-manager/route-controller-manager-8fdcdbb66-mzfqh' 2025-12-12T16:25:27.420628264+00:00 stderr F I1212 16:25:27.419937 1 controller.go:151] Successfully synced 'openshift-ovn-kubernetes/ovnkube-control-plane-57b78d8988-xtrkr' 2025-12-12T16:25:28.482233465+00:00 stderr F I1212 16:25:28.481303 1 controller.go:151] Successfully synced 'openshift-ovn-kubernetes/ovnkube-node-wjw4g' 2025-12-12T16:26:40.018491152+00:00 stderr F I1212 16:26:40.017509 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-marketplace-jkgqd' 2025-12-12T16:26:40.513972980+00:00 stderr F I1212 16:26:40.513509 1 controller.go:192] Received pod 'image-registry-5d9d95bf5b-6md9w' 2025-12-12T16:26:40.514049552+00:00 stderr F I1212 16:26:40.513994 1 controller.go:151] Successfully synced 'openshift-image-registry/image-registry-5d9d95bf5b-6md9w' 2025-12-12T16:26:43.383851964+00:00 stderr F I1212 16:26:43.383031 1 controller.go:192] Received pod '6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85' 2025-12-12T16:26:43.383851964+00:00 stderr F I1212 16:26:43.383838 1 controller.go:151] Successfully synced 'openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85' 2025-12-12T16:26:49.801008463+00:00 stderr F I1212 16:26:49.800246 1 controller.go:192] Received pod '8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx' 2025-12-12T16:26:49.801008463+00:00 stderr F I1212 16:26:49.800995 1 controller.go:151] Successfully synced 'openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx' 2025-12-12T16:26:53.943164618+00:00 stderr F I1212 16:26:53.942386 1 controller.go:192] Received pod '1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5' 2025-12-12T16:26:53.943164618+00:00 stderr F I1212 16:26:53.943142 1 controller.go:151] Successfully synced 'openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5' 2025-12-12T16:26:54.055100146+00:00 stderr F I1212 16:26:54.053398 1 controller.go:192] Received pod 'certified-operators-8pl6d' 2025-12-12T16:26:54.055100146+00:00 stderr F I1212 
16:26:54.053451 1 controller.go:151] Successfully synced 'openshift-marketplace/certified-operators-8pl6d' 2025-12-12T16:26:58.493459540+00:00 stderr F I1212 16:26:58.489702 1 controller.go:192] Received pod 'redhat-operators-b4n58' 2025-12-12T16:26:58.493459540+00:00 stderr F I1212 16:26:58.491440 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-operators-b4n58' 2025-12-12T16:27:05.812586267+00:00 stderr F I1212 16:27:05.810792 1 controller.go:192] Received pod 'obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g' 2025-12-12T16:27:05.812586267+00:00 stderr F I1212 16:27:05.811775 1 controller.go:151] Successfully synced 'openshift-operators/obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g' 2025-12-12T16:27:05.969922559+00:00 stderr F I1212 16:27:05.967627 1 controller.go:192] Received pod 'observability-operator-78c97476f4-qxqmn' 2025-12-12T16:27:05.969922559+00:00 stderr F I1212 16:27:05.967794 1 controller.go:151] Successfully synced 'openshift-operators/observability-operator-78c97476f4-qxqmn' 2025-12-12T16:27:06.280263873+00:00 stderr F I1212 16:27:06.276029 1 controller.go:192] Received pod 'elastic-operator-6c994c654b-42tmw' 2025-12-12T16:27:06.280263873+00:00 stderr F I1212 16:27:06.276527 1 controller.go:151] Successfully synced 'service-telemetry/elastic-operator-6c994c654b-42tmw' 2025-12-12T16:27:06.313598257+00:00 stderr F I1212 16:27:06.313505 1 controller.go:192] Received pod 'perses-operator-68bdb49cbf-nqtp8' 2025-12-12T16:27:06.313598257+00:00 stderr F I1212 16:27:06.313558 1 controller.go:151] Successfully synced 'openshift-operators/perses-operator-68bdb49cbf-nqtp8' 2025-12-12T16:27:06.378348666+00:00 stderr F I1212 16:27:06.377711 1 controller.go:192] Received pod 'obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr' 2025-12-12T16:27:06.378348666+00:00 stderr F I1212 16:27:06.377738 1 controller.go:192] Received pod 'obo-prometheus-operator-86648f486b-wbj29' 2025-12-12T16:27:06.378348666+00:00 stderr F I1212 16:27:06.377798 1 controller.go:151] Successfully synced 'openshift-operators/obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr' 2025-12-12T16:27:06.378348666+00:00 stderr F I1212 16:27:06.377849 1 controller.go:151] Successfully synced 'openshift-operators/obo-prometheus-operator-86648f486b-wbj29' 2025-12-12T16:27:07.285352041+00:00 stderr F I1212 16:27:07.285153 1 controller.go:151] Successfully synced 'openshift-marketplace/certified-operators-8pl6d' 2025-12-12T16:27:29.884240696+00:00 stderr F I1212 16:27:29.882553 1 controller.go:192] Received pod 'cert-manager-operator-controller-manager-64c74584c4-djdmt' 2025-12-12T16:27:29.884240696+00:00 stderr F I1212 16:27:29.883263 1 controller.go:151] Successfully synced 'cert-manager-operator/cert-manager-operator-controller-manager-64c74584c4-djdmt' 2025-12-12T16:27:29.916308558+00:00 stderr F I1212 16:27:29.916230 1 controller.go:192] Received pod 'community-operators-9wq8j' 2025-12-12T16:27:29.916308558+00:00 stderr F I1212 16:27:29.916275 1 controller.go:151] Successfully synced 'openshift-marketplace/community-operators-9wq8j' 2025-12-12T16:27:30.653531565+00:00 stderr F I1212 16:27:30.652822 1 controller.go:151] Successfully synced 'openshift-image-registry/image-registry-66587d64c8-jqtjf' 2025-12-12T16:27:30.852967462+00:00 stderr F I1212 16:27:30.852837 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-operators-b4n58' 2025-12-12T16:27:32.067348987+00:00 stderr F I1212 16:27:32.063516 1 controller.go:192] Received pod 
'elasticsearch-es-default-0' 2025-12-12T16:27:32.067348987+00:00 stderr F I1212 16:27:32.064415 1 controller.go:151] Successfully synced 'service-telemetry/elasticsearch-es-default-0' 2025-12-12T16:27:56.957222819+00:00 stderr F I1212 16:27:56.956394 1 controller.go:151] Successfully synced 'openshift-marketplace/community-operators-9wq8j' 2025-12-12T16:28:00.360585623+00:00 stderr F I1212 16:28:00.358067 1 controller.go:192] Received pod 'cert-manager-858d87f86b-r7f8q' 2025-12-12T16:28:00.360585623+00:00 stderr F I1212 16:28:00.359011 1 controller.go:151] Successfully synced 'cert-manager/cert-manager-858d87f86b-r7f8q' 2025-12-12T16:28:00.371502730+00:00 stderr F I1212 16:28:00.371381 1 controller.go:192] Received pod 'cert-manager-cainjector-7dbf76d5c8-lv2hl' 2025-12-12T16:28:00.371502730+00:00 stderr F I1212 16:28:00.371434 1 controller.go:151] Successfully synced 'cert-manager/cert-manager-cainjector-7dbf76d5c8-lv2hl' 2025-12-12T16:28:00.671051171+00:00 stderr F I1212 16:28:00.670519 1 controller.go:192] Received pod 'cert-manager-webhook-7894b5b9b4-2kmrt' 2025-12-12T16:28:00.671051171+00:00 stderr F I1212 16:28:00.671041 1 controller.go:151] Successfully synced 'cert-manager/cert-manager-webhook-7894b5b9b4-2kmrt' 2025-12-12T16:28:18.837390165+00:00 stderr F I1212 16:28:18.836723 1 controller.go:192] Received pod 'service-telemetry-framework-index-1-build' 2025-12-12T16:28:18.837554159+00:00 stderr F I1212 16:28:18.837535 1 controller.go:151] Successfully synced 'service-telemetry/service-telemetry-framework-index-1-build' 2025-12-12T16:28:27.483303921+00:00 stderr F I1212 16:28:27.482145 1 controller.go:151] Successfully synced 'service-telemetry/service-telemetry-framework-index-1-build' 2025-12-12T16:28:41.108456938+00:00 stderr F I1212 16:28:41.107960 1 controller.go:192] Received pod 'service-telemetry-framework-index-2-build' 2025-12-12T16:28:41.108511950+00:00 stderr F I1212 16:28:41.108454 1 controller.go:151] Successfully synced 'service-telemetry/service-telemetry-framework-index-2-build' 2025-12-12T16:29:00.737747637+00:00 stderr F I1212 16:29:00.736941 1 controller.go:151] Successfully synced 'service-telemetry/service-telemetry-framework-index-2-build' 2025-12-12T16:29:03.855273190+00:00 stderr F I1212 16:29:03.854135 1 controller.go:192] Received pod 'service-telemetry-framework-index-3-build' 2025-12-12T16:29:03.855273190+00:00 stderr F I1212 16:29:03.854721 1 controller.go:151] Successfully synced 'service-telemetry/service-telemetry-framework-index-3-build' 2025-12-12T16:29:09.861242773+00:00 stderr F I1212 16:29:09.860104 1 controller.go:151] Successfully synced 'service-telemetry/service-telemetry-framework-index-3-build' 2025-12-12T16:29:19.653822240+00:00 stderr F I1212 16:29:19.653155 1 controller.go:192] Received pod 'service-telemetry-framework-index-4-build' 2025-12-12T16:29:19.653822240+00:00 stderr F I1212 16:29:19.653656 1 controller.go:151] Successfully synced 'service-telemetry/service-telemetry-framework-index-4-build' 2025-12-12T16:29:23.966207988+00:00 stderr F I1212 16:29:23.965565 1 controller.go:151] Successfully synced 'service-telemetry/service-telemetry-framework-index-4-build' 2025-12-12T16:29:24.012214008+00:00 stderr F I1212 16:29:24.012121 1 controller.go:192] Received pod 'infrawatch-operators-cj72z' 2025-12-12T16:29:24.012214008+00:00 stderr F I1212 16:29:24.012189 1 controller.go:151] Successfully synced 'service-telemetry/infrawatch-operators-cj72z' 2025-12-12T16:29:28.999387907+00:00 stderr F I1212 16:29:28.998841 1 controller.go:151] 
Successfully synced 'service-telemetry/infrawatch-operators-cj72z' 2025-12-12T16:29:29.220376869+00:00 stderr F I1212 16:29:29.220100 1 controller.go:192] Received pod 'infrawatch-operators-cdpts' 2025-12-12T16:29:29.220376869+00:00 stderr F I1212 16:29:29.220145 1 controller.go:151] Successfully synced 'service-telemetry/infrawatch-operators-cdpts' 2025-12-12T16:30:00.965360716+00:00 stderr F I1212 16:30:00.964764 1 controller.go:192] Received pod 'collect-profiles-29425950-g52jh' 2025-12-12T16:30:00.965433468+00:00 stderr F I1212 16:30:00.965386 1 controller.go:151] Successfully synced 'openshift-operator-lifecycle-manager/collect-profiles-29425950-g52jh' 2025-12-12T16:34:26.274712853+00:00 stderr F I1212 16:34:26.274045 1 controller.go:192] Received pod 'infrawatch-operators-6bs58' 2025-12-12T16:34:26.274887387+00:00 stderr F I1212 16:34:26.274872 1 controller.go:151] Successfully synced 'service-telemetry/infrawatch-operators-6bs58' 2025-12-12T16:37:14.270836806+00:00 stderr F I1212 16:37:14.270270 1 controller.go:192] Received pod 'certified-operators-h46w2' 2025-12-12T16:37:14.270903457+00:00 stderr F I1212 16:37:14.270849 1 controller.go:151] Successfully synced 'openshift-marketplace/certified-operators-h46w2' 2025-12-12T16:37:23.671448670+00:00 stderr F I1212 16:37:23.670514 1 controller.go:192] Received pod 'redhat-operators-k5p4x' 2025-12-12T16:37:23.671448670+00:00 stderr F I1212 16:37:23.671357 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-operators-k5p4x' 2025-12-12T16:37:27.803287924+00:00 stderr F I1212 16:37:27.801000 1 controller.go:151] Successfully synced 'openshift-marketplace/certified-operators-h46w2' 2025-12-12T16:37:40.887498142+00:00 stderr F I1212 16:37:40.884809 1 controller.go:151] Successfully synced 'openshift-marketplace/redhat-operators-k5p4x' 2025-12-12T16:38:10.959253430+00:00 stderr F I1212 16:38:10.958424 1 controller.go:192] Received pod 'community-operators-4sccg' 2025-12-12T16:38:10.959323072+00:00 stderr F I1212 16:38:10.959269 1 controller.go:151] Successfully synced 'openshift-marketplace/community-operators-4sccg' 2025-12-12T16:38:28.294695456+00:00 stderr F I1212 16:38:28.293979 1 controller.go:151] Successfully synced 'openshift-marketplace/community-operators-4sccg' 2025-12-12T16:40:39.023459125+00:00 stderr F I1212 16:40:39.022565 1 controller.go:192] Received pod 'must-gather-v4h5l' 2025-12-12T16:40:39.023459125+00:00 stderr F I1212 16:40:39.023419 1 controller.go:151] Successfully synced 'openshift-must-gather-2sjxj/must-gather-v4h5l' 2025-12-12T16:43:03.848074371+00:00 stderr F I1212 16:43:03.845687 1 controller.go:151] Successfully synced 'openshift-must-gather-2sjxj/must-gather-v4h5l' ././@LongLink0000644000000000000000000000026200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_netwo0000755000175000017500000000000015117043062033214 5ustar zuulzuul././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/kube-rbac-proxy/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_netwo0000644000175000017500000000202015117043043033207 0ustar 
zuulzuul2025-12-12T16:16:59.062117122+00:00 stderr F W1212 16:16:59.057474 1 deprecated.go:66] 2025-12-12T16:16:59.062117122+00:00 stderr F ==== Removed Flag Warning ====================== 2025-12-12T16:16:59.062117122+00:00 stderr F 2025-12-12T16:16:59.062117122+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more. 2025-12-12T16:16:59.062117122+00:00 stderr F 2025-12-12T16:16:59.062117122+00:00 stderr F =============================================== 2025-12-12T16:16:59.062117122+00:00 stderr F 2025-12-12T16:16:59.062117122+00:00 stderr F I1212 16:16:59.058500 1 kube-rbac-proxy.go:235] Valid token audiences: 2025-12-12T16:16:59.062117122+00:00 stderr F I1212 16:16:59.060488 1 kube-rbac-proxy.go:349] Reading certificate files 2025-12-12T16:16:59.062117122+00:00 stderr F I1212 16:16:59.061927 1 kube-rbac-proxy.go:397] Starting TCP socket on :8443 2025-12-12T16:16:59.062657595+00:00 stderr F I1212 16:16:59.062535 1 kube-rbac-proxy.go:404] Listening securely on :8443 ././@LongLink0000644000000000000000000000023300000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_node-ca-2xpcq_ab3d3198-2798-4180-aa5a-a0e495348125/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-regist0000755000175000017500000000000015117043043033023 5ustar zuulzuul././@LongLink0000644000000000000000000000024300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_node-ca-2xpcq_ab3d3198-2798-4180-aa5a-a0e495348125/node-ca/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-regist0000755000175000017500000000000015117043062033024 5ustar zuulzuul././@LongLink0000644000000000000000000000025000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_node-ca-2xpcq_ab3d3198-2798-4180-aa5a-a0e495348125/node-ca/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-regist0000644000175000017500000004004015117043043033023 0ustar zuulzuul2025-12-12T16:16:23.207675794+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:16:23.224494235+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:16:23.233765541+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:16:23.246407840+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:16:23.250331386+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:16:23.256486826+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:17:23.280482730+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:17:23.293096972+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:17:23.307206696+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:17:23.325348077+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:17:23.333470990+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:17:23.339062310+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:18:23.355953009+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 
2025-12-12T16:18:23.362519561+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:18:23.368673693+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:18:23.378593149+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:18:23.385543900+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:18:23.389578140+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:19:23.407241278+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:19:23.407241278+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:19:23.413214715+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:19:23.421249094+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:19:23.425234913+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:19:23.428234737+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:20:23.440127051+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:20:23.445618799+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:20:23.450545343+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:20:23.457421195+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:20:23.460652366+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:20:23.464963835+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:21:23.474043435+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:21:23.479349788+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:21:23.485253335+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:21:23.494360062+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:21:23.500707110+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:21:23.504478504+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:22:23.515900324+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:22:23.523158836+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:22:23.528778967+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:22:23.537959108+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:22:23.541932368+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:22:23.544814940+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:23:23.557904606+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:23:23.565020496+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:23:23.571531840+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:23:23.582578038+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:23:23.587153393+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:23:23.591096033+00:00 stdout F image-registry.openshift-image-registry.svc:5000 
2025-12-12T16:24:23.603899903+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:24:23.609057711+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:24:23.614254791+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:24:23.621938552+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:24:23.624810904+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:24:23.627614244+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:25:23.638363369+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:25:23.644922081+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:25:23.650336574+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:25:23.662101003+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:25:23.666671263+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:25:23.670426281+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:26:23.687260372+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:26:23.689252622+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:26:23.700327892+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:26:23.711601517+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:26:23.717934547+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:26:23.722080851+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:27:23.733726925+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:27:23.740408204+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:27:23.746979250+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:27:23.756224434+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:27:23.759325323+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:27:23.763144009+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:28:23.779669897+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:28:23.785367771+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:28:23.790514151+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:28:23.807540842+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:28:23.807540842+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:28:23.813286458+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:29:23.818824502+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:29:23.823739596+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:29:23.828344262+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:29:23.836052327+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:29:23.838679283+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 
2025-12-12T16:29:23.842389097+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:30:23.852783746+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:30:23.858824167+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:30:23.864016487+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:30:23.872973980+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:30:23.875808871+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:30:23.879595766+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:31:23.890316653+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:31:23.899024837+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:31:23.904204314+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:31:23.913656877+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:31:23.916493566+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:31:23.919966482+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:32:23.930916478+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:32:23.937212206+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:32:23.945821051+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:32:23.956164289+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:32:23.961251457+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:32:23.965241106+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:33:23.976855609+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:33:23.985272399+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:33:23.994515360+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:33:24.005584497+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:33:24.010233523+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:33:24.013671289+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:34:24.025959298+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:34:24.034714747+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:34:24.042297577+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:34:24.052224555+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:34:24.055075936+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:34:24.058766508+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:35:24.069466071+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:35:24.077289867+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:35:24.082209191+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:35:24.093546486+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 
2025-12-12T16:35:24.096636103+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:35:24.099862584+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:36:24.115056912+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:36:24.123481043+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:36:24.130990852+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:36:24.143951148+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:36:24.150276757+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:36:24.154766799+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:37:24.168802667+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:37:24.175065874+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:37:24.181492715+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:37:24.189641409+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:37:24.192938442+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:37:24.196360728+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:38:24.212996392+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:38:24.224631145+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:38:24.232997515+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:38:24.243294153+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:38:24.248253088+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:38:24.252662859+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:39:24.265727913+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:39:24.272344359+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:39:24.279615612+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:39:24.288589307+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:39:24.292901176+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:39:24.295877520+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:40:24.308472096+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:40:24.318944459+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:40:24.329044223+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:40:24.344720517+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:40:24.349345533+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:40:24.353014935+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:41:24.365024098+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:41:24.370281270+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:41:24.375662975+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 
2025-12-12T16:41:24.384741493+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:41:24.388461287+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:41:24.390844376+00:00 stdout F image-registry.openshift-image-registry.svc:5000 2025-12-12T16:42:24.399577192+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:42:24.405654164+00:00 stdout F image-registry.openshift-image-registry.svc..5000 2025-12-12T16:42:24.410361253+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local..5000 2025-12-12T16:42:24.416661021+00:00 stdout F default-route-openshift-image-registry.apps-crc.testing 2025-12-12T16:42:24.419604505+00:00 stdout F image-registry.openshift-image-registry.svc.cluster.local:5000 2025-12-12T16:42:24.423150104+00:00 stdout F image-registry.openshift-image-registry.svc:5000 ././@LongLink0000644000000000000000000000030100000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samp0000755000175000017500000000000015117043062033066 5ustar zuulzuul././@LongLink0000644000000000000000000000040500000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/d25a5167e83c106fd6aae82bd4f1881d7b1012c90d8673c0eb50d806ecfe8a9d.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samp0000644000175000017500000000000015117043043033055 0ustar zuulzuul././@LongLink0000644000000000000000000000034000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator-watch/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samp0000755000175000017500000000000015117043062033066 5ustar zuulzuul././@LongLink0000644000000000000000000000034500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator-watch/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samp0000644000175000017500000000123015117043043033063 0ustar zuulzuul2025-12-12T16:16:47.178760439+00:00 stderr F I1212 16:16:47.176534 13 cmd.go:331] Waiting for process with process name "cluster-samples-operator" ... 
2025-12-12T16:16:47.178760439+00:00 stderr F I1212 16:16:47.177764 13 cmd.go:341] Watching for changes in: ([]string) (len=2 cap=2) { 2025-12-12T16:16:47.178760439+00:00 stderr F (string) (len=32) "/proc/2/root/etc/secrets/tls.crt", 2025-12-12T16:16:47.178760439+00:00 stderr F (string) (len=32) "/proc/2/root/etc/secrets/tls.key" 2025-12-12T16:16:47.178760439+00:00 stderr F } 2025-12-12T16:16:47.187244896+00:00 stderr F I1212 16:16:47.185387 13 observer_polling.go:159] Starting file observer ././@LongLink0000644000000000000000000000033200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samp0000755000175000017500000000000015117043062033066 5ustar zuulzuul././@LongLink0000644000000000000000000000033700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samp0000644000175000017500000024257215117043043033103 0ustar zuulzuul2025-12-12T16:16:44.767846689+00:00 stderr F time="2025-12-12T16:16:44Z" level=info msg="Go Version: go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 2025-12-12T16:16:44.767846689+00:00 stderr F time="2025-12-12T16:16:44Z" level=info msg="Go OS/Arch: linux/amd64" 2025-12-12T16:16:44.832299742+00:00 stderr F time="2025-12-12T16:16:44Z" level=info msg="template client &v1.TemplateV1Client{restClient:(*rest.RESTClient)(0xc0008f74a0)}" 2025-12-12T16:16:44.832299742+00:00 stderr F time="2025-12-12T16:16:44Z" level=info msg="image client &v1.ImageV1Client{restClient:(*rest.RESTClient)(0xc0008f7540)}" 2025-12-12T16:16:45.000735015+00:00 stderr F time="2025-12-12T16:16:44Z" level=info msg="waiting for informer caches to sync" 2025-12-12T16:16:45.023268785+00:00 stderr F E1212 16:16:45.015632 2 reflector.go:200] "Failed to watch" err="failed to list *v1.Template: the server is currently unable to handle the request (get templates.template.openshift.io)" logger="UnhandledError" reflector="github.com/openshift/client-go/template/informers/externalversions/factory.go:101" type="*v1.Template" 2025-12-12T16:16:45.108253049+00:00 stderr F E1212 16:16:45.100288 2 reflector.go:200] "Failed to watch" err="failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" logger="UnhandledError" reflector="github.com/openshift/client-go/image/informers/externalversions/factory.go:101" type="*v1.ImageStream" 2025-12-12T16:16:46.076760254+00:00 stderr F E1212 16:16:46.075541 2 reflector.go:200] "Failed to watch" err="failed to list *v1.Template: the server is currently unable to handle the request (get templates.template.openshift.io)" logger="UnhandledError" reflector="github.com/openshift/client-go/template/informers/externalversions/factory.go:101" type="*v1.Template" 2025-12-12T16:16:46.189534797+00:00 stderr F E1212 16:16:46.189245 2 reflector.go:200] "Failed to watch" err="failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" logger="UnhandledError" 
reflector="github.com/openshift/client-go/image/informers/externalversions/factory.go:101" type="*v1.ImageStream" 2025-12-12T16:16:47.978279109+00:00 stderr F E1212 16:16:47.977711 2 reflector.go:200] "Failed to watch" err="failed to list *v1.Template: the server is currently unable to handle the request (get templates.template.openshift.io)" logger="UnhandledError" reflector="github.com/openshift/client-go/template/informers/externalversions/factory.go:101" type="*v1.Template" 2025-12-12T16:16:49.051483030+00:00 stderr F E1212 16:16:49.048753 2 reflector.go:200] "Failed to watch" err="failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" logger="UnhandledError" reflector="github.com/openshift/client-go/image/informers/externalversions/factory.go:101" type="*v1.ImageStream" 2025-12-12T16:16:52.066963791+00:00 stderr F E1212 16:16:52.065922 2 reflector.go:200] "Failed to watch" err="failed to list *v1.Template: the server is currently unable to handle the request (get templates.template.openshift.io)" logger="UnhandledError" reflector="github.com/openshift/client-go/template/informers/externalversions/factory.go:101" type="*v1.Template" 2025-12-12T16:17:01.100683581+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="started events processor" 2025-12-12T16:17:01.100731033+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream fuse7-eap-openshift-java11 already deleted so no worries on clearing tags" 2025-12-12T16:17:01.100731033+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-eap-openshift-java11" 2025-12-12T16:17:01.105583261+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream fuse7-java-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:01.105583261+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-java-openshift" 2025-12-12T16:17:01.108477422+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream dotnet-runtime already deleted so no worries on clearing tags" 2025-12-12T16:17:01.108477422+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream dotnet-runtime" 2025-12-12T16:17:01.110869200+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream dotnet already deleted so no worries on clearing tags" 2025-12-12T16:17:01.110869200+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream dotnet" 2025-12-12T16:17:01.112779727+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream fuse7-karaf-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:01.112779727+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-karaf-openshift" 2025-12-12T16:17:01.113510525+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="no global imagestream configuration will block imagestream creation using " 2025-12-12T16:17:01.114778675+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="At steady state: config the same and exists is true, in progress false, and version correct" 2025-12-12T16:17:01.154018094+00:00 stderr F 
time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream golang already deleted so no worries on clearing tags" 2025-12-12T16:17:01.154018094+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream golang" 2025-12-12T16:17:01.157151540+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream fuse7-java11-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:01.157151540+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-java11-openshift" 2025-12-12T16:17:01.159732263+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream fuse7-eap-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:01.159732263+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-eap-openshift" 2025-12-12T16:17:01.161258940+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream fuse7-karaf-openshift-jdk11 already deleted so no worries on clearing tags" 2025-12-12T16:17:01.161258940+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-karaf-openshift-jdk11" 2025-12-12T16:17:01.162800138+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream java-runtime already deleted so no worries on clearing tags" 2025-12-12T16:17:01.162800138+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream java-runtime" 2025-12-12T16:17:01.164303585+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream jboss-eap-xp3-openjdk11-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:01.164303585+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp3-openjdk11-openshift" 2025-12-12T16:17:01.166355425+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream httpd already deleted so no worries on clearing tags" 2025-12-12T16:17:01.166355425+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream httpd" 2025-12-12T16:17:01.168270061+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream java already deleted so no worries on clearing tags" 2025-12-12T16:17:01.168306812+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream java" 2025-12-12T16:17:01.170618519+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream jboss-eap-xp3-openjdk11-runtime-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:01.170618519+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp3-openjdk11-runtime-openshift" 2025-12-12T16:17:01.173129210+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream jboss-eap-xp4-openjdk11-runtime-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:01.173129210+00:00 stderr F time="2025-12-12T16:17:01Z" 
level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp4-openjdk11-runtime-openshift" 2025-12-12T16:17:01.175325664+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream jboss-datagrid73-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:01.175325664+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-datagrid73-openshift" 2025-12-12T16:17:01.177525347+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream jboss-eap74-openjdk8-runtime-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:01.177525347+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk8-runtime-openshift" 2025-12-12T16:17:01.179265740+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream jboss-eap74-openjdk11-runtime-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:01.179265740+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk11-runtime-openshift" 2025-12-12T16:17:01.180832718+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream jboss-eap74-openjdk11-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:01.180832718+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk11-openshift" 2025-12-12T16:17:01.183372580+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="clearImageStreamTagError: stream jboss-eap-xp4-openjdk11-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:01.183372580+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp4-openjdk11-openshift" 2025-12-12T16:17:01.185736898+00:00 stderr F time="2025-12-12T16:17:01Z" level=warning msg="Image import for imagestream jenkins tag scheduled-upgrade-redeploy generation 3 failed with detailed message Internal error occurred: registry.redhat.io/ocp-tools-4/jenkins-rhel8:v4.13.0: Get \"https://registry.redhat.io/v2/ocp-tools-4/jenkins-rhel8/manifests/v4.13.0\": unauthorized: Please login to the Red Hat Registry using your Customer Portal credentials. 
Further instructions can be found here: https://access.redhat.com/RegistryAuthentication" 2025-12-12T16:17:03.045496883+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="initiated an imagestreamimport retry for imagestream/tag jenkins/scheduled-upgrade-redeploy" 2025-12-12T16:17:03.049483940+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream mariadb already deleted so no worries on clearing tags" 2025-12-12T16:17:03.049483940+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream mariadb" 2025-12-12T16:17:03.054497263+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream jboss-eap74-openjdk8-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:03.054497263+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk8-openshift" 2025-12-12T16:17:03.056898841+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream jboss-webserver57-openjdk11-tomcat9-openshift-ubi8 already deleted so no worries on clearing tags" 2025-12-12T16:17:03.056898841+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-webserver57-openjdk11-tomcat9-openshift-ubi8" 2025-12-12T16:17:03.058637374+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream mysql already deleted so no worries on clearing tags" 2025-12-12T16:17:03.058637374+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream mysql" 2025-12-12T16:17:03.060904139+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream nginx already deleted so no worries on clearing tags" 2025-12-12T16:17:03.060904139+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream nginx" 2025-12-12T16:17:03.063267757+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream jboss-webserver57-openjdk8-tomcat9-openshift-ubi8 already deleted so no worries on clearing tags" 2025-12-12T16:17:03.063267757+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-webserver57-openjdk8-tomcat9-openshift-ubi8" 2025-12-12T16:17:03.065874880+00:00 stderr F time="2025-12-12T16:17:03Z" level=warning msg="Image import for imagestream jenkins-agent-base tag scheduled-upgrade generation 3 failed with detailed message Internal error occurred: registry.redhat.io/ocp-tools-4/jenkins-agent-base-rhel8:v4.13.0: Get \"https://registry.redhat.io/v2/ocp-tools-4/jenkins-agent-base-rhel8/manifests/v4.13.0\": unauthorized: Please login to the Red Hat Registry using your Customer Portal credentials. 
Further instructions can be found here: https://access.redhat.com/RegistryAuthentication" 2025-12-12T16:17:03.627050241+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="initiated an imagestreamimport retry for imagestream/tag jenkins-agent-base/scheduled-upgrade" 2025-12-12T16:17:03.631658294+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream openjdk-11-rhel7 already deleted so no worries on clearing tags" 2025-12-12T16:17:03.631658294+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream openjdk-11-rhel7" 2025-12-12T16:17:03.636631875+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream perl already deleted so no worries on clearing tags" 2025-12-12T16:17:03.636670506+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream perl" 2025-12-12T16:17:03.639982307+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream postgresql already deleted so no worries on clearing tags" 2025-12-12T16:17:03.639982307+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream postgresql" 2025-12-12T16:17:03.642117179+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream postgresql13-for-sso75-openshift-rhel8 already deleted so no worries on clearing tags" 2025-12-12T16:17:03.642117179+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream postgresql13-for-sso75-openshift-rhel8" 2025-12-12T16:17:03.644870736+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream nodejs already deleted so no worries on clearing tags" 2025-12-12T16:17:03.644870736+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream nodejs" 2025-12-12T16:17:03.646868785+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream php already deleted so no worries on clearing tags" 2025-12-12T16:17:03.646868785+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream php" 2025-12-12T16:17:03.648691730+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream python already deleted so no worries on clearing tags" 2025-12-12T16:17:03.648691730+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream python" 2025-12-12T16:17:03.650539045+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream postgresql13-for-sso76-openshift-rhel8 already deleted so no worries on clearing tags" 2025-12-12T16:17:03.650539045+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream postgresql13-for-sso76-openshift-rhel8" 2025-12-12T16:17:03.652941583+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream redhat-openjdk18-openshift already deleted so no worries on clearing tags" 2025-12-12T16:17:03.652941583+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream redhat-openjdk18-openshift" 
2025-12-12T16:17:03.654581703+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream redis already deleted so no worries on clearing tags" 2025-12-12T16:17:03.654581703+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream redis" 2025-12-12T16:17:03.656215583+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream ruby already deleted so no worries on clearing tags" 2025-12-12T16:17:03.656215583+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream ruby" 2025-12-12T16:17:03.658020057+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-17 already deleted so no worries on clearing tags" 2025-12-12T16:17:03.658020057+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-17" 2025-12-12T16:17:03.659779910+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-11-runtime already deleted so no worries on clearing tags" 2025-12-12T16:17:03.659779910+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-11-runtime" 2025-12-12T16:17:03.661386550+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-21 already deleted so no worries on clearing tags" 2025-12-12T16:17:03.661386550+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-21" 2025-12-12T16:17:03.663239095+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream sso76-openshift-rhel8 already deleted so no worries on clearing tags" 2025-12-12T16:17:03.663239095+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream sso76-openshift-rhel8" 2025-12-12T16:17:03.664635699+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-17-runtime already deleted so no worries on clearing tags" 2025-12-12T16:17:03.664635699+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-17-runtime" 2025-12-12T16:17:03.665975222+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream sso75-openshift-rhel8 already deleted so no worries on clearing tags" 2025-12-12T16:17:03.665975222+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream sso75-openshift-rhel8" 2025-12-12T16:17:03.667572110+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-11 already deleted so no worries on clearing tags" 2025-12-12T16:17:03.667572110+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-11" 2025-12-12T16:17:03.669132658+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearing error messages from configmap for stream jenkins-agent-base and tag scheduled-upgrade" 2025-12-12T16:17:03.672884309+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors 
or image imports in flight for imagestream jenkins-agent-base" 2025-12-12T16:17:03.677370869+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-21-runtime already deleted so no worries on clearing tags" 2025-12-12T16:17:03.677370869+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-21-runtime" 2025-12-12T16:17:03.683020857+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-8-runtime already deleted so no worries on clearing tags" 2025-12-12T16:17:03.683020857+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-8-runtime" 2025-12-12T16:17:03.685254641+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-8 already deleted so no worries on clearing tags" 2025-12-12T16:17:03.685254641+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-8" 2025-12-12T16:17:03.688281185+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="clearing error messages from configmap for stream jenkins and tag scheduled-upgrade-redeploy" 2025-12-12T16:17:03.692219931+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="There are no more errors or image imports in flight for imagestream jenkins" 2025-12-12T16:17:03.696763642+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="CRDUPDATE importerrors false update" 2025-12-12T16:17:06.724314498+00:00 stderr F time="2025-12-12T16:17:06Z" level=info msg="no global imagestream configuration will block imagestream creation using " 2025-12-12T16:17:06.724314498+00:00 stderr F time="2025-12-12T16:17:06Z" level=info msg="At steady state: config the same and exists is true, in progress false, and version correct" 2025-12-12T16:17:06.776793710+00:00 stderr F time="2025-12-12T16:17:06Z" level=info msg="no global imagestream configuration will block imagestream creation using " 2025-12-12T16:17:06.776793710+00:00 stderr F time="2025-12-12T16:17:06Z" level=info msg="At steady state: config the same and exists is true, in progress false, and version correct" 2025-12-12T16:20:11.312837719+00:00 stderr F time="2025-12-12T16:20:11Z" level=info msg="no global imagestream configuration will block imagestream creation using " 2025-12-12T16:20:11.312837719+00:00 stderr F time="2025-12-12T16:20:11Z" level=info msg="At steady state: config the same and exists is true, in progress false, and version correct" 2025-12-12T16:20:25.299487975+00:00 stderr F time="2025-12-12T16:20:25Z" level=info msg="no global imagestream configuration will block imagestream creation using " 2025-12-12T16:20:25.299487975+00:00 stderr F time="2025-12-12T16:20:25Z" level=info msg="At steady state: config the same and exists is true, in progress false, and version correct" 2025-12-12T16:25:11.716426190+00:00 stderr F time="2025-12-12T16:25:11Z" level=info msg="no global imagestream configuration will block imagestream creation using registry.redhat.io" 2025-12-12T16:25:11.716426190+00:00 stderr F time="2025-12-12T16:25:11Z" level=info msg="SamplesRegistry changed from to registry.redhat.io" 2025-12-12T16:25:11.716496832+00:00 stderr F time="2025-12-12T16:25:11Z" level=info msg="ENTERING UPSERT / STEADY STATE PATH ExistTrue true ImageInProgressFalse true VersionOK true ConfigChanged 
true ManagementStateChanged true" 2025-12-12T16:25:11.936431707+00:00 stderr F time="2025-12-12T16:25:11Z" level=info msg="updated imagestream httpd" 2025-12-12T16:25:11.951141854+00:00 stderr F time="2025-12-12T16:25:11Z" level=info msg="updated imagestream java" 2025-12-12T16:25:11.968054968+00:00 stderr F time="2025-12-12T16:25:11Z" level=info msg="updated imagestream ubi8-openjdk-11" 2025-12-12T16:25:11.983702889+00:00 stderr F time="2025-12-12T16:25:11Z" level=info msg="updated imagestream ubi8-openjdk-17" 2025-12-12T16:25:11.996224118+00:00 stderr F time="2025-12-12T16:25:11Z" level=info msg="updated imagestream jenkins" 2025-12-12T16:25:12.018524663+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream mysql" 2025-12-12T16:25:12.058962145+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream jboss-datagrid73-openshift" 2025-12-12T16:25:12.096171262+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream java-runtime" 2025-12-12T16:25:12.138941075+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream ubi8-openjdk-11-runtime" 2025-12-12T16:25:12.177714833+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream mariadb" 2025-12-12T16:25:12.222328785+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream postgresql" 2025-12-12T16:25:12.256115982+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream redis" 2025-12-12T16:25:12.297077438+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream postgresql13-for-sso76-openshift-rhel8" 2025-12-12T16:25:12.337492999+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream dotnet-runtime" 2025-12-12T16:25:12.378817044+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream fuse7-eap-openshift-java11" 2025-12-12T16:25:12.420537600+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream fuse7-karaf-openshift" 2025-12-12T16:25:12.460209142+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream php" 2025-12-12T16:25:12.500957232+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream python" 2025-12-12T16:25:12.536790953+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream sso76-openshift-rhel8" 2025-12-12T16:25:12.578953540+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream fuse7-eap-openshift" 2025-12-12T16:25:12.615301204+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream fuse7-karaf-openshift-jdk11" 2025-12-12T16:25:12.659748262+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream openjdk-11-rhel7" 2025-12-12T16:25:12.702754741+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream redhat-openjdk18-openshift" 2025-12-12T16:25:12.738208702+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream ubi8-openjdk-8-runtime" 2025-12-12T16:25:12.779143107+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream perl" 2025-12-12T16:25:12.817871304+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream jboss-eap-xp4-openjdk11-openshift" 2025-12-12T16:25:12.856742375+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream jboss-eap-xp4-openjdk11-runtime-openshift" 2025-12-12T16:25:12.896855628+00:00 stderr F time="2025-12-12T16:25:12Z" 
level=info msg="updated imagestream fuse7-java11-openshift" 2025-12-12T16:25:12.936595102+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream ubi8-openjdk-21" 2025-12-12T16:25:12.981745757+00:00 stderr F time="2025-12-12T16:25:12Z" level=info msg="updated imagestream ubi8-openjdk-8" 2025-12-12T16:25:13.015632297+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream postgresql13-for-sso75-openshift-rhel8" 2025-12-12T16:25:13.061717217+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream dotnet" 2025-12-12T16:25:13.097276761+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream jboss-eap74-openjdk11-runtime-openshift" 2025-12-12T16:25:13.138565706+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream golang" 2025-12-12T16:25:13.176632915+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream ubi8-openjdk-21-runtime" 2025-12-12T16:25:13.218121975+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream jenkins-agent-base" 2025-12-12T16:25:13.262998173+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream nodejs" 2025-12-12T16:25:13.300291263+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream jboss-eap74-openjdk11-openshift" 2025-12-12T16:25:13.338078955+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream jboss-eap74-openjdk8-runtime-openshift" 2025-12-12T16:25:13.378885206+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream nginx" 2025-12-12T16:25:13.417208553+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream ruby" 2025-12-12T16:25:13.458057236+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream jboss-webserver57-openjdk11-tomcat9-openshift-ubi8" 2025-12-12T16:25:13.500367997+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream jboss-eap-xp3-openjdk11-runtime-openshift" 2025-12-12T16:25:13.540302795+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream fuse7-java-openshift" 2025-12-12T16:25:13.577837031+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream ubi8-openjdk-17-runtime" 2025-12-12T16:25:13.616453405+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream sso75-openshift-rhel8" 2025-12-12T16:25:13.656396224+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream jboss-webserver57-openjdk8-tomcat9-openshift-ubi8" 2025-12-12T16:25:13.697145924+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream jboss-eap-xp3-openjdk11-openshift" 2025-12-12T16:25:13.736738354+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="updated imagestream jboss-eap74-openjdk8-openshift" 2025-12-12T16:25:13.736738354+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="CRDUPDATE samples upserted; set clusteroperator ready, steady state" 2025-12-12T16:25:13.736894778+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream httpd" 2025-12-12T16:25:13.751438480+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream java" 2025-12-12T16:25:13.774223128+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream jenkins" 
2025-12-12T16:25:13.793104854+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream java-runtime" 2025-12-12T16:25:13.812577345+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-11" 2025-12-12T16:25:13.831848861+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream mariadb" 2025-12-12T16:25:13.852244257+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-11-runtime" 2025-12-12T16:25:13.873309250+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-17" 2025-12-12T16:25:13.892324630+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream mysql" 2025-12-12T16:25:13.912525650+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream postgresql13-for-sso76-openshift-rhel8" 2025-12-12T16:25:13.932371331+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream dotnet-runtime" 2025-12-12T16:25:13.953017693+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-karaf-openshift" 2025-12-12T16:25:13.971425307+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-datagrid73-openshift" 2025-12-12T16:25:13.991486323+00:00 stderr F time="2025-12-12T16:25:13Z" level=info msg="There are no more errors or image imports in flight for imagestream redis" 2025-12-12T16:25:14.012613628+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream sso76-openshift-rhel8" 2025-12-12T16:25:14.030960099+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream postgresql" 2025-12-12T16:25:14.051140449+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-eap-openshift-java11" 2025-12-12T16:25:14.072120250+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-karaf-openshift-jdk11" 2025-12-12T16:25:14.092431153+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream php" 2025-12-12T16:25:14.112298685+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream python" 2025-12-12T16:25:14.133065171+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream perl" 2025-12-12T16:25:14.151868674+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-eap-openshift" 2025-12-12T16:25:14.172162957+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp4-openjdk11-openshift" 2025-12-12T16:25:14.193169859+00:00 stderr F 
time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-java11-openshift" 2025-12-12T16:25:14.212475976+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream redhat-openjdk18-openshift" 2025-12-12T16:25:14.232274286+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream openjdk-11-rhel7" 2025-12-12T16:25:14.252802715+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-8-runtime" 2025-12-12T16:25:14.272354698+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream postgresql13-for-sso75-openshift-rhel8" 2025-12-12T16:25:14.292212150+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk11-runtime-openshift" 2025-12-12T16:25:14.311676271+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp4-openjdk11-runtime-openshift" 2025-12-12T16:25:14.333146165+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-21" 2025-12-12T16:25:14.354381703+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream jenkins-agent-base" 2025-12-12T16:25:14.372037856+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-21-runtime" 2025-12-12T16:25:14.392948195+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-8" 2025-12-12T16:25:14.410570608+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream golang" 2025-12-12T16:25:14.430712537+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream nginx" 2025-12-12T16:25:14.450383464+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream dotnet" 2025-12-12T16:25:14.470293037+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk11-openshift" 2025-12-12T16:25:14.491139374+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream nodejs" 2025-12-12T16:25:14.511397366+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk8-runtime-openshift" 2025-12-12T16:25:14.531167115+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream ruby" 2025-12-12T16:25:14.550774560+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-webserver57-openjdk11-tomcat9-openshift-ubi8" 2025-12-12T16:25:14.571714460+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream 
jboss-eap-xp3-openjdk11-runtime-openshift" 2025-12-12T16:25:14.591008236+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp3-openjdk11-openshift" 2025-12-12T16:25:14.612694336+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-17-runtime" 2025-12-12T16:25:14.632135736+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-java-openshift" 2025-12-12T16:25:14.650353665+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream sso75-openshift-rhel8" 2025-12-12T16:25:14.671404637+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk8-openshift" 2025-12-12T16:25:14.692064830+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="clearImageStreamTagError: stream dotnet-runtime already deleted so no worries on clearing tags" 2025-12-12T16:25:14.692064830+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream dotnet-runtime" 2025-12-12T16:25:14.709757595+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-webserver57-openjdk8-tomcat9-openshift-ubi8" 2025-12-12T16:25:14.731741932+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="clearImageStreamTagError: stream dotnet already deleted so no worries on clearing tags" 2025-12-12T16:25:14.731741932+00:00 stderr F time="2025-12-12T16:25:14Z" level=info msg="There are no more errors or image imports in flight for imagestream dotnet" 2025-12-12T16:25:15.160411339+00:00 stderr F time="2025-12-12T16:25:15Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-17 already deleted so no worries on clearing tags" 2025-12-12T16:25:15.160411339+00:00 stderr F time="2025-12-12T16:25:15Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-17" 2025-12-12T16:25:15.960362495+00:00 stderr F time="2025-12-12T16:25:15Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-11 already deleted so no worries on clearing tags" 2025-12-12T16:25:15.960362495+00:00 stderr F time="2025-12-12T16:25:15Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-11" 2025-12-12T16:25:16.218303019+00:00 stderr F time="2025-12-12T16:25:16Z" level=info msg="clearImageStreamTagError: stream openjdk-11-rhel7 already deleted so no worries on clearing tags" 2025-12-12T16:25:16.218303019+00:00 stderr F time="2025-12-12T16:25:16Z" level=info msg="There are no more errors or image imports in flight for imagestream openjdk-11-rhel7" 2025-12-12T16:25:16.390666235+00:00 stderr F time="2025-12-12T16:25:16Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-11-runtime already deleted so no worries on clearing tags" 2025-12-12T16:25:16.390666235+00:00 stderr F time="2025-12-12T16:25:16Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-11-runtime" 2025-12-12T16:25:16.629836016+00:00 stderr F time="2025-12-12T16:25:16Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-8-runtime already deleted so no worries on clearing tags" 
2025-12-12T16:25:16.629836016+00:00 stderr F time="2025-12-12T16:25:16Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-8-runtime" 2025-12-12T16:25:16.757037906+00:00 stderr F time="2025-12-12T16:25:16Z" level=info msg="no global imagestream configuration will block imagestream creation using registry.redhat.io" 2025-12-12T16:25:16.757037906+00:00 stderr F time="2025-12-12T16:25:16Z" level=info msg="At steady state: config the same and exists is true, in progress false, and version correct" 2025-12-12T16:25:16.918208808+00:00 stderr F time="2025-12-12T16:25:16Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-17-runtime already deleted so no worries on clearing tags" 2025-12-12T16:25:16.918208808+00:00 stderr F time="2025-12-12T16:25:16Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-17-runtime" 2025-12-12T16:25:16.938944653+00:00 stderr F time="2025-12-12T16:25:16Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-8 already deleted so no worries on clearing tags" 2025-12-12T16:25:16.938944653+00:00 stderr F time="2025-12-12T16:25:16Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-8" 2025-12-12T16:25:18.415473557+00:00 stderr F time="2025-12-12T16:25:18Z" level=info msg="clearImageStreamTagError: stream redhat-openjdk18-openshift already deleted so no worries on clearing tags" 2025-12-12T16:25:18.415473557+00:00 stderr F time="2025-12-12T16:25:18Z" level=info msg="There are no more errors or image imports in flight for imagestream redhat-openjdk18-openshift" 2025-12-12T16:26:55.010565303+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream fuse7-java-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.010877731+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-java-openshift" 2025-12-12T16:26:55.043900456+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jenkins-agent-base already deleted so no worries on clearing tags" 2025-12-12T16:26:55.044001408+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jenkins-agent-base" 2025-12-12T16:26:55.068821825+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream postgresql13-for-sso75-openshift-rhel8 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.068904157+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream postgresql13-for-sso75-openshift-rhel8" 2025-12-12T16:26:55.082929152+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-21 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.083034974+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-21" 2025-12-12T16:26:55.105074781+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream redis already deleted so no worries on clearing tags" 2025-12-12T16:26:55.105192714+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream redis" 2025-12-12T16:26:55.115269339+00:00 stderr F 
time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap-xp3-openjdk11-runtime-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.115269339+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp3-openjdk11-runtime-openshift" 2025-12-12T16:26:55.135437028+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream dotnet already deleted so no worries on clearing tags" 2025-12-12T16:26:55.135437028+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream dotnet" 2025-12-12T16:26:55.161328362+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-17-runtime already deleted so no worries on clearing tags" 2025-12-12T16:26:55.161328362+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-17-runtime" 2025-12-12T16:26:55.190485329+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream fuse7-eap-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.190485329+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-eap-openshift" 2025-12-12T16:26:55.190485329+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream perl already deleted so no worries on clearing tags" 2025-12-12T16:26:55.190485329+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream perl" 2025-12-12T16:26:55.206031592+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream openjdk-11-rhel7 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.206031592+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream openjdk-11-rhel7" 2025-12-12T16:26:55.216486376+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream fuse7-java11-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.216563168+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-java11-openshift" 2025-12-12T16:26:55.236321237+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap74-openjdk8-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.236321237+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk8-openshift" 2025-12-12T16:26:55.247230542+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jenkins already deleted so no worries on clearing tags" 2025-12-12T16:26:55.247230542+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jenkins" 2025-12-12T16:26:55.253101211+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream nginx already deleted so no worries on clearing tags" 2025-12-12T16:26:55.253101211+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or 
image imports in flight for imagestream nginx" 2025-12-12T16:26:55.259816700+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap74-openjdk8-runtime-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.259816700+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk8-runtime-openshift" 2025-12-12T16:26:55.263207736+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream php already deleted so no worries on clearing tags" 2025-12-12T16:26:55.263207736+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream php" 2025-12-12T16:26:55.263207736+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-11 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.263207736+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-11" 2025-12-12T16:26:55.266495309+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-17 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.266495309+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-17" 2025-12-12T16:26:55.268077659+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream mariadb already deleted so no worries on clearing tags" 2025-12-12T16:26:55.268286134+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream mariadb" 2025-12-12T16:26:55.284277138+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap-xp4-openjdk11-runtime-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.284277138+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp4-openjdk11-runtime-openshift" 2025-12-12T16:26:55.296246731+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream mysql already deleted so no worries on clearing tags" 2025-12-12T16:26:55.296246731+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream mysql" 2025-12-12T16:26:55.300291243+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream python already deleted so no worries on clearing tags" 2025-12-12T16:26:55.300291243+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream python" 2025-12-12T16:26:55.310646195+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream redhat-openjdk18-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.310646195+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream redhat-openjdk18-openshift" 2025-12-12T16:26:55.310646195+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream ruby already deleted so no worries on clearing tags" 2025-12-12T16:26:55.310646195+00:00 stderr F 
time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ruby" 2025-12-12T16:26:55.315213010+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap-xp3-openjdk11-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.315213010+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp3-openjdk11-openshift" 2025-12-12T16:26:55.318204525+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream postgresql already deleted so no worries on clearing tags" 2025-12-12T16:26:55.318204525+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream postgresql" 2025-12-12T16:26:55.327245884+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream java-runtime already deleted so no worries on clearing tags" 2025-12-12T16:26:55.327245884+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream java-runtime" 2025-12-12T16:26:55.334221720+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jboss-datagrid73-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.334221720+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-datagrid73-openshift" 2025-12-12T16:26:55.344339516+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream fuse7-karaf-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.344339516+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-karaf-openshift" 2025-12-12T16:26:55.347796993+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap74-openjdk11-runtime-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.347796993+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk11-runtime-openshift" 2025-12-12T16:26:55.353220740+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream nodejs already deleted so no worries on clearing tags" 2025-12-12T16:26:55.353220740+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream nodejs" 2025-12-12T16:26:55.408227580+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream fuse7-eap-openshift-java11 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.408227580+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-eap-openshift-java11" 2025-12-12T16:26:55.420899820+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream dotnet-runtime already deleted so no worries on clearing tags" 2025-12-12T16:26:55.420899820+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream dotnet-runtime" 2025-12-12T16:26:55.431454577+00:00 stderr F time="2025-12-12T16:26:55Z" level=info 
msg="clearImageStreamTagError: stream httpd already deleted so no worries on clearing tags" 2025-12-12T16:26:55.431454577+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream httpd" 2025-12-12T16:26:55.468409140+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream golang already deleted so no worries on clearing tags" 2025-12-12T16:26:55.468409140+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream golang" 2025-12-12T16:26:55.482401864+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jboss-webserver57-openjdk11-tomcat9-openshift-ubi8 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.482401864+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-webserver57-openjdk11-tomcat9-openshift-ubi8" 2025-12-12T16:26:55.498324706+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-11-runtime already deleted so no worries on clearing tags" 2025-12-12T16:26:55.498324706+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-11-runtime" 2025-12-12T16:26:55.513230813+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-8-runtime already deleted so no worries on clearing tags" 2025-12-12T16:26:55.513230813+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-8-runtime" 2025-12-12T16:26:55.530234592+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-8 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.530234592+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-8" 2025-12-12T16:26:55.538246435+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream sso75-openshift-rhel8 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.538246435+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream sso75-openshift-rhel8" 2025-12-12T16:26:55.550253698+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream java already deleted so no worries on clearing tags" 2025-12-12T16:26:55.550253698+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream java" 2025-12-12T16:26:55.561230695+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jboss-webserver57-openjdk8-tomcat9-openshift-ubi8 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.561230695+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-webserver57-openjdk8-tomcat9-openshift-ubi8" 2025-12-12T16:26:55.563758719+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream sso76-openshift-rhel8 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.563758719+00:00 stderr F time="2025-12-12T16:26:55Z" level=info 
msg="There are no more errors or image imports in flight for imagestream sso76-openshift-rhel8" 2025-12-12T16:26:55.572209903+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream postgresql13-for-sso76-openshift-rhel8 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.572209903+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream postgresql13-for-sso76-openshift-rhel8" 2025-12-12T16:26:55.576202454+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-21-runtime already deleted so no worries on clearing tags" 2025-12-12T16:26:55.576202454+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-21-runtime" 2025-12-12T16:26:55.584654707+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream fuse7-karaf-openshift-jdk11 already deleted so no worries on clearing tags" 2025-12-12T16:26:55.584654707+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-karaf-openshift-jdk11" 2025-12-12T16:26:55.601980165+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap-xp4-openjdk11-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.601980165+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp4-openjdk11-openshift" 2025-12-12T16:26:55.605034712+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap74-openjdk11-openshift already deleted so no worries on clearing tags" 2025-12-12T16:26:55.605034712+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk11-openshift" 2025-12-12T16:30:11.321447381+00:00 stderr F time="2025-12-12T16:30:11Z" level=info msg="no global imagestream configuration will block imagestream creation using registry.redhat.io" 2025-12-12T16:30:11.321447381+00:00 stderr F time="2025-12-12T16:30:11Z" level=info msg="At steady state: config the same and exists is true, in progress false, and version correct" 2025-12-12T16:30:25.320277624+00:00 stderr F time="2025-12-12T16:30:25Z" level=info msg="no global imagestream configuration will block imagestream creation using registry.redhat.io" 2025-12-12T16:30:25.320277624+00:00 stderr F time="2025-12-12T16:30:25Z" level=info msg="At steady state: config the same and exists is true, in progress false, and version correct" 2025-12-12T16:36:55.011548673+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream java already deleted so no worries on clearing tags" 2025-12-12T16:36:55.011548673+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream java" 2025-12-12T16:36:55.016657441+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream fuse7-karaf-openshift-jdk11 already deleted so no worries on clearing tags" 2025-12-12T16:36:55.016657441+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-karaf-openshift-jdk11" 2025-12-12T16:36:55.020413935+00:00 stderr F 
time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream fuse7-java-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.020413935+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-java-openshift" 2025-12-12T16:36:55.023479062+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap74-openjdk11-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.023479062+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk11-openshift" 2025-12-12T16:36:55.025663387+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-21-runtime already deleted so no worries on clearing tags" 2025-12-12T16:36:55.025663387+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-21-runtime" 2025-12-12T16:36:55.028209291+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap-xp4-openjdk11-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.028248692+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp4-openjdk11-openshift" 2025-12-12T16:36:55.031958285+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jenkins-agent-base already deleted so no worries on clearing tags" 2025-12-12T16:36:55.031958285+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jenkins-agent-base" 2025-12-12T16:36:55.034923700+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream redis already deleted so no worries on clearing tags" 2025-12-12T16:36:55.034923700+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream redis" 2025-12-12T16:36:55.038197392+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream postgresql13-for-sso76-openshift-rhel8 already deleted so no worries on clearing tags" 2025-12-12T16:36:55.038197392+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream postgresql13-for-sso76-openshift-rhel8" 2025-12-12T16:36:55.041099615+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream dotnet already deleted so no worries on clearing tags" 2025-12-12T16:36:55.041099615+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream dotnet" 2025-12-12T16:36:55.044203603+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap-xp3-openjdk11-runtime-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.044203603+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp3-openjdk11-runtime-openshift" 2025-12-12T16:36:55.046651795+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap74-openjdk8-openshift already deleted so no worries on clearing 
tags" 2025-12-12T16:36:55.046651795+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk8-openshift" 2025-12-12T16:36:55.048959143+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-21 already deleted so no worries on clearing tags" 2025-12-12T16:36:55.048959143+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-21" 2025-12-12T16:36:55.051112407+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jenkins already deleted so no worries on clearing tags" 2025-12-12T16:36:55.051112407+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jenkins" 2025-12-12T16:36:55.053495657+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream postgresql13-for-sso75-openshift-rhel8 already deleted so no worries on clearing tags" 2025-12-12T16:36:55.053495657+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream postgresql13-for-sso75-openshift-rhel8" 2025-12-12T16:36:55.055587469+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream nginx already deleted so no worries on clearing tags" 2025-12-12T16:36:55.055587469+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream nginx" 2025-12-12T16:36:55.057725323+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-17-runtime already deleted so no worries on clearing tags" 2025-12-12T16:36:55.057725323+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-17-runtime" 2025-12-12T16:36:55.059820035+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream fuse7-java11-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.059820035+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-java11-openshift" 2025-12-12T16:36:55.061988890+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream perl already deleted so no worries on clearing tags" 2025-12-12T16:36:55.061988890+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream perl" 2025-12-12T16:36:55.064859772+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap74-openjdk8-runtime-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.064859772+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk8-runtime-openshift" 2025-12-12T16:36:55.067094188+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream fuse7-eap-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.067094188+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-eap-openshift" 2025-12-12T16:36:55.069332414+00:00 stderr F 
time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream openjdk-11-rhel7 already deleted so no worries on clearing tags" 2025-12-12T16:36:55.069332414+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream openjdk-11-rhel7" 2025-12-12T16:36:55.071479908+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-11 already deleted so no worries on clearing tags" 2025-12-12T16:36:55.071479908+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-11" 2025-12-12T16:36:55.073499769+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream mariadb already deleted so no worries on clearing tags" 2025-12-12T16:36:55.073499769+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream mariadb" 2025-12-12T16:36:55.075924490+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream mysql already deleted so no worries on clearing tags" 2025-12-12T16:36:55.075924490+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream mysql" 2025-12-12T16:36:55.078736381+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream php already deleted so no worries on clearing tags" 2025-12-12T16:36:55.078736381+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream php" 2025-12-12T16:36:55.081411398+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap-xp4-openjdk11-runtime-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.081411398+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp4-openjdk11-runtime-openshift" 2025-12-12T16:36:55.083363387+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap-xp3-openjdk11-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.083363387+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap-xp3-openjdk11-openshift" 2025-12-12T16:36:55.085142132+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream python already deleted so no worries on clearing tags" 2025-12-12T16:36:55.085142132+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream python" 2025-12-12T16:36:55.087007539+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-17 already deleted so no worries on clearing tags" 2025-12-12T16:36:55.087007539+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-17" 2025-12-12T16:36:55.089381098+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream redhat-openjdk18-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.089381098+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for 
imagestream redhat-openjdk18-openshift" 2025-12-12T16:36:55.092008264+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream fuse7-eap-openshift-java11 already deleted so no worries on clearing tags" 2025-12-12T16:36:55.092008264+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-eap-openshift-java11" 2025-12-12T16:36:55.094516827+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream ruby already deleted so no worries on clearing tags" 2025-12-12T16:36:55.094516827+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ruby" 2025-12-12T16:36:55.096762214+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream fuse7-karaf-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.096762214+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream fuse7-karaf-openshift" 2025-12-12T16:36:55.098853046+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream dotnet-runtime already deleted so no worries on clearing tags" 2025-12-12T16:36:55.098853046+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream dotnet-runtime" 2025-12-12T16:36:55.101097613+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream java-runtime already deleted so no worries on clearing tags" 2025-12-12T16:36:55.101097613+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream java-runtime" 2025-12-12T16:36:55.103278837+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream postgresql already deleted so no worries on clearing tags" 2025-12-12T16:36:55.103278837+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream postgresql" 2025-12-12T16:36:55.105298438+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream golang already deleted so no worries on clearing tags" 2025-12-12T16:36:55.105298438+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream golang" 2025-12-12T16:36:55.108160230+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jboss-eap74-openjdk11-runtime-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.108160230+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-eap74-openjdk11-runtime-openshift" 2025-12-12T16:36:55.110398236+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jboss-datagrid73-openshift already deleted so no worries on clearing tags" 2025-12-12T16:36:55.110398236+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-datagrid73-openshift" 2025-12-12T16:36:55.112563441+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jboss-webserver57-openjdk11-tomcat9-openshift-ubi8 already deleted so no worries on clearing tags" 
2025-12-12T16:36:55.112563441+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-webserver57-openjdk11-tomcat9-openshift-ubi8" 2025-12-12T16:36:55.114691584+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-11-runtime already deleted so no worries on clearing tags" 2025-12-12T16:36:55.114691584+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-11-runtime" 2025-12-12T16:36:55.116529160+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream sso75-openshift-rhel8 already deleted so no worries on clearing tags" 2025-12-12T16:36:55.116529160+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream sso75-openshift-rhel8" 2025-12-12T16:36:55.119014323+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream httpd already deleted so no worries on clearing tags" 2025-12-12T16:36:55.119014323+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream httpd" 2025-12-12T16:36:55.121958147+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream nodejs already deleted so no worries on clearing tags" 2025-12-12T16:36:55.121958147+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream nodejs" 2025-12-12T16:36:55.125472475+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream jboss-webserver57-openjdk8-tomcat9-openshift-ubi8 already deleted so no worries on clearing tags" 2025-12-12T16:36:55.125472475+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream jboss-webserver57-openjdk8-tomcat9-openshift-ubi8" 2025-12-12T16:36:55.127401164+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-8 already deleted so no worries on clearing tags" 2025-12-12T16:36:55.127401164+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-8" 2025-12-12T16:36:55.129811034+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream ubi8-openjdk-8-runtime already deleted so no worries on clearing tags" 2025-12-12T16:36:55.129811034+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream ubi8-openjdk-8-runtime" 2025-12-12T16:36:55.132239685+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="clearImageStreamTagError: stream sso76-openshift-rhel8 already deleted so no worries on clearing tags" 2025-12-12T16:36:55.132239685+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="There are no more errors or image imports in flight for imagestream sso76-openshift-rhel8" 2025-12-12T16:38:37.975064987+00:00 stderr F time="2025-12-12T16:38:37Z" level=info msg="clearImageStreamTagError: stream jenkins-agent-base already deleted so no worries on clearing tags" 2025-12-12T16:38:37.975064987+00:00 stderr F time="2025-12-12T16:38:37Z" level=info msg="There are no more errors or image imports in flight for imagestream jenkins-agent-base" 
2025-12-12T16:40:11.321142083+00:00 stderr F time="2025-12-12T16:40:11Z" level=info msg="no global imagestream configuration will block imagestream creation using registry.redhat.io"
2025-12-12T16:40:11.321142083+00:00 stderr F time="2025-12-12T16:40:11Z" level=info msg="At steady state: config the same and exists is true, in progress false, and version correct"
2025-12-12T16:40:25.342011635+00:00 stderr F time="2025-12-12T16:40:25Z" level=info msg="no global imagestream configuration will block imagestream creation using registry.redhat.io"
2025-12-12T16:40:25.342011635+00:00 stderr F time="2025-12-12T16:40:25Z" level=info msg="At steady state: config the same and exists is true, in progress false, and version correct"
2025-12-12T16:42:23.004591572+00:00 stderr F time="2025-12-12T16:42:23Z" level=info msg="clearImageStreamTagError: stream jenkins already deleted so no worries on clearing tags"
2025-12-12T16:42:23.004687054+00:00 stderr F time="2025-12-12T16:42:23Z" level=info msg="There are no more errors or image imports in flight for imagestream jenkins"

[pod log: home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4/kube-rbac-proxy-crio/0.log]

2025-12-12T16:15:02.841277853+00:00 stderr F W1212 16:15:02.841106       1 deprecated.go:66]
2025-12-12T16:15:02.841277853+00:00 stderr F ==== Removed Flag Warning ======================
2025-12-12T16:15:02.841277853+00:00 stderr F
2025-12-12T16:15:02.841277853+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more.
2025-12-12T16:15:02.841277853+00:00 stderr F
2025-12-12T16:15:02.841277853+00:00 stderr F ===============================================
2025-12-12T16:15:02.841277853+00:00 stderr F
2025-12-12T16:15:02.841277853+00:00 stderr F I1212 16:15:02.841251       1 kube-rbac-proxy.go:532] Reading config file: /etc/kubernetes/crio-metrics-proxy.cfg
2025-12-12T16:15:02.900724406+00:00 stderr F I1212 16:15:02.900218       1 kube-rbac-proxy.go:235] Valid token audiences:
2025-12-12T16:15:02.901217346+00:00 stderr F I1212 16:15:02.901136       1 dynamic_cafile_content.go:161] "Starting controller" name="client-ca::/etc/kubernetes/kubelet-ca.crt"
2025-12-12T16:15:02.902871926+00:00 stderr F I1212 16:15:02.902841       1 kube-rbac-proxy.go:349] Reading certificate files
2025-12-12T16:15:02.903324184+00:00 stderr F I1212 16:15:02.903298       1 kube-rbac-proxy.go:397] Starting TCP socket on :9637
2025-12-12T16:15:02.904068287+00:00 stderr F I1212 16:15:02.904027       1 kube-rbac-proxy.go:404] Listening securely on :9637

[pod log: home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4/setup/0.log]

2025-12-12T16:15:01.386735903+00:00 stdout P Waiting for kubelet key and certificate to be available

[pod log: home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/registry-server/0.log.gz (gzip-compressed binary content not reproduced here)]
[home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/extract-content/0.log (empty)]
[home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/extract-utilities/0.log (empty)]
[home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/extract-utilities/0.log (empty)]
[home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/extract-content/0.log (empty)]
[home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/registry-server/0.log.gz (gzip-compressed binary data)]
#‰CöŒ$x9Á$cguÑGˆÖN…$,—ž¹öy€H§*„ë¢d&(uÆ=èQãHÃA[MQãþ'BG¦TØH©v›ËèG'l¶L‚jó½3 &’ß,-Afj#ƒkˆÔf*ì‚fÞ2f±=f !œ; gîç¡«Gxp5mL¬îç¡gÞ‹°6Å0ÂʺRÍÐÖØjÙÏCôHÄ UíßsË#wcmbXIìH„”à…Ø¸÷X/!Š03®6£Mí˽k¯~ý  3^þE¸\&úRâ'‡$Ç|ÊÆæ™ÎX¬òÌ,?árE! ¼aÖÿ†ÊAÌð Éß[}k 3ÍÇŒò(x#Ü[H*2.øÒ~Aè‘Òr©f IÁgÁ]4¬|P"aì8\[K·oºÂNJ n­%«j ïò+ƒ‘î’å'*LËy¨GFöZÒØ…^™(Žàç·qzÿ˜òÌ"ìßà‚ðSÍ¥™´1¼«Ž/P„ßåÃCòùT(ÂÆa$i0r7×öOÖšŽ´q¿}NƒððÇ™WðþÌ!j$‡0%±Â‘Ï×9fBF ÷vŸÍˆ8Ñ{ ¶;¢pÏðaç啊0C€“D`Ly}Ú¨#‹ v¢”À^Óþ´‘<€‡_ú0³ ÿ7x@ÄðC®ib àò[ Œ7oÂooÂåJÒ àbü!cpùf2ED¿Á%‚‡?÷Zr&JjÜ÷Z¯ÓÆd¥0ȹ×A Ü>Âü7À‹0pñ‘·€$Ümµ<2 Ix$èÈ…cV£Æž•u¥‘LA45 W»–óOŒØ#˜:»3xXÏCÆHÈ !IŠåøõ<4D ç¹ „û'Öz¤7mBm;õ<4Íð`–”ŒbãÕÏœZU¦•Œ3dX ¦°Ò>GF²TÄûçcæIˆPQìÆ_ëÊ|bBilFC¾xˆhAUi¿y ÏdU冤áÞ­§Ï|ãì aæ…pŠ8©$acÌ8Ò™vQ–†wÆz|&‡pufÁ ñ‚Ë.>ÃCŒûWÆŒ3Š`AÝØÚ>Îøt0$a½$؈1o0‰`„íbÆ`vwÂMÈBé*¡lß¹Ú" -Ç Ú)P=Úß`ðáÁÉK‘*nŒ iˆ€ìPùZY,˜É SªTð&lCñ] )ª8dìHfxÐóƒZâzyˆ‘¹ö¶2F)qeg¨Ï¿€"lW„Ù} åóƒ;Á;¡fðÇþëJF’H•dOl³¬œ]i0ªf¢‚ÿfïÜv㺎4üF«ÎU·3@®r¡7ŒŒ“x€óþ³[’MŽEJº»X[ë·!_زHv}u>¬é<ðÁÙ¤ºö_ç¿ \œáâY­†E€Ex]—‹`J†ÊânsGÉ+ ÛÃyˆÃZ.±ž<$3£ª0rû±¥×PK4¯yÌ¿™Ò@WÂAŒwnzà .]v®$_ŽfôÈéµê!@È ûÏÛÎ/Ʊ…¨`þ8·,¿Æ2 %´"·Gˆu‹ çÙÇ[í±²Â0á~wÄ;pÈd)¼yùÅžò< `x-'0€j¹ª—p ò¨–aFðp•g)5XX„¯ã¬ÙrIarßA@ÀæÀ%À%¼¸5ð^xð¦¤Á]çö¶NÉ•Z‰³»3 €[ì— QAË$h _i°cÃ?l~7Ãa`~Ë{¶ä“–h(–ZFFŒ-1‚ž_§ã«ã‡¸§Š %–‹`&Z„Ÿà·ñe\ʸ@ÖP-U%Í2Ä#³†l!@YXÐ|žXGÒ–º¢ÛR[è.M´d-D±úÍãy°ê‰k… _GÞKêñ u2à¨%Î_tl cÕ"’.2zKÀ¬„¤adÈ( `sz|‚,] AâHZ‚Ä$ Õ¼BÚ(=Ö:ÿ$J”ç7¢¢'DˆåŠ$r¾ypï(*E±(†‘¨ŽVd›'Ã"ÌÈ¥åLû#x€ExÖ&¬Ã"À"¼zʧ£Ð\~;—@°#c„–¬ÁËÄÍèùA[é‹-à º´Â S¶OÑå¶X%ŽŒ;ef8£6ÿ‘?íxôñäs£Œ0ž‡Ž$RÍKä%ÐyæJ¼·U¸§ ŸÿªÜ† QlÂ(|%àîøà)–àùäÁ{‘ç¾–`8 g´>cPbëûâ¡ÿÏ×i¼ÞÎ!@ÿ§êÿjq§  º/ „þ?8ðÁÇ¢pèÿÜ0°%þ§\žßß\†þ7è¿ô>œîKü ÿOÕºÐÿ V•ÊΠ ÿS1Ð& <Åô7ýçóãVeèÿÞþÿ@ÿ¬ÿ-r'¶T…úuÿy  þüªÁ3ßb?èÿØ.]èÿcÏ=†Ÿ«d¡ý?ï …VXAÿ…ÿM‚ÏòǃQþ{ÐÿëËÜkFÚÿsõŸ.‚ôÿ’úoË‹Qÿ›«ÿÒƒAù2äÿûÅÿ÷ úÿTý |¥¬Ýƾ'|á9@öÛ{G¨í×ú»_ð8в Ò”Dˆ*‚cÍBÚE0€?¸dE‚ß4„á> ÒFq0˜›´PÎŒ†À†î‚ßÔýß/xØý§Ú}À`IÏ\ø­((¸°Ý>ä¹Ãìo;R&ؼ$K\*áv›€à÷tÿ;ìþSÝ¿`°D{0pvx ½ä¾eí_Vè2Û=í#RÖ ö®ý?¸+öþ…Ê+pn»¸rß4îãÓÒ¿ÝÙ7áT#¬}oþêÃ0€þ_ñÕ~W/ÆJhðo~ïë@ÿøµzSôŠç>~r  ÿW¬ô@ý±ÜÃÀ(ý¾½w<Å„(±é¿_Ÿç~ÁCý€i?;Aÿ÷›òv)Z¨þMzíƒ x¤}ƒå‡ÿ´û€Áðù>À`5Q`™†X`¿°{¿_¼O}n¿Üw?PÿKª¿P≗Q#=Y_è†Ýßü°Ÿ„ÑZÀ`®ûï©ý'i&öKûî<ôÿ©n §÷Ÿìw>øý¿äÊW©c¹÷¡û1€þ_2 „à75üµpÕù?0@óÿ3y&œ8ù°]ùÿ~¹Cý€Û^Pÿ-Õ_ÏOû 4 ÷Ýʾü¦ OD˜öÙ>ÜPÿKª?³ä¾]µOYÏÿ€ÕþÍó¼`ý¿b›‚Ÿ·ÛÛ$xgbÔ÷çnuIIJhóNÒÿ¹Gh¦ã˜Çvÿ»ù[^PÿMoù@ð›6v`÷ñb;ôÛ?ä‡Á¼ÁáÃá/ è?®6Cÿ·=âXÐü; =°û[Å}ÐØ}¨ÿ¦êO=/¶BýQöyü'þ;ÿÞ'šúíãÇßþ¡à&6‹ïÇnâÁÿý‡þS(ôa"ô/ûBÿ1Áo xx|ô…àñ÷ÝôNè?<>ÁÃðï5ÆÀt̓áw` €A×ó Bð;¾%÷'½û­¸½ (Q,|S6özýLÏa €Ái ì" f¼ä*1ôÑôúýß{Yúý‡þ è?ôúý‡þ£ýÇè0ôÿQúÿÿûËýõç?üÏßÿó/?ýòËÏE vÞ…+¸ô<|‚Eø7 x÷S.–´ßkÝ%íÇ›? ìÇ8ûÁqS^ò2‚‹¨1Kþþú¿­þôó Ø'@fðêFx¯x‡‡@ŠC€§ë¡þ˜BHà vO<€‡—f’|\¤ˆ©”*FxOôE•åߌÊüs^ÞþŸRþð“í÷ Ž£çÒ‰‚”º¦àÆ•úý_õÇqLh?.åCýáý!xt–aø1| ýGc ò‡Ø ŒHï¢(/R°é£X~~î§#g£‡€ZJØRœ¿¤ÆÀ8¼ÚYlÁby$p˜ƒÞò¦²/wB adÀh=¤‘#g™3ö LâXcŸ"DOP2R̦ŒªåÐÉÉ{:n`Î}n½¥°P)R‰@adi©¥–PE„±Õ‘6 £|keƯpÍZxK6¬2íÖeˆuÆ„wÞ6BLø¼óøg®ØòDÞÉ;æÖçï1Ôî-@¾cË#]COŒxFˆ¤XoÜöîÅI€«l·º€ç3cS îß OqG¨Ð‚dù[ -ã%HŸ?ÞS& ¦38„™ñ Œ‹nûzNÈù »¬ÃyàuTËØÙ C-'t“'š„ê‰5ˆ×1§›j:›d§3 4'eM‚wÞýaµÑYBZKÙˆ’NßÃ0s @ô€4'†åŸôRROZ˜®œ¸ˆ=Öò«öpPëvÊ`·V1/‰*¼˜3ô$¶Þ²ƒvr¾jƒ™Ž€iá€8— ,¸íã™Ájá\6Û.z„àážéì2}–Ø0{zÇ{z„‹ 7¹†L&LÌ7 ÕS7dK£‰ƒ‚Ä–:Xf)‚ıAbÄU8@Lp阖à1Ô3k¾§I˜ÝD†ÜñT\ÁöAa& ^¿ÛEÞxðí84Û9¢= ˜=@v¹#ô{&P¨?äŽÜ`K C€ÝQ~GððË›,rŒíö|Õ)øÛË$ý«ÿ=`ÚÂ+Es+AÒ„A–ƒ¹Ö §0TZ%h`·Ôü<Ágà])qctän@‹ëeZ‰3 ÜÆÀZJ‚A´T–`Û’@ÐÉ¡$05$ ž‹Á.,ŽXp·i°=í°’]­ì;’"‘†Ó’C³9¢¥”*ZäX¾&š_x†'y†ìˆÓ$… kB»• øMO >BðˆŸ:àÐÿ= Bÿ¡ÿK ú¿ùqiØØ% ;€¾ôWýoymú¿ç¶8Ôÿ*Ñ 3ê›Úà7Ÿ@)`SÕg§â@¢Èr²Ü?AîÿOîï}vª.ß>¼ÔLIOQèíÁ>¡á–Ìî ¿gd›MQ°ã“Áo(x:¨iú6¬Í_u0òÔ)¤{›¦{ Áo>Œ¼µ>€U Ë¿<ÁcÊ‚G[‚¿rm÷#3ûõy÷õˆøæÍ"dÍSào‹u€}†·$C¡û¼SPûæ`ñ ‚Ç‹P" MvD$8Øž?DPx†ƒxeR@ìn”c$“0 ¿m+ÐÁ‚ˆCe¨)`†žá×¢ìãÊŒ÷/²½÷ÕĨB~ È# HOÎù¶a@†Sîz 0üúàö„Àl?ÌR‡fÀàø ÀžE…8Èa0ÍK€¤à%&hêPÃ$l~ö&á:&Á€øˆ#ÃEìí" áeH!AãæAQI@%NNá«SÈCQ€xµGëãúÍŸ. 
Ä{?§Q„ÆïÁÿqƒÌ¦=Û·?V4­f˜‘<¼iÌæ šNp0{Še]džW¾åÐQ~¬¥KcóÇš¯0Ù̇¶ø„Z–Zd fnÓê±".Â\Õ«Òµ2bŠÓ]Ã:¢ê"@À$\y¾­ˆêÌ&a¸I ƒ¸Å$“G0L¶Q€Ih1 &-@ðòó æQ+³¥²ªà#v=Ÿ&` €€­ @@.ñ€ 0›¿ð ¹ØÚØœÔ7'@@Àî €ÎÀüÀ ÀXi?Ÿ~dÞû L9íU@ýè»>&XŒ}úH `.6¯ 2À< ØÚ¤Ž `ëó¹W!ûF ûFv(rCä† `ó¸66 5@`¸÷Kzp8@‚ð+rˆ€ððЇb{G!Dz+ø„ñcxOX·[„þÍ}Ò¾ãÙï}03.åÊM›,àà"ñ„Iˆíã ;Èó" ž¸ö€ ؽ ¦>ùžÞR]53<¨1· mõtK ÇZ’.BxNaþ]ܜȃ‚‡©#ØÁÏœ@\9ìù‹±7 T‹–,1Qâ ^•žëù³ÏâJBS€xÕ­Ô«‚äз®®µ4ÜÓ&a¸I8ee=&¡Ø8&a Iî À„“˜Ñ£˜:±ÐäLË”=ŠÙ®á&{n)AÚù×ù¥à¦!GõÔ­‚üû¯3"Zü°6U.ïµn¼Â$ ŒµÅ)ÄÊI˜„áUg?êù/ú|‚O£ “0Ñ$t@KòüC&a6tpKÛá<À"<É"X $¥„*%Ä!J0 ››B€ á71Âæa b„_k‹yä2âõI–„Ø©¤8©5² ­=Tð}™#x– HÏÀG˜GP-°Ä"œL¹ÖÔãDÙ¹ÀH`=Xq`m<§è;xPŽU„Ì`¢EÈ–ÜPE³Œ`fó GËkNlä¥÷Ú84<%ý™)qÃáÝùíõŒ$îb<Òã/B=Ì00²¹ÐC@¬ÔU[8¶Ü²}Pô/<æ…qÕÞéÄÛãá--†H[éŒqÕá÷—ëà–W;8*ä_¨6Â5tžÐi©.å’“ì4 z ó—!<I@G‚ D&Æ…°`øÉåÃZ,‚¦*xÜwIȉ"PDnÌN“=@pØBñÿØ;ÓI®ß(Á}¹…ï¿’Ý]C,Ì4ÅtÆ F?F@ P~òq ®¬"橸Ëp‡óðº$Û^›ôúðчžyTfºüš52†Ÿ*-Î䌕!ŒÉ”ýfâ3)d÷ëCà±q¿a¤²$¯y……A¡z†€M¸ìOgjÍÕ)ÁdŠ0™&ŽÔš›¤U…„õ÷=eˆ¶“‘bZm崚ť1ò~l6JÌ­î 1C@3¶‡¾C@1†Ø÷…º†b‚žŸ„“o;aˆ€Ö ¸°owaï“&μM=&:ÛšåbžyHZI&š7¾bb<É(œõF@–0—%Ìì:ÚKJ1´1O™>0#2~¸·Þâ«M~•æ AmLÛžæk>`ˆ{8‹;Žz…ñ|~LØWÚð÷ Zî„а›yÙ+޼MÒS1¯¸¿ÈgÞRIbO]„µƒ@Ðÿî˜EøÛ¡}†‡â )íŸ_µž îF0l__^ÐËG®B ªª ±\!ηò "¢»òûC†Ì<2‹²4!ëyHÒ ‚™Pˆ|ì°C@G ‘Û%á|{éM”U2#gØ?ÓV3@4ÅÉ#@¬fr„Œfõ†içþË`#Ë“ÖÖ]‚{ÒuÛqªâ$8+ì¯;IÛB !þ.葌ÁÙKÒN;b_'e‡8èVk9~s>Jv”¡·Ÿã‹3F€HKWŒ6ÜÀÁuä䢛ìí !—PQ¬”bç2h&•,¸³ìwg9Iÿ ᪂·ÅÖ¥KÍ¡HÑrrÃú\²†ŠÍM…BûSe›ÂK  z&WèÔCö°wîaÛЬ“ ½laÔ±—ƒÎÄìå@y¤ìhâ¥äè[¯uË‘éèßRÅŸÙºõ‘ªC@ÖÓ½¼nÀ_%#OÓ“4 ¶ò÷‡ˆ‘«R¿ƒDˆº,7rmôànÏß•N-}|áóÒ™iÓrNq r(F´¶`Ïz¥áãL%ÁDɰE¹]âê˜),˜µ+ï·;€^63¿ôroâ&Œ)¬Ÿ[¡˜iFX¿Œ¢!Û¢‡2'vûëN,3ã¤ç'\l³Öw¡ëÒš"ýäh9¬ì9ÍÔ’êüiXôoO"ãš9øçåÝ‹þå<¨\D3Šæ>¿ÄòqèC›0†™W®¼Í DŒŒaÿQŸ˜¹%ØxZÏCžo?bœç' £9¹2Dè ©öôNFÊ ¯Ÿ#ŽîäJxˆ€V (ì$`¢¸eE60@žwbÃty»)η™Yú@@îìºÐ_½Ã~†€¨Ê-°ä¶³ã¬ àákŽpzåàe0b‡Ò#Iâ&cHWp€>¼§ÒÔ€ø.%ÕEÙ3@¤î´ìÜ€" ô·@ÀÏLeÍÐ̘D[¯e| ?fj‹mäEX!l(Ghg1 ¡¬ì6ÌT;µ¬Œ 3¥Å.QÆÑ®å<ä•11‚DLÕ8ÚµÑ?O†‹ÂÑ®°Í  –v e†€8Y¶Öö›gùýŠ ¸Ð²ßÇGV˜’Û¬pÙq½Æùê<á„‘BaÚPˆå@¼Ü³FîçDF * ÛsˆºžR­˜RYDÈE2ÑÊ4{]ˆËC†^E¥§¢`nœnxî^ü! 
¹ÏŠýIC¸vá‰ÐD%j9âWéŒBd³Öæ·' tåÈÎlQ#‹ÜÄ ¡#@t†2ÆÜ¶¹MÊ„Ê áb; vuËM€Àó§Æ<Ûeðø8‹Ä‰á¿jhc’ðôiHÂM$áÊ@’€;âóùƒÚòþ¡¶B*<0Ô¶Þ‰‹z†­hG{zÿL›µai¢Šœá©Ç]ª‚ÝíÇí'¾Nr7DHVªûŒx]úáb–óP×HŽð;p€ Üy̹ƒS\áñþØ;Á]]ü‹IBÂÈÃÑK€ø‚@ˆï1ÖK¨€x¯KJ%€_!ã(D2€_fŽ|¥ñ2ør¹KÈ@ááÎ&AÀæ“(=!(|§}$áá’ H›‡™%aq 1JÄÄ„÷ÂäMl…£;a !ÉDªÅ ÿÖ‚‚ðð¾:mÂ7!â§êP(ÂÛiibb‚°ÚgÇ@Œ"& &|Å„,( ‹P(¿›ûâ…%ðð§&+vÅDï9ÃꜯH‡»(ñáë_H"Äç¹Ñ*“å_ËPzžPñ„èß÷ÎüÇ_â¿þä$ÿOÔ€Ï@€‰‹yèUú§z²Ì.¼/*âëM"'ã©ZˆX¹#ãØ~¼–§Æ!Ä]€àL ÞÓ~Ðèø©F‡B _âÒ˜Q„2ÃÚÕÊÚ‘!j=¹©"&<¸ù­áŽ+TëïÒùæJŽ:Óö!¦ j¥l±qÓjF\¥vOõõ{PfÚ Ûy8j„‡<ŸŠðÐCt/¢PJÚïÏC!¢Ô,’°q³fÄÂG[Ï…§×vIàˇ$¡;+ ’°Pf Æ^ÈÖó 4ÂC©J£Ý°³á7!Š0p4D®Êâ«ÿd—6ñÝœi?!exöI€¹E„„»<#o“3B~d?ò$x5@¾§”üŠwhqhà>l =‘BˆoºÜQˆ@ˆ¢ÌðôB*Ï„ïÒ/„$@@À“—_FòD6U b°3¾ H æZéÊ`HÂçØ<<1,…¯³_ ÄWñ€/5CŒxx–  àá¾ ž'Ò¤ß<¡œˆ–Šð­%P„‡¯7$ŠðUKZwa e!Ø2?6GÔ(%kG2B€UbÂò—“&r‘F¦ –¯ÅóÕ3@„I FlM“™‘©°î_ï”Ñ#nk’õòÜðêÊJÂÆÝ8Þ°=k´KF,8%›¹)Äò©”<_}ˆ"Ò(Ô–6š§Œœ‰=h°$aùâë5$œ¸÷´>e¨óÕ@ˆ¯n”^i9DšrÈÜ·‘ºSi%®AmWˆºjdGVÊŒ%0À°}Cò ›ybXvzUÛ#† ]”r>NÅþb(ƒ8ùäùªP'œf^™aþ‹W…¡#<ôL„ˆýµFá§&Þf!%œpeze÷zæ™nÞÔs¯ B¬AÀƒ (Š&t'÷óP ÀÛŸÉ+´~­´ˆqïÊAŸ¿ÇÔó Z <€‡ù¶”Ü0ÿà2B··¡¹°’€‘õé>©Ht€€§Ú³’ØÑNxn!©©Ýí„•ŒÀâŨ%¯ gÐöÏ} ´:7æŒv&‚ÑšaÁ„zñ:jhãU3¹,QZj„€6MAq½"´Ä„;š€§áFEH!À=©1}¼ÿÄßHÁX«‹Ã°âzƒ©£‰ÊÁy2œß‰ÂÊœÑFÐtI¬´n÷Öó«)g€hjC!áÁ’`çåÀ4ºÁàÏðÐê¸ ¿ó¼ÛžiŒ!ƒ•Ý…™¤à¤‰Ö¨$­ÜGÐÂ<á…´ÿvÍ$/Wþ„õÍþiô™BR‰tâHÃB{UәѢC>ûyè™WcÒër RÆÇ·d&2”–×§ŒÒ2ƒp¹"GxìÙ–C@´,·×+‚æ šjŠÂâJEy5¤Ÿ‡£Œ°^b„†âP¬2=w›ñÐYGÙ¯=²ÚfE.Xlz²"s`du¿¥úÅT#<œI8ë¶>BÄ #…æ“.È!!bãtŠÍÐeŒ²ÒvI #f$¡©0®´óØgTúü×D–°]úª™¡ö—KŽ&†Ú÷'Ì4§»™¨1Ô¾òËÄL³«Šæ¯¨B v9é'g$x)î\‡ž¨5ºóùM 6…›€˜0üR« Ä% IÂöõx»ŒbcÆcï7\BÑÜ¿ 7Ò ö0Ñó/ÀÃöµ\îòHê_tbú—ŽlÆyfQáUù\{î ×³RÐ|Øï¨3bÕ n3ÃDwÑDD(Â+‚\Ô Ä¿p¹td{1âé&l„›ô"^ ÄǵD̯@ _¦|#Ó P„Í\¨4B>šQÞ„§;9#M|8 € ðô*¼ÄgºPiÓ€þœ‡üÿí?%ü2…üOô@Ïo§§¯˜ .þE‘onn¸ ˆ&#}«âëýa×PE<Ü$^´#c€é'Šð]±œ‰Æ)EP„ôÞ¤ EØŸ3êÐ#"=´pt?4bù!"Š‹¤ëË q‰N(DšuRášÈÊQ‡ÊÜa¼ó™#´s*ü¼Öóà51ý–ÎÔ©8'óX÷¦tñŽÆ³a=1²T—®¡WÃ~ºx„S‡/øÒœÑFpŽr8<®$`$Gå8y"xªËï YÁÈìÈj5áé×BÁƨ%>9&äy“bÂvJG^Ša¡¢°u]iÈä7!Š0â#•ƒ8ŸA<¬w‚'›ˆe–.öÛñDÝ„d wžk‡"Üå ­ áéo…"@¾|žõò`H’ðd¨¾ÓD (ÂÓc‚C ߊ@6ÑŒ¬`:@`Lq¥"Œ<ÃH ͧõÍ'¾²lˆ×ja·eåºãȳ!B­1µ¸^èªßΊ|e Ø€~î»!ª,`›rIÙoªdË ¤ûÓF¹S)ÑŠ…·gãfPVn<L€½Ö[°Ü°qÁi¤ÁÐÖ©"à¡í`ópjß„< &–†ÊÉàá&öœ‚€+ÒP(—"¸AÐj€"@¾=ÕŠ€G‚0:A@ŠE¸É*ôL§ÁŠÊ1òàNƒµs€’\#p7ü»¯3½&/3k𰟇™œ1Y,`²øàWCQ69a¿óD„`¦`†"Bž"‚ðæ!FRªb”•V*‚ß„(ÂÌ£a¢Ì(äæP„›ê EF¢ý¼3+ȲIQJÜi™Ä#´{r€§¾ ^ý„:©!ØX<œ¨S)5ªE½È!̦«]w0×õ\I>öj—˜HpBn0˜š7áŠðSGzŠExÛdmwáÁB!ðŠÈ@ÞÈ ¼²5ðÞ¯†‘å6ðpøbÆ+<¼õ!FÖŸÁÃ}ÌØñ¢À:<Šð6cwð¾yHA >>Žd\Uh€;ÀtS9ü1ö‡„®›ðE¸ó:<ŠðɃµaú~):ÂSwa je„àÝ(4¯|5ØM@L‰ åà<¼—âtˆ‡m¼"ûŠ<”T‚€Ç®Ê+iê7X”žÁÁÒe„ýò3)‚GYÂQkeïq†€p(Âþ½ù™„!…[ Šð\Eh=p¾eÿFÔÌ#Rˆú$ P„È!€™P\Šÿƒ‡b.J4{ôñwE˜¹?Áƒ55[á©ÍèßAáÿç0´kE°Á£weabÒí<(P[^t†€¬t ð”L Ý…õ<ÔȈš›Ic eeǹF8™¡â~Ëά`&/ «,\úÜï‡P=ÃCcák•#<Ô+EÀŒÚzFšMn]læã Gž.…ÍÇGÝj‚r½"ôD§!éd„•/ʺ „™K?EÀâ#Šð=Ÿ4!ÌBJ0ðØEØ´mÅ£áóIétí(ÅÄâÊÙ„¸ P„™ËOExø«‚Ax§#§3D¹‹ Ii7!Š0’"Œ 4g¼µ¸¡Ë#x´:R„ØrOàPâ²âÊÖã}þzû3Fò›ðEø©"‚B UÂÃÁ!„ ¸ E(¦:OH(ÂcÝ“Š94 eÅL'MDˆ’r%”v–&vÞJ©Î‚"ì·éçáÁ(Ë0‹°2G˜!ÀÓ²°ö¸KÞg"Dž¼ÑàËþÜWƒ‘7ã¼Û Z #>*•~Òx)=×—½2¸’‘%>öVÇ! 
L {nû+IZ#<œlTÿÉÞµähwÝÆ]ðýØCÞA`eÿ@Î'G²;€Œî¦yuJ‚f?ú‡šu‹S,¢kxÃIà™ Q’Öp`݈€™9B6³†ö…žÀCS{Vbް²k˜¨Û\U5ãr<èã5ÀJ§Uqeøô¯\sz @#O :‡mƒWʵÂA€‘‰C±ø?µ<„†áÖ¥†ƒ€ÔˆÄáS¥‰’‘-JIðò´òåi­j 5 Ôppª(†WÊʦ!FÀÞ¥ BØŽ·<()žW–39áÔÍ0KÙü‘&šH)1­#Üzúï[F˜±O0–À`„_Á&jFÑÂâãR[vy À3┚ÀƒFx3‹+»†‘œ å™‰Éâ~Fˆ F0×$eÈVNcÁ¦ »”ýK 6p¹ãÐA…~ñF8á§j„—„0£Xœ „ ±ˆP­O ß0Â#LŒ•Œ¹(30F¸Õ-å[F˜y|ô <œŠÑ3Q#¬# T‰&fêEØsZއxjâ¼›)Œxá°üˆðKF˜ÀƒWNà!µÓá±xížÓA@$1A ô‚Çlj.òãɤFØÈ( ‹@Ó€S ¿àÁ$ æ;‹2‚±8ÕO <k¶A±x­-÷A@¨Ë×raÆtu¢fŒ² Uè“VÊf)R0]ÝÏ?o²O–2Kü~cöø0¿°üÿó÷‘ÿËŸÿ÷¸ÿüóÿ~ÄWÿ’®!O×p~ñì¿ßd1ÿpŒðmƒ6BŒ  Œ=؈€™²°~Q!%¬O #líI`„K"2›¹)@Ûá mÀðð7"ã!A‘ª+Ľ«¤ÅªØ6Øú²Tá30hnì®úþGâ.BÕŽ‚`{:™u“’%¬›lòéK9űg°u¾ÿœAHH!¬orˆ+X#\ך !,Íì<ƒóõ#ìQ¢…ÊrllzHš)Üœ]ø=§™‰€§TÁ‡d' ò›{1ìh¶ÂÀ‘žAl§‘ö¤V(1;¿h4Ó†‘e€ø—æö™‡Âhrƒ馇¡™ï?5‰ ¿®ãÏê`x m]6î™eã*Ngx ÝGüåV¯¡­/@)3iàT~m˜ø/zê™Ù/®I‡×ÐÖÆoÁ†éÿÞé¿ÏLÿº²ÆC[{1k’ë×ÇV³Ä ”4!ßä/À3O1H|ë#pÌØL4•Yã-àºctÍ͸B°v$ÿÛœ…;˜¾xv á⢓ò þÙôÅûKŒÿ“ë~ŠïGÀáN…[S+k€Th <‚¯õ‚*îó3aú²ŠüG/§ßÿ¢+0ÜŸ&ÊA/™ÁCšâýgÕ.Ø °Z5„KŸ?!×´ˆCù³éòt^¼Œ`ºv'Ðrª‚Nà²3%ÆR‰5À½ù¨°t24›&3…Ÿ«:”ÿKaÐ õ^\!ÐZŒxÖ§dƒ'Ðm õYÿ\ ÜûýÏ­‘b`«œFLAJIÏPnôLà»JÏ‚»ŸOc†Rå4"·.–gQà:Ì} o+c< / ËÃ3µà7àðÆ ï¢ó/¿H4òBņK@›¾xGàÑö#ðù…ÀÃïGŽGà‘ã¿%ð^¬°vÛDõ<ø*)(û®à„r&w÷mö‡º˜ar·‰êgâ^Ipo¾0Å›01v¸–¾ÕùÓ3Zþø(6.Þ{q0cì.ÕŒe¾ëÌÝ¢<4@k €‚_‚Àåz €åÀÌÀ\xf»y`ßì7ñýC½û)à>-°–‚f›2£ MÀLÐ!!0vZJñøÈiŸJ"n‚Íû½›|IZ]ø”¾Nô’O¤ H!6w5³çñ 8@ xe øÅÞc€½Ð3]€Å©P Üzè÷;g§‘—A!`óDp=hÌàÁ) –Ÿ{çĦ3 ÁS4q òâ^1Xë:­ÀK0Ò$”±wÀþu“àù<€^½J Fx ZÀw>$$ˆàú6QŸžyQÞ2>"CB€É À#ðØ-FàqÿÃ>øKO@(º<< ?5s¸ùspÝ[ð7ðs`OâóG«‡ÀC Þ¿«ð›¹¸ô"ÔÇÎ0RÀkJ5¤0Á4\'À—2A=M3c€Œ<ŠÁëàë‡ØkÄVÚg®Gv±³62ÂÖÚ°fÜúsUÐá'³)#L †›ª,%áÚû6â/þ8|34Eá8H ¹¹ÿ‰‘&)¹)àʳBHÛå!¤#8àó÷˜á¬Ðuóf=  €­5Ò °°H9à²íßø>G°W j2‚5enÀ¢`&ð&M„‹‹Wk™¥PˆoZ‰»W“9¾ÿ•(~„f:€ ÂU¹M¦>øVÍ‚(p¹(ðÄjÄ%¤…JúkÀo”hx²5Þƒ66ƒ>ƒ€T‚ô¶kb'î‡õq¿m=¸5YÊñðwÛix© ~Qàiè‹=5>[НH ¼ðnúàGT~Útbèê6vu:ƒ)Åh÷:£çxÓ†ÀwçãÞHµg'ÉCÖq›ãó ¼Š ]gäI‚QÞ¦*/fŸ]„Ào üÌÏ’åÝ÷ųHÀÔuUqÇ3wBà/\Ù71uÅ÷:iމg7ÝÏ>ÏóŒéýÅå½R{`z›[ë ¼’/Ëöc3+9¦©,`_'ʲ0/ í}ñ–3W­ë†ö/¹Í0ôé[JOs” wz–.âÊ#=Òñ{·f3˜àÚ¥Œ suÁFîòÜÇg&@Ñu9×KadüŸšI —®¥µ==Ó%¤Õ) ñ¹íŽs—0%t¼«o3Wjèxï³æ.©8_=RþÖë š3̯ÜîÐzmùÄ݈˜ Mß>ðŒ'w“œŠŸv6}'D3¢Ï>¥2vâ µšA”·a*¼;=Ø3§;Z"L0\šâ™±ìk©¶ÄŽÿÊFa&1hq:¤#÷qi;‰@¥ÁþÎq%¢¬ÇCûÀ³¡Óç A±Dt­€Xpâ¿ðÓ÷!ò7íÆ¤h館ž©*°Ó£ƒ­—þ²zf$áž(÷ĩσƒóë—À†ÑÅ=ka@¸` „xÿ£M4›"ü=OLs.½ šü9q¿óƒÿ×µñBàOàÿß_i3ÿý©†Ÿª‡òÂ?üLþØ‹ãü½â¾ÍNq¿“èw wx<ãoàRÏ?üeLoçkï„mðJŒP¾‰˜álðu=I45„[kôŒÃô$ÄÅëÞóMM¾7¾H…øE&ðæF„&oSàgº<'M7þ:ª÷*r¸8n üLwbä°ù_¹‘3„$¸8.]ÌìGfvóí$~ ¸¸íÄò£Có¾lV‡‹ÛJ1ÇȤï4̸µ××7xÎÁ &Xèìj#:Nw%eÔKß~ä©™×÷hgÜ*ÿð`>"ˆ`gI@O÷ M=„ƒ&æt#ø*E 9\©;Ÿ@gD¢8\OCM£ÈW…ˆ( ~®4°§fäˆ-Æ'9+qPÆLzä€EÉNUꈹm+}ôG@ÀÆÝÔ™A5£õóCyLgjDµ¥!±\‰ÆÇ„…5%)€ØþÂÐ#+ %Å*l'yÔ'¼Ø(Íaoqï=6æèÄ“ÓZkü‘÷† QÀ`- ÒjæB†„pë¶3Û/‹ xzÞŠ— ©à'SAèL*ðHÂKÓVÈSf#8£vì9/Å?:3*øLëŒ+ ƒ‘é¡Uvà sç°h&tK+T×úá±s9T÷¶‡N_´½~J™øì0Ç ¢$áq²ÑÔbæ-)D+S@ û)ÁfðL¤`„ベ2Q‰(à’¼weˆ ôT ܽ㛩4„ûH+ÛÆƒp3Ç™­½GÎl´1áaañ¢òLFna<,,Å>Y3ãİ4œY[ü°C8(ÎRÌ–Ïì‘‘ vá¶Ïâ*Z†[=mD´ŒC¤µ^É3D æ‚ÙÁµÓd‘“ ¬¡O_;Eô¯d‘`¯lÔˆÛOtKÔ BU0SÜÉ *åL…JlÐì7L¶¡Î!S ÃVbøx_À $¨p™s;/ôSCxÈvƒ%Þ^Z°ÁA;7]w.ºŽL”Œ$ ™aùE…x˜j®’¸¹²ÿù)yíyÂÖRAâ‘‘£²PF ÛÅç¦÷L¢“)=å ÁC€¨"ÅúÛÊŽ3ÐVóÔzJ°Grd¾ Ÿ›žÄz@xŒ¼G¨7;ÎñÜk±(A­ŒÓ æ 5“#‚¹ òÖª¶H¸bj©¨­ã!"R,À,~”¤™ ›« YYÊõ¤ŽLœCTR`™°6SœžÁÐ3®,}ÕžðÜ\Ë=óÙbR;­•Ó£| 0=ytt·@D–¡YX{Ì5z{µ­ÒþäÌ0¹•2¢”õõ‚›ÎàÁ%Ýëñ Á3xfƒkÿµ®ýŸsÏê0iß©R›iìG%ä„õ9gjs•Àܵ7ÞR»¡[Ý®Iúœ}œ3:2~*kÊgŠÅ÷žâ"øµ‡€LÒ N[ÊùäP‰Jxz\Y$òÊ X9:š@²•¡KXo•¡=ˆÕ^ea”x­å àtU „å”h×U,˹Ð.l5b@ ¿‚é| P+ü”ˆ]€»0rIá5ÕbSį€xxD›¤â%ZxvÜ{ÆA&LyµL8“Æå¼`~ÂhñWÿ~©·0š‰Ÿj'”Jø  B¸]Ÿ Â_ý´rÄW ”°¹F@R¸}×Iþ¦J,àxø«ûn#E\¯a PÂõö05˜k°@ÀÕ@yˆòðoÚaàxø«7ûÈ+ƒ…RIaúÚ{Ñß0Â#LiA ›)!@  „_)¡²%€&`Ž›n×úæ8 ¥Œú÷'1€øíåñÄЀàáW¹š<<"W ^’1äD}â9Ê]²€X¿.ëY#€p7"8°mì*j†RÛ Óëñ 5R3xk¥Vª×FáüóÆÁ§(Fj„à¦2ÌžW2ÂÈ`!Œ…ç¢W.½Ä Šqàgÿ‘ØÍЇk¦à¼ËþABŒäÁö8¼zžTÔÚ ÝÚ½JÖb3ZTGÌàDáacÇr¦ôd^–0è¼À8¡†BB“òÌ Þˆs¶`—®Dsåi˜ž! XŽ+goŽ+-:u„€pÁ6ìÊW`&„»ÀºýÕTc¾ùÅVÝÁ©mè•õ"ž! 
² £J/& ˆ„Q3^IÀL.PZŽšñú}Zgf“ª,nï}¤˜¨‘¾·H,%Þ„äðµ1 •B;P"|íúsêç0nû]`˜'#‡œR…ÉÕ‚‡qEµàµÂ0U‰)¾™‹ L½¶otheÜk»`›‘s&W4醵ö@øˆ‡Þ"âE¼ ߈‘ŽY®V†\ñµ*¡H-›^›)e¸bÂì½3†ÅÄšÈÖÏøí#@¨ðÉ6®ŒÄ^XZßž*Ú!#@ÔI"a¥H˜˜7+a#”_;‚ZbŸÛ+ÞK@Š¢\ôæbTfÁYýµwwJÉÆwÛMsùɧ}q‰Ó¾?d1D— R÷×dÄg½Tˆ]°º¶½¼\éÂÂ(/¿vBùÂŽ4b%#åD's̨¿öj_9¡œøâRÂùýÇàÉ‹«‰nG8•!`&¸9cMeayà©I½¨<V Až!Àš õ¡íÁŸ”¡Ðu˜@HØX Õ³F;ˆ²-0’¾ýMЇgFâ¨Ä€ÕÕ{¯&Uˆ2;ê…ûF ñ‘G!TÈP,xñ¶jXt^•‹I#1 Ù?W ൩Xü Œ”RT ¹À‹§GR쀡ãíåytÄô´Ò›p(ëÍ»iur¬¬o_L©'gdbº7ÊEï=ŸYmBæ8 º°`Ø>2_ÖA]Œ7a9çkœ¢‰%Z@¬ •£&Í¥„Ù£íâ|ë­—Ñð[ÕÅ! ÂIpoweâ0ÑeìL¢ÂÂòv¸Ï·>D"`”þÞ–SUÕÏ®­"$ü>ýDä%< "ÜX3¥„ý¦Ù™C@X”ˆõ‚¹fxè” ÃzFêJ%L8¾µßKx3ÇÄÊνg™!À¬Ì!f²ÈÏ᥄dÜÞŽ¤§)f€†f\ l¨È®hêGuˆ ˆÂòË‹Uc¤ibùee{z&¤¤ Ö·? òœ_Ôß"ˆDY @lb“'&Vd?@8³b„i¥J° >Žp\p¦GgB€¸£ÿ L”Ä=gzÎ ÄŸ'CŸš¨4ˆ[òL~2¡!Äw \g€Ð*Û°§&ökƒLŠ3o!5R‡0oÎÀÞÄvQù9÷á#@„u)"ÄþæEæHóÂ’ÓXÄú´“j&BdGDåO͈ÊCƒ8\|öGúèJ\ {«ô! Õ*aÒ²rj€’P Îoü|ë#Í+'iàôADf€È*Æ ôÆýk),8‹a½n¥H¨B9q%à‚GFòUú™0BÂÅæŽ*»‘9®¬Œt$]U”Ñ‘Ü߀òê 22PKØ®rªEíÆ‰»o=/ö! :fMûǘÚgd£ëA™äöe‰|”GÞˆ¢àdˆ†ýëT<Ó€èŒ,å¯W‘ý˜M¤Ì'§I±~)Ÿ'Žˆ[4D<•:D–3ÎŒl¢žª „"%aÛ±±g­2C@FF]÷‹†™uea6-ˆ†å@d>>âõÅ"Ö„ÂÃÝ i¾ˆ†_©==C!!Ôpxæ‚aè䩤†¾á浯 ÒÝhg^09ThPWŒFïëh?6qœìC@Ýñ•Ãñ3•£V+g¡gÐÈBµyß]ó‡uæ°Ð¬C¬ 3™µ¦€}¹ÀCCµ'¦„™Æ¾Ó5R@V¶lI^Ðs´ôK€@Hø%Y0â¸6ß)ðnF¼½! .‘ôd)BÂË×"‘) $|…º2 .ÂHßþF@6"$|“x#ÄñÐ5åE1²)]:òd´ji`éiZá~ P‘¿U~æBÓ°ö¶‚½gc[S‚Ä@ÈL„°®óUˆåKpüp €ßÓŠâÏêüÇè÷UüÖÊ‹‚€—ç• x¾6`艙%¨v¬Êïï`ÇÌèk{J3vå/pàr ¾žŒ|º}ˆò\\¹59a¸$lQа¾ai#Ûâ’7¢‡–'þx#n¾= Þ^mÄ£p‰J¨ÇcU÷JÔšöFëÈú̢⇙$€èPÚC%#@Xf3ŽHí¿Mk>Ri8/Fn­BS"ª ÇŠ/{‹É35ÝXQ^¹q9@+·+èúsþ"aÿ9•‘4Âè0Á –·#øq ¥Âç£ðF¼ÕºçÐl‚¥ëíyCœDr„ 3‡ùûkWl]IX²^Ù À;4 ÷ßΑ4²Â<Û1+ÂHq¹RÜýÈõÿ#ö[ÑlpÒ¸`ðÙgžŒÈÎRˆ†•²q&$´QnHn}ç'G.AX2‹2Ží#2GÞˆ<8„¡Ö´¿_=‚Cà ;%ƒ\BÂÀ áÏyÉpQ!!áA"AHxù&„"$ $ü)>çÓ6ÑpÉ**ø®¬Ü©!@E© ´ÛkrB‚ŽÑQŒCäë߈zdÄÀÓ-â£ÄòaÏw@x¥"Bì?t´þݪŠnÄþö”Öçc¸ 'åVk†%°‘€‘õ! Œ´û‡XŠxˆîNL5í/> ÏÁÔpx½ 3²Q˜(QiØ„rÎ!^ŽJÃþí‹™´B¢Y`ݵ²_5€ª‘a†a㒽΄5ËÄÝöbcœï~F5j*w"$¼uÉúP‡5Wª‚,D&Ï+ f^éÄ)ˆ•§ fʉ!?¶•1@gèø±­/}– FxH3-d+3ƒ!R³0™´-RrF$”†3šÐÛŸ~|fT-ÅÝ Mèíc*ćÒìfJ" ©‰ÐsÞþBÐÓj3@Dþ°å F€È™¹µ&m ¤•{Ð#;’Aÿ½<ŒAµí!Ažg°<Dˆ+KÏàåð¥~RG< ëÝ–4gBB* [Ý–@\9'h \Z&¦•ÃÏÏ] ï% ³8óêÛ[ÐõèÈY¸BÂ/…„G­È")Ì%½wRñÀÎ……}%㧆^2SX$í# ›Ñ…•%Ò`£ à*=AÀ‹e@k*†/˜=ó 3î¤ê ƒ(XY-êºÅkŒ+WVôð&Ì\òê‰&R2™P£‰ôZ3ÝC€·(®´¼¶‘|èlÂnûÆ‘cŸ!àü‘ÄZÊÞ˜ž3@ØÉq¸iûâ"=Ù=„—7FM^¬%©n¸Ìh†èhÂLúö™t,f4C¦ú ¸_œÏ Õ&¨-½ÕO;¹ÊÜV®Ç¡‡²ÊV"d•ë%ƒŽ½‘÷‹í9E>S¢Í ë ûÏCØ%}ò‰0ûo¿úÌ “‘trÌíuézÎ/î¢R±.G®º *‘ë%„==2fÔ0JXÙ¹š B& ëÝsHG€ðt˜nî¥ ÀÃßK°9b« ®9Ù… ¾ßqd×<#wbÁÃ5•ê9 .I:ã€ßâCÏ Ôˆ[ÎIÒ5Šuêß²ìÁð‡‡OŽiˆ/ßÌF£á[DPAD€á÷Õ!p÷]ù&̨‚ ÁÞåþ7!f"Â'$Fb/Ð#š1¨¸KwÛ Kñ´O–ŠªÈ + K!¡ØˆZÖó —ð€ˆð[ID˜>Nç”’VšsŒèBq6M¿®$ /!ºpÄösæ¨0€¸ædžˆ—ËÄ/' AÀË÷§!¡ ¾f—íqv„< Âð÷ Hcxy^pB&fª‰ÁAŽzò‹«‰qž„‚óÖþˆ0„CËoЇd¦–˜ZY8HþÚÙõC€1&—··›èIš¢Œ“a̷߫Ӈ4CskcPq{¥‘žê‘*CE3f—WІW€Ga‚‡1ô®”vÇáÉý<¼©’npüÏžâs˜’á½ë-ÌDhEl7_¬§y¤;™I’„âóÊ0sx1™Ëq´#gV²4\1Á¶r^%f°Ìï}šòüèÀk7²¥Ý0ĺß1fêíM ¿ÕýF91"Šˆ©QGÚ>›‚Yp]ø‚,RgDD’B@ì_Ì™Å8q>å ñx@Žù[æ›…ˆ€ˆðL <€‡¿ãl €Ã_Á@ÀÃiùò>å á“?+û”oB«¹UcbÿZœZÎQŒa¦ nþÔH•©¬«a¾³±‘KQÚ‘‰ÆÔö‰wzF§ÿˆ¿5Û”ˆˆD#?ê†ð€ˆð7kèV„w„B@@@ø;þì…€ðî€à‚ˆ€ˆ0|ªÃ•Íacïi†€ð ƒ%ãþBóÄÕÐ<Ô‰daá­{Ó‡w%øm]\Gx8OD§ "ll5Ô™‚™Å N=ä]‡ð­Œ#*ñ„jÇ€Ò…fáANè (½uáC€D¬5öóÐ>ƒž´Á¡VVg4‚™H#o\ù&ÌÄó@ÀJFj‰ê\^jÁ÷! ’™ÁJl„€Ã¡Ø ¦TgtáÉSÐ_Ú_M®‘î‚~Îyà(ä@(…""¼V#°ó§ 6úlÚ uâRVVfˆ)ä û+C<¤95<“^»Üv8‰b`"eå›0£ ë³ÞˆÕ…•µ‚™Ü°9$±ð¼žé/ÉÉHq³gí(fx`÷(h„Œ|ÿe‰ÊÑJ…#ÕÉð"ìï/^‚ÂouÔR¬°É´òlÛHéPÂÅpÆo;l·[«‡TF€°ìÀ½¿÷z%¨¸–FXW¶ªG8ŸŠÎäkÏ»©¤ TÁþÓN#§|TŠª YÃ~F¦ÝM+EâHDÐ#FS°å²’€ ìixnxfx÷$^{&^ÏŸÖB/ra䔇ª[)N7ìÔ2C@¶&&Wn9ÌÄ€hK ðVã]õ©Ÿ9b@ü>z¾Eð¾7›F²†n\½b jD3†¦q@1¬Ìx„€êìD÷q½bá!)©•Å••Å‘~ãçn8%*‹ë#‚ÓH÷1­ùdàa?#½§LWjTš_»)ýYƒ“Æ®üNfÞ„–pÅæÛzov™É"³ÃÍ1¡´òPøHD(Ré‚¿Ò~?¿--¶¦ÂÄÚ~~“v:Tà…ØØ›žˆ¦¤­ ǵýŠa¤7m*ªQè=­Ì"y„U ì5ì¼Þ3tc÷ñ ‡Œ•¨Vܬa»CÆC#½'SR8ð­çµ¢¸*pìç­·^ø1xfLöFÊJ~¾„*ëSˆêKxÀqóQXðöµ«",Co]kQ‰í.•¸zFtœo#‹#BgÌ JRâþˆÐ#íçNù8& ",ÔÆ3(nh6‡{„‡:4t‡õŠAf4c3;N„nç!†ŽŠü³øŰp¨}¤û|~îFZÈ!þÃÞÙ,YrÜVøUüìHü&ॽ°^8üŠ1Õ¤Hg¬…ŸÞY·©ì¹]ªŽ[ ¡òb¡Šâ45óáHÔIdúޤ‡Ä4îu†Hy­ &|ü§zÆô!ä! 
ÉÙêÍà§ÝË,Æ™¡î½å?4„<æ"›{•ë»CÎ+!Àˆµ='}>` ‰hðèÁ•®lO@«Ï0+Å F@ÔÍøç}¹áŒ¨Œ!Ö„ÊÏþ,te„Ë\jh•žýÔÀ•*#üÕ«áuj¨ŒP¡2Â_2BÌ)ÒÝ êf|ÊSCÈdQ¸cE@ʯÏ!] A}mªËð¿ÅòH µ§=ÿ·†½ýgÄCUˆ+oáåæu©á/‚bL< 8Õ¥†gݧ%F&&µ?)¿£=äá1íÛÅ·JO{ˆ´.½ž÷J:HÒpl]ëØ˜ÿž[ȳñ#\FY¨xÈcGðæÛË U!ž¶B8w«‡Òg„ÑÉÄö¥ßÍ¥2BÆŒì¬^-Bþ¹bh”kë3ßjP–® uÏéo;õxpÃjRf„&Qšv~lŒP0k´,!€MDj;F~?J‡˜xPS¬…j¸ M!ñ 4N‘õé)åRæSƒ¸6jU!ÒÇ…¼«Ö‰;Õ©áiŸràØ¼–l>ï$ÉŒ”©ìù'Iä!ñàª5GH™zL8–‹ý÷ZBâÁ›Q®Œ0#ôÙ²#r=Þr…§ bâÁ”°üéãA)&dd‡^7SöŒ!ßž¼cÓzÎé wã#2BG!©SdÊÕìA0Î \&æÚ–ñ[<ˆ£×sN9·#hHtäê.0WŠ˜*tVìäÕ#<íeèÎ&‚Rµ.àX‹¸×°­X3Òò#Ú¯E›ùo¶HL~èFÄ5YÌ?Gˆøøh(ⵚý‰×#ª¸` /0FÐx0r¨…)Ïû “¡·ÞZ­Pº@Fˆ0¬~QU¨† O;V2’NÀõ©¡ ·páÐêвE) ÚÄêžSþA"¦ŒÖ ÜOû.ôˆ€nfõ€OʯÏÔGT“xccLF`¤úÒÿв=ÅÌÛÖ&T…xVÇ¢mûsZíf¿@…àˆ9‚ƒ5£:5ä,JL˜ŽªPð¬{6ÜÙ½ºÄü”Büj£A°î\[7óûÕB6(ù  \;örV ‰•Ú±w…¹RÈ©A ¥sÝrɘ4äÔ ŠØ¹æù{ ©NFV›¹Ÿw§Ú8@k¯÷[.ð­! BXâöàCá•.üzLjÖfT÷Þ.ðÆDă iMšS:”Bj‚m]Be„ü”B2‚óöVx½ú˜r§ZHàBÄåPª Joñ‚±®>>íKá#xÝk¸Ànöˆx@$Äš#¤tµsHÉøa•ÒkÐŒ`¬£O¨{.ÏêX³Û oØ+#äŸ#´€{ Fìä¾ÞQáÊßH@€°<‹ù3‚¬P2ÆnÞ¥ö#¤üÖà!àе^ ¿ÀA#zvl\ßžv²È΂nåPÊÏ%â6´I÷Ž õ­!¥g1¢Ksj£.TFȃâü{.öÒGÓ¸½ûx8ìcáÇŸÇßüîçÏ_>H{á?÷úíA¿ÿÿÐnÿå»×O?~ùîåOðò¯·¿:BÏm¿Á ÝèýþQĨýMv¿{ûò»ñýçë?ÄÑÜS÷8Êólákïàl¶þ"Ä Íàð $_D©0~·§Ñô{¸ïÒ´´4¯¯ÔÙl%”íøýØ‹ª€Ê±s÷öKÖQêIy÷·?”¯:dzŸ&#䥹„R§²…¯gä³ÙnY˜ Ó |”í:5Úi4ïÕTÞƒIœæ B‹vt-Ála;ؘ9ÀÁu Û/YG¨§¥]øà1•»ö¼4—PêT¶€ÍvËÂà"@‡Ù®SRñ´¼ ÷JªâMoii® Ô¹l)˜-Ž,ìÍÝUø [\§¦âiyïÕTçšn˜–æJÊÙ£ÙÞN6꽩e»NM¥Óò.ú¡O®MÓ9-Í”:—-P4[Ú²0 ¸GÙÒ:5•NË»twôûþÐÁòÂ\B¨SÑîS'±Ý’ðh«;|‚ú·?‚5„ʧ¥]ºû5ÕviZZš+(u.[ØëOaË[&’øhæuJ*Ÿ–wùnIÝ(YSMKs ¥Ne v$aîlì¦GÑ®SRå´´Ëw?¦êÍ sü³h® Ô¹l£Ùn6Ñ ËÑ©¾¬SRå´´{ßôû~óK®®³hvÃæÐ\B©SÙG£Ý’0µæªG½gËx~ñ4_(Ü÷üî|pÛF>––æõ…:›-€†²ÅÍ%º ”Íñ [\Çó‹§ùBñ¾ç—÷ÜùꘖæJʾ¾ó:›íÈÂBب›e»NM=ËŠ÷=¿{W-zË s¡ÎE»WRç°½¹D]´™ÁA¶°NI…ÓÒî]ϯì¤]µI4ÙºóqUÏïl¶wÔÎf»}~î¢Ò޲]§¤âiy÷®çwÇÃ2hö––æ JË íÍ$ Öĉ¢ÅuJ*ž–vïZ~÷2Æ6 fß¶D?s NE‹ ¢Ùn£B'—£‰á:Ž_<ÍŠ÷¿¶«TáI4™ÇAF¡¹‚Rç²Å`²7‹(hw¤£gTZ§ ÒiY÷®ßwgÀvÇ{ÍÎã Gh.¡Ó©lÁ-šííÖEï­÷£“BZ§¢òiY—>x/uÐ$MKs¥Îe ;“Â9loQFdô£Ý¯SSù´¼{ßð»s/µ¡¶´4—PêT¶°síbÛ- cë¤~tœÄëÔT9-ïÞwüú.MNKs¥Îe»ëøÂö¶ÖÄѲ•ujªœ–wï;~߇I 1-Ì%„:-€E³Ý’°wjÐŽŽ –qüÒi®P¼ïøÕ=˜ÔfÁtéÈÀ¼¾Pg£…Ë–nQs ÓÁøhÃ/f ¥w ¿;3mœ—æJÊvoÕÃ$¶o{aýàf,ZÇðK§¹Bé£K~7˜šæ B‹vÚ¥9loQæÖå éÖ1üÒi¦PzÇ𻳷c½––æJÊvoÍÙ$¶[¶†]Ͳ]§¤âiy÷®áwïըƨii® Ô¹l÷–üÎa»™D­KGc<Èש©xZÞÅSEåtËy4—PêT¶»×Ýæ°½lHš÷Ãlש©tZÞ½¿ä—vijZš+(u.[ðÌöm/¬™ eKëÔT:-ïÒG/¦vfœDÓGÿ{p7­júÍÄ¢Ùnë€y{ñú(Ûuj*Ÿ–wïš~y—¦pZš+(u.[ f{³‰vî ð([^§¦òiy÷®éWaÇnƧoÃ:æJÊv¯¦Nb»]flŠztZÈëÔT9-ï~xÍocÄ´4WPê\¶€Ìö¶V€ d+ëÔT9-ïÊG-J¤MÓÒ\B©SÙÂÞ ç9l·“M#§£¥e\¿|š5”î»~öhN¢É¢Öùš×Wêl¶J–ßl¢ÒÇÏîÇr0¯cúåÓŒ¡üŽéw狜¿à<šKèt*[ Œf{[b×Ä”ü(Ûu*êYÎP~g˯ìÑ<ý²ÅŸi’´çóª®ßÙlw¿ÑLa öb (V*¬SSá´¼{×õ»³˜E'¬8æJÊ$íÍÒâ¼½\}í:%OK»ðAƒÒFÓÒ\A¨sÙy0ÛÛfXÄíáä£lq’Ч¥]üà“©Úzo“hŽSj;8äUM¿³ÙÂnžÂv2Cï‡Û%\§¦Òiy?8øÝhâ,šÐÚ#4WPê\¶ˆÑlé¶!€\œúA¶´NM¥Óòî]Ó/ïSO߯~Í%”:•-ì<š:‰ímÛº»Ž£ÍQ¶ëÔT>-ï~Ôô;hJKKs¥Îe»ßýÎ`û¶¶[9:Öçuj*Ÿ–wïš~w®‘ó(|š–æJÊvÞšÄöötµJw”£lש©rZÞ½kúÝ™šÊii® Ô¹lawçä ¶²}€Cu–ÆÙÊ:5UNË»wM¿ºó¹ò„³ÌMk½>§Êª5u.[pf{Û·Ž½ÓÁKR¼ŽéWN3†ò}ÓïÞ5reÁI4iSU¡y}¥Îf‹Ì¡låf­’ ã±×†dÛ¯œf •wl¿{>–æ<‰¦Zï Ð\B©SÙâÎ;“ØÞVÙpƒ~”í:5õ,k¨¼³ëwo’§_‹:æ JË(íÍ'JdÒ¾D.ë¸~å4g¨ÜwýòžáÌ:§¥¹„P§²…Ývi Û›QÔ˜ô(ÛuJ*ž–vác/Òl0uLnÛZnxæ B‹–ƒÙÞ|¢ÀÖ[;Ú-á:%OK»øÁË©ìd6‰¦xÓæÐ\B©SÙâÞ~sØnY€èè< ×)©tZÞÅ~MõQú&ÑWU„æ JËv0‚Ù¾-‡Ýn3òÑæ—Ö©©tZÞ½ïú}èÀª§ß¶8æJÊvÖcMb{›²¼{!ë¸~å4g¨¼ãúå]š––æ JËv´ Álù¶v§5"9š…yšÊ§åÝûÎPØYÜì8‹&2´Gh.¡Ô©l)žímZØÔN yš*§åÝû®_Ý£I8‰¦’ó#4WPê\¶°sóxÛÛrX‡6R1d+ëÔT9-ïÊÏ©äçoÚ9æJÊP¢ÙÞVÙ™hëG»ße\¿zš3Tî»~yŸ&¦¥y}¥Îf;H†²Õ·õ°Þ¬u8–…uׯžæ Õû®ß½©iZšK(u*Ûý;oSØÞî^ôq´9ø £®ãúÕÓœ¡zßõ»wÕØ˜ÓÂ\A¨sÑÂÎvØ9láíýjå~-¬SRá´´{×ïûý‘°á$˜¹„P§¢¥ç'±I;‹M°NEÅÓÒ.|ðkêF“ÓÒ\A©sÙF³½ùDuœ€Y–T\§¤âiy÷þ®ß½o4$8‰&wヮ_]Õõ;›íÞ¶³Ilß.ÒtU9¬Ôuj*–wïº~uŸ&§¥¹‚R粅݇æg°¥í:#7lvð–”®ãúÕÓœ¡zßõ+;Ò¨{––æJÊУÙn׉¼Óá­SSù´¼{ßõ»S9-Ì„:-4 fËðBbÖè([† õ××ß÷éË7?ÿòúë§/?ÿúùeüÕOŸ¿ûþ¿¾|óÇO¿þðúå—?}ûúòùOß~ý7Ÿ¾ýöõ—/ß¼þ4‚àûŸþ°ýüÏ÷¿Òù¿}ÿùË?ý÷O¿ÿñõó×!ñöÏÿîó—__?ýñ`hô·?—ÿŸ~ùþå?^ÿ0þUŽ°ÝˆøËø+±}]¹½ lA1~uoÛ ó·#@›ôƒ¯"/ÿòúåß?}ûÃøƒz,'Ìÿµux2yü^Zÿû‘?Z’‡|]í'„Aoâ 
¥ÿDú·‹€/ýÏÔÿ×G·aÐ;´Ò"ý‹†€·Ž«þg äˆ0ðÕÿ'Ò¿Kx%èUÿ³†iDý÷QÄ´•þÕ/ˆÔ°ôŸ2 ! 8ÿo{œùàÒ³ÒˆþU.¾ô?óüo؉¡Qé?Ñà/¾ô?óüï!aÀ£Ü(õÒžúvð¥ÿ©c ýkC,ý'úþ_úŸªÿ2 ˆÕKÿ‰ô_úŸy $-ý?gÿ_ò/ù7 )ÿÝLHjü—éó^|éæø¯…èßG¸iÿSµÿš«pé?kûŸÿ7óêÁgDKÿëÔ€Íü#õù?­þ9BÿˆŽÛë[¥ÿDú÷‹€/ýOµÿ”üëëÉ¿nÿN ƒqìnZ·2•ÿ^›³Ôí¿¼å?À.Ð]xœ4JÿyôqísoDPúO« çÑ J¹ÿéz xíËý—÷óDlïÈ’qÝþËTÿC¸wÐrÿd¾BÜk÷ϳ©_¸™”õ?ñ·?‹wÃ*þO6üj©C+ýgÕDu7)çO¦_Þ\ª÷Ï»ø/¢ù§½šÿ'»øw øÒÿÔOÿ£n]ÇI°ÿeêþ9ü8øÕâϼaÐ=" ¶'¿ªþ?ÛéŸA¡Ôô/ï10âÓ“¨7-ýgþ_|éêâ2ÀÄ£ÿ/ýg¹ƒ—ó?íøÏBÚiÚjïO¦ö_cÀ÷®5þOÜþ‡Dvëù¿Kþ'å"àKþ—ÿ,* \Æßg{öo€ä{=û“·ýXÿ$ÒH°NÿOöìï/‚èµ÷+ïÅßý;uÄ^_ÿ3ÿ{x&!«úŸ×ýJ´m­úŸéƧ^|éªûw~h“¶½ý^{?2µÿ~ô}Z㿼k=$ Á­.ÿ<ÙÚOmÚMêê_Þã¿…„6êÿ©\ß¾£u®W?óŽÿŽÿ j`õù?“þ5<”û'kP„ýGqü]ª½ß™ú‹?®óÚþ?Âþ£(]¤ì?™.ÿ@x#¢ªÿÏýì—rsR­ËÏvëKZÓÖêöOÞÛ?Q`䣬Ë?‰Ê¿ix–¾¾þ§•ÄÚoÕmû`¹ÿžléÓ/èZ»?Ò†AÄã/Ú›vB«W?2Mÿà"àKþ3«€JD˜c³Zý‘©ï‹hûú8ô‰{½ù5 4ÂüÛ•ÄF¨îÿÉ6ÿ¾ô?õÝŒ½Úÿ'3ÿöÞš:A ÿó¶ÿe C#öz÷'ÕÇ „ÈUÿŸúÑïñS„Á¬Žÿ™¦ÿ~»ý‹Uÿóš{D€70*ó¦úqðs¯Íßyõ¯!a@LØ­.ÿeºüÏ!à¥uv-ý§}õ›BÂÀ êüŸIÿܹÑöæSÉÿ‰7wï#Ëté¥þDWCäß‘Çñ¿¦y¯þGLÿÝuðúúÿd«?x°zø'¯þ#n€[ƒÞ@þ3}ý—ðf­NÿiOÿ@ŒêÎ@éÿ¹ÞýàQ°6gÖHý×΀PúÏTÿ9¼±ƒVÿŸ5 ¬‡”ÿlÕÿ'Jü=DÿæâݪÿO{þ÷€¯ÿ äÐjþÿd«ÿmdG¯»ÿIÃ^8  ¸wõ†Zå?ÓÃ?1àͼ>ÿå5ÿO/ÿøÒvs?~ ÄÒÿ©àñ^ù÷ðæ½‰>¾ô?1 Æ™<" ¨S£ãíé¾þ%¼ƒ‘•þ“†HDt2kî%ÿ<ò羯³6>¾ù³ä.ÿ00g€ÒðóÝ¿oàYUªü§mÿš4¡Ç ônýGÏD5~¥ÿ™a`Q™©ÚÿDÜ•CÀ› –üÓNÿzDoÀ¥ôŸhúÁ[£5üO{„0­¿So%ÿDÝ¿†€—ÿ¬a Ñ²wqîÕþgjÿé"àKÿ3ë¿F ºpc–šþgÒ¿øÒÿLý…„Q'ªé&óO xhV_ÿÓŽÿ$ÂýÓ­#êc&ÒÿÉõ?¤ñsg€Vãÿœa/rþ7&s8¾ù·ô?¿ñ Á®Â¬UýÓVÿˆÐú¨5ªXâOTüCäß„Ëú%ÿ©Јo@æ,©ôŸHÿ!Öß~ü ¢ÒZë„þ}sÿ™ÕÇ¿Dú‡žT°>þåm#®þ8)÷þgþ{øÑö´ÒÖúï!eÀ FPÃÿLW=¼lÿ–þÓöÿw`U:TýÏTÿ{xî<¥ÿÿeïÚv%±Šëµê~ᤈ‡<ä,,YŠc¤ùþì¶ lœAsº¨ž½Æ~°ÏŒÎÌt­U÷Z{kû¿cü”€P(mŠÿüwÔÿkã¿ê ì|•Ç?›øŸ3†÷ÖFü_Ëÿ ¨§ò‡E€ÿw)ÿ= ßn‰åÿµù?Ä/Rÿl:þ­Ã7Cúk- º'`ÐnlàÿÃwÔÿÍ®j˜ÿ­]þŸ€zžG¡ÿ¿)ÿ—ëø¿¹ÿ7ÿ‚ªÁÿMý¿7±;èÿJúÓ ý£‰·›ÊÃÿ/UþD`ýÿ²õ¿cøìÆúßVèȈ-©Ï·ÿÝ}ÖŒá½Úka`3ù¿»¸(Öÿ7Åÿ˜1|ª5ø¿wýg$ÿ~aýgÿuÄðÊv ðíø$ $iu þ¿®ÿ—L"àÿâ·?&øobiù¯ÛtøùìŸr9ø¿týG&T Ø•šý¿ëúÿ®NŽúïÛW`OXS<ý·jñCF ï\8ÿY;ÿ“‘0}èXÿßÔø™üz’šÖ÷ÊŒÄÿìôS ‚ÿ—õÿ¼ˆ«õÿÞ00Áÿ Ñ0¼ý»Šÿú&†ÿ_YŽäÿÑ%®ø¿IþŸG ßçë„ø¿÷íωù_>›ÿ¥èÿÝ&ÿw ÏÝþïÿO €«:ô_Dÿ‘³ïC~9õ€þWÓ¿ˆ‰qýwÝö–©Ä?gÿCàù% þsßöÿGüåôdû¿ÈŒ’pýwÛö_‘›DáñϵÓÿ‰ð_– íßë´ᓱü·¹úŸÈëùö‡áíïû¿Q³"ü/Ö€ˆÿB%FÁþoâ^M˜Ñýß{ü3Âÿ.ªÆòÏ&ÃϘ½£ÿµÅ¿¬þŠŠju"û¿LùÿÞ¸;!ý»wø7PJdŠ¥Búk“æs޾(œ0ü[ÿ'¤•óû§1üÛÿ}ÆðÏ'ŸÐýÛÛýþ«P°)¤¿>&vUX*Zü_ ƒ$ÁSû'Ñý»ìô÷iøt1Ôÿ{/@'P J*¸ü[ew™1¼œÜíÿ½—ÿ#áßž¾c÷SûoÄîœÂþÜ»ú¯Ý?qéTtÿV…ÿÃ{d1v×vÿs¤ŒÐ4ÆÃ¿—½øz ŸÚAàÿ^áÏ‘ô?“:ñS÷/ßÄðàÿ»?ü¥R©.…Û¿MÊ?:cøçÓ_Øþ¹úá/•v ÿïâÿHÿ¯ƒ]Ò?Wßþéÿ@ûSþÿ.†ÿß} ¤lçŸÂôÓͧŽÞÉíÿ»•¿TÅ•R±ü·éô_f !8þÛ|¢ý¯Ú’ÚPþÝTþ¿‰ÝAÿW†ÿ‘èϰÀþÛ–4¬T§k‡ÿ1ý³¢˜Ñü»­ù¾OP†ÿk³É+»Ÿ›ø?’÷5y9dÿ÷†ÿ‰$ÐL¨ ¯~¬bÌÞXÅÿZúóÿÃ$Ú±úÙÅ·Å1¼6Ööþ&^~UKIƒîߦâßg ßÕïµÅ¿NL€íþaHÿ¯kþÃ[¢úßÜü›HÿÏgßfŒÍÿM›¿#vgÉPÐo(G`àÇ€þ‹è?ã÷ƒO€òoh¢ |Øo"8üYµù;døPGü_\þOtNà2‡ðÇ&Ùÿœ1¼µ`÷gñæÿ $óðÕÿ&úëˆá«œ0ýÛ;ü§‰ÇÂ’Dðêת¾Íþyú‡Ãß½º#ñߣ¬ û{]û/<•[ û½W÷cbqü ;ÿ7ñÄñÇÉúðìÇfþó Ú˜üßÄÿ!ûKâôw¯îïH(³Ro~üí_‘~ùÇï~ûí׿ÿ1~øõ_3~ý›ÿÄÿ-˜¨#-¡¬’`´„®K ³Ü0Þ{6±•Â%ØYÄîÛƒÿ{7Â^žÖ£(EÔ>=þ×?Æÿow¾øÍï~ÿ‡ÐÈ?o¹_}óõüççíüUúoöýÿ|óõo¾ýÃ7ÿáǯ¿ÿ¯O±^ýÔ\÷GA\ž¦;ÆûEP©ý¿¶ûê‡?ÉWç—ýû±æÏ‘ú3Ìù©¬|Ûò™ùjÛöÃMÚ“>]¾«¿¦:}˜5û§˜?gMÛkÍ/©¯µ-ÿXV÷Ŷõzîr8—ŧnÛx}9õG7V”ø\>'«òú‡ú­š¿ð’Ï̪ü‡½³ÊúEØì«?,òþ¤õ~|+ûg>«KÇ+øü!Öü"|õKmËÓ¶=q8´BùÓ}õ—’UåùÛ~˜5*«jûk*ÙZk¾?S_m[ÕµíóïslkÅDŸ8¼þÓG€áÕ|Ž%þÈ¿ÿœÕ‡¤Y‚D7„N>ï˜hÿ©m¹íoþºHòóÉgÞíÞþ Ü¨¼ì&ðASŒ—y{Ãc˜õBp$øþÃð÷^ŽŽ_ÈÁÿ+ùÏþƒÿÿ…D/: 8Ãðÿ/3|˜}¢¬%üþ?¡ï£õ&0ÿ?˜ÿþƒÿäàÿ¥í?†áïtüÇÇO ) îF6pgÿe øÿHýÑýo-ÊÁÿ;»@3á?HIü_Ëy€ÿoÿÁÿåÛ?#À41Šÿñ¿ßÄðàÿ+aþ#þƒÿØþý¯¢‚þ ÿÏq~á‹ßÄðpü/„LLÁÿK+@ðàÿÒVp€ÿ࿃ÿ½ÌðéIÒX]Ëÿè7øÿÁù¿€ÿàÿÈÃàÿFaèÿÁÿ©ø_©UUOƒT€õ…ÂFÀv<o=‚fwFÐ¦Ž€Í>(á›JÁ™ ÆPÙdx1¼²‘C|¯ÔŒãW ¦B °Ñè ¢ °)ç±»)1Aú2=ð§á«JEÿú3€šI\«Qû]èâXžq¾w¼ßàÿ{ò¿£-€­0Йm*{–h\{PÐØÙ ˜ÈLÍ­€€;B?è (…ÙíFá¬zùÃoá 4|ß/Œamt«‚¸µÍÀ ä” è-JyÆð}~ï‰.Uõ‡ÉL^Òê˜_ö’üÓðƸ_딇@FÖÄïsÅÞ§d÷NŒuÑŠ^á­Ò¥”ÐŒ¼ì‰1'"ì/îŒeŽÁGh;ÁÆæ± Àƒª‘^&‰q !\PK^޹}$ô4Ž"„€õàŒ'¨¢†XÚª0StŠ:XÜ ƒcº‘Ö’‚žšy‰pmV1%ÌvãáØÐ|ÂN íM­á™X çß6ä[s‚‘}'"+9Á¥z‰nbœ`9*e#Âó}’[¦û“Äì‘Öa¤º'¦G+cÄH)Êùä p Ë]‚=(FðÐM&¬ô<€än ôï 
©eŒÖÑUˆÇî†ÎÑÒΑMuŽÒ•TPÜúúÚAÀ a¸4Ù_¤L“2K.a¡K A@y±°qÅ`d°˜]!™ ËÇ õpD‘WBÇj= òÁ3éeæ-¸J»·p(?9‚ZG·]!U¨D@­dr4 "‹ÐLÜzt’Äi*Ÿ ñ8†€cXÞJò‡åŒch­$´’î¯h>Pë@’¸¨ƒ4b÷ª¡v»4%è‡t’[°^°KÑrd¯¤MÜIÁÿ•0h~èÌiZ[ë12À{3Àðó;!XûëHk S¸Q ^ÆìòÔ1‚GXï’GF äî6î?DÑi#p=’€Xï!´'’H;‚½ÐG¸­z8†wiÂ:êÒÑB>‚G`-™‚ša#FêqH Öã¡Fd- úäÐ3Ydù¶WÀªŒœ`-z‘®H ®=V3‘8©¡!)XžÔc¤J=ß“¡^±Éî#ËèÇòѸZÚ‹ƒŒ˜ž::<Àg3À…IQìÕºœá¿—U+ø¿Çó«Î´ƒB27{qP3à)mJ˜.rý®#†oíRh—Ý×ê-´¯}Ó”Ê㺻³ÝÔŸ1<‹·ãº`¥påˆóWaæ*¤ýK¼‹iªRœHþÕ{m3!@Û=¡K´< èƒ{& ©3®‹Vn ޼uv PšîÈ v††~ÐÌʨº›k")Ø“dȈåØWk=€òDYàªeθ4]žÈ£Gf®átB’Ã….AFI\«è3‰à\ÂÓöY¢¥¶Á'¬LrjÊ !Ûí>¡Õ1ˆ"MbySñØ~ä}4· +Â|aQ+!rÆò•x,{)Žð·çËE«öâ@V#0ðhêÀ³X·)[»g°n ï3|y²`¡ü6é)÷¶èÆIÙÒÌO#O"{s^µÙÄÿ‘áa°F&D(·7âÁ:B‚Çå÷¥€wþ=Î |¤ìVMA÷gm.HæÀpðЙÚ8X=²Gô»ø¤„oøö1 ¿Íõó£Ûàú‘¹3L¥ÀÅÀmštž"l‚{Â-¨ž€q4©öçCÇ#©¦^áuÚÄÇð•Šwi¶&ƒúšÁ©Ž6‰ÑyÌÔV–Ið[=@ÊLNèbf¸&YêüQ3úCéçÇå×¾G±8óŠ­g¨¦ãɪëfGù”I…«ZÃ#sãl:u!rµµÌŒ;“m :²CX$M¡p[‚Í„bV2¤‹ÀÌýH‰©.K7r_’g Ð§à€øè<_b3€Ðôþ¼÷,ˆ‘6Ò2,çÄFÉÊVÑLàÄ„N7q?z¦2ð¬„õd#]ã m!ˆmr9S+F³5! بB4rz^éQŒÊ`{0°çó×#€(!ÌŽw5 fɪŠoœnM ü‘9ãš’qо79¬¡:±#ÕÉáʾñš9±X¸Ñ‘¡å” …§‹¶Î˜•f` ñ|¾Ža½ŽùÐ4¡­¬¯k$-®¥#ib[àì‹_µépq5 à^¤Ñí7ÉožY@ë"*x„s%™A@fc²x±NEw:+köÇ„™p~8ŒÞ#½å RÿÌ¢xh$h?|ÏdG°0a°ËgÖÀ«#8_¦€îñz<žÎà!• 5äµÇìA*Å@ÀBÄŒÐjf”ëñP# (AæÌ†2aeL˜A€S·â}”•uÂLVðœ7 ¦K+0Ó+ˆè.T÷ Ь ;'+P3¨¶F°Ö#8i ¶ŽÖãÁ¦ðªÈ wdL\A °‚0‡“#î×öïœÌV£LŒ˜¯Ý1ÀÞ #!¡G¦ lê&è$ÞÛKæóÉ“aÇ`åDqa@ÛfÖÎø9^ Œ® ÙÔý™ò@À«Ào‚Ä„ë”xpø B´°‹¼R Íl"/(z £²Ã'ì.0ð<üŸƒ¨GP½ #Þ¹p8ðö ¸„õ.AGÄÑž±=oXà66'N¥Ï—ø°¦¸SøÄgP&QÐÆ¼l­”=Øð®Ò&ÃÏ0^4´1R\‰€‘B@U¸¯“DÝMý™tOŸùxVsÓ[:Cy3ÂÐp%÷Gê䯏?¹Vóô ÀÛSÂaÀk¦è Fp]µEÁÿËÞ¹%·•äZtF'o`5ÿÑÜCw›dwÙ·B&„ìÜ¥úò‡—6vâ‰TïHZ Âr+<ú&yÿªžO¾”PùŠ@ùOÍ8n}íì_þèIùÔòdÔw&ÅûžÏÝݽ‘tL¥.Æoþiy¥ûSðÁ¶v.•+c!ÖOÊëyK´WurƒÖŸ§õÆ8y>ƒ–°T_u3¸ýSÜÞx"ö×Χ!&Ž>ÎQ===±?*ÌPÌ=®•GÓ2Žsû›tzúùôq®5½‘ÞÑ¢þ¶8ƒ/¿™. î§_õp™ã?h|W{&úbñ×ú91Ïýy T®h9G˜^eÄŽ0RînEKHr±"p0–ƒžéþ0%ô€œ\g§…í±Ãy`»²§øv",…‹ÅÓI¢—ö¼#T*Iy©®%—øXã_k†&4¼#ãbëñy¿#‡Êæ§š³%¯%ZŒDÓx¤g‘d®¥ëðWÅÜüÂ'¤å<ÑÍA¡ùp("MÇ®3‰EÍÀÁÌ‚”Û%=ˆ$w3lžžnÊK½G˜—ã át \¯ìY=˜llØBuðú”¥R fafqR¯píáÀLçϧ{oBIXÑÕ|î(KÚýEp‹ãy°&\5/L ¶ ëòž †ëAhøÅ×-«åà<¼¥¶1ŽH)|êÀaKªÈBYÆù!"{x`FÿûjéÙ\œ®b8|UvÔ'k‰h9Î#ެPwĈ"b x†ñ’P—¶T&‹ýv'Žyˆù¦!­Äñ ¯vièÂÏFx¾”w ðŽ;Ý€$lÒðUB\ÏV'L"ªÔq©àÌ5Íð€¸àÀßÿû9hHAàNvHBü²ž€SÀ5ÁÒÂO]hdã&µìÂø5mÁÁoÿEKÿ¶Ýã°µœÑôºøåÏÕæ ö²G?’ÌÍËÓP¨éô „°•B¸ôÄ„²ôÄ •ùŠ Ò’%”Š–´‘ã -U(¡e¦gØ@ZŽBÝ@øZ"„S­•³¸(:Ìo\_ΛIøÔ¢‚$@Þ$¡Ç%ˆ=&«3@ˆŸS°²‘‹„iø”iÀ;’ð4 z$álI‡$@^Çá2’€}JHÂO—ÀÈ5ˆ÷F6Ff<< Ëý®›ÐKÂð„a]¤xR@Þ€àõ}ŽáMÒüöWýí*%ðiº,FMý/¿ü”W¦<<¼t¬üTIJ€ÐðœÑMÈÁár àð^ù Heq„áÁ¯ž®·T,ÇSaj ª)ãFR‚#Ä39°ºJ›8§Â.Ÿ¹)éžEÿiÁ´ÀÁP=È˲ÇXÝ_(Ml07Ó3H•þ僒¢¥ýÍzú!kßB9…‰9‡P àlÔ@Àásˆ¸ÙA€’KáÖüÁNÐr±˜ZZšëiXÐxîçJæEHM¯0ߟbÃÛ0ïȱûG*€Ã ðx¹•&8t!ãM[ñ×4® eH¾ãŒðÍCh¹à¥8)n! É…pÛa<=!ˉ98÷À•U†ÌÁô•¬—µ$deŠâúÓÈdr´@VV™ lµ<„cE&x˜ hiAÄôaz/ê/<€‡ŸÓ WPöð`jŽ–¤s a†¢ÓyÈ+z^áî™Xß=RzHãDVáà4c”‘.Ä„á<ÄÚãªX Í §¶-纈Xp#3ÍÜC@–ÆÜÇÇ„l±ÉK}¡cqä œ^§w®€Ó“ Épt[ ˜K€õÀ–† îçj€?Nx'n0Mo4©KÍ7’ð™++ ŸjBL@LxÙñàá¹ü@#Àxxò`•0 xE‚tŸ"& &Ü<ÔUÛxF(ÂÖ[¡»ô® ÄëZkªððS âât ^‡úV‚ððÊ3-Ûâð”B ?áþ¬Œ¾ ˆ¿¾( ¿û»Rÿ7|Ò”·s1Í„€LïuÑìhuà%iÌ K1q*7!ŠÐòȃ" ˜ &À#& &šÒ„Ë xˆé]*~QKÚ¡ná™H=ŽÜÅÖC€+öøoДв¼µBEÙpëg¤"D FQ 1aä˜t8?ŒbÂtš!¬´p5|¤ h!F `d3{µ.H6o°Ó¿Ç"…†æïâÓž4BÝOwDˆSU./@ÀÎ{3*‰Ò ó,ç&’¬=*;…mqÉT¦˜m8öpüM€-](';Pâ„›ów崬ܬåXX¸8¿àì-™ÅÔuË2#Dx¹™ƒsÁUšêKçŽÌU2ó3ÜÃQ"spìV„›€`#lF™9èÉÅý“Çýž²ÉM©ÖÑ«"š¡‰^·™wåi =3Öo1’T,óT‰-ÌÀM$ e7‚ÒJeÌâ Ex» Ü2ÖE8½а‹"¤áðJ”C OE¨–s_ÁªŽJÔ±Ý àøZ$€|™¸‚ÏóÀ—-àáß<ÈÕÓ®dqó`¸ >2@ä&@:¢ ؘA€ üKD‚p¸ ŠðTÛátAPá)±Šp¸"Šð<ßâP„ÃóŠE€"<ÁòЇ+‚âÕEx)B2*Oàᥡ-­ Yfì†qì˜L@ÇêÀ´lÇ(ðÞ~Â" £œm 11ᵋ‰láhZþ4¹ÊdY]èRþ<ÄNN('Íw…-’ŠÌWn3Û”P=*L¸ö=¿šÏеÖJáå E8r·òƒZÆUÿï˜@ à3з¼¨¯r³‹¾‰'È<·U˜£"ã\R°»Ò@ ¾iº•¯øüè $áð¶T(ÂNŠ ¦›(€hñŒÎˆ‡w €€Ó3‹ “€˜ð2 ÔcÃYÎìJüA@¸Kaþ³Az€H—U(?žy¤ée$´ ã%Ášx¨P%(±G^qÿ/P„ñŠàÖRm`*erägQWIKAšíþžŠ~•é!­ÏÀ¾Ùx†‡Ý~‹Â‘iœ~éU¯ê)? 
±ºâ¨Ûüräjé`ÖRìZ=tÖDI©çég?ébé‘}¬H@ÇÊtÏ —¤µaÁ†ÌãÈá-’`Äå¾0 5pJ} Ѐÿ5Ÿ¨ÎRÈ%[‘ÖHs $“†ó`v}þøƒ‡Òà*4²ŽTHá›Á/ŠI°µ¨…‘M€$|žº=‚‚„ˆ·¡¹$#¦E !bbˆhÙ›a™wätįjyFüS Pqú¶¦„–DÓŸà!âSÉæBˆ€"ü›‡x ¿$áð­ýI€$ü|Hêe-#²b“>¥¼T@à]ùá M€€iøPªÉ# OIÈË= KÂc(2~†&¼iB´¬c{<$¼.#B=@¼îõ$£myˆ<é‡`Ì„*$!fóà·,õ6ÓEæxEŒìai©FÄ+#(ÂtE¸µ»‡pñ0Á„äHEàM€"|žö˵ H$á§$dËx$a²$t¼³$) Œ$@6!A¡¡üTW6¡B,H/O”ë FlŠÉñBVÞ7þqðõTExíOÆ’H¿˜úÑ´¢°9{ÎsF¡W¹Vk‚†Hap\ý~Ò)Øm$^]= H'õ¡Ìp+GQ#VœÅ2'L÷CâyÑ= '6AXÒÎbs"@R¼A¿dIÐ) Ê)G#rùxà)i+šS)ñTÔúK›³F(¨ñ˜àeK%¥„I4"ણ¶(9GMX?4O ,HF«‡C™âw,*Û¢±Ä&rÉžSÀ@¤Äc’ëÏó¤ {HLiÛ«ª¥„aÆs¢{ÊCÖLR¦Äƒ‘"…",y)†§D@)Y‘CÖW„<#Êö-áªOEm@¨„ÑFXߪSv …±²P„×eN¨mÛ†ˆ€÷0%DÄ¢&¬?¥uÊ®¡l_HSìÖß5Ìp+! Ä5¹Ë®€¬$ÉQ!`$ßE.Û²1<ÌW®±E€šj(ÂúE›ÅHï¶ä­†)k„Lyo.‡"¬9§ÄƒìWÃÃ|Uóöõò¾R ë àš²k(¹PÎqúxÙÓG((N×W„7a1 it×<{’)P£Ä®aý>‚àœxؾÅt„ËÞ{C$DeŒq)ëß×kF4K–!JK®¦¬K*Û¿bZÆú7Êœx`J™¢³¸äüœ>vJf”1ü«ÝÜô3â!KÛ·P„‹NÔê¡Sú:£¯Äš ‰5Â’»†ü Š0e×0c ïœ(‡"¬ébž£²gÏâœ>ÎØ5pÉÉr(ÂUÏxÛ4d(±kX_xF<1B¡8}¼ìĔݎB±FX¿sa“‰]Úk„ûFáíƒ/qÓ鞌œ±‹T¦"Ûr$aÁU"L 1NÅõ‹:c‰`@¬ÀaPºìÛè'MaGEx‰Õ’Ðb6ûeL÷Gs\}\¿@3â¡°I¡ξæx„m„" rìÖ_#¤ ‡”¶x Iqø¸¤e$Bæ<ú8!D2Il–„)µ@ì`…0ãXÚ¾`J…â aÉ.‚M‰€Â9.>>€…yF<«iì®zÒÐ%B¦ìÊ„kN´7•2#†"\UzD@(ÂEÈ3âAyÛ3pœ4,yÒ0'$o¢ŠðW¡‡_·'㔬ˆ5÷ì6Eøñ§íßþôáã´ëÿüöùÛî‚¶ÿUÊ¿B;þçíó›?¾}ú<ýçñ_-ô¬¥)ˆ;¹ô´E ü>ºo^þ ßl?ížÿmÌZrßA³5;- Ìf[žÄ…Mš7wÅM¢BêF³œ%*ViÚ²4=dêX¶Êd¶`OÙŒ‰ µ²?%ºé.XÓ–ùš hiYš.2u([ ÍvSa¦ý’w+[?5»é.œÖT®Ñ,¸,M™:–-|iˆÌ·MDÉÍsŸÑOMÅnº‹§5õu˜‰—…é"Q‡¢žÎva¶\r¢Ö Ñ觤R7ÙÅÓ’J5š¶2½hzÈÔ±l!åÉliWaEJ©ùf*ù)©ÔMwÉnËTRʼ,M™:”-àl´Ç¾ÕÚg•’Ÿ’ÊÝd—JÓìéßдA4 ›_5#¯%u,[øîÏ–í)—$%cóË짤r7Ùe»±ŸTTtM•´ñ¼‡¦‹LÊv+n³Ùn*L¨°´ª0û©©ÒMw¹4½ ù9MÅeizÈÔ±l·Ïx2[ÙU˜ j¾ø&~jªtÓ]9«©UšÖ¥é"S‡²­ù±ÝU¸ä¬Ð<ÆD¼ÔTÛþ¶ÝhžÕÔÚÙx)hËÒ|üLζä©líð‰¦T$h;€3?¦_ëæ µsÓo-S÷s”eiºÈÔ±l¿|½t4Û—Þ¯¥Mˆ[Ùú©©½œ¡vîúýLµeizÈÔ±l!Íf » TÁF¶à§¦B7Ý=uýVNi4Ywgh?š.2u(ÛÚ)Í ¶ûÎF 'n|!Üü¸~­›3Ô^qýV3µð²4=dêX¶ ³ÙâÞ-DQ2nÝÙ ŸšŠÝt÷Ôõ˯wv.KÓE¦e ijÙî*¼ml˜›×K触R7Ý=µýVNi6šúƒ½hzÈÔ±l¡rùb ÛÝ( ¨Z[×K䧦R7Ý=µýJåÌͶïƒhf.Ùôš.2u([P˜ÍvWa–¢œ¤•­ŸšÊÝt—níý]–¦‡LËLf{8EÅÀH[Ù²ŸšÊÝt÷Ô÷[q ¥eiºÈÔ¡lk¥Aln¡¨šµö ØOM•nº{êû¥\¥ÉËÒô©cÙ–ÉlåØÙ$ ”S#[ñSS¥›îžú~µv:N|,ÿ¤iTA5¯¾ßÑl¡Lg{ 0†ÖYvæÇ÷[ºyCíÜZ›w–IÒ(š…sã öæÕ÷;šmmõ;‚m9œ¢Àj(;›âÇ÷[ºyC˹7ôõm*±1‚™Áõ˜.u(ZžÍö¸ÐˆûQ-µ²õSR{YCË+Ã~ë4yYš2u,ÛÚl–!háá¦ÉÑ‚ŸŠ Ýd÷Üõk•³q–uiºHÔ¡lkD±=Îß¡@ëÚü”Tì&»§®_©ùÍXuYš2u,[ ™Ìö›Y¤u¹„~j*vÓÝS×o匆˜»ûÍúÑt‘©CÙÈl¶{¯p+å’Sje맦R7Ý=ö[¥I¸,M™:–-T|¢cØÒþЉäœUK#[òSS©›îÒ­7i² ØË¼Ð4ÐtM™:”-"Íf{ܽØVL™[×K䧦r7Ý¥[ßyË[Ý[–¦‡LË+þü1lùxLX,·²e?5•»éîù´ßŠß¬däeiºÈÔ¡l«o§Ža»ÏÝÙD@)·v ÙOM•nºË7N|ØhrZ–¦‡L˶ÚûÂö˜KÅ05¾uQü¸~K7gh9wýJå=Ƨã½hºÈÔ¡lg£Ý76`ZH[ʽ˜~)u3†–sÓ/C¦¬Kóáu8[žÉvÿûì›B¨ÒÖ‚8~‰ŸLí#»Ÿ>”ÍÔœ«#±lYš.2u(ÛÚEšAlS m •­ŸšÚÉú›¸hø ©¢‰©=S½ÖÔ±lkư…£YHZ¬‘-ø©©ÐMwOm¿5‡’e\¦‹DжÖOÄöe’rnó‰~ú|$*v“ÝóY¿ÕL5[–¦‡L˶6ïa Û—é°©`nó´¿ÄO¦vÓÝS×oå9Ib]–¦‹LÊg³=&`ó„€OL¥nº{êúÍÕLÕ´,M™:–-(Of{øD³æýŽz#[òSS©›îÞêúÕ$ŠËÒt‘©CÙÖÞ#ÄvWá’qœ[Ùú©©ÜMwO]¿‚Ušº,M™:–mí¥‹1lé°9) ´¶õÙOMånº{«ë—ÉÑÄmùevM™:”-NG»7 ɷߨ­Ÿ’*Ýd—o|’f£Iº,M‰:–mÍ¡4†­X3din닟’*ÝdWnŸÏŒ) ¢)J…Ê=4]dêP¶P[.a{L²ã¤Íl½ÔTèå ýM\4>µÑÔei>~¦Žf •Qv#ØÂ?}¢,Äm—¤Ž_â'S;é.œ»~kWÞ„{?„û M!È÷Ðt‘©CÙVŸNÃö8€ËÌ¢ÐÊÖOMíå …s×o©\5I6ˆ¦dÂ{hzÈÔ±l‘i2[8Cͬ­5üÔT覻7ûÍ:`…Ô‹¦‹LʲÌf{Œ²+…’r+[?5»éî©í× JÓÆÐDH S¾‡¦‡LËSžÌv Ì™¬‘-ú©©ØMwOm¿3¡ô~·¤#M™:”-Èl´ûÔ’ŠBI­hý”Tê&»§®_©½œ ÂËÒô¨cÙVJCØ>QÞ>ó’ZE˜ü”Tê&»tkëtYš.2u([€2›í¾±a+9¡¶²õSS¹›îÒ¥¦¡) s¹‡¦‡LËe2ZÞ=- š[E˜ý”Tî&»|ãEfîîbùDÓ@’â=4]$êP¶”¦³=D˜4%ní곟’*Ýd÷ÔZ§ ¼,M™:˜m™Íöð‰ê>òPZ}⧦J7Ý•[/Ò( X!õ¢é"S‡²…Úzi ÛC…µ4«°×/vs†Â+³~«4‰Ñ$ mý{ÍÇÏÔÑl)•©lñÅ'ºím€¹­¡„~\¿ØÍН¸~+~³”»¯ëGÓE¦e[óçb»«0a*¥H+[?5µ—3Ï]¿¹Tiê(šÛê ôš2u,ÛÚ>u ÛÃ'ÊV€!5²?5ºéî©ë·r9­Œ‚Y²dÁ{`ºHÔ¡h¡òÚÐ ¶ûuÆ‚$šÙú)©ØMvoõËXºCûÑô©cÙòd¶»Mt[ùnŸyne‹~J*vÓ]¼u›Šý/0¾Ð„”¶Le¹‡¦‹LÊIf³Ý76©¤}§ÚÊÖOM¥nº‹7Î%”¤chB‚Ò^SÑkMËk ¥!lwŸhIš¶ 4²%?5•ºé.ÝZSK[–¦‹Lʶ6îlÛ£¦n«nIÍlýÔTç®ß\£©ëÒô©cÙÖìgcØò1v'CÎÚº^b?5•»éî©í—j†³ŒëÒt‘©CÙØl¶{·P·S1ne맦J7Ý=öË\£9ÀÆÒ‹¦‡LËjƒœ‡°•ã.3'i튟š*Ýt÷|ØoíEÆ„iYš.2u(ÛÊ6uÚ—Ö/RëÆÆë—º9Cñ•Y¿X¥©ËÒ|üDÍЦ²¥Ã'Z±qÖ/ùqýR7g(»~3Uê¿•éGÓE¦e 
Zf³ÝÇîâ>¢©•­ŸšÚËJ¯Ìú•MÂeizÈÔ±l%ëd¶»QÔ„²•Æ8òãú¥nÖP:wýZÅö«¬¶,M™:”-T2uÛ£Y˜9µV¶~j*vÓÝSÛ¯TifE³ìßï¡é!SDz­¶~‡°=l¿iê°‘-ú©©ØMwoµý2²à²4]dêP¶ÕãÔ1l_&>$@m]/¡ŸšJÝto=NÅMy—¥é!SDz­ÖvFb#hí’ŸšJÝt÷Ôö[»G^ÌpMÞ˪ÞCÓE¦e ¦³Ùî;.9“µv ÉOMånº{jûå*ÍbËÒô©cÙBíÒÛ¶|¼ FI3¶ÖTöSS¹›îòûÔ½‹—FÑdUÖ{hºÈÔ¡l!Ûl¶Ç ÖTCk·ýÔT馻§¶_Ã*MD“sR€{hzÈÔ±l1Ód¶rœÀ BáÖL?5Uºéî¹í÷õLå´.M™:”muÚï¶› +!Rëê×ï—»yCé•i¿Z£É¸,ÍÇÏÔÑl7zSÙò‹SÔ4íïÍ7±e?¾_îæ ås߯TÆ`ÿþà/43£Ò=4]dêP¶@:›íËà ¥ñ”†ýø~¹›7”Ï}¿Tµc%-KÓC¦Že “Ù‚=nŸxJˆlÁOM…nº{êû­ÜN¥þa?ÁJ|L‰:-Íöeä:ff¶~J*v“]¸ñíT¢¸,M™:–-$žÌj´}ìlÑOIÅnº‹7>žJÊfËÒt‘©CÙñl¶{³P“fm|œýØ~¹›5”o¶ý’JJËÒô©cÙÈd¶Ç|X±"¨­*L~j*uÓÝSÛ¯Ô̄ʺ,M™:”mÍ ?ˆíÑ,TN ­*L~j*wÓÝSÛo=QmL3#À{`zHÔ±h±:–{ÛÃ'J *¹µ«Ï~J*w“]¾ñ=ra\–¦‹LÊg³=|¢°_”Ê­lý”T馻§®_*M]–¦‡Lˈ&³=ÆÃdfjÝØˆŸš*Ýt÷Ôõ[»G®&¶,M™:”- ÌfûâiѹÕKèÆõ+Ýœ¡üŠë·š©—¥ùø™:šíFr*[9|¢f&€ø‰ׯts†Ê¹ë·æáfͶ,M™:”-$˜ÍvWa&CIÒÊÖOMíå •sׯT3µ»3´M™:–mmâöpìlJFjd ~j*tÓ]¸uŸj%ë²4]dêP¶€4›í®Â%3¦Æç6ÅíWºYCåÛ¯Öh/KÓC¦Že €“ÙîFQ#ÄD˜Ù¢ŸšŠÝt÷ÜöûºßLúŽ÷ƒé"Q‡¢­>5†í&ÂXò¶?¶Öåú)©ÔMvñÆ’ºÑTESD)W×ïh¶PWál ž "HÙt ‘-Ám™úþù»·o>~õÓÏÏïß|üéý‡§í¿Þ}xûýÿ~üêooÞÿðüñçß|ûüôáßÞÈþ«7ß~ûüóǯžßmAðý»¿îÁÿ}ÿsS¢ÿ×÷>þéïï¾ûñù×!ñòó¿ùðñý󛿵…§—Ïå7¡ñæçïŸþûù¯Ûoõ)ªñëø,Ûñ‹ÆÅîdÛƒ‚ò¿ïfü߉߉|Rã´}!k­Ã„7f÷­âðñ·ˆ )a,GÔ„¯ß|ûÃöI-x" ©ÌM™­ñ$·¢ðO_.ðûÇCN‰ä¾xx4IøçOp×Âq˜|yWwQð-‹‡`‰ü¿dþEþGþŸÌü¿FþsääâXÿ_4ÿ!ò?òÿäœÈÿk䤤ÿãt"ý;§?FþGþ'ŽôêÙÙÙÅ?ÒÿZé/2% 2Cäÿõz…“øuÀ>øþ‘¦”#ÿ¯)üSNû!%ÔL‘ÿ«†AÉs  ªDþ¯^ç€')`ù¿hàœÆO‡0ˆüï›ÿ¨‘ÿ‘ÿ4òÿšõ瀷’˜üBÂÿ(àCøG žrÛ3Gþ/”ÿs¸sJÊ‘þ‹F$šFY$Â`Ùöß5ÜŠ€FX¨ýosÀ“AÑÈÿUÃÀælÿE÷÷Y" V ƒ,s ˆHØVÚ L±ÿ€& ß×ZõNÆ«YN)„ÿÚ—¾:„AäçüŸÓþÛþð×ó}åãKøëe|&)þz­CŒ“ž ‚ÏPRôø×uzÌ~3.añ½ÞÝ.((9ÅáÎJàçìêKÞÑø«U|ÜÏõR´s/·«ß~TQK€¿ÚüLº­Xã/kãŸbçB@Ùvù‘ÿW[êío{1EWï‚àµpôñ/ 9‡{Ý®žÌ C²èê­4¸gÊVKäÿªaçtõ;„AäçüŸ¾Hÿåžé’„Û’?„ÙVÏçn‡0ˆüïÜã›rœ«€¹D«çz¯À÷‚á<- p[ðCÿJ+þ)?Ý–üwÞÕ ð8ªG‰Õ$nç®ôD“€U¡Äÿªa€js 縲u9 娸­TÿyxaÃØø]ü‡Ê¶˜Ø¸êþßæD&Óð}¬<§&(b\ê[wbÓœf ŠÑåNÿô÷wßýøü—ŸÞÿùí›wïž\´+8# •®­„ùAâ!6‰xõëšà£ÌéGü?{g–\×±cÑ@ßÌBóMåõ³-EIú¢â:·?‰&ÖA—ÀÆ^Ø@ô;aø?}—€Ì-€ë[à­áƒ¸…n| =€ö¸»E¨ àòçB¼ÝN:E·çðÈ¾Š€oÿ•ÚðwÿÓ*^ýÓÕˆ֙ί‘ð ü‡ƒä´´›1¸öTk²©‚€•äÈ&j[9i€€uÄ ‘RÌxcÚ΃ÈÈÓsw)kƒ‡õ•âH–ÈDªnvó CjÕ‡‡òrÄ‹ýùCöXiß;ªÔ3›´¡XH€Í ’Êh(.$ÀCF8¿y¬íu}Äcøê„žùR ê‰Ð â¥nŒqååñ€Iá![Hœ2½M橵,Pn•9Í¡xÐUê¨ ¯½bÎ'°LïÁ„G°ŽÄ ë3DѾca§ÛýõÈã’†©3!c¸¶•¤yEÍx[Íø šñSOã a`‰ïÿ¶ÍvÖbs‚öÑm§1`ø…³ç:cxKÃÛ3Bh5ã+NªÃoúâyÄðýš&ÆÄô±ÐHoäÉøþ/Ó,8†¯l´÷6Vy#°“5N¯•9gñ“øƒ€k]€ØqÜ¿_8²̦qý¾éæÙa™™0£.ÆIìëֆ̂ҡBsï Ù±ÿå {Ëc=“º´6ž~o;|t ŸUb( Öï‘Ê ÁíáëF,ÂT™ÀÖL Gî)Ùà`-5S Ø ¥áµÓà–ÕÕhîí YYKAsl½[°¡‚ñ5F@·°Õ-äLèå5P –bP3¤vÁ¢vÁ„Ýý%E*8’»79Ì 4É¡1±¿{l1ÃCê:”÷¾'9‡–Cwìºv‘Kx  ®[%t•“ \ú` S*䮞˜$ØŸ ôLm`Ôá(û…šáÀL¡CøKC­#«ŒH$Š·m º{RâAy붉çL<ð¶"l›lêÌ4ƒ% “+ïž |»ñê’ gŽSD6‹ \'4M‰©‘ÅÄÈðP²äIðý/úþsÆðI€áß±öOi±ÆÔèÒÌÏ™L5ažñuBiîTX+[ë|d<(²ke+ÇEfR@ÏjA ¸©ù72’QÊ—/½Å½©˜yÈzƒÒÄ}©`?ô«š3 %ücÉj€Ï|þé™ÛÄEê\)‡øÃ`dô«XDÏ?K€=>óþ[ä†ãä+‹Á‘.@Éë" †@7m 4K-I0º6TÎ8cõÆ0àBO3Àå¢Q¸?¿œ>)ÏðP„ë4+sƒ™² ½Ìð4´©/0“–ÊïP )üTíá™UѪS¦Ävr`OÌÜ(®–¢ÄØu7Š›ùõ2€ïëÑ̈P‹‡¤àû¿m= ¥õ¤0üe«Á|ò¾68þ«÷Â$4S {¡·©FJX¸a-dUç'G ﯩ08þÈ)ù]F8ïÀFðugç$*“Cñê³1雉ýU] <’ö'i«A5þÚqÀC€Ó)þ@ÀBhˆ€ªÂ4ÐnúéÒXÓó×NÉ_b! 
®U‹“—h)*ƒk³= [¡/¼H$ÞíM ¾ðçõ…ãážqªR$àÖC£Ô…#a¤W¨dDj‡°±.¬¼² ÷„Öó`é#ÂA1„㮓:†o¢‚rü^0rLF…, ÇdÖ“©“Ø Mxø[*%~Ln3ŽáufÐðò·i¸o¤D‰bÌxnÝ ¯Ç£Þ„8€7¼6¯êeÎ8&´¶4Sp€@ð8ƒpOÌ´ŒÀÁr àG'Ÿ~›<Â;Îì.ø¡™;À{Ì)Zx=ú7'ˆGTá !——ð?—SÊp ‹³¸¸„]£3šàám²F4’nßGÁZÊü˜<®| ß¾ÞüîÇ ÑüÅ^$ÿ/‘°z~c$–«x88̬¹‚‡÷ðýôLš™bFX|_™^Ì´¢2º2@Àµ¶ZòÊ2AÀ½óíÍD¸Œ¶‰'zfŽ¥ƒŒpiÿ‘ƒœI»Ep*ïÞ.Ô©R÷³ï½ƒdÜY˜m¾Îðr>}ÁÖ릺Ðg ßÕ%¤=Ó(4#e* Ã_'eMÅëM†¯Ã÷©ñ°¸¶iu¤°7S*(^ß§xmfu>zþ¾/þ„ø6~Ñ?cx—ç!_·©ŽŸéÝFq6ÝVÎiä©åaøE1^`x´laøk ß WiŒ71<öÄ>óÉÆáøQÜÁð7žaø‹'²`}|ö0<äÄ@À…j¾ëeÇíÉjxÜ£á/Êþ †¿ó‹GÒ‡óô0“'º4›€ƒµõcÍôJùƒþÄwœ>‚á/=¦˜Õn‰ÎáN ˜ŸæÏßVÁ‚Îáþ™ƒ™NAðúëyÈÿPçßãÇÚ83g›Š²ÌÑIÞÉA?Ì# ƒbÎpŒ¤íßl¶‰úÑ_ŠQS¾VÿO€ÀðyŽÀ'IËGË.×»)p9ŒX€*áŸîA  4а¹‚3~óê0D8ÀµpµÚ»t >oøL>;ô!#ûO2ÈL'1½Ó‘áÖ©D—"iy0ÓQlÊ’„GØè&¤p\‰: ÕÂJr†Û¥Þ‚ë‘7eg d‰×®0:ÙpÜóbÄ܉×ÊèºCQÜzIÃUMœ \ûŒ ¦î˜-xƒƒ{5ŽTa£GÐL¥áÅÖп[û´Ì#ºg‡ƒÒT¬«­÷ §ˆá!ùµÁÖçŠ=S;dI‡åƒêù‡e<3îÌG2‹“;2¸·zü vøLmLÑÊo3„6óҀȰXêÂààþ­fTõÁûÜpçŽÂE ©ºT+ϳ¿*¢ vràGƒøƒÇgÖ™<Ú!‡²¾Žpqðþå¡ÀâÄ1·*ƒø…ï÷ÞMß%qÀu–?ü OOð]f{æ>xxBëð àj‘]p9„òàrº@¦Q 2ø_eO›ñˆrCŒ¸|nYÂíY‚€€Ë}À×u ¿}¿û1‹E~©/?Üç˜áå׆ÀÓã:Š|ã~9ÕTÀÿK/=s¹ <¼Gá˜q$QÈ(b(p^,™ð€Gñi —an¸„ë54†ŠIp°šžÒhFDX<­†ˆ€$ñß$Qvè}Õ#8~¼À<àÐ&à€ƒ§ $88þ ­¤ËßÐJú¡•¤®âúB=Ù_7Šòí 8øÝÏ—Âýó­RPó›Ärèeâ×V÷ØòFià臻ÁâÂ#àú1 psàà1B\ý¸ ^@ò‡„ÑQ8ˆ=D2ñ#9Ñj •TÛeœsäPX8·¶Þ²7.ÊÙÂ(&¶>>„× I!O°qªeˆ€6-´–z{¸fr‚“"Ží5ãôüA—Ëì¬Æ yÈTQã¦ôÒn³Ÿô@F8°8à–ðÞ° ®¿z8Ð!PÞÖ®ËÒÈmñpWW2p°7Oä,¸ìõ1ÃA Êk9˜zZ *cp°7.ÔŒ?hV äk9ðvpp=~Œ=óàÜÚ-އÆ#“§ã}i©#Ð'¢À …ÇgÊà`ûÿK\@bðY'ßÜM@brÃÇ’g8ksÃÐ!R²0„¶–á@*Ø{Kû÷f^—BÃ+@¬?ë3Â^ ,äܾ /hÏQ΄}¦•ŸfúI®RŠnÂʇ¦™~’§QáRôzlæ¥!•: ]…½£(Þà~áŸ]{¨àÄÕƒ¿ È?)wT—Ð àòB€€Ë'Rànï!%J”?<4Á#ÜMet•×ó Á3<„²‚‡ý<èLïȈ«b£’æLÎhÉ‚iµ7x_hÂÍ;¯;[I3dšBg}e7y¦jî&\¿W_û5ÅîŒc,û량 Ñm…ÎâzTFV2™’Á:ÍÿðÐÔ áíÓŠÞOά¶d9+ 2Ç…žÀfs’T4¡«t­tV’½ä¸‘$.硞‘&cÒ¡¡Û°ëãCŒä IenéÅk3Æd2=ÿÀ#ìæ!OÆØ#<½æYá6®Çxq>.}¤{‹ +IÄ„å<ôÃ#+)I^làa{ŽP#ßæšÐ¹vT%¥œÅÐVZ!Dd„‡&*(±îì#ŒÄ%nkL§¬÷Ú=ÃC¥;žž®]rIamœ]ï\†x(SBް2Gé+©‘‘"GXï¢x†‡æfä×*±¹«&WÆ„‰ÎA‰;gƒ€•>`€R7裬|pž! »IÑL޾ȢOÅ„Ò2Ç"ìv 襇9DdÎ:íÂÌF€hSÜuZ™5Ö æTè%í×ežé$hk“anu¥GÐ^:ýØe¸ö>tñ+0 &¬¿êS3<¤k0^ —ó ü$ V¯+O(î}n°. ÂjÃv—ðRH‰§ˆ (fl¿¬ ©”s²GáÔ×ÊS_öz„˜q bÆŒqµ{[Œn*LX{Û_>ð Í3opùgÜÃÉ…‰ÂΛ õû å®Ði~ƒ0Ñ3@d‘ z oÐv¬‘¶£×ëJ¤ˆýb&“ìPÅöüzŽÕibY¶ÎS·§’oQZtÎðÀZ‚E¹õ¢ŸÉ B,-Aì1rì¡NQ‘mx‡¢sÆC»Rˆýó°3þÁ¢I ây­ S…§6„×6<)3âuPÏWo1dŽŒÂEqacââ‘ÕTñï¬Èôz—ÐÀ¦õ,ÖÖŒC´½ºÑbý MŽ„ˆ¤SG¢ñøBGž&^bÇõeÄ£Î3HPN…"§fÅ“ÁN—0{÷džËàéIáAŽKt W:„!@YDQ',$ fìŸvJEØÿÒ«‡ÓVÇÊÁ­Ó¥/:8Qܪ³zpO"LŽÜºvt.c(˜,çë˜q„‡£ ˜€‡‘%¤¹g€x½)aà|å\ÉL’P.A(î}PòæPk°€ ÁŠ¢ “¨ -XžØÌ¥®¿+'îÏk¤›ÂÇG@Ñf;òðLk)ÔÍOÎÛŸœëù?öÎmG’ã8ïâ'hdœ#|ë ûÂ|BÐ &¡çwÖìÎôXÛ;nzºb³º~r Hµ\2¾úã©£¥²àšQxÓéÄ'·Ð Ì¥-ÏõÜ9>Ù ‹ÌtÑ&Rql¶¯ŸeZôd™2 AäúYErO*9l µ¾BTöd™„£‰ëÇY=AeÍ+'îWzBÇzÉÈ–~e 5)$–«×ë¢Ô’X‘Ëõ‚Czx°é&W®ÏõðÀä}¸%C׌J@ÀIOçLDà ͩÕ+Kr±Ñ3*Ͱc¯§=šê®¯=¯®—Ê–£ aCƒ‘D®?¾0zªL6Õ!‘D`œ¥å¤FxÑÀ«¯Ë·¢&ÞA†ÕüäŠdK™$މÇ#¸˃ðE8ð{n“¡¤B±þ®–ªcª bƬÑYK‘Æ…þ‹Ž/õàÃ/~ Ñ3«®lŒa¦õŸû î"ÙŒ0€cÎÞSeH‰ q€›]Me†š£ÀqçÚ¼e¾† Ãjíö¤Äzxˆ„S¿ç-XOÖ 3b4ÂËŠ"ø @\_¬g™N¨"˶ëá-ý'&> Éå=†·ðàcÛ¥B̰`ÌPÑC@)ökp¡å)QÂݧӖžy¤”šBVçA©ÇC;šQK.Ü7žŒ³~Kö£;–£˜FX(¢Ää-Š@“ÌҚŠ-W *Ië¯ÏVKÖ@9x@ÎúÀL†$s­ù@ ‘^¸ý|„w:‚Da«!†—¡V•fL0¥ÄÕ/õÑeµ‘ó7¼¸f”Ø‘7ŠHì·-I@KX ¢iŒ™ƒ È”L µ‚%«‡-¡±1×ï'˜÷ðà&˜9X>Qà‹·Œ²‹8“®1®?–Ö2¸@7Òšxðp€R³^fÞׄqd"†<íñöIÀÌ+÷uÎúÄÏÀ]q†sÍJsõR„=ÉãÂÁ-x1á-ØŒ±õ8…¬‚K¬K6ŸšUíÓ^çžøt? 
`Ém†žäp ‚€ÏÛbšUÝ@K 0〰7°¤ÄA@vز±\„“— À)€€3Ðòž‚Ãì­e€¯÷’tZ]€xÂ.à @ˆ«Ëà Ä;…(âM!ìÂ-×1ÄA‚.f¨Fˆ7 üB‰jôÙO¬€³Ï°wÔ"}(§á†Öò“KÛQµ:„®eTA ×]GÒâÕGHÓ­nqá©à<¼Väâ!±A€ ¼v§<¼;¯„<\SÌi÷– 8„ gŒ+Æ£ª§ðVfÒ‹q.Dpgwµ35)ñ2Ø…äÇ)ÄOÐE|÷çŽß^ô€CÙgNzx¹iSÈr9‡³ĘG~v’p˜fö (ÂɳN…"@®Y;€ïÓÎbøˆ“GNN@Â)À)\‚¨q½ÈÃà#NžI6•›S ’pÒq“ÃUrÌø’°bØx -’à•@¼áR…8¹ H$á* bI8y&I s* àÔ·Ûb2œ›€'É!’ÃwÇ$©$’ðV/ ‚$`: ’Ix“„Âàf‘~?- ßûõªåÈof§ ÿ›òzn[ú±€~8„x×WDÒÁxøÊCÍ€AÁxxM@ãÂìˆ 1ÃI€$|å¡”Áx¸v2#@ˆ70A ÞÅ}ïïð(€W‡‘IÈ1Ožc"§€"¼¹ˆKÇ`\0›‡9ô`Åg¦ŽBô åéb!(ÂÉç à||Âël›[•P(«"¤‚ððƃ2bƳ{ˆ'ÏQI‚Ox#”C0Ïðœ(||Â×-Iäàáð¸ÂN#þ¸,á‡^]øÞ/Tl¸}s þãöR­s =·ý8OÝñó ¢l‰(›ü ŠV½Oð ð ×S<à<\³†x̳Táà¾äÄà<\cBŒpr¡È ïfœ <¼»Î6Z"H¥Q:<¬ÎƒôlN?€D ‡î<@£„“™ ‚A¸z"f1È ŠpÖók `åá„8ð -AbxWXá!#@ ¯ŠÐ2ªÊ8Ú¼ä‘V>P„E(ÂÉ>Ï•ƒ€sÜ"Dˆ ®J 4\{MUà<\y@¸pòp! àe@À‰·ZêFÈ ¼öZÖÜÀÃaÖšúÞŸl6ð®þÂ~\ ñSwù½_\z¤Ë?ŠxófÂÑ3»rÛ$Ðõæ[ý < Ý+'À@6å (x{aA"xx?¬Ð¢ó»0Kv*ý @öç.VÀÃÕCŒà¡»Mˆáä½AËzLDˆ ÞxÈ–ƒk™#+U¡+ú„–4F€‡åKËÙá!jp© "†åy0"ð¾ò jiEVg9ÊŒK^_ó,Í/{,yއUE¹ä€R´c0XQZâÂÈ1OÄž÷–Â$À¨>W[{m7öÄY)¸§rÞcÅ€÷:Ö$ Å ä @.pÚKÌ“€MÐQZ¿£T=‚AxK!ªzK%að°üs¡-‹“ºR`<í"dU„„c7~É7!4 ¶ æ% Q½¾Oàá­¨`#îàá8þ¢ãQñ‡ð€ˆa/¼‡K,C/ï!Dš/~Â'À'\›MÀû¡)fÜÿCeñÄYC™2a¦ý´W¸©†7œÓZß'H¤!‰îãbÆ–Js±Œxïñ¼ÝÇÚÞ‡Øz:o%©¤Ò s–çÁzfÔJ=¤3.σSG¯…Fdbfñ´»ñ,JƒW¹—$ [°ô‘x½iE¤Ç $Qá2ÿšX¦bx ~ÅÌÀµ‡€RQÜCXŸ‡–3z¬cäÖ<@¦¨=<(©`aý³Š-<»¶_—ç!¼¥Ž "Lè>€ë‰25±ûxÞM'ÖÒdGï ó_x0ñ @7ú´÷x›Xd<y„û-uçí| tÎú",‡ aÜT;@ˆÞƒšN(wžÃ‡—¢ì¼~™±åñ.IOGkzÉ¡%F(£šÿaù¡å3W„²@N»úÈ•1£ G(,vÄ2†r9Z §]s‘AR6ÐjX_ZFšeX&b„Áz|ÂŒÝPGXÿðjvÄŒÂQ!“Z§}®A8ɈAÀ’ôh@ * œ·–8  ñÉD=ü5)B¹RÖ[FÔDfÒèx®á<ôø ñ­Û€ˆaÉ:B‹‡žP„(BuŒ°Š©T&”–\•ï •HÀŠIdK³ÉR¨ íçåyЖ›jâ:†06£—ç¡FKˆàîÁ ±dÒÐ’6z‘¤¡Ì¸þsË=Ù¶¡g& EXP´%F(å˜ñïÀväJnõÉ»ÌP„#ïB+Í,2p/åŠÐ’E*«Q0b„Ó()[$#F8¢SKŒ`1y(´Nû ¨Zq¹`dñ¼>Á‡š%*IXó(&yˆ°Öðƒ .›LÀCœu¤Ù†Î0ÑŠ€‘ÅTµSN¬꬚YDeñ [÷)WO;Äle¡ÌÈ"5tÌ#¸Žð¨,.9ÄÜ1Åì¦aÆ EX?kèàa†j‚E§5'”¸…€ÈŒƒ)˜YìÀ¡bˆ*’†%Ë­†õ@aq¦ ½£ŒeLCðDìioªå&z`Ñé»Ð 8”…x%ήžv:¥¬†Jbá†ÝCF¹ŒAj4îßK û ?d7þ…­û'Šðƒx`§B#éþy5(B§"T Îäð.á‡)ÂþOþååå ó–EÞËCþ1EøëßæŸüõo¿ýþ­ý± ÿå×_þü—OmûG©4ÚËùõ—?ýõ÷_/§Ë¿½ü§{¬—·^ìüf’€7Ó ý³¨Åÿm»Ÿ¿üJ~žÙüòO÷ZS…„ò3Öüèëþ„9ïý<`[ÕmÛº˜nÚ÷ç÷õ4_ªŽ‡Y³n|©ß>“õÞš™ËZó¾Ô}mK^ͶթÂiÉ>í;m«ÏãSõaº«·|ªÚ÷­9]^ÖšOñ¥îj[úv{oÛNÒzwMŸÇ§ÚÃtWoùT׬i¹¬5ŸáKÝ×¶TÙl[ËKãíü½ãö<>Õ¦»–wÝÛz³¦š ßÉš"awûT{VŸº¯m™«Û¶[fC3–qoEɞŧúü§}˜5oúTúÈš4v²¦gXÕg¬yü/uoÛÒ5ˆ=l;ÿy^2K#¿ïæÙöyž/õAºûõ_Ê7>õƒÚoŠç²Ö|Š/uWÛÒ·ímÛM…ÅhZ×ïµíóøT³æ-Ÿªú‘5cìeÍ qgôûk>׺¯mI¬Ù¶´©°[z ¹Ó¶ô<>•¦»t˧~`Ms©ÜÉšÂwÞnøŽ5ŸâKÝÕ¶ùÔl»©0k–Œ{}*=Oå‡é.Ýò©’XSÇXÖšÏð¥îk[j¶-oÕÂ"£ ºÓ¶ü<>•¦»|3Oý ;ƒtYk>Å—º«m?ì§îcÛ©ÂÎÓ_q·mŸÇ§ÊÃt—oùTûИ¹¬1ŸáCÝ×´ÄÞlÛ—AQdÉt§måy\ª¶ÝT8kFLwÞîŠç™ù‡Í…Æwf~ý#k¦.kÍgøR÷µ-±4Ûv›-#7“{U˜žÇ§ÒÃt÷æÌ¯}Êd‘ïdM)ÊŸ±æS|©»ÚV¸Ý¶› «—ø½¶}ŸÊÓÝ›3¿]fÉ¥îdM¶ þŒ5ŸáKÝ×¶ïÑìa[æ—²>§ßÙð3\3ÿ÷ÿüíëiÊß¾EâË_ÿóo¿ÿ÷/ú¯;/Sò—/ÿÿË”ñî'è»f>ÿ®r‘‘Ê%ro¥‚ŸïRé1xß°ó"*‡•|†ˆ#jÂOúó_æ¿©… ¸u¾xçÇs¿ °³Ÿð¹Eá@Ѕ€`–{ïh>‰&üë/¯|îpõ^J`0ü9 OÃÃð0< Ã?¹á™`x|ñ+þð¯Ô,30£nàœþ_aøsú?„ÿBÿOªûøúñõSô„}V*w®éâóïñþÙbxO£|ÿKb@ƒžè?çO4¬êF(cQ…XÈ 4¾Ò þÿte¡¬ÿÃÞ¹¬ÚvìføŠÒ]ê&öy…@¤qp ÷OÍÀÞkØn­5µ5\¿ n¸áËÔ7¤_*]÷]÷Þ'dµµ`øAþh1üIö„ RolÆßƒÁñüª †bÀá=ÔÞeÀ`lâ׃º†ÔÀ Ø’øÉk > ÿïKü,ÙiÃñOuü=ý>âšI(üÎM [¼¿ÊƒÂ@=ÄðøþßÚý“øþ¯üþU`ø+ /Ã#ñƒá/šâ‡Ýï ñMž¾Ä%þ¹•ž¿Ï®¾Ñé9ëürÜ¡à?Ið·Ø]…vì~[£—Z0£µs’á³'à;‡—Ãðƒ ß2Ó§±·0fúÆ*=ï©é‡e¢Ã÷º‘Þ#ïMX`÷!vç¶’~•&CéMªðµtoÚg‡Ò›4¶g=†?ÞQ̤í:Ì~»¤ ?·iï!àãÿf¯øüñù7½äàû¿ô¾ÿᣛ„Ïå=|ý—~ý=ƒ.|tªû“µ÷ø}%ÏFŽI~¿Gö+¾øëfô¿ÁðˆøoÜÑiÑ„AQ 7MõÚs…Ó‹ƒ »s7ö<¨¦ÿ•\»°©i’ݽÇð¨û\žþW©ö6 Å —YGs¿ìMôÅ-®ßÜë×dxp,ì[þ·ìÁ (6š¼'}ÿ=†'ÝŽiþ¡ÔÚ-iàÁ ¼Ðò}Û·lV;žŸÿH b5±(ßøú/+Éùµq k˜áã!†‡ß£ß÷–™K¿ÿùŸßþãÿ·ÿúïýÏÿí·¿ÿã⃭ `0=™Ÿ…';¸µLö 6)~µx¼©‡‡×5·Óy èy"rº|Bä<4¹‡ ŠÝvÛAv¤ëu+?Ÿ¡¥§\˜yÄ"޼Îíî‰E¤ãiáö–p‡[ZLΖÛB´_÷Ÿ! 
É‚€áQB|ˆoŒ•ÞƒjúHæ¬ &éùþÒ´`þ²c`BÇô›Ð@4Éð=vw¶DÛøL |EËi!Ý‘O…óŸ†zúÉHÍL0T„x„w½Ã# .°690J=9‚×ÉFð<<ö• §ePC·îL$:™\ ¹“ƒD×­ÑL+C©hä{AKbh;(Sä÷N‘ÛùÛA &Fë!@“OF÷^$0QCypè“Q ¦†¶‰ôt ïŸ}3]@ËȈU’áÍp:é-©¡o‰íÆóÐs³Âi{z Æó=iãÉë¤ ¨ª´<$ûùÁQ3y¿ª%]Lݩ̳y ÅÜ’:¤—“nð0ž‡žå#!QŒ¾‚‘BtUõø…¤Wg”ÂÄ¢RË‚â8ª÷.«-¥mŒCf8tØã<‹ o»N[Äa³ ’…ñÉ‚µ¬±×Mv’Gl¬Û‹Þò­›Õ“1’0m) Ê)0’0ÖT ¢¯¨0[#ðÚÜ”O)xkôÒ-†7Ë/ŠCþ[¿xnY`¬jY8j1ô !çïáÀCzp*öZ2ßÂA¸ €@0Hh‹áëuæå¡¡À×®Žž#µM…C×÷í2;†“ûa6iäxb‹´×JÛ@;ÑØ Þ¢_£êF˜H™ÿl=~áu÷’\W4ßIŽ`þ(RhV‰ÃØCÝ‚®¦D!8¢ðVp_†%®‰ ñÖEvjù*Áld%õx„ª¯^Aïç!µ\ESÏÜ'Y„@œYAŠÅ=qÂKK2à¦Ç‰--‰cìí*PŽ#ßZjÊAZjXe0¿¦Ür;Kƒ¥¬0°:²¸ÜC€ì#q=íÖe¨£Í‰ãqPêáA5BЀ2R$¶¼7†“Õ†(Y`–<·cùåø4Ö‚™ÈÁÃü´‘{x°€f|Õ£’Ø6ºÕ'U¬ÇðjJH.®(g¦'€‰+ï:ÈWýÐ ¾ÿ¶.¤$=ß~ Ë`êÌzõ¼ $‹1AüM:¢Õ2’²“¡$-u¡Ô]åŠ:ÀtÌ{´ Ú6Ƕۑ¢°¥2˜æA”ƒ*A=†÷#6!˜™ Ôê)–¼žÐbzí-{ÝND‹éø…çZ-åò½óß {V]Vÿ@(Ž ÚCA'žŽ¥À²¥‚TÉç'å‹=a Ž%‚á/k9?·¸Þ‹o›4:?wÅv,¬¹ÍÕÛ¦ (h½¡7öJëq*‡8S9ðhâ Œ;Îçúƒ–=†¶_ûL5 õ ·Ø=…‰Ð%0±¬-_>íÚ"èéZ4‘3©€€[;…Œ˜Ëï‚ó{ƒZxv'¨ÁI¡@z –Œ–¡‘mä=Ÿ¾’r`ŸñxN¼îá¡<÷°§¯¡Ö„){¡§p~ý {²Ç×~*ÅKÂÐ òñ MÒÑ•“QIYEè©$O àÖ$‡w¿¼»à<ðîñ™zþ m0U´ôQ…'ã}a¤2è€7IVŒÇA$zxÈ×Ý<8„k™¼¾x7¡ƒ‡¤ÔvŠ÷¦ŽìÛ-1|vÝ ‡&™ Gœ™#Ê¢ž‰Ù¯ vp×9Ù¡ÛЀzï4¢i#Ì\@ð²P ε§’‡b°›N¨›³•cÁ}JÀÕ^}0ü Ã{‹áÍ‚SéCs@_¡ÔÃArÀA å…Ø]‚ö†Ýç4ö4ˆxì aþ²»¶Çð¶%†ŸÓ&=v¯M¤°û OßòÂë™á†U³sïõ|ÿ¯•ä‰Ó‘R¿ÅÄVaAOðLG ´zž{ãàvþ€#˜˜´$ýñšÂÁ²ñÇ)jQËS .Ûù±{Û‚C$7ÔáÌ‹E=( ŽØß[ ÕRÑù3¤ÕÃy¸(<ÂÄšQ>„x„–Rrµ´ …oò¶ÊñW ›–¾–N9x¯¢'‡r!t“ ­0ûÒèñ 'P(&ŠF Æõµv28€w>1UÏLqd抆r`Ë{öÏDÙ!%„‰Á["BÒI´@ÀÄ“"e—¯xDÑ€²Ç#¨)ŠÓ÷š×bné>Hsª@¶8I´ØÝ2Ð84Ià¥Õ#B¸ a¼`¬žNõÌ]†x0¨¥' ¤Óvl!˜”"¶¼åkûÌÆÕ»I_|“á•6% ?ÈðÚcøÜ„œo”«oi*¬M„…×­®ý¿8 ›«¯];66Lrõ-c†õºiLØ/=t·X-÷@•JØ.:–ƒè©ë;98ËAµL•”H°¢Kx,é=þ@Š´Ð,:–ƒc 4) Âu×Ê·~ñp5Àûö¢&‚Í ¯“JCM†wICsðÅ5¢ÌrÇ2²{—‘ÕILp£úâ‚r*Å剈ïM¶Ññw]ã×1<:þÆ}VµÜ9†u{Âð“‚½µžŽ£Gñ(¾Œ{">)ÝWòÁXðg‚e£ös±êçã·]õ­Gÿmˆ¿Iâ¯ÇçÛ–#0àóo­õùö-bØ2~í–ñC€±#ê_7ëç;ΧÏ8<9鶈÷žD†¿ï‹OR&<ëÜÖÕq o^Hð.Ìì_3‚N¾ûT})™bž’¸£Ãgºaž’«oQõ´7qÁð“\}Ý•5Q¿¹NÔ¿V³mŒi2¼öþ5© ÃOÒv-võè\×—ì^!°ûuÝØÄ*_\ÐÃo7¶Sá_wz°õ¾$^ÎO(ÛM2|ϯ'—C2wÝþÕ×-.3ƒáï3¼±mGÆuo²'‡7Aß|}}½ ÿÍu»ž/Þ_–È㯛©#· C_Œ÷L þ>UªV…qú‘Ø*î)ç„©c˜úÞó*‡€×ñ<´cÝ×–“T»Ÿþµc6”ªZèÈdxíNŒZþ}ª?KOÂŸíØ¿fè±7ùâê>ì À€$ýs ²ï˾®‚ï­0 ÍÃcvÇ‘Ý;ÜP;L ÙÃȧ‡–ǧ¢­IX¡7´¤‹{ZÕŽ6(Å8ÃÌqp¹èƒG ˆ©òƒá½iܳ]÷;x€Gx“GhÙ”ðÒ²Lx„Ë5B“Gpµ@Êt ˨ˆWŠ3î´Í_˜cÁâ=Ü^HBH@HøðÙ2 2ûZ”@|hÆ$H†ËË .-¥E‹£G@ÀÀ(°{G))t£Í„ïñ*ièF»øyÁ˜5ë×ç/Pë‰ àá-뼬eW €xÌðcx<äSx€„|òH <ÂS6,n‚CÀ ^88„Ïщ€¶vxx„<¸<ÂÝU8„ŸeƽB!0OMÛ=Â<äE2ÀxøtÀ…PM¸½ké×iÅ¿}ú¯Í­ò»–YðòEуËÚ d\Û«x;6< ôÀk‡ø…©Iïj}³O¾88øS TÞUyxH„TÒ<€‡©d.KÔš x1C2‚ƒ#SS€d|—d„'€DøYUòÕÕÎ0öÞ®×\Ë‹ÀÁõìåôëžþöŒˆðgÿK–¤ú»Ë¸yEÙÃËÿ¸ðSZå  ³ø§>àáÓ…Éý j¾kîºàn— šKR¡áÓ»DÀ/€‡Ÿ<ì•Mï•â @ØêÙ åxûI)0ø™*®Ï´é¾$0]m¶i—w àöU 5€:Á§æ•ýë*Ë¿âÏ~Àc¨½·Îè×>YŸÿ°ÝÓ ûÇÿ÷Àg\¶ ľ¢<€‡ŸÅ? yW ðh]\~F† ·~V&diÓ%[ñC•ªPŽ—+G…G¸~àRV6u¶€ƒ‡tCwMbˆÙÛzºz\ÀÁCƒ47wVO8†±ŽA{úáÝ„l ÃxÇ`=+¡ÄpÇ`ôÇ€¢Â»ež¡áS2ÙÓÁ—0õéaQ\\ÂG“l’ˆëGéü¨E`¿ðÑÂB wgÞR`,±"U0q›'¸¼+!BTÁ_è¦<Â``ðð‹ €Ãr/SC„¸92@´Ôj³r¢mm$.o[cp¹ˆ‡YØ" KÀxø‘7ÒjY¯ù¿ìËŽ]×q†_%OpP÷Ë8“ õ†ÄÈóg™dw$Êi†ìRí^?©D$›ë;Ý«$J£Õ`!Nõ$ºÂ>ÂF›à>‚$‹y2lÂþ:#ŠP¬LêãöÎf{TñåžWñ:Š ë­ @ˆ tdmÂQÅçã 7‘„zX$%ip´Q˜q TL ÃOËyðë ­g€(ºËsM—BôŒB˜´£±õ£S<”Z ¯uÿ½!<É ÛsÑüpá¡Ù£±Ü}cL1¤í EØ~2X<2'MIؘf€Sˆ0yn¦±Õ’‘iÜIÀÄ^’HÃ^ýkØ,Fxh&.”#·{‰ùHæ Ü„Ñ÷¼ÒKÌÊ=’°þ*˜\“¾€è¦Äšý@ÈH¹Z“Ô {—¶ÑI6_<4q }a=3»|5Y­°†kçq°„›°ªsùè‘EKu}C^a{=Ú/E0 ^‚­ …°í8Û¸^ÑÀxøÒѦî7á>ä—tAnR›º\$œ~$@Àá{¹ §çÞ¡p á~û‘4c,BMKKBf¡â´’€º 0 #+œsäô§”³¢q;ò(¿ °ïe# ’Iø, ×»t.ˆÛ,è$ƒBˆW$¥NÃáehØH—24?h$Û n“iˆ.ØÜ(@À(À(×.ú!Õ„ƒ%A¬HÂIȇMíÿ‚$¬  ’Iøb"¤Šp¸"Šð’J@Ô€Ks—¨Aõ'ñâ3<ü¯–H•âuÇ‚ÓW FÃËB…èúìBüîS}å³ýÎø|õ÷>ëÆpІ:*Ä=€pó›,ÅÍ×”ƒ€Óo‚€­8Ý °ä œë&8›m¤‡úÄC?(apæœllÂ'òa€Ã'ôá„‚(áSM¹î\.ñ§ï–„ßûÝœ¸ÛäGÄ3&äë/ÙŃ]GððbP²&Ó‘bÁDb»@èHWt¤^ÁMà&ê~hž¤Ò„®ÌRÆžéQ„åi)~ÐÈÒ¦ÈKH`#nÐéÎ3 Qʆ4ÕúQ*y¸è uYŒ€Ó°1s=ã6vœ†õµ¬ÐE(!bG±R|†€L&¤÷‡1ÃVra¡"ÄHî¹\ûú?P„õWÊÒFx¥j„‘û·üeŽ$š*UÜ@¬?P#8\.ƒ8j•Û=ÈëÕgbÊnrÃÕ¥©²¥®°’¡ûcÊ‘–éhUãDH±¿}aæâP´±6òŽ;mDÍP.ZÔ¹µ¨ëóO 6.àÐúòH‘k>·úÔáJß×ç Þ‹! 
Í´l†²Ú… œº‰-º®X l”€‘ŠR?ÛÒ¤‘Ú?ÿ6RSJ"«0Ô”ö—y6Ê„“¸ÒEÐ.7Qà$»µ?/Ð2ªëy°‘ùÇ$%WBaeúxFÔ¸}hûUg\#m©}GýܤAØ5öîå¡àa=:ã2Fp BˆýCû)År{ÚùA7áä;y>R–„bˆ•ÌÄ­"ŒVõí³ ýp‚©TyÆs+Ì!ÝXšs‡å›6„ô3ÙIØx>x¤!­´< ë3K1’id÷ËK€‰XÏCŽÌÀ§<ÝFF&áØ)%ŒÆõ¹Å¹uñP‘¨Mîç¡k$†NÇüÒÉ·”L›7×ïU{ÔÌ̃R«a;÷¹'=RE=ÓÐÇžžNu½Œòë÷³ ñÐfŽÁÈõúÐ#‹ÔRÃY•‡SgåS“<“ç&µ•à"ÜÀE0àáEDx†‡Ðvð°>Ñ,#ç<Ҟ˸…‡ýú#¤±¸ã¼Ë~ÊGR ö<æÑ‚YïOúL´;)§ý<ÌÈC»c9ß¹Sõ–!JØÒ¼’€‘ƒ5ñw&aFæ¦c¤»Õ©’.ãzœfx(n†°³—m€¦+: Ü`וq}Oœ\é2öÙtÅŽP„õŠP5ÂCµk#ˆ<öÌGe—H¡µ_h¢N]EáìðV*ˆ—X,ÜmP„õX‚“ Qà vüÏ(Bš7¢v* Ï ÁPzÚÏÃHZ©š•q+òà4B“hJ ûÓ#íî— p6öîT„!@Ê“qåa¿"¤ð )èF8XLÑÒ~‡m­#i¥vVoø+a¤üÜù\á][û‡à|$ÐÍ E8¶ÔÐDÜ’H,îF,D‹öqž›Y|&EQà ”fAÅ÷„Ï->69¥%”öG #-‹Ma‚&æƒ}„çÜ#öqÞ¡a†‡"Ã>΃ó—U@õMÌÿà™R‘Y\I@ m ›°Ò&ÈÒá…\âÊñç‘H‘53 ·\Ž­87Ûå`GÎ*Î3Šðl]FæàõÆáÁEÚqtÿé™<{PaÔm¥Ç`3QC0·ã´Ë©·}.,¸°c%3F [X0ÿ¾ß)Y¼Û|yd"÷‘34·â²Ïzr&‚l65dög|&ãØ©\¨;;÷ØBŠâÜã Wô@Ò‚ŽÕõÇÿHgxm¬Ô;X”” ^"¦Z>óPN8z‡ó°#IIËRÔŽ­:HU:¶0ß`çj‘N×÷J¢ma¿Þÿi‰¤•ç¼|‚€`âRÔ!÷§j†ã$,ÇØ9Ô2C€kº£½^,gxˆn´¯îïL°Ÿ1ŠIbea$nŒô®í·!#"/Q„<Âʆö„\ ‰Åõ‚P9b R¿;„€ ¼WZiF<™q©a;ò!¡Ù]h^4Œ„EJ‚âã~Eˆ¡X‰.ÂJAqJ:8`V0á0Q¤1Nu,çA—ûÀà š7áâ½ÒÂ'ìÁfä¡P„OŠ0ä!°'9Î4¬Œ!k„tB¡a½ xÏ(‚i4\„Š0“Ep¥Bÿê~EH "=+ >ÂÊî$! ESqv9þžQ„¢àFóÁi„çAè‚°¦aFºR—œVBÀNر’—®¨‘ IÇYÛD°>kÿFîÉ$²•Ögík¦;‰CˆQãÊ‹3Š‘ˆopØk¤öÈmŒ敊0bD~™j1C€q1¶£¬}ÖñD™ÙQkZÎÃõì5RiwïFõùÜ)'¥¤räÖóà38°EãrËN—q†•gÁ‚°þ¸[¤Ô$k¸wVžFJOêlر 3Ô¢5-4»-‡µ’ÅÇÜî™)=iu j8õ`¼³Q„–îîO4÷HbÑX#à#<úz…¥ðö+‚Ïà Ö­pv,Ì.Âþ½ü=’F0׊ïÛŽ&x`éV²òÌïÓXˆû€½è&´«-mX‰Ÿ‹—)3ì|é^t#b ¾žkÜ™%lœ{ ¨p–áÝi6c¢/ÿÍIû§`eÆ@”nû-Ýž5C@“\ß Ûy°™:ƒ÷Bæ W^ˆ! ÈÓyÄsgZ‚™I°,çdÜ$Q[Ú¿ afˆ)v!{Ïï"àY^Dœ°_z¤ÚYŠÌâ âÆ,%AÃÊ)·‘ÔrxE0,Þ`éòH»âƒd`uÎ~‡!FxHUò@Hyƒ)غ ðn=E¸‹ÇPE@»‚ðy—Ö„ ˆe²Î+]„‰,£éõ‹bæíýj>ÂÃsÊ©ÐѾQF6®Š‰«¢¥ýÜÅbjI¨<Ý ±82å$fnäèVZ9ö¨#¸Hâ¶× jÑ:ã#„F âÆ•Š0ã#d¦šCößr’Úñ„yçTÃLæ «í7È$õDG³8G,Ä x)F‹+yaáæÎ¼ÒHÁM.Qù™E·ò0…MXß2²„Yü¹.#аñ˜Óyù£`£IIxŠRË7lÆEH nð°_z$­äåv„•öfh{F àЮe‰çŽUGâþ¨Ñ†xðdÅŽÅìNáX(=«+k#HE#­xá3‚ å•(=îoW«‘"~Yž±²]m$ÏáÁåZëy(žñòÙÊœÂʜŒP΄ r;òˆ©;$qpî\Ô?b’3Ä5ì?÷#"ÕŒpºc§0²!”а¸3nôêòG‘Y¼ÁRæ!¤”ЯvìáÉ$+CÒ G2‹uý°|„•Š02ØTÊ)†u)ëy虨¡4Ó‘G¸Ãð ÑNr°ÇÎ8t“^n#8´ƒU‰Í/ÀFr„11EÂþ1§˜Qµt…“xƒžöŒ/u@7 :{„ïÊDViåbö‘ü<EØÝ«Fx¨¸Þ듎íXTêj†"¬W )™.éqLƯìNñYÔ)5Üà¾ÛˆÏ襌ÑçF žÂVH4¯´ 3T…âÆý^bŽÄÞ‚å)û3‹#ýjìåÌâJÁG-t#¬\—¢#\*8áØžU Æ!èdfl‚GµÃ+X7Žt D\õI·ýqcÍÄ ENÜò ò2ÃC8+vhk!’TˆœzÀI“E„дºž‡œÉ%¦²)áäãþÉG±ÎƉsN-ÄLÔÑÏyG°‘€ Ȳ :­ß¢æ9ÂCQXà¾× ò>Ry()ö@Êú½«\âh…i#o°ë*ÍðÐÏ“^hb_YZ˜X‡àFbhYݯ­3vÅe"Špì–Å®òËE(Âú.æ"Èâ© ˜sZÙŽ #TZ2ŠÑ7„œàÁE£¥†•ÅhŸ! 
¥ wXÍ>C…&úÕVæ™GLBeUb†Ì9 Ôã9«åEظ@ÉG¨è&,YÜ4HLðЩ–¼âÊå~ #•èh ©FÔ°2±È#T1a¿"Ô„…PÉËF8¦\Væb„€–Bóþ<ðÐ|çØ–qêÚÕ‹íÆjöD ÚyÅEP„ýç[&jO.êáÔ)—‹€ªhG­a¿"Ô„Q}êãÊæ ’/_$XÜ4Œ”ž*˜K‰Å•¥†!ày¸á"Ü@&\„Ve'ŒÆŸ«?‚(ÂŒ" £“ˆÒ¡+”z„eC©a¿"¼ñ±EôöÉÇú6Eøëß®üùoÿõ÷o|íþ„ÿúó_þã?¿ëÑž_JÿúÑ~ùŸÿòç¿þýçÇóãß~ù··¼^}-ðOžOG¾Þ.þÏ·ûÓ?þ$º~Ú¿ÿå_Þúš©œooI¯oütÇs¾õãyƒ·åß.¿|ï·í‡{Ë7¬µéóIUúa¯Ù_óÄ埽fÈÚ×üŸÔ÷}[~[­Gv'Å[},ý86U˜îj}Û'ÕTŒÞé5‹º-¿ç5?Ä'õ]ß–)¦ßöRa#±Ëÿ}k E?ŽMµ¦»Úoj­ýšk_ó#|Rß÷mù·w§ßùmínQóø½}›j?Lw­Þ”‡þòšªBµö5?Ä'õ]ß–=¦ßöRamwKy«÷kǦúÓ]ûšMýç¯É±ö5?Â'õ}ß–Ó‡ßÖŸ*,WlüÖŽÿ86Õ˜îz½icÝKÖ¡­ö¾æ‡ø¤¾ëÛ²ÙôÛ>³…šnoõ—ü£ØÔº¾Úöš_³©ÉÿÓÝÙåFrÃ@øFø+òþK«íM²ˆF«ÁB›~žÏU%©«©MÇÇÒüùJÍf ‹MÛëﱿ z¿ì_÷ž§ŽÔQjï~) ýï Úci–Pj*[ 9ÍvìlÛ.Û:™ -Œ¦oõ˜þK“ŸK³‚RsÙBÃÃlaìl®m6è›l¡N¦B˜ïÂtŸúZ©Ú˜ù±4K(5•íjŸšÄv¸0ëµ°Þ|÷ýÔP*†ù.Ì2ÕdESì±4+(5—-˜f‹Ã…Ù;ð&[¬“©æ»ÓÞ/½¦)Š¥YB©©l¡Ái¶£ÕBJMvÙÖÉT óÝY7”ØV4{{,Í JÍeË‹¥¶4Þ¾0·®m÷ ‚êd*…ùî´÷«¯•ÊŒ¤94½)6‡Oh–Pj*[ð~šíxÇê¶{®Ou2•Ã|—Þ|—梙pêE³‚RsÙÈa¶£)jHww6\'S9ÌwßíýŠóƒi–Pj*[ >Ív¸pwRØ^/qL•0ß÷~×JÕÇÒ¬ Ô\¶°ØÙä°ýjŠ¢Žëm7ÙJL•0ß÷~o½I#~,ÍJMe ÍN³§…Öº·}¶U2Õú¡6ïý®h²%Ñ$h ø ÍŸ¯Ôl¶®z”­¦èµ³qCØœ´çuz¿Ö õyï—ÝЮÞrh^¾kÐèš%”šÊûÔ$¶£Õì›s’½Nï×ú¡>ïý,irMlì²9kÇ«ö~³ÙrãÃlGSÔÐö• u2Â|wÚûÕʼn cÍë‡áš%”šÊVÝO³½›¢×ÚWd›mLÅ0ßö~•V4¥%Ñ$“Ý™^µ÷›ÍV ³½›¢×o&ö]¥bLÅ0ßÅ7g>èµD}.ÍJMe ¨§ÙŽÓB¸,Ø”wÙÖÉT óÝé¼ßÅdîASK³‚RsÙ‚Âa¶cB¬1ºï²¥:™Ja¾Kïv”´ƒeÑô.›Ïܼjï7›-àq¶÷ÛrÙ@ßf['S9Ìwç½__ÑÄçÒ¬ Ô\¶Ë‰g)lï ± *à»l¹N¦r˜ïN{¿ ÛLX EÁ,!ÔT´Ëg9lïMcÛ=‚à:‘*a¶;­ý.\W¨ñcaVj.Z<Ìöî‰önj›l¥N¢J˜íN[¿òúä—Ì K³„RSÙ‚g;\X]Úî=&^¦õË-¬êóÖ[ÍÞš]&ñ ͯÔt¶°jŸÅ³Ï}VØÉ.µî°½?RG©1¾ûëKùŸRnhø\š%”šÊ”O³.ì¢`vÙÖÉÔ fèoÿ{«ß‹fô{Q4+(5—-f{÷DHµé&[¨“©æ»ÓÖ/½î=r.ÍJMe ‹cý$¶Ã……º9Ñ.Û:™Ša¾ oÞJsÑ4{,Í JÍe z-ŽkL¼›à^AéþH¡†Ùî´ô+¯kÜZôCß4ñú’¯m*|B³„PSÙÒy¶cìNsW™ì²­©f»ÓÒïbñ;hÚciVPj.[XdjÛQõfl}ï¡û#u”æ»ôæãTnð`š%”šÊHO³½§wÛ»lþ×WPC©æ»ÓÒ¯.†(5lšEÓ™Œ?¡YA©¹lO³½{¢Ò”Ýv]˜ëd*‡ùî´ô+‹%F~.ÍJMe ¬§Ùn4ö]¶u2UÂ|—ßܧ^4µ=–f¥æ²¤Ãl¿ÆÃ‚€àîêWêdª„ùî|Øïbt³Y·$šÔv¾¢YB©©lQN£½ç’öÝM™Ö/D5Cû·Ø¼‘1¾ÃHóç 5íjÚY[øžõ«Jæ{¨Óú…°f(¼˜õ»Xü^‚z.ÍJMe»xå- íeÂÝ;“©QÅP˜—~] šœDÓØDäš„šË³½k¢ÎŠwMêD*„Ùî|ÔïâÌÁ=zÌÎ?4UõZB³„RSÙÒêj„¶£ÓB »ð6Û:™Ša¾ o^óvÑLX!EѬ Ô\¶ ~˜íkã^ÜÍë†îÔQj˜ïNÇÁÂâš·ÞÕ’hŠ¢‹~B³„RSÙR—Ólï±;ֆﲭ“©æ»øîuäýÂùXš”šËvõ45‡íÝÝuaª“©æ»ôæË©ÚZÂAþMv¾–ÀŸÐ,¡ÔT¶Èzšíx׉«ï²­“©æ»óÖï ¦ócaVj.ZX(å°½k¢¬Fè´É–ëD*‡Ùî´ô»°]&{,ÍJMe»º‘&‰íØØÜçI¾{XÈu"UÂ|w>ê—V45‹&õOhVPj.[v9ÌvÔD®_Ý`÷èWêdª„ùî´ôk‹ÉÍ,-‹&[C×Oh–Pj*[\u s؎͸À¯á¶R«d*†5CáýÖ/ òciþ|¥¦³õ~”-Þ=QG—Ñ)Þb‹uZ¿Ö Å­ß¾:uˆ¾÷M‚Ö|óá8Vmýf³Åól‡ »‰gºË¶N¦FUCq^û5XÒäšÜ®Lÿ„f¥æ²ï‡Ù‚ýuíj/¼²Éêd*„ù.¼;˜{‚ï~Ñ4Bbø„f ¥¦²¥ÅÍ`Ilï‰ëLtýò]¶u2Ã|wZû]=¥Á®Ï¥YA©¹là0[¼GÙ¡´Ž»ë%¬“©æ»Óa¿¾8ûõ^⎣YB©©lä4Û±OU&ëÚwÙÖÉT óÝiíW×4í±4+(5—-fK÷Í`­“lŸAPL¥0ßÖ~³k‘ŠúXš%”šÊÐN³ýš»ÃãΡ]¶u2•Ã|wZûe^ÑÌðÝ š”šË@³½›¢‘ÌvÙrLå0ß÷~+$"nY4ûªò ÍJMe»jÚ¯0÷Ý ×‰T ³Ýù¬ß¾¢™ðÈ-Šf¡æ²…ÅCš¶ò5w‡Èh·¢$u"UÂlw>ë÷5L‘®…YB¨©h¡ùi¶Ã„ÙÍwJeZ¿Ö Åy3”—JµöXš?_©Ùlÿ Ôp¶ô5VÐ;ÛÞY!ÕiýRX3”æ­_Y¼…ÜŸK³„RSÙÂêb°¶÷Æærˆ¶y1ÕiýRX3”æ­ßn+šÆY4±Ë'4+(5—-tÈeû7eÔÜáÒ;././@LongLink0000644000000000000000000000026500000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-7b9f779b68-rhrzf_7313ab95-a89a-4df9-a791-1d048a6beba9/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-m0000755000175000017500000000000015117043043033063 5ustar zuulzuul././@LongLink0000644000000000000000000000031000000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-7b9f779b68-rhrzf_7313ab95-a89a-4df9-a791-1d048a6beba9/controller-manager/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-m0000755000175000017500000000000015117043063033065 5ustar zuulzuul././@LongLink0000644000000000000000000000031500000000000011602 Lustar 
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-7b9f779b68-rhrzf_7313ab95-a89a-4df9-a791-1d048a6beba9/controller-manager/0.log:
2025-12-12T16:19:48.099857701+00:00 stderr F I1212 16:19:48.099690 1 cmd.go:253] Using service-serving-cert provided certificates
2025-12-12T16:19:48.099857701+00:00 stderr F I1212 16:19:48.099783 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}.
2025-12-12T16:19:48.100582529+00:00 stderr F I1212 16:19:48.100522 1 observer_polling.go:159] Starting file observer
2025-12-12T16:19:48.101413760+00:00 stderr F I1212 16:19:48.101342 1 builder.go:304] openshift-controller-manager version 4.20.0-202510211040.p2.gd9e543d.assembly.stream.el9-d9e543d-d9e543dd31e981f279c447e4f92f0dac3f665f9e
2025-12-12T16:19:48.102873597+00:00 stderr F I1212 16:19:48.102817 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key"
2025-12-12T16:19:48.394801005+00:00 stderr F I1212 16:19:48.394730 1 requestheader_controller.go:255] Loaded a new request header values for RequestHeaderAuthRequestController
2025-12-12T16:19:48.398880218+00:00 stderr F I1212 16:19:48.398836 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400
2025-12-12T16:19:48.398957130+00:00 stderr F I1212 16:19:48.398947 1 maxinflight.go:145] "Initialized mutatingChan" len=200
2025-12-12T16:19:48.399002881+00:00 stderr F I1212 16:19:48.398993 1 maxinflight.go:116] "Set denominator for readonly requests" limit=400
2025-12-12T16:19:48.399025931+00:00 stderr F I1212 16:19:48.399017 1 maxinflight.go:120] "Set denominator for mutating requests" limit=200
2025-12-12T16:19:48.405480274+00:00 stderr F I1212 16:19:48.405401 1 secure_serving.go:57] Forcing use of http/1.1 only
2025-12-12T16:19:48.405480274+00:00 stderr F W1212 16:19:48.405454 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.
2025-12-12T16:19:48.405480274+00:00 stderr F W1212 16:19:48.405461 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.
2025-12-12T16:19:48.405480274+00:00 stderr F W1212 16:19:48.405466 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.
2025-12-12T16:19:48.405480274+00:00 stderr F W1212 16:19:48.405469 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.
2025-12-12T16:19:48.405480274+00:00 stderr F W1212 16:19:48.405472 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.
2025-12-12T16:19:48.405532325+00:00 stderr F W1212 16:19:48.405475 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.
2025-12-12T16:19:48.405678709+00:00 stderr F I1212 16:19:48.405412 1 genericapiserver.go:546] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:19:48.408934800+00:00 stderr F I1212 16:19:48.408896 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:19:48.409013422+00:00 stderr F I1212 16:19:48.408906 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:19:48.409043853+00:00 stderr F I1212 16:19:48.409030 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:19:48.409067714+00:00 stderr F I1212 16:19:48.409032 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:19:48.409114775+00:00 stderr F I1212 16:19:48.408939 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:19:48.409198617+00:00 stderr F I1212 16:19:48.409160 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:19:48.409277419+00:00 stderr F I1212 16:19:48.409261 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:19:48.409387402+00:00 stderr F I1212 16:19:48.409358 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"controller-manager.openshift-controller-manager.svc\" [serving] validServingFor=[controller-manager.openshift-controller-manager.svc,controller-manager.openshift-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:19:48.40932434 +0000 UTC))" 2025-12-12T16:19:48.409632748+00:00 stderr F I1212 16:19:48.409608 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556388\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556388\" (2025-12-12 15:19:48 +0000 UTC to 2028-12-12 15:19:48 +0000 UTC (now=2025-12-12 16:19:48.409583597 +0000 UTC))" 2025-12-12T16:19:48.409649158+00:00 stderr F I1212 16:19:48.409642 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:19:48.409764241+00:00 stderr F I1212 16:19:48.409680 1 genericapiserver.go:696] [graceful-termination] waiting for shutdown to be initiated 2025-12-12T16:19:48.409764241+00:00 stderr F I1212 16:19:48.409703 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:19:48.409830863+00:00 stderr F I1212 16:19:48.409787 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:19:48.410484839+00:00 stderr F I1212 16:19:48.410445 1 leaderelection.go:257] attempting to acquire leader lease openshift-controller-manager/openshift-master-controllers... 
2025-12-12T16:19:48.411379612+00:00 stderr F I1212 16:19:48.411287 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:48.411734361+00:00 stderr F I1212 16:19:48.411693 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:48.411847013+00:00 stderr F I1212 16:19:48.411819 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:48.419202498+00:00 stderr F I1212 16:19:48.418789 1 leaderelection.go:271] successfully acquired lease openshift-controller-manager/openshift-master-controllers 2025-12-12T16:19:48.419321561+00:00 stderr F I1212 16:19:48.418920 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-controller-manager", Name:"openshift-master-controllers", UID:"76e47f17-3362-486e-b2b6-bc3c8eed3185", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"39658", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' controller-manager-7b9f779b68-rhrzf_40b280c4-e6e4-490c-b2f1-6b8dbf8d79ce became leader 2025-12-12T16:19:48.420026789+00:00 stderr F I1212 16:19:48.419985 1 controller_manager.go:35] DeploymentConfig controller using images from "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:472481b81b280ece6218fbb410c2a32ea6c826e5ac56b95f5935fa37773be0af" 2025-12-12T16:19:48.420026789+00:00 stderr F I1212 16:19:48.420006 1 controller_manager.go:41] Build controller using images from "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c2a80d6dd943dbbb5c0bc63f4aa17d55e44dbde22a3ea4e6a41a32930dc4ac77" 2025-12-12T16:19:48.423465705+00:00 stderr F I1212 16:19:48.423420 1 controller_manager.go:92] Starting "openshift.io/serviceaccount" 2025-12-12T16:19:48.423465705+00:00 stderr F I1212 16:19:48.423438 1 serviceaccount.go:16] openshift.io/serviceaccount: no managed names specified 2025-12-12T16:19:48.423465705+00:00 stderr F W1212 16:19:48.423460 1 controller_manager.go:98] Skipping "openshift.io/serviceaccount" 2025-12-12T16:19:48.423498746+00:00 stderr F W1212 16:19:48.423466 1 controller_manager.go:88] "openshift.io/default-rolebindings" is disabled 2025-12-12T16:19:48.423498746+00:00 stderr F I1212 16:19:48.423472 1 controller_manager.go:92] Starting "openshift.io/origin-namespace" 2025-12-12T16:19:48.426561623+00:00 stderr F I1212 16:19:48.426496 1 controller_manager.go:101] Started "openshift.io/origin-namespace" 2025-12-12T16:19:48.426561623+00:00 stderr F I1212 16:19:48.426537 1 controller_manager.go:92] Starting "openshift.io/builder-rolebindings" 2025-12-12T16:19:48.429279541+00:00 stderr F I1212 16:19:48.429234 1 controller_manager.go:101] Started "openshift.io/builder-rolebindings" 2025-12-12T16:19:48.429279541+00:00 stderr F I1212 16:19:48.429257 1 controller_manager.go:92] Starting "openshift.io/deployer" 2025-12-12T16:19:48.429416625+00:00 stderr F I1212 16:19:48.429385 1 defaultrolebindings.go:154] Starting BuilderRoleBindingController 2025-12-12T16:19:48.429416625+00:00 stderr F I1212 16:19:48.429408 1 shared_informer.go:350] "Waiting for caches to sync" controller="BuilderRoleBindingController" 2025-12-12T16:19:48.431280861+00:00 stderr F I1212 16:19:48.431241 1 controller_manager.go:101] Started "openshift.io/deployer" 2025-12-12T16:19:48.431280861+00:00 stderr F I1212 16:19:48.431259 1 controller_manager.go:92] Starting "openshift.io/deploymentconfig" 
2025-12-12T16:19:48.431378004+00:00 stderr F I1212 16:19:48.431351 1 factory.go:73] Starting deployer controller 2025-12-12T16:19:48.435416605+00:00 stderr F I1212 16:19:48.435374 1 controller_manager.go:101] Started "openshift.io/deploymentconfig" 2025-12-12T16:19:48.435416605+00:00 stderr F I1212 16:19:48.435392 1 controller_manager.go:92] Starting "openshift.io/deployer-rolebindings" 2025-12-12T16:19:48.435530238+00:00 stderr F I1212 16:19:48.435497 1 factory.go:78] Starting deploymentconfig controller 2025-12-12T16:19:48.437566669+00:00 stderr F I1212 16:19:48.437522 1 controller_manager.go:101] Started "openshift.io/deployer-rolebindings" 2025-12-12T16:19:48.437566669+00:00 stderr F I1212 16:19:48.437538 1 controller_manager.go:92] Starting "openshift.io/image-puller-rolebindings" 2025-12-12T16:19:48.437644041+00:00 stderr F I1212 16:19:48.437615 1 defaultrolebindings.go:154] Starting DeployerRoleBindingController 2025-12-12T16:19:48.437644041+00:00 stderr F I1212 16:19:48.437635 1 shared_informer.go:350] "Waiting for caches to sync" controller="DeployerRoleBindingController" 2025-12-12T16:19:48.439776395+00:00 stderr F I1212 16:19:48.439726 1 controller_manager.go:101] Started "openshift.io/image-puller-rolebindings" 2025-12-12T16:19:48.439776395+00:00 stderr F I1212 16:19:48.439743 1 defaultrolebindings.go:154] Starting ImagePullerRoleBindingController 2025-12-12T16:19:48.439776395+00:00 stderr F I1212 16:19:48.439747 1 controller_manager.go:92] Starting "openshift.io/builder-serviceaccount" 2025-12-12T16:19:48.439776395+00:00 stderr F I1212 16:19:48.439757 1 shared_informer.go:350] "Waiting for caches to sync" controller="ImagePullerRoleBindingController" 2025-12-12T16:19:48.442097913+00:00 stderr F I1212 16:19:48.442006 1 controller_manager.go:101] Started "openshift.io/builder-serviceaccount" 2025-12-12T16:19:48.442097913+00:00 stderr F I1212 16:19:48.442023 1 controller_manager.go:92] Starting "openshift.io/build" 2025-12-12T16:19:48.442097913+00:00 stderr F I1212 16:19:48.442056 1 serviceaccounts_controller.go:114] "Starting service account controller" 2025-12-12T16:19:48.442097913+00:00 stderr F I1212 16:19:48.442075 1 shared_informer.go:350] "Waiting for caches to sync" controller="service account" 2025-12-12T16:19:48.448450162+00:00 stderr F I1212 16:19:48.448350 1 controller_manager.go:101] Started "openshift.io/build" 2025-12-12T16:19:48.448450162+00:00 stderr F I1212 16:19:48.448370 1 controller_manager.go:92] Starting "openshift.io/image-trigger" 2025-12-12T16:19:48.510011068+00:00 stderr F I1212 16:19:48.509382 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:19:48.510092870+00:00 stderr F I1212 16:19:48.509488 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:19:48.510102570+00:00 stderr F I1212 16:19:48.509504 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:19:48.510399888+00:00 stderr F I1212 16:19:48.510351 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC 
(now=2025-12-12 16:19:48.510313076 +0000 UTC))" 2025-12-12T16:19:48.510600573+00:00 stderr F I1212 16:19:48.510566 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"controller-manager.openshift-controller-manager.svc\" [serving] validServingFor=[controller-manager.openshift-controller-manager.svc,controller-manager.openshift-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:19:48.510551002 +0000 UTC))" 2025-12-12T16:19:48.510764927+00:00 stderr F I1212 16:19:48.510733 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556388\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556388\" (2025-12-12 15:19:48 +0000 UTC to 2028-12-12 15:19:48 +0000 UTC (now=2025-12-12 16:19:48.510720236 +0000 UTC))" 2025-12-12T16:19:48.510907561+00:00 stderr F I1212 16:19:48.510873 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:19:48.510862429 +0000 UTC))" 2025-12-12T16:19:48.510907561+00:00 stderr F I1212 16:19:48.510894 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:19:48.5108864 +0000 UTC))" 2025-12-12T16:19:48.510916511+00:00 stderr F I1212 16:19:48.510908 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:19:48.51089941 +0000 UTC))" 2025-12-12T16:19:48.510949892+00:00 stderr F I1212 16:19:48.510922 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:19:48.510914631 +0000 UTC))" 2025-12-12T16:19:48.510992233+00:00 stderr F I1212 16:19:48.510946 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:19:48.510936351 +0000 UTC))" 2025-12-12T16:19:48.510992233+00:00 stderr F I1212 16:19:48.510967 1 tlsconfig.go:181] "Loaded client CA" index=5 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:19:48.510953912 +0000 UTC))" 2025-12-12T16:19:48.511007273+00:00 stderr F I1212 16:19:48.510988 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:19:48.510974962 +0000 UTC))" 2025-12-12T16:19:48.511015333+00:00 stderr F I1212 16:19:48.511008 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:19:48.510995273 +0000 UTC))" 2025-12-12T16:19:48.511528796+00:00 stderr F I1212 16:19:48.511025 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:19:48.511015093 +0000 UTC))" 2025-12-12T16:19:48.511528796+00:00 stderr F I1212 16:19:48.511046 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:19:48.511039614 +0000 UTC))" 2025-12-12T16:19:48.511528796+00:00 stderr F I1212 16:19:48.511080 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:19:48.511069195 +0000 UTC))" 2025-12-12T16:19:48.511528796+00:00 stderr F I1212 16:19:48.511282 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"controller-manager.openshift-controller-manager.svc\" [serving] validServingFor=[controller-manager.openshift-controller-manager.svc,controller-manager.openshift-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:19:48.51126923 +0000 UTC))" 2025-12-12T16:19:48.511528796+00:00 stderr F I1212 
16:19:48.511412 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556388\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556388\" (2025-12-12 15:19:48 +0000 UTC to 2028-12-12 15:19:48 +0000 UTC (now=2025-12-12 16:19:48.511403673 +0000 UTC))" 2025-12-12T16:19:49.028382794+00:00 stderr F I1212 16:19:49.028312 1 controller_manager.go:101] Started "openshift.io/image-trigger" 2025-12-12T16:19:49.028382794+00:00 stderr F I1212 16:19:49.028335 1 controller_manager.go:92] Starting "openshift.io/templateinstance" 2025-12-12T16:19:49.028463786+00:00 stderr F I1212 16:19:49.028380 1 image_trigger_controller.go:229] Starting trigger controller 2025-12-12T16:19:50.026647398+00:00 stderr F I1212 16:19:50.026588 1 controller_manager.go:101] Started "openshift.io/templateinstance" 2025-12-12T16:19:50.026647398+00:00 stderr F I1212 16:19:50.026608 1 controller_manager.go:92] Starting "openshift.io/templateinstancefinalizer" 2025-12-12T16:19:50.429431252+00:00 stderr F I1212 16:19:50.429354 1 controller_manager.go:101] Started "openshift.io/templateinstancefinalizer" 2025-12-12T16:19:50.429546355+00:00 stderr F I1212 16:19:50.429531 1 controller_manager.go:92] Starting "openshift.io/deployer-serviceaccount" 2025-12-12T16:19:50.429628777+00:00 stderr F I1212 16:19:50.429391 1 templateinstance_finalizer.go:189] TemplateInstanceFinalizer controller waiting for cache sync 2025-12-12T16:19:50.627530086+00:00 stderr F I1212 16:19:50.627430 1 controller_manager.go:101] Started "openshift.io/deployer-serviceaccount" 2025-12-12T16:19:50.627530086+00:00 stderr F I1212 16:19:50.627453 1 controller_manager.go:92] Starting "openshift.io/image-import" 2025-12-12T16:19:50.627530086+00:00 stderr F I1212 16:19:50.627495 1 serviceaccounts_controller.go:114] "Starting service account controller" 2025-12-12T16:19:50.627530086+00:00 stderr F I1212 16:19:50.627508 1 shared_informer.go:350] "Waiting for caches to sync" controller="service account" 2025-12-12T16:19:50.831142428+00:00 stderr F I1212 16:19:50.831069 1 imagestream_controller.go:66] Starting image stream controller 2025-12-12T16:19:51.027155559+00:00 stderr F I1212 16:19:51.027086 1 controller_manager.go:101] Started "openshift.io/image-import" 2025-12-12T16:19:51.027155559+00:00 stderr F I1212 16:19:51.027116 1 controller_manager.go:92] Starting "openshift.io/image-signature-import" 2025-12-12T16:19:51.027241101+00:00 stderr F I1212 16:19:51.027217 1 scheduled_image_controller.go:68] Starting scheduled import controller 2025-12-12T16:19:51.226737090+00:00 stderr F I1212 16:19:51.226688 1 controller_manager.go:101] Started "openshift.io/image-signature-import" 2025-12-12T16:19:51.226811102+00:00 stderr F I1212 16:19:51.226799 1 controller_manager.go:92] Starting "openshift.io/serviceaccount-pull-secrets" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.427913 1 image_pull_secret_controller.go:372] "Starting controller" name="openshift.io/internal-image-registry-pull-secrets_image-pull-secret" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.427928 1 keyid_observation_controller.go:164] "Starting controller" name="openshift.io/internal-image-registry-pull-secrets_kids" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.427970 1 registry_urls_observation_controller.go:140] "Starting controller" name="openshift.io/internal-image-registry-pull-secrets_urls" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 
16:19:51.427980 1 shared_informer.go:350] "Waiting for caches to sync" controller="openshift.io/internal-image-registry-pull-secrets_kids" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.427983 1 service_account_controller.go:338] "Starting controller" name="openshift.io/internal-image-registry-pull-secrets_service-account" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.427983 1 shared_informer.go:350] "Waiting for caches to sync" controller="openshift.io/internal-image-registry-pull-secrets_urls" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.428001 1 legacy_image_pull_secret_controller.go:131] "Starting controller" name="openshift.io/internal-image-registry-pull-secrets_legacy-image-pull-secret" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.427949 1 shared_informer.go:350] "Waiting for caches to sync" controller="openshift.io/internal-image-registry-pull-secrets_image-pull-secret" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.428016 1 shared_informer.go:350] "Waiting for caches to sync" controller="openshift.io/internal-image-registry-pull-secrets_service-account" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.428021 1 shared_informer.go:350] "Waiting for caches to sync" controller="openshift.io/internal-image-registry-pull-secrets_legacy-image-pull-secret" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.428032 1 controller_manager.go:101] Started "openshift.io/serviceaccount-pull-secrets" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.427939 1 legacy_token_secret_controller.go:103] "Starting controller" name="openshift.io/internal-image-registry-pull-secrets_legacy-token-secret" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.428047 1 controller_manager.go:92] Starting "openshift.io/build-config-change" 2025-12-12T16:19:51.428231599+00:00 stderr F I1212 16:19:51.428050 1 shared_informer.go:350] "Waiting for caches to sync" controller="openshift.io/internal-image-registry-pull-secrets_legacy-token-secret" 2025-12-12T16:19:51.825829742+00:00 stderr F I1212 16:19:51.825764 1 controller_manager.go:101] Started "openshift.io/build-config-change" 2025-12-12T16:19:51.825829742+00:00 stderr F I1212 16:19:51.825790 1 controller_manager.go:92] Starting "openshift.io/unidling" 2025-12-12T16:19:52.227754513+00:00 stderr F I1212 16:19:52.227672 1 controller_manager.go:101] Started "openshift.io/unidling" 2025-12-12T16:19:52.227754513+00:00 stderr F I1212 16:19:52.227699 1 controller_manager.go:106] Started Origin Controllers 2025-12-12T16:19:52.234717028+00:00 stderr F I1212 16:19:52.234437 1 reflector.go:430] "Caches populated" type="*v1alpha1.ImageContentSourcePolicy" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.234867051+00:00 stderr F I1212 16:19:52.234823 1 reflector.go:430] "Caches populated" type="*v1.StatefulSet" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.235139688+00:00 stderr F I1212 16:19:52.235095 1 reflector.go:430] "Caches populated" type="*v1.ImageTagMirrorSet" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.235325033+00:00 stderr F I1212 16:19:52.235287 1 reflector.go:430] "Caches populated" type="*v1.Image" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.235709593+00:00 stderr F I1212 16:19:52.235659 1 reflector.go:430] "Caches populated" type="*v1.CronJob" 
reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.235998330+00:00 stderr F I1212 16:19:52.235966 1 reflector.go:430] "Caches populated" type="*v1.Build" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.236158514+00:00 stderr F I1212 16:19:52.236125 1 reflector.go:430] "Caches populated" type="*v1.ImageDigestMirrorSet" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.246244187+00:00 stderr F I1212 16:19:52.242760 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.252844883+00:00 stderr F I1212 16:19:52.248980 1 reflector.go:430] "Caches populated" type="*v1.DaemonSet" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.260743901+00:00 stderr F I1212 16:19:52.260676 1 reflector.go:430] "Caches populated" type="*v1.TemplateInstance" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.268272570+00:00 stderr F I1212 16:19:52.265982 1 warnings.go:110] "Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+" 2025-12-12T16:19:52.268272570+00:00 stderr F I1212 16:19:52.266531 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.268272570+00:00 stderr F I1212 16:19:52.267657 1 reflector.go:430] "Caches populated" type="*v1.DeploymentConfig" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.268272570+00:00 stderr F I1212 16:19:52.268021 1 reflector.go:430] "Caches populated" type="*v1.Build" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.276249870+00:00 stderr F I1212 16:19:52.268177 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.280283142+00:00 stderr F I1212 16:19:52.277213 1 reflector.go:430] "Caches populated" type="*v1.BuildConfig" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.290250672+00:00 stderr F I1212 16:19:52.288591 1 warnings.go:110] "Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+" 2025-12-12T16:19:52.309640949+00:00 stderr F I1212 16:19:52.309567 1 reflector.go:430] "Caches populated" type="*v1.ImageStream" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.330236856+00:00 stderr F I1212 16:19:52.326214 1 buildconfig_controller.go:212] Starting buildconfig controller 2025-12-12T16:19:52.330236856+00:00 stderr F I1212 16:19:52.327374 1 templateinstance_controller.go:297] Starting TemplateInstance controller 2025-12-12T16:19:52.332459062+00:00 stderr F I1212 16:19:52.332425 1 templateinstance_finalizer.go:194] Starting TemplateInstanceFinalizer controller 2025-12-12T16:19:52.358345212+00:00 stderr F I1212 16:19:52.358237 1 reflector.go:430] "Caches populated" type="*v1.Image" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.560610460+00:00 stderr F I1212 16:19:52.560484 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.640303831+00:00 stderr F I1212 16:19:52.640233 1 reflector.go:430] "Caches populated" type="*v1.Pod" 
reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.899357525+00:00 stderr F I1212 16:19:52.899281 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:52.928492577+00:00 stderr F I1212 16:19:52.928403 1 shared_informer.go:357] "Caches are synced" controller="openshift.io/internal-image-registry-pull-secrets_kids" 2025-12-12T16:19:52.928492577+00:00 stderr F I1212 16:19:52.928437 1 keyid_observation_controller.go:172] "Started controller" name="openshift.io/internal-image-registry-pull-secrets_kids" 2025-12-12T16:19:52.928492577+00:00 stderr F I1212 16:19:52.928474 1 shared_informer.go:357] "Caches are synced" controller="openshift.io/internal-image-registry-pull-secrets_legacy-token-secret" 2025-12-12T16:19:52.928577699+00:00 stderr F I1212 16:19:52.928497 1 legacy_token_secret_controller.go:110] "Started controller" name="openshift.io/internal-image-registry-pull-secrets_legacy-token-secret" 2025-12-12T16:19:52.928606780+00:00 stderr F I1212 16:19:52.928571 1 shared_informer.go:357] "Caches are synced" controller="openshift.io/internal-image-registry-pull-secrets_legacy-image-pull-secret" 2025-12-12T16:19:52.928649931+00:00 stderr F I1212 16:19:52.928615 1 legacy_image_pull_secret_controller.go:138] "Started controller" name="openshift.io/internal-image-registry-pull-secrets_legacy-image-pull-secret" 2025-12-12T16:19:53.227934735+00:00 stderr F I1212 16:19:53.227858 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:53.228392177+00:00 stderr F I1212 16:19:53.228350 1 shared_informer.go:357] "Caches are synced" controller="openshift.io/internal-image-registry-pull-secrets_urls" 2025-12-12T16:19:53.228392177+00:00 stderr F I1212 16:19:53.228369 1 registry_urls_observation_controller.go:147] "Started controller" name="openshift.io/internal-image-registry-pull-secrets_urls" 2025-12-12T16:19:53.424519401+00:00 stderr F I1212 16:19:53.424446 1 request.go:752] "Waited before sending request" delay="1.195995319s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/serviceaccounts?limit=500&resourceVersion=0" 2025-12-12T16:19:53.431067326+00:00 stderr F I1212 16:19:53.431027 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:53.528879242+00:00 stderr F I1212 16:19:53.528799 1 shared_informer.go:357] "Caches are synced" controller="openshift.io/internal-image-registry-pull-secrets_image-pull-secret" 2025-12-12T16:19:53.528879242+00:00 stderr F I1212 16:19:53.528819 1 shared_informer.go:357] "Caches are synced" controller="openshift.io/internal-image-registry-pull-secrets_service-account" 2025-12-12T16:19:53.528879242+00:00 stderr F I1212 16:19:53.528843 1 image_pull_secret_controller.go:398] Waiting for service account token signing cert to be observed 2025-12-12T16:19:53.528879242+00:00 stderr F I1212 16:19:53.528851 1 service_account_controller.go:345] "Started controller" name="openshift.io/internal-image-registry-pull-secrets_service-account" 2025-12-12T16:19:53.528932323+00:00 stderr F I1212 16:19:53.528873 1 image_pull_secret_controller.go:401] "Observed service account token signing certs" kids=["aV377pYUivc_NpjUTRV8mkI5FRM9rTZehB0Fpev8Yjk"] 2025-12-12T16:19:53.528932323+00:00 stderr F I1212 16:19:53.528901 1 
image_pull_secret_controller.go:384] Waiting for image registry urls to be observed 2025-12-12T16:19:53.528932323+00:00 stderr F I1212 16:19:53.528917 1 image_pull_secret_controller.go:388] "Observed image registry urls" urls=["10.217.5.148:5000","default-route-openshift-image-registry.apps-crc.testing","image-registry.openshift-image-registry.svc.cluster.local:5000","image-registry.openshift-image-registry.svc:5000"] 2025-12-12T16:19:53.528940813+00:00 stderr F I1212 16:19:53.528932 1 image_pull_secret_controller.go:445] "Started controller" name="openshift.io/internal-image-registry-pull-secrets_image-pull-secret" 2025-12-12T16:19:53.627661962+00:00 stderr F I1212 16:19:53.627592 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:53.630103293+00:00 stderr F I1212 16:19:53.630059 1 shared_informer.go:357] "Caches are synced" controller="BuilderRoleBindingController" 2025-12-12T16:19:53.638637617+00:00 stderr F I1212 16:19:53.638549 1 shared_informer.go:357] "Caches are synced" controller="DeployerRoleBindingController" 2025-12-12T16:19:53.640249838+00:00 stderr F I1212 16:19:53.640169 1 shared_informer.go:357] "Caches are synced" controller="ImagePullerRoleBindingController" 2025-12-12T16:19:53.642447613+00:00 stderr F I1212 16:19:53.642399 1 shared_informer.go:357] "Caches are synced" controller="service account" 2025-12-12T16:19:53.728695339+00:00 stderr F I1212 16:19:53.728625 1 shared_informer.go:357] "Caches are synced" controller="service account" 2025-12-12T16:19:53.841232624+00:00 stderr F I1212 16:19:53.841061 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:54.025792388+00:00 stderr F I1212 16:19:54.025747 1 reflector.go:430] "Caches populated" type="*v1.ReplicationController" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:54.031876511+00:00 stderr F I1212 16:19:54.031445 1 factory.go:80] Deployer controller caches are synced. Starting workers. 2025-12-12T16:19:54.036165318+00:00 stderr F I1212 16:19:54.036143 1 factory.go:85] deploymentconfig controller caches are synced. Starting workers. 
2025-12-12T16:19:54.226720013+00:00 stderr F I1212 16:19:54.225980 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:54.249993957+00:00 stderr F I1212 16:19:54.249276 1 build_controller.go:502] Starting build controller 2025-12-12T16:19:54.249993957+00:00 stderr F I1212 16:19:54.249295 1 build_controller.go:504] OpenShift image registry hostname: image-registry.openshift-image-registry.svc:5000 2025-12-12T16:19:54.582828194+00:00 stderr F I1212 16:19:54.582764 1 reflector.go:430] "Caches populated" type="*v1.Event" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:25:04.332758041+00:00 stderr F I1212 16:25:04.332091 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openstack" name="deployer-dockercfg-dm7lj" expected=4 actual=0 2025-12-12T16:25:04.332758041+00:00 stderr F I1212 16:25:04.332732 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openstack" name="deployer-dockercfg-dm7lj" serviceaccount="deployer" 2025-12-12T16:25:04.332834093+00:00 stderr F I1212 16:25:04.332479 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openstack" name="default-dockercfg-vck7n" expected=4 actual=0 2025-12-12T16:25:04.332861144+00:00 stderr F I1212 16:25:04.332849 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openstack" name="default-dockercfg-vck7n" serviceaccount="default" 2025-12-12T16:25:04.332953516+00:00 stderr F I1212 16:25:04.332514 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openstack" name="builder-dockercfg-2cl94" expected=4 actual=0 2025-12-12T16:25:04.332953516+00:00 stderr F I1212 16:25:04.332941 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openstack" name="builder-dockercfg-2cl94" serviceaccount="builder" 2025-12-12T16:25:05.017805610+00:00 stderr F I1212 16:25:05.017701 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openstack-operators" name="default-dockercfg-gdqsd" expected=4 actual=0 2025-12-12T16:25:05.017805610+00:00 stderr F I1212 16:25:05.017742 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openstack-operators" name="default-dockercfg-gdqsd" serviceaccount="default" 2025-12-12T16:25:05.022350670+00:00 stderr F I1212 16:25:05.020895 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openstack-operators" name="deployer-dockercfg-ff6mp" expected=4 actual=0 2025-12-12T16:25:05.022350670+00:00 stderr F I1212 16:25:05.020946 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openstack-operators" name="deployer-dockercfg-ff6mp" serviceaccount="deployer" 2025-12-12T16:25:05.026241192+00:00 stderr F I1212 16:25:05.026155 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openstack-operators" name="builder-dockercfg-vtbg8" expected=4 actual=0 2025-12-12T16:25:05.026418966+00:00 stderr F I1212 16:25:05.026394 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openstack-operators" name="builder-dockercfg-vtbg8" 
serviceaccount="builder" 2025-12-12T16:26:15.070844491+00:00 stderr F I1212 16:26:15.070270 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="service-telemetry" name="default-dockercfg-8qddz" expected=4 actual=0 2025-12-12T16:26:15.070844491+00:00 stderr F I1212 16:26:15.070804 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="service-telemetry" name="default-dockercfg-8qddz" serviceaccount="default" 2025-12-12T16:26:15.073510219+00:00 stderr F I1212 16:26:15.073454 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="service-telemetry" name="deployer-dockercfg-8xhq5" expected=4 actual=0 2025-12-12T16:26:15.073510219+00:00 stderr F I1212 16:26:15.073487 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="service-telemetry" name="deployer-dockercfg-8xhq5" serviceaccount="deployer" 2025-12-12T16:26:15.074966426+00:00 stderr F I1212 16:26:15.074843 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="service-telemetry" name="builder-dockercfg-ff94g" expected=4 actual=0 2025-12-12T16:26:15.074966426+00:00 stderr F I1212 16:26:15.074866 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="service-telemetry" name="builder-dockercfg-ff94g" serviceaccount="builder" 2025-12-12T16:26:18.293489217+00:00 stderr F I1212 16:26:18.292819 1 warnings.go:110] "Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+" 2025-12-12T16:26:42.049970345+00:00 stderr F I1212 16:26:42.045496 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="cert-manager-operator" name="builder-dockercfg-wskc9" expected=4 actual=0 2025-12-12T16:26:42.049970345+00:00 stderr F I1212 16:26:42.046304 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="cert-manager-operator" name="builder-dockercfg-wskc9" serviceaccount="builder" 2025-12-12T16:26:42.049970345+00:00 stderr F I1212 16:26:42.048540 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="cert-manager-operator" name="default-dockercfg-crsf7" expected=4 actual=0 2025-12-12T16:26:42.049970345+00:00 stderr F I1212 16:26:42.048570 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="cert-manager-operator" name="default-dockercfg-crsf7" serviceaccount="default" 2025-12-12T16:26:42.049970345+00:00 stderr F I1212 16:26:42.048864 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="cert-manager-operator" name="deployer-dockercfg-p9272" expected=4 actual=0 2025-12-12T16:26:42.049970345+00:00 stderr F I1212 16:26:42.048875 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="cert-manager-operator" name="deployer-dockercfg-p9272" serviceaccount="deployer" 2025-12-12T16:26:42.060484080+00:00 stderr F E1212 16:26:42.060149 1 project_finalizer_controller.go:110] "Unhandled Error" err="error syncing namespace, it will be retried: Operation cannot be fulfilled on namespaces \"cert-manager-operator\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:26:57.613657587+00:00 stderr F I1212 16:26:57.612769 
1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openshift-operators" name="perses-dockercfg-6ncpx" expected=4 actual=0 2025-12-12T16:26:57.613657587+00:00 stderr F I1212 16:26:57.613589 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openshift-operators" name="perses-dockercfg-6ncpx" serviceaccount="perses" 2025-12-12T16:26:58.049401395+00:00 stderr F I1212 16:26:58.046419 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openshift-operators" name="obo-prometheus-operator-dockercfg-xntsg" expected=4 actual=0 2025-12-12T16:26:58.049401395+00:00 stderr F I1212 16:26:58.047041 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openshift-operators" name="obo-prometheus-operator-dockercfg-xntsg" serviceaccount="obo-prometheus-operator" 2025-12-12T16:26:58.615615881+00:00 stderr F I1212 16:26:58.615541 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openshift-operators" name="obo-prometheus-operator-admission-webhook-dockercfg-snb8c" expected=4 actual=0 2025-12-12T16:26:58.615615881+00:00 stderr F I1212 16:26:58.615575 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openshift-operators" name="obo-prometheus-operator-admission-webhook-dockercfg-snb8c" serviceaccount="obo-prometheus-operator-admission-webhook" 2025-12-12T16:26:59.045484111+00:00 stderr F I1212 16:26:59.042653 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openshift-operators" name="observability-operator-sa-dockercfg-dbxwx" expected=4 actual=0 2025-12-12T16:26:59.045484111+00:00 stderr F I1212 16:26:59.043250 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openshift-operators" name="observability-operator-sa-dockercfg-dbxwx" serviceaccount="observability-operator-sa" 2025-12-12T16:26:59.619778106+00:00 stderr F I1212 16:26:59.619716 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openshift-operators" name="perses-operator-dockercfg-q7phj" expected=4 actual=0 2025-12-12T16:26:59.619840227+00:00 stderr F I1212 16:26:59.619829 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openshift-operators" name="perses-operator-dockercfg-q7phj" serviceaccount="perses-operator" 2025-12-12T16:27:04.023431596+00:00 stderr F I1212 16:27:04.022411 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="service-telemetry" name="elastic-operator-dockercfg-rf5wq" expected=4 actual=0 2025-12-12T16:27:04.023431596+00:00 stderr F I1212 16:27:04.023398 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="service-telemetry" name="elastic-operator-dockercfg-rf5wq" serviceaccount="elastic-operator" 2025-12-12T16:27:18.813741108+00:00 stderr F I1212 16:27:18.813233 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="cert-manager-operator" name="cert-manager-operator-controller-manager-dockercfg-72tmp" expected=4 actual=0 2025-12-12T16:27:18.813741108+00:00 stderr F I1212 16:27:18.813707 1 image_pull_secret_controller.go:176] "Refreshing image pull 
secret" ns="cert-manager-operator" name="cert-manager-operator-controller-manager-dockercfg-72tmp" serviceaccount="cert-manager-operator-controller-manager" 2025-12-12T16:27:36.298567783+00:00 stderr F I1212 16:27:36.295784 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="cert-manager" name="deployer-dockercfg-ttlmq" expected=4 actual=0 2025-12-12T16:27:36.298567783+00:00 stderr F I1212 16:27:36.297758 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="cert-manager" name="deployer-dockercfg-ttlmq" serviceaccount="deployer" 2025-12-12T16:27:36.298567783+00:00 stderr F I1212 16:27:36.297481 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="cert-manager" name="builder-dockercfg-kqzwb" expected=4 actual=0 2025-12-12T16:27:36.298567783+00:00 stderr F I1212 16:27:36.298108 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="cert-manager" name="builder-dockercfg-kqzwb" serviceaccount="builder" 2025-12-12T16:27:36.308020892+00:00 stderr F I1212 16:27:36.307827 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="cert-manager" name="default-dockercfg-42mkj" expected=4 actual=0 2025-12-12T16:27:36.308020892+00:00 stderr F I1212 16:27:36.307852 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="cert-manager" name="default-dockercfg-42mkj" serviceaccount="default" 2025-12-12T16:27:37.733414806+00:00 stderr F I1212 16:27:37.733346 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="cert-manager" name="cert-manager-webhook-dockercfg-2tblb" expected=4 actual=0 2025-12-12T16:27:37.733414806+00:00 stderr F I1212 16:27:37.733380 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="cert-manager" name="cert-manager-webhook-dockercfg-2tblb" serviceaccount="cert-manager-webhook" 2025-12-12T16:27:39.722766465+00:00 stderr F I1212 16:27:39.722138 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="cert-manager" name="cert-manager-cainjector-dockercfg-bg7l4" expected=4 actual=0 2025-12-12T16:27:39.722766465+00:00 stderr F I1212 16:27:39.722717 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="cert-manager" name="cert-manager-cainjector-dockercfg-bg7l4" serviceaccount="cert-manager-cainjector" 2025-12-12T16:27:47.124414632+00:00 stderr F I1212 16:27:47.123804 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="cert-manager" name="cert-manager-dockercfg-spvtv" expected=4 actual=0 2025-12-12T16:27:47.124514585+00:00 stderr F I1212 16:27:47.124500 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="cert-manager" name="cert-manager-dockercfg-spvtv" serviceaccount="cert-manager" 2025-12-12T16:29:06.839806884+00:00 stderr F I1212 16:29:06.839289 1 build_controller.go:1834] Giving up retrying service-telemetry/service-telemetry-framework-index: build config service-telemetry/service-telemetry-framework-index has no builds to run next 2025-12-12T16:29:23.060465424+00:00 stderr F I1212 16:29:23.059440 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number 
of entries" ns="service-telemetry" name="infrawatch-operators-dockercfg-n6ssc" expected=4 actual=0 2025-12-12T16:29:23.060465424+00:00 stderr F I1212 16:29:23.060436 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="service-telemetry" name="infrawatch-operators-dockercfg-n6ssc" serviceaccount="infrawatch-operators" 2025-12-12T16:29:52.924568454+00:00 stderr F I1212 16:29:52.923707 1 image_pull_secret_controller.go:434] "Observed service account token signing certs" kids=["aV377pYUivc_NpjUTRV8mkI5FRM9rTZehB0Fpev8Yjk"] 2025-12-12T16:29:53.229531741+00:00 stderr F I1212 16:29:53.229424 1 image_pull_secret_controller.go:424] "Observed image registry urls" urls=["10.217.5.148:5000","default-route-openshift-image-registry.apps-crc.testing","image-registry.openshift-image-registry.svc.cluster.local:5000","image-registry.openshift-image-registry.svc:5000"] 2025-12-12T16:32:05.858163445+00:00 stderr F I1212 16:32:05.857083 1 build_controller.go:1834] Giving up retrying service-telemetry/service-telemetry-framework-index: build config service-telemetry/service-telemetry-framework-index has no builds to run next 2025-12-12T16:33:51.304317734+00:00 stderr F I1212 16:33:51.303358 1 warnings.go:110] "Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+" 2025-12-12T16:34:06.010376292+00:00 stderr F I1212 16:34:06.009793 1 reflector.go:430] "Caches populated" type="*v1.Event" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:39:52.903355419+00:00 stderr F I1212 16:39:52.902555 1 image_pull_secret_controller.go:434] "Observed service account token signing certs" kids=["aV377pYUivc_NpjUTRV8mkI5FRM9rTZehB0Fpev8Yjk"] 2025-12-12T16:39:53.230458768+00:00 stderr F I1212 16:39:53.230341 1 image_pull_secret_controller.go:424] "Observed image registry urls" urls=["10.217.5.148:5000","default-route-openshift-image-registry.apps-crc.testing","image-registry.openshift-image-registry.svc.cluster.local:5000","image-registry.openshift-image-registry.svc:5000"] 2025-12-12T16:40:38.228506691+00:00 stderr F I1212 16:40:38.227717 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openshift-must-gather-2sjxj" name="deployer-dockercfg-nd4dq" expected=4 actual=0 2025-12-12T16:40:38.228607123+00:00 stderr F I1212 16:40:38.228591 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openshift-must-gather-2sjxj" name="deployer-dockercfg-nd4dq" serviceaccount="deployer" 2025-12-12T16:40:38.230676995+00:00 stderr F I1212 16:40:38.230616 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openshift-must-gather-2sjxj" name="builder-dockercfg-6g4h4" expected=4 actual=0 2025-12-12T16:40:38.230676995+00:00 stderr F I1212 16:40:38.230669 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openshift-must-gather-2sjxj" name="builder-dockercfg-6g4h4" serviceaccount="builder" 2025-12-12T16:40:38.248657257+00:00 stderr F I1212 16:40:38.248607 1 image_pull_secret_controller.go:294] "Internal registry pull secret auth data does not contain the correct number of entries" ns="openshift-must-gather-2sjxj" name="default-dockercfg-r98hj" expected=4 actual=0 2025-12-12T16:40:38.248748549+00:00 stderr F I1212 16:40:38.248732 1 image_pull_secret_controller.go:176] "Refreshing image pull secret" ns="openshift-must-gather-2sjxj" name="default-dockercfg-r98hj" 
serviceaccount="default" 2025-12-12T16:41:22.312087797+00:00 stderr F I1212 16:41:22.311539 1 warnings.go:110] "Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+" 2025-12-12T16:43:03.931116648+00:00 stderr F E1212 16:43:03.930466 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/builder failed with : unable to update managed image pull secret: se**********ts \"builder-dockercfg-6g4h4\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:03.931767474+00:00 stderr F E1212 16:43:03.931739 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/default failed with : unable to update managed image pull secret: se**********ts \"default-dockercfg-r98hj\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:03.934598775+00:00 stderr F E1212 16:43:03.934553 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/deployer failed with : unable to update managed image pull secret: se**********ts \"deployer-dockercfg-nd4dq\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:03.942004271+00:00 stderr F E1212 16:43:03.941953 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/default failed with : unable to update managed image pull secret: se**********ts \"default-dockercfg-r98hj\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:03.943975941+00:00 stderr F E1212 16:43:03.943620 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/builder failed with : unable to update managed image pull secret: se**********ts \"builder-dockercfg-6g4h4\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:03.945473358+00:00 stderr F E1212 16:43:03.945450 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/deployer failed with : unable to update managed image pull secret: se**********ts \"deployer-dockercfg-nd4dq\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:03.958694411+00:00 stderr F E1212 16:43:03.958623 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/builder failed with : unable to update managed image pull secret: se**********ts \"builder-dockercfg-6g4h4\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:03.958850905+00:00 stderr F E1212 16:43:03.958810 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/default failed with : unable to update managed image pull secret: se**********ts \"default-dockercfg-r98hj\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:03.963064070+00:00 stderr F E1212 16:43:03.962672 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/deployer failed with : unable to update managed image pull secret: se**********ts \"deployer-dockercfg-nd4dq\" is forbidden: unable to create new 
content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:03.985155415+00:00 stderr F E1212 16:43:03.985064 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/builder failed with : unable to update managed image pull secret: se**********ts \"builder-dockercfg-6g4h4\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:03.986103359+00:00 stderr F E1212 16:43:03.986079 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/default failed with : unable to update managed image pull secret: se**********ts \"default-dockercfg-r98hj\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:03.987770541+00:00 stderr F E1212 16:43:03.987744 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/deployer failed with : unable to update managed image pull secret: se**********ts \"deployer-dockercfg-nd4dq\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:04.033294075+00:00 stderr F E1212 16:43:04.033211 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/default failed with : unable to update managed image pull secret: se**********ts \"default-dockercfg-r98hj\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:04.033294075+00:00 stderr F E1212 16:43:04.033259 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/builder failed with : unable to update managed image pull secret: se**********ts \"builder-dockercfg-6g4h4\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:04.033906840+00:00 stderr F E1212 16:43:04.033881 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/deployer failed with : unable to update managed image pull secret: se**********ts \"deployer-dockercfg-nd4dq\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:04.122055965+00:00 stderr F E1212 16:43:04.121962 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/default failed with : unable to update managed image pull secret: se**********ts \"default-dockercfg-r98hj\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:04.122247590+00:00 stderr F E1212 16:43:04.121969 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/deployer failed with : unable to update managed image pull secret: se**********ts \"deployer-dockercfg-nd4dq\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:04.122289441+00:00 stderr F E1212 16:43:04.122032 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/builder failed with : unable to update managed image pull secret: se**********ts \"builder-dockercfg-6g4h4\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:04.287995614+00:00 stderr F 
E1212 16:43:04.287520 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/builder failed with : unable to update managed image pull secret: se**********ts \"builder-dockercfg-6g4h4\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:04.287995614+00:00 stderr F E1212 16:43:04.287563 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/deployer failed with : unable to update managed image pull secret: se**********ts \"deployer-dockercfg-nd4dq\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:04.288357093+00:00 stderr F E1212 16:43:04.288308 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/default failed with : unable to update managed image pull secret: se**********ts \"default-dockercfg-r98hj\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:04.614638531+00:00 stderr F E1212 16:43:04.614127 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/default failed with : unable to update managed image pull secret: se**********ts \"default-dockercfg-r98hj\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:04.614729383+00:00 stderr F E1212 16:43:04.614265 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/builder failed with : unable to update managed image pull secret: se**********ts \"builder-dockercfg-6g4h4\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" 2025-12-12T16:43:04.614768364+00:00 stderr F E1212 16:43:04.614713 1 service_account_controller.go:368] "Unhandled Error" err="openshift-must-gather-2sjxj/deployer failed with : unable to update managed image pull secret: se**********ts \"deployer-dockercfg-nd4dq\" is forbidden: unable to create new content in namespace openshift-must-gather-2sjxj because it is being terminated" ././@LongLink0000644000000000000000000000033500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-565b79b866-krgxf_dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage0000755000175000017500000000000015117043044033037 5ustar zuulzuul././@LongLink0000644000000000000000000000040400000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-565b79b866-krgxf_dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7/kube-storage-version-migrator-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage0000755000175000017500000000000015117043063033040 5ustar zuulzuul././@LongLink0000644000000000000000000000041100000000000011577 Lustar 
2025-12-12T16:16:45.864691797+00:00 stderr F I1212 16:16:45.863458 1 cmd.go:253] Using service-serving-cert provided certificates
2025-12-12T16:16:45.864691797+00:00 stderr F I1212 16:16:45.864619 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}.
2025-12-12T16:16:45.866485230+00:00 stderr F I1212 16:16:45.865730 1 observer_polling.go:159] Starting file observer
2025-12-12T16:16:46.000892522+00:00 stderr F I1212 16:16:45.953127 1 builder.go:304] openshift-kube-storage-version-migrator-operator version 4.20.0-202510211040.p2.g5adc142.assembly.stream.el9-5adc142-5adc14299739bc64c8812cbab0b0ff2d12863602
2025-12-12T16:16:47.608673345+00:00 stderr F I1212 16:16:47.607441 1 secure_serving.go:57] Forcing use of http/1.1 only
2025-12-12T16:16:47.608673345+00:00 stderr F W1212 16:16:47.608638 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.
2025-12-12T16:16:47.608673345+00:00 stderr F W1212 16:16:47.608650 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.
2025-12-12T16:16:47.608673345+00:00 stderr F W1212 16:16:47.608667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.
2025-12-12T16:16:47.608737537+00:00 stderr F W1212 16:16:47.608671 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.
2025-12-12T16:16:47.608737537+00:00 stderr F W1212 16:16:47.608675 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.
2025-12-12T16:16:47.608737537+00:00 stderr F W1212 16:16:47.608678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.
2025-12-12T16:16:47.614722183+00:00 stderr F I1212 16:16:47.614630 1 secure_serving.go:213] Serving securely on [::]:8443 2025-12-12T16:16:47.618237269+00:00 stderr F I1212 16:16:47.615661 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:47.618237269+00:00 stderr F I1212 16:16:47.616262 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:47.618237269+00:00 stderr F I1212 16:16:47.616250 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:47.618237269+00:00 stderr F I1212 16:16:47.616496 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:47.618237269+00:00 stderr F I1212 16:16:47.616857 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 2025-12-12T16:16:47.618237269+00:00 stderr F I1212 16:16:47.617068 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:47.618237269+00:00 stderr F I1212 16:16:47.617083 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file 2025-12-12T16:16:47.618237269+00:00 stderr F I1212 16:16:47.617597 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController 2025-12-12T16:16:47.619504649+00:00 stderr F I1212 16:16:47.618276 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:47.619504649+00:00 stderr F I1212 16:16:47.619471 1 leaderelection.go:254] attempting to acquire leader lease openshift-kube-storage-version-migrator-operator/openshift-kube-storage-version-migrator-operator-lock... 
2025-12-12T16:16:47.723116559+00:00 stderr F I1212 16:16:47.719375 1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file 2025-12-12T16:16:47.723186951+00:00 stderr F I1212 16:16:47.723104 1 leaderelection.go:268] successfully acquired lease openshift-kube-storage-version-migrator-operator/openshift-kube-storage-version-migrator-operator-lock 2025-12-12T16:16:47.726354568+00:00 stderr F I1212 16:16:47.725316 1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 2025-12-12T16:16:47.730234193+00:00 stderr F I1212 16:16:47.726502 1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController 2025-12-12T16:16:47.761470415+00:00 stderr F I1212 16:16:47.760490 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-kube-storage-version-migrator-operator", Name:"openshift-kube-storage-version-migrator-operator-lock", UID:"0035874b-d304-4bf0-9d0b-cd7cad8f5265", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"37331", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' kube-storage-version-migrator-operator-565b79b866-krgxf_267b5f92-5808-4f40-81e4-7a5e3c058d4f became leader 2025-12-12T16:16:47.829324972+00:00 stderr F I1212 16:16:47.827923 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer 2025-12-12T16:16:47.829324972+00:00 stderr F I1212 16:16:47.828659 1 base_controller.go:76] Waiting for caches to sync for StaticConditionsController 2025-12-12T16:16:47.830228514+00:00 stderr F I1212 16:16:47.829571 1 base_controller.go:76] Waiting for caches to sync for StatusSyncer_kube-storage-version-migrator 2025-12-12T16:16:47.830228514+00:00 stderr F I1212 16:16:47.829634 1 base_controller.go:76] Waiting for caches to sync for KubeStorageVersionMigratorStaticResources-StaticResources 2025-12-12T16:16:47.830228514+00:00 stderr F I1212 16:16:47.829649 1 base_controller.go:76] Waiting for caches to sync for KubeStorageVersionMigrator 2025-12-12T16:16:47.830228514+00:00 stderr F I1212 16:16:47.829687 1 base_controller.go:76] Waiting for caches to sync for kube-storage-version-migrator-RemoveStaleConditions 2025-12-12T16:16:47.929390915+00:00 stderr F I1212 16:16:47.928987 1 base_controller.go:82] Caches are synced for LoggingSyncer 2025-12-12T16:16:47.929390915+00:00 stderr F I1212 16:16:47.929034 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ... 2025-12-12T16:16:47.933301191+00:00 stderr F I1212 16:16:47.929466 1 base_controller.go:82] Caches are synced for StaticConditionsController 2025-12-12T16:16:47.933301191+00:00 stderr F I1212 16:16:47.929486 1 base_controller.go:119] Starting #1 worker of StaticConditionsController controller ... 2025-12-12T16:16:47.933301191+00:00 stderr F I1212 16:16:47.929784 1 base_controller.go:82] Caches are synced for kube-storage-version-migrator-RemoveStaleConditions 2025-12-12T16:16:47.933301191+00:00 stderr F I1212 16:16:47.929794 1 base_controller.go:119] Starting #1 worker of kube-storage-version-migrator-RemoveStaleConditions controller ... 2025-12-12T16:16:47.933301191+00:00 stderr F I1212 16:16:47.929818 1 base_controller.go:82] Caches are synced for StatusSyncer_kube-storage-version-migrator 2025-12-12T16:16:47.933301191+00:00 stderr F I1212 16:16:47.929824 1 base_controller.go:119] Starting #1 worker of StatusSyncer_kube-storage-version-migrator controller ... 
2025-12-12T16:16:47.933301191+00:00 stderr F I1212 16:16:47.929841 1 base_controller.go:82] Caches are synced for KubeStorageVersionMigratorStaticResources-StaticResources 2025-12-12T16:16:47.933301191+00:00 stderr F I1212 16:16:47.929846 1 base_controller.go:119] Starting #1 worker of KubeStorageVersionMigratorStaticResources-StaticResources controller ... 2025-12-12T16:16:47.933301191+00:00 stderr F I1212 16:16:47.929863 1 base_controller.go:82] Caches are synced for KubeStorageVersionMigrator 2025-12-12T16:16:47.933301191+00:00 stderr F I1212 16:16:47.929868 1 base_controller.go:119] Starting #1 worker of KubeStorageVersionMigrator controller ... 2025-12-12T16:16:48.346287193+00:00 stderr F W1212 16:16:48.341042 1 dynamic_operator_client.go:352] .status.conditions["KubeStorageVersionMigratorAvailable"].reason is missing; this will eventually be fatal 2025-12-12T16:16:48.346287193+00:00 stderr F W1212 16:16:48.346251 1 dynamic_operator_client.go:355] .status.conditions["KubeStorageVersionMigratorAvailable"].message is missing; this will eventually be fatal 2025-12-12T16:16:48.346287193+00:00 stderr F W1212 16:16:48.346270 1 dynamic_operator_client.go:352] .status.conditions["KubeStorageVersionMigratorProgressing"].reason is missing; this will eventually be fatal 2025-12-12T16:16:48.346335235+00:00 stderr F W1212 16:16:48.346282 1 dynamic_operator_client.go:355] .status.conditions["KubeStorageVersionMigratorProgressing"].message is missing; this will eventually be fatal 2025-12-12T16:16:48.368124827+00:00 stderr F I1212 16:16:48.367966 1 status_controller.go:225] clusteroperator/kube-storage-version-migrator diff {"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:52:00Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:16:48Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:16:48Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:00Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:00Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:48.395888685+00:00 stderr F I1212 16:16:48.394675 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-storage-version-migrator-operator", Name:"kube-storage-version-migrator-operator", UID:"af746821-921a-4842-94da-28c08769612a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-storage-version-migrator changed: Progressing changed from True to False ("All is well"),Available changed from False to True ("All is well") 2025-12-12T16:16:55.905356232+00:00 stderr F I1212 16:16:55.901146 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.901101818 +0000 UTC))" 2025-12-12T16:16:55.905356232+00:00 stderr F I1212 16:16:55.905347 1 tlsconfig.go:181] "Loaded client CA" index=1 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.905325691 +0000 UTC))" 2025-12-12T16:16:55.905417403+00:00 stderr F I1212 16:16:55.905379 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.905370352 +0000 UTC))" 2025-12-12T16:16:55.905417403+00:00 stderr F I1212 16:16:55.905395 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.905388233 +0000 UTC))" 2025-12-12T16:16:55.905417403+00:00 stderr F I1212 16:16:55.905408 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.905400423 +0000 UTC))" 2025-12-12T16:16:55.906210083+00:00 stderr F I1212 16:16:55.905427 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.905415953 +0000 UTC))" 2025-12-12T16:16:55.906210083+00:00 stderr F I1212 16:16:55.905456 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.905447584 +0000 UTC))" 2025-12-12T16:16:55.906210083+00:00 stderr F I1212 16:16:55.905468 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.905460965 +0000 UTC))" 2025-12-12T16:16:55.906210083+00:00 stderr F I1212 16:16:55.905481 1 tlsconfig.go:181] "Loaded client CA" index=8 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.905472865 +0000 UTC))" 2025-12-12T16:16:55.906210083+00:00 stderr F I1212 16:16:55.905505 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.905489315 +0000 UTC))" 2025-12-12T16:16:55.906210083+00:00 stderr F I1212 16:16:55.905735 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-kube-storage-version-migrator-operator.svc\" [serving] validServingFor=[metrics.openshift-kube-storage-version-migrator-operator.svc,metrics.openshift-kube-storage-version-migrator-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:14 +0000 UTC to 2027-11-02 07:52:15 +0000 UTC (now=2025-12-12 16:16:55.905720131 +0000 UTC))" 2025-12-12T16:16:55.906210083+00:00 stderr F I1212 16:16:55.905885 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556207\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556207\" (2025-12-12 15:16:45 +0000 UTC to 2026-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:16:55.905872285 +0000 UTC))" 2025-12-12T16:17:46.322197595+00:00 stderr F I1212 16:17:46.321714 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.321654992 +0000 UTC))" 2025-12-12T16:17:46.322197595+00:00 stderr F I1212 16:17:46.322039 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.322023751 +0000 UTC))" 2025-12-12T16:17:46.322197595+00:00 stderr F I1212 16:17:46.322058 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.322047712 +0000 UTC))" 2025-12-12T16:17:46.322197595+00:00 stderr F I1212 16:17:46.322074 1 tlsconfig.go:181] "Loaded client CA" index=3 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.322063932 +0000 UTC))" 2025-12-12T16:17:46.322197595+00:00 stderr F I1212 16:17:46.322090 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.322078912 +0000 UTC))" 2025-12-12T16:17:46.322197595+00:00 stderr F I1212 16:17:46.322110 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.322095473 +0000 UTC))" 2025-12-12T16:17:46.322197595+00:00 stderr F I1212 16:17:46.322126 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.322114913 +0000 UTC))" 2025-12-12T16:17:46.322197595+00:00 stderr F I1212 16:17:46.322143 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.322132044 +0000 UTC))" 2025-12-12T16:17:46.322197595+00:00 stderr F I1212 16:17:46.322160 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.322148394 +0000 UTC))" 2025-12-12T16:17:46.322252927+00:00 stderr F I1212 16:17:46.322209 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.322167574 +0000 UTC))" 2025-12-12T16:17:46.322252927+00:00 stderr F I1212 16:17:46.322229 1 tlsconfig.go:181] "Loaded client CA" index=10 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.322217056 +0000 UTC))" 2025-12-12T16:17:46.322592085+00:00 stderr F I1212 16:17:46.322512 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-kube-storage-version-migrator-operator.svc\" [serving] validServingFor=[metrics.openshift-kube-storage-version-migrator-operator.svc,metrics.openshift-kube-storage-version-migrator-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:14 +0000 UTC to 2027-11-02 07:52:15 +0000 UTC (now=2025-12-12 16:17:46.322482832 +0000 UTC))" 2025-12-12T16:17:46.337556805+00:00 stderr F I1212 16:17:46.337493 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556207\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556207\" (2025-12-12 15:16:45 +0000 UTC to 2026-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:17:46.337441062 +0000 UTC))" 2025-12-12T16:18:47.745217578+00:00 stderr F E1212 16:18:47.744656 1 leaderelection.go:429] Failed to update lock optimitically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-storage-version-migrator-operator/leases/openshift-kube-storage-version-migrator-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:47.745970667+00:00 stderr F E1212 16:18:47.745909 1 leaderelection.go:436] error retrieving resource lock openshift-kube-storage-version-migrator-operator/openshift-kube-storage-version-migrator-operator-lock: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-storage-version-migrator-operator/leases/openshift-kube-storage-version-migrator-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:47.935267957+00:00 stderr F W1212 16:18:47.935076 1 base_controller.go:242] Updating status of "KubeStorageVersionMigrator" failed: unable to ApplyStatus for operator using fieldManager "KubeStorageVersionMigrator-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigrator-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:47.935267957+00:00 stderr F E1212 16:18:47.935152 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigrator reconciliation failed: Get \"https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-kube-storage-version-migrator/deployments/migrator\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.935391220+00:00 stderr F E1212 16:18:47.935345 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigratorStaticResources-StaticResources reconciliation failed: [\"kube-storage-version-migrator/namespace.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"kube-storage-version-migrator/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator/serviceaccounts/kube-storage-version-migrator-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/roles.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/storage-version-migration-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeStorageVersionMigratorStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigratorStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:47.944690490+00:00 stderr F W1212 16:18:47.944555 1 base_controller.go:242] Updating status of "KubeStorageVersionMigrator" failed: unable to ApplyStatus for operator using fieldManager "KubeStorageVersionMigrator-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigrator-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:47.944690490+00:00 stderr F E1212 16:18:47.944595 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigrator reconciliation failed: Get \"https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-kube-storage-version-migrator/deployments/migrator\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.945767106+00:00 stderr F E1212 16:18:47.945725 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigratorStaticResources-StaticResources reconciliation failed: [\"kube-storage-version-migrator/namespace.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator/serviceaccounts/kube-storage-version-migrator-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/roles.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/storage-version-migration-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeStorageVersionMigratorStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigratorStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:47.963377812+00:00 stderr F W1212 16:18:47.958919 1 base_controller.go:242] Updating status of "KubeStorageVersionMigrator" failed: unable to ApplyStatus for operator using fieldManager "KubeStorageVersionMigrator-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigrator-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:47.963377812+00:00 stderr F E1212 16:18:47.958948 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigrator reconciliation failed: Get 
\"https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-kube-storage-version-migrator/deployments/migrator\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.963377812+00:00 stderr F E1212 16:18:47.961021 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigratorStaticResources-StaticResources reconciliation failed: [\"kube-storage-version-migrator/namespace.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator/serviceaccounts/kube-storage-version-migrator-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/roles.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/storage-version-migration-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeStorageVersionMigratorStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigratorStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:47.993165328+00:00 stderr F W1212 16:18:47.988109 1 base_controller.go:242] Updating status of "KubeStorageVersionMigrator" failed: unable to ApplyStatus for operator using fieldManager "KubeStorageVersionMigrator-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigrator-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:47.993165328+00:00 stderr F E1212 16:18:47.988232 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigrator reconciliation failed: Get \"https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-kube-storage-version-migrator/deployments/migrator\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.996097271+00:00 stderr F E1212 16:18:47.996039 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigratorStaticResources-StaticResources reconciliation failed: [\"kube-storage-version-migrator/namespace.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator/serviceaccounts/kube-storage-version-migrator-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/roles.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/storage-version-migration-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeStorageVersionMigratorStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigratorStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:48.034038989+00:00 stderr F W1212 16:18:48.033957 1 
base_controller.go:242] Updating status of "KubeStorageVersionMigrator" failed: unable to ApplyStatus for operator using fieldManager "KubeStorageVersionMigrator-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigrator-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:48.034038989+00:00 stderr F E1212 16:18:48.034011 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigrator reconciliation failed: Get \"https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-kube-storage-version-migrator/deployments/migrator\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.041615146+00:00 stderr F E1212 16:18:48.041503 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigratorStaticResources-StaticResources reconciliation failed: [\"kube-storage-version-migrator/namespace.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator/serviceaccounts/kube-storage-version-migrator-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/roles.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/storage-version-migration-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeStorageVersionMigratorStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigratorStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:48.135333263+00:00 stderr F W1212 16:18:48.135220 1 base_controller.go:242] Updating status of "KubeStorageVersionMigrator" failed: unable to ApplyStatus for operator using fieldManager "KubeStorageVersionMigrator-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigrator-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:48.135333263+00:00 stderr F E1212 16:18:48.135285 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigrator reconciliation failed: Get \"https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-kube-storage-version-migrator/deployments/migrator\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.335713787+00:00 stderr F W1212 16:18:48.335640 1 base_controller.go:242] Updating status of "KubeStorageVersionMigrator" failed: unable to ApplyStatus for operator using fieldManager "KubeStorageVersionMigrator-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigrator-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:48.335713787+00:00 stderr F E1212 16:18:48.335700 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigrator reconciliation failed: Get \"https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-kube-storage-version-migrator/deployments/migrator\": 
dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.536125972+00:00 stderr F E1212 16:18:48.535674 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigratorStaticResources-StaticResources reconciliation failed: [\"kube-storage-version-migrator/namespace.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator/serviceaccounts/kube-storage-version-migrator-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/roles.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/storage-version-migration-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeStorageVersionMigratorStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigratorStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:48.734992648+00:00 stderr F W1212 16:18:48.734909 1 base_controller.go:242] Updating status of "KubeStorageVersionMigrator" failed: unable to ApplyStatus for operator using fieldManager "KubeStorageVersionMigrator-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigrator-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:48.734992648+00:00 stderr F E1212 16:18:48.734968 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigrator reconciliation failed: Get \"https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-kube-storage-version-migrator/deployments/migrator\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.937534406+00:00 stderr F E1212 16:18:48.937145 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigratorStaticResources-StaticResources reconciliation failed: [\"kube-storage-version-migrator/namespace.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator/serviceaccounts/kube-storage-version-migrator-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/roles.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/storage-version-migration-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeStorageVersionMigratorStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigratorStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.265098794+00:00 stderr F E1212 16:18:49.265028 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigratorStaticResources-StaticResources reconciliation 
failed: [\"kube-storage-version-migrator/namespace.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator/serviceaccounts/kube-storage-version-migrator-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/roles.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/storage-version-migration-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeStorageVersionMigratorStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigratorStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.379978794+00:00 stderr F W1212 16:18:49.379909 1 base_controller.go:242] Updating status of "KubeStorageVersionMigrator" failed: unable to ApplyStatus for operator using fieldManager "KubeStorageVersionMigrator-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigrator-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:49.379978794+00:00 stderr F E1212 16:18:49.379956 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigrator reconciliation failed: Get \"https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-kube-storage-version-migrator/deployments/migrator\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.911989407+00:00 stderr F E1212 16:18:49.911902 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigratorStaticResources-StaticResources reconciliation failed: [\"kube-storage-version-migrator/namespace.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator/serviceaccounts/kube-storage-version-migrator-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/roles.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/storage-version-migration-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeStorageVersionMigratorStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigratorStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:50.665448185+00:00 stderr F W1212 16:18:50.665362 1 base_controller.go:242] Updating status of "KubeStorageVersionMigrator" failed: unable to ApplyStatus for operator using fieldManager "KubeStorageVersionMigrator-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigrator-reportDegraded&force=true": dial tcp 
10.217.4.1:443: connect: connection refused 2025-12-12T16:18:50.665503216+00:00 stderr F E1212 16:18:50.665437 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigrator reconciliation failed: Get \"https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-kube-storage-version-migrator/deployments/migrator\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.197608021+00:00 stderr F E1212 16:18:51.197531 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigratorStaticResources-StaticResources reconciliation failed: [\"kube-storage-version-migrator/namespace.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator/serviceaccounts/kube-storage-version-migrator-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/roles.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/storage-version-migration-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeStorageVersionMigratorStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigratorStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:53.231095964+00:00 stderr F W1212 16:18:53.230296 1 base_controller.go:242] Updating status of "KubeStorageVersionMigrator" failed: unable to ApplyStatus for operator using fieldManager "KubeStorageVersionMigrator-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigrator-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:53.232298394+00:00 stderr F E1212 16:18:53.231782 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigrator reconciliation failed: Get \"https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-kube-storage-version-migrator/deployments/migrator\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.766279556+00:00 stderr F E1212 16:18:53.766208 1 base_controller.go:279] "Unhandled Error" err="KubeStorageVersionMigratorStaticResources-StaticResources reconciliation failed: [\"kube-storage-version-migrator/namespace.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-storage-version-migrator/serviceaccounts/kube-storage-version-migrator-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"kube-storage-version-migrator/roles.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/storage-version-migration-migrator\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeStorageVersionMigratorStaticResources-StaticResources\": Patch 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubestorageversionmigrators/cluster/status?fieldManager=KubeStorageVersionMigratorStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:19:26.628696105+00:00 stderr F I1212 16:19:26.627724 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243 2025-12-12T16:19:35.912383349+00:00 stderr F I1212 16:19:35.911836 1 reflector.go:368] Caches populated for *v1.ClusterOperator from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243 2025-12-12T16:20:00.791768497+00:00 stderr F I1212 16:20:00.791466 1 reflector.go:368] Caches populated for operator.openshift.io/v1, Resource=kubestorageversionmigrators from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243 2025-12-12T16:20:04.193869347+00:00 stderr F I1212 16:20:04.193544 1 reflector.go:368] Caches populated for *v1.Deployment from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243 2025-12-12T16:20:05.807234655+00:00 stderr F I1212 16:20:05.806331 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243 2025-12-12T16:20:09.303362196+00:00 stderr F I1212 16:20:09.303263 1 reflector.go:368] Caches populated for *v1.Secret from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243 2025-12-12T16:20:10.345354798+00:00 stderr F I1212 16:20:10.345253 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243 ././@LongLink0000644000000000000000000000030400000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt_50e025ff-2065-4156-844d-68d8587d7b6c/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager-operator_0000755000175000017500000000000015117043043033006 5ustar zuulzuul././@LongLink0000644000000000000000000000033200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt_50e025ff-2065-4156-844d-68d8587d7b6c/cert-manager-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager-operator_0000755000175000017500000000000015117043062033007 5ustar zuulzuul././@LongLink0000644000000000000000000000033700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt_50e025ff-2065-4156-844d-68d8587d7b6c/cert-manager-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager-operator_0000644000175000017500000020500015117043043033005 0ustar zuulzuul2025-12-12T16:27:34.974851081+00:00 stderr F W1212 16:27:34.974623 1 cmd.go:257] Using insecure, self-signed certificates 2025-12-12T16:27:34.975072947+00:00 stderr F I1212 16:27:34.974923 1 crypto.go:594] Generating new CA for cert-manager-operator-signer@1765556854 cert, and key in /tmp/serving-cert-2432790635/serving-signer.crt, /tmp/serving-cert-2432790635/serving-signer.key 2025-12-12T16:27:34.975072947+00:00 stderr F Validity period of the certificate for "cert-manager-operator-signer@1765556854" is unset, resetting to 43800h0m0s! 
2025-12-12T16:27:35.419920846+00:00 stderr F I1212 16:27:35.419793 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 2025-12-12T16:27:35.420881500+00:00 stderr F I1212 16:27:35.420743 1 observer_polling.go:159] Starting file observer 2025-12-12T16:27:35.434060094+00:00 stderr F I1212 16:27:35.434011 1 builder.go:304] cert-manager-operator version - 2025-12-12T16:27:35.435721486+00:00 stderr F I1212 16:27:35.435697 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/tmp/serving-cert-2432790635/tls.crt::/tmp/serving-cert-2432790635/tls.key" 2025-12-12T16:27:36.029524204+00:00 stderr F I1212 16:27:36.029053 1 requestheader_controller.go:255] Loaded a new request header values for RequestHeaderAuthRequestController 2025-12-12T16:27:36.034588752+00:00 stderr F I1212 16:27:36.034523 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400 2025-12-12T16:27:36.034588752+00:00 stderr F I1212 16:27:36.034551 1 maxinflight.go:145] "Initialized mutatingChan" len=200 2025-12-12T16:27:36.034588752+00:00 stderr F I1212 16:27:36.034581 1 maxinflight.go:116] "Set denominator for readonly requests" limit=400 2025-12-12T16:27:36.034615063+00:00 stderr F I1212 16:27:36.034588 1 maxinflight.go:120] "Set denominator for mutating requests" limit=200 2025-12-12T16:27:36.040025900+00:00 stderr F I1212 16:27:36.039946 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:27:36.040025900+00:00 stderr F W1212 16:27:36.039989 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:27:36.040025900+00:00 stderr F W1212 16:27:36.039994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:27:36.040025900+00:00 stderr F W1212 16:27:36.039999 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:27:36.040025900+00:00 stderr F W1212 16:27:36.040002 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:27:36.040025900+00:00 stderr F W1212 16:27:36.040005 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:27:36.040025900+00:00 stderr F W1212 16:27:36.040008 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 2025-12-12T16:27:36.040284866+00:00 stderr F I1212 16:27:36.040246 1 genericapiserver.go:546] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:27:36.043499738+00:00 stderr F I1212 16:27:36.042962 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:27:36.043499738+00:00 stderr F I1212 16:27:36.043403 1 leaderelection.go:257] attempting to acquire leader lease cert-manager-operator/cert-manager-operator-lock... 
2025-12-12T16:27:36.044406561+00:00 stderr F I1212 16:27:36.044379 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:27:36.044473882+00:00 stderr F I1212 16:27:36.044447 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:27:36.044473882+00:00 stderr F I1212 16:27:36.044440 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:27:36.044515363+00:00 stderr F I1212 16:27:36.044484 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:27:36.044554584+00:00 stderr F I1212 16:27:36.044528 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:27:36.044593985+00:00 stderr F I1212 16:27:36.044583 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:27:36.045688313+00:00 stderr F I1212 16:27:36.045621 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/tmp/serving-cert-2432790635/tls.crt::/tmp/serving-cert-2432790635/tls.key" 2025-12-12T16:27:36.046393891+00:00 stderr F I1212 16:27:36.046316 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-2432790635/tls.crt::/tmp/serving-cert-2432790635/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"cert-manager-operator-signer@1765556854\" (2025-12-12 16:27:34 +0000 UTC to 2025-12-12 16:27:35 +0000 UTC (now=2025-12-12 16:27:36.046280268 +0000 UTC))" 2025-12-12T16:27:36.046565245+00:00 stderr F I1212 16:27:36.046539 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556856\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556855\" (2025-12-12 15:27:35 +0000 UTC to 2028-12-12 15:27:35 +0000 UTC (now=2025-12-12 16:27:36.046517374 +0000 UTC))" 2025-12-12T16:27:36.046611416+00:00 stderr F I1212 16:27:36.046593 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:27:36.046679848+00:00 stderr F I1212 16:27:36.046663 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:27:36.046808401+00:00 stderr F I1212 16:27:36.046793 1 genericapiserver.go:696] [graceful-termination] waiting for shutdown to be initiated 2025-12-12T16:27:36.047380166+00:00 stderr F I1212 16:27:36.047350 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:209" 2025-12-12T16:27:36.047935980+00:00 stderr F I1212 16:27:36.047890 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:209" 2025-12-12T16:27:36.048389471+00:00 stderr F I1212 16:27:36.048333 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/apiserver/pkg/authentication/request/headerrequest/requestheader_controller.go:183" 2025-12-12T16:27:36.050484225+00:00 stderr F I1212 16:27:36.050402 1 leaderelection.go:271] successfully acquired lease cert-manager-operator/cert-manager-operator-lock 2025-12-12T16:27:36.050694700+00:00 stderr F I1212 
16:27:36.050578 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"cert-manager-operator", Name:"cert-manager-operator-lock", UID:"a69f3bef-5970-447f-bd4a-82664843fe7e", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"42878", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' cert-manager-operator-controller-manager-64c74584c4-djdmt_c34f222c-e3ef-4913-909c-33ea1a442cba became leader 2025-12-12T16:27:36.078323159+00:00 stderr F I1212 16:27:36.077776 1 setup_manager.go:52] "setting up operator manager" logger="setup-manager" controller="cert-manager-istio-csr-controller" 2025-12-12T16:27:36.078323159+00:00 stderr F I1212 16:27:36.077812 1 setup_manager.go:53] "controller" logger="setup-manager" version="" 2025-12-12T16:27:36.079371286+00:00 stderr F I1212 16:27:36.079338 1 base_controller.go:76] Waiting for caches to sync for cert-manager-webhook-deployment 2025-12-12T16:27:36.079447128+00:00 stderr F I1212 16:27:36.079416 1 base_controller.go:76] Waiting for caches to sync for cert-manager-cainjector-deployment 2025-12-12T16:27:36.079524329+00:00 stderr F I1212 16:27:36.079339 1 base_controller.go:76] Waiting for caches to sync for cert-manager-controller-deployment 2025-12-12T16:27:36.079588931+00:00 stderr F I1212 16:27:36.079575 1 base_controller.go:76] Waiting for caches to sync for cert-manager-networkpolicy-user-defined 2025-12-12T16:27:36.079626652+00:00 stderr F I1212 16:27:36.079615 1 base_controller.go:76] Waiting for caches to sync for cert-manager-cainjector-static-resources--StaticResources 2025-12-12T16:27:36.079663963+00:00 stderr F I1212 16:27:36.079641 1 base_controller.go:76] Waiting for caches to sync for cert-manager-webhook-static-resources--StaticResources 2025-12-12T16:27:36.079824067+00:00 stderr F I1212 16:27:36.079770 1 base_controller.go:76] Waiting for caches to sync for DefaultCertManager 2025-12-12T16:27:36.079892339+00:00 stderr F I1212 16:27:36.079857 1 base_controller.go:76] Waiting for caches to sync for cert-manager-networkpolicy-static-resources--StaticResources 2025-12-12T16:27:36.080069053+00:00 stderr F I1212 16:27:36.080046 1 base_controller.go:76] Waiting for caches to sync for cert-manager-controller-static-resources--StaticResources 2025-12-12T16:27:36.081653773+00:00 stderr F I1212 16:27:36.081627 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.081772996+00:00 stderr F I1212 16:27:36.081736 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.082029683+00:00 stderr F I1212 16:27:36.081627 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.082445983+00:00 stderr F I1212 16:27:36.082386 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.082486954+00:00 stderr F I1212 16:27:36.082465 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.083550861+00:00 stderr F I1212 16:27:36.082406 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.083550861+00:00 stderr F I1212 16:27:36.083060 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.084234919+00:00 stderr F I1212 
16:27:36.084098 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.086050185+00:00 stderr F I1212 16:27:36.084610 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.086050185+00:00 stderr F I1212 16:27:36.085347 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:27:36.087541832+00:00 stderr F I1212 16:27:36.087503 1 reflector.go:430] "Caches populated" type="*v1.NetworkPolicy" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.090977819+00:00 stderr F I1212 16:27:36.090942 1 reflector.go:430] "Caches populated" type="*v1.ClusterRoleBinding" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.104485371+00:00 stderr F I1212 16:27:36.104406 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.115121310+00:00 stderr F I1212 16:27:36.115041 1 reflector.go:430] "Caches populated" type="*v1alpha1.CertManager" reflector="github.com/openshift/cert-manager-operator/pkg/operator/informers/externalversions/factory.go:125" 2025-12-12T16:27:36.117797368+00:00 stderr F I1212 16:27:36.117726 1 reflector.go:430] "Caches populated" type="*v1.ClusterRole" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:27:36.120141417+00:00 stderr F I1212 16:27:36.120120 1 server.go:208] "Starting metrics server" logger="controller-runtime.metrics" 2025-12-12T16:27:36.120369963+00:00 stderr F I1212 16:27:36.120283 1 recorder.go:104] "controller is starting" logger="operator-manager.events" type="Normal" object={"kind":"IstioCSR","apiVersion":"operator.openshift.io/v1alpha1"} reason="ControllerStarted" 2025-12-12T16:27:36.120457915+00:00 stderr F I1212 16:27:36.120446 1 server.go:247] "Serving metrics server" logger="controller-runtime.metrics" bindAddress=":8080" secure=false 2025-12-12T16:27:36.120721142+00:00 stderr F I1212 16:27:36.120707 1 controller.go:175] "Starting EventSource" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" source="kind source: *v1alpha1.IstioCSR" 2025-12-12T16:27:36.120765183+00:00 stderr F I1212 16:27:36.120755 1 controller.go:175] "Starting EventSource" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" source="kind source: *v1.Certificate" 2025-12-12T16:27:36.120792674+00:00 stderr F I1212 16:27:36.120784 1 controller.go:175] "Starting EventSource" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" source="kind source: *v1.Deployment" 2025-12-12T16:27:36.120819495+00:00 stderr F I1212 16:27:36.120811 1 controller.go:175] "Starting EventSource" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" source="kind source: *v1.ClusterRole" 2025-12-12T16:27:36.120845655+00:00 stderr F I1212 16:27:36.120837 1 controller.go:175] "Starting EventSource" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" source="kind source: *v1.ClusterRoleBinding" 
2025-12-12T16:27:36.120875436+00:00 stderr F I1212 16:27:36.120864 1 controller.go:175] "Starting EventSource" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" source="kind source: *v1.Role" 2025-12-12T16:27:36.120902357+00:00 stderr F I1212 16:27:36.120893 1 controller.go:175] "Starting EventSource" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" source="kind source: *v1.RoleBinding" 2025-12-12T16:27:36.120928697+00:00 stderr F I1212 16:27:36.120920 1 controller.go:175] "Starting EventSource" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" source="kind source: *v1.Service" 2025-12-12T16:27:36.120965658+00:00 stderr F I1212 16:27:36.120952 1 controller.go:175] "Starting EventSource" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" source="kind source: *v1.ServiceAccount" 2025-12-12T16:27:36.120994049+00:00 stderr F I1212 16:27:36.120985 1 controller.go:175] "Starting EventSource" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" source="kind source: *v1.ConfigMap" 2025-12-12T16:27:36.121023510+00:00 stderr F I1212 16:27:36.121012 1 controller.go:175] "Starting EventSource" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" source="kind source: *v1.PartialObjectMetadata" 2025-12-12T16:27:36.121063721+00:00 stderr F I1212 16:27:36.121052 1 controller.go:175] "Starting EventSource" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" source="kind source: *v1.NetworkPolicy" 2025-12-12T16:27:36.121089031+00:00 stderr F I1212 16:27:36.121080 1 controller.go:183] "Starting Controller" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" 2025-12-12T16:27:36.125421201+00:00 stderr F I1212 16:27:36.123317 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:106" 2025-12-12T16:27:36.125421201+00:00 stderr F I1212 16:27:36.123642 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:106" 2025-12-12T16:27:36.125421201+00:00 stderr F I1212 16:27:36.123854 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:106" 2025-12-12T16:27:36.125421201+00:00 stderr F I1212 16:27:36.124130 1 reflector.go:430] "Caches populated" type="*v1.NetworkPolicy" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:106" 2025-12-12T16:27:36.125421201+00:00 stderr F I1212 16:27:36.124379 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:106" 2025-12-12T16:27:36.125421201+00:00 stderr F I1212 16:27:36.124730 1 reflector.go:430] "Caches populated" type="*v1.ClusterRole" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:106" 2025-12-12T16:27:36.125421201+00:00 
stderr F I1212 16:27:36.125107 1 reflector.go:430] "Caches populated" type="*v1.ClusterRoleBinding" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:106" 2025-12-12T16:27:36.125421201+00:00 stderr F I1212 16:27:36.125321 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:106" 2025-12-12T16:27:36.143676303+00:00 stderr F I1212 16:27:36.143596 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:106" 2025-12-12T16:27:36.144660538+00:00 stderr F I1212 16:27:36.144611 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:27:36.145237333+00:00 stderr F I1212 16:27:36.145192 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:27:36.145089259 +0000 UTC))" 2025-12-12T16:27:36.153643605+00:00 stderr F I1212 16:27:36.153554 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-2432790635/tls.crt::/tmp/serving-cert-2432790635/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"cert-manager-operator-signer@1765556854\" (2025-12-12 16:27:34 +0000 UTC to 2025-12-12 16:27:35 +0000 UTC (now=2025-12-12 16:27:36.15343789 +0000 UTC))" 2025-12-12T16:27:36.154149588+00:00 stderr F I1212 16:27:36.154130 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556856\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556855\" (2025-12-12 15:27:35 +0000 UTC to 2028-12-12 15:27:35 +0000 UTC (now=2025-12-12 16:27:36.153948473 +0000 UTC))" 2025-12-12T16:27:36.182086155+00:00 stderr F I1212 16:27:36.181489 1 base_controller.go:82] Caches are synced for DefaultCertManager 2025-12-12T16:27:36.182086155+00:00 stderr F I1212 16:27:36.182049 1 base_controller.go:119] Starting #1 worker of DefaultCertManager controller ... 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183175 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'StatusNotFound' Creating "cluster" certmanager 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183231 1 base_controller.go:82] Caches are synced for cert-manager-webhook-deployment 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183237 1 base_controller.go:119] Starting #1 worker of cert-manager-webhook-deployment controller ... 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183360 1 base_controller.go:82] Caches are synced for cert-manager-controller-static-resources--StaticResources 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183367 1 base_controller.go:119] Starting #1 worker of cert-manager-controller-static-resources--StaticResources controller ... 
2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183380 1 base_controller.go:82] Caches are synced for cert-manager-cainjector-deployment 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183384 1 base_controller.go:119] Starting #1 worker of cert-manager-cainjector-deployment controller ... 2025-12-12T16:27:36.185061030+00:00 stderr F E1212 16:27:36.183440 1 base_controller.go:279] "Unhandled Error" err="cert-manager-controller-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183548 1 base_controller.go:82] Caches are synced for cert-manager-networkpolicy-static-resources--StaticResources 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183554 1 base_controller.go:119] Starting #1 worker of cert-manager-networkpolicy-static-resources--StaticResources controller ... 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183578 1 base_controller.go:82] Caches are synced for cert-manager-controller-deployment 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183589 1 base_controller.go:119] Starting #1 worker of cert-manager-controller-deployment controller ... 2025-12-12T16:27:36.185061030+00:00 stderr F E1212 16:27:36.183654 1 base_controller.go:279] "Unhandled Error" err="cert-manager-networkpolicy-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183744 1 base_controller.go:82] Caches are synced for cert-manager-webhook-static-resources--StaticResources 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183751 1 base_controller.go:119] Starting #1 worker of cert-manager-webhook-static-resources--StaticResources controller ... 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183767 1 base_controller.go:82] Caches are synced for cert-manager-networkpolicy-user-defined 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183772 1 base_controller.go:119] Starting #1 worker of cert-manager-networkpolicy-user-defined controller ... 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183795 1 base_controller.go:82] Caches are synced for cert-manager-cainjector-static-resources--StaticResources 2025-12-12T16:27:36.185061030+00:00 stderr F I1212 16:27:36.183801 1 base_controller.go:119] Starting #1 worker of cert-manager-cainjector-static-resources--StaticResources controller ... 
2025-12-12T16:27:36.185061030+00:00 stderr F E1212 16:27:36.183858 1 base_controller.go:279] "Unhandled Error" err="cert-manager-webhook-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.185061030+00:00 stderr F E1212 16:27:36.183884 1 base_controller.go:279] "Unhandled Error" err="cert-manager-cainjector-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.195150 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.195752 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:27:36.1957183 +0000 UTC))" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.195784 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:27:36.195762431 +0000 UTC))" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.195802 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:27:36.195791192 +0000 UTC))" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.195821 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:27:36.195810283 +0000 UTC))" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.195842 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:27:36.195831063 +0000 UTC))" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.195859 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:27:36.195847873 +0000 
UTC))" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.195885 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:27:36.195864404 +0000 UTC))" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.195904 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:27:36.195891705 +0000 UTC))" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.195922 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:27:36.195910655 +0000 UTC))" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.195945 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:27:36.195935286 +0000 UTC))" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.195960 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:27:36.195950116 +0000 UTC))" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.196227 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-2432790635/tls.crt::/tmp/serving-cert-2432790635/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"cert-manager-operator-signer@1765556854\" (2025-12-12 16:27:34 +0000 UTC to 2025-12-12 16:27:35 +0000 UTC (now=2025-12-12 16:27:36.196204972 +0000 UTC))" 2025-12-12T16:27:36.196698405+00:00 stderr F I1212 16:27:36.196428 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556856\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556855\" (2025-12-12 15:27:35 +0000 UTC to 2028-12-12 15:27:35 +0000 UTC (now=2025-12-12 16:27:36.196412148 +0000 UTC))" 2025-12-12T16:27:36.202962054+00:00 stderr F I1212 16:27:36.202871 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 
2025-12-12T16:27:36.203698402+00:00 stderr F E1212 16:27:36.203622 1 base_controller.go:279] "Unhandled Error" err="cert-manager-webhook-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.203773354+00:00 stderr F E1212 16:27:36.203733 1 base_controller.go:279] "Unhandled Error" err="cert-manager-cainjector-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.203857216+00:00 stderr F E1212 16:27:36.203823 1 base_controller.go:279] "Unhandled Error" err="cert-manager-controller-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.204175194+00:00 stderr F E1212 16:27:36.204138 1 base_controller.go:279] "Unhandled Error" err="cert-manager-networkpolicy-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.204876552+00:00 stderr F E1212 16:27:36.204836 1 base_controller.go:279] "Unhandled Error" err="cert-manager-cainjector-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.204954284+00:00 stderr F E1212 16:27:36.204926 1 base_controller.go:279] "Unhandled Error" err="cert-manager-webhook-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.205007995+00:00 stderr F E1212 16:27:36.204980 1 base_controller.go:279] "Unhandled Error" err="cert-manager-controller-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.208635287+00:00 stderr F I1212 16:27:36.207043 1 reflector.go:430] "Caches populated" type="*v1alpha1.IstioCSR" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:106" 2025-12-12T16:27:36.211659474+00:00 stderr F E1212 16:27:36.210115 1 base_controller.go:279] "Unhandled Error" err="cert-manager-controller-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.211659474+00:00 stderr F E1212 16:27:36.210167 1 base_controller.go:279] "Unhandled Error" err="cert-manager-webhook-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.211659474+00:00 stderr F E1212 16:27:36.210258 1 base_controller.go:279] "Unhandled Error" err="cert-manager-cainjector-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.211659474+00:00 stderr F E1212 16:27:36.211339 1 base_controller.go:279] "Unhandled Error" err="cert-manager-cainjector-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.211659474+00:00 stderr F E1212 16:27:36.211457 1 base_controller.go:279] "Unhandled Error" err="cert-manager-controller-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.211659474+00:00 stderr F E1212 16:27:36.211491 1 base_controller.go:279] "Unhandled Error" err="cert-manager-webhook-static-resources--StaticResources reconciliation failed: certmanager.operator.openshift.io \"cluster\" not found" 2025-12-12T16:27:36.253368049+00:00 stderr F I1212 
16:27:36.251570 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'MutatingWebhookConfigurationCreated' Created MutatingWebhookConfiguration.admissionregistration.k8s.io/cert-manager-webhook because it was missing 2025-12-12T16:27:36.268322818+00:00 stderr F I1212 16:27:36.266912 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'NamespaceCreated' Created Namespace/cert-manager because it was missing 2025-12-12T16:27:36.285241676+00:00 stderr F I1212 16:27:36.284911 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ValidatingWebhookConfigurationCreated' Created ValidatingWebhookConfiguration.admissionregistration.k8s.io/cert-manager-webhook because it was missing 2025-12-12T16:27:36.338163745+00:00 stderr F I1212 16:27:36.338059 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-cainjector because it was missing 2025-12-12T16:27:36.349542293+00:00 stderr F I1212 16:27:36.349039 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentCreated' Created Deployment.apps/cert-manager-cainjector -n cert-manager because it was missing 2025-12-12T16:27:36.400544314+00:00 stderr F I1212 16:27:36.400212 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-cluster-view because it was missing 2025-12-12T16:27:36.495803585+00:00 stderr F I1212 16:27:36.492654 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'RoleBindingCreateFailed' Failed to create RoleBinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving -n cert-manager: rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found 2025-12-12T16:27:36.541258205+00:00 stderr F I1212 16:27:36.540022 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:106" 2025-12-12T16:27:37.208695536+00:00 stderr F I1212 16:27:37.208613 1 reflector.go:430] "Caches populated" 
type="*v1.Certificate" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:106" 2025-12-12T16:27:37.232373806+00:00 stderr F I1212 16:27:37.232298 1 controller.go:217] "Starting workers" logger="operator-manager" controller="cert-manager-istio-csr-controller" controllerGroup="operator.openshift.io" controllerKind="IstioCSR" worker count=1 2025-12-12T16:27:37.306028460+00:00 stderr F I1212 16:27:37.305799 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleBindingCreated' Created ClusterRoleBinding.rbac.authorization.k8s.io/cert-manager-cainjector because it was missing 2025-12-12T16:27:37.306028460+00:00 stderr F I1212 16:27:37.305836 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentCreated' Created Deployment.apps/cert-manager-webhook -n cert-manager because it was missing 2025-12-12T16:27:37.504231896+00:00 stderr F I1212 16:27:37.503047 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-controller-certificates because it was missing 2025-12-12T16:27:37.715540734+00:00 stderr F I1212 16:27:37.715416 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RoleCreated' Created Role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving -n cert-manager because it was missing 2025-12-12T16:27:37.724513801+00:00 stderr F I1212 16:27:37.724475 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ServiceAccountCreated' Created ServiceAccount/cert-manager-webhook -n cert-manager because it was missing 2025-12-12T16:27:38.484760222+00:00 stderr F I1212 16:27:38.484663 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'RoleBindingCreateFailed' Failed to create RoleBinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection -n kube-system: rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found 2025-12-12T16:27:38.702322098+00:00 stderr F I1212 16:27:38.702238 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 
'ClusterRoleBindingCreated' Created ClusterRoleBinding.rbac.authorization.k8s.io/cert-manager-controller-certificates because it was missing 2025-12-12T16:27:38.908051225+00:00 stderr F I1212 16:27:38.907971 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews because it was missing 2025-12-12T16:27:39.708393111+00:00 stderr F I1212 16:27:39.705700 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RoleCreated' Created Role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection -n kube-system because it was missing 2025-12-12T16:27:39.713149311+00:00 stderr F I1212 16:27:39.713093 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ServiceAccountCreated' Created ServiceAccount/cert-manager-cainjector -n cert-manager because it was missing 2025-12-12T16:27:39.727799062+00:00 stderr F I1212 16:27:39.727677 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ServiceCreated' Created Service/cert-manager-cainjector -n cert-manager because it was missing 2025-12-12T16:27:39.915699798+00:00 stderr F I1212 16:27:39.915602 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-controller-challenges because it was missing 2025-12-12T16:27:40.088594923+00:00 stderr F E1212 16:27:40.088505 1 base_controller.go:279] "Unhandled Error" err="cert-manager-controller-deployment reconciliation failed: Operation cannot be fulfilled on certmanagers.operator.openshift.io \"cluster\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:40.105472580+00:00 stderr F I1212 16:27:40.105352 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleBindingCreated' Created ClusterRoleBinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews because it was missing 2025-12-12T16:27:40.119170377+00:00 stderr F I1212 16:27:40.119073 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", 
FieldPath:""}): type: 'Normal' reason: 'ServiceCreated' Created Service/cert-manager-webhook -n cert-manager because it was missing 2025-12-12T16:27:40.510632065+00:00 stderr F I1212 16:27:40.509876 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleBindingCreated' Created ClusterRoleBinding.rbac.authorization.k8s.io/cert-manager-controller-challenges because it was missing 2025-12-12T16:27:40.898226023+00:00 stderr F I1212 16:27:40.898046 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers because it was missing 2025-12-12T16:27:41.114236730+00:00 stderr F I1212 16:27:41.114147 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentCreated' Created Deployment.apps/cert-manager -n cert-manager because it was missing 2025-12-12T16:27:41.279837321+00:00 stderr F I1212 16:27:41.279748 1 request.go:752] "Waited before sending request" delay="1.160806808s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1alpha1/certmanagers/cluster" 2025-12-12T16:27:41.303355786+00:00 stderr F I1212 16:27:41.302529 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleBindingCreated' Created ClusterRoleBinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers because it was missing 2025-12-12T16:27:41.699746429+00:00 stderr F I1212 16:27:41.698794 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim because it was missing 2025-12-12T16:27:41.887577982+00:00 stderr F E1212 16:27:41.887494 1 base_controller.go:279] "Unhandled Error" err="cert-manager-cainjector-static-resources--StaticResources reconciliation failed: \"cert-manager-deployment/cainjector/cert-manager-cainjector-leaderelection-rb.yaml\" (string): rolebindings.rbac.authorization.k8s.io \"cert-manager-cainjector:leaderelection\" not found" 2025-12-12T16:27:42.116334602+00:00 stderr F I1212 16:27:42.114797 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleBindingCreated' Created 
ClusterRoleBinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim because it was missing 2025-12-12T16:27:42.479436443+00:00 stderr F I1212 16:27:42.479354 1 request.go:752] "Waited before sending request" delay="1.194359699s" reason="client-side throttling, not priority and fairness" verb="PATCH" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1alpha1/certmanagers/cluster/status?fieldManager=cert-manager-webhook-static-resources--StaticResources&force=true" 2025-12-12T16:27:42.490312398+00:00 stderr F E1212 16:27:42.490226 1 base_controller.go:279] "Unhandled Error" err="cert-manager-webhook-static-resources--StaticResources reconciliation failed: \"cert-manager-deployment/webhook/cert-manager-webhook-dynamic-serving-rb.yaml\" (string): rolebindings.rbac.authorization.k8s.io \"cert-manager-webhook:dynamic-serving\" not found" 2025-12-12T16:27:42.906239026+00:00 stderr F I1212 16:27:42.903658 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-controller-issuers because it was missing 2025-12-12T16:27:43.720895004+00:00 stderr F I1212 16:27:43.720805 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RoleBindingCreated' Created RoleBinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving -n cert-manager because it was missing 2025-12-12T16:27:43.929550435+00:00 stderr F I1212 16:27:43.929353 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RoleBindingCreated' Created RoleBinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection -n kube-system because it was missing 2025-12-12T16:27:44.115433109+00:00 stderr F I1212 16:27:44.115355 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleBindingCreated' Created ClusterRoleBinding.rbac.authorization.k8s.io/cert-manager-controller-issuers because it was missing 2025-12-12T16:27:45.113763435+00:00 stderr F I1212 16:27:45.112882 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-controller-orders because it was missing 2025-12-12T16:27:45.698151615+00:00 stderr F I1212 16:27:45.698027 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 
'ClusterRoleBindingCreated' Created ClusterRoleBinding.rbac.authorization.k8s.io/cert-manager-controller-orders because it was missing 2025-12-12T16:27:46.106488309+00:00 stderr F I1212 16:27:46.104119 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-edit because it was missing 2025-12-12T16:27:46.479172882+00:00 stderr F I1212 16:27:46.479101 1 request.go:752] "Waited before sending request" delay="1.195009184s" reason="client-side throttling, not priority and fairness" verb="PATCH" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1alpha1/certmanagers/cluster/status?fieldManager=cert-manager-cainjector-static-resources--StaticResources&force=true" 2025-12-12T16:27:46.485281956+00:00 stderr F I1212 16:27:46.485214 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'RoleBindingCreateFailed' Failed to create RoleBinding.rbac.authorization.k8s.io/cert-manager:leaderelection -n kube-system: rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found 2025-12-12T16:27:47.107284129+00:00 stderr F I1212 16:27:47.107204 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RoleCreated' Created Role.rbac.authorization.k8s.io/cert-manager:leaderelection -n kube-system because it was missing 2025-12-12T16:27:47.114548563+00:00 stderr F I1212 16:27:47.114479 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ServiceAccountCreated' Created ServiceAccount/cert-manager -n cert-manager because it was missing 2025-12-12T16:27:47.130671371+00:00 stderr F I1212 16:27:47.130566 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ServiceCreated' Created Service/cert-manager -n cert-manager because it was missing 2025-12-12T16:27:47.479642123+00:00 stderr F I1212 16:27:47.479513 1 request.go:752] "Waited before sending request" delay="1.190947552s" reason="client-side throttling, not priority and fairness" verb="PATCH" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1alpha1/certmanagers/cluster/status?fieldManager=cert-manager-webhook-static-resources--StaticResources&force=true" 2025-12-12T16:27:47.888194803+00:00 stderr F I1212 16:27:47.888012 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 
'RoleBindingCreateFailed' Failed to create RoleBinding.rbac.authorization.k8s.io/cert-manager-cert-manager-tokenrequest -n cert-manager: rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found 2025-12-12T16:27:48.902400311+00:00 stderr F I1212 16:27:48.902296 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RoleCreated' Created Role.rbac.authorization.k8s.io/cert-manager-tokenrequest -n cert-manager because it was missing 2025-12-12T16:27:49.703512776+00:00 stderr F I1212 16:27:49.703416 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-view because it was missing 2025-12-12T16:27:50.103958851+00:00 stderr F I1212 16:27:50.103824 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io because it was missing 2025-12-12T16:27:50.498985979+00:00 stderr F I1212 16:27:50.498863 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleBindingCreated' Created ClusterRoleBinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io because it was missing 2025-12-12T16:27:50.679799715+00:00 stderr F I1212 16:27:50.679710 1 request.go:752] "Waited before sending request" delay="1.189238399s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1alpha1/certmanagers/cluster" 2025-12-12T16:27:51.324770219+00:00 stderr F I1212 16:27:51.324218 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleCreated' Created ClusterRole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests because it was missing 2025-12-12T16:27:52.103258140+00:00 stderr F I1212 16:27:52.102718 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClusterRoleBindingCreated' Created ClusterRoleBinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests because it was missing 2025-12-12T16:27:54.080705038+00:00 stderr F I1212 16:27:54.080097 1 request.go:752] "Waited before sending request" delay="1.180854407s" reason="client-side throttling, not priority and fairness" verb="GET" 
URL="https://10.217.4.1:443/apis/operator.openshift.io/v1alpha1/certmanagers/cluster" 2025-12-12T16:27:54.291079752+00:00 stderr F E1212 16:27:54.290996 1 base_controller.go:279] "Unhandled Error" err="cert-manager-controller-static-resources--StaticResources reconciliation failed: [\"cert-manager-deployment/controller/cert-manager-leaderelection-rb.yaml\" (string): rolebindings.rbac.authorization.k8s.io \"cert-manager:leaderelection\" not found, \"cert-manager-deployment/controller/cert-manager-cert-manager-tokenrequest-rb.yaml\" (string): rolebindings.rbac.authorization.k8s.io \"cert-manager-tokenrequest\" not found]" 2025-12-12T16:27:55.279840066+00:00 stderr F I1212 16:27:55.279709 1 request.go:752] "Waited before sending request" delay="1.193505086s" reason="client-side throttling, not priority and fairness" verb="PATCH" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1alpha1/certmanagers/cluster/status?fieldManager=cert-manager-cainjector-deployment-reportDegraded&force=true" 2025-12-12T16:27:58.516797670+00:00 stderr F I1212 16:27:58.515122 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RoleBindingCreated' Created RoleBinding.rbac.authorization.k8s.io/cert-manager:leaderelection -n kube-system because it was missing 2025-12-12T16:27:59.108095834+00:00 stderr F I1212 16:27:59.107988 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RoleBindingCreated' Created RoleBinding.rbac.authorization.k8s.io/cert-manager-cert-manager-tokenrequest -n cert-manager because it was missing 2025-12-12T16:28:12.291364924+00:00 stderr F I1212 16:28:12.289979 1 request.go:752] "Waited before sending request" delay="1.002134303s" reason="client-side throttling, not priority and fairness" verb="PATCH" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1alpha1/certmanagers/cluster/status?fieldManager=cert-manager-webhook-static-resources--StaticResources&force=true" 2025-12-12T16:28:12.312133770+00:00 stderr F I1212 16:28:12.312070 1 admissionregistration.go:69] MutatingWebhookConfiguration "/cert-manager-webhook" changes: 
{"webhooks":[{"admissionReviewVersions":["v1"],"clientConfig":{"caBundle":"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJ3akNDQVVlZ0F3SUJBZ0lRTHJPOEt1SVpsTUI0QU1ZamlodmQ5REFLQmdncWhrak9QUVFEQXpBaU1TQXcKSGdZRFZRUURFeGRqWlhKMExXMWhibUZuWlhJdGQyVmlhRzl2YXkxallUQWVGdzB5TlRFeU1USXhOakk0TURoYQpGdzB5TmpFeU1USXhOakk0TURoYU1DSXhJREFlQmdOVkJBTVRGMk5sY25RdGJXRnVZV2RsY2kxM1pXSm9iMjlyCkxXTmhNSFl3RUFZSEtvWkl6ajBDQVFZRks0RUVBQ0lEWWdBRU1KVXd2M0FQd3hvSHhFVU1hM2xJaE9XUHJuZVQKc0FkaHZlN1BQejJOOUVWWmZWNDducWdmaURhcVVMY091NENobTdRUkJza1NLemhINWFnWjZZTlQ2OUNWM0taUgpxZXJsOFBQNEtuNGdNeWE5WDUzRjJVRVlBYk16YUFLL1lvMVFvMEl3UURBT0JnTlZIUThCQWY4RUJBTUNBcVF3CkR3WURWUjBUQVFIL0JBVXdBd0VCL3pBZEJnTlZIUTRFRmdRVWJ1cm1kSjVCSlN6MGlzdGx0VHc5RUEySUJod3cKQ2dZSUtvWkl6ajBFQXdNRGFRQXdaZ0l4QUkwWWhndm9qZVp3cjljL1pDbGFhVXU1TWhBQll0alF6aXFRL090bwo4c0EwbmZnTTc0N1NzdnZ3K3JZQnBjQVdZd0l4QVA4d0YvQUUwbVRQVmE3TTFCNVY2WnA5WWllcE9CcDk5QWZSCjJ3WFNkMGdjaEZucGdmdFdyUWxwcHlMaGFtWnFZUT09Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K","service":{"name":"cert-manager-webhook","namespace":"cert-manager","path":"/mutate"}},"failurePolicy":"Fail","matchPolicy":"Equivalent","name":"webhook.cert-manager.io","rules":[{"apiGroups":["cert-manager.io"],"apiVersions":["v1"],"operations":["CREATE"],"resources":["certificaterequests"]}],"sideEffects":"None","timeoutSeconds":30}]} 2025-12-12T16:28:12.316555222+00:00 stderr F I1212 16:28:12.316485 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'MutatingWebhookConfigurationUpdated' Updated MutatingWebhookConfiguration.admissionregistration.k8s.io/cert-manager-webhook because it changed 2025-12-12T16:28:12.319968638+00:00 stderr F I1212 16:28:12.319867 1 admissionregistration.go:144] ValidatingWebhookConfiguration "/cert-manager-webhook" changes: {"webhooks":[{"admissionReviewVersions":["v1"],"clientConfig":{"caBundle":"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJ3akNDQVVlZ0F3SUJBZ0lRTHJPOEt1SVpsTUI0QU1ZamlodmQ5REFLQmdncWhrak9QUVFEQXpBaU1TQXcKSGdZRFZRUURFeGRqWlhKMExXMWhibUZuWlhJdGQyVmlhRzl2YXkxallUQWVGdzB5TlRFeU1USXhOakk0TURoYQpGdzB5TmpFeU1USXhOakk0TURoYU1DSXhJREFlQmdOVkJBTVRGMk5sY25RdGJXRnVZV2RsY2kxM1pXSm9iMjlyCkxXTmhNSFl3RUFZSEtvWkl6ajBDQVFZRks0RUVBQ0lEWWdBRU1KVXd2M0FQd3hvSHhFVU1hM2xJaE9XUHJuZVQKc0FkaHZlN1BQejJOOUVWWmZWNDducWdmaURhcVVMY091NENobTdRUkJza1NLemhINWFnWjZZTlQ2OUNWM0taUgpxZXJsOFBQNEtuNGdNeWE5WDUzRjJVRVlBYk16YUFLL1lvMVFvMEl3UURBT0JnTlZIUThCQWY4RUJBTUNBcVF3CkR3WURWUjBUQVFIL0JBVXdBd0VCL3pBZEJnTlZIUTRFRmdRVWJ1cm1kSjVCSlN6MGlzdGx0VHc5RUEySUJod3cKQ2dZSUtvWkl6ajBFQXdNRGFRQXdaZ0l4QUkwWWhndm9qZVp3cjljL1pDbGFhVXU1TWhBQll0alF6aXFRL090bwo4c0EwbmZnTTc0N1NzdnZ3K3JZQnBjQVdZd0l4QVA4d0YvQUUwbVRQVmE3TTFCNVY2WnA5WWllcE9CcDk5QWZSCjJ3WFNkMGdjaEZucGdmdFdyUWxwcHlMaGFtWnFZUT09Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K","service":{"name":"cert-manager-webhook","namespace":"cert-manager","path":"/validate"}},"failurePolicy":"Fail","matchPolicy":"Equivalent","name":"webhook.cert-manager.io","namespaceSelector":{"matchExpressions":[{"key":"cert-manager.io/disable-validation","operator":"NotIn","values":["true"]}]},"rules":[{"apiGroups":["cert-manager.io","acme.cert-manager.io"],"apiVersions":["v1"],"operations":["CREATE","UPDATE"],"resources":["*/*"]}],"sideEffects":"None","timeoutSeconds":30}]} 2025-12-12T16:28:12.323953789+00:00 stderr F I1212 16:28:12.323883 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", 
Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager", UID:"42d129dd-f2fe-4d03-82c8-db2f0ce53f25", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ValidatingWebhookConfigurationUpdated' Updated ValidatingWebhookConfiguration.admissionregistration.k8s.io/cert-manager-webhook because it changed 2025-12-12T16:28:18.283741852+00:00 stderr F I1212 16:28:18.283378 1 request.go:752] "Waited before sending request" delay="1.187933275s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1alpha1/certmanagers/cluster" 2025-12-12T16:28:19.284126781+00:00 stderr F I1212 16:28:19.283505 1 request.go:752] "Waited before sending request" delay="1.194389359s" reason="client-side throttling, not priority and fairness" verb="PATCH" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1alpha1/certmanagers/cluster/status?fieldManager=cert-manager-cainjector-static-resources--StaticResources&force=true" 2025-12-12T16:37:39.290003004+00:00 stderr F I1212 16:37:39.289074 1 request.go:752] "Waited before sending request" delay="1.194227525s" reason="client-side throttling, not priority and fairness" verb="PATCH" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1alpha1/certmanagers/cluster/status?fieldManager=cert-manager-cainjector-deployment-Deployment&force=true" ././@LongLink0000644000000000000000000000026200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015117043043033225 5ustar zuulzuul././@LongLink0000644000000000000000000000031600000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/multus-admission-controller/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015117043062033226 5ustar zuulzuul././@LongLink0000644000000000000000000000032300000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/multus-admission-controller/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000644000175000017500000000237415117043043033235 0ustar zuulzuul2025-12-12T16:16:46.148500376+00:00 stderr F I1212 16:16:46.141526 1 main.go:57] starting net-attach-def-admission-controller webhook server 2025-12-12T16:16:46.148500376+00:00 stderr F W1212 16:16:46.144366 1 client_config.go:618] Neither --kubeconfig nor --master was specified. Using the inClusterConfig. This might not work. 2025-12-12T16:16:46.151371556+00:00 stderr F W1212 16:16:46.151332 1 client_config.go:618] Neither --kubeconfig nor --master was specified. Using the inClusterConfig. This might not work. 
2025-12-12T16:16:46.154086732+00:00 stderr F I1212 16:16:46.152363 1 localmetrics.go:51] UPdating net-attach-def metrics for any with value 0
2025-12-12T16:16:46.154086732+00:00 stderr F I1212 16:16:46.152388 1 localmetrics.go:51] UPdating net-attach-def metrics for sriov with value 0
2025-12-12T16:16:46.154086732+00:00 stderr F I1212 16:16:46.152394 1 localmetrics.go:51] UPdating net-attach-def metrics for ib-sriov with value 0
2025-12-12T16:16:46.157472225+00:00 stderr F I1212 16:16:46.154129 1 controller.go:202] Starting net-attach-def-admission-controller
2025-12-12T16:16:46.362102701+00:00 stderr F I1212 16:16:46.357362 1 controller.go:211] net-attach-def-admission-controller synced and ready
././@LongLink0000644000000000000000000000030200000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/kube-rbac-proxy/
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015117043062033226 5ustar zuulzuul
././@LongLink0000644000000000000000000000030700000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/kube-rbac-proxy/0.log
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000644000175000017500000000202015117043043033221 0ustar zuulzuul
2025-12-12T16:16:48.099897668+00:00 stderr F W1212 16:16:48.099147 1 deprecated.go:66]
2025-12-12T16:16:48.099897668+00:00 stderr F ==== Removed Flag Warning ======================
2025-12-12T16:16:48.099897668+00:00 stderr F
2025-12-12T16:16:48.099897668+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more.
2025-12-12T16:16:48.099897668+00:00 stderr F
2025-12-12T16:16:48.099897668+00:00 stderr F ===============================================
2025-12-12T16:16:48.099897668+00:00 stderr F
2025-12-12T16:16:48.100119093+00:00 stderr F I1212 16:16:48.100056 1 kube-rbac-proxy.go:235] Valid token audiences:
2025-12-12T16:16:48.102380649+00:00 stderr F I1212 16:16:48.101043 1 kube-rbac-proxy.go:349] Reading certificate files
2025-12-12T16:16:48.102380649+00:00 stderr F I1212 16:16:48.101497 1 kube-rbac-proxy.go:397] Starting TCP socket on :8443
2025-12-12T16:16:48.102380649+00:00 stderr F I1212 16:16:48.102299 1 kube-rbac-proxy.go:404] Listening securely on :8443
././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-7c9b9cfd6-d85ps_3be77ab3-0638-4ffa-960a-34823c8e08a1/
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-vers0000755000175000017500000000000015117043043033104 5ustar zuulzuul
././@LongLink0000644000000000000000000000032000000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-7c9b9cfd6-d85ps_3be77ab3-0638-4ffa-960a-34823c8e08a1/cluster-version-operator/
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-vers0000755000175000017500000000000015117043121033101 5ustar zuulzuul
././@LongLink0000644000000000000000000000033000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-7c9b9cfd6-d85ps_3be77ab3-0638-4ffa-960a-34823c8e08a1/cluster-version-operator/0.log.gz
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-vers0000644000175000017500000126177615117043063033124 0ustar zuulzuul
[binary data omitted: cluster-version-operator/0.log.gz is gzip-compressed and its raw bytes are not representable as text]
Žº#—ˆÙðA9-½¡¬'hgÙN‹ßKóŒ>Å *ù*IL>&®‰½>`J uDè“Ïh¦™ ŒÎÀP^8㪱Pìs) .ƒNx"ª k'¸iÖ³ —´¼Ó•‰ÇOMq0-X Qaµ Ê€tíø—-%Ð:éü´ö¥ðŒ¼{{þô£i&ŠPCqôF½èq«‰ÉÓà›˜”~ 1M¾'µ©°­ö†8½¥vRÙ [ cYä`/_Œ“‰Çgs¡P”&*)€ii¥ ‚`{ÔS¨åíx¼"·™xBžjchÔÃ4*4 ƒ”b¡ñùÜ9Ë€^ápÁÄŠOþ‡==…U:JÁ¨¨W-©t!OZµú[•ÏáÍij¥U{Ääá‰K3̥ƈdmˆ 0©Ò±øZÀ0½¯ ž Š/`ù_.¯’;ð䢹Ztµi”£4‚‚n®Ylç“ÙrcåÑ3{¬°A°ðp­Ã‡¥ssC[ŸåÒsi´eÍKÊy‚í¤-¾6o8p}OtY–Þ¯¸¤Ñ4ccˆÁP°¡šê¢È(Vlo*Œ>OPy< 6 §³ÃsœˆŠZ±0KA´Y¦~í2`Û÷",<~â0ö„ç ƒU^A¨Rtq&EFá2Л [?ÕÙ<OúÄçȬ$³‘ι”ÚÁ {ß“ù/ëĨ9¼Ý5ÌCq§ˆþ®l†i…ï >Ýg-éj@ôßzÑÿ÷èx¶½nç³9GÎÛíq?6Ç38Ì®›ÛÕºYLEo÷l™;ôIÌív¹ôwh“îïù–ÛÙ/òô¯×‹f×~üjÓoצðŸ³#ý·îS%N%|ò-FÆŸý÷Ms{º\¿˜opš¾XϯO6íªm¶í¿m/eÝYPVHåä»8wÀõ®Ñï‚~ßš…óBÏEóÞ¶ð=8Jø¾Mk‚x­ð ïðwýi½™·gï›Õ¶ým2;O{u¬?Í:fø)rÒBW³Ýz6¿Ùl08$pÞîþuvµîq†9kØÉÈâžéœd‘¦N€©ü€^l`šüÏÞ|†Ÿ³}ù~Ù®§_o6ëÍëåv÷ÅÕrõeßàåpôñ¯vg‘·éÓÿÑ^áãhj ¶Y®p¿ø—^}½ÒOüò ña.„ ½0âËã´žyçg?¬wÍê gÛñìÕúòzÕÂÄ8dz7-a¾\Ð³Ýæ¦¥`Ä4Nãw/¥ošíÅÙÑÉüúæë¯Ï•½/a ?Õ]s¹p>}ÝlwßoÖé4w¶[^¶§„_ ÝdzîÏ-¬"ïÚÍñL*ø×Á¿áxÐb[Ùíû†¯×ófù ~@Å¿éÔúÏ&ù¿¾y}vt±Û]oÏ^¼hæsèú)hì¢ÙÎ×—/`ì›]óâÍ7o¿:A@E0Nð÷^&¯ÚÕöì¿~Úî60I¿¥î,A,‹»aÀ±þ¾òNg¨–ׇŸ~|»k¯ÏŽúÏðÛí~Íw€'‘Ñ#ý¸‡µåå=k?ÍÒø h?©¸;_À×›ÛÙünW|™¦çlÑ™_/ÿßì9³¬A/ÿp¿˜~Ó“VèÒ½Õ?¡7¬–©Ã þ/ë_ú öwØÌ ,V`½ˆƒm½ÿ™Ÿ¬xƒ?*tÖ‚–Öï;‡kûáþ5YÕ~/]îý'ü4ûÓòj¹½˜vÈÁ"!þTÿ÷ÿ}ÂÃíµõQT(¶Sb¢&“cƒÙW#cá‰ÏáûI˜G¤nM0î” Û)•íf¸€ÿ€ÓWÁ=ÏÁãóÝ?‡Y">3hg…2†2´ãßÓä2dÀžÉwr£ñÏf&kµ“6fôLZ»B­i¥ …Kø6n˜G­uÎ*JªÐ.H“yÎ3¥ÊÁZ>…"×yl»‘WÔŽ©£%W¶wþ9reàÞ‹åÿLC÷Ï4”xúÆPŒ´Ú¯†vÝ׻»6 áQ>Ž»K·æf·ÞΛŽ1í÷ ¯ËÅ¢½š [Š ‡#ž˜-3`OäÛÓ»¯á'fvÕÃù¾¾r!%‰^`;ó˜Ì™–tÊ ‚ƒÇš<+*ý~‡²EL0`Ç9Ò§cðªÓdËã—[ÆŒ^¸Z`àñ.w¾óѬÆaV-¾Þv^SKœÅWç6Ïâ0]ÑÔ¾‚¥ÅÁ²® c÷PF‹ &Ê`£¥ÐcÒ•;{y6%3zádMpðø)žôdž×þ„vÄ„Ó ½þä1ÁiÙÉ¥‹‹w³«Rü—w°"7b2›ø3©˜Ñ [ÃFààQ:£Q8’Ï@Ù1Hï|ð”Ú¹ãšuªŒÇãõº‚ÀÂS&u2Åè ¯ÝcD,P:û3S;cuFk1‹‚;üSØÒø½*~ãÎã³GRžÝ©aÓ6‹.»`ÃÂÐY0]$%hgœ)’#q¢„=°Â——´•…D¤%ˆÔZzÌðO‡o”¹,Ÿ£Ýñ€¥ÖFžÇHöÈÏo6ËÝ-zJÛ;øÏv·i–W»í§|>‘e4(‚Ë`àè©Ú`´T•ÖQªe/[ÍÄóÞ$¾Ü0_Êá­ŽšÀ§tŒ‘Ÿ<¿¨HGc‡vºÂRÏÁctŽêèûó%*mÞ5ó0¦>Ü&5A`‹J8"ƒ^jçuÌëþ%N>_Áœãà ª¨×?Ñç‡éC³C8-<WãË@“£”ùD=2‡hË7Oº@ƒ“®æ~¬ùIs~¾iÏïãÎaë ´wšðX¦vÊ™¢£4;.Xq$ÀÀ£Ê¸öma˜6ô>Fø‡RjòRªY 2(•ÓWÁtcá ufâ‘0 HÑo(3Ú©,…æŸ)YÌòïÖyx¢­q×XŒÃ,Z|.G0ÊÃíRòæøcDÊÁDùÑgá±åžC<$ŸC Ù‚ÁŠŠB”t¶Æ=Þ(ÍŽG­jìñôÒSÞ Ìæh\¹Å€)dìPÁñÏÁÃVÀâ>U6{Y%ìð€«e ‚º›‚vBë¼£?U¸ 辂q€x¬p¤qðøÂW?äK¸á£6ÚÕµR;i¹¯BÊÉ™;šòš`àQâ™V^Þï±–R\ ’,ãqnÐàŒxæËŸ*àjØ{,<Üg¾=7m³Ú]Ì/ÚùϪÇFÂvêB£§,W ñägM«½N6ž-¿g[¼‚ )®þ'†=a©6\ßQE¡óz²g’=„E$a1}ùt S>±û=ø$QÀ#M¶|8ï·]}•¾ Ãåò<‰r{z'ÏGo½¥'FßV›@ÂS»À~»Zg¶1zdupðøìÇŠM›Íá`ý|G9Q©:d~/Gô ü~ÆÄc§°dê¨É“˜îzö´V<œZþsí´— äq·öĪ/µ!ZMÍr©ãG¥×ÝÁ}Ù­2ÞF±½‡lŠòµC˜xÊXÑOx ")ζ^TÂPЃ‘l‡ZÉIÅÀ½7©Æg,t½Í …ÑÑYÅ@mìŒBdà‘SË,®¶C•¹aJ*­…ÚP ……°mcø¹“fÀþ8µ|ÍTÑŽÇêM…‡…'SŠBâУµ3Ÿ1­;oWq±2ÐKUaêsðh‘­jµŠžìéÿnÇm$¸ Z ¯É=]l¦Œ/SÅÌ€+¬,<ùv‚OXÜÿCâ8­¼´ž¡ó.[!»"fôÅË gfîÉäªÝa›ëõj9¿}ŠÚfµZÿzÿÒv¥±ÅáœsÏjÿP»´S>Ÿi8MÅã1kUÁÆÂã²m ‡&êsÚÂ&&"XÊT‹/¸Ã]L¤ÔΖp/ËMuø{HáÝMi¢’¸ùÂ\²ÁO[-h5c)ÓgiuÂ÷zñ¥VÛ¼ŠÞÈ!Ýëfþ3|g™~íÕùÀ‡ ~+žŒ5Šº)ßWÄ»¿ÝžÎ×›vÿ¹D65aûGØs•£rꆷË~¶VVÅè®ÂNÆÁã‹ìd æfÇöê61Høü"˜‹KH"†íYø|u‰2I—ÞVðXrð8‘»XYï ê$X4mã©Ð9/‰Ò´©6¦Ä‚ÀS1ñ¨ ôÃHŽ@_ ³PQ|5ê~‹2Äx&_ ßE=ôêûÌõÃðJ+'EžÔ*1ñâ´"ØXþ⌉'dª}Eq(E¢÷N¨ ½³.wI¼\‹%  NXâ е+Ÿ1ýžˆ×Ô<1dÛ‚–Wzµº^5C\*‚Ë  è˜ÆŽÉ¾B¦*X“eÌ]>“ÏvkzÈãÁŸ÷¯Ÿ$1¥úƒ”%—víD¾Z™Y$ ˜”‹1»²²¼à÷h©#ðDîÍÙUsÙnáôþÉÞ:ìVÕšàƒþœ7”Í¢02ßmiNõbJfˆÇè]»òa”Ýï1Ö5ÖÙîN9Œs ït±p µ`;ö[Ùæà-Ÿ‡ÇˆœkÀ6Ñeº‚ÆKMLt:Û…iNŽî{Öøãg¤;òPÈ ÇO¥²Y¢÷#ü ÙÏݧŸŠxOh—F¥²D_0¬ËÊœKШ)Ä÷´:ßÐsðÄ|u¹×«Ëî ü€ýFx’ †:Hi …+Ìél¶gÑ2úž±,Y1fYâ ˆȈÇg#ð~RñÁlŒÁ•03-&a×T´ÔNÅO¨Ó§¹6ö)krîS‰> s‘>`Ԍ¡ÉKc=Õ84—õQfVIï†ÚsÿŒ_¯d¿^éÁËÁQ8` ÃÁ:b}Î- —8=(_Ç…LÓì3ì|³¾¹`ÓlF,Í ,µa¸hìÐu'T¢7·Íåu¬oṁ²†’…'Ÿ x·9õÌ=¹YC¸u½ŽN:(ÍB»gTt(­YzWÁ ÉÂ㳯D‚È€á›ÖR'tä—hª c~_a+àà Y|Qw~žÊ}¿Ž5„E‚G v å=Ç”ÓÙçË–<ÖpBsð°_]¯‹åvssì½»Yœ·»qÎÜŸDž\/Þ%n‰6£óŠr§bP`49|A“$<­Öì—ý:⇜`2]2¹ áöMQ…R8ê¼ññ7wo¨,jF_¤2åeÂÁ£eŽ„¾ŒÎñ€;ßá#Ú÷KK÷WjÿXWviÊðv*ãk°"rß]#¼ƒGªl•Ÿ¤vp•º2”CððÀ4|BîÛ±_Õ’;³¦´³—‰Çê‰! 
J—’à2(#bäéÚiöSm}3úR<Õ&ÏXVòÑú»¼tì!cú‘ù”xûdÜð¥J×N»©i†2ëš½|©)&)óÏr)U¥xi<¹ÌA;¥tÆ’“¹äÌÀ_ü×9²sŽ%Ò©ð.ËÉá§´];3• ˬeF´­°<°ð„lf&—UbVA]¤9hg¬Ì‘3—¢À‹'¥âá±B•´º{Lºa&q;UBZM Oo¤t6{2³9½(^¶¸Çc5‘»µo'{y˜oÆû}Æ“9çÄ–«ƒvÚÖ];kMI“­còXáŒÉÀã„-b,<¢ÐSˆáÆÚ@Ù7ÐÎ<£e1Ñrp/1ÅÓ3°a{ón;ßüƒ¹«in+×±¥kÖ£„ß³}Ûyõ¦jªf7 EVUÛ–G’ó^æ×y%Ûò‡@—DzÓ•vÑÉxˆ € °yhW‰P_ÖP¿PÆ ¾^ʸ!.˜·|ÈÁg…ýà ©Ó [YbÛ-¡Y¡p  ʺb¿ºÝpöa°|pq<$x|·®u ëzþŽ~êxØ-¿}Û¬&åŸÙˆMJ–rlÊ:ÐéAv_n $k!Á#/‰âÜIk¹­µä}íÁl©x7y—c·îƒHÌÆ£à0Hðب`!¶·wçìê œÐ;kŒ¡L\YgÅíMûqX€29…mà3çäï•?ß}yÞû/—Ôø~LIŒÄåøàÀw—ê€WT° Rò p^!r”à ^ÁœÚ2½V*ñ•…Úݲ³”å ÈËåÕ¸-B#²àñÆöíè"rÂ"qÛW“0¹Æ„ uM FãŽK„H w"< `8Þµp¤¾ÐµsªO9QI³²Î$Û·ÕK_~ É ùh;'Iæu'åw}¹¹`|¦Üá\gOyã æ±Díà±âëR£ûç`íyAU"õñÍ53ªqʺèÌÀ»g`£ÆŽ ðˆ›lI/ÌÅ %Ö7›Ã¤Îæ}ž­•w6ÛH”ZLë ¸Þ&à*Š@[;<¾”áqF£Fá<4[¼þ&[BÁè0 ‰ˆÞ(š‡·bâqaî=ÅTâñù˜Ê_<<ÞÞ.ŽW‚çáXh+°6ª Ù‡H®Ý@£J­ÂLB jLGïÇ ë;d¤^|iw×jíÔ·0gßîp\ç Ͻ—èÁe>`¯a $x\¯ÔÂI´‰=ˆ½×rlkyjM{xè´.”à¨Cª¡ùÈ£±a“¡ØŸ®L@Wbá.k²U €5xÐwï1/Ó-¶u[GÈ2•;‹õùÎÿ*  ¹D„¨@ žä‡ZŽ··½¯ÔJ|{#F¬™uK‰ÑøÜ½}_–óeP$xðÊüÓœŽzÓ?¬+g/““A¼.>Ѓh­ÑØUéíãô~p(AòÝë´}z[Ç©5U•¼·ÞFC®V^z3ÔÌϱI1²ÂŽ‹ð@—x€¾¾_œz‡Ï„‡•0×ZòhÊ:'n÷8“¹h`¶\‚'w;ä‹åî°ù¶\›%⾪gˆH¨…\Ï/rã7_‚'$½Ê³TK¸D€9¤ìÔëÊMM5¢ûkH)"x…“.ÁÜèþÛGým‡¸ B´©8 ‘Bh°Ó—²–Ñ: àñ®ÏAß.?Ž ¿&•A[eSßjï|& Ö¦«6ŒÎßK9)A?þÊFˆ'>á½À§tèùŽ1ï(ÒJ¥¶vk‡OŒÔ´2?õTÛu´O?âó…ˆÆùñ‘àñ]2Ût9)8_®Ž;J.R|®ëL)Ñcù,&:*ð\Û5õƒ.çWæIÇÐÖq»r 2”Ó:L®G^|½`³Q „ö° ÓŸ%ÈYlî¿í–“ÒˆSTç˜gk [ÅÍâÞücYË—)Y…È]‚ÇÅ.Ûóô‹mýÖ¡È6B¦b©iPzêa® ¶dTðDxr—;ðûÍ™ºˆ30x냡¾^e³¾KéöXŽ ä‰ a§ÌÍÞIKF¦7@¶`h|ÁÃÜÝX|gýø`ᔉK­DœE *¬âãÉs’»õ÷Íþ°ûµ8uH(+«Y¡¶­–@ƒÃ@}ú¦èÐ¥°BbõðÀ)ÜáIðˆ_ù|üeý¹ÙÎF˜Õ:õ>9Ÿ<ÄYµÂ׎UØQ žÜ!‡tAo™¸ÍNõ–Ú9CáL•y¹‹O'cž`VðçxÐtñç~î~¬wë3•g!!Ôrh ’S S{5?ßs-÷8³Â »O20ó}ÖôÈmñ¤¹÷µñ˜‰ú„Úœ³„dÝÇÔµKí¢˜ˆˆAÃág÷þñ°¼ßüëEe8P¾1ØDBÄ Î|›5Ÿƒ´I!õ&Á¦Ï‹,Z‰ÄMiíÈ™ äÚ‘3ÇYÌJDШy“àKþPe«ÛíãÍâávy(KîÎ4Gœœ²þ‰LOßcŸXó)޵OîÖ­ò_ÿï¤G¢ŒèXè¼#p×Boßå}-M%HAÁËáÉÝ.Õ.ÜB}nÜNâ,åâ%ùâ.Rùß²}¿î•ó¹+À¢#xÀèØ€WDÈ͆Pk ”}l P×ù,žT©Hi‰$ÁŒoi.Är-«ò£íÝâ¤íë;xB©©8¯ÞSB Äàt섌ßò5oµã)S_¤¿˜ªÁH­üŒ ÿóÇý¹9²óáT#¹ÙÞW%¼»×8~v_†IŸ½3pî¥|áË7›ýòëíúæÕòaùus»9lÖû/ü­þþßžýïÇßž-Ÿ…Ã'Àã;ºâï•ýêž°¹Ê`7ËAÏiÒn'’@´T&ë(ÝøKpžäF’dR¶#¹ÁF a7Þ Ô¦Ú¿%0õ(Ì'âç‹+Žɦ]ñý¸Ó UϹ£"Ž2ÉÀÀ_Šd`}·0’³5Ç:ئݨÍÞÞçTSJ›p.ÿµ7çé@»Æº¡þHs*‹üÍÉp¯ÌWWܽhÃ3Úpñ¤ù!»äD3)GC¤f¾æœIÃEéE.¶äJA.q;Êw;2¹*<µÉ 4Řrp7‡5tÞN4዇*4A[ç}Òxš8DÝ,ÚGš-λ ø6 ØÒ v/ÒxGJçtHS[c±ð`ÛrÒî Oý™fM¨]yø}žec®ÅÝ‹6\1CÔ¡M„l'^çïKÍ'g“£qg Í””!r '™?3j/r ±Ž%™Ž¿ÂÆ]?D’#ê aÎå{3C ìòAº~'±¥Uòf¸x²ý†¿± t*:»XTÆ£5YãR<Þ~/q¥UònŠkS¤ãà^ Þ;„´ÎÐiêM&¦€Ÿ4€3So‰zQ«%(ÇÌÃÍXªñöƒÎNçTL=G dÇ0L*H/bÑgè$·NF:c ,<0kH…Ø¥sÑ9Ç”8ȯs f îC4¦„e³Ê-}q»=ž<ü¶ìss+Èô3瀘7p’Ù»ãÊЋNl‘Q‡NÞùÀÁãüðOZc€æR¨É A|ö)c ЋHљ̑78"%ÑsðÀ Û±ö Í TÜŽ1w¾ ã"ïE® )êPϼa¯}‰†Â3M•©…Í|¯‡…´5rrž%˜Jš‹–¨ÙR'Ü®Cú²¦-™{FkÑGTküŒÜ3 b'2ð%Ê:dpà€ƒÇΪÛi*ÙÒ<ðÉs(ÛAÞªÎa¢ëE®0^Ç˰!‚<]^‘´tíh&ÄlÀya&È^„HÑ&ŽLQÇwpÎg &b¸HŒ À‘.«<Ì-x2 Ì:ãëÙ zçèj°˜—Ûu¬±ìó3Vœ^$ãJou>PÁëOÖyý=ù»¯ËÕâa·ýׯiè„l(¦‰så¼ûÚ» ¿‰ò6Í넘‚ãà Òø~}¨k¶·›Õ/ÙNܬ¿-o‹›õýqèdnH1r®üBd÷›‚¿¸â&£C#Á±ðÄÑ4z¯}:©êÀ:|H£ØÓ‚Ý‹4Ù™À‘R)¡ú»Ÿð`—Ù>óë¨_{tv·H[ÆýÜù Ü6¯´%ëD½èò¡űñX7w¤ ³µÙE¤ÑΕjtV¸ý¦æk|‚N¬“c]™Æ n¢´[ßÔ˹åí~·þßÇõþ£3û¼æ£9N¼xÚѹäÖeŽì—¿A8‚€ÏÿØ¿K 9–rt|ò8¹å <Õ¹ü¿Çݺ½at;fðŽ# ‚E‚ c"W1J¯’AjŠó ª3qóõ®½]tÊBNtòºö¢ ¶Ï°Ø~l– ×hð Áseƒ‡nƒ§™«Ÿl÷%>VQ•_\œ*…Š$.¼}ñÌ¢ÍHåRv¯1E‚Gþª_¤Ë—5)’| ìhÀ'ª¸µ¬ó­Þ£,¡'{%ÀA¡FF„gn‹ú?ï75¡¾B´ÔÝnY2 5BÒò«ÔG‰ðä¡§ÿÉ·:lËÛI—Dô§'žŽ¼«+ëRœkúðVŒB1’3þÓ- CDiG¬o¢1e*ûÑe´C¿œµlìÞ8 ðø±á@q£O9çý“f'Aw*žR‘©|GY‡€3-A? 
`g7g ¤ ¾ƒO šVâH‹L5AGïSÎ>Rþo]gBŸ|bgfKDИÓ!Á]cÑÎÞ(“HÎÕfK¦N¡À—ÀÂYM3Á%´@á<-ÄÃöfÒ'°gï’7™|ÅZÖŘûˆnl–€Ï ×Ò<©×U"Z{£P"{—1ú`™-+ëlÔ4\6óñ×`ã !Àã4ý‡ú—ßn—G¥6£øøÉøšU æMël†>&¢#«E¸ñ]KdxBèc&ˆàíB=¡ÐÚ#xZ Q\ϤFk¾É%Vð¨æ)Ÿ”ú6–ËmýÖ< Hôë8®³¦ÙèÊr‰ãžhç·e»TÒ¨'PiÁï‚!zéÆc!üÃ!å¸@P°!<èÆ–>=éqñõדûÖžr?9ï ~CÀ/ëšð?¤‡Ã"àøêžkûÌ_¡Ò—GA“R#¡T Ù¸DÌ‘›Ö„±åP×КÇÏá flEÔöæåf¹=ü&~òÞGo£ î².ˆ‹#µ¸,¼ qäõJ½Y®ï¶÷“B¡Pô);RôQüÑÉf,1cˆ¾Çug¥)ü‡_H¿P”¡?ƒIˆ§Kµæ%B®¶»õv¿øºÝ6we£÷Óû?"¨,ñ[Àf)½–šš¤f¢ø§I"@Tðh%x’¼_Åö¾¹Bk ¹_¬¦ú¨õòþñaÒ-º­•HÙ“ßÝPï º¸1}¹Í†ïÍY@Ï7Xùd°[+F$Ÿ€²ñš A™YÐ*D <.v{^=¨eÑån÷x_¹>ªtÿé¤âãÿ>î¦aoŸdc$h|.Œ¤òju]«Ô™þs·^íÖËÃúæ'9Þw£œÚp?O8+-et“`Êù‹IJ.9ñsߣv‰LIáC"Á®+µÁL»Dâ)aJõ”SŒ~ ©¡ÄñE^B<0g"Î1ÏüåyO¿ìW«/Ëû_›š.@Â@påûOá¬ëŒëj U"ÆÝ›¼ÿÙEíÖãs»>ˆT›ˆ´Ôé+É‘Fe¼¾ÃÄlBóq¢U1%xœhZ…ùb¡©d~Y×xH)6#¨*eüqìg^ér ˜ê%RIÓ 7ë€bt='Î:ôd´çø¹B<ý]‚z?½\­Öû©hžð§³O¥DÓ:›l7k0вq*µdxäexê}Øno¹ê%Jž2ºÌ zË:Ì¡»E1›5k¤ Exp°Uhº×é“ñ!ÇlˆçÓ: yŒUèF[™88Øh]ÖcLC'îJD<ž<ǘ…“./÷*Å­¢KÑiÜʺÝ‹p« ÇgExäO>®V¢ýò®n¹­I¿ KSÉ–$ãÿG[®Ú¬Ç³I3›²“¹Ùäâˆ<–X¡Hœh\y¬}}²m€G4%™§Ñ<’­½pY"AêëÆ×F »_‰BZ «šóh'¬,ã†ó— …ä¨@Âcs{…Åúz;MR=®Ftë$óš1TäZ[^ßÀs†èÞV (ï‚‘ø;–ñ ¥²Ä0ÎÐËQÖ¥0E–{ _À=Ð"2ƒ(wWVKaÔ†qZ–R¨M@l*Ä<ô‹Ã'i‰Â•ÔÊÅ0‡2zŽçwY LFÉ Þ‚GåÛRîy;Ùk7©ºµˆnC7mÃ…Ædಊsù—¯NDÃã &‚ê(\‡ÒB\£¹'L¾³KEˆKEò 4 à¬äaè#¯_ìO:ã݆B‚i…ɲ궜SH¡4ª­°6Pð8^Ö A·‘ÂZaÆ_ôˆópÆd±B—§¤zƒI»œ-îïÚùfàIƒäö T î±¥ Æ9#Ëú ªçüýtv’êºÛô]>‰±­T,<§s^+¥''˜«6] ÅX…¬7%ƒ‚x&xuÝŒCð_ïã $}o¡ô¶ë…q\é"ÖO£°S’Æ8ŽWè Y c*¸$¢võs­ÑL8˜æ\Éxà²Às!ËO>¬ºXǨj»šEµ"izÎÕZÏ1;äŠg6`óáu® x¼-é‚ò ÚËp$Ö",Œ;áØb5ÎÄ–‹s€„çD|ºüóXuQ_Õ— §ùŸ]Û}ý ŸöÛïF`Âý4:ûa~ÛÌ'¡Ãêk $kð/Ï9W£UºY.g÷ŸEýãØ?>Áú ÇOì;(Ž`4]C”y³j&íä ¦âæ¦…?¿µÃtDzÍ@µè£/•=¨'}ü¦,{|Sְ?~Q¾Ý)íÂM …SÊù’~6ÅO¤ƒÕªÂZKÁ£Yi'Û»E±—,0 é!Æos\qí.‹],g ˜ËƒÙ¬Û°«JäBk™(~Þ—†GžXW­ëÊû¹uªYN_ÕçáNUH(6m†eÄ '‹¼Ï¶ü• —?¹2oáÅÉÅþJv¸aÛßxÓ†BBI‰´7Œã8“§©B\Š Ú•çÑ™ÚLÔiu†ŠA"+ ¾;¡òf!* —oHÄãêxÞ¯B †fƒ@†q’\¥9šV)SaÎ x´.mà²_y€jæJPuJµÓr¥Àv"9›§Ù Ñ²[5õ|(\ƒI®6˜Òf/ó5²(x\)³šSýšÓ›s%‘gûqœ™Z­ä'+EŠò×5ixÈ. §(kºF²=Öa§Îµ˜¼]1óObp:RÅ+Ø>àEmI‚`ŸÉE)ŒãdÛ¯ÂRоB¸GÀ#˜Íz`’ Td“lœ0VZ‹Å/aãEM_çCª*D~$<ù—ýåjúa:koÂÃcP’þ²~ ¦5–p„qRæ=+™«!¬*Ï '†ž’ôPÔ†&RLKŽC¥7êýý  ^e–ÓñPÝü‡f64xí—öúv±øùÑ £½&£";=‚®#=½$²u²!\ñÆ`Yg§ zDŸ•Ö° )=žâɇ†xÓÃʃå¢BœLÎßVf-A!Õ çxwÞI¨ãžh0ú|¡~½ûyº#ä²;ôJxîJg‹íä†L`yÆÖ/ò`¼M¦ëæ:b7Ëæz:›n¦í:>ƒÏ½Úl°@5)$%ïP~¤Ëå«ð#b"‹/I±F›гA>„†¶7×,npz|úh‹â¿ F…:|úU¨lžL&ïfÓ žádà•fX £’ð¸L«Ã1{B°Ç9a”à)¢ô>2MY0‹0˜E©¢êJ,²Zi•‚Ç ]Cèºß;j‰gW“1þÔe%ƒäŒ7)‚Ú:ùÕt&Pð¨|>aÕÌoÚð4~Üìí^Ÿ Ýv5ofÏ þ &Ú1§œÆ Ü.çÂ+óˆ@ÀÃ]A—•'ú•‹âr®°•,óu&_”µAœ8¥+Ÿìr•0eG“•.—·Ljí0¡,¥(h••„ªBÌFÁC¾”÷¼§h‚#DXg¬ Í[1À±½{ŽhmÓÁ)*Ì6+m̈©ÄºJB[ÌóÀnÂZ>¸Ýípj¦VÒU°m ?(•ò ¥ÛíõCMÁ}µkk‘$~<$eµÅ’0N:[Ú¢m6°^VˆÃ(x” ïÅŽ=±5óûít—¦@̇æô†Œ†0މA§ŽÑÓTØcQðXžauÌí9$ëîò‚[‰QÆY+É»ª,L‡N¦ÂÜðH^Æj{ÍÁ_²°fÞõŸpŽãD–LÉiô# — Ï- ÊÖŽ^GôèB!a+5†ÛI£l3ö(De•°…¨*´ž¤á‘.£ß.öx¼+Ãb!vðÆp/˜0ŽŸHb$·(kÑð(–£8Ö1->*0e@ôè¤f [mâ8ér.ÐDš¦ÃTªÆtðhQκ+JHáµÅÖ'8y¡.ÇJ nËÊO7 O–Òýò  “uÑ¥ÓU^a ‘pÆ{[ÎÂy.˜–é ë7ù!¦¶»ð˜èbütÏb2!',¼@Z]Çqô²…)JÁn+„é<.Wë脘H!jtp!0ʸPx2¯¥ŸBY^_Áä x«aòˆÙÀþß*c‘ò qœá¢Â|gÄ[þž& ù8!EQyˆqke„WÎa[p­´$_Ó-ç“(°m§àqlh™Ò‘L8c·/®|[&"ž ¨Ý-æÓ¨+¢QD•N ®°ÃqS® ãNr<é`yùS 4õê½:ÍBË?,Y£Cžžç7ÿ$§côÂV˜{) Ù¾î×™ å•ÐFî4¹Ú¶ŸÎO‚ºø…}"zìªjnÚ°iƒ}ÜÝöP» ]§¾ö¶»Wh§ÕîãÝç/ö_<<0N8« ÔÂ8ëD!G ³at¼Â"@Á#r:‚Ç ñ¤ÁÔ¦Ã1 áP˜Ú‰O64þ°ˆêâ0£ï6Í*›~˜ÔîîÀ¨£ÚãN”ñ­¾èþ?;­—íx4¾ •Ü×çâÏGÍ|2Z6÷³E39†ÞK&ŒÇÑûƒKXè#ÛõtèÐÆYÞß{˜®GøåËI³i?~µKÛ´ãÍvÕ^è¿ï>su¦.C ^ùæŒìêìÛæþrºx1æ·y±M[ÛYÛ¬Û[ß6B›+'4ãÂðk?6~ºn䵓ï[51–É1kÞëÞó\sx~dM«»öšÙ‰5áoýe±·Wï›Ùºýí¨v L-KÐÎñSqš±ŸF; ŒÂ«A'-8m# á*\ Ü´›ÍÝ$ŽBÒ Ø#È Nq‘€LD‘Ñ.Øé;z»‚…æŸÆ ß³~ù~ÚÎ&—¯W«ÅêÍt½ùb>}Ù xù§0û᣻¼Ó»øêÇÿhçíÎ!^IPm3…Iþâ_:öuˆßøåì×1c’MäD±/Ï£K»²Æž¾_lšÙU°¶óÑ«ÅÝrÖ‚a\±óÑÛàŸÇÓ½Ú¬¶-0%$ä`pœ¿=•¾nÖ·Wgãåöõë¡çì%LôcÞ5w£àÕ7ÍzóÝj#­¯6Ó»öòÏ€ð‹ îóÑî÷Öáëvu>âþøçÎG-<ͲZIÛ |³73ä+ø ÿvÇÖ?åxûæêìv³Y®¯^¼Ø]¿ŽÝ6›ËñâîÌ}³i^¼ýúÝWAÀ"˜'øÜ+àä¼­¯þë§PEu~ñ[g d™0?ÔTZE-wÆõÂ>ŸÒHîÎè‚Í|¿jæë˜ ÿ¸¿³œðÓÇ_šÙì LlCL&®uX\Þóó¬°W°Ãà³¼3$cð {Ë _|‚bÜNûò7ñê =ò‘ö/ßïyßÕ¾Ú-&Ÿþãëyì¼|øÐùø'ï?žýûv: 
¼<{ÂbÖ†ÿ¼¿½ù*F†áµ8YoÛp•«ûøBì·¶?~» /¿úî›ðÛv ã›éûv|?žµß6søì*¼wxEðüpÕ_7Á®Ï@//~™Ÿ&»oÞÍ›åúÂÁðkì8½r`<éE=Püo€+7·›Ï¨âo‹Iûý6Äe˜b޽÷÷@Þ³w»È<üxݬڻv³«ó·`fËÙt<ÝÌî?ÑÑéÎõývt]6R$¬Ë†ø´.‡`á®…`"dˆÚ_A©au¾†Èýhü°*¾Œæ9šì¯—ÿoÖœÑ |ÐË?íé×;ÅDO"í£ø†.°þ䦞.ð[|èxXßa1WଠzaO–õî;y¼Þ¯r»hAòçßõûÎÅSßþtý:Êjð馿XñnÜa>v¿Çq?þ2O×·Ã69£/$g—–¹ÿùï#O3­”vèvÆqMN¸ýŽ Š`¾B¦†€G°²™šG/.‹Y¿¦±ÇòÖ…#² Í=Ã8)s>¶¥¥(0m…§v<ΖËÖ O:œÔÞk-1˜a·EDfÞR$3‘ðø%;?{cÃÉ‹9„—KIT r +^v 0À.T«*gø6L©*Ø!áÉiøÏú¹Häi‡—šsˆº±lŒcdÏ^‚‘ÀV•Ÿj r5Ÿ¾ëx'<òÆNýúдG+ô:GÇsÖi¤Ó–€TW8œAÂSÖà{ƒf~ Tu€¢ÿf7Ž<ÇUùJ“Å”®BÄCî‰0X·Ÿ{‰èÕIk½ì¿^ºgpMNãĽ÷ô¤Êl:ª n1GrªeVMmè“\2‘ö]^dxÄM˜ßÊíU‘}úöjüþqcD‡Þ—#e*õ|Ȧ¬Ú,¼q Àca^@󜲺Àn%Ël-WÌùŒÖpçGg³4-I—ʨÐþÅ¥O’û¥Ç±æË`¬ö,§B—¬KÀ™‰ŽòÀg–ÃìÄJÁLB…/Á?v¿3›ˆ:¡‰–³´Ê8ô(w6ŤªtÐ[j '½·Þ¯o‡ãýúu¨ÿ"ëºÚŸœ>=@rŽ;w(º ñCw¸S› ´C#Ãc¥÷Ô7ϵº–J•ÙL¡LÀó[Ê8”÷w[H^ ¸ìÛ/¹O6š;ýrIcµC9}:™šÆy.r¨g±Ta÷#&uEKŸ…Ç~ùœ|æñ±¯'UfGEªê”kRÚ¸¡Ÿƒ eÂâ ó(!vXûHM0°'<áÇ„«R{,k¼:THðäò$FtèMÙ4™Û6]¶·ùƒÞ,z„o$xÐÏ‹î½<ª{-¸Q^‘•W9žùà4?éŒ/b—õœŒ'µXO~‡Hµy|rƒõTÄ ËzNÇ“Å5M¶wô j7üv¯*Áý¢±©(Ìê~·Þ¿œvcÚUÀº4“+‹h {ÕUÆÙh4eññ"€ =nå$xbšíòœ±¢÷R{*òw\†õÍ9dÁ­0ã1 ³øŠâJ{~ÓÇy“E_,ß»>'ƒyž 5Å¿ìîþ¢&¡·ëWaN=}ÊÈÕE>e`r°øÉ¸À-ûXYç_¥,d§fî±¾"ŽKT6û,’Jpb‡Ã_‚'[Èãë¬Ä߯~uµ¾¹ÝîG‰2ÞvBH9[ÏΠŒ3©•³0‹³ä= AžÜÐG¨ÚQ'Ñ2á7òvLðìÅ7ºZ{!YœP—Ö’ô¸{”à zGà Á2ž9"Ú³ã®ÖÊ8#~Ù™åÓ§bmŽðÀROòp÷ø0LÕÄL¸.S ,ð2ΉË|÷$·`"•›X=FHðÀRóâåêÿªiæ—;c.Ž×ÃÆEã–^9¨²W$5ï 9{Óºè‹8= Ï™PöýL(Š`;V"|[Ý »á볦öŒ éf±0Ôp˜‹µ>+ò¥ÍÐéx+—¼Šk.Àc­nX¼&F¬‹ÑSï܆£*3-¬ˆT•`ö±ýÒKð„¤·ÝGY1AOOñ#OK7+#®-°C‡ó[‚'ZÍQÖýÏõŽ^þSQKBdžãŒsz[| =%ذÃV–àÉ¡‰koRGÁ1~|@Ÿr:§ŒÃ¨zy©@ÈéØ+• ]€GüƲ8Y‡õ×áÇp «övûõéÕùs¾ðîñëv¿:[®žG’D þGç!Ž™A$Xc‹Ï¥­(ô°ÔEx²¢¥~Åêa Þˆ!$SlG*/Ž;÷a©`Ý’'‡’9‡B-4 @L‡#G‚Ç. <[Ý»í—aósSVö"Àüη.ÞÏSP¬ºÆÉAòh=è(ã¢CEB¾¡$PÛ×–áÉNÕƒ¼~­ÄØè Áã8[)¡ÍiiZ}{âNž t(ü Ä£”f·\ÈÌñŠþæJ­ÆÙ¤ê΢¸oè`®ˆð`£ ÓUA2&?"Å\æ¹2²R¢=›§OÊA‡D‚Ç9µ EMÎÅK¸ô 3s$g6•ßÜHãLn­šÃo ð¹7<Þ«) îµ|`<ªWÞxÎ+ã0Dµ¨†&ƒ3èaTJðdßC'lÖëÝÝ××R­žÃ’MÌyBã Ú•A¨–ÑYÛ™öiù2<›j…—WòõþžÚoû\–.s ‘*—÷Ð ³,˜Eû'^2<âÊê/÷=‡Çén\Ñ´ëÝpxXÑ:`¹.Të £7 Kž54U Óù,s{&$H<´QS)\שÀˆ&o˜pÙiœøA_7â &q‘»7=jù,Ìñ§g“¹—|g´Ð‰<âztï§»Yxüô´Æoz#³œà Ýü2À˸ASÍÚB¸Ñ´_w% ļ¶©·( T:Ñ…P|'2&«–™©@ÕÉÀ)†4CóÀY󀯩ªÅ^6z@†+VJPzk³pɧCÆt°à$xì",¼züy|n¯KÎ3’ÃH×ÎŽEŠÑÕÜWþë0lÃúa¸ùíiƒ¼ms¶½¥;ÊÃðu{|8ü,½Ízc£L‚Gür²ª_Ê¡rü4 ðI~ï*Ç™9T„ÑÙ‘™…gýò»|)EH+Í‘õÖ_‚'º¦žúµÊUõ®žVŽ&µÃù8E¹Ë˽÷£ïäYDã:ØÇ<â´ò«Ýß*RļBÙÉ pö3¡©K>‡¸øí»Êð,ÓW4è(°È ©6¥Ž“!û¤ÑÑr'±Ç¢ ðˆKmëèfØ?l×»ãaøóq8¾êà¼ÙÝ=Þ¬^Æ\«XùZÀ«õÿ=†QÌVKÌÎT+Ëúÿùí¿¾oOrî2?~»ÙןwŨܬïן·»r  Ç?~ûýü<ÿøÅÓ¶þïÄ®‚?š]_Ç«vo@[€Å­·“éÆ,çº0«¸›!Ã<ù£™µý|»¹;­‡cÉ5y^µ^&mÉuu>Kù5}Þø÷â—¸m¨úzŒßxXo¾Kâõ(V­šÜ”bïM©ËRú{± íßC‹­îïþ?N»?èQ-°6{g^Ýø–Ãß‹où£OÍq!¢Á¬ù([ì—‰ôb”µ/;Ì*V«|x܇©!6JÈ©r‰ÞUóL§ðqœYÖàef|CÐÚÚD‚¤·>Ï3Q~ì9Ð6J“[nLÁÄÄÞ>–q•kq\¸ s§OÅC‡Ktž¸¨/ûéWŒÇ§ëo˜l¸T¶sȹëä\%"÷Z³x:z_I£Ô[z ‡ór|«²|y3·òè¨ 0ó¬i\ZÔ­]¦°h;,´O6 ŸŠ¾–ÛïSÄÉܧ!U‹ ÖsðÇç)y^&¯O%X±ƒzá‰MÎý‡a7^ªJnÉÑA1CÙd×2ÎÆ¥mZ0v:|€±} gÊ+jý¸{X­7›»Ç=9b¬L-s—é±C´È¥¶dY\Ç “%°±CK‚gîÞóª‹_ËEŽè“c«ìŒãbÔ(ЂÆÓçl‡ä 0:Cr…[ïƒéå”ó+Õ½˜qœœg"´aµ {n~"Â&ikˆË KäV½Ø«>@):ï”Úü6à®`)tXtjë?C°P¬-®`FËä½Ñ8sÖV " ¦ÛS@‚'A¯á@ë³Ú¬GrK@­•²ãÓp½¥nÀYÁDzd„öE‚GüÀðŠÐ¾ÃbµÝÿö…c?G‰9Fbƒ)Þ‹ èZ…ZÂ7{ZIt ‡«}áHy-¥Ëw¶/ª¤ˆê2òƒÀ¬eÑÊÞ&Ïô&ÇU¯N¦nQ9Û$±ÃA&Á“Ãòýùxc{tüßíŸúüÆä©!ä|%õÀÊKÞx/àÞtŒ°ÃÊ ð¸¨QÛ÷ĘUõ.”•÷™3¢Ê¸vë<î P–¢ýºŠð$½%À>÷g_íOÖH`dW<åˆY¬”dã4jùŠ9(@˜:ØK<òZrïÞ›±Uz€ 0ëÆföé:޳Öê’æpR‚µ}½ O zûùöq÷°} Ãû¯ÅÃÅña4P~³p1dÔ;–T€:»‹>O4Æ(”]¨hHfÁ£óH¥éµŒK.ëíí™ü”ÀM¶·úåÛûîæËêI„e2KŒÌ(‹!šÄ:”qA¡zÂ6  æ^±Õ‡Wyzq)³ßßYëT9HÖyp¢y)lç9|`t&·_d õŽhJ{»áx2l‘zÒi‘ Дq€JЖQq:`lož´ðéc]rÌRU+L™9ƒÉÞF½3YFHJ€ž³.߯ÿþ|w<®†õ}ZÑõÂè­ŒrËŒÜÊÙ•,g2Ð8±×¬ËCR×AQKðø ölª.Cfµ35¤tÖsqº25¢`sY)À™¡CäZ‚Ç)¤ZšâZàQ"“½’éê2›Èùy™‚°AíYÜ2B 0§9"®ß¯ýXtŒÐЛТçèT• ªÐq:ðlæ¤Á†)ÚE‚zpN€Ç9ͪ‡»Ýýn½n×›o”Ÿ;<?ÿþ¦â@®¯­+*$˜Ì„&iœQ!·xÖ΀Lí#@2<oê_ íIE{Ff”5[¶6GÏ2ÎËËŽv£§`!¡éª'žU˜šî‘À¨5y‚Ë>÷çœm’[ÂÛo<• k‘T¸¨&ón À–ÿòßãÿX<œ;죂§hP´<ž .1w5Kï)­ãB¶¯^øgäù8qµXÖ%–"]B‡©“éq¢Šð Ú‰Z„Z#i3Üî¾lwÃñÓÅ×Þ´ÓËY#n*GšÀïA߉8Ѹ4 ^³gq—í9Üü*è` Ï“1n;uPàÉd´Kh1uB1ö¡ÅT<ɨÑb”ï»b¶<)² &`®¦aO#ÅT¬K(1y:© 
%Š¢À<Ov*!¡Ê9þºb{0Àr#Úÿgïj›Û6’ôçû(}زS=ï/¼ÓÕùg“=ç¥,g¯ê6®+ˆ„D®)’K€¶W~ÖýýeÛ3 )J"1=ˆÓV˜¸J O÷<ÓÓÝèéÑ^…åF{ -íf9QL Æ1xÚ8„´JÛ{ü?Ix˜,’1ƒ¢²Ja›,ß„4Xù:òAÀdÁ#›hZ©ô­vFLîÕÎ(¥Á7B`WØE§=ÌM(¢!rA‰ÕÑâc±‹BˆÉ;²Bh<ª½Ã7«ÔÈŽJÎãj K1F¦ª“›wY£ÊÒ„RhqM7”r!lÔÒ%¨Üp2Wk¸©Â†Qu6~´Ý8ÂZ[i0xtg¿‡tLéZmŒ0ØÔ`A MÈ`•Ô(Y:"¥Ý,/>êõN×âöþ„Á›Êíô-¬#»@7 ‰¡Šáä’Ä0.)¥-/ý;͆3´†0˜Œ¶´Zì„×dàÑtó Çpøƒ‡“Ã,[ ']ÐŒcÐ Úî:±e¸ÖË(A:¢Otv56«½õtý,gÓ»Þ¢«¯ò£Nºp‚ˆEˆ¤ÈôÑ¢4!VZMº!܈¢ðÄæb?¦“ñ0-àµOÙåh6ûP>d[.ÒG%´îµÎÁ ³ƒm r‚`¥²ÝĈ> OKÇŸ6ÉÐpþÕRA0òت½ý÷XsX9P /j7Ù˨Ò<”5ÝÇ´¿lt?8K(\¬ËzàZV«„ ˆâží.£øÊ`½® £Qtä'¡ñÈöJžÆsXà ×*~¿é­ÐûìžÎ•åDŒœgÁ…µFcÐRèn( h¸ŒÌãf¤ÀÙ"K‡^É:Ô&Àa™a0£KÕ›c­O¬0²›x@+n lºkÓ—T¼ØµÄêP_`³Ò €*Sw‹&`í¡GK`H'CÇÓÖnÌ*³ö€öšá¬O*l°Ì Ü_€ÇcÚ€µJ÷è6œ4ît$ TVw£6býáÇÊÀ» ¤† ÓŽ¨8œ4Òj…A,i3K€FZŸ JKEuã-EáQ³g³Ë«e^vh=ËÝù«;œ>4†kA¯uëæ!Œº>IÜ1!–ÝÄZÊ0x,9´Åاõp†Ñº×BàëA¾6eÐÒÑn(AÆPÆ2ÒJœ±iûüXéY18›ŸeÓb\L2W¿ëµNBZ1 â¡®å´Q Ò|}Ê`¥Ý,En‹¬@áÑmÆ'5”ÎdZ¥™ÄÈ‚Þër8êH &9FÈn‚YäÁàÑöLVYúÅÔ|“eŸ‹E:púwŸñ §@-,ûƒßð:éïÆ¸ëÅ«P‚uRã…h‹ÁcE«g‚#ôîTNƒ9RI¨$<ü¼$• r¤M×% ^4J»! Ãâ‰ï3X.ÆÅ­ë"jt…¨ Ëñ´Èc'k>xõÓ0q¸P # º¥Ð!¥¨O"Á4JLλ!‘$bð´×°y­éµ+÷Ö¿=ì}«) GX^Èæ}zë!¯O¥”A‰¦»!‹†9ŠÁ£Xk B6ós;áEy˜†át§yã~ !Œõ €¢#ÿÄ2…¨ƒÜ¢}lˆkÌÌJ ëÇ µ²½á߉°öàSJ¢JDíÆßÀãQí~ÕêœzË0'˜P#Uíq"x}ª`%cQ…+B4O,U®À›,^•*Ì¡ˆÀ C ‚ETp¥ YtãPÅ0¥D€›µœtÔmXSqM6¸ª­ôz%Ìú4@ËÑ/À@axt+8l´{wîÁÙår:,“A4˜ùDãeøÌD#œµ‰À˜ÛMФ£<j•«Å÷‚m˜\âð¢3 ÍpÖ'‚`Vcé(—À„µˆjÀÓZùÎcSËÂ9J&•eœ’4}8_‰¯þÀ+nJÕÍÀkˆ¢1xm¥CSu5 'Áa(hÖ¨h}*…©€pwCË Çà1¼y{ÀzaŽ-ƃr~…3†œ n1­ªßï/€­ö€cÁsÒM4ÀaY—<Í[jíé°g¬~?­–;2D@îhpø<œ·vìIÀf†S\Y…xÚ.9ú4ÇÀÖ§V%»¡„¶\bðÄ×2TzÑ~§üâ2œOõùÖ«7œò„H‚A‹¯\h€²6 ÐbØnH ˆ¶ˆÍûw‹«|öÙ¡-k,œêLkÌóqAy Ëüpõ‡‹ž±n†Ü"žØ"¸aê,kž5ê:X8ÿ'„4ñ M 3¾-®O ¶0OÅÖFy|,»j€E«û_áQ¨n|aˆ–<:Ö½\Ž'Ã{iý@ŽAqI•ÁJ.åj^衚êÅ´…‹A,;è ƒ§†šyz3Ÿdgé|>—eð^këŽ&œórg"TïïçxÖE 5Zã²² ¢Ê2Ä P,]QxlãagËáÙ`‘ ]Us:©8(ÓhÑÞp]û4ØZ€÷.]¯Ü÷½Ú|]cÁ¬xZ ±¶­ó`#ô.[# %¶éa°µpwÅJå“â e´Åã`÷©~ï;~T{ì‰= ü RtÆ%nž—oñ\ØèQ([•»=·ˆJÕ±- Ó³¶*MðÞ]Çe¼%÷‹ª'Fpëȧó±³4Ù½š«Ð“Í…¦ÊÓÚšsmDóà¬^hÒ°œ¤<¼…NçùÜuB¼(D ÆG;ÿF1*MƒëHtkózT‹@d:(։›ÆsÍj}¬ŽªÚRS Ò1\¸æ´4TÜ×ÕÆŒŒçÜšÁz,ºp¢XŒu'¬;[·¯2îL÷àãVwëày¿»’Ü2žèvÞ;v¤»|hØPÁ‡‹ä6ïÕæ¦G´a¨OûX]ø¢1x„¬gâïé'×ZÎüÙÇ!³×i;lí±+¦á„§1x„i­aÁ>íÊq,,5̪`+Hk˜­êôSeû£Hç“JÚ ž*[Ñâh¤[4±5±Ãi~O7•©Û#  ”Ö˜-2+­8¸yõ÷µ˜@Ùa‰G¶WN³Vàê§×hÎ0C\\Bj˜Ž>*–sX ï`cðÄ&‹¯2¶È®Óâ¾ÍÒÕ:¢°ºÜ*€ ®«hë½g¼ZeVR·U"Þæá‡×q(ßú°¼±4h©<žö¶á¯“a£åeÞ+ËšîbÙX ž½”¡°¨¼.úhÝÙ^–`”²0&yø2Êò>–‡úç”×E?Êߤ×÷µcªµÃ|ÈF»øýu´ª’'òü·¸ˆÜY{D‡‘sÔI³m‡ÀØ÷íˆc(tëÜÔ"Û€Êë¨~€0žù*“4¿×4e «ÂÙÞ‚°wâò'4`ø™{º.˜ !˜àæÛX×1~ ÑpÒÅ…ÇFZ¤y±Xœ5½§&[­&p  ðF†¬;\§uìIÀíq no Ìy†ijåæ$Æ08`3£°LžSî>Ú()x”l£TûAÇ¿¬ φi>ºœ¥‹¡{ÔfÓ’© 1 ×);^‡F´}þðÃ\‹¸—kÉÙø£¯ÓŸ²I’¯§é$ù4Ãrõ)kÄN¤Æ¨¶ÖÏý¨h[¨”; ŠíC%Y£Ê÷.‡Qk|»­ç n§@ o³|9)œlý¾&?ÿË«’$ɦa΃bÉqžLgE’~LÇ·ï"yô›t0O³UÆÏ}`˜Á‡Ùð}c‰*j±ï“‹åÍMºÿêF9‰f‹¼ñM+"]FùûäçùÐ=ïöwK¸Ü¬¸ýµn~\©æ‡Yñr¿ÞBŠ~öU)OÏÿ¸(\AtÝߨêk6÷<ÿ&äY?ùñ%\sáKãßΖEöz:œÏÆÓâå`åù®|5›‚Ùøßõ›?O7Pâ¾áåÝÇþèú/ŒŠbž÷_¼ðñÅÙÝåt>ÏÏ‹A¯Èr7í^Œ²tRŒ~=é'¯üf¿¡ Ì0šˆa4šíƯWtŽ¡>vôÃúnçïË̬)ˆøÊ¿ý (6ž&åϤ˜ÁŠè—"Ñc¤G“Ël.Ý€_m.¸É×Yr=™]‚•›/'“¤<:®Ÿ$ÃÙW›ÿàÖ—·0.—_¤°¦þéâÇ’)\¿w Èâ‘3þúÑ8˜ýMΔ…5õäçé(u-7‡Ék§ª7"çÿwÛYNýœ‰Ý=F p²~ÍVOŸ­í:½a¢§ áÔ•äÏûÉ÷`ÇóÉŠy’.2ðx²nB•Ú|’¹‰æ«èÙÞüû¹÷ÝÉ¿ƒ/u}-ÎïF¦˜ÇßÃ%ÓJ¢ªõ×I½ÕedðqæÝ< >Ì…ÛòÿvcÖ“ŒM¾®LÙì#ôo­Þz±úyršäól ïu–Ÿ®ÆÓà'óôv2K‡{Ñ+A5 ¢Wjë©þ½÷À²|¼ôk´Þ;ÝŒ ÃGÚ+MÍ—— ¢"óO7û'úÏågú'å†W¾sŒíŸüm™ÞöƳ`aB¾˜ æg‹l’¥yöù(eRõ!þ ”)ziŠÃo—)¿4ü*C.Ú€¤W2ƒ÷,•^€_Iš C.­$z¨•»×7³Å ë_¹5â·}ÚÑDÆÚÙŸŽ•„lŒ­OƒN2pâfë`¹X¸†œ@ë¬øW˜«AÓö·¥Sì^dJªêžAk ¶–sO ¿ˆ/.6¦b+ÈðnÑÕ8› {žöoÆyñl:ž<_]pþ7úÿvá_ýòÇlê†Ü&/-´äg_­Ø·¢€ÿÆçÏÈç3$†ƒKòüÔ;ñ}ðMN“w³"ôÝ*tš¼š¹M‰01úäÔù­3˜·Š°0S\Ý\ìÇoC¥oÓ|Ô?9Ì—¯__39%ç0Ð÷y—Þ •€WߤyñÓÚÀ¹.H½¯á3zšø?þ”N—éâö4ÈÖ?ÿæÏï^î—ð€þ¶$çScøÏoßô7þGê}•Pj”½Áìæ uZ¤/Þ~{ñò̱ H£Ÿ{œf“¼ÿ—÷¹_¹=~óâŒõÖÝÐþTŽpɃ¾#Ç›‡¯~¹(²yÿdõš{;Âm¾<^«7ü×Ý™’ó_VZûå¤\Éà…ë n–n"ÜÓæ7šÎÝ›#ߎó¹ß5Ñýúéµtºšëý »÷xHßÌÀér޹)òn‘Ns_"ù˜_N÷Û—OédÒ‡™Ì„fáÉÌPZ~³%û\ô…1Lfr$€!ðt_î¾Ê™A¨òü7ñjk/û‘6/ßnxäßÕ®Öÿ•&þËkï 
·ß:ïÿäí—“ÿtíŒ@A'¯Våâðë×›¦É¥ê^óƒõ6»˸¸õ/”ÏâݯߗnÈËŸ¾s­]è7ã«lp;˜dßûz…{·Y1Ÿ¤£»E¾ÜŸ€^þk:û4­'ÂÅwÓtžf…ÿÓ·ØÄ([04h(þº/ÜcUü0fï–.}R̾÷þìÈ{rïÀ_î×KpWo²¢Vçonš¹îãbr{G€NKÓ·×A0Ö2aÂ˰!f×2ì|ƒ2FhrSgìcÁd?g‹[ˆGV‹à¹ŸžàÓzoëüw³æ$°AçØÓoKÅxK"mœø†•}g¦üzîn²Zµ˜}Ìn.³ÅiÂN}š0VünQ÷–ïùÆñ¸gñ*¿Ê€“vŽÓÇßõÿ;mûÃõk«-D*nÃÙCVÿ÷V[Í·v¥}Jnv{^k õrúÁY¯Äû’nG{6èùt£#ª›Å0'÷Ý„âÕEËëŒx”Pó>ùf•RlS%Ï(ë—7»ªãøÖÂp†@j¹`‘Ê˹Z}4Ù–Ø2K[·.³.lY½äM¡Ÿ‹Ÿdƒ€˜›WBI&¨Ì“ôzvš|O&>®_@î.K\ÑIr$#}©ÿ§1ò­ µBÅŽ¡ÔhòL»\ìïÿ×|ÐÅÖÙaO$âAÏõï-âÑÎþž±‡ˆøcmw=FüLjÿñ#þcÄŒøãˆ?b–[îÓ1â?FüO8âaõÖ^’DÜÊö8“mÄ^Š<jwÜw'KO·¼‹°[mÕÖo¬îï#Yîê•`RdwcL¯Óñôt]ñÜÌYRÀ|Jذ=AˆâF‘'£ü Ø<'òx:ÈÊ >-Šìfî~>Îáû‚sCò¹ëÈãÐÑî Îki?yÆz’0AùþÄ cDXÉPåŒÑÓZPFÏùýÅèŒkf¹ k‡ïïDp˜t 8GL¾½9õ£côcŒ~ŒÑ1ú1F?Æè-ÄèLiM1î“¢ê£côŽi¡8C¸½z×cÇÖbt©d‚¯Ý1º$®ÒY ŪkSËëä–üb¯•&ÖV7Þ\I)åï*öZiGCTeÂÚÑ‚t{•w4’©ê6Ÿ+d[ãvŒ½Ž±×1ö:Æ^ÇØë{c¯†±W¹¼ZitõY«ë¶»mc¯cìõDc/ÏVE°:Ò(BY‘,4w;WwÄ^ €¹Ö=€¬8ó~s“öéÄ^wè¦U=§ê÷{ÝIm…¶¡ÓÑs¯Í…5ŒY²í6ÇØë{c¯cìuŒ½Ž±×1öj{m–W)¨æ$¼ Kb±×1özڱ׆­JX¸‚ÕÖ0öâRô¨¶»c/ªzÜuZäDWF/åulÿÉÕLò÷ɳb|u›£²·aR¾ÝvU*õá]´2þ9ñ}ÐNœ‡âºjìÅog:ŒŸï?JG1öþà8aTapÚ½]úNÏ+(§ëÞ‡;ïh©’îD²Ðá:öö®¶·‘IžÑð‡¼ÜF¾¿X‚d²°Àyûp‹à"Kí±vlI'É“[Ì¿b·$˶º‹ÕÍæˆ3#QÝ‹‹UÅb±¹`%mdŸ”³ÚKìP‹­X%¥J^µèzÃo&æéý40%½ÇUTá­óªíu;eċЖìÐ;ï¥ÆÑk)ÿTÑ’º×F‚Õopé´Üu’ûÔgŸúìSŸ}ê³O}ö©ûùÔ´eØ0}ö©Ï>õ‹÷©k¶Â „Œaµ´õ˜;ë¿;åSWéçVh.DkP¦.íËÚϤ¡WJý©|/št4³Ù|/"2~ÞÏ<û^gßëì{}¯³ïuö½’ù^´e&ëÙ÷:û^/Þ÷¢±Úòa³¿ýXÉSÛ™pÃuÜÒú¶ÈGÝŽ{Ùr!°×¢OæÛ¦;Ñ÷/ã\X×–©ýª1·ÑXõ[ñú|?_‚©]íœÖÊà²8›Õg³úlVŸÍê³Y}6«ÿ¼f5q­åÌ6ž6òBÿVLV«Û—E­ 0<ÞÔB‚Í]-¿åñ:Êóï+õËxÚ Å"àiウÛÉæÝÿ¼]OV7òÌŽ lÁ/‹ïa ‹.¯ V|ñÇ|{†Ÿ4?Ñ ìQÝŽhbžÕŽÂ1¥ÚŽ¿På€4£`Qð= ÕŠBJ.¯D A)•nä g’WÆÙt]NB<&ßòtúÞx=µ/Êítö) îòºfU\¾Ñ@µ xü-¼}u¥]—«åz[ÛËÉúv² låír¹jÄ£­Ç£™j–“9íG©zã5e1¹+7+PÇJ"j´—[¸æ—˜ÕÜJ‡#´‚'ÉÇy•Ï!qˆVZå# j–eP£ñ–bPËìxx¥BÇ×á¼Æh¸m[m<{sð/20¡`>ÇðÆãáÔ9„S\ì&EŽÆ„cEPU8×LK9$ÊðõâZ4XÅLŽ‘ŒÇÃ9q$Ÿä/),yˆœ”3œ Tç*§!*™añ`L3ݼÇ2ç}ቃ¶)×ïç-êêõ]¸Žjº r3ˆÜ4“ðŸBÕ„Xû p)ÇY.)s˜@ð­ãð(Ñ<ô×ÑÈJ…-÷žsnPÈ– âÐ&¥`ƒyB€ïrì¿ðx1¨þ?±ëŒ.¦Î‡ðGÁ;å©¶Þ0ä¬yŽï6úô‘xPü¹  ²Äñ)x¨#w}»üc3½)ï&mIÛå÷P)§AYÖº´$°Ï:¨øæJÛ Ç›‚Ç$³ÐˆáÏJ²˜íkÃÍ ‘Ñž€NUÚ©˜ÑvH#”û¾Öü=—ÃR$à![ 'Ó\ög`6uî#–q,ªf…PZ94eÚ‘ÃÌó%¾'"‡Í@Áã’x ]¤ŠùaV:)@í^+­)’¶ú±9­;Ê¿‹VHj§ÚŠ xŽ x掳<’º{ ‹S£·¿fb²Z­—ïËõkøã®ÞoFï\H p]]Œv %QÓ Ú ›ÄkdÅ÷"KÆ/íëÊtȶ q…ïµC;M]†ek4î,ÛðÏ ‹ÁãºÌúƒ;ÑMœ'h,ŽŸP±ÆÚÞÎm'šFãó*ËpÇâqŒ%ñs—Aj0Æó`æ…{R+D,B`½q`¡1th§}‡ >41ãáÛSÖƒ+k£ð¸ŽïÝr1¯æI„D›¾¨d‹¥ú8&™µœô˜'qy»’˜€Ôç{î…ˆÂc†˜þ•Ô°5Ò mCzŠRè®ÉÈù¸ß—,Ôð/•Ác©Tjr}iAí>Øÿ{~X+X‡ Ö0æxTGQ‘T6Ј1eËmeI‡ß()t ™fýosаE4lÛ(g×;Í<ÕµËKÖèŽð£ OtÀAï²5à׺õæ¼°YTQ<ezŽRÈîú3«UçŒ(¥ðù£…4I´N¿ùVÛ,#Lj„æÇ‘ÂFØJ#4–‡vR÷Œ+$#eµ]µ“`-@øËf|øëœ¨š³J!+¬`œÏÑxhh'UÌwµ_«KÓaËIa°-ç6éqÎòxQÛ‰ëx-SÉÿ’‡¸ª` 1:µ¹—ˆ—ñ}°9Ê¥Qðˆô>T¨åÛ HŒ Ž9­>‰³Vw?ÁÑ›´ñ8ÈÜ£à‘6Ù€Ï`L—ëŒo-ÅFïI Äô–RrÅÃ8 í9]qH΀s–!]ž‚‡\àò¤/Ój+=šWX±Pé„qV£ŠK† ŸŒIyß“#M™„'jØÉqz(Êß&RÄÛR’ V4¤ ÐŽ›âèD:F° ™a¥ à!§œ<Í'Ëׇ:f +fª<,v¡†:ÖFO¦$’2:¾ .Ç+²sø,z{ýÌD«$kPÉzÎZ³ê1Ì Ôƒ^‘RO}Eã¡;œ©F`4ù¿ûuYƒMǤ6_d&ìG6>9þ²øäô'ãÓÛ骗ŽMm•CbÓó^dã’YºÉ3™Â':U@ò48n!-K­ íx’"ÃÉm3JL†HɪXDÊ•àf fOµïì­Å¼)HdŸÒ†•N ™BÐN2?h‘¬td¦t*Gi{ òmpAl§¶/ÆõÃ@ +n“Â[ôŽ'h'D¢ããiLn3DØIxL¢•E~Z|ÈfµƒÕ þGAB;FÝcMÄM F•aˆIxL’Té­.Ä8w°RIî<æ*A»Öú¤r=h žLH¥‘8\›£p@À#¡ó Çã¸ëyV3N"×Í‚Ä^2çE’²PÙIMè#4ž%$<®ÿÅw‘¸eLauJ½ãÌ2^qÚñ¾e¥ÒðLuÅjª‡v9’uÂ{BHG÷Õm¶u뚨3k¥k_ôªvм dà*©-…ïR< ”}KÆõLa®`8 ±ÃáZÃû–Ë ÷È"ˆ>¾Ä9~žŽˆÂeáØVEàq,Ý¡‹Ã´>ƒiN—uþÛGÖ1ÍůڅPAÏR†=4&àt\"×ÚÖíŒÏ0ö`ªƒgŽÇkÙ5ŒãVmÝ.KB#†Y`‡v"a^xRÞú Y$öG[UFÛ½5âxïyµ;?¤Bx¦Î§›ùh³˜¬67ËjNí’¦Bý³ËüÓ÷?í~Ý»O"ƒš¥à¡/íÍ'Ã6ó½cÓdû‘/X÷Æ"§ÁC;æ½O¶Å”FcDcçLê ” àQ¼¿¯÷LˆG–²4í²R0¦½Æè[,1é¯'¦/¡Þ˜á)@ÁC®%xRxÇaVÙ~$ žN–¤Àê¬ÿIÐ3´ )F(Àäšû¦òþ¯VÞSWF;†$¶W<úœs†ÞôâUt_¯뿈t‡‡ÕF:žŽV­éæÇ´ÊЙ,¬’òe±Š|^°ÙÌë<Õ8ˆt¤jÝ£;Aªû’…SÃ4#âItà¼IÖkø}mk´_X£q¬ôÔ&:Ë?Ú^¼ˆí‘ÍÄ‹h<*i’e£À—õÝ©NáäðÜGQºíΦ&rôƒÜ‹!Ñݲyâ½ô.¹ä[ÓM5Ñzû¶œœò¶œî¯4q¥ xþ*®?±æÍ°ýèåؾr¦²p‰s£]Ÿö¶ž>CÇg¸PœÇt«5»ä)¥ïN/fI¡¢FRˆ<ÌŠÆcû0«Þ ­ÒfqOåIñóu5ÕX„R`j‰n І ûoï÷ OÆc*‡–!à!§pœŒ<“ÞÃeð•‘Á–aX3…-ùÐΪ^É}yÈKé˦¡àñ¶óƒÇÃZW’DBæÐ@& –“í”1)ö¥r<`Í2l²Sðpê9å÷“Ûù 
&Çâídv7ß„œ˜Õòv>ý‘\?z;Ù–L>LVóÑt=žPI¡…’Œkî$–Êí˜ö}*q¥&49çü_“Û»9Hù ^dÏH…³™V2,§Ú9òJòéèNè•Ë`lRðx•J©t/B-…sN¢î{÷‰î¢nºcEs”„Ç÷±>ž曼šj$»m—;*Oгt¢TªKüµäÎÎ+¥[9‚ó$<*CÞÏË?*ióté ï8 ?ì ã‡Í«Éf>Ýov)ƒ"KÜ j¤w®8ÿ²¸âe®ìWú.“Ñ¥=)¾7]Ú ç`Œc/‹1Ž‹[€TŸÒ#Úq# ø3X×êòoËE¹žOkðþ·â—Umv¿,ð¯÷fôµþºƒ^ÌîËb»¶\__‹?»_l¶ëûéö~]ÎÆ¿ýãß´ÞÅ«ÿºúg9Ý^w“Õ?à9€é·Éâý9¯.ö¯õ©ž‹Ë¢~Ô‡>ÀܘÇÇÅ—_¿ç_~ÕáM»QØ¿©QäÝÞqWn'ð¸Iõž'¹˜,Ëm]Oþâ™è.sùãüÖסÌÒ‘h¾ß•Œüò«P{;+Ça“e²)ÿnSÞ^î& P#³ÑÍüíÍhò~2¿Ý½ãè‰@ƒ2âqÐö¶-–³r4+ß—·Ïžòñ«êZ’ª“?ÏïBí»ÕQ«Z¢|ÄÄÏÌ^ju©ÜWï®æDõ3h=_lú‚‡Ïwøn^Þ΂ÔþQ ë™d»‘¨¸¸®žûó‡Õ±ˆë—ýzÜþñ|Ä®/kíwò«åUuNdö·ãŽ=i¤u}¹…·sSNŸoO´ùøñ †cy7zë£(?áÇ/EXGsãù—íscÜ$;dÒœüM¿YÓá‘ 3çÙ@WZþ|ù±¼.×åbZÆ ã÷w—ÿ¾¸Ÿƒ†»p|¢Ä´¼Í®®ýH]]Ë‘¿žÎFöÊÌŒ·×B3yññ÷ª«Wåôä ]ƒ$`½Ø)ÈYÜM¶Ó›o–°äÒF³ê@=nÓ0FËÅí‡ýê6Ú-o›SÀ›ùQþk´eËìk`ÊÇÇ 3´Ò¦é)ûÙx²ÃMSx‘ÚŸj=¤î¦îÇÝ î°Búƒ^]<ó‡¥ó;ø¦Ó* Ëä3ŽœÖë†oÿÞ/vEMwçA‚¥ó׿Ÿûæïo~~óyñŸÅòvVÛ'Åe±¬þòå8düv½¼_ Ô ¬ž,¬mœûòãÇ^ýl¦í© OÝ=öhDŽ š7ÿ{?6–‹-ylvO?Ôuü©ÊÝXžÒUÝž\ÆÑcŸIäüêbŠèÇûÛòN]õªÿœÿ€bÅë×…(æ³z#û¶€?×à¦u{ÞÅë½Ûø7?¾ùúç7Aiì?ùå‡oŸ|ROŽ`Ÿu|ÿ^ÀáõOÞßtŽkCÝ_ö¯ÛL—••ñˆÚÿÑMÛ¼zÕ­×~ÕMO<^>ºðf²9¨Ã\ú~q½¬þ&ñ—ÅgŸÍ_­¹×…0ã7°:=ùoµ,¾þáûâ›j„‹÷¢}û0ÆÅd];{©¸úPloÊ¢rQöOg`óòwËõÕ|Ó¢òº÷²ü(Õº‹Ÿ?N‡d“õ¢ W÷[pÙØËß·7óMYÜA‹›Éû²¸ZnoŠ ä‘`ËþÓË.œè¶ê7¹#'‘#/ sAè`è?ùxò&ÔF‘L0$Äí”× êR¦Ž: e†-d?`Э>Þðèü‹×éd©eÄûn˜;f)ý2y8b—’Çà1Ÿ([úÙö¼G²EAí0&8G{å$½°]þä Bw¼âHCÁã’ÃŒª¡TƒPlW‡k¶Ü6\}帜ì”^åÈ\¢àñ, ;zÅô6™|A™F.EÃö£óòDé+W/ŠK’û¾õòâ*õ*‰öJ¶’J¡h¹ò-aTu¶Kµ*¹«VåÚŠUQP–…ƒñxTÏôÝCѱ‡¹¿´%x¨÷›Ñ;ËpÆ…Î /µ`h:h'¬é™m™²äÊyQðÆ0Ž”$’/çóR ƒ¥*„:ÔF&©êœÄ:#àö,ƒCÂãú®=G÷C*t€ÃM±B(Ìô­Ê:°£,äæõƒ?Y?Ô¾þ*k[@Â㵑¨Â®àæP%<ª÷òø@]³"qX)5гÚsË0Ø¡ci¼¥4‚ˆg íséÍúÎÜ¥×FŸ„Ç÷/Ñü Á:“¹\ÌVË*鸒žA¤êrp'± ÚY뇘ù]¸Úñ ö<Pi&|„ñ„ËJrϸ3A 휰ý+¾õ#*íðw9Ðð}õŽúò(Yñ”„-"áPdF4úŠÌÈDF@P[•E5ÚŒ%€t96(x8O7Í÷åv gª“éoÉ…1M¡–*Ù|OKÓø>–Á¡à!× ~pÑÖ÷qmæb±;É™óÌ`\v’yËÒ)€n\ŽG ¾J†| r­á–k<g nƵLŸ^߉XÊÎqŽB÷Ò ”Ô¡ÏKbBOŽrpã3!ÍþÞgÕæwR`8–èÞç«Éº €nÃìá÷€~зŠeöÃ{4—x&G…Ç%›a!Ý,7ÿOÞµ.·u$çWañÇ–âeî¦TG뽑7.ÉÞ?±‡"² À@É\•+/'KÏ$Á NOãô)•lªV ‰¯¿ù¦§çÖ½Zžäê»*äUc1dUœì°/ÔýõPnLü²ôÀX÷ûüjÔ~ï´¼9_Ž“뜢á ׆OÆö— qu õ—ˆÇ°©#%_›\A8ó”`Ë' çzK¢fø/LžO“ÅÕG j9ºl¯š§,;>1„þÓF)Ö ŠáËRDTlЏlãÄrzO(ölr°Bõ–CÐáµ`…«£ë 8*ÀCß¿ÙM1Ägw£n4¿º†%ÓìYÀpU”BW ª Aî¡b£¾0}èaô±lW)åÉSª#Ÿ:ºî‘ï¡pmÔØô'ቬڸIÙÌÛœüfò$„ÓBàÊð^ SÛFeÂí¡‹Rƒœ©£‹b<|û×v1ÿ0Iç¥ð+/D÷ZH\Öî%È}è- âê(µ)TRG1ÏqbtÕŒ.'³6³œÞ«ßsöpÉgM&]¡2qÒyi L謸ùH&ÜÐ÷×K©qNÈ*z)ÇcÙõr>?~T#T |ËÓiíB ^iØÄÑ…³‡Š-©³ßYŒGiÆyåÁE?õÎø>§K•yK—çfꉴ‡Šm©³‹QŒÇÈþO ]ñæ73çø–§sÞW`Aç]Èmup#ï¡–bÛêDåxÂÐjùØž_Îç{D9¾%êÒ»\J,ÝÀ{h¥Ô4¯ëh¥ÏžISšÑh~ó8•UÙ0Í”ãû¥^ˆ |ÁÒ´Â|­›뜮”ã1ÜÑéË)uüñâ¼CðÛm&ß?õé2~t/4W Z ¹‡>Šª¥ã!—ÜñÎi›îÇoœŽ‘á‰ï©zm¬ %Æ”îœ hD •š©êÄ.åxBÏ .{ó.ñmWo%VhG7úª)µÏÔÙ7)ÇcUƒoÁzèÿ’KwÞZ™Ãí¡‹Rƒ\¥ˆ¥O¯rÛ³ø&«BÅœ~¯âëÅøzô|©¡’G(Æcøc„o|_5Žº}Ô|ÁG1êýURnWˆµO ?Ò|©Î^´ã¯Ai‹Œˆ=ŠøôßC3¥æÉ:j9ž²,ºñmØ`D %à•ïUec/Ð=4Rj–®óz¡í›Ô|×SÐ|´HNÜ {—ÐàR Úî“òÜߥ<ÈCŸb®Î¥¤r<}óç-¬’ÌáQ#ŒZé¤.lÏÚ©@~|<ðë9x]#+|1ðçB/¸Ÿ"nR6ìâ™R~nˆIP7™Óœ÷Mv̨_ìPáÞ*O õÜ@çÉ>ÚhçRß.·SVp»>Ê%a×bðc<ɱy’³e;žsu)GwpÈ«€ÜNI_oÐ{>ØÖWèqÇtØB`3t³™2š í†^êîW.åUˆ÷Ö+êð7Ñix¬b-ü¸!ï´ƒLd¥íè5$wNnç´ãI…ȨZz/+H‚ÇU÷±›M•¶Þ‚ AíœÕµÀ‡Þ‹ Z à‘|Y©vºãóÄlw…:}¢!B1VG‡X¢µˆä õC;¸rð@u…À‚‡œƒò…+u&g«S…éŒÇ)<_¾º!]n‰”BG Å—Åjfe7³°¸òÞz$?øº°ý3mó©›‚ÜìS8ÜíhîÞФ¡°b[ ǘaït2عÖ÷ýó¢à5Ò…æ§zƒëvJ³åÑb,±¡‚ xœù¬±ŒêfÖ‚P#ô>¶/“Ú‘S¿s šv‹örŸ7>Étù$ _a¶¤à ªçîêÝ„²}A·½´@I²“¸ì‚u^èÌqß‹et¬;OÿÞ­ÿD_[B… SÉòRª5›,BñsM6ȃ«cøw›4<äw›»§®lïúAæÞðÉE+ÚÓšl\?Ú}Yú1äší*µÉ¹9n‹¨·ÍÍtu½JÁcÉ£ÿØüÒ›ÉE;ºMÛr²çÅ_eos|¥™½ufïO¡Ÿ¢`®†å….Joêu€`ìÔ(R–ÍÜQWéÔƒÒáñœEx€ìGBŸnc:ѱ8ÒX|w_„œr(5JÚ:rPÞ‹…F þ„Zöy ÒŒ³ûK1u\|´`¨+ÁcÙž»¿ÄøéÎnp¸@œÕºÄëz¿xß;§„JÍu•$¬2Ex߉Êÿ1,«“QÞê×P©¡G:–¾à9EUÔ¾ÄÞ:˼ÿgJðDÎûz8O-2áSK*#ŽÚÉ/ŒJQòÉB¦XUVE°8³Âቬw»ÊÜ{Ä…’.í‡üÅiŠyqsJ§ØT_G:.H/À£Uß'GÏ7zƒ*UªîPHá…@Ï£³²N'{¯lǽ;øh¨mo±7Óiî‰Ë!€»-ï×î 6§pJ- •„µ)N°ƒ HŸ2&€Ž¢ž0У'iæëüœ‹?Š]WÕh1,ŒýOÀC~€^Q= ¤ Ž‚_1jS)ÓE†Ç9"Ï#×$¢KWDÏ ]ðn©¢‡[# Oj\!9­e'›qÓE¦Ç4DÞ ~ Ú-êm%XFÜ®B@Â㿌­+»9ÖªD}¿h.àŸÇãfyy>oã—¹ÎܪnnMzЯr¦ºn×õgoçÑSïüÆ ¯ ËwëéñÞÅ'¦‡ £õÇŸ*Ux¾´bƒh»Ü–(jhƒ€GºÏì7t7·Và 
žÆ|2´R–ïü‚Mçüº†6(x$ŸßX]6³yöÁdbbJ åÐ Ú)>³Ó`´eë`®¸ ”R›ËÇZï¾|lN R÷0c)ƒÀ€vô6CëÞW\º>^´Ó¶Y¶ÿ²¼l”ugAY!•“çq.BªóFŸ}Ñšq*{;Í…mágQ¦ýOÿMk‚8Vø1¸ø®?ÍAgÍtÙþ¾‹¥DwÐS;±s¤[!~=X3p>Mœ´`àô`5q.©¢Hà}»úçƒÙ|Ó‰0þ~“ˆÝ‰,*  Ùö€É²Èc%ÀØ}@/àÿ±5€áï,_]LÚéøäÛäfÞL–«¯f“é×›¯þz?ýêúÊþ»üé§kg©ËSoj ¶™LS'õOõm$ÿâ×_‰ßFB@ 8‹¯²<ó~ï§ùª™ž¥Ñvtðz~u=ma`œ‰£ƒ·)fM¦ôlµ¸iA)iCãÜ÷Rú®Y^ž®o¾ýö½²3ñ :ú±îš«±3ðé›f¹úq1ÏÉþÏV“«öä€ð+ytÿÇŸ›ÙM³¸=:€ÄÖÿçþüÓkÀý üE@»ç—¦ðŸß¾9;¼\­®—g§§Íh–ž€¤.›Uzœp ]ݬšÓ·ß½ûæ8©ìLFèø½× ÁY;]žýç¯ËUš ³~ÏæL@ã;ÖS×þ¸îáµÎ’8Þ<ýôÓ»U{}v¸ù,ý¸Ã×üx2›ä?÷àJ^ý²aí—Ãìªáƒáiƒ/k¶z¾3 æ—ԛ߅נȷ“åß–¹v–™¥£ÍX?û?á÷žwé›ù4½ciˆü´hfË Nÿüã}Õ³×9>HŸåÎzÛ¾ϸ¸Í̲Iÿü¡¾žµßüø}ú_»j9¤Ÿ]5à>W×Óf”¿èa’_6Éñ-—ŸÍ?Îö3áÝ÷ïfÍõòr¾Êÿs:¿ƒ+X|Àb Æú'àÒ-ØfÚÓüïA+ï/W/Pñ—ù¸ýé&…f1» `¤Ÿnêçž7‹öª]­Aãtþž†ÙuŠöVÓÛy œ®]ßÎA[§ƒÂ§aýò4œbƒ«æ~’!hRÓd| ÿ=_ÜŒî&ÁWyxŒ×ÑÖ«ÿ7sÎÁ|Ы?Ü;ÓïÖÄdO&Ý ð6qôƒ›Êóyú’ͬý—ù‡öê¼]¨£t`ÀYé‡I={¾¯ïG¯óOÀÏiùüo}Þ¾xêÛŸÎ_;UmŒ7®`Ic¶V‡÷KšðëÁŸ`µ¼ì·¦I÷ìÒ õÿùïåËK/¯¥‰Æ¢‹¯÷¸,0è¹¾h% Öoí·¸\ìS©¤ÛÍ;åL ²áƒ6V8…­Ì f¼αa¬·Ú(º­q:ßãÒ¥]€'Þ½—"ªƒ–ÁÀëþn:Ž'ô=õJìðNÁKc‚÷tldázU7Þ 4R|Œf€T_”‚t}kï,ÚÖ›òû–.Þ· _K`ð=1US%x¨ç*šé‚pøìc{~9ŸÿmíoÖÛ4Þ;Ñå3ßëô¹€ǃYZôwnß{B 2xÕñ\·s5T’4i¬ÈOd>PY5ïñ*D1•]6bAI’:9±V,[O—ÊËrêMfpÈ;3ãØü28ë±È>µS̃‰EAå”ů„­°RJß½S±Oà{J5™],šåjq“wyv’Ùù:Á¥ý–MÈÜÛÙ® ÖÅ™srfËÑý®ã&UŠ—¤Ç°† Œ÷ÈøÉí¤å{ÄDt:Uf´8x=|Y°õ÷Ø`‘ëvÆ2ª6ï 櫓;²;‘ÚyiµÂh”I×qX½kh*ܵM! ßå Ä¥.àx‚bI´ƒ¼íU’ì®§h`Ü´¨Ïºd¼”×W¨Ú['UÀaëá+<­¿'MÄ<žo–ÚIßÉ¢ìfQiiòJ!¨¡áIÄ!Y@ã¬ÒÖâ¨öÃ÷}úå”+ÀCîûýYt‹©¡íû|ÛÀï«Ø«Ù£Tj7|^Ôõ÷Øô:®ãÛ¿Ù|¼Ûuªn !N².Ø€Åy©ùAÇ`r4)Úñ©]/}L@±9ñ_"ì¯ ]÷¯'3]¡+í…Cu©Sc¾R¿½t„±éòÚU¡‡á{‚ö2àq‘mhÏS~´ênÖÐe°F0%ù¦tU–ÃÓBUðÞ'º9¦Ll“6 €H.ÍW_Ús†¿ÁEÃc-[ÿÞ]æÚ"\‡”ú æK̶”y¯Wa‡åO°Á×Ð ù^zºG±½äd79*½„ É›ä×WÕ"[ÿ*â%Øæ]…ާàññ sá:H§Ò ^̶ ­¡&„'‹š&¨ OÁãy2²¢ë6§ºIÔZ)å5rZžÛIr’÷ϬVŠm®‚@Hx"Ëož=ÂsRDa1áë4%2%‡ï/|h_C<¡ÿ÷SN²¦7Z[‰ÞŠË팥ÞGúŒ2¼$nWGÞ >E$<æÉ{ÛåÆr#Mø·ïBQ?6ìݦ @â#¡ G¬·í™ñ»öŒÃmÏŸG E*q›5$UÝ厾¬÷Þ+{‘çPõÁ“HU;1¶«TùdâA"‘Hdªh Ç‹»H×Ô32zF‹Î$ªsr¡õ%.Õ'0=éH]‹‚ŒàÑ'º–û·Öc |ñwR$s˜wˆL9 pÅSþ©œ/—UFÕ'ˆ¸©uCN~LÜH‹[APkôi‡æ@m›ÎÇ*Î0*VqÞššê"Tœá‚@ýÁ (¿.„}¨ q¼,„ è&<ói^òðH·š‡y»¾¿<<ðæÀŸNñ œ‹,&4ZÜŽ®êö—ÜØ×<.T½;ªfR+ùóô°ÒroÓûq.H"'rT€ÉTÇÊ·&noMpÌšPû†ˆ‘ Eõé¶uï‚&Mp¶Fµ0Š<âGǃe/ ШJ™˜X€à!DÏ!Ò8§B ûÝ ZR^9l®ÁqK‚Ç›OKöÊ:œPÔÌ" Ôs[ëÀó4Ή«³5"e¾Þº/Àãu‘È‹HL ‹j„&ËΚ)zú(n¤VŒ·”±AV€'(qjÐc9ÿ-µ‘é¶Ï'Ÿ*þϞƼEƒ;ª}1͆ÑgËÏ*tV’àâìj¹í+ÿŸ-Û*œ½hƒ0YRk›pˆŠç;ÌÀãÊ•ùéúbI›õýŽR²öz>Ú½07†?TèMEÏj©ðO‘h]Qó'€xû¸¸ëk¹ªbZwª+—n’ï+ÒBøª¸ç´ùê¸7ÿaÛÏŽ.ǽ13Õ˜{¯¥kÆ=ƒ_÷à+äÞßï7]??¦û¬ÿzØ÷†|Íøçlþù˜4rðàWÇ¿~än¾ø¾Ÿ#`9˜-ëØÝRc“q*óuá¾.–H"}÷=È­³qôRϘà’74~¬ïÇù‚†KžZDB„ú£„x°Ø å+<<ßzób 2:E…”Ñ8¨^˜ž:‰Ä¬Ì>Tzb}ŒWŠ{•„o£ן†´!dðjª0Å./‹òU"ÚúáÁÙ=cÊ3ŠQ^Z&*Í‚EåÅOOkV€·þŽ/ụ́³5úq5*çè´Ö ì4œ) 4…£°ëϹOôEÒ^Mù›zÔŒMtŠãjg£*óN¬Wóa;h°Ç‹ð`±W 9°1ŒëÀP]oæàÞ3…®–‹PW‚Û7Øã%x‚™z§œÃסOؘzêý8ù ñVÀÂ)iþOó—¡À6<ÊÆË]ÒsùÝ’Œ ÝÈ;~kÏ¿Ó>”Q|4t±eLa>v{pŸŸÖ€{BZc¤MVÄ¥ÿâ,Iâl¹N0¥ Oéê–µÕ":¯Œ±, t£¯ÙiÕ7óÅõò¶›%Ò«„ȸ°|Î4ˆS‰ð´˜hwu– –Ó° ÍÂâã’L~Ë$vðuqÊ}aNí/ÄŒuåXå¿«Þ¥¯êWò’á‘ו-:‡—\Æzž[^;ã3äŠðE¸uLžÉüÊ”Û+Õ†_Ùxìå×Âdl(Æ,¯ýaÖkIšqÊ诋SÆ}QN}ÚÞ]wûËñ ¾ŒÅz[šfܲðuqK^‚¢èl,/oú_é§#òäŠ6Ú!Ö¨ûX\Gę̮\±}#veãù²ìº[ÿÐm>õg*§Ê‘+|?þmišq Í×Å-ô_”[·÷»ùíòÇ~64Ë­ ‚39RE÷E¸õ¶4S¹•+uPm<® †,<®Ä{¬!@Ç\€8H§¯ØÔ ‹„(Ÿ½á|]˜ÉA,¸Ax u©Nç€ÙtêàCäSWâI•”’ùG² ΰ %:(ìý‹ÀÇÐŽiy“¦}Ø”a\™Œ‰1î*ÏS;·R½ƒ§ÐS¸AU,qüøQk£Í¶0{½G“kb#‡•¼•lu†æƒ¦Ár—àw ÏPæz¶ÛÐÌ'OjÞë¹b Új˺tm<‘àqÅ݈ën¾Ú]/®»Å÷#*½²Ã+[PLBy?ΙøE=Š\Š‹„jé!ãm»ºÛ£QÇèM Ô•“ ŠƒŒ¸Ÿ/BÔ¡MxäÉÇUÚíæ+Øt7ÝÕrN?Úž/o?læÛtÆ[ìî7Ýùžbç?ÒÓ©åâ¸ré•iŒÀôÁìÇA0íJ‘Oá¼@&[¿ì¨ ‹%ë’O;öºöŒ®1Éfóšy§Ê½O¨´Â8Ó€8<±ž¥I ön5ßuyJŽãJ6ÉEô¡Ò8ë|É‚èõWƒ@6§XÓ{NÁcñÙÑRÁô̸µiæ,jÖÕ³:eF¾PÖÄäAã(2ðX7=®Æ^e=üð‡îòz½î“ÎÇÛŽÇákâ¿bd26¦T­ìú @ —3¾>Wú¨>2Í9÷¸¥u.>¬Ö?lÓ!âfž‘µÂlñ€:¨˜vK(êÑlþÜ[R dp-&_€ÇCCq¨K=®K›|nÀˆÜ™ŸbÌâN‡%‰+꜂%xÄ“^8Y­Gáb ˆ<ÚчzyuZ¢FþjÔÃÛóã¦ûO÷Öhy^¹&ŒËÆ£ÕÄâié©(ýHaš8~Ošp "¼ü Aå\‹­Fbó±ûþ}†à}ðÅB(œJ{U24pQ¶!rÐe…‰%Ô ÐV€wd™•›z,SB-GÀ¨‘šŽS•96Ú`t±0GÆ  »Q ïª/þ§=½O«ô |Z× ¹¦ßý8y¼«°j á)WVí•&_þ ×%sÑéQkê1Åí¦iœ õm”ÀùèuýÇÓBb ºÁ´Kð”èÈš­Hf?t ¢Õa¿ß­·‹ù 
1‡=²ÖŸŒåÕUw;vl°OæãÑJ•«³·Wä“·çNŸð²˜8æ‚  rÆ¥ÿpRЙ Ë\Ê äCw-vQúÌxÊÝžBsø”1ÎA*8 8-¦q c±Ò…i,‘Â7à‚/]b/[«Ì ¢vÖ8ÃJô¡Œq(Àè|Ôõ‹Îñ]Ò.ää“qNKL«>QÔrNK‡nf²DŠêsB€Gžÿ,HùÚñ:¼‚Ï\Aľ–°äM㼆’& yè}S ÁL¥Èj¯¸Ñ¬1M¹xéDkÆ_ ãô´ ú$~Š€šê%ú…x¬.˜àÂéQ3zD…蓘3ŒS®Rø4‹™ V7˜p žP7ŽòL8®Gz¯’¦Kq¸u:NLg™HT îúY-2<ËÏ?§NèU‹w,|T椧.µè+À® xb…pê‹‹‰£jãj5T\¢FŒ4Î(O‡‰lÀ÷ÚÔg…O‰»TNÀè/±3±šÃ›Æi_~úK±˜£C^íiœò h¾ÇÆ0^¡a?° S˜©Odœl¥œö,~ò{c‰kÖ‰4NxµI«ÎsxÓ¸êò÷ßãg}DgëTèà4jÒûæd¸•Æ[Ò[,Å`~lpjáÁÒ™”lØtó«¡ 2n·¥”)Hç0SuÊrL¤pBF ØÀñ€j`è{,ðFŠÆidzE:F‘´»zwÓ8¶t‚å)ÜFÕ`æ%xäÛÁâ~³Ü}¦Hi÷#µGÝî6óåínû\ŸG Û ãa;Úâ)õÁNã,6²9¬M˜UVsŒÆÕ÷ߣé®Áñx´Reoz}yF_¨©è®æ™4Nx¢ë’”úx¯=½Á»˜á{¢cµïÇ…âÚûD¥Íå|1KÎÔŸ{24%ê2€)!ÐbÙð9%øëO°OtU£þ½ú˜ºGE5 —RCé>̧ðQØ50Þ"<±BƒÙÐzå0×|6ÿøqÓ}|Ì;GÆäçz\¬'+{¤;³¸¡AÜ_‚m•Ð^¯6&N(¢ =¤qÚë U J05_£X{ mV¯GÆÆDYüUe7Š{B/‡²˜#]îËM·OÞ#ZdbàÒA-¸È/hœQm~I¸}ß]„'Ö{5»œ“&“J{-22•ÔÓV³i(T B‹{¼ÎJP‡É<"<±ú¥. L œ*ñ)sZ©Ÿ õ^K ™,@í„m%xBƒæ^½G}gCu dzM‡qZW¿ÖÄc™4¶úí® 3õnw9µjF­¨•¡ŠLœ¨â©ø‹[€[PA€'V4/Ò¤ÇÕ¨!}£Ö`Øš<\Uïvw"ƒ%b[Ÿ H#ȯXèöD´cD” p¦>%x&7±¹ºm[oWÒ¡olà SºŠ3|ò¢AmÁk6xNãLƒË=ú\ÿ’aœSÕíÏ󫑘«‘t¸£’t†Cïû²–ïø&’ÖS±Ÿ,!Õ˜§cúŸÎ½:óeò€82‡ß?Ϲq4N\ϧ>Y©4€wÜ”öªdn¾"hz¤®_åv€S¸ébÃE¦6Æ0κâþ!î0xH*uh S®Õô¹ÞâÃsåɦ>ŒÉý@º-\& žØ`ÝPÜvH:‰ãÕà<íìé§Èx{4NDZW  ÿiß&£»:{ˆ&¿¾TxôVËÝâóbÕík!Ó̇ñ,Xès#z¦iß0V¢iôúÉ{2<¨ê“öáÑL/çtvN€žÆ9ë«Rv¶˜'Ðë=u™ùר½.0ìdoÑÔ§n¾¾óÓ€¸"<¡ q7ËÅàðÙqUÒÝõ^Yz7ºæJP÷n¾ø>ýÎ2{Û ÅÀkíêó@‚Ç”KëÙ/ù}G¼Ç{Ðíùb½éÖô?7¤MPŒ6Éü*`­XŸO‹MÌA6‹Ðë,‘áñUv²Äùý*ºÛϽݸÖ¾^gÂhœ*Ø—¨ u%èm¨?ÿ<J7+Ûƒö #Ze\[èŸíèà9)ú¦5 ‚˜ÅÄö”LŽÀ_ ËPÔ¯© Ä&^ ?d=ìÙ÷ÆõÃøÚ@MþÐpK'3 ^:)”Ä–áÑå Òò6^­îVó1]N—‘R* ²Ø£ S¯Ò§Ó7l -&>W¦P74N‡šÙÎè6x´{œjаU‚q$p~ÓmÓ±íÙ§s¦sÜñQsá?G¯ •Ò™NØlÐcAÀ‚“.ÁŠÝ˜¿Ôã‹¿^=jfõýМÏâAE'5h+Á0@„'”\öÛ^]ÌÝ£NfÉ1ɆÃ8å‹]†—$¨@«L¸O(v5.Ñ(³sR‰O¢á‚&})P,¹ä³(+g¸ô<c ëÕÍð¾xÄEbNé5åõ2—“Ã8e‹ÝŒ—¤ª@{Ê«Vä_µÊP´ˆzJðˆ£ž#Œ|˜á§¬×ýÆ}üMPÌjGJÇ´ž=0cÿ8¡Ø¤Àê GÓÀNIðˆ¯Œ²¢W_RçUÿ£zF£iËôÊink¥$¤‚!©:¼ÈrÀë|Ës,—…®ßFX†Çª¢ïñ“«‘ñI#˜ä*ÇÝ?RKì`KµÔRHõkËðWÜx}ܬïïF´m¢vÔ3½‘¦è¢t‹Ò:_ ^n³ÒŠlŒ-K‰i.¹ÛLiêaœrÅW×$.Èлê&W†Çëâ+kD‘È(éêI{8Z?v•"ɘٟ`¶ó›»á͆s±Êú5™dx|Áñ½3½×ÜQä¬×'Ó>ýƒ?Óòž¬é*nÀ×'‚•®û»õÕÕ2üïH{—÷W»]^ˆ÷ý³Ü³ÙÝÕe¯[f‘i41èÀ¸ý8tåHQŽÔùø#40<Ö”Z?Ä€öª<Œ9«Çµ—<>TðžAKža 3Y ËH³‚rLà3áÍúˆ#]»ØÑÛÉKbÉð¤Wn…*– c0œê4%bÚ“-@‹-X À}ñ;—*|ÿò3*ø²ëý1kÆU T;!*Åù?T“2ú?ëZ ÇHoõr‘àqSsÛ*íuÉ,7 ·ß>*n»KãFÓÅO¼)Jo(®-xÄ…÷M>ªÚQ« 㺵 ¼òš=)§q£Õ0²òß S\‚=˜ú¼áÁ2épR•2KÍ¢²ŠÒy9P¸¯Tkj dñ §%è{eIeà9µøÌËMªUf¡¹þi­fšf ã´+Ø‹´±ø]ƒ“‰÷%ʶæ*Ò+ÒxðŠ©­Fã4Wl÷(Lg‰-ª<â†ô[º#ÈŽþg‹ä·÷zd–U«}äŒm®H¡ÔR4 v +~f!²¯CGMúqMÐÐ1vúqÖ£Ø T£®·S \G ].Íi{¹]l–wãiÜr !¢µÁqNNB¬éˆé›uƒ+ cª8¯TÈd Õ½sӸц Â[¬2̀Р­A‚G|½yôÍôˆU=|B݃Ûmæ>,½r™Å…Tø \¥q:TqÄd@v ¶ o ½½–Åh˜‚\2jœEKãL,V¤£©ØžSÚ^Û"‚¾=—Ú6Œ ª¹X¯nÙ¸õ©¶€2ìEm¤ÎµPèqvY~' "[*o‡ vªnH¾väñxkx½ÖFUß3Ö('n?NëØÀ*Ȉ+ ~‚œ oá8ìKórV0Òù¸®õ˜Bu‰®AØA‚G|‡ýöÒãï}gûº9‡g1féQõÄ´E:îŠ-Rf(^˾,Õ²´H’à?¬+Qi¯ÿj&‰ÌCtÑžXhÑ+l2«ÙxÄo¥ú…Û]:Rß<ÝSOdÊÀQ•§ôJôé¸6~{Ú Áñ*%‘i‰Õžrq!Ý y>o©ÕbZÇô’—ü>z"{ÐÐ7˜užÅúl¾Ù-?ÌCí"¯Ç•¦û’[Šy¹9Œ3º] Ú Ô”ˆR¿L‚ OåCâÛJ}æ?GF¹hÓ‘™šRý8´ºØº’Y24°®Ö—óÕÓz2»Z¤^ çDTqrß¡’Üàö Ö¸O°•k5½Vg½? Ô€Ù*Ì}M?Έ›œV¡®3Ô ñÄÒËŸ O EF£hƒažôãLp• 4@â|øp`Ëò[Èë} y<ÞA>œk•Ž×Æ2(4ÂUé¥T‚t©c\Б•týò%ý÷€¢¶ <ü4ÿéýb}·ì®f‹í§ÞRòÆw£ôÍH©Î¨Ù9OÞX*»]ü÷÷—Ýl»¸î®î‡„$x¬*aú?íÒad¶¼ý°™÷Jãf˜Ú "t§È€ñêãÖå«@ ( À|±¦'Þ^ÇÌ%{ JÉÊ22ºô4%l ´€Ý È"Áct‘»­Û庸9 éèÞr'\wjÑÔæ<Èäx"<¡ÈÓŠiúerrÖ@Gn{K㜸„ætJKà…ÇJž"·ÛëOËÍî@aÜ|R‰PL'Úaœ¸Û—à§@h°HðX=5Î.OAÔÔ‘Ö#‹/a€©Ñôºø|8!— ö1s?2— MX¥“ʬÎÂ3©mø¦û¸Üî6ŸgûJ&i$™nÚbòhœU–s©,,ê"î¢Ìî Ž\‚”›Q yÉÄ·öi{wÝmº•qsJ)ÔNYîª8Cm§O%`‹»,:¢·8š{‚T†SYD?Ž“Æ© ÄbrO‘>£öÔÊð_b¹ÞÞïæ·ËŸTf¹YM‹ÐR”‰ƒˆB(ã9•}ù8mý×;Bÿk¯G×#% &‚2I‚4NGe‹å+ÐX"‰oÀ žà ÇéGë›Ù^Û³¬ënËÑMˆÎXŽÖ4N«bEk§ÓZ€Û¶ ƒsmÌÃ3"D;®P É‹r96SÍÍ"Wø5Ø,"4p%xpj_ôc&÷õÞk9. 
YPîÁs?NÞ¤¤ ™<ËòÛ}”?íYGÃü"q¬Zµ±;ûîûåÀÇ»}"õr}KJxu'2øßOýæ^ó”ütqvµÜÎ/WÝÕÙb~7¿\®–»e·½8û–~ÿÛÇ_ÿãðÛ“å«ÿøHˆ'”;‰¿Vö³;Fî +íÆ‡4)· I •JÑ#¶ I>5IÒ+ÛpÜÈG‹Pƒ/P¶¦Ư‹òFo%5³ ñýÑC5³~V wŒR1“;MÄiK²tŒ³_ÉÒo«bñ¤œ©2fG €-GµÑŠ¿‡Tk(TkÂAüºgmѧhÙØ#êwå85z¤~f¾ŠânM_m|˜\U²ÈBöå˜4š5yȤꢴ&~eäBœJ®ÞUÉS{`Dõ„ÐfàŽ'3è$¼…h’+žV® MòñÄBU/ÚG–-F{«||¯l)»ir¥LvÒäã ElË^»Wyêøè48íŒÊhô^xä‚”"V®Ü¶‘ óÁú,<“Žõb7–Eg#?Íš€¸U¢²^çØÆñÎÆ#n+¿Xz?:|ø9[–ì;üz2¢“UÑA–Èmî²ñˆoøå;ÁÈ<ðhk¢c¡ôûj[Y–¥ˆ”+¯nãåãñ•nÇÆ'€N[§¢Î‘\á˰\䥨“+¨mD AAž‚ÞψÂùˆt6b¯¦{=YHKQ#[°6ÞŽMDÅ<¡D ú¸¦5{ΆŠzBì9 b)2dKšÁizŒ•'š)y;£JæCʹ(‚S²s2Ñ¢€£sî›Vm(Ç•xZ6¦k>.œ ÖL‰ g‚,E ÓfêAö6„ÈÆjûó»åЦbF½Î»ÛÝrÑ?HyªÂm4õÍ–ÈÆZ¾…P’RÔ¢‚¾9‚»6¹6Ùx|³HËíúª{¶¾ù¯Ãˆ:#ùÃ[;Æ2‚¾…r…Å6Û•WÎåáqÓ=–Wêå£·Ùø¢?ÝWÁUhÚsÅ𮼱 ²ðøb^Ê+-óá×l˜:LöOFà•"UZgd¤xÓæN'Oœ¸}dŸù(ªw|hÀw‹À–"H®l¶áƒ1!#ûÅ;5esxªð>ªn>>š×›S6 1ÎR´È«MR”Ǩ O06L­ó!ÑlØ61ÜB$ ʢ˒®Íµ]6žh§÷Ì›ðŽõà½1| 5|mÌH° *ȶŸºâ”"Y®ôææã‰m^÷¾Ãær¾˜ÝmÖ?~îgÈ Úåˆa*¿ö‡_ŠD¹ÒB›Jδ BA\—ÿ¶Ûј»õj¹ø,›‰«îÃü~µ›]u·Ã4ðÁÜl9\®÷S)B–¸m¼¢l<ò^€Òix­}>¨¢Jk=~pµØ3»ir¥Ä6—Hݳð”é 4=ú¹‡ÁGw³%Œ¹{[kÉ QÞŒª E j³íeãÑ“ÛÑd–6;Š‚ÂŒl 4ƒÛ*¾&¨ÍY 8ÈÁâr#›îŠ.çæ«í¦ûûnûÖš}óV¨¼ó´ácÉÙBÚÜÀáŽ!àã—}#e¤Îd$Ò ks°ËÆãUsFÎÿ~¿éÆ'ŒccTès̾ôl,X5&æ*¦ÑiTÞë,<®9——7ãÓÅ‡Ì³Å‹í¬¢@¬Z,Œéì3£b#g-6ÍYx·þ¡Û|ÚŽOžÐ战ïåµ­³UÓÆ_Œ¢ÎÈ;ŠÍÙøqq7[,nhf€çgKb]3Ò½-A5n%O+K®MfJ>žÐœ[íêÆÌðáþ˜Ž1GÄ š‘N(Z56f«¦É¾ J)o3¢¸AØô){hÀ6:eìµA¾ˆ±ÝID(Z%6‚2*FXÕ€:‘å÷"pû^z¼AßB l³&rñˆÛ) ­E6Ýv}¿Y¤Iû°¼]öýÞ­W7Ûó¢œ?å|¹NT$w>¸ÌÙYcÓ±eí¿ì÷ËÝâóbÕíiú¯z²xnL>QýQcj.Œ6¾¨Va©/ñ0½7ëûgwìO=Oßš¼áæáECTæLä@;ªpÇ 4ÎŽ%du},ׄF;4x–'Áƒ¶L 6™ëGv4¢ç׎N[Ò€MX, E%c ž¨Šd =/¬w5ß^_®ç›}ÆùƒV7ÛÍbv¿í’N#WGÒƒqˆÀ¶¸LãFžAËC–£´½k‘G&ÁcôÄÒO7>}ƒÕ÷×ëmrw‡L‰ô‹³}6à HæRË÷¾Xd3Æ=sOòvë·FTΗaä8\ Ùx@i[ß<<Ó%Z còÀØTñ4µ›è0”$²x‹šÖ·"ÞæcGÛàä ƒUmÀÓîÚ+’9è¦M,€‘3^H ¸SûÄ—á­²÷ –¾O°—þCÐ5CÌM.RÿŠˆl-§|Õå/¤¬¸oðêM‚Gþ¼I¤Èwj·ÞÍW½.™,Þ˜¶£ˆÊs¹õ4ÌÄå_Ž»Ø-^¸Ið7ÑB}6ˆðê‘ÈÖ^(Ô0 ¥—cÉ'b`ŽÈåD.óÐõ›•õßCÔ3ðȳ'(ôn}Õë3Œë@{QG?ô7ñeÌCQFKð !Á ¥,0'¸ F¡}iöˆ£š^¹†&"—Ñü®S)Áã¡¡… _­çƒRq\©6"¼1Ž3»iœöXÆLeµ@€‘Ž£åX!ƒӫ,K9òó^¥Ì±¾ï‰ë¼ œLÓúâ†BDì|!´npÞà1_ÂZ¼<Ï1!AÆDr}yhœ8’Ù†çê§¹ñ´3O™þ½R™s¾CNÆq¡–4.ûŒ‡”çùò Š H"À#ïf Ë)yÐãìò󃧙Ƞ…!„Èm4Na3³!ã¸DÛ î-ÂÓÎp\Í»›õm¯Pæðï©õ†QÊqPƒÂºG““Hß¶°<•mDÒàÓ³f¢‚”UÎy.&›Æ¨fÆ!ŸÇZÜ“JðœðXr}KO‘ä ¥³Ýv¶è“ºùíý]¯[æøP¹˜hîΗÆAÝS‰ŒÔÜ ëó_‚þÉbÿé¢a¯Fi\‹9 (r:ÆÍÅzÓ­·³Ëõz·¼I½í2§Kzµ§½ÎÂRâº)Û.,‰,ØÀ›áÁb¯YÉœ']n6÷·‰ Ý Òíù^ÅÃ_ï7}C£—HÑq|Igƒ‘—)9½®H*hYºçÃê”g׸·aéX;bÄ"耘ª+ý8µT‹_†™B‹ø¥/ºú†˜L»Ìé=R›¼tæ HímGIš?mºÅ¦›ïº«³ ^׬ì‹u?öAKõ¨ÏhÔy2bNy5þxg‹’º´Â…ÒT/'ÅãŠÑ™h°êv"ÕzŨ¶¯}Çû7ô㨧v.‡r«·¼≓z© ·Vsz±],.æ·Ÿï—WýÜŽëMS}œiÜhIC¡ ¨ÁR(±z³O!q/ãª}¦ËÞ-ÏT¯fÔ‹ZyÇ·ç~Ñ^Ê”‚ŒÎÆ©•jA ¬h  j‚²ÎrûUçM9KP‹®q|ˆð`kp·^¯rÕkõ&¶º´£sÆÍ$ktEkJáÔJë4à Å­¥¼Ì‹nÛ?¾ÁqÝ8äµ1XÓ8mT‹P²q\l`x¼Ru,B®jQm:èšã©GÃ8 ¶¸5±Z€5bäãqª¶E`ÎØŒS"wªIã¬ü]ySÊJD©Þ_UˆÇU:-ìuy¼\!zËh5-]à¶·4ÄOCJ“9«­^×]ˆG^k(Kw7Ôòx68W³O¦ÏRW¢ïŒóÐiœU¦ŽE˜N[‰®k Áã‹»ëíåýru•kh£[ÔÁ„DZN–DkƒUL ´Î]¿O€V¶9‚{pQ%ЋӸ‘{§Ú†²,È‚®» Á#'¯[™/Æ„ì|òbú.6’ÆyÚ-vçƒÓ ² ª›Ò á4èòÎ:±?tÁŸÖ½ætšíïvó ]%=LøçÙ§n³ªo¿ªW»ÿ§÷ûÿ}÷ÍÙö®[œ-®ç·»í7û9øæl~{uv7ÿL‰ÛÇÐ{ðÆ Þ~@ßsµÛ.7 ýÚ~ÂoÁ–Û³Oúü¯wWó]÷Óo6iíºÅî~Ó]¼K ÿuø‹wöœ‚Çé'¿§¤Š‹wÿq?ÿœÖÝû]¯îÞ¯w³M·êæÛîl¯çÉ«¸ÀtÒH I_Æ…‡ô§Ë9\"|èì• jþÁuéߢvi3QéjÞYT—1?¯‚§ïú‡u2 æ«m÷ó1í–«âµã73uJýílÐÀý”tÒ%Wg»u²R› uýLøØíþûÙíz?‰gÔf€{ Yß.xdá0ßÓ¢_"˜–ìw èõf}»üûÁºMŸ³ýõ‡e·º:ÿ]Z+›?,·»_Þ.W¿Úøõ¡Ù§_’Q¾ëúÓ?v·Ý`9/ ©v¾\Ñ$ÿò¿îÙ·§@ÿ‰¿ú¥úq¡”{µ¸T¿ú¦7wÁ§ßû ¥^ÐjûæìÛõÍݪK ãB}sög²Î‹å*½Ømî»Ä2çip?Tú§ùöúâÝlqwÿ»ß}4îVý:MôsÞÍo®¼M?ýÃ|»ûÓfý1mÛ JL9ÿmBøKýÍYÿ—ÿg~{?ß|þæ,ý@üÿýË· ÷oÒ'&‰àÈùµ1ü¯þÃÅ»ëÝîn{ñþýp¦vfþ¶wégýdý¹û˜,ãæsÿƒÛÞŠÐÿ8ø’¿ùÓïéoÇÚ^пÝÌ“ùÜÝ­è%Õ7‡›üvN†oû.éåÝ®¸=M„ï~ÿÝíün{½Þõ}³IÌÓ¿<ö“™(þïW>^ïÞPÅ?¯¯º¿Ü“GÆ)æx¯ú×ïÒ¿¤¿Ñ/ç›î¦Û  yuþLËìnµ\,w«ÏOô`t:˜¾£B´épâùmñÍm˜|ƒ›.íýÉ7Nî@÷cR*mÆ×é×›Ïg‹‡Mð×ýò<»¼­_ÿ_³çœ­’ úõy4¦ÿ4(¦·I¤G'!}ÂÞ~2Sý~N_²ßµÿyý©»¹ì6ßœ™oÎÂ7g6+xÚÔ{Ë÷«GÇã™Åý(LNB²s _Ö—‹—¶ýåþuŒÕ!y`ÊònoPN½>ÒàßÎþ!¶·×ÓÎ4g¿ôÎCðÿßÿ»}ûìÀĘÖzø‘&ϧÇfŠÆÂDÓ ’/Áår=îÛîjÈ­ïv¹ºe(¨B¼1`¸ÈcçÁTÍd„$ˆ«Wø’á ò{þ“4ÈÜRÕsg!r7QTõ¼`à¶ 
²„¶A‚LÍçgG~>{|œ“§¿8???{ÿþÌœ-ûýb¾:ë/g·òúç‡ÚLssð¼M=Au¿ø®[}øÃòöû7¾P>ć¿þþ·o}Öð~ ðÊÍlT8³¸˜ÏÐ¥w9f>@ÔÜ)ýÒS¤øó>nðÈÌÄ œsôSO™½ƒ+ýWr«Äþ–žLï£êi»¹¹;øX²Xt©ØGÜ϶ï†×3eÎT¸°úÌYúÌô‘ýË·ï~>A=9ß~ÂZÿÅo»§»‚—Ÿ{»\MøÄܤµú§n³\_}G¤«íʼnŸXЈüˉî>t›îvÑ%D?ýôÌÆVãY„ë=ÙݳaSx÷üš-ýœ¬ÙæÇôËíê¹5‹îrvuù!Îìå˜Å‹«Y¸ôW>†Æõ·FI²ŸO™µXÞÎWË¿§/½8uªˆÒÃÍäÕ?ôʼ8ë/Ÿýìw·»Íçöà_œò;{8›^¤w/îUN6äÃG—²ƒ-x7\ÅöAwäáC31üß±€3/Œ½HNÛ“±8é †Ù#'*¡þò¯zÊGý«&¼Ûÿé݇‹¤¼ù»‹dmÒŸéþzÿ÷ô·ùííz7< œ?ŒZÞ¦Ù¼êÎ7Ý¿'ÆŸãço¾ 9ÿY8HПÔ…öÿ8˜þ´ŸšËUwþy~³"Îýüó¿·"Ù){ù ³{Ò¦x®ñû››û¥\œ¸µÿvNG‚áÿ~z÷Zñgïæ÷»ëõfù÷æÿv{öX`æ7»Ýfyy¿K{.ýøìl~·|$ñ'=üŒ¿£=QŽí¦qóy@xŠp?¿ý ¨¢w^wÔ¦ÊmP%ª" äã¥þ~ÕC+ÑXâõiY‚ãÑzÝÀë¥ï É«ñxÄuDö æ‹]·¾µ“¤“Õû£ú<¼¡O^,£Ø¾ã† œGIí­¢®½àc1°ÐbÏ“àqåüm—~x5{,_M\s‚p}+™h8È4îÔ¶^Mˆ›¦s§Ž–$º\ ï¡tt—á„ö¡ÕÉ„ªtŠOç>u»(Ÿ€~•Qõ}M<9Õ€H-¦UÈÁc]kÀ"<è´Îá<(OÍ,Oé1^—´ø¡ á™T"SLÁ#u_ä6ºÍ¢çKýÍ© Ù(ß Fߣ.–¯r»¾Ý¬×»!l¤ãEÐŽšºrgL§&Íw~&Éë¥y¼þ¸³Rn²é{Ò©Éæà±ò‡{*gª‘9'Lð,*Î,¥qºBm0!GQ'‹„Aó`Á@ƒ9OߨM¯j/pW‚qé@ÂáÒ8ë'² TÛAƒ@ŽØ“;Ò4ýïˆ9-#5ÊS 84³AÕ^öÀ‚õZ[<Ø‘2Í)¾Çïbƒµ–}¯9;®¹¾V"g hœ²'øðMÈ*‘Â6˜_®ån¾F-s<Ž˜EÇx#ª®ÖòÏb°ið æ^€¡êÚ ‚™þ’¦NÀ£Hûq B¹†º¥X*“ÀUßúex¼/úFT TÍ(5mJÒæÏ A›WÝ¥ïX¤!Í<ÈÆÕï3|½þÌÁƒ¾øÒ¿Û,?-WÝÇîªWžWžðBŒtN-ÇúžˆÖæàA;5aÂ¥(A0ä|dÌ:þ‡©) S¡‘¹Æ©Øb–M:ºyã3ðˆ»Æ}š¯–ÃÛ˺ËëõúûgµU5Ù+r¯Ç¤Æ‡B,/k¯ ŒnQ€à5' U-/ßFh³PSS &cpW?}uøJÙÈÁS¡4È å…qå™dÈcL`,hÌZCE?¼Ž&C–håùNNíóŒ=žðÔ|Lçb< ÄC`ìßξû~9ònŸCzKJxmJ©¢ålñTÒòì—p°O^œ]-·}ñdzÅaeͳ•0 ””o3ŠÚÕ_`ô=^im3ð.˜þRÍ~ÉÅÃ˳4IWý½ÅÒ5ù(iÇÏc4½ñ'UàOfOpFåˆéCöDY¿¡Ç]°[9©êIóÇ{ÞPφ7é§Jé"æò¦,ð‰ŒÉÐ(mZ0&±7èà3ð˜"wtÇ–éì`X¯qÃS%@dO½„(71“OæH®dÞµáHÔ:æà “ÊÁŒ«º×1ë'M9Θ5ÚS6Æ©<È•E+ß„Ô ÏçàÑ…îùö=n†¢ï*¿×»å¹a †ü™Ü(‹{2_œæÍÆ^Є/É ø 2ðˆ³¾_¦Ëå*üi•:–-ƒÑ9èG_i½dK)ÔS¹’+iX ï1Á+GCIÛ¬MϳÄF¯spƒ*aS²ñNæG®\¶ŸjBÐÊäà‰ElIÎj <;ЂÉAÌ$"C;™Q#äH…m¸iÃÈÀåÐÏŽˆexz…";ýé+½ËÚd;£Y€¦Î0€R˜Ü4šaKÿÉÀªÐîpl=18òœð!˜Q²­ÕD˜Ì¢\Q½jÃ"D\žÉ{ˆ\÷†Ï¡¼ÉñÔm¥ŒSd•EÌ4bYã,fDˬ ÓÓÜFæà±õ×þð°™í¿×?fµ¶ð9Lnx¾þÉüɕڜr¬¨2"iVü"B¦ÿÅzvÓí6ËŶW8lµ!DÜë*„9 x2Cr m<§Mt1*+nºðf¹•q•ï6÷ë³y¯u>úš‹Þ©Ü[IÔS¹â’£®r¤ÓmâkŽRrð)WöeîÖ«åâsžæç«Õú‡Y7ôŽ$½óñXçPÙŒx¥ƒ\7¦,îÉ|ɕϵñVeá ­ø²š>3î|hÖ+*#xé‚­Mœq¦2([PŒM”'6gÐÝÝfý¡W?´õé?!#¸é5´âÏ[ð'³T09bš6û•·è²Ôº5{öi|ýða]LÔÐñrFEùó¶“”+¨oÄ *®ƒ'˜ú ºê>ÌïW»Yú‡Ï½Þù€pÐ>+®êcEÃ3‚{*_rå ªÍ~ h›ƒG‡buÇNŒ»:ÖúŒ¤Ê$µ0jSEŠÉ\r:fIkÛdA…`¸R*{ÜPä©Û‰3`ùð1•_ÈÒlö­d- &sÑçÄC£›Êþlž¨&¿–Ï›?ŠP£q4ÂýFþ\«Œ~C}isb¾¨C›9ªv’Ljs²Ÿr—7ÝÜwÛ©«oöqq7Û¬ûê²– £·à3¨h³C~-Ś̶\ñ]›] ƒõYx<|q¶-ç7iZú áCË19–9ɰã݈ëòìM¦2,j‡9Y·ñ 8ÿå—Þ¿ürã¿L¤;©ºM.rt•ëšp|é2d‚ïÿé?î×Igçýÿ¼|2¢¹ jë y¸œT¹ËÕ~‡еª…™à1å[S§ù§X/]1íë51Hާr, Þ4N‹+4¡«@Ó‚<âÇÇ5J½g’é>£-")‘YF@ œÓÄE@Ó8ëÊ— –ÓV€·EþS¶Á²g"64¹e}ÛW-¶ì§‘T:6Xé<^™b+ý §òaÞ©ãù³Žn¯tË,(‹Š@² @iœŠªˆÅðjÐ ¸ Áª ýi&ÔæèùŒ3lÀÇÑ“9_ÌTá®D–G] _Î.l»Åýf¹ûLŒÝ»ô?IÁóåín{þðO¯4ˬ2—¼Ô` ûl?ƒ ]Eä¬ÎÇ;’>Z < X&Èê¦Ó+—F‘ÆyŒÅ¬Bæ $ ®A€]‚G^~ú¸½ß~ìè6~1ôz~ÒgÒn·¹¯^üa–˜Ga;àzê'˜] ¯VªÁIA‚GCE“Ð+‰¡Jk°9¯&PZU9{PžµAËçÇ*Í>V™|•‘`e@›†œÆ!Ö\=/m;<Pò³Ø±›³wóÛÏ÷Ë!LÁ, í¼wL­Ç~œ–t0‰†˜Î4X½U¡ býBù2<ÁÔYµÌr° é€¯9W!S¶DsÚSé'Š æV‚'ÚRíhyï5£Gz¶¤Àq'ú4n´*Á”eËAŒ¦ÁTçã â|Ð1•]¯ìÅb(£ ³Bú¾ÂJwÆKãF𲞳Ó)Àmlƒ•-Á#…¼Yfꘟ˜¢§£ãz¤¦èÑDÎÁv´Û`Á•-¤i>Ì‘*«§[‚ë­nf•xº» à9?1³(¾Ò¨ÇJngbýé–à9aºßèÎ;n! :F—”I¢bäüÉ4n´0ÒÔ®‹ÁôÐbÊxNh>®¶º&š ÎÏþnÀ0K&€Fë{ØOã‚3%H—£¨;68^KðD]¨ut†Od5¢kYؘÎÚ¥÷òS(+À‹ 6užKžY6ˆ:}XÎD‘ -æ»Þô –¹ŠóÝ+Yܱï fØÐA¤À¾/Ô"|ºMʇM¿]Î%xN}qbJfÿ¥6°nf„q”×¼8h‘e"Âs¢O~³¾]öº. 
æê7¢ Þ˜È]ô§qÞAEÃex`±AÒ‰Otµ­ö袱çŠF®Ï\?N¹pšW^…¤2è±ún-£”¿ùÓg§œÀ(”‹—޾öªw¥À&edz/ÀcCáU¿/cúàæ2 GÓ©0rUÕ†q¶BH}2Q0DkbäÀ êϽ¦†òT}†ÇÅ­mÆ’}xM"£IÔÞkžµiœó±ð’²6ª¯ïÞÉðh¨»à™õbÀ£¼âÖKçÅÙÄUù)AÞb¥‹ðH}ú»¥Û]w÷ÛÍ}^‚ß|Õmv3=h32ÚD †=–£lÑÕ]í…ê¨ÿðP³ &>míÉ]g*xë¬ö^oÌ‚ªè‚ã¶" ªóÒ)®NP  œio=ºNITÁE›GÜõhíùCeV›OÚ¤j¿TÝx\‹¨0*§¹eN㊇Ù%\Mç#³Áëíá{0r¡·a\ð×8³J(ùÊÓEƒÓB:§IÏìÕX™Z«úå°„xBµ},ð5£I4¦ïŽÅ!Góÿ3wuËÝ8úUR{¯ÿ æv^`«öz.ÔÒI·¶eÉ‘íÎô>ýG²-·eâà©ÊT"ÃÖGð# `}j¸¾‹Ng:Xs «y0wΩ%ø#üdÌè¶žY.‘Ü|0=#½Ûˆ-ù\zJ·o Ä-úÓá~½ù¶úütØ^¡â¢IW™ÃŒ½µŠK|UÑ76ÃM.w˜pü0™ sH.¹ÖëœY- 7i \æÖ(±´XçrvfÍ´¶:Ë]EÁÓKùçREW*¢¿n ÕµçQàH =íè{"6x!¹ WU™Þƒ=l¾[ ²NŸž-õûjtÌ*¦â'Å[Ϫ“ª…Ö&Dz`m)`Ö¦w?ø=΃·‘Ç㜪É9NÇããåTÅ3 7ÓCh“ glPÎ9½šÊZ•ÀÏq†Ý »SjvG‚¢t[Ex²bžÁæ+.×ç·Š/ý=>øç·p7º™Þ§ÜÆMrFÕë‘­"ÌÐ!’áI팯« ¨0M4…ƒ‰rŠbª:O#ñ®C`+Áã»[bÎé¸ßgÝ2ÿ6š ç†@µ>òràõ`¦·‘"$xŠo´ö«éZ‘J©Ÿ¬“œ))õ^ûSù)®Çæs/Ã#ÏBAU­¿ ß1hÃ8în‡1Ô9 »¨ï¬½§ó§Wô¢Õ˯¯.¿¿zù£¢ £hÊ“£®4ÜÀèrÐ52Q #þÜ ”ƒ˜c¶ðx£hÞ6Äó©®6JË3”ÏÀD9y ‡_ÉYÁÀrê`$xÀ65o>¼?÷uM×O‚âXã­DŸØ‘+Eó¤@ÈôÉ0½É±!(Ę4OÀ#~7*P[®«Šâçr``¢Õ¬li ”y+„B‚G§*ÈÅÎÞ­¨Äíï‡ã½°ó˯Q–Q Д`À½#¶[øY fn_W†'izïJ{ÆÁöø¿ì8“éÇœ;Š )ì}‡%-Á#¾†¬e€Î8e©_4#FðÈÙ"7ð^õ¢œ¶Ó‘:°@€Ç¶]ðŒÓÐqèrHQ.‰Û;wæ«d,¡´gOL½mÁë©KñŒ^ÁC1Ær-ÊeïšÚ¢†¬íÀÕïºàÆ5Ž>8ܶ2›P*_²½íÀt® ÆaMˆðd…R¼ÏÉá—f"+V×£V£Udg¤p£çBld¦ñX€´tpýx攡hŽñ¤=µÙsÁhß/+TímÂTÁ(à*˜ž¤/IJ9~œ¥„\ñ©nÙ£\r¥é‚á¦]€4upš$xrn²`Fµ1¾&U{,Ô¶†™}¬ÞøNnßHXqõ ¶”²¶dzHð(¶;¿hl³ßmöÇõöáÓ峟s“eÌJ¦·»!;.¬G9Ÿm“õ<‰•Óa†þ¯OÃÅÌ8à-Ú’X¸MšRŒÅÍÝúT0ú[Íg^‚ÇYí5t»@ï_ö»Ã·ŠZ™;f ½>ç†î[ÊíÖ½WƒL‡«p q^峿tàÁ)9,¶Y$…:gdˆ\ºÊeïµ×¹)%ÃȬ¼OÖ^ëŒ.™;â‘ÞÀ3…ùG9鯾€©Óa•×<Áé-èêJHŸŒ÷&êž%ɹ=y[2O‚ÝWÊËjM± /Ú«øp|Üý¹ÛŒWùfF§¢…ÄäáŽrAv/ ©VûÌBG«Ìb¢ÎËÔ™—Û’HÎJù±”¼p±Ã…¤¸bBMY×Sšs—_À\M…#fÔšSiºÝ‚˜‚QôðÝ$xľ›žV«+*SÉ7ªŸÅô匸TŠƒE(mûª{«¸Q_‰Õu`껌r1Í<³é„/µ¯„"ÃcSƒùdWq0†úRñøŠ/ æS˜ó9Oqâš&»#=ˆÚ߇ý›Jp?Yl* ³ºß¯¯»Ý˜v¡®Më1p¶>sèQΈSƒt· Ô í'^‚²xIˆXÑ{©ù{X†õördÁÍ0¸œ²q…ƒIïåX[óS€>ç“.ÀRwìÏýñoêKs·~Sæ\F%¯:UÛXꊣ,!šÄ+..¸JYÈNÌÒþ.Uˆ4õ›à„›Tð6$`º>ròžXªìMlµ¸•îmÅUBdêÚóÞ¡-v™#!¥…ˆª.&¡ój ž–,)ÉsSˆößÇ\8/ŸÎåÅ7ú„ …ËU‚GÜBl±Ï’l]›”‹`кºÉ Ëw ! |Œí§X‚'»%«öõâ~¼Ø_}ß ŸÏM$]®y—8¢œ ¡·ëœôÐw(ü'Ã#nw-q^’«+.¢KLÝI8j¢\JaÉúOMÆ;œ‚Hðˆ¯§Þèì’°·úIwc¾ÎjØî.^Œç&\ÀØ=rǪT†Ð¥†®tRZ:\Hð뚻Ҿ®=äA°1[Ž—‰Òž­e%^ û‘“¸gËYsïο–xn¦)'‚ÞTq@)Û¤½Ç­ˆöª_ßÇU9íOU9ß:X†¢‡ßOx¢ÁMÀ#~zƒl—Rê—^ W÷‡¯õ™Wß`4ÓÆ0'Y™N„RIöLÕb¥©š ER‡#U 0Ë'ýº¹ÆMýF`K€Ìž7P¦¸ Dk’NÇ^I¹Rœ{q9ø7™þóUʇaTS²ž£/Ê…\–Óa)}xcüh³üïŸ>ïw›Q{Lv:ì shÁF?gñw ¬`¥¦ãIF¯UpM«“¤Fe[æØ­xg"ö’˜Ò6}#ë0ÝÓÑ:c:øƒ<â\™‰Ú{y“~u«é™œ£.E#—´‡r1xµîÂ(-\ A‚§AÐpËÛêéPõM-WYr”£|¨&VAÎkn×¾œ† ÷*öá¶Ï•½!E†ŽÃ‰ME=d˜EÑéS{7Q†ÇÛV‘ÂMMºº&-Õ|q)ü>ÊñÃvE’ pV\[½á):'o³¿ùéj½½ÛF£QªñäœñÜèáµk,Ìâ¬yL¸ À“BáêGUë몥̱râhM‰ytÎ ui-Añ±=9$x¨m 3[ÅÒû-S˜‡Ûg9 Ö †;xŠc]ŸÁ;º®õ\è€ni,±´¬¹´Ÿ{ žb”≗ó·›dbð@×î10O?ÏrÆèØ~U¶ ð‡ à™[ç†õ¡Jy‘³š™S¹è)æ.PF9¼RD±ˆØÄ®G!Á¼BÅ›JcbrJ¨‹1޽”˜ŠZtМ½Ó‡uÝoz&T¾dBÙ3¡d(\‡ ¯×!öóÓnî¹ùçîËÏ•/3³ª©v¢uÄ`F¹Õç¬"Èl¡ýDKð¸ hzn>× LXŸ $—cæh‰rQÜN¬-˜aŽ‹ 5#AQzðn:žd¢¶7üΙ,–¹È¸EC²L‡¶QÎe§h\f­D©6”áцÜÁÁ! 
·—£œ+Io‰O¡§›–2~u“²0OÒˆn½IÇÄñ¨n8PZ YóòRÓ±Ó±wûIàIRw ƒ¬ÓúËð}8‘W{·ûòüêü%_xÿôewX]äVÁÕ‹$i´Þx«Œ™‹øý®îrF\ÛW¶2 ©ùµ OEOý†×“ÕCö9&%o”‹âzŸ}X:}éêÔoúÑ@¹ Äd(Bê@Apهȴ›:ã‰Kª_¼îýîÏaócƒ3{uÀüÁ®ÞÏS®@uŽ©L_r!df4ÔÓ»DÅxB¾ ¦Cu&v ‚Oʪäík%`ô.@L‰[H(çãÒÔºöÄ>šÐþ­¦–+™ÙþG<10ƒB9o¼j4:‹â¼íÞd2ÝTda ¦„è™ô¯Q‚Õ™øöl :˜ ž¬vHQÓ3F בaa¶dO§Œ¡8ËŒ€J1ÆV§Usø-Žžg{*ˆð5CÁ½–L„@•-M-›ú=%µS M Fb"ðd×Ã&lÖëýñË[­2ûpð6ƒƒÌÅHjÄš]XJgìlr{2HðXßÔ*¼¾’¯÷÷,cÂh24•…Œ=lÂ, FQ:›<ÖÈû(<ß÷œž¦‡qhi×ûáô¸¢_:;`Ì®K%!G—‡D¤±Æ¦fa:Ÿ% Û×`—áINÓ(ܶ©ŽQnh ‘œ¸©y7â ‘f$I%ÃgaÊP€ïÀDp9€1ežbÕ²0/,|øô<Çïz3‰º:d䜲 !iÚ¡YKHB‡`E‚'*=b^ÛÔ[”"Hè¾›ÈÝQñeLSªNÍŒJ˜É^, 5Ó“=uÐÃ(ç´ {-œr äÜaßáYt@<^}øñð8ÜÝÖ\`4)—ä€ãʙڋqHÿû4lNÃúqØþö¼@Þ·9ÛÝÑåiø²{x<ý@ˆÁ5ˆ9õ˜\žl4·BÜT> |Öß‡ÆÆsnÐ%Ÿs‰s0`NËaUŠJö8èái{~w«rUd.Ó 8k.qÑ%Ê9ñ9^?ú F‘;$«HðˆÏ÷ov|oH säU<•Åu‰óá‹7 —|âJàh?å<³’CjtTsVè1t*™ã$å!Šßiqr:ÆÔø‹ðH;zGÛáð¸[ïNÃ_OÃÛΛýñi»z•¹U±ò­‚Wëÿ{: £š­žš«•ÅÿûŸo»³ž» çß¶»‡õç=:•›õýúónÊððÇoÿ¢¿ÿ¯—?¿|ØùŸÅ.qªºút|¯Úƒq,·Àø˜?¨\­ZÜ”[ï³”YÓ ]˜5øúH}2vŸï6Çó|x=rUßï4%×Íñtã—7ÿ,~ùð«ù5þàq½ù6NIУXµkfSŠ}4¤n,ëqß%ÂÿVluü{8}?¯þ¨GµZ®DköÁ¸ºñ-»ßÄ͵çeœˆ¤G0øUûi ÝU|F…àlž‚'© ¾?=†ÓÔ#¶`jérÞ|2Þd­©ªö,—\Q>!fÏ7„›·ïâÉbéùÄL”{9hµÉM7P£éX/’p–“_ûuf®`(Íûrñ”°¨/ûù?n8Ï×-¡Ú AÐ&SýŒb”³PÄ…Z³X€¾ý!±OœY®½ªË×7)r3O­å’õ…ÃJ{4€mþBPˆgVªÞÇçì¿OQg©«Óyd``úŸžåœxî•y*Ácû©—àImöýÇa?\›JnÊ!8¡^Za”ó1˜…oD0V: ž¢Q9½¨õÓþqµÞlŽO ÄXVš!6ïé]½­§¶œå¢ø-G;& `§æïƒ¥xf>ö»Ìºøµ\â(©DDÅùª(W½)œ\$ §!‡|àI:Crƒ[탆èm§ø9Çê@ŵg>lÅjv×ÃBˆð¨[ˆëC–ÄÍ:%:XW¯6z–‹.ë$ 5à®`Åv˜ÿéx’¸ûÆÇó?C±®®XªMŽÜ@¢7Ñm£ !òt¤èÍw0"<¥MÔp¢ùß®6ëQÜTSµVÄÂíb(WyG/žêœ>ß¼F©h0á'¾áA¨64C)ø\|®¿T¾ÈYß&H@ˆÙpå\Ko¾%xä¹:g]mö»íñïÃþ¸Þ^*>ÿçj¿;|ÏY7ÑcŒÌÛ¯³\H*½Œ—ÓáFÓaY‹ðdÝÁÃq;<«-ÔÕ–½+Lâö!”±YWd£g±¥ýôJð8PXŇáñø™öÉQ[ܬ"²HY­:À‰ º‡{òM‡™z¬Yh©_k/ÖµÔ*8ÃÝ9 œõEaéŠ((@ç\‡¹¥ïÉ ò<^sm‡ûýñÇÝ0çì‚›eº12ÅpþÊ…Ò&—pr:Úè;\ÎHð×*Ò¾V “ÕRÐ à‡VaVÀÁt¸r—à± Wî?i®˜V“ì'Cµ¦‚g.G¹$NÁif¡Î¸!çz™š‹\n~7~¸Â<Ƽȉ;:± Îå¸Ç]ôŸïÊ@¤Äh“|‹„öˆCØCY~¯È_D±8Ï#O]x&;W ðx²¸Öíõ4ã´ êJµ#ß™8c”“×XkNfDåÀûlyô®}Žæø=ÞW/Ý|‘³nÞ-ýk±‹ëãéÌ**›hìE;w ,P/9Xö¾Ë ¢ç“b˜€⼸û¯§Ýæ‚DpÿµÞnW_‡õþñëæë°¹œU0kã‹aB ’³E\tJ#‚Iì¼’\ûSðó÷DÜ<>«U=|ï]LökI¢ÕU¬ç2r)F½w@8Üp`Fݱ^{`5wàˆ,µÞÂREã÷:#`ñÍïõÓ Ÿ‹=æs:ñ‘ÌÇkþ–Ç‹ÿ6¬ï^x¨vÌALT眶bÎ/ó´$µb—­ì•d8=|w °Ë·ù½®ÝïW›Ó#á&eŽez¢0‡ÐjС/xü^ŒÚYì¾B_Åù¦Cê`J™€§´5¯{å«6m]›ƒ/0ÎsÞ;3Z+é°u:Æ`R‡¿Ç›ÙPŒðÌy.÷³Î†=2]m‡ïÞ¬ç ÿy¸6»ïc ”¹9Æ}ʇ¸ƒrYÜŽ·=CÁtœÙõEr¶ƒ+G߀ˑ>ËùØi½¿6>¦šÑg½ºº^£w>Æ`¹3/”sI!Ô[Êb^oC{Hð8£`þƒ¡ÓiXÝïî‡ýîpvŽ27É€{“ ‰#+Ê™X:-ý9d>{uU3½Â°{®mn>¬0Œ>yo=DÖ’&ïÀh¬™9s.™;ÄJÓñx~ùBùŠŽÅjwø>c?FyFc€;r!Doj o4“à.(fR€§À’ç௦Uu}òŽ™Ëì=zfÙq‘9Ê¥X–/Q9Ûsè°‘á÷ c4ð$…€õéa<Û£íÿxxîóKu%êZº:¶ì)+ÊE›–¼ñ^À=Fè`y%xJѨíûNc̬bìœ]ô‘CˆrÕ‡SWë<îIPæ+V‚GcG}Ú)ö¥?ûêpöF˜ q÷/œuJ~ µ|ÅœŽ0š³+Àcõ:Ö Ú*=@À1ó\¨K^ñ!1¨Q.–¢w4‡“¬)t¸á)zëùîiÿ¸{>†_0ÂÕǤF &ÒÞpn ÊåàÔ;–t:j0¦Ã¤Kðd…²  Yp÷Éx‡NŠõسœñzk{?epCs7L†'*œ·®žUøÿÌ]Ýz9Ž}¡Uš$ÜÛ¹Ú›½Ù}Y®ÄšöF’ÓÝóôK–dG²­Èbq;þ¾î$f¬àA8¦i4ꌱ5ä‚óÆ4®¸^c6@]¾HB!jóôâRg¿}R `k  #ñ1cgæOç>–`$^ÞÈ%xZ$o½é+§½ûÇáprlXÐZr¢#!&}çu@›GÅÀ¡‡™ ðŸùôqZs‚©xŽ ¬æqηۓËY‚2Àòö-ÂÓÀÓþçÝËá°Ö;Zåë…ñ´2ê- zcÃ>í´$áL®!ÂÌWóx¨GÊÆv°p žvj¦u(XÒ‰? 
ñœq\EãÏv¬,ÀÙá¡z!ž?›OÍÀFu­2àq d#(m4ÁK sNA»ê#óY€{ºOñÅ/‹®”÷«À'ƒ`k.ƒQ"¤Ï% „³«Y2º°¼qKð€k0‹Où¦«Ãúi÷˜Áݨ4+(­Þ !ºqÜd–Ÿöm[5õ8­‹Œ[€§8[êëד+vÆD2Fi¥Á<ƒ„±«ÈXr¢‚G;#—à‡ãñ÷è¹ÜvT˜Æ6§‚ÄÂ\¨ª¸#Ä,,ÀKœê"D…wL9‹×µ¬ƒÑ”žz)‚¯qlð¼ô83µö”À˜*íé.ûîÎ9ÛYoI Ÿ|¿qW>¾f¬ISxQMæf ÀÿNÿäÇ1[ î`KŸC˜›€hðP‹LÑ·´Ž Ý^½ð,òQ‹˜Œ¶1ü\¤3è &ü­è@Ö4ÛQ“RÓ€ä$m†Ýþåûöq8|»ø»OíôblÇç´ME—€ßƒ8Žþ^Äv¹«ïêNÓs¸ÿ¨h4¦O|ž¨Ñö …ç>´Aƒ]3ZŒú½©f+“‚¼ Ì“ÍÑt¤ÐbC ­8dúP‚]:Êhð´é=7±_WlÏ—×7ÒY2U€g57ÚžAµ\±Â6Ù_g"¹¤  é”¶oøh@&‹sŽ5BLvM¸$ËàçF-_ŸÍ†Á‚‚qÃ̾I¥_”3ú£ºåXS vÐ.,í0Ï¡ˆZ¬Ø‡"Z<Ú¤l—jÛ1]SšBïA¤¿aBeÛ³s*Àü%=´cÎT¯Ô¯ö£dèA!‚>â@N…§a™Ð*õ“L¡˜6,“áÂë(ìR2Ì¡VÌh»P(ïŒ Oh×|sJýBt4çÝJTŠ6ëe™¢¡²ÕhAYfP*:@§×öY•"Öá™UÒET®ÌÀ@¤  ø×"œc~µØÇü9ä¦Â•¿Á‚ sÈ •}2z£ÂÃ}¶—1:ê[ ×¢c)ÀOî ö‘¯@דD+Óå$¤ÇS^DXŽÿó—f]+: q¶Ñnñ%¼†w¡á=FÔà).y®\/ ²ýÕhï_¢œC„5‚xèCƒ`­O\8$q»¾Zßß¿<ÿª-zþQ£¼Ì•À¬ ãBúbQæJ+mÀ>„¢|bÕà)%ÔÏõãö~}L÷Çp÷ðòòûé’íu¿þ”²ñA룶Q&“‹è¤%N3Ès¢•Š}‚ĤÂÃmڟΉX1þš{kjä‰Úøë²rÌ ’µ`c”Eµ¦Ïæ¥Ç3» Ïí´Ñ›àœs g±ÌeY6—5mK­÷Eš>38¼eO'ëQô‰¬¤µØc4xb³”§í.í é¹T^úýÓ·3¾ý™oçNéDç™sFD!ó Ÿ"(³ê 0b"o%x°Á+öýÃýÃ[U­Ñ3XýÜ®½ã<—Ü4ÚåšúZ¶¾¼ûÂì,À;l*xИö3}ó¸Þ>MéS˜I1776(:à¹ú(5(Z1¹h :ä1á KÍû˦×,øŸ‘œO¿$Ä Á…ös6_õø©ªýd® ‡#ÿx$ïPZ½DJ†ßþx{¦S—þë<þ¦!ŸE`‡Ä½ô9lŒ7A‡L‹GHêÌ}%Úq:Å«ÐEšóàèªj#§}'€6›NFΪIgÎ…ó{€öM›m‡^£nA¶8:$ Tý³ZˆõæɇÓÈ€ÐÇüì­üq»9™—¿¿Þ «Ã_IÙO¿M(XÞlÒ”÷xkÒ,‹qVA+HìQ§1Ž7ÎhðX3çñ“RG}¢lwôR#ôÓ8 šûC V½™µ¸=õ1sòHAƒ±M¦õ¯†Â_M¨ÜsîT‡ã¤qÙ[Žù¶Ä* œ—g] ¼ž*1h$cìAoÒ¡"hðDXÂ3ÕJ¼€Þ‰(Ó,lê|@Wkt=|Ç}ŒŽQ‡¸é6°ú±>>œçËFOÞ¨˜œÇÛd7ø]½Ñµð©Ë¦à­!½‡EÎÚ¢í­‹rŠä(Tlz ød5ÔR8߇˜»*ð,HÕ~XßJ&©L@Æ’öxE Ɇ 0ák¬õ„Ð Óã=xúç9 |PõÎâÜW›cJÅo_m±$Õ(êMíMÀjÓ;dg4ô¨î?‡w¬ÀS\ÝûÖ“Æ)‹‘@&?±Qàe?óé¥g5´‚€éòŽTx¬¹¼©ô†nAæ"£Q@…P¹ ¨!Ö›_+Ú>ægëœ Ol³*ö2 ¢Ó’d0o%P#­&C:~› %öqÓFï4ªõf¹µ`õr÷ýõpªÐº:ä˹ã¨n1|è}šø¤8ÖN6âª\dÔõ$ÑŠ}¢>sŠƒ­oÕz¯\ëAæ 3hNç^ýðõ”‰ÉÛÔHÇ}Ö4é$¬8ûMÎïeŸ?+}8nV»ß·«áù¸=>9wÔº„Ää‘i„@;ï 2 |5eÔÒ¹>iDšX´<ŸT(_Žd"qÒB -Î.³d¨'VHê³æ“»)ð°›äžZé÷¯IÍOÃjøó¸_o²þó¿.‡@ƒC¯H„HrbMø{6îj¢¨s}‚`!÷Uáá¦=ÁzÏ*·rŒ49dÆ*b9íŒéäõdÑŠúÜ¢„˜ö% žâ šÉ|Ýoå*òI95érû|<”NÖÃf3ª_«&å"(b?!ê«R-'E5‰´b’íãÛPúRäg$Üízg¼iúÍ8|{ûÝÇÚ·dåh,‘ÖH ¯Òy=Yˆ¢ÕˆB²p`EÆHÂÝ®þÔûü¼ xY9@ËÒñG5Âìz Æjh…`»`ÌnVÄØR{¼_ˆ“•#³Œ1D Rõ-]-ÂzãkEÀ>[縞"’ÃÁ·7þÔî¼5.e£ &(â Ì¡'J€WSE+Y§ä^5xlé÷ãü^ƒeT¥lHªÐ€>¯W‹¨Þ¸ZÈ}Rø}dI͈­®ëßçÒ§jÃdÅX)æ*J¨ÛìŽnf- ôrÄ.w/˜àâ!OÂZ4px×«»×çûS0ȲÌL§w ^çôlPã¬'‚V}ˆ@Á+ð 4%Âyóý à(!Zë5x‰šAÂYM›— Ü% •«¦8Œ <Åå‰oƉ?/µNŒQ¢…àƒ§µ³/ç'ñÕ^+€‹} Ñy£Àm*4MgÃ81؈Î@ «̬Lj ÕTpƒ‘‰è\Ï]B£Â<¿<à»zÓÛo7§ùådëGËÖ×û°Õn 8°Š·ÆÌ-©u£þÁMd˜ÌŒ dŽkëi5®È GÈ}2³Ø‘ââ!ØfmO„5S ý¡7Ì*Ð<¿¿‰l5%´Òø>9Úè!}’âôá­kêí/å÷wëÍ*ùTþ5ªe. 
7ÈÈï¸Á¦ù}$z¼_ºMZúxXëWÿOؽ·SúÝz†œî×X\"ÙNW»ä4ý™ß/”ø§_ž.—W¸’•5ŸûÎÏmПqµt¿ì¯Œ÷Ê¡ê]$É Aú‡ñ—Ç_M€æB|ýwñí‘kuD¤§žx”~†ªý¶ß š|ûïýý*ymÇ?A:Ÿþ—¿~øï÷‹eäåŒ=vëeÿøO÷¾M·õâgi²¾ì§?X%+ÿø§¾OÜïþüÇø·SNbüÙÝÍç~³œÎÒ=oò»i4|»¨—ÿµZÿ¼:O„þøÃjºÙÝ®÷é¯ï†TÏ?yоFŠÿGäÊÍíþUüózÞýå>:h”bN{Öñ§?àOðoñ×Ómw×í{д:ÿ—Ùf¹˜-öˇgz:íMßIÁhmé.8¸]7Ñ~Þ†£op×áÞ®2ºÝ/¨Ô¸ßâ×Û‡‹Ùã&øMZžóÞÛúæÿ›=çb‰6è›x2¦è“,Šôä$ào8øÑÏf*íçñK»ö?¯¿tw×Ýöã…úx/ +ý¼©'Ë÷õ“ãñÂâ þ*t\$Ú9-ßþ®ßv.^Ûö×û×IV[#4}Ç ý6¤ñ»ø'Œ³w·ãbš‹¯Œõ—!ÈÿûN³øUC/²° ÄF¡êŒÖÛu€2`‡õq8x”+S¾¡Mâê®X—B3€Üvª-2Z´áàñ²È‹—EÍçÃ]¯§ÛÃkßG­nwÛÙä~סNUÃßÇ–¿Þ PC=OZ†ª”f oòü‚ƒG©‘eùžoÛ-VŸ·ÓO·ëÝ~r¸¥Žÿprx‰Õ+’¸PèÂê@¥·pœ×ì¶Û¨Ìœ·qðxUß<¼Ð%q¬‹Û©Œ—†¨´Y7TJ#Ëa(IdpÓà 5#-ÂO«EŽ ‰ëœÝ)£J)t®…©o X¼e`oQAƒ‡ÝhާËçÝ5)rðQ^ ¥³@¼ðHãl1”à- ²SÕËñðè±À/¼d¨Ñj‰ñ*…¸Ò8;ôŒ«ÀògR–<¨óŸÇ‰ºkÿÑÚÇS£¤K=¬K)½–F‡µi\¼¹2nù—ã.vý €<<`Gš€Ç[Fj „cùÆàiرDt]€Ï\vÓÀ °ðøªVƒêÉã ŸÝ£f“NͰN•>S÷XqººÚ´åhÌ ¬/Ô;‹Œ­Ra°ähG•ZA¨4ùÍÚSlVÄ}¹–á|63d€Ð€ <Þ5´«é]·ÛÄc¨U;¬U[¶ÞQäÆqr¨-'‘X˜Ü T‹‚ƒG«2öb {¥LI(0`‹wù(ð8Ž}ÚÐŽÕ )t J0ðÓÒVô´ÃM¨äeìqbGB§l¡ó†bœæ€ v- GÁÚ+…!»‰WÔº‰ãâ‹]ÓÒHäÒ™!4°íîÒ¥zo‰àßÇzvN*¶òDýü¥Î!u>üõƒyxø•˜xOÖóçgIdnlB MÙ¶xPèfÆ!ŸÇŒ©O~¡šõ*–à+4Æv»É,]J覫ûMÒ-þÇÎwÀ²€pºî1Ô ÜG7Wò«pxº\ E ˜ÉÁS$:ÅÍÙzÛ­w“ëõzŸ^|ïR1–ÁèR]â¢ÂeåìðIs'<û–Ë…Å’EÚêaPGkM<êñðk½¬Î]À)êr»½_Å÷”½Jw—÷½ïëM¼.ÞkÔë8:.‚ª–Æ9WÄÁ)K÷|ø`αaá`ÃÀŸ6bêRâ‡ÿç 8NvlùÛ0ƒ!“ª_‰‡‡ËnPÇ}ÆÓn ´ :VÓ’’´3émÒüyÛͶÝ4–Mz”àm¿€Ô(é©5B”Ã>£ºT À[E®-7Ømý RWP8C¨Ÿveâ)GçHƒe·g©Ö Bµ Ñ M­Ì8n(:Ãe(ш³ÏÁãÆ´*íO­®žæôj7›]MW÷‹yšÛa½i%¬wЏlÐã_mÊRŽ(¶`á Å À ]&·b-D—Ÿ<]óκEσ¥`)YÐ3Ö®‰mÐåƒiÀ~ ô| FÕ™aÕ! ' ÎU*Z×¼Ì8ž¸ù²H ²> Xx¸×æOuF¾¬tЪCOÍJ()bžPÏ䙵~þ‘‡ÇêºvÀ«N)%ƒ APã8Åu›q•#…Sõ €ß£ŒÑÆfà]Ó-H÷ÿ¶×ÓY,üËC:ç„2]°:C*“¸fSbáÛrPƒn0ï<¾ÊÂ?Tá8d^ܰþ´ ÎÇzyÞ8Nªš^À<å€7 bëZÆ»äKÝo—I­’Pk|tîl” Œ…*ËŸÇaÞÐÀö³ð´°DHmd`¦h‹ã@¹–‘‹¶1lkÀÁsîe”çkþ/U—ô¥})!Ð ½Uÿ _¬ð²±|¦üß.>üuu;]Íc“ûÔ¡ùCìý͇ÔÔöb{=Ýl–ï¢þñì_Ὰ84×|î þòÄ–Øóîf›úãTÜÜtøõO zL'¤G)…†š%ÿƒ¸«ÛnëÆÕ¯Òuî•ò`nçÎõ¹Pl5щ,¹’ÝNæé ¶Ù±6m’5kÚ‘aëø‘A°!þëáÌÛÇpÉLáòõ·pžëÆÀŠ¡¥Â›K­´T(ðâ€s–þ²Æ~lƒ•Î$çæŠêT?ý™^‡­wkš1/3ç´aÇš¾£Ú™bªí†ZƒGvŸÊJ¬ïîøÜñÑVµ~Üþ~Õž—GUç…# ·ÀIF‘Ü ‰­9[6ú¾• Oî=á… ?X"ÏH”MÎä|c%”!”%E¬ã6¢"ÖZèÏþZ÷…Îg¹°´¯ä æ,©t”Eq)ã*‚!ö^² Ñ€8öÜbÀ–OßÙE5íC«û }x¿z­&Àj§g|ã$!4j7ÝŽ®øyxÐ,Jî­´Ÿà4s‘DÂâ¥Ë’óÐ>åâ&ÆÖC~€ó§ÁÓ!góC áÎ* >ziÝD.Õ±è2½ Kx½p‡ªÂ³ ŠöZÚºÒŒ‚ÃŒÅñ”ú›¹`ݘÉnÛANfÀÈkð´?ÔïûãáðtˆX#“3'#º#$çã‚Ncš*`ÏžÚ¹ O£¾ 7xJ‚¯œùMfÎFÔ"epíßg(™«;"wJƒ]ïi?›xJy;ˆ6ÍÇ!HÎb6ú6µæ«N‹þíztx²kW¹Þ¢ÁMáéi‘ëqwª$±l†_ƒ'÷ZŠå¼åw4³£€”ÿŸú•Í¢j4ˆ¶ÿØ«ð`Ó”I…Q­`Ô”“sÒ»¡"-öšýUV …8`øxÐtúqÞr\²‰)o¤Ž_æ¦ù’툪Q¢cxKs%Ü‹™s6A†Šxcðþ€šaÈ(×âÉFëÞýµÞmï×OôÙß›/߇ïoÞ½Z²r²#™ñåQÒûwH.¾““lðNrUg„®K}l†4†K½O‡ÜøÇãö¯ínó•†ÈxiÞxžû.xʘ¹™ØW†y0e5º\ÜäÕ§<Ù)åÉ…ë9O:sé‘.üßoÿû}{&äã”Ãv##üºŽîÏ÷+¹§½u[z>ø‹Mòóo÷ÛÓú ç±Ý­×_¶»íÓvsâü3ú½½þÚU…J»MY!o,³<®í{oæW§äóô{+¤û–æ"]kÕðs>Á%OºÀÆžþåQ”x ]éx6ÕOË_/@ä•yC[ŒD*ŠæZÞ´¾˜1µ ¦0†1hQ ž&7uצéêB¬XÜÉTÉ´zÔ Ÿmóze‰Y„x1Gj5C?„#ÁÚ,ºþÏ¢;½ySË~R5Ö٤⫠ãRÔê̘Ý%8ŸcžÔæžï]°«Æ/v27jñÛZn´Å½˜/ÞC•~nÌÞR+ðÜt/|‹ÁÎÒ(³¥ýl©÷li…z1W [[£Ø1\A$µjð@˵E˜›rŒ!ð˺ܘ[¬)Õx—ò#šä|^9 áG´Qê1ávMÖ’šÙ";¢ Ù™ Ô6,ZCths£V+7fíˆtX Uxô9ÅoŽˆìÿg°åá§£žPmì,lµ3Zhñ×c<Ïz<­v‡kóIX€³Ì ojTI¸pÃX¬Âbq#ìUaŒ?sÌU¦WøúeUÖÛþu¡ört5.¸\¡H¾y[i£ÀRÕ*šÌ˜&Ùl}ž¸<Çmf ^›GN‡‡ãjúýb9Ìš:[£GõI¸þÅü©ÕsЉ8ùœ„×O“œéÊŸ»ÃêaótÜÞŠÁå`kŠÖšà³6o'ÌUÀ‹R­Ø˜û¿” TáÑ7Cú¨žË¼ÉŸŽÏçG)ëbu9úš8r]q=œªc&-Q/æJ­v€C¸\Þ¥jGƇÝöîGå×»ÝáïÕæëñÜw†/v%¶€É>Ô௎շŽ”/µú³û€Åèb Åú+lø7‹»š‡¶æšlîMœy3ȧŒ5Šº8†A!J%’'Ü~4ƒ‡?Šùå -ÄèLs5Úòç#ø‹ÙS«fszn6œjðÀhöLi|eä°.B¬Qd¶:\Sþ|¬ÀbeR•¢c¼ãj<êg†7 ÀýæõóîiE?øQì.„Ñ8¨¹Ñ‡êÀM[ÜKù‚–{¶Éúá x Ÿþ]žÔ¬îØqW/‡ŽÑ5ÚT»==µXÌ¥ZmݘH tܪ¸1GucÒßÝ8Ac49ÕhjOé½4XÌ¡Ä ñ5šæ1ªÅãâ§RóyóW‚•ª³Mš(·‡eô#r’ ÀŒsÌR;‰ úvúgîòqóçóæ´tö­¾Þ=®Ž‡ò£r” rdsŽ¡F½ê˜¡j-f[­úyÌ =[úožlì?ζíú†¥ ˆZÎ\̺"É#[÷ñìC…–2,'ë}Å­u¾È¢¬ù妗_QxøU"ŽYU3Xãªð¤†/]ΙàÓþ|>Í>•¼2b¥ TÂ’—_^p›óö¥„•O)ëÁf3‘Ràq¦÷;T!\øM¦·â{’³¡]oø†\Uhàü€)<Á¶kêz<ü›ÖíOçÉ_Œ(Ñ ÑŽKÿ‘v°ÀÜ6½'|C°0àô­ÁÓ¡À±/wøNy*Ï$Dú"»»Ö¡t÷ù]qh×¶uC5 G—5xb»ÞÍÙ”/ãNŸŸ>­ŸŸ¾ŽÛÿ|ØüÖJ„à«zÞÉ%]RÎ2ê 
¬À;ÂáSáÉ!X›8@#J[Tâ;F×lèÂ]….h\iðØvëÂis÷|Ü>ýàÜÅÍ¿Ÿèdàõvÿtúôò£_,+±lr>‰Ç*’‹ŠŽêY­À;"¸¦ÁÓ¡ôØ¥ý<ÛÏ ¡w.³´£IÌNk×ͽsš€ §ÁãÛŽëý× ßÃß­Ï í«=ɺ›ã~½û¥ÔDö`“ïXΦ®KB¥x〓‚OŠ–áÆ…kå'Ä,AtsÜõŽBsâ*‰ÉjõJ?*É]™‰T"ï² $çœ0¤ñWà±¥ªð@ÇùSŒ'Ü>eÚüéL¤7’›Ýÿ«Ûò=¬é—žwëòP06×ÜÏYƒGŸÀ¹~Øœ×ï«¿1H#öY."äÙk:ÎÞ*ö)Àâ€g²<Ùôžº³zñ“qÖ{ZDæ÷é"ç¢6*¾œŠ*€3M+[®³ ›h¾Zí÷‹¿Ì×H‚)¹=D*Z9°Ýgµo6â€qWàI¡A‰¶ºP óf´Žcœ‹ À&¹Ø´ærº* §ú3@ƒ'-* zÕb X,erc !©¾A'¥F­‡íR0Ð <ÆMua§t6çrW#À&9\v³y+?5sî?Ð <ÙšÅMp+–D Á;ææx9æÖ Û`©ãèP}£È9Ì‹[ß.'©°ïßO‰gQ(åÅJßž¿¼Ô|­v ³¥œ“ YZŒø%jÀ'±%¤¬›Íï\ƒG}©6ž° g—¬6”fK6êE„ ÜŠžÎº^„iMÿt3}ºÙ›ßµ`8°ÑF²¯Lè?“¡Øh¬Ö*<¨Ž¯\»ýíÖûÏÛshBØà"¹ƒÛm ãBG{15@qÀ Zƒ'ÛV-je­`G:è¹ û‹$gªƒ(M8YѺ‹µw}¦±°¿%bg!D"ÉmÖ€v95¸ã€Ó”O²-Še]³â›‚S\z`ÞŽ|ÿâR(ÊrÆô™Õ¹D†Z'„†³úÛ`ßÝKCY'ì{àá ¬Ô-„û1Rƒœª4x0·h&=¿:^wî€Ož^ÊD+r¨ÎônHUÌ” ¹`¿.ì%{ÉÛ(÷9]Ë@‹îÑ혩Á>bÈ5x Ußè 7H¸5B°hד˜JrúÌ…†L­‡é݈ÑVàñ­·ð¾'^ýŸéîÀ {b."åPŠÞ’\жñ0÷Å ýÛ2èðø4`¼gWÇÄy3!‹í¶ÎrÆ5j¾t5*p’C #œåú?Î9O6t¯À“׸Qåcò—Ò~ãM’ÇØ9pK€ëÁcP¨\sV¢ÿ›Û3žì¥ |–‹7:á‡ý¶ØJ9)’`JÞÓŒTt§È9¬Ý^ÄB4ÁÉxƒ·†ž¾MÊŸåRϽºOXy[ `I.@ºÍ%ïÄSÎØÊEž²\ÿ8yùrÑh£ñÀMy)BdòÃOßq@0(X²†Ô¡£È¥€'~qÁf#–±(rˆ8`ô OÈÙŒ'ßОBi3½ØMØ£+MrpFnæ£~Ú‹˜ÔqÀÁ]ƒ'™ÓûZ$[Á’)£·ÒU‘úLñ*ž*p¦<`Äx uœßÂFÈ S ÐOœ$gB—ù}+35Èq€ï¦Â=æøóþq}÷}õåy?¹AÙ 6äÝÎ%!Eï,gRÇÙÛáôÜ5 ž¦ùnûÍ9mŒ~DŸ”÷Ö ›!Xt–Vg‰˜$gbê1ÅõÄÔ`η”U/ÕŠ®+R¡°v@0H…'7«¢Æ¯HNwß6÷tº:ž>½,Ò¿Ö¤“ˆÈEB#iÖ€P$T½ÚÜ4kjÁ’œ1Æ^ƒ'õ^r„½mÎÞ{+°H.¨“súUÿ"\¿úÄiõÉs«E4¢’<Ö7L8¸ûF¼{y§øÚåãÊç4¾Yº×E°Ïs’sŽ`2v_ˆl+°Ö¸4Pàiú‚g؇§)àã‚NÛü$¤õ’äô5O†²U¡É€Ç:<78¿ -ËÌ9v»ÍÙ¶BX0ƒ±1’ó&é»oË’Ìõ0íˆ,B  ýÖ€Ù¼-øD?ð.J­‹œ3iô aªN—Ð=¤©ÃÍèUà~Í¿\슂]ù¡)æœ%=R6êN ©¬€ yÀð+ð`j¿›…y›YN‰HAH˜:Ë=ýë)ªÑ#@ÿ±×à‰êƒ™jýuóàèL÷°¥óÔùp7™ïl½çó§kèdÕé×WÓï¯^ÿ@1t èäb£±’b) íׂ*R+0Æ OÂN Aœ·™sÎçDÛ€‘äôÐ’° ÅøRw2hðè© ýæÃÇÃa7oéùp~C+õ/r6™NKCl†Ñ…8€ <±å-ÉÛfz>Í›³èR0âvÆõâÐw]“V¡Ytö }Žê/L‹ìÃzOF¼ÿ}¸'ÿëü¬Ð òù1ÉË%9ï}ÃY¯d°æˆÃÏ õ!ªÍ&©mC³sÌÀ»g“r 騜MÿqVá†yÊ7ÄWæoœòl ©ßT‡f0g"o ) ÀãZÞüRlØ §èHtü>S@JrI]]s0Y5ºÀ€…@…'^~FZ²e‹tH-¥i%=RöM¯ôlV ±¨ð@×u@83'Ç}Gm–6­Ä{V½Ôsµ^î¿ÜŸ<”á}I ŸÚŒ¬D[« áµüÆš¼yI >Ûº®+@n†Ô';`üx vZJO¿ ‹v!¾¿”ò{k„uz»0U¡E ·¤H¦)I‰ëj–ýyà,÷`¥­Ÿäb &Lݰ+殳O2¾ë„Îè7ôöÒÒNrÑ·èãÈpi¶” ¶£ªÀ“¬m–£;Yìn·½?ü½ßÖ÷§OÓgïÓ“V‹>Ðþ&©fg¸˜'H+OæWk©YznKŽ*TÀ8àü£Â“[O{¢Ci ½;|Ým÷ßgÌ*\!eà g.Ká©*à‚©_E`Ìó^›&ç¿3Û쑹}¿qó.#ËÙ¬ï8ˆ¤5ø©÷è—ïázlÂëÞ3ž”ZÏ}Á–I°%Çe½5FÂÆøÐoÂûv0û{y*<4®Ê!çg,¯M{hœ_/ˆlóf²¥@ü"rh=Á—1Q=öOpÕá‰ÍùýáiûÇö®\õÏØ›B2(äi³´|X@ÓjXÉØ4`¨xÔ—X3fö9ܚ茴ϑœ¾Lê jt€{µšÖ³úÏçíÝw‚|š±( …€­5€ÛÍé(ÂÊ–öÄ(ÃÊÆhúŸ“Ix,ïQr¥ÕÞï?ËÐ٠쀞ÜnîQ(A&9T—SÂMèú“@ƒ'7Ÿí§õÃ#ýïukfÁšÜoÜ9¡z‘sÎ-ofÒ‚Âõ½Aúž°Ï ]#o2¡°câl"oD:Z¦æ«ÀRÎ*ÐÏ{mG žÐülþcý°“,:ß K7Ø„^è;p–3aÌ*"äoBÎra€ÓÇ߃ÀUàI¹EÞñ5#^ù|ºÿ-¶¶ØÈ9—Ù‰Ð$gcóCA B+4pý³Òux<6["¾mvwßÈÝbÐ'~D¸%£ò§¿ZÔ Ï`³äæD0Y]‹u4¿«u¡¿Ø¡À£®ºßÚ¶ÂæLDr9ø(yC,gÚ…2]£ApÊÔàvŒÇãáÿ7wOÃ:Á°\©A<)‘ªK⌦yÊÆa–…äì€ †¿'BÂ<ê‚)å›~ÐGŸ_mýùňŸ¯—­…mxû‹h¤8~NßlñhOõzEœI¦?=4xÔ¥ µÛ›*üãUùù©˜N¸†ƒ”#AÍRì‘äB° 2'û09å£OIÖ"ù7Mô=à0&”ñ€:°U…­™ž&Šqi.a‰Úõ¡-‘ߟ[p Bån‰ý Àx8¦2—¼ÞÇ<•æ)?ƒùÉ tHÌ·œ2'Js¥»ÏsÛN‹8 i@ƒ'ÅÛ‚‘SŠùG«XVØB³³Þˆa^’sÖê½E5a5€Fœ Tx´1…ýúasz\¿ Û]¤L;áª.—lؘ%‡#s;Ëx[Dqë5‰f€+§Âƒÿè ŸÝ"ó'þo–ºË¹Ùb¯ _D`¸þÍ\txÔOŸæŒu9¤i¾Ù};xÀGP‚·7”ßIÎzM\ÿ^êJ<Úiÿqoò[¬æ­jùŒx-,ßÜAÃ)¯b±e°¦ÿØ«ð@˹ÿLc¼:mžžèWÎÇLÆ` "MYÎh¯ ÆÑT¡Eÿìouo§vV¶T΋³)¤,hÁyq!´œüjóûñ` 8jèxþ¤d¨ÀîöðÍë%ìŠ_Rì¹Øiuܬï§# f69á"«È%õã)äÈ ÊZ àúñ÷šý¹¿qð¾‹}k¸b¯$ÙË’;e<ˆø¬õæ¶HÎB|Γ±_1™¹6¢ÃŠñ´NÝ[a/íåÑñ¨Ár :Œ§Œ/@ôøgðÞ¥ ã Ϊ+mü¾i·ùk³{SÉñÝFÇ¥V»õþ§¯[©¢°f{›Ñ’ã ±‘äfo¤8ì­ÅsáiâŽÌrýs62×äfI!Tàl±#¿9ÁdÁZ@þB@#Z‹†TàÙŸ’õ臜¿Tx´ç¯?v‡¿¹¡ÔÃúM)§sÍw’¼h2Ï!“YÃò°1e#ù_aö%Œf.kH©@çG„TTx Áf#Í÷„£/ã\0—Dg%Њ­®Ù”kìõñU¨µ~€—ØË &» <`›LÒ™3F†9ZÄ࢓v]’õIéVîi@ဠªÂ“–LП—ûåòõ×vó÷Ù/™o–9ßÕ'rÓ%g4–fìMæëÚÕ£õfÀΪÂG»ÎIûÄ%,™ä’\R?“lÄOúnc,È18,ñ÷Ð7¬À³lNOY}«w¶+‰=«ÍývòZæû¢¦(˜$y ‰'øpWZ¤h=z?•VáÁŽ®t†˜§äÞ·‘_0·ñTxÈD×àQ§œž-÷K ÿ÷Ž%^¸éš¸!1hMrœÞÏã)YÔÙ8`„xÔ%-ô®0Ìȉ!O°Š\P;j-ù¨ý€H—OŒË'òe÷Ší'\Þ"o/ˆâ©ùÅfêî‘{-¥@Œ-±Üűõz‰]û®Ä.È}ÀË_G›é .£Àn–+¯äjðø›ÇÔaâÝÅ5âÏbë«ïXVic„èevtY\IÀ-_h–Î^šâýÇ^…'õYoŸ¿ì¶wÅzÂeqN™>ÎFÊÍ!9kŒvS¦öœ‚ŒÝá—‘¾Ç{“,ÊxþËܵ4·•óØ¿Ò5{9|‚do¿Õl¦¦jf; 
E¾‰5‘­Gº3¿~È+É–c鸙,ºËvhë<A¬ž3ó¯oæ«t**jUiE]Ê {J„ã¸t›ÅO$ïEÉ G›ý¼Æ8}Ž÷fº’ÆiœSm–þëCÒ‹ËÉÉæhO©‹^¡³^”ÂŒYï@\ªù`Þü! D±ÞßSZ%•­Õ´²µQ6y$ƒá8ΰ;}6ä9·Õì•9š\w±¢·ÏQyá ÚEëÞ‰Ìtá|ódY&âg‡kÆWëiŽ…ªl>Y!K©¹èDìÁ,3p¶¿Áçâ‰21Æ÷ÉÄŸ®þt±¼\?ˆF!ùX4˜Ùs`w kDV:dob”'¯Æ+KÀcS«SÃUMšiMæ=Ò£UDÛÒ$ÊÊ„$eéËÀ»ƒ‡]Pû6f(qÇm^`6ïa¨ ’mv¢˜Ej*òòV¥ƒÈÁ áAbÒå:ªÖN«Öå(åQJÂZŠb¤g7Cg;p„ƒÇÕž#Þ.ÿ®ú`q·ÝøêÄyŒÏy\P¦áYB€Ï Q|‡Ýƒ…§öä°}>ìªÍuÓŠô:Eë=`~péœjï6+ùËÀ\‡@ÊSx;†_Õ â€û¼çGkqÄb¬‰Ò”ä¨f$8Ÿç>Åì—`båq1HT›ÃeÈÔ#‚ÈÂ#i®>‚tˆÃJ‡<ã¦_,•qyÃ2r÷Í™Ëë"FÏŠ§(}3Šƒb²t‰ 59xäjÆ>¬7Ǧ¹_Ö_.|ý=êäcö”±»—X*ÚxA4k91ÐzÓáæŠ…§…7²Xö‹ûa3|}5èˆOƒ6 +8y§@®ßkKé˜õÅ+(º½I'{ã¦ì …îÁ>ž(}þàW&\F&Œñ° ]§¢jàïÌX- ̺y®%ÈYœQWȱ!…Ò@V¡nz)KáœôévéxÛW-`âa— Bn ¦Ô8y¥¨KË”´Ÿ~Ç}Ùéª(ÉÃÖþn‹‡'å{íê¨8@tökÜt±€ã¸É sn®j¸È€b‡ùfà‰^ò. ý¥É®}Si÷ãKqr{ÉÕsA`]Ïå*hê°ÈxœÜ£¯¹7Q]vò•6…¦ì㜻I~Ò±§æõL˜xØ]%ó±j»ü:|¶åÌü¸þz~tþú†`søº~ZœÆ-N¯#‹F'›ðed¦táðÚb»S}tÐùÄe@MÍëbpñDQßíú½²Q–FœÞ`9ã8eØ D»•!ÁœdP§˜€¿`¡ÐÊwà!®MŒy=WoÖ_†ÕUžÙ‹@óºx@Ÿâ¾—š}ãtº2®ÔìQ/tÖª"ãÍã´kÏ£(¯*ÙÛ-¤|þ1HZè8.T¿·iOcº4± -xŒ•¡E½’‘S€3º´ÝtÊ•ÒÂÐèÄ:‡ëàí“ê˜xäÒó±WññÆ?c¶ó¹RmBÉÌ{RÓ… ªÃ‡ƒG[±xÆ”ž7ïN 98ø²õc"//¨•˜™¨e7vè°…pðĶFâíõüdï¿fì¡ç5fÙò8çå²1$ÙK—À÷8örðhÕì–ûåæùë{­"n{) YÚ5aR”²º­M “™Úwˆzrð@”´× ªAÔW.!CDÊŒãŒÕ=lÁ,æ2¤è±-pðD~g…óuÐö@?Áe »Ü Ûý¢üÒÑéBò÷,ÐI1ÇEë%ÍÁ,&sàÆààIBAä‰ËdïÀdg&¡KSàÄ.Û‰¶t!à¢Ö= ªñ¤L Ûá–ƒ‡ýRðvºÛ‰…»»óÿœò™ÎXZ>›<Ó`e¬På2b@6¶ƒ%âàqU 5c=Éc¿òëšsˆæ”¬ÃÎÕy\vŸÄ’2XJnÀÌ0:ædtŒ›²:©$⦨cL‹«Z,µ3Í@jB‡hOôMò× \y$´ž²&¼³‹Cäq“…Ül†ÿŸÛa•?y?Üÿq^,»Ÿ­ËåvøºÞí·?2D‡å)0 zÛcƃ£Ã+xB”ÜóìîîFžõwÓðØioÇÜ)£A)ï§¥ǹ›žÙù¼eÁ÷·+gJ‘‚‡ÇHo•£ÂQXˆAGˆ"(/¹Cв–!…¦DšÔ´ŸJMëL—Ç8$¦p§ðÞN¡& 7í'•ƒ×ÀŸëôd#GmD›¢‹^£èËÕ¿‘h:=ƒ’tŒQÙ3ÎÁÃ~‡¶Íñi¿^nvÛá¯Ã°{×Îyµy>Ü/ÞÆ\+Xù^Á‹åÿ¶Ã¨f-§f3eÜÿüñ_ßÖG=wçÏ?î×»åçMv%WË—åçõ&ï'ÃîÏ?þUþþ¿^ÿ|µØíF&ÿ«Ùõu¼YwÊ ÜJ*ÃM¡Ü/ãÖGaj™EÚ«.Ì¢ãq¿šYëÏ«çã|X9rMíamÉuUžnüjÿÜŠ‡‡ÝÒJ|>ÆØ/W߯)qr›*EÚ–b·DêÆ²ø›±,™ßÊ-^žÿ¶ß«ß‹Q-)ûk­Ù ¹ªùfuþ?EþЇoT}sÅ<ß°:…ðÀê胵Ú!`ó8~‘~aÎ2°ú¡žØ$t¼6CÑàK³‰\«Ø }ÐÆ;Š©¦÷³?é`ê°¸9xtª|2ú^oŸ(êDæÞmµµÉ#ð˳&öõP;Ê2`Õá&ƒ‡}|²šËÕêùð´g¿ ä:Æ•ƒ~PcpÕ¯E0˜?€ë@žšÝþõÞ5»SËÃf¿8ñcAЩF(QÞ'•Æ”˜ÇRÞ'ù™…MZÑ™Œ=SN‡<ÚI›†Ë›x@. 
}f­W1aé yœ'PÀ¤sé2Óán™ƒGª*߯j„P¸i”Ƥ€Ò`7JÛ‡9HC‡h#;A”è_mËüß/VËQÈÍ.”VwÆ¢×ÕPÊ5 ½æk@X†±CRO’óg(¡G0` 8Å^ó8ÃnÎЂÏÀÐ!U”ƒ'¾A(ºZmÖ÷Ï?mž—÷§º€ço›õÓ·1¶È%|ƺèæÆæqÆi1“߀±tA¬î$âà1"A¢Ÿtúný8ð –^ÒÊ!EÚÇqÆÏXðR|eà´ºCr OXçOÃþùsYX£¶ü‡-„¤ÅÐE;Ù4šnÆëHH‡[º ´Ÿ\ïeƒ~OÏ÷ÃYmÈ,'Cy¿‰E¥Ê8­Ö.‹ƒt=²ù9xØÙü÷ÃËæùÇã0'à$˜¤1Á$ E* ¦F6~Ç!#fè°³ð„&'óKíMN²½SŒË›àôÕÒ8NKõÂí‚Ö˜æ§pÛìøu©@˜V .Ý@Ö§k‘;ÝÍl wЩýÄsðñúÇB½cÛžòí‡çေ Œ}š‘½ã8ç ÕŒC[àöÅTxxæ†]&47uj Óš4ÆÆ¼;E yœxIƒZâô%²–pôT{äÏI%ÁQð¤™CÞ¹_†«ÑiÎgãÃÇqzfÍøÙÀ|Cæ»Ì 3*x xæ†Íþ:¬Wß2È îß–÷÷‹‡a¹Ù?¬†Õ)O±t¯À0–›h]Ÿ:%hmèÈcû^<<ìˆ×ÝŸG„Ö(Ð.$Ìèäqž]Mˆ²cƒtT:Ž‹Ð~âóçk|´8žÀ.À»”ØGß÷4ýo%í”ÎEÅÒ´z³ÍŠÞEíƒÅ¶±oÔ‰8Éç#’ÆÅ™èÑ"Ç–ò9ÙoGÙ[ÆE[¿)¬Ëk¬Íf±Úþx.¸‹Î‚BtVÚGúPŒ¥ÿ9ÉÉ›2Ž]&žÇ›ZŽ[´hü\kE2¾Çq&A­[×_‡Ë&9³ÍÊ_ ËÇ7£è ²À½)ý-µÁ¶y_½¥úm~Ž*ó.d ŽÑ…ë7ŽÏÌGÃ2eœRõ†{x*oÍ÷Ã÷aSÖÍ"ÿ·{Vûõ÷ñ44ÝAÙ–Ü¢oöì£ÛÇeøIÇ­í0÷ 6Õµ(€Gû¢´¼:³Û€$َ㬩}üXGCÒh;L/OÒboâ¦uˆ,‘LÆàCÒæ<\¬_ʳ(™?<&í´ÅAÆöW°ãç$[ÒQp.h>ÙEÂà ­}_nÖ÷Ë}þÙßÃç‡ççoÇK¶Ãvù!eã'­ÚÆc°Îëä)Ð-Õƒ\CªT®ÏÆã|ŒÚSð€Lãš(Æã¯’Š<žšmÒVŽ*QE…>'Öû4N×¾iº6z\^u–pâ"óhÛ8—5sE{n¯æd‡sf0’LGÑçœäU@ÁïQp3hý’÷„ü¥R^þúñîD€»ÊíÜ1ÈØIçÙß© Ü@B,ng’x“P“_ÏCÛþuDõóëÚ»ìqÑ`ÐÖgÈ C´á?ŒjÌU:vÛþ¦˜‰'ɯûÕf¹~œÒ§›Ö§6 8¤*å8ÎßjísØË@ Öµg¨2u,²X¾¼lÞÞB ˆÖJ£>‡5>ŽÓJ~ÍWó”ßÏyO^‡…ÛnÆT)6Ö{¤¼\Ö_Æ&*ÁBLÿ~_-Aû†¹ãçX¯"Róç„[ä)Ò…:!”vTtVU=;ºjö$“a·oÉÄã‚Égm^ô!:ìFÝZ¹7¶"’L‚Ø~úÛW àᱦ&ÿòÛáó°ØýÈÊ~ü4¡`'ǧç$[²q¶'‚ó}ˆÐÛÄžTs›øA©£>=>ïTx~VÎ kþ4Sqƒû½¦„Je¿u†¿¶ JºcYŽ£Æ'B,wDÔe[Ï>Ÿ*1&G‘,vò ’OH­Ñž&žÁ¨Vüôä”5$” $‚ŸÐÍžt—Ï–ß)ÝeÒéx¼è6°øºÜ?œÖRÄ'=ŸØ‘›Ê“4Vd7¸Žnþ¤Sá·ÜÇÃÃ~ÜGs°/´›äæžœ^V²=ü^ðª!Ûay?*9L `bŽ ˜pë|B¢'¶!ÈxBíÛÍ1±âÓµ-6LW8ˆ&Ÿ‘@Yá €ó§>™h(trüèx„Γ Æ#^iUSå‘€ˆs6¼JX5ßÓ¸>DðÚ'¤ðÆ «´g•ÞÐ-,%ªv3­âüé§Ê`:M¿ÕÑ“ð8;€¨zP[[g ÈHç“ÁA´$QB2Pñ8ÝÌ,ž?9쎗«‹]¹œÛêÆÃ‡B0€Þkq󀣞O²X}nÈxÀ´¶·´ŽG}È®/Eˆ`Zü|ÊD‡5ÃðD%rÎx­ýQéÃ~µxù¶^ Oûõ~3”,ÞQëxÒ§,Eˆ¤ªÎ UàçS†,]Ÿ; P> O”<ŸÌP>Émò0\P"g—*ft "¤îä¦ã© rOYúí!«ùqX ÿì·ËUÑùQáx¬ÆjàÇ‘+îÊâžO›”¡fûœ€èx¬hcp‚Þ‹Ê5#.R$p5935Èç“…*šïdUÈx<û=æê°]ï”ZòY%ù4ërý´ßqënµÕ‡U!ä“!À“Ÿb¶“b>‰¨b†N'P$4Ýýñ‚Ü“ðÅ#qÇ‹îjKì^+•JJ/]T¨£åõV˜û«w?úIà-`„úàÑ]\ã°‘îÝí§ç«Ù¸\ Âî•O…˜öÉ4•Zrsƒ7νA™Å8Ž(ÔOæ–?Ù©"TÎ÷ž€Ý«™ŠrG¼Xv¢.œû+‚oGHOŠàGvµeÿÐÔ÷¥b-ùà¤üÐÍùV|û¼oX?‹HŠcWa±5îNê4µgÃ÷b£ˆ*ÀÞ×ÃtUðîI?Á¡2P|ðgd쨬·/̱|2*ç—{Å)¡=Ò•$ûWýs`ÛÀ“Ì |?ùÞx:´°VCýƒ&d#å“Mª”Þ·ªV×¼!ë~’ð|ñhD:»üÄa3™Ç¸ƒ®ù€Æøà[N¼Á ¾½é‡öiÊ5÷ÂÓ-í³'åóótt 1Õ§[+^÷’Ÿ7ZÒ ékG¹¿øvƒöékF¹òÂ#:ôòÙ'ƒ¶\X#î¥>Í Õ™‡šñÜ|¸ý‡Ü=ï'Æ×iå…'8ÆOe-²åyĽþ§…f>9ÂÚûb½Žï¯Ú¬õzô¨úÀÒU,ì¨åB‘~ÔO¨M:_M¦ã­e}Ç!cÂÅ "Òܯ\JÕƒ_ Á£;( Y,L±S£ì§™X‘9–‘MGFue]A;EC+f†«™?];Âåm €±•¦€µ™ótE¹3OÅ¢í!F ÁÃÕÁ÷ÀÎWãÓQžMs:m¹SIæNÍ0ñ®ö¿v/À¾êµyÞëÍãvwL"„Ì¥&Ó}hà1ÛR®\‹›wu l€Ü¼ÆàbZ`ü„zì^¸ÖßþõqѼ‡kwBƒÅ-;¼¶IôߨQní‘RkŸÞÞÁ=°‡öâ`]R’bŸÞÊžl¨6óÁ\çªí*ÕàQ(kŽ•[:UÊÔ tî®›vÞÕQ{èÌ¡š…ÍhÌ£Ót"¦ðšˆÑÖð ωV>0ú¨ícÞ£sÖó°¸C™Xº˜K“m%Y¹¶(„ÈÀúœ….(1•=:`cá¼"b­øq³.¡{ºDÖU­ÛT)…À=½ ÁCøá§uÍ@> k´k?™š&”ÔShÚ¶O²S¿öÓøD¢‡åÃ< ….š"¹ÆLª‹²jbr.f5…`.þ í˜ ¸îô+&ïã@kžîND7IϱßÏ4/\»ÐÓPë€îÀêc'? 
Þì­FoK>ŽËMbåÜ'C;ÉÑs³S% @Û’ÙÝh†àÁ¼;ÆZ °úi%çpUb#Œ9s¹h'‰Ø/8 Ò»<}Ñ Âºã>ž[²q¬³—ÃÝ'…ÙsÓcÕ©V •^·Äܧ©Ï&Y ÙCz^žàR‹Í‡f×üju^ Êô‰û§e]öUHd®9vž>ƒv´íðÖNÍ þXXáXžP q‘°<»L—ÛVÔ± $”€w)gthGÅŒF7s+ ÃôÞél¼©êå P¡b­V÷®;­¾zUý<:IŠE6JFWéì2+N*)Ÿ$élœ,ÒÛé<7¡çð9nô¬v,~Þ*YVLr@¿Fk‡t“78)’<øi1±~þ*]M–ÙÈÈyx ÿZþÍðˆ `øä»ëô¾ûÇ*½…ùòj”›’¯æ£ÅižM³´Èþµ¸Jafaz#L>×#Aá·ó”ž+z‘±±™#”^ð ¾Ó˜›ð+J3¦Ð¹æHŽ¥0ïúÓ¦ïð"ÙoÒa0¶Òi){Èú”ḨF&tpš,ç`KòÜdRƒ \fËIfója†üceÛ„L®¥£lj3Óª… &å{z•ƒúgmfÂsг‹I6¾ÉóyþfR,g“é˪ÁÙÌè›?-Æ{ûéç?g³¬¼ÀyHA´édjùø•öU*`Ÿøò}!„0>çè創SC)ÔIòã|™N‡f¶$¯çf«&Æ$ï2P„Ñdj/ŸÏWhŠ1ºÐØŽßF•¾M‹«áÑéh±úæ›KÂgè z[ïÒë±`ðé›´X¾Íç—æ®¶¡I_| ñIbÿñïél•æ·' |€jÿÛ/úñ5àþ ž@Áß•ÊùÔ4ü§wo†GWË墾z•ŽFÐÓ¨ÔUºïtý †:]¦¯Þ}ûþ«S£eC¬¢ðw¯AgÙ´þü¡Xš%>«¿ÙîL@7Æk©›¡}[Žp©C£oîúùý2[ ªÏÌ×Ù^ó=à±Â¨¾°»3%g¿TRûå(™©Á/6xYZEx§U˜_Ìh~"\€F¾›@$dÇw­è¦jOn¥tRÍõáa÷é›ùtºœcfŠü˜§³ÂÆ7?‚æ—Åüöùc:a&&Éx¬25ƒt³%û´2eª­­„Q’û‡æáæQÆ0„å¡—¿ˆ×µ=‰š"m>¾Ýè‘ýDûºô„•$þýçofv»£þ ¨só_Þ~>ú7“–:z]Åßðë×›Ó.¯m³ƒõ.»˘ßÚfÖŠ˜_¿Oa¬gÙWo¿3ÿú¡òƒo&Ùèv4;¯.¶ï®S0ŸËÅ4ÙÝ9ù2Ç©8¹üÇlþq¶_Þ÷~–.Š«ùÒþÓnó€Nåóé4Ëk0îmØýuBßCQÜݱíLÓw5Ê{ô¾™_ÏÓ<»Î–%h·83ÓÌäM–ÓÛ;õpÈ´4}‚†æÌc1íêõîܰ‰ ®3ðýýB8}¡g|?çù-ðúÊ žÙ陌ËhëìÙøœd 6èìcúm)k  K› žPÅÑwfÊúsó’Êkÿe~“]ŸgùIBNy’00VôΩ[Ë÷rxlY¼ÖGAà‚ÁÎQüðY¿ïXÜ·í÷ýW£VK®ÜPL;ªRõ!ùPââê0N“×0ö¿ÿÓ°è`Î2€›‘®ÕysB£-ã9ð‚ßH~rR;®á¿hÃ|mBP°2›ƒðˆà„‘ÉÜ–È{²UoU€îà« ÌX³ƒíª¸%mm0.]+ÎЋеÁ=W!0Ñ>®8Á|ý©å[Òqähð1 âQ[Öæ¬nèïÝ)VL!ö° ÜÇ*„ P=,úáQ]Ä»WÏ9[ŽÆ§ã´¸:Ÿ§ùØäU¹r\´¤„*„\†LK¢DèFl¸öû£Ñ}ÜŒ„GͺÈ!`ÌW6ÞSk¹1@&G(ÇF"´#Z¾=×¹ž…A®!ð7Â}*" Åã_³ˆçÐ\*;h͉⚸†P*Sãš#X #zÌæ€ÉKêÆ4Oð¡·I¹|Q—EíòÁXÍR޼}ÛN2z`>Õ¡:V“ÇÌ <º›¬*§ Â(ÆŠ8j>Ùvœªày®€x?©* øgÙÒnœÕåƒÛåCaTäÒ1‚ukÔ’Tu¨Ž`–¬G„Gvv€c[Œö_5YšÖVš®B„&Œ+ÇuS¦˜èД×}42êÁ ‡àÑ¡æ&]tK:¤]:@ 5åûh(1ÎSôGÖµô\>þ؆àÝ%Eîfí ›vÍ´+äD í‚O¯„kdÚƒ‡ Á\|”þ×l´Ü’m—9* ¥«†mGë,Eò@c@A1ÆNŸÆ€ÖPng‚¥€GÒÇײ<Áeîl¡¨-é°vép³¦'¨vE•Ü}Tá£õhhZ¸zwc‚‡†°-FWÙüßÖi,Ê2÷¡r¢\6Š›ÎC½øc#â-;mlk‰,e“{’v6ʦI1¹œ¥ÓäãÕ‚‹éd¹FlºÔ„J åªfZ¢RÈî •IånT‚6ÉŠ3´FU$Õ6pãë4eŽìÊ VÓê% „wY±š."˜¾ ËÆŸ«„Ž»\Ôë2¥Zw5i˜ã 4ÎÆØRX“ ý!y¿º¾Nó2Ñ'6[¬hz©$TjéñÒæÒ˜Ð–n’NíÛSⵚ*æÔ·•Í:ÓæëJI€Ì’ã?–ÝØïmÎÆp³Ñßò„õÛÎ~ÌW™QÜ2Ñõí|>-L>]6.óêàË9˜Fk“Êü‰ä<¥«"V¯òµÕZ?¦Ê:*Ó•ì3‡ÉÏÕÅh>OáÛ$û4ʲ±ySù ¸m?ïl¾éøö~,ó[xã ©2Ñ’ã…i¼Ê01‰sÛ¤±šÌQûw&+deǘßgé ôÏ$ ­óÙ01)RÉ(™—–ÁŠôh5Ë>- °Ìg§ãIñwë7²ä&Nà±Fée:™è &Ÿ–øOÇ\‰Ñüú9ĪÆ„é‘ÌÎ@¬lfËäzR\§ËÑU¹m¬Æ/G¯nÒüÕtrn«=N³å«*ßáWÀûËÑÑËVWÄ–kúæKÝÉ Žéè§ÙUjJÉ›<|dì Db¤gô£Ø$1gÕÞØñÚÚ‘k¦ !A˜µâå0D¿Àñ^^fð® ‚ÀÃs4CAfíq“mÄìi(ÑSDTûU'U/kGŸÃ1€J:L‰ö#U»æc'(ßÈ8¦Rx «G€OéÒñ@<Äcñ@<ðe(Ý+gœ`åvìžÄcOô@¥ÕŠQ­ÝZÍk÷1v ˆ$Š’ë h €™s’QŽ[v»ÖíxýJõß{mÐÃÈaÄè%Ãâùp¯;éP“¾è–o.ÔÖ-÷Ú¼QHo´²zåóȽ"÷ŠÜ+r¯È½"÷ŠÜëîµq¯ö°q»aY/ȹWä^O‘{m´U)&skµ¢è¹`“èÃwr/bÈf¸™lZ¶£X=)îU¡ZÅ•ýsã^U¯µRH{H§9C¸sîU¾Q sÿ®ÙV¦hä^‘{Eî¹Wä^‘{Eîu÷*Ý«¡^œ¹Ý°à8r¯È½ž<÷*µb^J„[«å®í¤Î¸l ÞQÿ ÐrBP…Z£à²ßQ¼÷÷ä^aèy^Ü«ì5h’àÔC:ͧE;ç^2!ÛÎ.Ü!Ó1ç0r¯È½"÷ŠÜ+r¯È½:ã^¥{ULS*ÜnX!¹Wä^Ož{•ÚªÁrSå¡Õú1sab ,Üͽx¹Ÿ%„ЭӯlGjñùSà^aèëåYŸ÷*{Í€0kí!Õß¾W ²z%·È½"÷ŠÜ+r¯È½"÷ŠÜë0îUºWNÈí†ãy¯È½¾îUiµÂ·VsA3çP³cr÷ÂUÒ#A6­Ú V»‚ú÷ç^è%æúq¯tà?JÜÒ!\õĽÖo¤Té¶}¯» È½"÷ŠÜ+r¯È½"÷ŠÜ«îµqÃkÅÝn˜ ¹Wä^Oœ{­µ•1‘tk5Û‘ ×e­ D ãâz'÷RŤX1Ú Ô¶“üAC…ÉCîµZØ‚‹¶ž£ÆCa©Ú1´Ú« ²–ÈÇ—÷~^”I—·‹ììÅOw¹mÅ~œùlÆ‹ª¬çÙ‹†ï³å 0D6;{ñõ¤€‡y€û–W“Er3IŒâ|ýçÉ"ÏnÀIë^ƒä­Õfxñ54«5=Ï.æyf+}Nf+S¿ôEãðjN‘ærdäiQë0ôâaN½C97…Z«®Å!*êŽ6©(øÃ¦ŠéšÉ®ƒ÷ÒàúvMï’š7ݯzo'15—Á_d7»%dKÑž¬Kõ&×Fõ–0–÷ë±&W)pß)„Fà“Ù(³ž'I—Ëìza~&÷êÉ!WÿÕØ?IU[˜M;Ŭc¨ÚÌ_W:6…ksÝU…"ÈÕtlëŸgkŒcø}ù1ËfÉõd6ß̧b˜ü0ƒ¹ž—r®žT]8˜˜âÄ0 y¶)k{’,J#QdYò·ù(¹Ì–Éõhñ·²ìï*_^²1„xó—0ãÌÈS0'•)áÀÈ7š…8“H9GBAòÐC°®,ÐE$ÇDÐ&¢a 1 $!ÏkÎöS¥K”e;Äú[„³o$ ¢â¬^Š7.ÂÅE¸¸áâ"\\„‹‹p.ÂY÷JíÕ¶n7ܰáâ"ÜÓZ„+µZR0Ýn­¦ì1þr„Ò6öâ T<ÁUE5"ؽÏ{1Q„t4ï™{1&V>È"÷ŠÜ+r¯È½"÷ŠÜ+r¯Ž¹3¹n˜ñXð6r¯/…{q)´ÂZ­õc&@P1P’5s/Ž©¹îǹíèC’¨wmoíµÉœÓˆŒÁuPk7´?š€^ÑgG9fD¶–=ß´CýÒDx£dR3d8æÉGšib¤‰‘&Fšib§4Ü+¸0îã†e¬iâ—A¹9{̘pk5Ô{Q°P$h¢)lú9Ó­ ý¶ ?)î„^2ŒŸ÷*{Í)ÕB{HG‹Þ¸W…LÁ±G"r¯È½"÷ŠÜ+r¯È½"÷êŠ{•îU-)u»áúÕ‘{EîõT¹W©­’Ę[«%¢zF™F1ݹïU†PQ±ÖãK¶ œ?)îU¡g’ìDozù¬¸WÙk‚‘FÔC:\öƽ*d\Êö· Y¬¹Wä^‘{Eî¹Wä^Ýq/Z•i¤œ 
·&µ´¡È½"÷zªÜ«ÔV†¸æÊ­ÕT>jzäÀܨŽõNîe‹3†("­QpÙŽbý¤¸W…žsÒ~øjÝËçu4­êµ"˜ûHGÞ¸WùF½Ê™ä‘{Eî¹Wä^‘{Eî¹WWÜ«t¯iˆÝn˜ÇœÃȽ¾îUiµ¸}C¦jÇð£ÞI©X’¬ož ¢•½TíUOé¼W z‰¹zFÜkÝkÊ‘Ú-Bûº“2‹û^‘{Eî¹Wä^‘{EîÕ÷Ú¸a¼ ¹Ý0±$cä^O{­µ•]i«÷½iGÐãދ´V»jm2éѼA‚·-Û©]¸üŽÜ«<…†¨TT¹ÑëÌñÿ3÷*¥ƒ1XW䔎@JöƽªÓƒX1âÖ:Q,ɹWä^‘{Eî¹Wä^q¯Ò½2Lˆ”n7L¹ŽÜ+r¯'ϽJmå” ìö²ZØÛ=÷’ ´hØ÷¢l £‚²¶ŠsU;JmŠÏ9øŸéd™\T¨›^Æ$ÄòÒù2FtcœìCò6Ïn&sˆµÍ—•5&1®ŽquŒ«c\ãêW?ç¸:Ì×R©š|­&üC’.ÓÛaR„"ËCM(ÝÖýfu?h®Ÿ]ßDKà ös'–ÞùC漡öÇÓ6võÈ2øëtá#ĆÏOˇXÙJ—lav ÍÄOAÏW‡ýÕÖgó‚y—:€Gt¡Û’Ûú×ÝxKçpkN„DÚ [)ɇ»wÕõî‹f}Ddx8{L³PÍ»ÓQzz¾šÿ¼«mnãFÒŸý/¦ø!‰wE ï/¼r]ù'ñ³ë²|¸ÝTeDŽ$ž)RGRv´.ÿ¬û÷Ë®13¤F9 Ì€°¶nk«"“àÌÓÝ  Ñ˜¥:M,uÚfqÔHÌhË­~L,°‚Ð9†¦á5fw˨â`7 °e„Cº„n´ €/R̰CðèCzõÒÅÒ“{ê´¨:5gTZ¾´ò@^ ¾¾€“Iìï‡óõÿ]=I·Z^Cç6Lkݾ P¶SÒÒt n|sp?ˆ'8@¬¦ÉËŬ8Î'î»Áê>ºmÕ;Úès´cUš`ʤЦµüݦ3‡òÝHì œ“$,À¾Ê3^kÌ—ÐùÚm/¡6çR3+ •3:<›->­ÆÅeÞì% §µa~5u®³XWyÕ¦9% ¥ B;"£ôç¾Èp’`.g}»®Wžhù2àŸe%uïþzPiœ®?žà¹ÙîNú€ìS™!œÓB!j«C³#ûèÉàÑ „Ô,…q DpÜø†g9zi®Ôn_m¥eG©¹bßô†ØP¤wB>ß$äïÏÇ¡“8}<&JÞûæPëª:Us'Ú¥1!LÀ´PEˈ²ô^Ä«/ø£Ô4…ð0Ń´Íh¦=x“6–âhŒ’ß‘þh w*¢v*¢Õ©€°)6ŠBð„¦ ¸•´]Ö»¬ÎþAW½Z. §žÀ.  áõjøÁ¸´x‚ëiáá–«ûºG‘U¤§ƒÚÑ.aZvØ(ŽÍE”6m’Ù2mðÅhGi÷uͧVW3°q®¹Uo”À܈'´“LGý»“1¸N°O‚Çawë€y‡ ©dF+F°}h§e3r½Éê³ÅE´yF£yòÉ‚Òñb9¾RiqïZ0Äôž‚פ˜ŸbnÏÅHÈ‹G[ÀÄŒ¥ÇÎY‚Ì(x Ì*îGG)¨ã§Ë“m‰*!i“œhZQ•‚«3,Úp•ÑÞ"pÂ$/á1ÑœD­Çñöކ6•"¬à„qÁ¬ÄV¬¡&F‘•èÄ“+3!x(‰v–ÚS¯>IgsU)iÑÔhסV_ö+.¥ÀCy×N$p(î=eI6<:x_ùÁ—×êɃÀ®Ô,Nŵ2—Dµ­1Ý]Ô;½õœ0ç÷’Tª$RÖ tMĵ3_CÃü×Ë¢´ƒF™¤•°è¢©k'Tj&픣/ŸÜÉ.Ê=ä52 Ÿ ³Äzà1T|5>¯J+”MF¹"QÒHšœM¥èË%c™VÒCZ“f|³îÆ…ã±4ÊJÊε$•D¸õK8æÑ¡ N>xDà‡Ù CKѸv2Aà ï1ŒãÕ*kZª,@ÉÈð%¤;R%9æpL»y”ûe:Q^rÊ…AK)¹v)Ö[„ÛtR M\rxd *´«#É>>£ ŸòA;AôA+‘ÅcnˆP*Á’lžàex§¶][“n¯êö>!ðBJÆ!hÚ´k=AéM„ž¼õ‡ËS쟆àá:Ωûz;Ù©HdÒÝíf‰4˜—‡v4øøY$n`d"AE¥ò\ü W•d®ö$ÃBh§…Ž“f‡ÐŽW@nR¤>:<\+ôˆk¼·ïèo¹½[}HbŠ2Ò*Kp¸F6/Æs7Y;µQ0÷»u¾t9‹×=}såù®S?õW'õGÙêªgã‹|~^¬Žjeù|’]U©ïA¯Üm šàèm#ùvƒ¾$k±š.o/h¯¬½Mÿ˜®²Š;Ï{jG‘ýI>’ßêûß3÷©ÓIβõ"_/—Îu΋õ¿dóEmÄÌ­/8ÅöFÖ̤,iQöýõ½X.æÓ4:-»=Y ÔE¾>/.OÀÔù:?yûÓ»çCDz…‘€Ãï^çÅÌÝ‚¾Z/AçÕ…ç¥8SàÆd£ugÚ7•…+Œ9^ßÿôó»uq5ÔŸ¹¯ w!ýÏ€§TFýEù¸[WòìïµÖþ>ȦNkðÁáÕ/ËV„w–„ù»³æsPá0òítõaUÚwCt¬/K-Õ}}ôOá÷šôõb œ®ú˜ë"ï—ù|UîȽæWÅýõùS>› '3¡Ùdb 3‘–ŸAo)þX„1Lfr$ƒ#(èîå\€ TOyú@¼h,x6ˆ´ýøfË£ò;Pí‹j$¬5ùð÷Ÿ_ÎËµÔæ7@çý¿¼ù<ø·ëéÌñrðb1_A¤àþü~^¾¨ö©á³ÒXo‹sðŒË›òƒz³ þü¹:~ûüÍ+÷¯¿ÖãàëéY1¾ÏŠŸó9üvé¾»ÌÁ}®¯fù¸|Qã°\îßjzùùâÓ¼›ï^½›çW«‹Åºüg¹†¼ÝImÀ¸·ºÜSüWÀ•ó‹õUüe1)Þ_»p S̾ï~u伫¢u÷çi¾,.‹uWç×Í®fÓñt=»¹¥¢ÓÊõí Êp|¦jç0ìbƒËÆ~Œ!(þ¥ºÁøbêf%7Ùx3>+»'ÌvÊhëÙÿ›1'›zöÍÖ™þT)¦ô Ò6H€'Ôqô­›*Çs÷’zÔþËâcqyZ,2v”é£L€³â·ƒzéùžn;¯õQ¸Pðsœ>|Ö×µÅ}ß~üÚËjNC+„;ö7î`ÙNiÌoÙÓùtuÑoN“}Ç­…—üïÿì9Ý¢ …~¥56õråPŒ‰RO:ù²B€Œ†'ع Á#hÿ{§=~{³¯ÀînҒüɠ'7  fMüõ€aŒ`h’€k§$ À{,WJ ¥¼g'¿.$áܸ]9ÊÑh×z5žïÓñ)k\g" ÝqíRÔ¡wï1TëGõ¾B¢÷™Bã2Z9Ѹú£ºo…ú(p-gÜzÀÕê PêSàžÎ]=‚£à) \¸÷( Ùˆ—½íÖÛúÒû“P™E|¹‘œY;QÜ¡AÓ³œ\ß)9·D¡‰®NÁÉÀJ`¶oÝâ–Q9¿`‰€(ƒRl”tíh¼yQÙ Ø„x¨çÚIÑÁ±MeŽVG‡yE3¥ Û’ Àšy|WõÉU§5PÂ'>^M‡«zùdx›€^ÎI|2ön_öÊÄ,E –í$á¸÷H©¨õÀ#x¼«éFÉ>Iþ\#ûÞÖí­ZŠ6†vÝBûXÞA2Ã)3 ÇiR\?`Ë#õò®]pÖþ4H¿ªõ`‡rWÚZ°[íÊvFÄ;ì—ºabèƒçç„á1Á¹à»”×ÌpáíoWðÀ_ åÏßäîX}tKxK{ã11¼î»ÍãdϨ»m0Üäí—3‡IE½ ’@š$¼bŒWLõ_)ìn‡Û•Þ~›o˜PÜ·‚Ua’°ŠÛÇŪàHu¨ÒÙ¥N*¥A,™¤#ÕeéÅ)oyqÊO×å̳‡=º^Âï«X£ýrÒ ÌŠ‘ƒU»¶Dä»#\´½xá-‘Nà _<†F=š²Wá.-é[Ä#‡±áäè¹C!L1±¬HÁ<±®QóöÛ³"8Ŭoî¶6èdZJ©Æå¹}O`VŽ^\¢Ú ›ju;’†KÞxDÜ+Ùú˜B¡”LV±  ÔÁÅéÃ,o‘¹LÂ,<¶³ª½ÏòÄ">S¹w£Þ²´EiPJl¡4¯IF„Œvj;Îr¥dŒ lë¼jwø[¹ª÷HiñÀÓu>ÕªÄFG\‰»0ĺM`«kÇ{] ˜†Ç”0BŒAû¥{‚M°bãðHÂ5Eñ°–¢k>ó |iì¶u©IdõœJ& ¸lìtíˆèŸâÓ‹ÅXE‚Gòë¿´·^:kO†ã¼T ÒmQ FйPæn€¤} ƦirÃLpCð“zú²ÕòV½È¦STs&‘Òe;nIŒ ‡}™¬¨ËL¬Ú±|PÔŒöÀcB—ì?æ³é$w‡òÉåtåÒâ®³éøÆã`óð<_Ÿò›üj:/'ÃíJ-#½ŽC8·Ø0[^ˆm#Ý~8–;qŒµ‚áâ˜Ã§d•ïq·÷pŽã O¾ãŽ”BÉÏ&Ž[Í®†ëEÉŽêøAUiµáH]‚ªê´„Úrçù'wq¥%Fzˆeu †ÀxúÍ;|µ}š¯¦ãáõªÎøc(KUBüš’%{q÷á S–KŠËÆ$OÂN¹’Ü)¸²éo]8GéÂÝÅ"ø¨Ã8?¨SiƒÞ‡1ÜHi|ÄÓ" c„TZyŒ>"¼tûŽ-±ÐY£EöòѺ À†sh×ZeÜ»ÒØ×À&XñÀ#È!QmŸßɯ°2š.…`=6v»aîì5BäJ‘ï‘J4òsx4ùJ»qæŽY{”_KkQß,Ý‚‰töõQIep'OxFíîsé½rº¬Ž¦_Ë|–ÃÊÑÙ…ÈÊÍãâ’è}ÔÖïØÓ œX!Ñ͹J °)ѹ¬0!UÔý¼ñúÈ›Ùâ- …¥)8€§o–Óöäâmß¿´Åú¢¸^ ?·ÇF AVö¤”‚)Á°ERh×:MÜ9Ð}µ•쩸NH‡à 
¾€d¯–»«YYWnµŽùZåV¼yÏ‚8<÷‡ìR ψ<,RØì©Id©W•µ«D!—‚6®"=we/§ã µ­ë|:€¿néû|ÃÁ7}O:Ð7›\®\èdzv6‚s=_­—×eͯÉñ/|“.Ëžüõô¿Šñz”]æW«*oý–ÏoŸód°ëzWƒQVñleXVãšwÄž|¤ƒ§GÞôh0ÕVؼi¯Ê»½ÃU–ƒÇåå{î)gÏç‹uuŸõàêÛ¨ãæÎAÐw‹CC5›ºyOÜegìLŠãºØÝß­ŠÙÙ𲬂7^LÏ/†ùÇ|:«ßÑx¢+Ûèñ8h;+†óŤNŠÅÌñïÁS¾Àc6‡ä]!4ð —WV•Fé°÷D¤ óŸå»Ï·¥`¡õt¾Vâ;ú´¬ÐW ðƒ«-ë´ö·RY4ÛDÙ ¬Y»zsÕTqõ²_›-à-v6ªÜÞί§¥ƒ™üØì^3§­³ÑÞþâ¢pe¶q%k54õ½w¨Ö üññ1üø±(«Ñ7~ÙÞ7Ž÷éé4;Ó¯×txäžžóÀÐ壟€/o‹³bYÌÇ…¿2~ÿ0ú<¸ž‚‡š 6.N‡“Ó3;§g|hÏÆ“¡>Ueõƒà|ðå÷=TV_ãF:« ×Ör..óõøâņÜ0k–TvŒ]w.æ³›Íè6¬‡·Õ.àûùQüqå¼eKïÛÔ/w…q=¼ÒjßS6½q§Àûºð½"†·ëu×ýR÷àãÐn†?èÉà¾Ío‡NWƸÓ( ÃäŽìöëwÌ·yïwõ-juáé<{–}ûýË×/ß¿ü6û×l1›TñI6ÊåOBŽÏ—‹ë«lJuÎêÞÀÚÆ9Wo¸œûéöÔ?»§ÖmX¤мüïë)°±˜¯ƒmS?}{mÔ»òÃb—¯êöäÊÇ> ’:éùÉ`Snãíõ¬¸¥S—G=éßgà@±ìä$cÙtR¥yÏ2øïrZ¬º=o°åú­t‹¿xûòùû—Îil>ùåÍ÷÷>©:‡‹Ï:¾£`÷ú{ïßWðdþº?o^·/Ê(ãµÿÔÍÛæE^~ýz_“JªR•wÔöè®5K„à£IÅtä9ÖB‚ ·ä¯c¯-t8OJðĨ×;òõ•ßéËY§oïQS>'°ŽòFÊ8ƒÞqró% Öw …s ™‡ßÏf÷éëQ‘Dd>•ÏLŒHÙµ2.È­ÁÂ|–€¢< µÃå4¥KŠÅ/H3¸Œk”­ši®b0xpŽ|<Þ«™‚·.×ä7£N‰°>šZ9›ë¸V×Îk­Â<"K°‡°<$xÀhÔ_›zœ÷Û¿QÓ^³[0dꪬŒÃ¬nÖ¥._†Fù&ÅéàñÒª!Ïçªã/e£ås¡ûl †zKBHRÆ¥4*3^MdT´"<.¨ÖH»(ï÷†2©y’ <˜ î@Ñ—Á|IàEÚ-?ù1^.ŸÊ6nŸ$0Z‡÷²xØÏãðX¾¢*á]ŽèÍú8Ô:ûzæ¶Œ¾_O?0)CY2øáW ž`Õ\ï'%ÝN÷§/ãýÙMqÙÓläBnûÁÙP;ð!Ú_‹1èñáÇfø³X§ÓÃÍisÜý_š¾Ñu i1æÄ@žò|ZˆÏ`W& }Ø‘ƒC½ƒz­!³û±þ>¼U0”ˆ6åÄÛ|æÀ£æõ<à Òx÷¢Ê6«·k|ÛüUuÚÜ?Öoµi2¸20;;› \¬3¼u,ibFpñˆ³E§µ|»>n«–ëÒ{£bÚ§Åñ¶À­"×L:°€Îà[”>>e„r”eáÉz\(þÙãªÛ~ü,G¦»wÒ¬H]f@‡ù;†ò ~p…о?ØxÒ"ü8 ÷õ½ÿ[Ugšèbäo^fËÙAžÁ ÌÑrDBׇ¹ì,< Ê‡Z”u+?ìÞ¸pÞ’Éx †»0ƒ ÷z^pJÆtáE²Åådáñj¼øYÀîjè¯üÈÞ½7–¦†39q7;ò¨!C<ƒl™°;¼ žƒÇ©d–ýXonwwC½cy ?éì9Õÿ¬þQ鎦 D4–!BóEí+šhCŸÁ®p¡Ï.ÃÇê|¹9¬^µö†y¦ââ/€9Z8g0-IîÄälàà‰Nq_y6Ño­3çL)Cæ Nû é 6`ï8²ô‰k±ñ ›ÿ¾™iŠ/?9êœybY]À‘ sÃÚȯg _¶>^ºân2ð Yœ- 7·‡Ã¯TN‡DÙX»YÚÀgp…-tâ ºä9x®¬Å¸Þl¯³xËtT9/EÖspIÆ-à3¸ÂÍ÷ñN1øäXx¢¶wú¬åZwu¼YoVÅ ø¿¿GeÓñSoЃÚ)† y?ØBuÚwŠÿÇâ8's/ÕýúUÒŠXžtLct!p„á^Û/(Ä qÅŒ}nò1…”Xxæ–÷¼Zï–»b1É‘#EŠòh£ŸÁšlÀsäÃ>ÙA|§6žhUÞ~ÔM‡asµÒð‰†U=ƒ#l±úDÕ2¦‰ƒ'Ï­c>õdZŽÁq a¼¶Ó½þë6è|M•óôXåÛ}jiÜdY0º$·rñŒ½¸ç{Ƨ!QõHÄ/Œw)ÏêÇqΧ™%0?vZ<]Qˆ'÷›ýÔVcùÆØPöFv¡l:½2´ÞNÊÀ§¼<$xÄi‰ܘ 4‰„&k+4„íÇ~F éÁö= À”ÊÔ ´IÀ-`Êú2®¡M~ql=úJ¿ˆSóëÞâ£G2íÈP,‡≊å1wwö~?¼HÄä‰),û'€$زF¥BzŠKG€~ù¤UçyÚÌmmúBÜâm¾TçbÝÌkˆ+û¢fßöä‹í -Û#A±üàñßIewnÜv¦;ü¸qœ ÝÊ´}D½J»vÜ_„9™|åëä)ý¿Ï¿b¶,Ë×h‘áqVÇ qUM/ðäSòèÍ‚´Œð¿òÕì!$àˆ„}Ø1^ú1ð½“ÚžúbÔ} éˆ# 8Y Ü2\ÍŸÀš/èÄŸ²÷ež(uwî†û:æ}³çIÕo‡oë‡ýýj;Üý=ªhÊpá'î…³*ìkYRÎÂ`#C,ôrÏÈ™ÇS™#\#4HuüŒ3J¡ûÕyä$÷üçå‡þ}÷mØü½Ùÿ1–r>þ§(Õ±ÄI]Ö:2•Úî¢Æ§l‰ªOCïÕè2u5qÇ]I š“‚·ÜÞô™T6Ðl™S”ý*ðcèíÁ‡ÌA¸ç×"Ô¤äì9BAŸÓÆ„D­ŽGÜ™[ï”Lo¡XN™ƒ}•.E¦9ýÅŒY–0}oˆÙ!Zµg)iü÷Éiˆ4Ar9Zrd>ySÄ®H¡l|„Ì·…²É91ðdcõ®ÆHý¯Ê±ú¸ÛŒ¡>“HÕZÓ‘#Äœ—psÁk’ÈXòv"‘7€<â R«Fò+ף‘f‹Gp´ÞÎÈä¢Ô¤E™¨úr¬Ï $CmŠÇÀtë?óÌ;}Ý‘SñN9øÙutqkR‡+jô}¨“ÐòðàÜÔÀ÷AIP Y(~±® 6þå«…ÕÇãËi–ƒ'iG_-µ—!¶Õz¿gÀRt(ßyª ÍEL«”ÀÖ#Ž@ÒØ‡8΄ÌÁ#.`+›¢ôÃ_«áûq8V÷‡ç&>«íÝèþÑ—›Eôè²8·‹$2hRʇÄûP*X8xÄÝ-çMÇúçnõÜ †¾- ²IAB>M  I&€,™;‘)ÚÄšèA¦Ëqtu\û¶ÛŒÓhÅ“gÈ—äQ»&…R4ž%n¾âNÌ>6Iö˜-ö.C`ÀÀ t'v¹½.CkóA|¯8”E{Üm‹1nî·kô꘯÷ñÖ÷­Í|N_žÕù®±c¤]­×ÀÁ_ëÙa÷dˬ‡>wÈúà©—EZÙ+S ø¼j6'ÏPyhÈ .˜V qgB5^Kð#,Ï žìtíĹ!ôñœÙúJ«›Ãq8Ôþ?F7kÅ݉Ö")ã Õ³Aضp v $‰Ž—<É«5/¼F³Dˆ 'ç yÊ*ã¢ÍjMì>Y’ØÁ‚Hð$÷©{ €(‚¤Œ&Q°e@ÐÝ[´l¡@†è;¼ðáÉ‹ì/-mWòåT+)9*¼‹ëŸí§î,Ž”ÄÙœl %ñ6v¸«x°ØHãqÑ|êÎB„™"øTœkK¥Æ–qͧî,z’¤ÐÁvHð€U}°þÁ™/ž}òÑcö–ò•ê8ëÙ=fX: úØáFL„u÷ã°_ß ûöâw§="'5†\¢¬\ªuê£õkÌGÛ¡uº OK<ýH‰„†Æ%ë"™ß]Æ9uÍ€q%R„æ@„GÏü_3¤¥O×ÖgA_|C9³±ßñ‘â±w4Ëó@„'ýÇG Öš‡T“G d*㌞נÇmþ8"ÀzY³÷·ë»ÃÈœ¦2}[™Á¸ä\4”ß[ÆÅs$Ep‰LË”áÉF¯`íê|ûýýûqý­üqµ]Ÿnoëãöc]º%_@Lcù>z49ÕÛ¬:|›å©©"ÆDÝÏ”qìÌû™…YË%ØÜ<âêVKüH™Ovw;ê¸ì¬ÉiR¦|ÿ:N1 3›ÆÜ`—Ÿ{ž¤gîo‡»ûÝf\4“ʤH5 È`œøñÐBÄ`NBõ<Zü£âˆ4×Xr¶‰òúsaj0z+^…©|ðÞ÷Xö"óA/“w.o%°³_~Úx‚qšËÿtØOÑ@M~!«/dõêBVù6°Yxc‡Å.Â:)›|Ó‰¶­MW/{”©ª?›“æÒŸÃYêÒòà½×=Û»Ó´ ©‰M;Ïô2Î*åi*ÒV€:%#0u—BêÖµuëM9±äh¨mÌйsÈ̇ìâ¯ý„x+Ç| äãð}}?­Ijò ðèBûfrg›Õ¼%fa1"³eq&tð%xÀöô¦n`з5jœ²¥Qû^IÏ{Pà¹y\ü:QˆGñ:qŒÂ Þw÷D0&P\ëcNíz{çq.åžÅÕìæKä¿_âq8¿Ë“À‡¶&ÁT'2Ê=*ã¼Ó«N§Èn‰=BL"<¸ˆ½8û¤×©(N ¸Qv;ï‰ç­ã8›¯-1°«ÙÈëc·œà‰¹œ/ty?ì‡ZäýïKÚܨ@h+0Ö¤ƒžrˆb >øEÌ‚‰’4JEêQA‚'€®yØî¾§û»ãñp< ÷“—"x(çK¤v»2. 
JÖ÷\6óƒïp%ÁœJã6mzÊl&WÒ]Û®Ýnª  ù.lˆ!›è8xòÜ^l“Í(fcËùÚ–lW6Èá‹~©¹ŒVú’ëÏõ~·]׆ 7·‡Ãg øp<·y¡Æ'#zzgÛ‘¸¹OÆ&—|¦VGªïÛ“îί´?IDˆbŽ"è(¿uå(uœ‹ºËj&$Ð{\ëŠð$Ýå4©Cb­¹¥ÞzGÊ8ïAáÇø`ºž¤Ž‡ý¾úMçZΪ¥rvrù,¸Ü.{¢Ó¥êµú`ã–稲ɿ_§·PÂjfðÎÔ.>~ð¶u­ÁW*íD‚z̽¸DЇo÷&”÷ò„d›->‚ýR‹>™hÛoæÎãÐÏX~X<Î#Ãã£1dð9zýeŒZðB‡< !ž8?O‹òj~¯7Ù»ÍiÔ%áÚc-/‘ÚgѺ^ºÆg°SÙÚV\‚ÇEµ~¨/ì'uè ‚µÉ¢§(j|;? K•²|ìÖwpÕ$x‚ï»üáö‚ñhs1_ö2.y½cù,îJ0£[~þ%x²J9Í×…êªÊ¡²Z’,£ "ØbßåNR”½•å¨8Ý<òàËGyj¤6©ÏIm6Þsí©ì]HHW»Hû¬£“Y±ÇúáA½õ_UEÄ®#X4™èwwEË®Oçª@:è™áÉ¿œe ÜîTΧ倊”›Só’0êYÍЊ›¶<DxÒUDxy`"‚À¸3P;VürVÀ©Içz\ÎHðX¯P æôwùèÇ×'í}TâWÎõG"¼m4¾/r‘§Œ3Ù\E —pêþ±ølKðä¨Ù¾ëŸÃf2­ˆ@8‚š+Lq´Œ òš€ý9*'wˆ ð€Ï7„ÃóÁ"Éñ2ÎÓìò5‡ãÔB‡z¤à±afùð7õ¶ß·È"ÆžÁšTZ09¸Ï·AOìDàÉzÅ$gûe£®›Þ¸ûbÊ7Öyß>ãÐÀÌŠã3‰.C&ñè=ô>mn‡í¹ÏÑ„¹Í„ÁÖÔí(ÜõlÕŠJöa´@¶€8"Àá³±­ëòM‘Ì%[g5ÛÎå¿w;"ƒsê“¿Û¹_Õ¾±ÍOŸ‡õv<ìECè´¾ž*S¢d¨»¤ùÅlJÔ’­–íÀqZáDZuôœÚzvåDë€ËÕô;§Þùrë@ C6~y®ˆðˆ‰¶ÃˈP´„rÀ'ã²%ÁÔÇäÒ þLâòåêPóT†Gìt,©gâàËyÛù"!WçÄõ¥d–€‰°ü¤Kð¤ SÂ<ÒEG(±Þ¼—s<ÅT_Üc4¿E@5¹²ï° Hðˆ]½·Ç#PÞ¡':/?S|PçºÌ0.8ÃhHÅA5‰‰zÅëòOú⾈_8"<Ž˜®qFaCÎ `  ÏŠ¼†£•fžÊ‡<{œ·Û‡›—[±#¶âP·4 žÚŽã¬WªŒ<{ë€v=&V‚Ǻù¹±oøœg=¡9°É— …lÇ/¯¥ S W ¸ l (ø\o pM<_u%H¡CœV‚Güô碹K9£–?h¾E$Œ(Ï®Œ‹I£PÇåÍW†Î©aK¶Ã¹X‚ÇyôÏ‹²^N(Zâ £&©BÄL¡,ã̵à‹PQ¼C«kÍí˜TsU+ұ$T"·–X_›«ds^ÅZJì°Ö%x²¸áßqØÖŽ©ëýé8üïÃpzaØìÛÕó˜ç5õNÅ£f£šf›Í _—à\H‚¯¿mw§õ;l!›õÏõÍn¿»ß §¯¿ý£þÊ<ýÆÙ’ö8SDZxJžƒGï‚ë#÷õŽó¢çÉú×h‰èt2r²†JS(ã.›A8Ç Äßá¼$Áôò‚ÞnÓM•–%/[MQë8—K½jó[ tp;Dxô¬Æ£þ~þ±ã(•`c9ççÉP¾S—LP»Ï×d·D„å ÂÉðˆË–~|×!R'±Ì°ôbÈ­kÓ¬W-H™Ö|!Bèp/#ƒj›È[ÿ©ºþ‡ž?q’Ë7¡ü äš *wxš¼–ÀÏ(!À“{‘ó¾Ò—³P£I$¾òW;÷þfY|SïÛ¾‰ÄÆK$6·"±=œ’úïÄœmâàI*/ŸÇ3ä£A¹ÌãóMÜXÑ•²,àÔd 4x^Í ÑµŽ|‚ëA O-广¬¯™ó C 'œÇ•2´³Y+½|Zžh»G¯¾o~Žu_iª”µ˜#eÊÝ¢dréfDÐdZX> +Ã#~<µüì¬ÿ:³cÕ¸‡­¤¥ÎÜ{/]/î¡É}¸çk/N~=îýëá8ŒóãhöqåtÿOÞ·.7v#i¾Š¢~LØEn‰‹&±^OÏvïºg¶»ÿl;6N‘§$†)QMRe×8ú±ööÉ&q%Q"‘<ÊÛînWQôeâC"‘HdÂï‡}¯È7™Ú£-ÛðÏ#³ð¸ßÿ†‘»nþó0Gšæ sš¨§3ŽKfœ·åà1'ó0W¶‘ôÆCg¦'v½8>=f™`´ÚJGb Ö¸«št:ÉkE GŸƒ' 0¿˜ëWõH¸Oø;”v pÇq"LÏZ›BPVõ眃ÄÄR/÷"éÕ ôÊ8e‰W*Ã8 e‚Åe8šÛÓ`ÞxääÙ9Ã/´Ö:Òë‰ãN­Wt202ä;ØZò3tÎ?þtÜXL ׆G½ƒ,­¸r—ÕñYFw»ŒÆ9æ»Ï“–0Ç Ï=Èï@-§žXÀ¤€ùãàu |Ce æ¨ÑjD6„¡`ƒ‚‚wÑÅX›ß¤å›"¿7EƧl‘RІ‚¡cu~[æÙLÉçÀ¶ bœ+§>no¯úýâi^ái¨á0Žsî³ðêui&s+Wjm¸‚×9x‚ø¬ÜZ¾¿¾e˜Ž@’ËI!\†XN|žíðˆ8SÙåb>y–ØmvC§¢íÊÀ#?ïnx»þ¥ß|ÎT@Ÿ¿6^äH¥>Ïéðui&s+Wê™$ñ÷à‘!dáù¼»âÍÝ®»Yþ:̆¤¹•+ØÏ­ץ™Ì-'• R7ò¸œ `²ði‰ö,ĽxBŽÇiÅÊa ¯r°‡SÞ¢„û;G‘ ôZyCÔÆ)ëK¤Ñsç8   /Ñî@aïžEÆ.[Ëëî²w9"ž·-·K8"ñPžÓòúîe¡½˜/Ø5˜}W¨Kd¢Ú(hÂKƒXT݃£lx§¡D׸’te€o‘ÁÁãeùµ¿ží6q¡ŸÒ ú#.p¬±¼ 28pœ¥ÚÀNa+04Xî,<_ÑrÔÚ£Òö:{ÒJO¾²… Ñ#³ªpœ’¡ü g“’×·˜cþ;¤£ #{%nÏ÷xž)¢€¸GsB9 ŠÌ%ÇqÖ~©ËÜd`t-î½8xTžª&ž5ê ®&\e31¤2¤t`´÷ÅRŸ¦39·™<ü›Rý.¡JâÔãWœòtÇñË6}n‚s¤ká°ð„"u‘êúñ¡h­öqý;¡)~ã8tq;1‰ßùеi~eá)n*®únµ»š_õóŸ*%QAH#„•šÇi劆.AblÛàlÈÁãôgu-à  _=Ä®DÈ;wN”¶eè/‚mñ:‡ƒ‡Ý$!¡Ò~×­ô¦¿îË.~´=_Þ|Øt[<¼Íww›þ|ïÖŸÿ“­ï•›Ì^0ñ](r^Ë´0qœZ~Vw#—ô<¡lõëcö#Ý EZ“ t 2(eˆRCÃ8¯ º uVCg†ÇÛz&It»êv}ž’CZÉ26^#!”úò´+Uÿ`Ð%EØY΀¡ˆ³$¡¾Y8•ÈùRÑ îÍÁ#§–Zïœ֕¸wŠ)ÁÁMÁÅq`\±wsEÌÀn¥ª?õ<ì”âG[¹¹#­WuIñ´Õ@•ÔÆvúFÚ2ම½`áñEVýþ¿Hª‘¸òø …Ó‚ºtñBËõ*36;žÃý ± Iwpã¡h ãà±f"c¥ÓYŒ+.îV‡— /ý:KcRlô«ŒR¡ˆù)²~°}ƒðÏd3”{¾#.Ž‚NI©(Ì8ìÔ D9Æ2`[Õ 6ÁÁ£m™ØC›D 9€R`©*PÃ8)ýD”".shÁžpzrè“ò¼/”–\63X•vÚ¤Aã¤)w(ESzUÿ°ÉÄŠ…Ž)ôÈçQ³Nš52À: $ü»>=§sy ë×çáa?4ɵšO®sˆõƒ_q:¢÷Ð0<‹6Tà+C’·à<<¶\ìáÍJB³ ¬A$w ã@èJ®‹Ô ÀàPÇB)?àPcÔâÁÏTСŠíÈ]±°C²2$QFÔŸ{;eîŸ:Oê»O}*§ý!O/qŸ>Žcg=•á*aýgÆL<®ôêžõ¿îfO k$Þ|‡´$ã8ë&¹ve8ÉÀëd/ƒ‡ý' yåˆä4¡½Ø* ýMO¡üîPzEŸÄOb æ›ÇÉ<³I,3tÞ”@Fq(U4’wQ9hë7àåáqUx¯)ÑJŒ ×€çK 4N¸Òžbã*¸k°­³ðØÂ‹þY„Z9“^„#2 â8éÔˆáÀTè`Tý gáñ&<Ó9BÈS-Œ3lä©…<—§ ¬¶A´–ƒÇ™2>ó®ƒZ=Ï‹ÈXIù$8N_`Ú˰•[ê ààU)^ÿ,Þ1èÑz5¤§pÇÇܾŒÅ/ÅÜ|覅wÏÁÃ.}D•t¾µ\|‚c¥âJ8ÎkU)pq9¸O)­²ŽIB*QZŸ#¤zèªÃ&·ûªë±¸*áÅã¯ý³în·ÞλQcÚö™[DËÅ¢¿™ »…ùäà1å fíù¨ÇíùýŸñ'U®>oa&3¤P²Áí OñœÙZ%î}d¶ ‚Š÷â8ã}IûP„Ù 2ÞX}¢Ñ@ANàïqÈ ŸjESŹ´â†DÁ`u±ó ùÅû[‘—#E‹~-ÖÃ눗îÖáôkKÜ3V:­©uƒãbïŒ:‘Ô,žÆ…K¶½ œÂˆGïÇ)Q7ªòDÉÍÓž üLÇ¢OIÜÃ8“JØ¥/Ê&1“´þ\&žP0ëÒ£$ô øX …ýg€ºAQAiጱ4n´´ æñaˆ¤×qœªF}š<ªÖV«ŒE=±éª˜±¯@P«,–Ãôº<-&²š_ª>N”þ4¡¿èåk…7¾öƒ‚›C)úBÅèhÿkì¸ÝmºåÍnûTŸGªÚxbÿ4B ´V@M¾‰•Ú 
Jmž‰ô5ñíºz–”@%z–cþ¥¤´ŽÆ£„ndE¡HPàÑšQ®Ž3ž_¦©.A¹hÅ,ÝÕ/;þ.»(4Ù wH_ؼïæ3ô²~ý4(ØVc%Á€ú£ÇqÂ4²9¬E@JЂ.[ø‘5²¦ì=À /Kè b·Mú+±Ÿ¸U¥[7ŸÂLì§™‰ãšL0ÈXGJ[¡úÁlìïr˜ˆ>ë./7ýåCRº'öL+„ÆMJP§&§ØYVHŠ¿× àh|F4˜óø{Àâ*jÇÎâ…Põ9B}€¦Å๘‚S}…%(š/ƒ²x¤h³ì=û£ÒÅwœÔRw"(ýÏ¡.®S V>ÖUâyƒÚˆ«3“Š…% &ŽÓV¶YñYìÌÇmdƒh> OÅ×Q³÷]Ô$ªtÐ"±]z$'É>;Xˆ*1¼’r`º'užÐäBoÐ"qæ‡pÔ…Ó§ë=ŒbR4µ :´õ;{ :$öÆ€go©e 0ü Z\Õå𕃬zžRÏŸ_DôÓ÷_]x­=Pþ Ž3ì[ýZäá9µtÒãÛ­EeA)KÇ?^Òà‚—S[?V—jK\r&x¸Ç½n®únµ»š_õóŸ'4( „•ÄÏ<m(NªØ¥¦pð‰V‰ >)8x¬®|}OîžšP..s‡×”0€'7nØ -Ãó% *§È‚xVdAï‹,Èã5x(êw‰åárÅ "Kñk›î²ÿˆ^=~v½¼H¹=¿§ç‹WÒ¥gXãgýAGÈ‚ãt¨0uá1„1¶ÓÂÁãu±¢@œ}ÂÅcØÐy’ žf¡ØÓå:TÏ—E8ùÆÌÐm¥x(twŒ…ÇVáè‘3JpÄ„šx³jèCýMS¬tQÁåÅ‘À5  /u¡´ Å´éRþ¸9¬ÅT5Q–‰'’Møàqß7:›zê&ËZMƘôÆg½xÉ-N+~_skm¥ú/£hÒêƒ ¿@âÆqÂU±ú'oX èõ+Õ2ñøê{ÿÓ+‹àˆ+ ¼fO<ŸÆi•Êlœ@`n#O©\{?Ô¤üP ÝàPÍÂ3õY÷â&Ù«^n€Ö.X2„‰ã”ƒê&ˆ»nòÑkÓ`#âàS¬ÁeÐg¼ˆß;ê–¸6±ñ²\hâeâ0…™xÿ7‘Å ¬‰ÔËr<àà)Uý…R!qâ°`¤ ”Ѳ øbí®êP7[#Tƒ ƒGs“Eoú]s»^-矎©¶[­Ö¿ÌðÿgýÐÄt#˜{WÑÇ»§1µ&x"‹",Pu‡qž÷S–° ¨á „K~ÐÁÞ’Q‡X:V é¨ÑX WÙúë†3Ù èÞÔ_7<¡Áfyÿ2)¤ë øóÈDÜã‰]~—è7…ŸÎ¾ÛwÊèg÷÷È/c÷Ç¢ÕòC?ÿ4_õûrèÑ^ºt.¬ÆAÆ>ë\P^4 l¾žÀ]õí‡ÇÛ&„Ý,磣lÒª`ƒ#)‹•¦¯ ìlÞáü¯/ê*ö¬ït `ã:c·ó«¬ñ|èª~OöÍx–*ý‡în…»óiÐ ¤5¨„Ó6Xârg§RaÕœ½íæ?ãâ9fotu1ðZ5°]<ì`T¢«Ë¸Ö÷}ñRŸ¶çóõ¦_Ç]GmjAh3&Ïã2¡,¯Š¥4ªD'Øäe >è…žŸ«åè;2 ç°Ç»‰wd÷›û¼W‹é)ÔÂyéQ&pç„(ש̒á ]êYxBéiûÔž ­¾´Žg/ˆjÃ8˜\2`½h@žR}Ñ(JbÆž÷Ð8î„:9­xËÂèT`áñŬÁòA¯V·«.¥KâPb@h8xl±ºK ëzX[a(¹Ût>,çƒr‰mÖÇÚ?—$% (g Ðƒì²ÜΗÀ‹WŸ<j8 ƒÖˆ›À ´S ç”@‰ãø­sZ“˜#L‹À…b½º>d‡£và€N¬ÕÆS;^ˆÏN«Ä²8œR&ÒÍ N;Ÿ²ò·ŸðÏ×sqL/»S¤.÷@œãéÅ«Ø.%Å8α/¤ª“—)@õÛI&×Â_ØWÈyªTO(5ÖYR!]pbüS͸͢z O&ž ËVta9a ÓŠ2úŒ2’$¦P*ÝÀhœÂqŽ®á`á Ç‹*~É:"Cÿñ8xç­*[ê¥,¿‚Ý€<¾âýÄ <•V^ÌÞÃCDºnÕ8΃o`Ø<æHlýégà B–¶ Û‡ÃÚCω¨DjóÝÎ¥ñ(бȡ©x‘ÅYØêP\<¾ˆ§pÔ`Î)Ñ/–»A:­N-qÛw¸Qð5aw¯EXhc}žÐ"Gáðh6{ú»ªÆƒ0ç,u2Ò t¡^IEIÍ€ vv`úE,wHñx7†òg·w«Õl¼<<Ž™´¸CIMÇ oZä*L$4C ©X # D¤ \¤«kQû¯)•*PÈ‘óÞM½—(Áå|ÀR70<Æ -ìõH›Ø}‚ØK-CZ˱»‚(Ç7¦ó±ÛÇT%4¹uõùÁÁãtäÓºÿt–[ŒÚŠcžv¨P ôõIטŸ‡íùR™êÕ˜x”<ÍÍ å<ö°iÚ¡VhO`ÆqÖ…§Îª/×ïæ±ŠÕîÝz~‹{Úªï¶ýÝ^u ì…WŸZÊ÷anã£Ë÷~ïõ‡Þ,¬z.ºÐãׂ‰àE×#ßÞn^+þ®]oæýŇnµíÿqT;Á £2´s¼4ñÓÙ¨³øiÔI®Îvë³ùÝfiЏìwÿ|v³ÞO"®ª¿ßEÅCæ¬r^g ;ìÓ0ÐbX8òôj³¾YþÇÁjÆŸ³ýêò_-Îÿ€ÛÎæÛåv÷ÅÍrõå~ÀWÿg?~ëXí‡áÓßþ{§<ΦFÕvËUœä/þËž}{ ?ñË/įs“úÄ—oxá¬{öãz×­.âj{{öÍ:®F\âíÙ÷ñÁÇ|¹B »Í]L‰)€8x˜¿*ý±Û^]¼™Íoïþð‡K7â+œè§¼ë®Öà§ßvÛÝw›õÐ6ìb·¼îÏÿ~!ßž ùÝÍ]·ùôö ?ÿ¾ø—¿AÜ_ãODHðïGrþÞþ—�xsµÛÝn/Þ½ëæs”ô)uÕíbfä;œên×½ûþ?|=‹,»§¿ï¤àM¿Ú^ü¯Ÿ¶¸ÃÜ\øÇ ι±¸×zœÚïÆypÉñíóOûa×ß^¼Ù¿Ü/ð×üñ ÊØaøq¦ä«¿íµö·7gÛü ¾Úð—u³ˆ¿s Ìßâl~*¼EF~¿Üþ¼æ÷žèÑCÛ Zz»_ëÿOؽ—SúízŽœ×X\"?nº›ímû#2\(ñO¿ýÒ­V¸’•qj±ð½_@Ðpµô¿î.Œ÷ ÷Åàm$É Áðñ‡ÇM@|R¼âË ˆoºÛî=®îݲßéáãO<¾†ªýfÜ ÷š|ùý¿ýá¦{¿ê‡_A:ÿÎO¿½ùowËUäå›oÖ7[t¸ãÿåÁ•øf,^€Ÿ “õ}‰–qóiø`l>ÿøç±#Ù×ßý)þíß÷ûà·÷žøŸ÷Ž8~íºCó¹»]uóá½,Ö‡zùŸ7ë_nNá‡?ýpÓÝn¯Ö»á¯«õÝ⛇HÃŒñ+hPD´§Åÿråòj÷Š*þm½è¼‹~¥˜c_ûk$ï›ð+ø·øÇ÷ݦ[•f©óq™Ý®–óånõé‘„NGÓwÔA𸠠·awølãq޾Áu{?zÌèô¿¢Rãf|…ÿ^o>Íï7Á¯†å‰žíàm}õÿÍžs¶BôÕ?=Ó?ŽŠ,Šôà$àOØûÑfjØÏã/ÙïÚÿ¶þØ_¿ï7oÏÔÛ3÷öÌ ±Ò›ú`ù¾|p<žX¼äBÇE¢ÓòåÏú¼sñܶ?ß¿Ž³:«ÍjïÌË#ÿéì_—7ËíÕ´3ÍÙÚºsåÃÿý?Û×Ï^NHa½VÔ9Ü <7T{Zh R¼A­ÌæàaWI{%T‘{ñõ¹pTˆÒ”±/*EG'7ÚÒ0RÀCj߀ <Š÷÷âé–È«ôByáµ î¥=úœFLÈU 9C/è8x´«j9žgÚ>Q+÷ô ~®©—™ ¡x°²,Ï—%Q° Ex”9í‚oJ5óñW[4_&¢¶'Þæ6„èU“YÍÇýpâ8èß÷ÝõÓ”©eT•%Òå‚ZÄîÔ´«šªf~ŠMâˆá¡þŒG<¬xB™»:uz¶¯;{xuIxX¼ŒÅÕ(Ç Ë}"2‘¹ùÐd‹ë7_l‘Ïð„¾üÐÍÇB±6™ë&ÏòÍiÒw„ã8E<ÿ ¼äÉQßóçá1ÐîÕÏkªõ„jÁ» ]º À8β)RŒÇ F6˜Peÿ¾)ò˜•dUZa2–Ãq>Ýzz§¤k÷œç‚rD±¦þÜsð8YõX÷ºRŸ8RP.ã´÷@ A³€EˆœÐè+Ÿ…Ç^ùÏŒ¦N«.>t“FЍ8ÎI[ÕןNTŽ0 ëó€…'‹ýMÒqÚÅF¬-–ç)™¬’R6LnçCUF4 +cvøÙ¨,“V–ÆK ”ÍÂqŠýðÿ3q•#“k°/°ðpíÁ|³¾‰m3O¯åJ9Ø'ƒ¡àã¶ -ý|þ惃¶ŸƒÇÊB‹}½^Ê‚´²ŒÐøD4BŽȩODüúõ=xx¼hQè5¥N´ÛŽM÷UljP茟OØ|p²El‡ƒ‡]®ï¶»Ùe·»ÚŠlZe ЉNSÇi«ZTò9” !Œlà±ðØÚ1üçÕ(‡À;tš å€BŒH¹2KœËZÄàLx>' -ôuw·»‹-*#âÞVhÜUPœÄq^ÉÚñ{.'9èÔŸpž tíþZõ3ʶ±“·ðAP€3Z–Yå\Ê2 †3žB'óý:{ôˆw¬J,½÷à8i]íu~ 3(Ý`7gá±Sªjt——›wÁ~íã¾.£¼^³x.8v"F1Z2@Úk™…ÇÕ*òÿØëuö³õH¯=!ÑÕ òÁã8<Ôº)1N#c>>-ZDSYx|¹ÅûqÙÿ2è‹ðd=ÄÚñ´qÁqƸZùOàa>n qð¨} ^0æERa#›ÖnLø´ÞÅ&†qtѶªåˆÏ´ˆ}qðHS"²}D—ƒòˆõ{*8ãµÆå ô'¨Ëç|i}8 RçÔ {¯t˜:Ù0:&nm¼ÂÆÃ&2ÙøöV—ˆ‹O 7l‹€ »íÍ«Êþ´ÃCÎlyóaÓ 
J#V‘åœñ–ºšÃqÖ†b}Ñê²6_&§|"0ðh(’°=M¿Äýâ˜ü*5µéÅ4_áKX…ˆÍ ¬»câëWŒ7ËukÆáúAêXäb{xU$u».Gó屺 àà1“£¶ü”ü½ÁxMã jrD¾.¾ƒBù`z‰³©@\üéè;X“†&¬ Þù,mLuvrJáTÂeT&4X¡ < |Ò½â6;åµ †rOpœc?É+Á<@ß`Cçà ¶ÄRý¸½½ê7ýʈµýoãåsà8¡ExÏ©Ücàt \uvÚÌów1Ã#·Ù½æ^æÆûÌOб8§µ2Íé†qàE‰•Ë$" b¢Çc© fâ)²ÍÞÜíº›å¯*3‚P(e%HCAD®²‹]•ç`>Z%Lƒ fà‘¦Ì‹,Z‰*­Ä˜PÞHA€–·"ÁV6+ó!ÆFEõ癃‡}™ûªÊæ±KÚìvÕípÈõæˆ"!VÌ3D p'¬,ók:#óA'ú:œnökøãŸéñé_=ê´UŒ1Zo)šâ8QæyÁ©4e •²ÁŒsð(_ìRíÈ-Ô»Äí”!Ö’å@k/)I@%km3«WNçn>n§tFàï Rj•çÔvD\=>!B0i……øœS@sŸ Þ¸U t¾$Q–úÔàà™R,åa«ãGëëÙ^Û³¬[lC¬7 Ú H‹‡ãlÐmìßù88å­¶ÙGꑞGCõ<>á*óÓÙ?/GvÞîs$cçJT‹{qÛ?´×=xgß!Ýëíâl±ÜmgÏæ‡=}Ï^ïÎ{T>'½T´|¾~^%å\ñ—Ê~rOH\9°!e3iRn)’ˆ7A÷ZhC’\<ÞÔ$É lEr#hmÁÐhƒ¨Âg( Q"[¨ú½ÇßcÐYËÁ£B‰Ä|r!¾;:b¬H6Ì ½‡`,@†X 2¹ÓDœR$˕މ6$ËÆSî™35clÒ—ZzgIጹTk(TÂåë@Ú„3" ]|c‹¾29ȱN¨hNåâw&×|Å]Š6¹bú&N´‘JSýWö¸õäÊE²%™$µVD‡¡qœô™Lª.J!reK^? yü=àœÎÁ£'—\•<µ;šAñº6wö­ ÞR4qFk‘!žuMh²™‡Sx¬T Cûžd‹‚X¨’†¯’wûÏÙR v!Ò(!äH ² i¼ñƒÇŠ"¶e¯ÝEžúÍš+fdà÷z’9w!ÚÄ‹yÈ3è&´ÑˆFfàÁÉ9‰6cªùàlæh\ A2E똤Ÿ9™rð’) –"‡ÑVæH¦Û8ÃÚy/!1%7"ÎUBÒ„É#Y·%{C:~)yDŽ´®Ó‹;NPíCå7ü‰Y CÑÆÄ¦…b¤»:9—ƒ_ˆDøŸ%­6mHÀšŒc¹1¥\âGZ%4Í«\ÉÜTãTZ¢BT2ä( ´ÙôºÊ8ì‚°u©–7ttÿ,4ÔaWRIJ&GnÓ&"&5©`»±t,\|k‘Üžt¢Ÿ€¸U¼ÏŠ%^p¥ÊÐn,ã0AT¿-{—œ :üœ+‹•¦ÚåX® …è”-²2¿+:%^ÿÛÒóàÊqÉøj[Y–¥ˆä„Î 2&š±–%’óàsðX¨t;–ž:8í„ò"#ÞeO3ND^ˆ:Ù‚6r«],û˜Ûr¢ ÷“P8‘ÎF, x=YHKQ#W0~_Ô8ñZ4WÓ’Ž=;/q Ê€ zBì9 b)2äJdÛÁKtFäÉ97%o'©d:¤œ2èS²s2Ñ¢@®0^´¹“ò g7”%^‘¤tMÇ…}¼?s`U˜ÎYй2™6wÛý`Ÿ…'Ôö.ºÛåXÀ~;š÷7»å|xòXEWI:êëdr$²¶–oÁ”¤µ<‘íõ®Í‰Æ+m­"-7ëEÿd}Ó!Þ Œr¡Å díK}! e +ÛlWAǼõ „&zˆ¯2R5Aˆ©úì"EÍ­NÝ-N[† û(«Ù´lCü'#-ñ„)›Ãc…椺=Í <燼pÒ³6ÎR´ÈËÚ6´ðÞjÇ™›G¦ÖÉ(H*ã©hl*5a3aÃ-D’\é¤lâaäãQbzóÐ ïX^Ð+%Š‘(ý æ•í§®8­IÖ&+$ºÕñ_ÄmÚ¼þ|‡Íûn>»Ý¬ý4Ìô Ø 1œ¬ûÚ; ¿‰r¥Uª¬b¨ËÀ£¹Gà›~ÇÜ®WËù'ÞL,úÝÝj7[ô7ã4(šE^X•#GöõNü¥h”+®kD#<–xŸƒÇצÑKí“A]ÐJj“?@-ö¤`"M®”Z¶9aéØy;äà EzûLÏ£~êašWÖ›ŒBZ¨‰Ü+¦Ö’•¢^®"Ú¤CÅÒeÒçà±0µ¥Lfi³£H}ÐYšsÌ[ÊÏT|-Š .G ß$m!¨9q'Äͥ¦_Ä˹nµÝô¿ë·¯­Ù‡1¯õqÊ;O+2–ߌI!d¢“ðsµŽ àÃ/{Ëdd¶rTãdŒÍy†Ÿ=6gd÷w›>=atÛXisB«&UÏ·0Y‚Uc¢`scÛı@8—ñæ=&6gâòýuzºèy¶x¡UdˆU‹…Û¼f¨D›—„ .à÷Ÿ<]·ë_úÍÇmzÊèð¼øÕ³ßi5­³Uš<É«Á‡ŒžíwçËùíl>¿Ž3£ép¾‘û³ºù{]‚jÜo³4ЦXðxø™)“gæ¾yUÊh:Üo}pe£*š‘Ž)Z56†`r‚¨6´ñþœŠ³Eãq¢=ï;0%§Œ¾6pÚäÔDU˜fldŠV‹Î£2â4îÀ6æ÷"€}/™nE^jgsDÛdMx-•ÏÀãù»ÿÐcÓo×w›9NÚ‡åÍrè—ðf½ºÞžßåü(çË5RÁÑ$÷Ö‹œšØ·$“䓱eí¿ï¿÷Ûå‡~þi¾ê÷4ý«œ,ÞÁ¦™OT{OTM15ày(ãÔä]“äfBg%*yvõ¦ûé½Ü¬ïžÜ±?v/}whòÆ›‡g­M‰3‘‰}²¼6T¾ŽÂhÁZ£% GˆJ9xœøüXߤ—Ýô q‘f@9g5雨2šË…&dfÐâÖƒƒ'@‘ä¡§õõÝöêýºÛìÏﵺÙnæ³»m: T9ɘã,¥ _þ„ 'öy-Çå|ØJ˜. 
O(Sc„¡Mâ~4^{*¯µûÇ‹\a¸»D#"3dhñ’›ƒ‡Ís‚NŸè’ˆ/Ø*TtIçÁ”i[ŽÍ ôA4Èëbá Ý…ÇÛà¡ç껫õÂc~ãlŸ)<*’¸ð¶ ŒžlÕ„ã4»³{m çcorÍÁ£uUsð¸¢E_'D/eÇpœg· ­É^𠘞©§†Ÿo–9*$r ¨ =ÅIq\²ºCÀ$-¸mPšƒÇÕ]ý÷¾Õn½ëVƒ.‰Ó·ÒI<ÈR1gÙÇÆ:¼E(^A>ˆ¢µ¸ˆxQ;ŸÜùæþ&CDj‡…£g>>ÕU—?Ÿµ±`“–d ï8X«I:VñxSÕ =»9oï5;è”8t£Ùõ1ë”Òi'ÌDKPŽÆÈ•aë 4ìl‘ÂÑx´PS­Áý-X†‰èža¬rÆP9$8D]gàt3dhápð¸–Vá¡Kð ÕäÛœ g("mÛ†qZ‹‰v¡¡y°]õ·¨<<¾PbòÀ5”°ŽßQ¥ •‚ÖèzRÐxÌmh#x|fHQ¿Ì ki%FZ„tts¿òŸÔ][s¹±~ö¿`éἜPÆ­qQUï&µuN’­ìå%»UK‘#›g)’áÅÅåÿ~3”,Éâ4z€+ª­µD‚ï@£Ñm¬“”2®}bffsD(CÁÄ“éÀ¡gsöL™’Pf¬_攕øxÏØ×4©„fP¾Ê×-Äv³hõéúõ©ðDÀí¼ÌtàÍðåOžxxœËcˆÝÚ3…*B¡ pY¬µ¡%úBÙÍC*›ÓñK]a®`á ­C|øj3ë”êû•ªñ­©bm;/t‘•Õ”gÏT鋨¼=S¨& Ê{𖔃š«­Ó…ðªÂ^ƒƒGë/`+žïåˆM¾Áׂ·äÊÛ+ó˜¬,gåkQòð°#^Ècv..¤'ž ¡Rи:Ò‚â¸Tø†ƒËñdy´0ºEx@” }º×ãôúî~ù&‰­>©¼$ñd{Õa8C¥+Ì*,<õŒÆ§KA­R ·`L»¥¥$ŽCÛv¶/ÿGŽp¨!´fÀ·ÜÝ<ì(zfpÉfñédYÛ}+ðUã²n1œO©jæÇeŽPÁÅÁcM5±˜5·›u«PÂ#hcmTç%)@,d]Ö8ðØœŽûq^ˆô‹_޾¡ÈCQ¾äO–—s„œovÍf?½ÞlË[ìè}{ÿØT:¡‚:PÀX"ÑÉj&*}41ª‚yâàÑüüf›u*|…Æ=ä~:o㣚Ùú¸muKx']Lßí¬ó”, Ež3Ú¼ÜfÀ÷Cnªú“Ár¾Ïby!¡=Œ `Ä ¯¾3Ù—µX— YŒ©°2äàÈv?<® f¨ËÝî¸FB6J÷—'wwm‰°çW²=´E“§„ òƶíDèqVj”éÛ]3ß5³C³˜ÜËñy6Ê6 ÷C…3„()£„RÆ"©m' ûØå˨#S¨àtgàQBf¥v·9ài—p<0ª­ÿKI³ƒš2¤vÙ öœ}e$ñc*ât~櫇>½ÚÏçW³õÝqݾ×À¥ˆoiýñ%m»Þ;îŒAv¢²¤‘ª¸àáÑ&›ˆÃgÕXªµ‚PmÌ QN«mgøËÉŒ„fàô¶x‚*h\¿ÞðÀ{G$˜oÛ²‚TeˆåÏI˜xB6+ðD—í†!Q½’P/Hç„ ÄÙ{ã}´%pÙpºòEò˜xò/ âùôl>oömмï×]Ì^!qù*¬ØÎK‘Í”¢,GWaRààñPÆ"l7›Uªz¡^ÐÒÚà%N< ÊoxÌf`-g”…Ç‹ÒVX^k¡u 5¶ãŸŒV§-G[aÀÁãD«ªZM¨” VÊÀa»ÛÊ̬NÇ*u…M#1E,Âm›Ô¯[iMß«68©_‰F( F O€Žáb6”1 ™¸ËÅÖX7rð˜Bfá¤Ëó¹J½5„V!fGÒŽr…`»xlXÂ" `u:è ¹e˜x\-Ó@,¾A8p¥@c; ªŒiÏ_†JWX,pð•Û*lö×Çåj‘jqÐ-(%¥ö–’W Vײ 2hi*,8x ü²!jX[¡MÀÿ¨õ®ëâì†!/…²ʳƒ ¿yà­ÈŸ~›xP)¥)ap_Ü>¤P;±vŒC›þÐkaðxUÅ8«p'ДY')s†íœÔùCVs„ñ¶}ˆÉ<ûu‹;°ZS>4eWÐÛBéd¨ñÓhÀÀ£UYk@,ºƒP&&Ñ¥ ¶ÓR– qÏXŽ,¶Â¤ÀÂõ‹f»ÚÜÝ6ëÃÈ ’–ðíÇ"CÖ+rjÃvÞº²v²A ¢Â5ÔEìÀé6ýÉӻƶ—BŽÊ~ÛéÀ^V",G ƒ/Í)K. Ú˜àÝõls„ÿë® „2qɼ!®#·í´/ãWäQ8¯)ЇG» &Àõë/F˜+©µ'ðʘq÷“þâý¤¨ö?O~Ø.p9@ß´üû ’“ÍÍÉâØL›Ébyss…_ñ_ïååÃã>ð O&¯¾¿Û6i³« >çþæc¦“W¯þg¹^\¤¾¸øïÿýüqŸ¡üáÛo~lv{Ü6] ƒòäï%÷¨Õüüíúÿšùá¡+>ýÉïÓW———“ׯ'j²l ŽÎg«ÉͲY-öüGýõ>KÀ}ß<ú¡ ëÕ½ú®YÝüïrýë _ÈïÎȇ¾ùê¥gu´3=Ç5/LM~jü|6õ€îzÔÌÝè o`‡z¿tˆ?¹˜ùø§Q;fïS‡ôÞŸ›uÓù,>_ ûM¼ÏƒOû~‰<<Ìn·-¾ü!þïj²¿è.§¸vîÊÈ+ÜBâ3ñ‘?|ÿæâãõ¤|û€±þê«—~æ¹ëåjÄÿ¼Ã±úm³[nßEÒb5ð‰Èß~CRü½¹ivÍz+Ë~øðÄÆvV㉓ëu´»“nR¸xÓMç§àëÑ*EÛüðB;Ü.¼œ5o®§‹ë›05×7znæ‹©»¶ Ü¡±1JöqH¯ýi¹ž­–ÿÆ/½ÚU‘Ò]ÝÅŸZe^Mþñ32éÉk_¯»»sð«!Ÿ9ÁÙµ"]Ü×È8iv°!ïÝîlÁE»øi†=häŒÜ=¤3ÝϽ±ˆˆk7-¯@>2ƒ¾ ë½¸ˆB”Ý?Ê1úQF¼û_>\Ü\¡òfWhmð÷[\œþÆ¿fëõæÐ… Ä.ï[-ר›‹ær×ü‚Œããï¾ 3-&þÓ8hÄ•rÿ ÄßN]s³\5—w³ÛUäÜÇ¿Ô"Ù¹ü™Ù4)˜5¾¹½=buõ«SûW³¸%è~>\|®øÉÅìxx·Ù-ÿÝÑü§õäáöó‡ÝòúxÀ97¾<™Ì¶Ë¿—Ýk±ñEœùØþˆ3âî®C8D¸/¤ Ò{Ê5/¨àZ¡\éx©àZáàPÒµ•çû•§€1ÚP`±¢ç,œ€ ðPÞ¯ÎÃ#uÍcÖ}{ZuÜ­ZµJB­/±9â–Bl'ƒQ% @ `ËW”çááW”ç*8’ÐM¸±D½¢®€š§¬,ÂrÄ€ 3ÿ¶×óŒ*OU×êKQúŠqËþó\h_?ǧ¤xš1°¦üϸ˜_¿›­«f1ù ¹»˜à#~ɹ›ìN gÛíêîEÔ?=ƒýÓ3ÜW““×aò𑧘,÷“Eóv7[4‹ 슷¸ùý'P¦3Òk*žì­åÊ:Ÿ~L>K?NéÇÂùìcøtÿB)( ½¥,e'`« 0ž¡¬Gyë_\LÒ^Å,F‚X«aœ(›€ízså&'Z:åá™nW34÷ƒgßÄ€…xm$Ú xxŒ–\v6ŸÇxŽ—f+Ü·½>«ÏÇAJ¡Xˆ·½%-ôU,<ì17² «lkë ~b—üB%9b;\QûaÙ\«Ð7]%„(Ï)Fâ N"*âüdDÐ|½%¥sZ™ ²/Ÿ‰‡òÇ´®7ëÝfsè¶SAû ©®$ÁÁS ¥Ív·|¿\5oãÙ*¯wßë.ãÝéàƒì'j×Ng¬—‹¨-2#´µ†–@—ßÌuxÇh™õ *C©’Pj<ê÷%D¼‰žµÏäpÌU…ûE‚UR×`(Ü) €óÄ‚¡ûbž”ùyòݯˎ“ÛS0LÔ*áskºÚSlÒÞ£kKêêGSåÕd±ÜÇ ‹É|¶]/WËÃ2^’{?÷æác£’¶ü‹ßƒ#Lª<7øô¹š–&W÷—Ú°“ísœÍIºz‰ï$ˆÑ·°6yRþhö$‹é«°Çt ¯óU6Œªú¤ùó‰œÑÎ’¼I ¯¢ëSÞä>–1Fx)5- ²c$PÁ'<ã˜îÜ0>jÖj\ÑTQ*@ ò¾z'çLÌ(Ä£9’*™u8¢ãˆKÁ3ÊÓׯêVÇô:ÉÄ:§&koQdz ãhD‡O’,•f@&¤à1™NûN·In»»§¯Ï*¿Õ»¡¹aj ~ЉÜÈ‹{4_’å«Ä—6ae«F†v¦*üÓ(š-©è½`°%êÑ\ ʆ$éL®„`mŸÓ¶cÓ’,aÁ¥àîË~ŸnS’ñŽåGª\ T~€ÔI|í¹(̲%)£ÑÑìPÁ›€ZúQ6„‡v47R¥RulGŒÚÓIxøQ(O¶ˆ~Z…ÒÈXe> ˜öÉ‹Ñ$@£{8¸©4úñ‹\›iv87žhNXe­K¥¯ÔZÒ„1Z„Ñ,²Á¹Qm%9/D7váëþÁPkÚ» I”"ˆ C§•<ŒfP0:‰Aª0ÈÆ»j>OããÝzú ;^Z5»Óæa7=}¾Õ?ífµRª ‡©îù2øÇò']NW‡?JH“‚GÊ¢ü™o¦·M—~•vÇî†é¬Õ:í}µN‚’½¯QæJªtåK’Ÿðá’ðp×3§Ô"ÛÍj9¿KÓülµÚü6mÞîºÒÞZÓþXë!@B|†u©‡}yqæKª|¾Îú7&5RIxl-¾àS¢âŸwÚ5ëÈ$Å&;Ý 0–ANÆ 6´ NÔñ´8éKÂj3h»ÝmnZõÓN[§œt A?N‰Züy þhöh2IÌ:kg”KR;ûÊÀhõŸÂøÚ Ýº¤N¤w‘–•?/ 
0šAÉ‚Ö9QtV:•ä@—gТ¹™W‡)¾q×êv;K¶ü¶àÄÕƒ{4_Rås¡_¤àñ&[Ò±~WM»Ž}¼M•‡ãð6áE¤Ë¥tiëpÉK)S¢²¼ÐY.¼ ìC»½ !%ÑËÔ8˜RŒæP²¤uV@^9•Âi¯äè SýqógÆÃ…$„?W-¢ße!! Êë:^o‚qIxاӟb—wÍ?Í~ì蛾o§»MûV»•5´g؃ÕIê6ÉÛ5ÅͶTñ¡ŽÇ[gDøâl[În±[Ú¡]ËÞ$˜…/ƳͰ`$D>ùGqˆé7¿äéæ¿ÒQÔÙ aDJ¼ *ãM—.üôÖ?ÔÙeûÏó+#’rBY-ËGI`µù‹´ðoS2ðš¾Hn£»7-´RVÊ—‚í„g§Ž¬ÁX†RU¸»ÄÂã³Ù€XÜ­÷å E£ ß¡h=åB¡“ÒUö!^¨Ñé<®è°×Q’Øl›öò””Ô!-¶ÓÚeöãHÊ]#'ãlÿ(¬ò¾ßc‘ÈË'õâ>Ó-1PÂÚ§nby![Ô¤8o,ŠU Xx|†t0iN ID{$}³ Û©¾€¬äÒ|·3üÐa»šµ÷Uœ‡|øB' ·›×³Ûf¿=OˆüDaŽR.s¥Ñ@é¥ÊPŸ!°}…˜žê g¢÷C,Vl=yÙÛàN‰9èÉv?<2×|ÐÚëG¿?åqŽ8T < êB,}aëtÈÛš …'‹EÿõxÝæ¶Žôhvû\ö$ùKïámÿ5ȶg—P-IWô x˜xFåä=«1Gh ŒÁ];ö9wvf2À‚®Ð½ àC‚ëx¹¦_m±@¢pÛοR*2 [¡gÓñÀ€ZgI*#z5Þy%5µÄvZ*F>Š|˜¾‚]fáÉáôÉ(…ö†Ðc¼—TÅ„¶¨‚£[æƒé+x»8x‚Î<ºo£»xÚ­tN‡Šèf+‚B+Êa;'ØA:åèÉÁ Ü`<#/Tçí7•R99¯ ]ƦC¹ðl\“‡ÌC}gÓñö,ˆ3ö=† cžèoS»â–Âxc;érT’ÎÇUöòBxxØÇÖçŠò&¬’ ¡Æv(K e°coÖJP6¯×5ºÇØ‚C¾UÑÙG1v• œCØÎj[°³sƒõzšƒ'ïièW^*xgçà?6Ç +3~©Ö±!à´ònlðrà´45z2˜=ív³^¶ºb "Ž#€¶:A9°h]ÚjC6°VÖÁø=AH#ð°ïÕSÊ;e3¼_Úôös¸ŒÕЏ Û)ä°Õxв ]ü¤„‰Çç÷¤¿øê“Ý# Ê‚ÖRDo–É<æ™´M‡jËG2ñ„²#žèe|´DÈTÛΓߵ>š¦‚*ß÷,<>c ­IOh2^BrT6à®e¼#¡–(åÛµÓ¡B§ã÷i$$àq¶Ì€oõFt±w@„7·íÄ Ñ^Œœ äRUèqÍÍ›³E(Íá]sÜïŽi1~³U³;LcëN›ÐfL{`•¢øŠí¸2C=‰¯ œ¡Â¢Ž…ÇçDÿê6Õ°ÒŽÀ‰í´áú^г“ƒ>¸ò½ÎÀcØ¡ŠgÓÍ?Væãó¨Í˜à×Ah1Î}F–m;mtÁ1òá ¾Bo§ã1"ç ÚºéΜñ-|¥ý”šèdw5 a;Ü<7ŨÉA]c€3ð¡K ðs^ M‚±ÆC™&CÜ`òAdMkŒªÑí 4ôxæsœßÄÝãºNŒ\Òz/gM9ãcóÁ,ß¼Zˆ'å7>Îܰ΢¢NÍFOIN«ŒÙÙIÉí‰)D~$U„‡š¼ŽÀ“²¦Ò,Ñf»Y­Ú^·ÌÝ^«×¬Œrʤü`“}…˜¥O€B«†u–¨úTОۢP.„ê«_ÆQÁX¢ª°þ%x´©½þ ýr§Wæ¾/±FEË%M¡Ü`¹Ikòa„ Çžœ;ÿívùa¹j¯úÒ¼Mâ-¢<7cœê0v/§ Ö^þ£(*=+9ýb<â.ðx˜Û6Wí<±á!îf‰¨þ4wP_¯½»þÓ#zÐêá×g‡ßŸ=üNщQ4òÚª-70äµÉyòZ³pÉ~1ž`Ê™ƒ0¬6­,›œf`jj}"N6{AÚ"à @ mÅ÷r>¹ò| <¬O0¦¨}xôáíf³Öô@$¨G :D‹Ž/72z¥Êˆ &zp!ÀcrÞz~UØ2[-UųÞG]j^jbQ‘™ºT>¯Ë dG樑hqNÐ÷€C r6å( r0µ7Í•¸¸Xoè‹õ¾:jF€>ìUy/gMÎ\e9‰»š$Ê[)è û}×Ö¸x]þÌNj)xf$)É阣ÈDRJ.c/Æã}Æå".Í=Fºk ‘]ð›E|Ê…ÔºTƒãñ ÛZhÁwMð³+R»Qº«åL%Ê™3k¹_c±¦‚×'Ác]m[ð9ü’,£W°xj„¡Šu÷ry ýÉÙ<©uª$xbQ;À죠Ð9ïå´SµíÀx® Æ<”g€O° óÞ熚‡ÌX]wZuŒV)Í!9Ç_QNÅTÔèlHµ©à HðX[Ätjc¶N¯îS9C…rò‡çÕh*ÅqÕ‡ñyJpÈS ðL¢RÿçÑ«sTg›ƒÞh0EV˨9jD$x4”[*Ì”uÞøÄ­èÀ¼Lݨ‘°âÒAlÞ‡|Ø ·ô’ãIÙ’r›¯–‹ÍÇõjÓ,vç‡Ï¾Ì}ôš±)tŠÎî(Gr:–[Ì6L_Áó•à Ò³åµ>ôÓÁé~iÇÌfTèŠQ‹_ɉ“êR2Wá2\„'ä^ã¸jºNÏ«ÍÕj¹~? V`Ô Œw‰Û}P΋™1°XPÁ¢‹ð¤|ëš™½¤ ^#gnþñÃëz$” #V¸¸àI*÷ÚftÉ$‰$*±?„†;Ý2t0 ´qÉr„F9¥Mn#‘ƒÐ’$_žø=†ª.&ÖÙMÄ®¹¹Åÿ~^›‰Ñ&X­©,‡¬R!G~SAn Æ¡3x¼°Ý`xb•ñ6˜± ŸÛnLe¹}¬`3$xRv›ñ©¹Yq5ŠÑ(WwL¢›²à^Øn¤lcIZW`‡Ï"±û„Ý\>èúò^‰—Ï(—´ CrÜÅh¸}‘º¸†ìÈŒb^Hð¤|¡‰ëvu3¿FŸ@ïèmã•JŸ~­QÍh4h•ç»Q«Wq±zÌ? g+„¬$xœ{1{ÁpÏÉÇŽÊ &Š íEFv FT…à…ÎwYq»Ýüo;ßKkÅÒïØ ‘"òN¿˜ÙÐùFSzð¤ëµ²Ù&øqf~xå< ÁåÀå‹|æ'ºd ©Â V€Ç+é3ͯõö¨&$ýxÖý|שÎ2ªC6‹.•žÀ»Ó✕Ø<~$VWØF$xŒ}Q;Áp$(’µ˜‘ œ'õ楳j°¶< $xÜ 1ð]×·åsÚ€7ŽQXãèÑ ¬¶ñE×»Í7’TÁ/à1*déà|ŠV~Pãà½ãFrNÜÃý ©p«%Â#}еnnÚÝmóøjð(oÛ£,ä–&q×ïTK\–¾½%88~ÚT0â<^le3Üèš {ã¹s õt÷J\ ¸T!6$À“ÄEù†”u<¥Þ0Ù?)P÷c£¹û”ÓɼØr‡l£0¶B¼X„'MíÚýXq¾øYë›WrîÔ7v•ðª3Ÿãñ@ù ¬¾<:Ú󸼿3#>«ªÌçx< .‡°ÜÐKªUû¡]=ª#õ…±¦z³ÛU³þl²»,-̶0}LåýðmÉéd]ÆmX°³ÈP–/„"ÁCíOsîÇw¸Pf»v¿Ç_éCQÐ5…cj´ur!‚¸BizŽGË¿ü”áéÄ¿[m>R_‹›æQ-‰¾-J5¹Õ†‡? 
¨‘ P”óâÇy*€Z¡ª¡ ™PÛâ!ËaF­ÖTúo7Û¶Íâ‰`–†mµLU€NÎé*+@}¬0¿<âÛO+nà8âÕ°ö ÕtˆÎj-nH^ž²—˜¾Â-Á²,âGþ ³$º:ŽI©À¡âRù)8­/“á×)ŸìÒx=¬M«¬Vì;’3TA>–0s<º¡Ž–ùæZ‚ÇåXǤ$Ë,KÏQŠsÿ-½uuµ}j–€¨MŸRÒùäX][3ÒJTÆÁEÏ:îåB…[üž`¨«'(“ý ð”éÕÌ?ágNÍmÕÂRe¬Áx6KÐú …OPelÁC‹£ëPË$,%êÀÆrV弸ß`!¢÷»¶HNWXë‰*+`c„G|M?ÚízR“ƒ'lKY_5É$ urƒ»S–¼œ´2Üå­½ OÌã<í@Fo Ròy¡ßË™Tê€pS.W=S±—ó¶ÂŒãq_yÍ {<É< úP½jí°j);,¹È”íä|ŽhÑ©äà ®$xÀä >ÎY¼xòÓY³¸Y®;FF£TdÏ{ÑýåMÁAŠ6¦9xĹö_’c»¹Û·c ­V$~º^h pC·â&O°0/«%#(ߌ^„'(Í=JˆðÄL'Œ‡àü“ÔŒ©¥IŽEL7ó(Š“ÐO‡è#?,ôôNH† ‡d(ý|2Tÿ×CÄíj _ƒž ½Ž.*¼ZÛóeðÞÞ-W}û¾wË«/KÞ† 8=Ö'Þu<(—éx3iI åg]‚Çæh¨ý¤Ò˜p!õÁN6²–€.²UhœFÍјÑÞ›ŒL<7dd$(œª@7 Ƚ~åW&ÍD)ÑÒ%£¹v!¶OZÌÑÈû”å!+ø¾<ò6‡J{ò©cb]J eRphéÎ"æÞCN äx¼øA…Ù–à ybCjdâŠAigÐçHJ½xÅE£²“T€Öº QP P܆Ys·¿ž-ÚU{õ`&™¸aW™’·r˜™úu§Ä¹¦uýRO ”g,Ž&¤#;Jݬ@„.|¡µåñh•)¬0]ÉÌjÓÊ&H‘)ÓÉ ¾9Ÿ?äì–@Mø À“Nz‚óüéý鋞Èèú^T ‡”w*O´¡<ƒ*ŸŒ'ÂT¾¼í!=ã6||êKÌ2ÃÏÐÆ)&³´“S'Ý`f¦µ¯‹å Á¾PôùIE&F‘TÆ'Íí·†®Ûm¶PENê FªxÄy˜'itÞì›Õæê±V™fºW!FÇŒå ùBQêS-îkX pÙ,÷*~¸=°ëªÚ‰ó{©dT5ìÂI,? kkA€ÇyqéÖû;íÝøSZÛfÕn÷3ú¥ÞûbV%UâvÇÜ«wr>æ{ï7•ÑØÁU8IHð€)j>?nÿ먲¡Ò,çé:jÄ¥_+X0òȧW|⥠EH‡OÌ—xyààîü~Ž¿ê7™é‚§/Î÷JJÐEíÑø…$íÊ·eâ 9­ÑÓÆÜ0êƒÐåös)ÊYq‘¡¢d?z¹0Þöèƒí1nÈøP9FŒã6”ó:æ\3'M·nù2±2<1Sý>æÊpƒ^„èvÇ4Äíäc¾!ÿ¼mçÛ¶Ù·‹³ûUòuk³å ]MnÛ«ån¿ý„Nù &[a–ÇãIJåÜqìÎ;ÞëïY‹c9o'(¥q{n.mž¥>‘«tÿ“b´–‡ºì›ùûnJK±œõnÄІNFe)öܦ²,"üï+¤T…eÑjcGà‰FýcX±Ùíæc»ýЯ~Þ#à£á]F^Öš=3®É| Áš1ã÷uø–”6.ŒÀ“^šoÝDx–`Éø#ÞÁRiE öÅ@¦2*¹ ã˜ÛJŒ ýáx|¾Èp§àÛíݺݎµÑeÝ —’BŽèd¹}‘(z{zJôC?ê º1™CÅÂÛ‘Àã&)n)¡œWg¯L]ÁPb…@¢O‚IÍšû<á=ÞGaÝpK4 ªñÁ{Ë„ ;9Bæx1OTÀP¾Àˆ¸5è½éåÃ,f§ÍÀh í4s¥Br:E7©ysn‚A´Üë»NN—ßÑãq^1}z9“&> }¬·‹1êLÃêÔÔ¬ϾÜÜke’'Ë”f®}ùÜ!žtZ~ÿ .??”òÀ,$M äpÝsÄE9¦v_*@\БÂQ‘‡­­0õø=Ñ%Æíïåq¾ÊóFþ$|3Ú .š³EŽõÇÚF{Ñk¹rœ:z;ÿ°é´¦qnßì›-½ã»ŸÞƒÏuv()ú8¤wÇ?º8üï«×g»Ûv~6¿nÖWíîõAÿ¯Ïšõâì¶ùD¼Œþèhq¾cf»[ný=Ún²ž .wgôù¯·‹fßþñýv~½Ü·óýݶ½|… ÿ»ÿËWT•é\ã'¦‰¸|õû]óé|¹¹˜o©±ÓÅfNù«¶Ùµÿº»n øËh@iãõÛ4÷yÛØ·Ñ¾kÝ} ;WÍ;hñg©«ãÿUMë¢z›@…Eðô]ÿ¾ÙÎÛËwÍj×þý9íš=A’Ÿ/Ý JývÖkàŒ>%´8ÀÕÙ~s6¿Ûn[ô¼Wíþ_ÎÖ›Ã$žQ)v2²ãæk-ºåq¹¾A ×ÛÍzù·£5‹g÷Ý»e»Zœÿ¸Ýn¶?á’øf½\}{øîŸhöéWû×ÏoºOÿøvÝö…o/-ª¶Y®h’¿ùçûèþâ·ß¨ÿ›+œ¤Ô¦¤¾}Ý·K4¯Ïþ²Ù7«KZm¯Ï~ØÜÜ®Z\—êõÙ/-a¾DÛ{u‰»W‹L¡´îæïJjv×—¯fóÛ»¼2°VßáD?æ]s³ð?ý©ÙíÞn®¶ínw¹_Þ´çÿ†¿Ñ¯Ïºüg³¾k¶Ÿ^Ÿáêè?Ýýˈû{ü‹ˆ þKOÎ4†ÿúËO—¯®÷ûÛÝåÅ:ù8Òs¤Ôu³?Ÿon.pª›}sñËŸÞ|?#–]ê„Ó‚¿÷RpÝ®v—ÿóÚCÔyG€¿wÃY"7÷Z§©ý¹Ÿáž—DŽŸ¾üô7ûööòÕá3úq»À¯ù/ÄÓ)ãðƒîÏ}6%ßýõ µ¿¾:ë 4~P^møeÍÑ,âwþ?yW·ãVŽœ_Ř‹ Œ<üÿ`.6N‚ ²» v¹J.ÔÒ±[kµZ+©=öö±òy²°Î‘º¥îÖ©*’‡ë ¶í¦¥¯Š‹Åb±ª'ÌÂlþ6©p›ùÇÕþ㾟ßÑ!ú¾ëµôíq­ÿÂÒßÝ/§‡5KäçÝ|³ï3ˆ~NÌ üôë/óõúû´’•ñj¹ ]X¦}ä}Z-ÝçÃ÷&åà±’,’!èÿ!|8|˜(ª³â7M Þ¥#žéñ¯¿<ò¨ÿ]Rí»a'ø»S͜ߛ¦ßÝÍ“ù'¥Âf|»‚Âò_Òqä¸ þÐ/Ï7ËÁÛúáÿÍžóflÐ÷hLÿePLo ’HNBú„£ýd¦úý¾ä¸kÿáþSwwÓí¾}£¾}ã¿}c’±ÒO›zoù~óèx\X¼ÑJŽ‹LvNË—Ÿõ·‹ç¶ýùþu•ÕÆ$[.¬>{Æþx¤ ÿõæŸW›Õþ¶ìL“_Ê¿ZþÏï_?{y¡"Á–Ÿ‡oñ²ZN¥FYxB…ʦ;Üß@¬¹×ðVZã|ÄÎ6iÜH÷›¢ÛPÆùŸƒ¶EROSÝzž+É1iqB­ŒŒiœð®Â"f‘‘Nª—W<š›þ¶|töø÷ÀÈ2 @7¥ ÔKãtTSÝfrhIltƒÔw_žòúLsc÷¿Hòp´É‹±Î9Œ²ÿ6ÈuƒŒ£ª,þŒt€Ñ„R÷6yûZB§ëQ!úqÊq“&³`î ”Ñ8ní'7ý÷ Cq<#ÕKskÅÝÏúŽèðÇ58צ^D4îaœµ‹‡¶DoÍô\ààq™=xŸªJž§„xTQF‹”oìÇéÜÖÊS©µ[uÉx¤Ê;’ýùaµø¸‡ É›oæËåì¶›¯·‹Ûnq<Æ"–[Zå¤NöÃh•5u,wý턈+ƒ6Œ›¾‡nÿ=>í*Èó aœ¥K—[¶ÿÞ˜N®Hk»aœ¥+8ŸIÆiÕd>£óÚ*v¯‘ëÕù_szÓOÝünÿX”ߌ·#uÐÉ¥#¶6Ò8Í~ªVÉ10Bé‹É盃‡m¸¯Ï÷K—öhŸÊþÎÎÌŸ(ಜ”ÕR:/±}(ÒVk.Q‡¾V§ÝIZcW-¬NÑönó!CzõaSm%4fôØ…gŸ¼*T¶+`%ª L8Ûµ&èì» Íšño…0"yHõ$§cˆõ–u59p˜>“‡‡ÝÃôõÝ/ßÏN*<¤Ý±×6ÅVY)•1F«Œ¬ÔɽŒ“ ÀÓWâá ª° ͸æÂ¸æ¤ÐéÜä-FG§]ùjΡ£„ëOEÄ1BKãÉg¾'¦3º àa?¯±€çÞíÖðÊ´†Í¬4!h ¥•"«¡H=Ò‘6x_ÃÃÃv¾®¿S×a×aú;!¼'•Á©z»2“ ”qúó3vå+ùO7÷ûý¬›oý n û£J¯7lv­‚d+¥0œúžT{9UÆH:fi\ƒ¹f౪üñÔ…òžŠ­-Æõ]  IˆCpBRÖå+9—“œÞN?¿<¡‚c=tÍêÕ5À½ Øä&Ç:¿c[K'Œ/—MBÎÐbrxØa¯×ŸŒ?-Çõ—Î’Ñì™D?.˜ Ûq9 ƒœ~’Yx*ÜH³IÅaæÛm¯4lfÓöàÖ©7šáÁxSÆD:^)Yx\é[RzLÿeÚX,Ü:Œ“ºôË LNÞš>å­]O[>Ýš…oÂ픓å[B_¢wƒ÷jÕ+ Y¤Ö £(@+tNVçttd?‹CÓ­‹¡X Û ôÃÁãtÍõP”o»žoº»¡bÞ¾;ìß~ñ69š9¡BPA`G]'*xŸìUU!U4Nú±žP!ôç‡ùîãÃþòÀ4¢4ëbr*5¶ó¥qÁêšåªr“.E<{C·;öhw”3<j$Æ«Î[·oÛ ·¤„Ž_¿NCýË7oÏJÏ\íÊþT1³X c,$ë…T ·<€[ÔH=ÝŸéö¢@ éˆÝØÒ?'B)Ò|:Ð…™¾G+«¶&¥¦ÉCZtÛÝýûÕºÛ¿=û» Îc¬Ç‘±”‡ ŽL¿q¢ýºˆc=âœÔ–g·|®h+ΕFá¨AºržÑ–Ђ,PlC 
­£§àQõÜó^¿WÕ,qR˜ä\jfíŠIAÅZB ¸‡§ˆcm16jKÂã«ÄƒFöñË®JV(œÀMoÉK=Ð%$¡Êåm'>í$<¡B¯ø1m_ñÿ¬Ð8YÔ’#1švtN– À—&B¿VŠ|® idßeŒ€‡}çø†}¯nƒr…Š] ª'RsEèbù6²ò‘€RúšNÁ3tù“N…¯MºÒ“ð„ªÛÀìÃüp{\KŸtcm¤ Ô¾Ênð:ºüI·*j |ÓhÒÐ’„'Nr8ÓnÄçÞ§#`GkVñO¯‚̧€G;4¥hä„ÄI o'¤Àl×Í—½’=V a1Òà)ƒl0áu¬Ù„  cD›ƒ!-}žÙçN|÷Úë±@iá(@¥Í}‹I˜?õT T›¸8‘„ÇTzv9¦`<ÿó¼Ú¾±¤áÌ'‚UÖ)‚ ¦‘ ãñ…6à¤Ò+ºÅcƒÆ[e(Pɯ%³!æO?U†9¤ð=AGÒô³¯Ê¯¬/DÅxtÐDhB@ ƒd¤ùd ‹âšÁ å OœÌÌîoÞ?ì‡:¬³=\ÎzuãáC+…ô–5ª…:›$t±ÚX +½7$<“[ŒkZÇ#ŒiÓ£)UÆ©Œ>Ÿ2:á A:Õæþ‘ŽÇW9g<w~©ôî°˜m?®fÝæ°:¬;HÔíµŽ!áŒ)(BhWt)ŸO«„¤H×È-µNñ¸šç“ åã‘Lë ÷Œ ËoF†|Q…lQã¾'ÁQ$<®$È=féwIÍwݬû|ØÍ ø7½Âñ¨ F8 ~ïrÂߟó‰B,´‰„ب!á±UÛô*—xŒÔ -E‚h b¤%ȳÉB͉6dqPT×Qðö“ËÅÃnuøåâ“! 5érµ9칋u¿XôêÇêNF¯)ÒHM~m9ù$ÒA+’˜mÒpÈxøïö¯'utröoO?=/rë%%K uqAÞ<äùd1>¹2ÑÚ\Ú‘ñ˜z•@×çyÀKâZgƒ ¬VþÀ0æÀ…H¢M¼…ŒÇ‰úx¼÷̺ɤ¡Þô¿Š0ò©"ø6Yt<¾þäíÎó^ãxPÚ¦XB‚™#ßßWžO²d± U ƒ%ëÓE®kú|~,¶Ò«¶&'ÎRòcá‚Æ*¢ìÉ¥Cnã2z% Ô•ƒî/Ê {‰ÇJ½ò‘’a蕪^…™O#%ÉÑæ‚ŽŒG‹µûÔà`vó°YÁ ‰G>½‰Š’tè(iÎ@ÆY@ª mœB8˜ U‰pÜ|Ÿ)fú´ jBê¡·¡ 0œùD  âÚ\­yï O¨ueÿÒÔ*5¬ ”Öxv K/€Ž°$RÿŠŽ¢ÍÆDÆc¹ÓÍÃj½¼ë#1†(ÒZQ½,Kã¬ðåeôÊJÂqÐÚ “ûhήŽé"-„³-ºÐ3ððæ^vS½Xλn ÙÍóõHgÌà >ÁF%À•Ìnÿšøêö>ïÝãÇ• Ú0„ŠGëZ `z·õˆbTi÷×,ÜÍøÒÂçàà±²bÿ×kª¿ú›~\=ö8]¡l©͸äU.y!œ à ºb#Xö, ¥È†…íQJAx’P1Tè[I˜Rfy‚$íÏŽîdï+yÇó™FÜ/omúJ ÕÆý"ãaW~oW`iº‹Ü«ñ‚ðV¢Há‘~œ7qªCý˜ÁB¦ƒÂÄÊjûùÝvÝÍæÛíz5¼Uîµæ­A¡)Ç¿çeÎsó舂´ æ‘Gq7¨a ærqlUu¦&3®&  ¤4•Æ9[¡A½(ýô¥´˜x|…E¹…’¥—¢2+m¢\tD+åZÞ¾Ña5(VÀÃÃ>gòB?v\?Jhi¼ŒÁ“Æ YaÃÌ`b t‡Ïœ/yêqÝ÷b¡H«pz"qðHQ^õ&òe Žçe¥¯²Cƒ™¶¡`óøÅâ;Oœ¾p*;c¹Ù_èqEµPZ¢U:úq’mÂëQŠS)?ýrðh[­˜È5í)D{VÙ •÷Z« Û㎎Ūgöæü¾KÀv݇ùáÒ`!®¨JÄ ÑóŒc÷vªÎ.Z禟Q_/Ýí¤Àãÿ÷šCΘ•˜c‰,÷º1—{tL#ÇŽŠ³ÉÀ#¹–vu7ÿp©Ä/µB'œÆ<°¾è!7! *·HÏctZáé%<ºëÅÁî}p½ÜÄ)8xûp³;$>¯3YYk¥sΫˆ!O‡H/˜Ìã¯:§ZÌ+–ì§Êïwóýa÷°ƒz¡¦8®&èF­ƒ”˜s‘ƶIc6i¬@7rŸc>4Å|pPÄ0=Íx4;÷d»[Ý÷eŽÖóýEe£‡´1ÌéW«Å| щØ~—fâqÕnæ^ÑæY¶nw8«Ò¹> wý8-5ŸLF2ÐØsÊÁÞÓÅm·Ll¿ÈÊÕv\C n´¤Gú$÷ã\0Õîè YÆ@}^~RvxT‡búf‚<<ì»üë(—¹§×‹Ãk‰øN*±>º -†?l.f­"§3žµxÏ(‚iÀ3È|‚&t<ìR(›ù]·ßίõå:3ñÔ´H¸`#†1-ÕʼL¬¶øƒpîTð"œF¨Ò,FŒ±«buù˜ zí ™êI /ê‹%åîæ qÃQÚý4 /ÆoÝôK‡ƒ‡}ññúÒ´;[ìW³ånõ©Û7l½Äùô Z04`îÌÇ´¶-E€G5Àc”@^Hq³û\=¾ÎÜïº??tûí‰èü—ý¬»ÙŸéþÜÆ¦ãJ²Lô>4“Ë‚ñé[“×A(+”¢ot‚y“¬0ûa~ö¤üåa×Í’º?^|#‡üTKË›©9Ƨ͢´‚" ç/yÞBB½}vŸ‹²yy¿ZwWçÅ`43ÉIÁÊÅ÷MhF§Í Ü*{Šüʵ¡YBã5nE³~Ìa¾H«µY^ø,—³cq²çºmÃ8r¹Ö†BÕ£U >4¡Üÿ¶wmMnÛXúÙû+XzÈ8«÷‹ªR[YǙڪٙTìäaT™-±Ý\ë6’ºmgÊÿ}HI-uS¢³[“‡ØAð;.‡RÆDô•îgµNUæýx9\NN¶ŽòR.T8Jx ÊᄊF9PU2@ ”¥¡§\†àa©|¶òj6¼[އWÓÅø´Ÿ£½|cDkª$ã¦g¾a%ŠF¶`¨4ö-Meßfù¼œæ'ÛÄøY3°‘íÛª‹^ÌJ";ãièå–­LNŸŠ^ulÉúi%­&!¢'RíM„xtRšɬH:Y¬m›Ê»+W›S=Ü/«8ûK$²}³*T’hä ›„\&$µ ý®åâc±º[·{*†ú‰& á!Ò šÀ÷ÂJtn PƒLcѸ!Vá1Éç—w³ùªÞ­—7ÅêdàÉx—„ *$<蔑lž‰.õ‘Æî¹ü¥*MâØ5ÏrµÍφ“bSŒwmÃýÄsñËÑDï\´HñèÆyPW$ âº: : tw»Kàp;4þ(0 œ½HÇÕëp )¶–aðhÖm3jC>ùÝÞ€­Šï6n~PéÞ}—D "‹%¸;¿{¡+Be”$M SrÆC¯’Øn)NtœRîpU_ #?`.…»‹21g¸TŠ‘væ‚Kï"‰H³‚ —*dçgRCwÀá}òRiüqni©V!Â(Ó=ãZ,!:3(TH“&Ô¨5: ô)-évÖ|¹˜–ãÏžj—ùtºø8„ÿ‹ê¥mÐZ¹«ÿÐ+ê„ÄDÝ•#ÁRÑ4SÅ•°ñLÅTÿ©5=Ü,†ù²Ö×§Wz÷Ç¢•¤T‡È!xl Àß™7@>"ŸLd[Œ³!xlÿ¼Ù^ ⿵èÃU>¯vG;ÛæãŽvwÅ…ÈbXÜ ”¡+‚eL³Ï&@JÊ€à«Fïó=O÷]ƒƒFðG”57,H(¦“)H˜ÎŒ2hq@ó4ëöZ «âÆZ$É@ù“yå3X0XÆhrEû£M#âÎÜ•H§‰°â® ÀcDln<šŒ49–Ö¸5|—L[–Î|²Z“Y-MÂ'K” âúzš®mpà.øƒÄ– +B«-׫E¢N#ì®, ‹¥‰åXð§XtŃ˜ü‘²Á‹\®wåº\ÌW¹»òq˜Ï'`áÇ«b3\¹;1±‹Ô[°Ø‚+¯â¨6|_a/ø;ó&T>•f.n»ì<Výðæn1½ùf“ofÅü±Î½ÑbI(ç!Û|­eq9ƒÀÞ‘/¡²IB’ðEºÓׯàA§ÏÀê|U^ÝnŠuuõ#Å[?iÜl)D`߸':3G*cC‰˜£¤R¸ì•9ëy¾\ß,öUEˆŸ2VÙ€¸·$º;€¼3WB%K³ ORSÒ<6â¨óŠòwÐëqN‚GÚö†‚Ýݨ$ :*iø‘›ˆ¨»r$X*fÒpDÎCðpŸ#Û)F“íV„ùb­á,{ /‰¹3?BeRIâ¾’Q%H«ãó#dWÄù•ŒiŽtÂÆ# |WƸÃp‚—f-™0þ«'+Ü3ËËù°žBjWø©aHÈþEɤ8Ÿ^”9*…¦i8`aÆ„ÇFæÀ¡Ã·W°74+9£6`Û(¦"ÑÀ´+Bái.œ3FCðà/ð(xg‡÷ÊU~(p‡TسYÈÎ B&b€¦š†àQ$2vS½Í¡‚½ÁS)ÀA'Á"®c±À´+‚±i˜ ˜»ÅÑG¬-˜Ëéâ³ ž{&B¤TpÊvJ‚¼¡óŽ~ðwæ ·$H>NÒðFè éfA|—ž¦5 ¸dÕðÀYŠžn© œfÝD w3W-¢% 9Ù«î_©R.ú#™ÂECÐØü1Qwåˆ$ÊqĦ‰PIª… ˆPIô>×»|ZNò ü–OfåÚ9ã—Ãw ¯Ÿ wï,敺}aL73SW¼ÈybÝaŽQsÕáöyrû<EǃÇFËr°Ý•qúJwãK$·à0_FßEþ¾€‰h9®AÛß²Ÿ—uÏÈ~Ùw’ïväǺ“\t’lr[d›˜¢ëë|è«Ûù>º>¹øùàÿÄI“eÏþ~å+²Y¾| õÂßòùg|=Ïù²ü¥X9Ñ£¬®êù`oVÅû~¬ä¹ø`ÖЗwtðõ‹_úPÎ'ƒÑVë»/Tq·oÀpCuyõÊäóùbS§ '«h Ú¯˜Yû«îz–úƒmÞRt ëbz½{yxS¾¿æwy9­‡µÏøê 
ì´Î“b8)ëojùÕìr¼)ÁÚnòÙò TÝ6tHØ¢G‚ŽˆúïêÛUoªG‰ØšÏ©û}+Àe18ý¿­Ôþ¨ºÑ1\Wõ¾v:x±þØ/‡%àÛþzTÛËÆG‹«j·éä/‡‚=(æ´u=ÚÀ×_Þc`úû†2_¾ìÕp¨ï“Cñp À_\\ÀËeô²ÇÛ{ÙÅ)Ý…w?üëM]_‹·>®²n6Ç¡ÀŸŠëbUÌÇE¸bÞ}ýsp[ࡹ`ãâj8¹º¶Cqu͇öz<ê+5QV_3IøàË»´†/‹qcƒ]ƒ0 mÍî ÍÌòÍøæå Z9têõ©r÷Ã_K‘Ué|s¤œËj¸^ÿeµ¸]nÑ6É{\þ&_ÿpøÊ_ó«böÞëâ¯åüö“ÿ=×Ì':÷nîrW÷Ó{çtÛ©w$é0Ö57(¾¢gƒ‡ Ø[ø½Ó8 ±—/¸Z¿qµn«Ýã…W€ÁcØ“N%<ýJ¦)÷,ª×åz¨cð*Ñ?0x4‰ä¨Q{Ô(¹$\h¡\ëUÑ & 2ž$2!xЫ|§/¯²†£cèÊ¡'&OMs)„䊛ÙöP‡_ #·ÛJlÛ®[-º—êAåø9­7Š`P Æo eÏßÉPJÅÒÕU>vÉ®>}®6gxÖé­ÔVJ}*”Óm3¾à$ÛéÀpì… ¡ñàÉ…Çöa`wšÝ+òÐ iÛI¥Õ¦5ð#Û“§W帥2ÆÞ¼xtš׽¯"ãð ÷m[¸sµ}IžºÍl\¹Ý>ðÜ.ÚÇPp6™Ã%‡'x±µì”âD»âà‰ry¿Œ(”“èC}ó^&0²ë‚Ç­Ú³„N©7ºåtŒíéÐÚö¿ªÔ<àwéAžµ:è;”[&}!ë6Äq[žXŠò0x ;×}é¸~+ä;òS—Sä\?&HÍ“´l8ì‹ëéâãz|SÌòæþ²ÎkµFñ™ÛæYm`hß V•Û§ŸcÿíŠÄcã¶k¥/ãÑX é] qå˜åœ”ëÕ(Sd{Êò÷y9_; s LáÒ‡V;†'Ò0MÉXr%Æwв ” ;Ö±p¡Ò LvfåºJ×XÛ`g5~\Þå«ËiyU7š›Ëº/þðþ:|ýÒê¾zLbsÒßT†±ÁÏó›Ü­L²W®¥®Á¾•8í9~¸—nV‹yù{Q§hȞ﬛ {!QÜE7Ö_0üÀ0ýþ}ßÚ#¨üÛÿƪ‹s¼V>././@LongLink0000644000000000000000000000022400000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-defa0000755000175000017500000000000015117043044032774 5ustar zuulzuul././@LongLink0000644000000000000000000000024400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-defa0000755000175000017500000000000015117043063032775 5ustar zuulzuul././@LongLink0000644000000000000000000000025100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/kube-rbac-proxy/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-defa0000644000175000017500000000202015117043044032770 0ustar zuulzuul2025-12-12T16:16:48.486559608+00:00 stderr F W1212 16:16:48.486376 1 deprecated.go:66] 2025-12-12T16:16:48.486559608+00:00 stderr F ==== Removed Flag Warning ====================== 2025-12-12T16:16:48.486559608+00:00 stderr F 2025-12-12T16:16:48.486559608+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more. 
2025-12-12T16:16:48.486559608+00:00 stderr F 2025-12-12T16:16:48.486559608+00:00 stderr F =============================================== 2025-12-12T16:16:48.486559608+00:00 stderr F 2025-12-12T16:16:48.487023019+00:00 stderr F I1212 16:16:48.486941 1 kube-rbac-proxy.go:235] Valid token audiences: 2025-12-12T16:16:48.489013308+00:00 stderr F I1212 16:16:48.488981 1 kube-rbac-proxy.go:349] Reading certificate files 2025-12-12T16:16:48.489570742+00:00 stderr F I1212 16:16:48.489526 1 kube-rbac-proxy.go:397] Starting TCP socket on :9154 2025-12-12T16:16:48.490007522+00:00 stderr F I1212 16:16:48.489977 1 kube-rbac-proxy.go:404] Listening securely on :9154 ././@LongLink0000644000000000000000000000023000000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/dns/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-defa0000755000175000017500000000000015117043063032775 5ustar zuulzuul././@LongLink0000644000000000000000000000023500000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/dns/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-defa0000644000175000017500000007727115117043044033014 0ustar zuulzuul2025-12-12T16:16:48.303645962+00:00 stdout F .:5353 2025-12-12T16:16:48.303645962+00:00 stdout F hostname.bind.:5353 2025-12-12T16:16:48.303872848+00:00 stdout F [INFO] plugin/reload: Running configuration SHA512 = c40f1fac74a6633c6b1943fe251ad80adf3d5bd9b35c9e7d9b72bc260c5e2455f03e403e3b79d32f0936ff27e81ff6d07c68a95724b1c2c23510644372976718 2025-12-12T16:16:48.303872848+00:00 stdout F CoreDNS-1.11.3 2025-12-12T16:16:48.303872848+00:00 stdout F linux/amd64, go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime, 2025-12-12T16:17:30.495439182+00:00 stdout F [INFO] 10.217.0.39:53639 - 34080 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001276747s 2025-12-12T16:17:30.495439182+00:00 stdout F [INFO] 10.217.0.39:48486 - 32142 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001373179s 2025-12-12T16:17:48.771060488+00:00 stdout F [INFO] 10.217.0.39:33841 - 63912 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002199854s 2025-12-12T16:17:48.771060488+00:00 stdout F [INFO] 10.217.0.39:54872 - 33684 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002256946s 2025-12-12T16:18:11.465992128+00:00 stdout F [INFO] 10.217.0.39:47030 - 16231 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.000705637s 2025-12-12T16:18:11.465992128+00:00 stdout F [INFO] 10.217.0.39:54451 - 57884 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.000896782s 2025-12-12T16:18:48.765433321+00:00 stdout F [INFO] 10.217.0.39:55286 - 35768 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002159934s 2025-12-12T16:18:48.765433321+00:00 stdout F [INFO] 10.217.0.39:34817 - 58824 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002134663s 2025-12-12T16:19:48.765904733+00:00 stdout F [INFO] 10.217.0.39:51494 - 51787 "AAAA IN thanos-querier.openshift-monitoring.svc. 
udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.000969304s 2025-12-12T16:19:48.765904733+00:00 stdout F [INFO] 10.217.0.39:37228 - 4819 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.000858542s 2025-12-12T16:20:48.767128046+00:00 stdout F [INFO] 10.217.0.39:33809 - 26045 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.000987096s 2025-12-12T16:20:48.767128046+00:00 stdout F [INFO] 10.217.0.39:36195 - 39998 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001082888s 2025-12-12T16:20:55.312212581+00:00 stdout F [INFO] 10.217.0.39:43069 - 56031 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001174831s 2025-12-12T16:20:55.312212581+00:00 stdout F [INFO] 10.217.0.39:48586 - 65404 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.000321838s 2025-12-12T16:21:48.765550004+00:00 stdout F [INFO] 10.217.0.39:37997 - 22756 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001446318s 2025-12-12T16:21:48.765550004+00:00 stdout F [INFO] 10.217.0.39:46310 - 20188 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001549291s 2025-12-12T16:23:48.771147337+00:00 stdout F [INFO] 10.217.0.39:53369 - 19774 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002645546s 2025-12-12T16:23:48.771147337+00:00 stdout F [INFO] 10.217.0.39:38770 - 23471 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002557364s 2025-12-12T16:24:48.772474865+00:00 stdout F [INFO] 10.217.0.39:36241 - 64690 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002926193s 2025-12-12T16:24:48.772474865+00:00 stdout F [INFO] 10.217.0.39:41184 - 36747 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.003002915s 2025-12-12T16:25:48.770837780+00:00 stdout F [INFO] 10.217.0.39:47937 - 25729 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001017385s 2025-12-12T16:25:48.770837780+00:00 stdout F [INFO] 10.217.0.39:58072 - 40708 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001098988s 2025-12-12T16:26:48.781246240+00:00 stdout F [INFO] 10.217.0.39:57349 - 5779 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001691903s 2025-12-12T16:26:48.785345693+00:00 stdout F [INFO] 10.217.0.39:51636 - 45024 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.00594259s 2025-12-12T16:27:09.295247148+00:00 stdout F [INFO] 10.217.0.20:33215 - 15394 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000890612s 2025-12-12T16:27:09.295247148+00:00 stdout F [INFO] 10.217.0.20:50529 - 61785 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00199709s 2025-12-12T16:27:11.302001437+00:00 stdout F [INFO] 10.217.0.20:40268 - 30870 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.002169605s 2025-12-12T16:27:11.302001437+00:00 stdout F [INFO] 10.217.0.20:53743 - 16169 "A IN cluster-monitoring-operator.openshift-monitoring.svc. 
udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.002515964s 2025-12-12T16:27:32.577228691+00:00 stdout F [INFO] 10.217.0.20:40895 - 43749 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001464007s 2025-12-12T16:27:32.577228691+00:00 stdout F [INFO] 10.217.0.20:56404 - 16136 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001761724s 2025-12-12T16:27:33.586814843+00:00 stdout F [INFO] 10.217.0.20:52496 - 3239 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.006266619s 2025-12-12T16:27:33.586814843+00:00 stdout F [INFO] 10.217.0.20:48852 - 17741 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00632632s 2025-12-12T16:27:35.599666505+00:00 stdout F [INFO] 10.217.0.20:34153 - 48659 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001054447s 2025-12-12T16:27:35.599666505+00:00 stdout F [INFO] 10.217.0.20:35138 - 12864 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00117986s 2025-12-12T16:27:35.764083136+00:00 stdout F [INFO] 10.217.0.20:41343 - 39584 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.0007773s 2025-12-12T16:27:35.764142148+00:00 stdout F [INFO] 10.217.0.20:43164 - 57247 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000961064s 2025-12-12T16:27:36.768360493+00:00 stdout F [INFO] 10.217.0.20:51393 - 62326 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000597345s 2025-12-12T16:27:36.768360493+00:00 stdout F [INFO] 10.217.0.20:38166 - 62990 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00079109s 2025-12-12T16:27:38.778813914+00:00 stdout F [INFO] 10.217.0.20:38478 - 43297 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000977465s 2025-12-12T16:27:38.778813914+00:00 stdout F [INFO] 10.217.0.20:42186 - 33257 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001136599s 2025-12-12T16:27:38.945441011+00:00 stdout F [INFO] 10.217.0.20:59642 - 8519 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000678378s 2025-12-12T16:27:38.945441011+00:00 stdout F [INFO] 10.217.0.20:45909 - 6045 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000633916s 2025-12-12T16:27:39.949508693+00:00 stdout F [INFO] 10.217.0.20:38650 - 52083 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000671497s 2025-12-12T16:27:39.949508693+00:00 stdout F [INFO] 10.217.0.20:38569 - 25278 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000836442s 2025-12-12T16:27:41.954797364+00:00 stdout F [INFO] 10.217.0.20:41632 - 20061 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000876572s 2025-12-12T16:27:41.954896266+00:00 stdout F [INFO] 10.217.0.20:39434 - 7028 "A IN cluster-monitoring-operator.openshift-monitoring.svc. 
udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000837421s 2025-12-12T16:27:48.198474135+00:00 stdout F [INFO] 10.217.0.20:59559 - 27776 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001135969s 2025-12-12T16:27:48.198474135+00:00 stdout F [INFO] 10.217.0.20:49903 - 42513 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001075227s 2025-12-12T16:27:48.772105923+00:00 stdout F [INFO] 10.217.0.39:55245 - 57211 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.000517083s 2025-12-12T16:27:48.772211446+00:00 stdout F [INFO] 10.217.0.39:48688 - 40287 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.000596295s 2025-12-12T16:27:49.201745847+00:00 stdout F [INFO] 10.217.0.20:36591 - 57620 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000585125s 2025-12-12T16:27:49.202002573+00:00 stdout F [INFO] 10.217.0.20:33041 - 47834 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.0007798s 2025-12-12T16:27:51.207732547+00:00 stdout F [INFO] 10.217.0.20:52199 - 59830 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000846752s 2025-12-12T16:27:51.207826079+00:00 stdout F [INFO] 10.217.0.20:48273 - 15655 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001147069s 2025-12-12T16:27:57.332505227+00:00 stdout F [INFO] 10.217.0.20:33057 - 20204 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000759489s 2025-12-12T16:27:57.332621520+00:00 stdout F [INFO] 10.217.0.20:39311 - 24093 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000871192s 2025-12-12T16:27:58.338500768+00:00 stdout F [INFO] 10.217.0.20:47129 - 46232 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000648217s 2025-12-12T16:27:58.338570299+00:00 stdout F [INFO] 10.217.0.20:43075 - 60656 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000651686s 2025-12-12T16:28:00.351456532+00:00 stdout F [INFO] 10.217.0.20:55848 - 64583 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001367464s 2025-12-12T16:28:00.351456532+00:00 stdout F [INFO] 10.217.0.20:60635 - 16347 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00196085s 2025-12-12T16:28:12.248248103+00:00 stdout F [INFO] 10.217.0.20:56414 - 54179 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.002105233s 2025-12-12T16:28:12.248248103+00:00 stdout F [INFO] 10.217.0.20:44149 - 55808 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.002568055s 2025-12-12T16:28:13.253592036+00:00 stdout F [INFO] 10.217.0.20:50093 - 8356 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001054647s 2025-12-12T16:28:13.253898284+00:00 stdout F [INFO] 10.217.0.20:56457 - 5065 "A IN cluster-monitoring-operator.openshift-monitoring.svc. 
udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00039917s 2025-12-12T16:28:15.259289118+00:00 stdout F [INFO] 10.217.0.20:45060 - 9329 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000764659s 2025-12-12T16:28:15.259289118+00:00 stdout F [INFO] 10.217.0.20:40965 - 44780 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00080098s 2025-12-12T16:28:15.477383048+00:00 stdout F [INFO] 10.217.0.20:57545 - 22707 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000736579s 2025-12-12T16:28:15.477470550+00:00 stdout F [INFO] 10.217.0.20:46830 - 49706 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00079741s 2025-12-12T16:28:16.480590458+00:00 stdout F [INFO] 10.217.0.20:53439 - 62288 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000690898s 2025-12-12T16:28:16.480675110+00:00 stdout F [INFO] 10.217.0.20:58471 - 61857 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000826831s 2025-12-12T16:28:18.487349805+00:00 stdout F [INFO] 10.217.0.20:34614 - 34043 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000859692s 2025-12-12T16:28:18.487349805+00:00 stdout F [INFO] 10.217.0.20:60231 - 15952 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000945504s 2025-12-12T16:28:48.775727476+00:00 stdout F [INFO] 10.217.0.39:52933 - 45369 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002701158s 2025-12-12T16:28:48.775727476+00:00 stdout F [INFO] 10.217.0.39:55136 - 4150 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002669747s 2025-12-12T16:29:23.777512721+00:00 stdout F [INFO] 10.217.0.20:45302 - 28037 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001812086s 2025-12-12T16:29:23.777512721+00:00 stdout F [INFO] 10.217.0.20:36120 - 43985 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001880488s 2025-12-12T16:29:24.781733968+00:00 stdout F [INFO] 10.217.0.20:41726 - 21135 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001399845s 2025-12-12T16:29:24.781733968+00:00 stdout F [INFO] 10.217.0.20:33843 - 9076 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001260092s 2025-12-12T16:29:26.786401597+00:00 stdout F [INFO] 10.217.0.20:52798 - 48539 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001011425s 2025-12-12T16:29:26.786491799+00:00 stdout F [INFO] 10.217.0.20:49720 - 41335 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001165829s 2025-12-12T16:29:33.838652918+00:00 stdout F [INFO] 10.217.0.20:51628 - 54504 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00079345s 2025-12-12T16:29:33.838778032+00:00 stdout F [INFO] 10.217.0.20:53212 - 61602 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. 
udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00080238s 2025-12-12T16:29:34.847967714+00:00 stdout F [INFO] 10.217.0.20:60102 - 23621 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000575114s 2025-12-12T16:29:34.848093837+00:00 stdout F [INFO] 10.217.0.20:49562 - 28783 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000663067s 2025-12-12T16:29:36.852945361+00:00 stdout F [INFO] 10.217.0.20:58187 - 10998 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000597235s 2025-12-12T16:29:36.853052093+00:00 stdout F [INFO] 10.217.0.20:54113 - 49486 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000404461s 2025-12-12T16:29:48.773153475+00:00 stdout F [INFO] 10.217.0.39:55921 - 61184 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.000882202s 2025-12-12T16:29:48.773153475+00:00 stdout F [INFO] 10.217.0.39:55724 - 51112 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001056537s 2025-12-12T16:30:09.268825942+00:00 stdout F [INFO] 10.217.0.20:35507 - 39518 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000690418s 2025-12-12T16:30:09.268825942+00:00 stdout F [INFO] 10.217.0.20:51969 - 2413 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000768139s 2025-12-12T16:30:10.272927997+00:00 stdout F [INFO] 10.217.0.20:34584 - 52333 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000815601s 2025-12-12T16:30:10.273712896+00:00 stdout F [INFO] 10.217.0.20:38052 - 34403 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001652572s 2025-12-12T16:30:12.279759636+00:00 stdout F [INFO] 10.217.0.20:43609 - 49584 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000699547s 2025-12-12T16:30:12.279759636+00:00 stdout F [INFO] 10.217.0.20:45615 - 40825 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00078792s 2025-12-12T16:30:12.391928636+00:00 stdout F [INFO] 10.217.0.20:56937 - 8214 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000619936s 2025-12-12T16:30:12.391928636+00:00 stdout F [INFO] 10.217.0.20:33627 - 61244 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000630005s 2025-12-12T16:30:13.396533377+00:00 stdout F [INFO] 10.217.0.20:46875 - 45247 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000943934s 2025-12-12T16:30:13.396533377+00:00 stdout F [INFO] 10.217.0.20:51717 - 899 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001297043s 2025-12-12T16:30:15.400430176+00:00 stdout F [INFO] 10.217.0.20:52624 - 18436 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000759969s 2025-12-12T16:30:15.400543879+00:00 stdout F [INFO] 10.217.0.20:57004 - 1512 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. 
udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000864071s 2025-12-12T16:30:22.258607576+00:00 stdout F [INFO] 10.217.0.20:35865 - 18034 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000837041s 2025-12-12T16:30:22.258607576+00:00 stdout F [INFO] 10.217.0.20:33134 - 30470 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001010055s 2025-12-12T16:30:23.263230778+00:00 stdout F [INFO] 10.217.0.20:44036 - 17057 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001031375s 2025-12-12T16:30:23.263308560+00:00 stdout F [INFO] 10.217.0.20:50571 - 9732 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000949944s 2025-12-12T16:30:25.268154392+00:00 stdout F [INFO] 10.217.0.20:54871 - 47080 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000734809s 2025-12-12T16:30:25.268154392+00:00 stdout F [INFO] 10.217.0.20:51432 - 10257 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00078449s 2025-12-12T16:30:25.492279317+00:00 stdout F [INFO] 10.217.0.20:53579 - 17383 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000547414s 2025-12-12T16:30:25.492279317+00:00 stdout F [INFO] 10.217.0.20:56584 - 52725 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000731848s 2025-12-12T16:30:26.495535344+00:00 stdout F [INFO] 10.217.0.20:34244 - 41286 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000596485s 2025-12-12T16:30:26.495535344+00:00 stdout F [INFO] 10.217.0.20:36070 - 13252 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000669307s 2025-12-12T16:30:28.499665549+00:00 stdout F [INFO] 10.217.0.20:45213 - 16940 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000697597s 2025-12-12T16:30:28.499795103+00:00 stdout F [INFO] 10.217.0.20:47349 - 26355 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000858002s 2025-12-12T16:30:48.774450059+00:00 stdout F [INFO] 10.217.0.39:47774 - 5159 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001846436s 2025-12-12T16:30:48.774450059+00:00 stdout F [INFO] 10.217.0.39:44709 - 34868 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001902068s 2025-12-12T16:31:48.776212622+00:00 stdout F [INFO] 10.217.0.39:47090 - 17663 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.00204494s 2025-12-12T16:31:48.776212622+00:00 stdout F [INFO] 10.217.0.39:44485 - 48821 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002123522s 2025-12-12T16:32:48.776225025+00:00 stdout F [INFO] 10.217.0.39:51907 - 36626 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001893338s 2025-12-12T16:32:48.776225025+00:00 stdout F [INFO] 10.217.0.39:47180 - 23992 "AAAA IN thanos-querier.openshift-monitoring.svc. 
udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.00199573s 2025-12-12T16:33:48.777667024+00:00 stdout F [INFO] 10.217.0.39:39329 - 45831 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001265072s 2025-12-12T16:33:48.777667024+00:00 stdout F [INFO] 10.217.0.39:58922 - 50615 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001239461s 2025-12-12T16:34:48.778537588+00:00 stdout F [INFO] 10.217.0.39:38826 - 16645 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002274967s 2025-12-12T16:34:48.778537588+00:00 stdout F [INFO] 10.217.0.39:37145 - 33509 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002103582s 2025-12-12T16:35:48.779496989+00:00 stdout F [INFO] 10.217.0.39:55249 - 63845 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001113138s 2025-12-12T16:35:48.779496989+00:00 stdout F [INFO] 10.217.0.39:53067 - 13818 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001392515s 2025-12-12T16:36:48.778735720+00:00 stdout F [INFO] 10.217.0.39:46685 - 40001 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002705008s 2025-12-12T16:36:48.778735720+00:00 stdout F [INFO] 10.217.0.39:47535 - 63657 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002810001s 2025-12-12T16:37:35.323523203+00:00 stdout F [INFO] 10.217.0.39:56805 - 47927 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.000892833s 2025-12-12T16:37:35.323523203+00:00 stdout F [INFO] 10.217.0.39:49014 - 45902 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001151998s 2025-12-12T16:37:48.778802912+00:00 stdout F [INFO] 10.217.0.39:33401 - 40719 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001666202s 2025-12-12T16:37:48.778802912+00:00 stdout F [INFO] 10.217.0.39:44025 - 51115 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001728193s 2025-12-12T16:38:48.779356658+00:00 stdout F [INFO] 10.217.0.39:49667 - 44792 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.001014105s 2025-12-12T16:38:48.779356658+00:00 stdout F [INFO] 10.217.0.39:53675 - 64322 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.00079768s 2025-12-12T16:39:33.847093258+00:00 stdout F [INFO] 10.217.0.20:41796 - 34510 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000859781s 2025-12-12T16:39:33.847247672+00:00 stdout F [INFO] 10.217.0.20:52176 - 23534 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001313183s 2025-12-12T16:39:34.854474710+00:00 stdout F [INFO] 10.217.0.20:45821 - 43669 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000755849s 2025-12-12T16:39:34.854511031+00:00 stdout F [INFO] 10.217.0.20:50071 - 29670 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000830631s 2025-12-12T16:39:36.859092116+00:00 stdout F [INFO] 10.217.0.20:60544 - 38123 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. 
udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00078824s 2025-12-12T16:39:36.859228020+00:00 stdout F [INFO] 10.217.0.20:48803 - 57526 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000844552s 2025-12-12T16:39:48.785525908+00:00 stdout F [INFO] 10.217.0.39:54151 - 28094 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.000733308s 2025-12-12T16:39:48.785525908+00:00 stdout F [INFO] 10.217.0.39:43948 - 46690 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.00077513s 2025-12-12T16:40:09.299543319+00:00 stdout F [INFO] 10.217.0.20:51826 - 18622 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000712798s 2025-12-12T16:40:09.299543319+00:00 stdout F [INFO] 10.217.0.20:35979 - 31886 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000704278s 2025-12-12T16:40:10.303923085+00:00 stdout F [INFO] 10.217.0.20:49612 - 1281 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000746179s 2025-12-12T16:40:10.303923085+00:00 stdout F [INFO] 10.217.0.20:57244 - 21454 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001281292s 2025-12-12T16:40:12.308466071+00:00 stdout F [INFO] 10.217.0.20:33023 - 61927 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000513023s 2025-12-12T16:40:12.308466071+00:00 stdout F [INFO] 10.217.0.20:55838 - 55770 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000602675s 2025-12-12T16:40:12.348710692+00:00 stdout F [INFO] 10.217.0.20:46896 - 62998 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000725399s 2025-12-12T16:40:12.349080082+00:00 stdout F [INFO] 10.217.0.20:55850 - 14874 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000961064s 2025-12-12T16:40:13.353039716+00:00 stdout F [INFO] 10.217.0.20:57138 - 58925 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000641806s 2025-12-12T16:40:13.353139448+00:00 stdout F [INFO] 10.217.0.20:56520 - 6976 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000824721s 2025-12-12T16:40:15.359773116+00:00 stdout F [INFO] 10.217.0.20:48198 - 37106 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00121791s 2025-12-12T16:40:15.360103604+00:00 stdout F [INFO] 10.217.0.20:46732 - 47034 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001305543s 2025-12-12T16:40:22.267096096+00:00 stdout F [INFO] 10.217.0.20:38612 - 39689 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000862092s 2025-12-12T16:40:22.267142998+00:00 stdout F [INFO] 10.217.0.20:48151 - 56871 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001641351s 2025-12-12T16:40:23.275408680+00:00 stdout F [INFO] 10.217.0.20:50258 - 5049 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. 
udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000952374s 2025-12-12T16:40:23.275408680+00:00 stdout F [INFO] 10.217.0.20:35460 - 45567 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000970935s 2025-12-12T16:40:25.284201402+00:00 stdout F [INFO] 10.217.0.20:34530 - 58866 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001251672s 2025-12-12T16:40:25.285446113+00:00 stdout F [INFO] 10.217.0.20:36962 - 31876 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.00079516s 2025-12-12T16:40:25.436003946+00:00 stdout F [INFO] 10.217.0.20:48709 - 38858 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000727648s 2025-12-12T16:40:25.436003946+00:00 stdout F [INFO] 10.217.0.20:49953 - 34432 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000980595s 2025-12-12T16:40:26.440788942+00:00 stdout F [INFO] 10.217.0.20:49847 - 64745 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000648507s 2025-12-12T16:40:26.440861883+00:00 stdout F [INFO] 10.217.0.20:45946 - 45252 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.000668897s 2025-12-12T16:40:28.449585422+00:00 stdout F [INFO] 10.217.0.20:52546 - 53019 "A IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001562489s 2025-12-12T16:40:28.451264425+00:00 stdout F [INFO] 10.217.0.20:55524 - 25240 "AAAA IN cluster-monitoring-operator.openshift-monitoring.svc. udp 81 false 1232" NXDOMAIN qr,rd,ra 70 0.001694542s 2025-12-12T16:40:48.796523157+00:00 stdout F [INFO] 10.217.0.39:56386 - 35064 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.003791775s 2025-12-12T16:40:48.796523157+00:00 stdout F [INFO] 10.217.0.39:45284 - 27276 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.003718433s 2025-12-12T16:41:48.785263917+00:00 stdout F [INFO] 10.217.0.39:49808 - 55483 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.003448447s 2025-12-12T16:41:48.785327858+00:00 stdout F [INFO] 10.217.0.39:55477 - 53532 "AAAA IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.003500568s 2025-12-12T16:42:48.783496708+00:00 stdout F [INFO] 10.217.0.39:37187 - 48657 "A IN thanos-querier.openshift-monitoring.svc. udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.002054992s 2025-12-12T16:42:48.783620521+00:00 stdout F [INFO] 10.217.0.39:39725 - 28168 "AAAA IN thanos-querier.openshift-monitoring.svc. 
udp 68 false 1232" NXDOMAIN qr,rd,ra 57 0.00278573s ././@LongLink0000644000000000000000000000025100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015117043043033225 5ustar zuulzuul././@LongLink0000644000000000000000000000027100000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/bond-cni-plugin/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015117043062033226 5ustar zuulzuul././@LongLink0000644000000000000000000000027600000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/bond-cni-plugin/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000644000175000017500000000061015117043043033224 0ustar zuulzuul2025-12-12T16:16:24.992605271+00:00 stdout F 2025-12-12T16:16:24+00:00 [cnibincopy] Successfully copied files in /bondcni/rhel9/ to /host/opt/cni/bin/upgrade_d203bd09-bc8a-4e59-bfbb-01ca82ffca55 2025-12-12T16:16:24.998158527+00:00 stdout F 2025-12-12T16:16:24+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d203bd09-bc8a-4e59-bfbb-01ca82ffca55 to /host/opt/cni/bin/ ././@LongLink0000644000000000000000000000026500000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/cni-plugins/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015117043062033226 5ustar zuulzuul././@LongLink0000644000000000000000000000027200000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/cni-plugins/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000644000175000017500000000062415117043043033231 0ustar zuulzuul2025-12-12T16:16:24.442700946+00:00 stdout F 2025-12-12T16:16:24+00:00 [cnibincopy] Successfully copied files in /usr/src/plugins/rhel9/bin/ to /host/opt/cni/bin/upgrade_5bc6d1a4-94b5-4efa-8922-08941b73a736 2025-12-12T16:16:24.450843425+00:00 stdout F 2025-12-12T16:16:24+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5bc6d1a4-94b5-4efa-8922-08941b73a736 to /host/opt/cni/bin/ ././@LongLink0000644000000000000000000000031400000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/kube-multus-additional-cni-plugins/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015117043062033226 5ustar zuulzuul././@LongLink0000644000000000000000000000032100000000000011577 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/kube-multus-additional-cni-plugins/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000644000175000017500000000000015117043043033215 0ustar zuulzuul././@LongLink0000644000000000000000000000027100000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015117043062033226 5ustar zuulzuul././@LongLink0000644000000000000000000000027600000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000644000175000017500000000012015117043043033220 0ustar zuulzuul2025-12-12T16:16:28.078830219+00:00 stdout F Done configuring CNI. Sleep=false ././@LongLink0000644000000000000000000000030300000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/egress-router-binary-copy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015117043062033226 5ustar zuulzuul././@LongLink0000644000000000000000000000031000000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/egress-router-binary-copy/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000644000175000017500000000063615117043043033234 0ustar zuulzuul2025-12-12T16:16:23.274775153+00:00 stdout F 2025-12-12T16:16:23+00:00 [cnibincopy] Successfully copied files in /usr/src/egress-router-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_98008bf0-2522-4b76-9770-aa8dcc344c0c 2025-12-12T16:16:23.280218675+00:00 stdout F 2025-12-12T16:16:23+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_98008bf0-2522-4b76-9770-aa8dcc344c0c to /host/opt/cni/bin/ ././@LongLink0000644000000000000000000000030100000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni-bincopy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015117043062033226 5ustar zuulzuul././@LongLink0000644000000000000000000000030600000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni-bincopy/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000644000175000017500000000063015117043043033226 0ustar zuulzuul2025-12-12T16:16:27.322993987+00:00 stdout F 
2025-12-12T16:16:27+00:00 [cnibincopy] Successfully copied files in /usr/src/whereabouts/rhel9/bin/ to /host/opt/cni/bin/upgrade_780a78ab-95a4-49ab-b4b6-a83b57498d19 2025-12-12T16:16:27.327894387+00:00 stdout F 2025-12-12T16:16:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_780a78ab-95a4-49ab-b4b6-a83b57498d19 to /host/opt/cni/bin/ ././@LongLink0000644000000000000000000000027300000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/routeoverride-cni/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015117043062033226 5ustar zuulzuul././@LongLink0000644000000000000000000000030000000000000011574 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/routeoverride-cni/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000644000175000017500000000063315117043043033231 0ustar zuulzuul2025-12-12T16:16:26.018043587+00:00 stdout F 2025-12-12T16:16:26+00:00 [cnibincopy] Successfully copied files in /usr/src/route-override/rhel9/bin/ to /host/opt/cni/bin/upgrade_f10e0de5-d35f-4087-939f-d9eb7f4108df 2025-12-12T16:16:26.024633818+00:00 stdout F 2025-12-12T16:16:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f10e0de5-d35f-4087-939f-d9eb7f4108df to /host/opt/cni/bin/ ././@LongLink0000644000000000000000000000026000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_0000755000175000017500000000000015117043043032760 5ustar zuulzuul././@LongLink0000644000000000000000000000030500000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/machine-api-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_0000755000175000017500000000000015117043063032762 5ustar zuulzuul././@LongLink0000644000000000000000000000031200000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/machine-api-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_0000644000175000017500000004354315117043043032773 0ustar zuulzuul2025-12-12T16:16:41.323597990+00:00 stderr F I1212 16:16:41.323220 1 start.go:74] Version: 4.20.0-202510211040.p2.g4a9b90e.assembly.stream.el9 2025-12-12T16:16:41.324231986+00:00 stderr F I1212 16:16:41.324154 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 
2025-12-12T16:16:41.324536793+00:00 stderr F I1212 16:16:41.324501 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:41.324536793+00:00 stderr F I1212 16:16:41.324522 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:41.324536793+00:00 stderr F I1212 16:16:41.324528 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:41.324551923+00:00 stderr F I1212 16:16:41.324533 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:41.324551923+00:00 stderr F I1212 16:16:41.324540 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:41.355306594+00:00 stderr F I1212 16:16:41.355218 1 leaderelection.go:257] attempting to acquire leader lease openshift-machine-api/machine-api-operator... 2025-12-12T16:16:41.366443286+00:00 stderr F I1212 16:16:41.366345 1 leaderelection.go:271] successfully acquired lease openshift-machine-api/machine-api-operator 2025-12-12T16:16:41.378835839+00:00 stderr F I1212 16:16:41.378547 1 operator.go:217] Starting Machine API Operator 2025-12-12T16:16:41.379853094+00:00 stderr F I1212 16:16:41.379814 1 reflector.go:357] "Starting reflector" type="*v1.ValidatingWebhookConfiguration" resyncPeriod="16m1.845890163s" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:41.379873174+00:00 stderr F I1212 16:16:41.379858 1 reflector.go:403] "Listing and watching" type="*v1.ValidatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:41.379929866+00:00 stderr F I1212 16:16:41.379879 1 reflector.go:357] "Starting reflector" type="*v1.FeatureGate" resyncPeriod="12m10.168902281s" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.380159451+00:00 stderr F I1212 16:16:41.379919 1 reflector.go:357] "Starting reflector" type="*v1.DaemonSet" resyncPeriod="16m1.845890163s" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:41.380159451+00:00 stderr F I1212 16:16:41.379973 1 reflector.go:403] "Listing and watching" type="*v1.DaemonSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:41.380159451+00:00 stderr F I1212 16:16:41.379979 1 reflector.go:357] "Starting reflector" type="*v1.Deployment" resyncPeriod="16m1.845890163s" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:41.380159451+00:00 stderr F I1212 16:16:41.380004 1 reflector.go:357] "Starting reflector" type="*v1beta1.Machine" resyncPeriod="18m48.407074456s" reflector="github.com/openshift/client-go/machine/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.380159451+00:00 stderr F I1212 16:16:41.379827 1 reflector.go:357] "Starting reflector" type="*v1.ClusterOperator" resyncPeriod="12m10.168902281s" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.380159451+00:00 stderr F I1212 16:16:41.380020 1 reflector.go:403] "Listing and watching" type="*v1beta1.Machine" reflector="github.com/openshift/client-go/machine/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.380159451+00:00 stderr F I1212 16:16:41.380042 1 reflector.go:403] "Listing and watching" type="*v1.ClusterOperator" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.380159451+00:00 stderr F 
I1212 16:16:41.380002 1 reflector.go:403] "Listing and watching" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:41.380318995+00:00 stderr F I1212 16:16:41.379905 1 reflector.go:357] "Starting reflector" type="*v1beta1.MachineSet" resyncPeriod="18m48.407074456s" reflector="github.com/openshift/client-go/machine/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.380338155+00:00 stderr F I1212 16:16:41.380317 1 reflector.go:403] "Listing and watching" type="*v1beta1.MachineSet" reflector="github.com/openshift/client-go/machine/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.380399017+00:00 stderr F I1212 16:16:41.379935 1 reflector.go:403] "Listing and watching" type="*v1.FeatureGate" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.381025152+00:00 stderr F I1212 16:16:41.380604 1 reflector.go:357] "Starting reflector" type="*v1.ClusterVersion" resyncPeriod="12m10.168902281s" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.381025152+00:00 stderr F I1212 16:16:41.380653 1 reflector.go:403] "Listing and watching" type="*v1.ClusterVersion" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.381025152+00:00 stderr F I1212 16:16:41.379949 1 reflector.go:357] "Starting reflector" type="*v1.Proxy" resyncPeriod="12m10.168902281s" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.381060863+00:00 stderr F I1212 16:16:41.381019 1 reflector.go:403] "Listing and watching" type="*v1.Proxy" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.381974555+00:00 stderr F I1212 16:16:41.380631 1 reflector.go:357] "Starting reflector" type="*v1.MutatingWebhookConfiguration" resyncPeriod="16m1.845890163s" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:41.381974555+00:00 stderr F I1212 16:16:41.381281 1 reflector.go:403] "Listing and watching" type="*v1.MutatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:41.383482802+00:00 stderr F I1212 16:16:41.383390 1 reflector.go:430] "Caches populated" type="*v1beta1.Machine" reflector="github.com/openshift/client-go/machine/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.383935623+00:00 stderr F I1212 16:16:41.383898 1 reflector.go:430] "Caches populated" type="*v1.MutatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:41.384784974+00:00 stderr F I1212 16:16:41.384279 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.385161753+00:00 stderr F I1212 16:16:41.385127 1 reflector.go:430] "Caches populated" type="*v1.DaemonSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:41.387337756+00:00 stderr F I1212 16:16:41.385428 1 reflector.go:430] "Caches populated" type="*v1beta1.MachineSet" reflector="github.com/openshift/client-go/machine/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.387337756+00:00 stderr F I1212 16:16:41.385804 1 reflector.go:430] "Caches populated" type="*v1.ValidatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:41.387337756+00:00 stderr F I1212 
16:16:41.386301 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:41.389115740+00:00 stderr F I1212 16:16:41.389081 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.389253713+00:00 stderr F I1212 16:16:41.389206 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.393613890+00:00 stderr F I1212 16:16:41.393572 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:41.478934493+00:00 stderr F I1212 16:16:41.478843 1 operator.go:229] Synced up caches 2025-12-12T16:16:41.478934493+00:00 stderr F I1212 16:16:41.478911 1 operator.go:234] Started feature gate accessor 2025-12-12T16:16:41.479018075+00:00 stderr F I1212 16:16:41.478935 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:41.479440575+00:00 stderr F I1212 16:16:41.479330 1 start.go:123] Synced up machine api informer caches 2025-12-12T16:16:41.479440575+00:00 stderr F I1212 16:16:41.479290 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-machine-api", Name:"machine-api-operator", UID:"6e3281a2-74ca-4530-b743-ae9a62edcc78", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", 
"MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:41.494867292+00:00 stderr F I1212 16:16:41.494812 1 status.go:70] Syncing status: re-syncing 2025-12-12T16:16:41.504230080+00:00 stderr F I1212 16:16:41.503974 1 sync.go:78] Provider is NoOp, skipping synchronisation 2025-12-12T16:16:41.507352786+00:00 stderr F I1212 16:16:41.507292 1 status.go:100] Syncing status: available 2025-12-12T16:18:41.379851288+00:00 stderr F E1212 16:18:41.379092 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-machine-api/leases/machine-api-operator": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:41.380941215+00:00 stderr F E1212 16:18:41.380847 1 leaderelection.go:436] error retrieving resource lock openshift-machine-api/machine-api-operator: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-machine-api/leases/machine-api-operator": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:19:57.915837579+00:00 stderr F I1212 16:19:57.915258 1 reflector.go:403] "Listing and watching" type="*v1.ClusterVersion" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:19:57.920146157+00:00 stderr F I1212 16:19:57.920092 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:01.540804714+00:00 stderr F I1212 16:20:01.539428 1 reflector.go:403] "Listing and watching" type="*v1.ClusterOperator" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:01.549304888+00:00 stderr F I1212 16:20:01.548968 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:01.560903669+00:00 stderr F I1212 16:20:01.560819 1 status.go:70] Syncing status: re-syncing 2025-12-12T16:20:01.567233268+00:00 stderr F I1212 16:20:01.567150 1 sync.go:78] Provider is NoOp, skipping synchronisation 2025-12-12T16:20:01.569742001+00:00 stderr F I1212 16:20:01.569705 1 status.go:100] Syncing status: available 2025-12-12T16:20:04.425533123+00:00 stderr F I1212 16:20:04.425108 1 reflector.go:403] "Listing and watching" type="*v1.ValidatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:04.427830521+00:00 stderr F I1212 16:20:04.427776 1 reflector.go:430] "Caches populated" type="*v1.ValidatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:06.775302581+00:00 stderr F I1212 
16:20:06.774706 1 reflector.go:403] "Listing and watching" type="*v1.DaemonSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:06.776394898+00:00 stderr F I1212 16:20:06.776368 1 reflector.go:430] "Caches populated" type="*v1.DaemonSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:13.255586128+00:00 stderr F I1212 16:20:13.254580 1 reflector.go:403] "Listing and watching" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:13.259860156+00:00 stderr F I1212 16:20:13.259809 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:14.202604795+00:00 stderr F I1212 16:20:14.202523 1 reflector.go:403] "Listing and watching" type="*v1.FeatureGate" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:14.204888373+00:00 stderr F I1212 16:20:14.204863 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:18.054353485+00:00 stderr F I1212 16:20:18.054277 1 reflector.go:403] "Listing and watching" type="*v1.MutatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:18.056250892+00:00 stderr F I1212 16:20:18.056171 1 reflector.go:430] "Caches populated" type="*v1.MutatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:21.913831918+00:00 stderr F I1212 16:20:21.912666 1 reflector.go:403] "Listing and watching" type="*v1beta1.MachineSet" reflector="github.com/openshift/client-go/machine/informers/externalversions/factory.go:125" 2025-12-12T16:20:21.916647339+00:00 stderr F I1212 16:20:21.916585 1 reflector.go:430] "Caches populated" type="*v1beta1.MachineSet" reflector="github.com/openshift/client-go/machine/informers/externalversions/factory.go:125" 2025-12-12T16:20:23.026315261+00:00 stderr F I1212 16:20:23.024689 1 reflector.go:403] "Listing and watching" type="*v1beta1.Machine" reflector="github.com/openshift/client-go/machine/informers/externalversions/factory.go:125" 2025-12-12T16:20:23.029401398+00:00 stderr F I1212 16:20:23.029359 1 reflector.go:430] "Caches populated" type="*v1beta1.Machine" reflector="github.com/openshift/client-go/machine/informers/externalversions/factory.go:125" 2025-12-12T16:20:29.158516547+00:00 stderr F I1212 16:20:29.157921 1 reflector.go:403] "Listing and watching" type="*v1.Proxy" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:29.162811485+00:00 stderr F I1212 16:20:29.162785 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:32:11.736712180+00:00 stderr F I1212 16:32:11.735717 1 status.go:70] Syncing status: re-syncing 2025-12-12T16:32:11.747340911+00:00 stderr F I1212 16:32:11.747257 1 sync.go:78] Provider is NoOp, skipping synchronisation 2025-12-12T16:32:11.752502018+00:00 stderr F I1212 16:32:11.752436 1 status.go:100] Syncing status: available ././@LongLink0000644000000000000000000000030000000000000011574 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_0000755000175000017500000000000015117043062032761 5ustar zuulzuul././@LongLink0000644000000000000000000000030500000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/kube-rbac-proxy/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_0000644000175000017500000002025615117043043032767 0ustar zuulzuul2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093399 1 flags.go:64] FLAG: --add-dir-header="false" 2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093558 1 flags.go:64] FLAG: --allow-paths="[]" 2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093572 1 flags.go:64] FLAG: --alsologtostderr="false" 2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093575 1 flags.go:64] FLAG: --auth-header-fields-enabled="false" 2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093578 1 flags.go:64] FLAG: --auth-header-groups-field-name="x-remote-groups" 2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093582 1 flags.go:64] FLAG: --auth-header-groups-field-separator="|" 2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093585 1 flags.go:64] FLAG: --auth-header-user-field-name="x-remote-user" 2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093588 1 flags.go:64] FLAG: --auth-token-audiences="[]" 2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093592 1 flags.go:64] FLAG: --client-ca-file="" 2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093595 1 flags.go:64] FLAG: --config-file="/etc/kube-rbac-proxy/config-file.yaml" 2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093598 1 flags.go:64] FLAG: --help="false" 2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093600 1 flags.go:64] FLAG: --http2-disable="false" 2025-12-12T16:16:41.093612745+00:00 stderr F I1212 16:16:41.093603 1 flags.go:64] FLAG: --http2-max-concurrent-streams="100" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093606 1 flags.go:64] FLAG: --http2-max-size="262144" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093612 1 flags.go:64] FLAG: --ignore-paths="[]" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093615 1 flags.go:64] FLAG: --insecure-listen-address="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093619 1 flags.go:64] FLAG: --kube-api-burst="0" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093626 1 flags.go:64] FLAG: --kube-api-qps="0" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093630 1 flags.go:64] FLAG: --kubeconfig="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093633 1 flags.go:64] FLAG: --log-backtrace-at="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093636 1 flags.go:64] FLAG: --log-dir="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093638 1 flags.go:64] FLAG: --log-file="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093641 1 flags.go:64] FLAG: --log-file-max-size="0" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093645 1 flags.go:64] FLAG: 
--log-flush-frequency="5s" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093649 1 flags.go:64] FLAG: --logtostderr="true" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093651 1 flags.go:64] FLAG: --oidc-ca-file="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093654 1 flags.go:64] FLAG: --oidc-clientID="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093656 1 flags.go:64] FLAG: --oidc-groups-claim="groups" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093659 1 flags.go:64] FLAG: --oidc-groups-prefix="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093662 1 flags.go:64] FLAG: --oidc-issuer="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093665 1 flags.go:64] FLAG: --oidc-sign-alg="[RS256]" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093669 1 flags.go:64] FLAG: --oidc-username-claim="email" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093672 1 flags.go:64] FLAG: --oidc-username-prefix="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093675 1 flags.go:64] FLAG: --one-output="false" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093677 1 flags.go:64] FLAG: --proxy-endpoints-port="0" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093681 1 flags.go:64] FLAG: --secure-listen-address="0.0.0.0:8443" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093683 1 flags.go:64] FLAG: --skip-headers="false" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093686 1 flags.go:64] FLAG: --skip-log-headers="false" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093689 1 flags.go:64] FLAG: --stderrthreshold="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093691 1 flags.go:64] FLAG: --tls-cert-file="/etc/tls/private/tls.crt" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093694 1 flags.go:64] FLAG: --tls-cipher-suites="[TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305]" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093703 1 flags.go:64] FLAG: --tls-min-version="VersionTLS12" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093706 1 flags.go:64] FLAG: --tls-private-key-file="/etc/tls/private/tls.key" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093709 1 flags.go:64] FLAG: --tls-reload-interval="1m0s" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093713 1 flags.go:64] FLAG: --upstream="http://localhost:8080/" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093715 1 flags.go:64] FLAG: --upstream-ca-file="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093718 1 flags.go:64] FLAG: --upstream-client-cert-file="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093721 1 flags.go:64] FLAG: --upstream-client-key-file="" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093723 1 flags.go:64] FLAG: --upstream-force-h2c="false" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093726 1 flags.go:64] FLAG: --v="3" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093729 1 flags.go:64] FLAG: --version="false" 2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093734 1 flags.go:64] FLAG: --vmodule="" 2025-12-12T16:16:41.093764219+00:00 stderr F W1212 16:16:41.093741 1 deprecated.go:66] 
2025-12-12T16:16:41.093764219+00:00 stderr F ==== Removed Flag Warning ======================
2025-12-12T16:16:41.093764219+00:00 stderr F
2025-12-12T16:16:41.093764219+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more.
2025-12-12T16:16:41.093764219+00:00 stderr F
2025-12-12T16:16:41.093764219+00:00 stderr F ===============================================
2025-12-12T16:16:41.093764219+00:00 stderr F
2025-12-12T16:16:41.093764219+00:00 stderr F I1212 16:16:41.093751 1 kube-rbac-proxy.go:532] Reading config file: /etc/kube-rbac-proxy/config-file.yaml
2025-12-12T16:16:41.094388224+00:00 stderr F I1212 16:16:41.094343 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
2025-12-12T16:16:41.094388224+00:00 stderr F I1212 16:16:41.094362 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
2025-12-12T16:16:41.094388224+00:00 stderr F I1212 16:16:41.094367 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
2025-12-12T16:16:41.094388224+00:00 stderr F I1212 16:16:41.094371 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
2025-12-12T16:16:41.094567368+00:00 stderr F I1212 16:16:41.094534 1 kube-rbac-proxy.go:235] Valid token audiences:
2025-12-12T16:16:41.095817569+00:00 stderr F I1212 16:16:41.095788 1 kube-rbac-proxy.go:349] Reading certificate files
2025-12-12T16:16:41.096221359+00:00 stderr F I1212 16:16:41.096193 1 kube-rbac-proxy.go:397] Starting TCP socket on 0.0.0.0:8443
2025-12-12T16:16:41.097039769+00:00 stderr F I1212 16:16:41.097010 1 kube-rbac-proxy.go:404] Listening securely on 0.0.0.0:8443
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_infrawatch-operators-cdpts_eeed1a9b-f386-4d11-b730-03bcb44f9a55/
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-insecure-readyz/0.log
2025-12-12T16:18:29.878324788+00:00 stderr F I1212 16:18:29.878169 1 readyz.go:111] Listening on 0.0.0.0:6080
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-regeneration-controller/0.log
2025-12-12T16:18:29.553460048+00:00 stderr F W1212 16:18:29.553282 1 cmd.go:257] Using insecure, self-signed certificates
2025-12-12T16:18:29.553691464+00:00 stderr F I1212 16:18:29.553498 1 crypto.go:594] Generating new CA for cert-regeneration-controller-signer@1765556309 cert, and key in /tmp/serving-cert-3955722927/serving-signer.crt, /tmp/serving-cert-3955722927/serving-signer.key
2025-12-12T16:18:29.553691464+00:00 stderr F Validity period of the certificate for "cert-regeneration-controller-signer@1765556309" is unset, resetting to 43800h0m0s!
2025-12-12T16:18:30.086612378+00:00 stderr F I1212 16:18:30.086507 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}.
2025-12-12T16:18:30.087359876+00:00 stderr F I1212 16:18:30.087305 1 observer_polling.go:159] Starting file observer
2025-12-12T16:18:30.087476569+00:00 stderr F I1212 16:18:30.087441 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
2025-12-12T16:18:30.087476569+00:00 stderr F I1212 16:18:30.087465 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
2025-12-12T16:18:30.087476569+00:00 stderr F I1212 16:18:30.087472 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
2025-12-12T16:18:30.087488680+00:00 stderr F I1212 16:18:30.087479 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true
2025-12-12T16:18:30.087488680+00:00 stderr F I1212 16:18:30.087484 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
2025-12-12T16:18:35.460932166+00:00 stderr F I1212 16:18:35.460615 1 builder.go:304] cert-regeneration-controller version v0.0.0-unknown-c3d9642-c3d9642
2025-12-12T16:18:35.464342001+00:00 stderr F I1212 16:18:35.464135 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology
2025-12-12T16:18:35.464623268+00:00 stderr F I1212 16:18:35.464587 1 leaderelection.go:257] attempting to acquire leader lease openshift-kube-apiserver/cert-regeneration-controller-lock...
2025-12-12T16:18:35.473617710+00:00 stderr F I1212 16:18:35.473553 1 leaderelection.go:271] successfully acquired lease openshift-kube-apiserver/cert-regeneration-controller-lock 2025-12-12T16:18:35.474099252+00:00 stderr F I1212 16:18:35.473986 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-kube-apiserver", Name:"cert-regeneration-controller-lock", UID:"d07cd5e6-5db4-43bb-a0b4-f3d73f59aa16", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"39146", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' crc_3d8f0079-9f12-4d6b-b04d-161961f4b3fb became leader 2025-12-12T16:18:35.474613655+00:00 stderr F I1212 16:18:35.474592 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:18:35.478290425+00:00 stderr F I1212 16:18:35.478261 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:35.478693445+00:00 stderr F I1212 16:18:35.478624 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:35.480194072+00:00 stderr F I1212 16:18:35.478922 1 cmd.go:126] FeatureGates initialized: knownFeatureGates=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass 
VolumeGroupSnapshot] 2025-12-12T16:18:35.480194072+00:00 stderr F I1212 16:18:35.479075 1 certrotationcontroller.go:148] Setting monthPeriod to 720h0m0s, yearPeriod to 8760h0m0s, tenMonthPeriod to 7008h0m0s 2025-12-12T16:18:35.480194072+00:00 stderr F I1212 16:18:35.479540 1 event.go:377] Event(v1.ObjectReference{Kind:"Node", Namespace:"openshift-kube-apiserver", Name:"crc", UID:"23216ff3-032e-49af-af7e-1d23d5907b59", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:18:35.485727619+00:00 stderr F I1212 16:18:35.485477 1 cabundlesyncer.go:82] Starting CA bundle controller 2025-12-12T16:18:35.485727619+00:00 stderr F I1212 16:18:35.485521 1 shared_informer.go:350] "Waiting for caches to sync" controller="CABundleController" 
2025-12-12T16:18:35.487914403+00:00 stderr F I1212 16:18:35.487876 1 certrotationcontroller.go:919] Starting CertRotation 2025-12-12T16:18:35.487914403+00:00 stderr F I1212 16:18:35.487891 1 certrotationcontroller.go:884] Waiting for CertRotation 2025-12-12T16:18:35.489101433+00:00 stderr F I1212 16:18:35.489070 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:35.490161669+00:00 stderr F I1212 16:18:35.490119 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:35.493932662+00:00 stderr F I1212 16:18:35.493851 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:35.495826799+00:00 stderr F I1212 16:18:35.495762 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:35.497994323+00:00 stderr F I1212 16:18:35.497918 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:35.507760354+00:00 stderr F I1212 16:18:35.507692 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:35.508877892+00:00 stderr F I1212 16:18:35.508811 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:35.516161402+00:00 stderr F I1212 16:18:35.516002 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:35.518952461+00:00 stderr F I1212 16:18:35.518918 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=kubeapiservers" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:35.522251412+00:00 stderr F I1212 16:18:35.522161 1 reflector.go:430] "Caches populated" type="*v1.Network" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:35.586651174+00:00 stderr F I1212 16:18:35.586565 1 shared_informer.go:357] "Caches are synced" controller="CABundleController" 2025-12-12T16:18:35.588460349+00:00 stderr F I1212 16:18:35.588389 1 servicehostname.go:46] syncing servicenetwork hostnames: [10.217.4.1 kubernetes kubernetes.default kubernetes.default.svc kubernetes.default.svc.cluster.local openshift openshift.default openshift.default.svc openshift.default.svc.cluster.local] 2025-12-12T16:18:35.588493400+00:00 stderr F I1212 16:18:35.588456 1 externalloadbalancer.go:27] syncing external loadbalancer hostnames: api.crc.testing 2025-12-12T16:18:35.588493400+00:00 stderr F I1212 16:18:35.588469 1 internalloadbalancer.go:27] syncing internal loadbalancer hostnames: api-int.crc.testing 2025-12-12T16:18:35.588493400+00:00 stderr F I1212 16:18:35.588477 1 certrotationcontroller.go:902] Finished waiting for CertRotation 2025-12-12T16:18:35.588596582+00:00 stderr F I1212 16:18:35.588572 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:18:35.588596582+00:00 stderr F I1212 16:18:35.588590 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:18:35.588626503+00:00 stderr F I1212 16:18:35.588602 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 
2025-12-12T16:18:35.588856549+00:00 stderr F I1212 16:18:35.588777 1 servicehostname.go:46] syncing servicenetwork hostnames: [10.217.4.1 kubernetes kubernetes.default kubernetes.default.svc kubernetes.default.svc.cluster.local openshift openshift.default openshift.default.svc openshift.default.svc.cluster.local] 2025-12-12T16:18:35.589962496+00:00 stderr F I1212 16:18:35.589938 1 externalloadbalancer.go:27] syncing external loadbalancer hostnames: api.crc.testing 2025-12-12T16:18:35.594530849+00:00 stderr F I1212 16:18:35.594503 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:18:35.594705174+00:00 stderr F I1212 16:18:35.594692 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:18:35.594743164+00:00 stderr F I1212 16:18:35.594729 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:18:35.594779555+00:00 stderr F I1212 16:18:35.594594 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:18:35.594809836+00:00 stderr F I1212 16:18:35.594792 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:18:35.594809836+00:00 stderr F I1212 16:18:35.594803 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:18:35.594866868+00:00 stderr F I1212 16:18:35.594605 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:18:35.594866868+00:00 stderr F I1212 16:18:35.594848 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:18:35.594866868+00:00 stderr F I1212 16:18:35.594853 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:18:35.594901678+00:00 stderr F I1212 16:18:35.594614 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:18:35.594931759+00:00 stderr F I1212 16:18:35.594921 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:18:35.594957850+00:00 stderr F I1212 16:18:35.594948 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:18:35.597333999+00:00 stderr F I1212 16:18:35.594622 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:18:35.597333999+00:00 stderr F I1212 16:18:35.597262 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:18:35.597333999+00:00 stderr F I1212 16:18:35.597288 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:18:35.597603875+00:00 stderr F I1212 16:18:35.594629 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:18:35.597631126+00:00 stderr F I1212 16:18:35.597605 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:18:35.597631126+00:00 stderr F I1212 16:18:35.597625 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:18:35.628163941+00:00 stderr F I1212 16:18:35.594642 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:18:35.628163941+00:00 stderr F I1212 16:18:35.628013 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:18:35.628163941+00:00 stderr F I1212 16:18:35.628041 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 
2025-12-12T16:18:35.632257752+00:00 stderr F I1212 16:18:35.594649 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:18:35.632257752+00:00 stderr F I1212 16:18:35.630041 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:18:35.632257752+00:00 stderr F I1212 16:18:35.630052 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:18:35.638555388+00:00 stderr F I1212 16:18:35.594654 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:18:35.638555388+00:00 stderr F I1212 16:18:35.638515 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:18:35.638555388+00:00 stderr F I1212 16:18:35.638531 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:18:35.638616929+00:00 stderr F I1212 16:18:35.594660 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:18:35.638616929+00:00 stderr F I1212 16:18:35.638572 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:18:35.638616929+00:00 stderr F I1212 16:18:35.638577 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:18:35.639312516+00:00 stderr F I1212 16:18:35.594666 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:18:35.639463440+00:00 stderr F I1212 16:18:35.639369 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:18:35.639463440+00:00 stderr F I1212 16:18:35.639442 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:28:35.490643167+00:00 stderr F I1212 16:28:35.490518 1 externalloadbalancer.go:27] syncing external loadbalancer hostnames: api.crc.testing 2025-12-12T16:28:36.242338582+00:00 stderr F I1212 16:28:36.240923 1 servicehostname.go:46] syncing servicenetwork hostnames: [10.217.4.1 kubernetes kubernetes.default kubernetes.default.svc kubernetes.default.svc.cluster.local openshift openshift.default openshift.default.svc openshift.default.svc.cluster.local] 2025-12-12T16:38:35.490822060+00:00 stderr F I1212 16:38:35.490745 1 externalloadbalancer.go:27] syncing external loadbalancer hostnames: api.crc.testing 2025-12-12T16:38:36.242458614+00:00 stderr F I1212 16:38:36.241114 1 servicehostname.go:46] syncing servicenetwork hostnames: [10.217.4.1 kubernetes kubernetes.default kubernetes.default.svc kubernetes.default.svc.cluster.local openshift openshift.default openshift.default.svc openshift.default.svc.cluster.local] ././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-syncer/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000755000175000017500000000000015117043062033044 5ustar zuulzuul././@LongLink0000644000000000000000000000027400000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-syncer/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000644000175000017500000001771415117043043033057 0ustar 
zuulzuul2025-12-12T16:18:29.211507223+00:00 stderr F I1212 16:18:29.210902 1 base_controller.go:76] Waiting for caches to sync for CertSyncController 2025-12-12T16:18:29.211507223+00:00 stderr F I1212 16:18:29.210981 1 observer_polling.go:159] Starting file observer 2025-12-12T16:18:35.511716842+00:00 stderr F I1212 16:18:35.511605 1 base_controller.go:82] Caches are synced for CertSyncController 2025-12-12T16:18:35.511716842+00:00 stderr F I1212 16:18:35.511703 1 base_controller.go:119] Starting #1 worker of CertSyncController controller ... 2025-12-12T16:18:35.511907627+00:00 stderr F I1212 16:18:35.511827 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true} {control-plane-node-kubeconfig false} {check-endpoints-kubeconfig false}] 2025-12-12T16:18:35.512321887+00:00 stderr F I1212 16:18:35.512281 1 certsync_controller.go:178] Syncing secrets: [{aggregator-client false} {localhost-serving-cert-certkey false} {service-network-serving-certkey false} {external-loadbalancer-serving-certkey false} {internal-loadbalancer-serving-certkey false} {bound-service-account-signing-key false} {control-plane-node-admin-client-cert-key false} {check-endpoints-client-cert-key false} {kubelet-client false} {node-kubeconfigs false} {user-serving-cert true} {user-serving-cert-000 true} {user-serving-cert-001 true} {user-serving-cert-002 true} {user-serving-cert-003 true} {user-serving-cert-004 true} {user-serving-cert-005 true} {user-serving-cert-006 true} {user-serving-cert-007 true} {user-serving-cert-008 true} {user-serving-cert-009 true}] 2025-12-12T16:28:35.469876412+00:00 stderr F I1212 16:28:35.468985 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true} {control-plane-node-kubeconfig false} {check-endpoints-kubeconfig false}] 2025-12-12T16:28:35.470483047+00:00 stderr F I1212 16:28:35.470417 1 certsync_controller.go:178] Syncing secrets: [{aggregator-client false} {localhost-serving-cert-certkey false} {service-network-serving-certkey false} {external-loadbalancer-serving-certkey false} {internal-loadbalancer-serving-certkey false} {bound-service-account-signing-key false} {control-plane-node-admin-client-cert-key false} {check-endpoints-client-cert-key false} {kubelet-client false} {node-kubeconfigs false} {user-serving-cert true} {user-serving-cert-000 true} {user-serving-cert-001 true} {user-serving-cert-002 true} {user-serving-cert-003 true} {user-serving-cert-004 true} {user-serving-cert-005 true} {user-serving-cert-006 true} {user-serving-cert-007 true} {user-serving-cert-008 true} {user-serving-cert-009 true}] 2025-12-12T16:28:35.471153524+00:00 stderr F I1212 16:28:35.471121 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true} {control-plane-node-kubeconfig false} {check-endpoints-kubeconfig false}] 2025-12-12T16:28:35.474253723+00:00 stderr F I1212 16:28:35.474151 1 certsync_controller.go:178] Syncing secrets: [{aggregator-client false} {localhost-serving-cert-certkey false} {service-network-serving-certkey false} {external-loadbalancer-serving-certkey false} {internal-loadbalancer-serving-certkey false} {bound-service-account-signing-key false} {control-plane-node-admin-client-cert-key false} {check-endpoints-client-cert-key false} {kubelet-client false} {node-kubeconfigs false} {user-serving-cert true} {user-serving-cert-000 true} {user-serving-cert-001 true} {user-serving-cert-002 true} 
{user-serving-cert-003 true} {user-serving-cert-004 true} {user-serving-cert-005 true} {user-serving-cert-006 true} {user-serving-cert-007 true} {user-serving-cert-008 true} {user-serving-cert-009 true}] 2025-12-12T16:28:35.474910629+00:00 stderr F I1212 16:28:35.474884 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true} {control-plane-node-kubeconfig false} {check-endpoints-kubeconfig false}] 2025-12-12T16:28:35.475132985+00:00 stderr F I1212 16:28:35.475095 1 certsync_controller.go:178] Syncing secrets: [{aggregator-client false} {localhost-serving-cert-certkey false} {service-network-serving-certkey false} {external-loadbalancer-serving-certkey false} {internal-loadbalancer-serving-certkey false} {bound-service-account-signing-key false} {control-plane-node-admin-client-cert-key false} {check-endpoints-client-cert-key false} {kubelet-client false} {node-kubeconfigs false} {user-serving-cert true} {user-serving-cert-000 true} {user-serving-cert-001 true} {user-serving-cert-002 true} {user-serving-cert-003 true} {user-serving-cert-004 true} {user-serving-cert-005 true} {user-serving-cert-006 true} {user-serving-cert-007 true} {user-serving-cert-008 true} {user-serving-cert-009 true}] 2025-12-12T16:38:35.469699399+00:00 stderr F I1212 16:38:35.469542 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true} {control-plane-node-kubeconfig false} {check-endpoints-kubeconfig false}] 2025-12-12T16:38:35.470672214+00:00 stderr F I1212 16:38:35.470608 1 certsync_controller.go:178] Syncing secrets: [{aggregator-client false} {localhost-serving-cert-certkey false} {service-network-serving-certkey false} {external-loadbalancer-serving-certkey false} {internal-loadbalancer-serving-certkey false} {bound-service-account-signing-key false} {control-plane-node-admin-client-cert-key false} {check-endpoints-client-cert-key false} {kubelet-client false} {node-kubeconfigs false} {user-serving-cert true} {user-serving-cert-000 true} {user-serving-cert-001 true} {user-serving-cert-002 true} {user-serving-cert-003 true} {user-serving-cert-004 true} {user-serving-cert-005 true} {user-serving-cert-006 true} {user-serving-cert-007 true} {user-serving-cert-008 true} {user-serving-cert-009 true}] 2025-12-12T16:38:35.471662269+00:00 stderr F I1212 16:38:35.471624 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true} {control-plane-node-kubeconfig false} {check-endpoints-kubeconfig false}] 2025-12-12T16:38:35.471955176+00:00 stderr F I1212 16:38:35.471920 1 certsync_controller.go:178] Syncing secrets: [{aggregator-client false} {localhost-serving-cert-certkey false} {service-network-serving-certkey false} {external-loadbalancer-serving-certkey false} {internal-loadbalancer-serving-certkey false} {bound-service-account-signing-key false} {control-plane-node-admin-client-cert-key false} {check-endpoints-client-cert-key false} {kubelet-client false} {node-kubeconfigs false} {user-serving-cert true} {user-serving-cert-000 true} {user-serving-cert-001 true} {user-serving-cert-002 true} {user-serving-cert-003 true} {user-serving-cert-004 true} {user-serving-cert-005 true} {user-serving-cert-006 true} {user-serving-cert-007 true} {user-serving-cert-008 true} {user-serving-cert-009 true}] 2025-12-12T16:38:35.472689775+00:00 stderr F I1212 16:38:35.472647 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca 
false} {client-ca false} {trusted-ca-bundle true} {control-plane-node-kubeconfig false} {check-endpoints-kubeconfig false}]
2025-12-12T16:38:35.472997452+00:00 stderr F I1212 16:38:35.472954 1 certsync_controller.go:178] Syncing secrets: [{aggregator-client false} {localhost-serving-cert-certkey false} {service-network-serving-certkey false} {external-loadbalancer-serving-certkey false} {internal-loadbalancer-serving-certkey false} {bound-service-account-signing-key false} {control-plane-node-admin-client-cert-key false} {check-endpoints-client-cert-key false} {kubelet-client false} {node-kubeconfigs false} {user-serving-cert true} {user-serving-cert-000 true} {user-serving-cert-001 true} {user-serving-cert-002 true} {user-serving-cert-003 true} {user-serving-cert-004 true} {user-serving-cert-005 true} {user-serving-cert-006 true} {user-serving-cert-007 true} {user-serving-cert-008 true} {user-serving-cert-009 true}]
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver/0.log
2025-12-12T16:18:28.666195802+00:00 stdout F flock: getting lock took 0.000005 seconds
2025-12-12T16:18:28.666363096+00:00 stdout F Copying system trust bundle ...
2025-12-12T16:18:28.680128697+00:00 stderr F I1212 16:18:28.680005 1 loader.go:402] Config loaded from file: /etc/kubernetes/static-pod-resources/configmaps/kube-apiserver-cert-syncer-kubeconfig/kubeconfig 2025-12-12T16:18:28.680403054+00:00 stderr F I1212 16:18:28.680356 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:18:28.680403054+00:00 stderr F I1212 16:18:28.680377 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:18:28.680403054+00:00 stderr F I1212 16:18:28.680383 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:18:28.680403054+00:00 stderr F I1212 16:18:28.680387 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:18:28.680403054+00:00 stderr F I1212 16:18:28.680391 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:18:28.680679580+00:00 stderr F Copying termination logs to "/var/log/kube-apiserver/termination.log" 2025-12-12T16:18:28.680770363+00:00 stderr F I1212 16:18:28.680741 1 main.go:161] Touching termination lock file "/var/log/kube-apiserver/.terminating" 2025-12-12T16:18:28.681281905+00:00 stderr F I1212 16:18:28.681161 1 main.go:219] Launching sub-process "/usr/bin/hyperkube kube-apiserver --openshift-config=/etc/kubernetes/static-pod-resources/configmaps/config/config.yaml --advertise-address=192.168.126.11 -v=2 --permit-address-sharing" 2025-12-12T16:18:28.744568390+00:00 stderr F Flag --openshift-config has been deprecated, to be removed 2025-12-12T16:18:28.745703188+00:00 stderr F I1212 16:18:28.745265 12 flags.go:64] FLAG: --admission-control="[]" 2025-12-12T16:18:28.745722638+00:00 stderr F I1212 16:18:28.745699 12 flags.go:64] FLAG: --admission-control-config-file="" 2025-12-12T16:18:28.745765629+00:00 stderr F I1212 16:18:28.745727 12 flags.go:64] FLAG: --advertise-address="192.168.126.11" 2025-12-12T16:18:28.745773440+00:00 stderr F I1212 16:18:28.745757 12 flags.go:64] FLAG: --aggregator-reject-forwarding-redirect="true" 2025-12-12T16:18:28.745824091+00:00 stderr F I1212 16:18:28.745778 12 flags.go:64] FLAG: --allow-metric-labels="[]" 2025-12-12T16:18:28.745831491+00:00 stderr F I1212 16:18:28.745815 12 flags.go:64] FLAG: --allow-metric-labels-manifest="" 2025-12-12T16:18:28.745864672+00:00 stderr F I1212 16:18:28.745837 12 flags.go:64] FLAG: --allow-privileged="false" 2025-12-12T16:18:28.745893933+00:00 stderr F I1212 16:18:28.745866 12 flags.go:64] FLAG: --anonymous-auth="true" 2025-12-12T16:18:28.745920563+00:00 stderr F I1212 16:18:28.745892 12 flags.go:64] FLAG: --api-audiences="[]" 2025-12-12T16:18:28.745948434+00:00 stderr F I1212 16:18:28.745921 12 flags.go:64] FLAG: --apiserver-count="1" 2025-12-12T16:18:28.745975155+00:00 stderr F I1212 16:18:28.745946 12 flags.go:64] FLAG: --audit-log-batch-buffer-size="10000" 2025-12-12T16:18:28.745982845+00:00 stderr F I1212 16:18:28.745970 12 flags.go:64] FLAG: --audit-log-batch-max-size="1" 2025-12-12T16:18:28.746077327+00:00 stderr F I1212 16:18:28.746032 12 flags.go:64] FLAG: --audit-log-batch-max-wait="0s" 2025-12-12T16:18:28.746086337+00:00 stderr F I1212 16:18:28.746069 12 flags.go:64] FLAG: --audit-log-batch-throttle-burst="0" 2025-12-12T16:18:28.746112818+00:00 stderr F I1212 16:18:28.746088 12 flags.go:64] FLAG: --audit-log-batch-throttle-enable="false" 2025-12-12T16:18:28.746140119+00:00 stderr F I1212 16:18:28.746110 12 flags.go:64] FLAG: 
--audit-log-batch-throttle-qps="0" 2025-12-12T16:18:28.746164469+00:00 stderr F I1212 16:18:28.746139 12 flags.go:64] FLAG: --audit-log-compress="false" 2025-12-12T16:18:28.746198270+00:00 stderr F I1212 16:18:28.746161 12 flags.go:64] FLAG: --audit-log-format="json" 2025-12-12T16:18:28.746246071+00:00 stderr F I1212 16:18:28.746216 12 flags.go:64] FLAG: --audit-log-maxage="0" 2025-12-12T16:18:28.746274812+00:00 stderr F I1212 16:18:28.746248 12 flags.go:64] FLAG: --audit-log-maxbackup="0" 2025-12-12T16:18:28.746298013+00:00 stderr F I1212 16:18:28.746275 12 flags.go:64] FLAG: --audit-log-maxsize="0" 2025-12-12T16:18:28.746319323+00:00 stderr F I1212 16:18:28.746297 12 flags.go:64] FLAG: --audit-log-mode="blocking" 2025-12-12T16:18:28.746331273+00:00 stderr F I1212 16:18:28.746317 12 flags.go:64] FLAG: --audit-log-path="" 2025-12-12T16:18:28.746357514+00:00 stderr F I1212 16:18:28.746335 12 flags.go:64] FLAG: --audit-log-truncate-enabled="false" 2025-12-12T16:18:28.746383655+00:00 stderr F I1212 16:18:28.746357 12 flags.go:64] FLAG: --audit-log-truncate-max-batch-size="10485760" 2025-12-12T16:18:28.746421026+00:00 stderr F I1212 16:18:28.746382 12 flags.go:64] FLAG: --audit-log-truncate-max-event-size="102400" 2025-12-12T16:18:28.746447016+00:00 stderr F I1212 16:18:28.746418 12 flags.go:64] FLAG: --audit-log-version="audit.k8s.io/v1" 2025-12-12T16:18:28.746456107+00:00 stderr F I1212 16:18:28.746442 12 flags.go:64] FLAG: --audit-policy-file="" 2025-12-12T16:18:28.746486147+00:00 stderr F I1212 16:18:28.746460 12 flags.go:64] FLAG: --audit-webhook-batch-buffer-size="10000" 2025-12-12T16:18:28.746495648+00:00 stderr F I1212 16:18:28.746482 12 flags.go:64] FLAG: --audit-webhook-batch-initial-backoff="10s" 2025-12-12T16:18:28.746522368+00:00 stderr F I1212 16:18:28.746500 12 flags.go:64] FLAG: --audit-webhook-batch-max-size="400" 2025-12-12T16:18:28.746544779+00:00 stderr F I1212 16:18:28.746522 12 flags.go:64] FLAG: --audit-webhook-batch-max-wait="30s" 2025-12-12T16:18:28.746565709+00:00 stderr F I1212 16:18:28.746543 12 flags.go:64] FLAG: --audit-webhook-batch-throttle-burst="15" 2025-12-12T16:18:28.746586980+00:00 stderr F I1212 16:18:28.746564 12 flags.go:64] FLAG: --audit-webhook-batch-throttle-enable="true" 2025-12-12T16:18:28.746616110+00:00 stderr F I1212 16:18:28.746585 12 flags.go:64] FLAG: --audit-webhook-batch-throttle-qps="10" 2025-12-12T16:18:28.746639351+00:00 stderr F I1212 16:18:28.746616 12 flags.go:64] FLAG: --audit-webhook-config-file="" 2025-12-12T16:18:28.746660362+00:00 stderr F I1212 16:18:28.746637 12 flags.go:64] FLAG: --audit-webhook-initial-backoff="10s" 2025-12-12T16:18:28.746681272+00:00 stderr F I1212 16:18:28.746658 12 flags.go:64] FLAG: --audit-webhook-mode="batch" 2025-12-12T16:18:28.746702143+00:00 stderr F I1212 16:18:28.746679 12 flags.go:64] FLAG: --audit-webhook-truncate-enabled="false" 2025-12-12T16:18:28.746724743+00:00 stderr F I1212 16:18:28.746701 12 flags.go:64] FLAG: --audit-webhook-truncate-max-batch-size="10485760" 2025-12-12T16:18:28.746745554+00:00 stderr F I1212 16:18:28.746723 12 flags.go:64] FLAG: --audit-webhook-truncate-max-event-size="102400" 2025-12-12T16:18:28.746768374+00:00 stderr F I1212 16:18:28.746746 12 flags.go:64] FLAG: --audit-webhook-version="audit.k8s.io/v1" 2025-12-12T16:18:28.746796415+00:00 stderr F I1212 16:18:28.746768 12 flags.go:64] FLAG: --authentication-config="" 2025-12-12T16:18:28.746853926+00:00 stderr F I1212 16:18:28.746796 12 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" 
2025-12-12T16:18:28.746853926+00:00 stderr F I1212 16:18:28.746841 12 flags.go:64] FLAG: --authentication-token-webhook-config-file="" 2025-12-12T16:18:28.746883167+00:00 stderr F I1212 16:18:28.746860 12 flags.go:64] FLAG: --authentication-token-webhook-version="v1beta1" 2025-12-12T16:18:28.746906508+00:00 stderr F I1212 16:18:28.746884 12 flags.go:64] FLAG: --authorization-config="" 2025-12-12T16:18:28.746941749+00:00 stderr F I1212 16:18:28.746906 12 flags.go:64] FLAG: --authorization-mode="[]" 2025-12-12T16:18:28.746978919+00:00 stderr F I1212 16:18:28.746945 12 flags.go:64] FLAG: --authorization-policy-file="" 2025-12-12T16:18:28.747002210+00:00 stderr F I1212 16:18:28.746978 12 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" 2025-12-12T16:18:28.747015430+00:00 stderr F I1212 16:18:28.747001 12 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" 2025-12-12T16:18:28.747039741+00:00 stderr F I1212 16:18:28.747017 12 flags.go:64] FLAG: --authorization-webhook-config-file="" 2025-12-12T16:18:28.747060091+00:00 stderr F I1212 16:18:28.747038 12 flags.go:64] FLAG: --authorization-webhook-version="v1beta1" 2025-12-12T16:18:28.747084432+00:00 stderr F I1212 16:18:28.747061 12 flags.go:64] FLAG: --bind-address="0.0.0.0" 2025-12-12T16:18:28.747106093+00:00 stderr F I1212 16:18:28.747083 12 flags.go:64] FLAG: --cert-dir="/var/run/kubernetes" 2025-12-12T16:18:28.747127263+00:00 stderr F I1212 16:18:28.747104 12 flags.go:64] FLAG: --client-ca-file="" 2025-12-12T16:18:28.747152034+00:00 stderr F I1212 16:18:28.747125 12 flags.go:64] FLAG: --contention-profiling="false" 2025-12-12T16:18:28.747192005+00:00 stderr F I1212 16:18:28.747151 12 flags.go:64] FLAG: --cors-allowed-origins="[]" 2025-12-12T16:18:28.747228996+00:00 stderr F I1212 16:18:28.747189 12 flags.go:64] FLAG: --debug-socket-path="" 2025-12-12T16:18:28.747263386+00:00 stderr F I1212 16:18:28.747228 12 flags.go:64] FLAG: --default-not-ready-toleration-seconds="300" 2025-12-12T16:18:28.747288427+00:00 stderr F I1212 16:18:28.747265 12 flags.go:64] FLAG: --default-unreachable-toleration-seconds="300" 2025-12-12T16:18:28.747309518+00:00 stderr F I1212 16:18:28.747286 12 flags.go:64] FLAG: --default-watch-cache-size="100" 2025-12-12T16:18:28.747330898+00:00 stderr F I1212 16:18:28.747307 12 flags.go:64] FLAG: --delete-collection-workers="1" 2025-12-12T16:18:28.747361829+00:00 stderr F I1212 16:18:28.747328 12 flags.go:64] FLAG: --disable-admission-plugins="[]" 2025-12-12T16:18:28.747384109+00:00 stderr F I1212 16:18:28.747360 12 flags.go:64] FLAG: --disable-http2-serving="false" 2025-12-12T16:18:28.747406620+00:00 stderr F I1212 16:18:28.747382 12 flags.go:64] FLAG: --disabled-metrics="[]" 2025-12-12T16:18:28.747427221+00:00 stderr F I1212 16:18:28.747405 12 flags.go:64] FLAG: --egress-selector-config-file="" 2025-12-12T16:18:28.747454511+00:00 stderr F I1212 16:18:28.747425 12 flags.go:64] FLAG: --emulated-version="[]" 2025-12-12T16:18:28.747477252+00:00 stderr F I1212 16:18:28.747454 12 flags.go:64] FLAG: --emulation-forward-compatible="false" 2025-12-12T16:18:28.747498182+00:00 stderr F I1212 16:18:28.747475 12 flags.go:64] FLAG: --enable-admission-plugins="[]" 2025-12-12T16:18:28.747518963+00:00 stderr F I1212 16:18:28.747497 12 flags.go:64] FLAG: --enable-aggregator-routing="false" 2025-12-12T16:18:28.747539333+00:00 stderr F I1212 16:18:28.747517 12 flags.go:64] FLAG: --enable-bootstrap-token-auth="false" 2025-12-12T16:18:28.747561174+00:00 stderr F I1212 16:18:28.747539 12 flags.go:64] FLAG: 
--enable-garbage-collector="true" 2025-12-12T16:18:28.747582164+00:00 stderr F I1212 16:18:28.747560 12 flags.go:64] FLAG: --enable-logs-handler="false" 2025-12-12T16:18:28.747603665+00:00 stderr F I1212 16:18:28.747581 12 flags.go:64] FLAG: --enable-priority-and-fairness="true" 2025-12-12T16:18:28.747627805+00:00 stderr F I1212 16:18:28.747602 12 flags.go:64] FLAG: --encryption-provider-config="" 2025-12-12T16:18:28.747665406+00:00 stderr F I1212 16:18:28.747631 12 flags.go:64] FLAG: --encryption-provider-config-automatic-reload="false" 2025-12-12T16:18:28.747697097+00:00 stderr F I1212 16:18:28.747671 12 flags.go:64] FLAG: --endpoint-reconciler-type="lease" 2025-12-12T16:18:28.747728178+00:00 stderr F I1212 16:18:28.747697 12 flags.go:64] FLAG: --etcd-cafile="" 2025-12-12T16:18:28.747751279+00:00 stderr F I1212 16:18:28.747728 12 flags.go:64] FLAG: --etcd-certfile="" 2025-12-12T16:18:28.747776049+00:00 stderr F I1212 16:18:28.747751 12 flags.go:64] FLAG: --etcd-compaction-interval="5m0s" 2025-12-12T16:18:28.747803550+00:00 stderr F I1212 16:18:28.747777 12 flags.go:64] FLAG: --etcd-count-metric-poll-period="1m0s" 2025-12-12T16:18:28.747836891+00:00 stderr F I1212 16:18:28.747807 12 flags.go:64] FLAG: --etcd-db-metric-poll-interval="30s" 2025-12-12T16:18:28.747860951+00:00 stderr F I1212 16:18:28.747836 12 flags.go:64] FLAG: --etcd-healthcheck-timeout="2s" 2025-12-12T16:18:28.747887522+00:00 stderr F I1212 16:18:28.747862 12 flags.go:64] FLAG: --etcd-keyfile="" 2025-12-12T16:18:28.747913223+00:00 stderr F I1212 16:18:28.747888 12 flags.go:64] FLAG: --etcd-prefix="/registry" 2025-12-12T16:18:28.747943123+00:00 stderr F I1212 16:18:28.747912 12 flags.go:64] FLAG: --etcd-readycheck-timeout="2s" 2025-12-12T16:18:28.747975534+00:00 stderr F I1212 16:18:28.747942 12 flags.go:64] FLAG: --etcd-servers="[]" 2025-12-12T16:18:28.748003185+00:00 stderr F I1212 16:18:28.747976 12 flags.go:64] FLAG: --etcd-servers-overrides="[]" 2025-12-12T16:18:28.748028755+00:00 stderr F I1212 16:18:28.748004 12 flags.go:64] FLAG: --event-ttl="1h0m0s" 2025-12-12T16:18:28.748052656+00:00 stderr F I1212 16:18:28.748028 12 flags.go:64] FLAG: --external-hostname="" 2025-12-12T16:18:28.748086817+00:00 stderr F I1212 16:18:28.748054 12 flags.go:64] FLAG: --feature-gates="" 2025-12-12T16:18:28.748119288+00:00 stderr F I1212 16:18:28.748087 12 flags.go:64] FLAG: --goaway-chance="0" 2025-12-12T16:18:28.748144968+00:00 stderr F I1212 16:18:28.748120 12 flags.go:64] FLAG: --help="false" 2025-12-12T16:18:28.748170949+00:00 stderr F I1212 16:18:28.748145 12 flags.go:64] FLAG: --http2-max-streams-per-connection="0" 2025-12-12T16:18:28.748298572+00:00 stderr F I1212 16:18:28.748171 12 flags.go:64] FLAG: --kubelet-certificate-authority="" 2025-12-12T16:18:28.748310962+00:00 stderr F I1212 16:18:28.748288 12 flags.go:64] FLAG: --kubelet-client-certificate="" 2025-12-12T16:18:28.748339993+00:00 stderr F I1212 16:18:28.748311 12 flags.go:64] FLAG: --kubelet-client-key="" 2025-12-12T16:18:28.748368694+00:00 stderr F I1212 16:18:28.748338 12 flags.go:64] FLAG: --kubelet-port="10250" 2025-12-12T16:18:28.748416865+00:00 stderr F I1212 16:18:28.748366 12 flags.go:64] FLAG: --kubelet-preferred-address-types="[Hostname,InternalDNS,InternalIP,ExternalDNS,ExternalIP]" 2025-12-12T16:18:28.748446176+00:00 stderr F I1212 16:18:28.748418 12 flags.go:64] FLAG: --kubelet-read-only-port="10255" 2025-12-12T16:18:28.748467656+00:00 stderr F I1212 16:18:28.748443 12 flags.go:64] FLAG: --kubelet-timeout="5s" 2025-12-12T16:18:28.748488637+00:00 stderr F 
I1212 16:18:28.748465 12 flags.go:64] FLAG: --kubernetes-service-node-port="0" 2025-12-12T16:18:28.748522058+00:00 stderr F I1212 16:18:28.748487 12 flags.go:64] FLAG: --lease-reuse-duration-seconds="60" 2025-12-12T16:18:28.748543648+00:00 stderr F I1212 16:18:28.748521 12 flags.go:64] FLAG: --livez-grace-period="0s" 2025-12-12T16:18:28.748570789+00:00 stderr F I1212 16:18:28.748543 12 flags.go:64] FLAG: --log-flush-frequency="5s" 2025-12-12T16:18:28.748599810+00:00 stderr F I1212 16:18:28.748568 12 flags.go:64] FLAG: --log-json-info-buffer-size="0" 2025-12-12T16:18:28.748631240+00:00 stderr F I1212 16:18:28.748595 12 flags.go:64] FLAG: --log-json-split-stream="false" 2025-12-12T16:18:28.748640591+00:00 stderr F I1212 16:18:28.748626 12 flags.go:64] FLAG: --log-text-info-buffer-size="0" 2025-12-12T16:18:28.748673001+00:00 stderr F I1212 16:18:28.748645 12 flags.go:64] FLAG: --log-text-split-stream="false" 2025-12-12T16:18:28.748680822+00:00 stderr F I1212 16:18:28.748666 12 flags.go:64] FLAG: --logging-format="text" 2025-12-12T16:18:28.748708332+00:00 stderr F I1212 16:18:28.748683 12 flags.go:64] FLAG: --max-connection-bytes-per-sec="0" 2025-12-12T16:18:28.748717542+00:00 stderr F I1212 16:18:28.748705 12 flags.go:64] FLAG: --max-mutating-requests-inflight="200" 2025-12-12T16:18:28.748747913+00:00 stderr F I1212 16:18:28.748723 12 flags.go:64] FLAG: --max-requests-inflight="400" 2025-12-12T16:18:28.748755923+00:00 stderr F I1212 16:18:28.748743 12 flags.go:64] FLAG: --min-request-timeout="1800" 2025-12-12T16:18:28.748784034+00:00 stderr F I1212 16:18:28.748761 12 flags.go:64] FLAG: --oidc-ca-file="" 2025-12-12T16:18:28.748812805+00:00 stderr F I1212 16:18:28.748781 12 flags.go:64] FLAG: --oidc-client-id="" 2025-12-12T16:18:28.748839255+00:00 stderr F I1212 16:18:28.748809 12 flags.go:64] FLAG: --oidc-groups-claim="" 2025-12-12T16:18:28.748846716+00:00 stderr F I1212 16:18:28.748831 12 flags.go:64] FLAG: --oidc-groups-prefix="" 2025-12-12T16:18:28.748875326+00:00 stderr F I1212 16:18:28.748850 12 flags.go:64] FLAG: --oidc-issuer-url="" 2025-12-12T16:18:28.748884557+00:00 stderr F I1212 16:18:28.748871 12 flags.go:64] FLAG: --oidc-required-claim="" 2025-12-12T16:18:28.748919047+00:00 stderr F I1212 16:18:28.748891 12 flags.go:64] FLAG: --oidc-signing-algs="[RS256]" 2025-12-12T16:18:28.748927088+00:00 stderr F I1212 16:18:28.748914 12 flags.go:64] FLAG: --oidc-username-claim="sub" 2025-12-12T16:18:28.748959958+00:00 stderr F I1212 16:18:28.748931 12 flags.go:64] FLAG: --oidc-username-prefix="" 2025-12-12T16:18:28.748968349+00:00 stderr F I1212 16:18:28.748952 12 flags.go:64] FLAG: --openshift-config="/etc/kubernetes/static-pod-resources/configmaps/config/config.yaml" 2025-12-12T16:18:28.748999009+00:00 stderr F I1212 16:18:28.748971 12 flags.go:64] FLAG: --peer-advertise-ip="" 2025-12-12T16:18:28.749006270+00:00 stderr F I1212 16:18:28.748993 12 flags.go:64] FLAG: --peer-advertise-port="" 2025-12-12T16:18:28.749084231+00:00 stderr F I1212 16:18:28.749010 12 flags.go:64] FLAG: --peer-ca-file="" 2025-12-12T16:18:28.749084231+00:00 stderr F I1212 16:18:28.749031 12 flags.go:64] FLAG: --permit-address-sharing="true" 2025-12-12T16:18:28.749084231+00:00 stderr F I1212 16:18:28.749048 12 flags.go:64] FLAG: --permit-port-sharing="false" 2025-12-12T16:18:28.749084231+00:00 stderr F I1212 16:18:28.749071 12 flags.go:64] FLAG: --profiling="true" 2025-12-12T16:18:28.749132723+00:00 stderr F I1212 16:18:28.749089 12 flags.go:64] FLAG: --proxy-client-cert-file="" 2025-12-12T16:18:28.749132723+00:00 stderr 
F I1212 16:18:28.749106 12 flags.go:64] FLAG: --proxy-client-key-file="" 2025-12-12T16:18:28.749140443+00:00 stderr F I1212 16:18:28.749122 12 flags.go:64] FLAG: --request-timeout="1m0s" 2025-12-12T16:18:28.749169624+00:00 stderr F I1212 16:18:28.749141 12 flags.go:64] FLAG: --requestheader-allowed-names="[]" 2025-12-12T16:18:28.749198284+00:00 stderr F I1212 16:18:28.749166 12 flags.go:64] FLAG: --requestheader-client-ca-file="" 2025-12-12T16:18:28.749224175+00:00 stderr F I1212 16:18:28.749196 12 flags.go:64] FLAG: --requestheader-extra-headers-prefix="[]" 2025-12-12T16:18:28.749245905+00:00 stderr F I1212 16:18:28.749219 12 flags.go:64] FLAG: --requestheader-group-headers="[]" 2025-12-12T16:18:28.749273556+00:00 stderr F I1212 16:18:28.749244 12 flags.go:64] FLAG: --requestheader-uid-headers="[]" 2025-12-12T16:18:28.749285116+00:00 stderr F I1212 16:18:28.749269 12 flags.go:64] FLAG: --requestheader-username-headers="[]" 2025-12-12T16:18:28.749316287+00:00 stderr F I1212 16:18:28.749289 12 flags.go:64] FLAG: --runtime-config="" 2025-12-12T16:18:28.749343318+00:00 stderr F I1212 16:18:28.749312 12 flags.go:64] FLAG: --runtime-config-emulation-forward-compatible="false" 2025-12-12T16:18:28.749364358+00:00 stderr F I1212 16:18:28.749340 12 flags.go:64] FLAG: --secure-port="6443" 2025-12-12T16:18:28.749374079+00:00 stderr F I1212 16:18:28.749360 12 flags.go:64] FLAG: --send-retry-after-while-not-ready-once="false" 2025-12-12T16:18:28.749399959+00:00 stderr F I1212 16:18:28.749377 12 flags.go:64] FLAG: --service-account-extend-token-expiration="true" 2025-12-12T16:18:28.749431760+00:00 stderr F I1212 16:18:28.749395 12 flags.go:64] FLAG: --service-account-issuer="[]" 2025-12-12T16:18:28.749439320+00:00 stderr F I1212 16:18:28.749423 12 flags.go:64] FLAG: --service-account-jwks-uri="" 2025-12-12T16:18:28.749475111+00:00 stderr F I1212 16:18:28.749441 12 flags.go:64] FLAG: --service-account-key-file="[]" 2025-12-12T16:18:28.749482881+00:00 stderr F I1212 16:18:28.749465 12 flags.go:64] FLAG: --service-account-lookup="true" 2025-12-12T16:18:28.749504122+00:00 stderr F I1212 16:18:28.749483 12 flags.go:64] FLAG: --service-account-max-token-expiration="0s" 2025-12-12T16:18:28.749525512+00:00 stderr F I1212 16:18:28.749503 12 flags.go:64] FLAG: --service-account-signing-endpoint="" 2025-12-12T16:18:28.749534433+00:00 stderr F I1212 16:18:28.749521 12 flags.go:64] FLAG: --service-account-signing-key-file="" 2025-12-12T16:18:28.749562693+00:00 stderr F I1212 16:18:28.749538 12 flags.go:64] FLAG: --service-cluster-ip-range="" 2025-12-12T16:18:28.749596644+00:00 stderr F I1212 16:18:28.749559 12 flags.go:64] FLAG: --service-node-port-range="30000-32767" 2025-12-12T16:18:28.749618115+00:00 stderr F I1212 16:18:28.749594 12 flags.go:64] FLAG: --show-hidden-metrics-for-version="" 2025-12-12T16:18:28.749627045+00:00 stderr F I1212 16:18:28.749614 12 flags.go:64] FLAG: --shutdown-delay-duration="0s" 2025-12-12T16:18:28.749658486+00:00 stderr F I1212 16:18:28.749630 12 flags.go:64] FLAG: --shutdown-send-retry-after="false" 2025-12-12T16:18:28.749667226+00:00 stderr F I1212 16:18:28.749652 12 flags.go:64] FLAG: --shutdown-watch-termination-grace-period="0s" 2025-12-12T16:18:28.749706127+00:00 stderr F I1212 16:18:28.749671 12 flags.go:64] FLAG: --storage-backend="" 2025-12-12T16:18:28.749716067+00:00 stderr F I1212 16:18:28.749698 12 flags.go:64] FLAG: --storage-initialization-timeout="1m0s" 2025-12-12T16:18:28.749751428+00:00 stderr F I1212 16:18:28.749716 12 flags.go:64] FLAG: 
--storage-media-type="application/vnd.kubernetes.protobuf" 2025-12-12T16:18:28.749751428+00:00 stderr F I1212 16:18:28.749738 12 flags.go:64] FLAG: --strict-transport-security-directives="[]" 2025-12-12T16:18:28.749778789+00:00 stderr F I1212 16:18:28.749756 12 flags.go:64] FLAG: --tls-cert-file="" 2025-12-12T16:18:28.749810789+00:00 stderr F I1212 16:18:28.749774 12 flags.go:64] FLAG: --tls-cipher-suites="[]" 2025-12-12T16:18:28.749819740+00:00 stderr F I1212 16:18:28.749806 12 flags.go:64] FLAG: --tls-min-version="" 2025-12-12T16:18:28.749849730+00:00 stderr F I1212 16:18:28.749824 12 flags.go:64] FLAG: --tls-private-key-file="" 2025-12-12T16:18:28.749870171+00:00 stderr F I1212 16:18:28.749846 12 flags.go:64] FLAG: --tls-sni-cert-key="[]" 2025-12-12T16:18:28.749879091+00:00 stderr F I1212 16:18:28.749866 12 flags.go:64] FLAG: --token-auth-file="" 2025-12-12T16:18:28.749923642+00:00 stderr F I1212 16:18:28.749884 12 flags.go:64] FLAG: --tracing-config-file="" 2025-12-12T16:18:28.749976174+00:00 stderr F I1212 16:18:28.749929 12 flags.go:64] FLAG: --v="2" 2025-12-12T16:18:28.749999894+00:00 stderr F I1212 16:18:28.749971 12 flags.go:64] FLAG: --version="false" 2025-12-12T16:18:28.750032415+00:00 stderr F I1212 16:18:28.750000 12 flags.go:64] FLAG: --vmodule="" 2025-12-12T16:18:28.750055535+00:00 stderr F I1212 16:18:28.750028 12 flags.go:64] FLAG: --watch-cache="true" 2025-12-12T16:18:28.750086506+00:00 stderr F I1212 16:18:28.750055 12 flags.go:64] FLAG: --watch-cache-sizes="[]" 2025-12-12T16:18:28.750172218+00:00 stderr F I1212 16:18:28.750140 12 plugins.go:83] "Registered admission plugin" plugin="authorization.openshift.io/RestrictSubjectBindings" 2025-12-12T16:18:28.750208109+00:00 stderr F I1212 16:18:28.750173 12 plugins.go:83] "Registered admission plugin" plugin="route.openshift.io/RouteHostAssignment" 2025-12-12T16:18:28.750249990+00:00 stderr F I1212 16:18:28.750217 12 plugins.go:83] "Registered admission plugin" plugin="image.openshift.io/ImagePolicy" 2025-12-12T16:18:28.750281781+00:00 stderr F I1212 16:18:28.750251 12 plugins.go:83] "Registered admission plugin" plugin="route.openshift.io/IngressAdmission" 2025-12-12T16:18:28.750315112+00:00 stderr F I1212 16:18:28.750283 12 plugins.go:83] "Registered admission plugin" plugin="autoscaling.openshift.io/ManagementCPUsOverride" 2025-12-12T16:18:28.750354133+00:00 stderr F I1212 16:18:28.750314 12 plugins.go:83] "Registered admission plugin" plugin="autoscaling.openshift.io/ManagedNode" 2025-12-12T16:18:28.750383804+00:00 stderr F I1212 16:18:28.750355 12 plugins.go:83] "Registered admission plugin" plugin="autoscaling.openshift.io/MixedCPUs" 2025-12-12T16:18:28.750411664+00:00 stderr F I1212 16:18:28.750383 12 plugins.go:83] "Registered admission plugin" plugin="scheduling.openshift.io/OriginPodNodeEnvironment" 2025-12-12T16:18:28.750437745+00:00 stderr F I1212 16:18:28.750410 12 plugins.go:83] "Registered admission plugin" plugin="autoscaling.openshift.io/ClusterResourceOverride" 2025-12-12T16:18:28.750468446+00:00 stderr F I1212 16:18:28.750438 12 plugins.go:83] "Registered admission plugin" plugin="quota.openshift.io/ClusterResourceQuota" 2025-12-12T16:18:28.750495676+00:00 stderr F I1212 16:18:28.750467 12 plugins.go:83] "Registered admission plugin" plugin="autoscaling.openshift.io/RunOnceDuration" 2025-12-12T16:18:28.750532037+00:00 stderr F I1212 16:18:28.750494 12 plugins.go:83] "Registered admission plugin" plugin="scheduling.openshift.io/PodNodeConstraints" 2025-12-12T16:18:28.750560648+00:00 stderr F I1212 
16:18:28.750532 12 plugins.go:83] "Registered admission plugin" plugin="security.openshift.io/SecurityContextConstraint" 2025-12-12T16:18:28.750580748+00:00 stderr F I1212 16:18:28.750558 12 plugins.go:83] "Registered admission plugin" plugin="security.openshift.io/SCCExecRestrictions" 2025-12-12T16:18:28.750600969+00:00 stderr F I1212 16:18:28.750578 12 plugins.go:83] "Registered admission plugin" plugin="network.openshift.io/ExternalIPRanger" 2025-12-12T16:18:28.750629700+00:00 stderr F I1212 16:18:28.750598 12 plugins.go:83] "Registered admission plugin" plugin="network.openshift.io/RestrictedEndpointsAdmission" 2025-12-12T16:18:28.750650050+00:00 stderr F I1212 16:18:28.750628 12 plugins.go:83] "Registered admission plugin" plugin="storage.openshift.io/CSIInlineVolumeSecurity" 2025-12-12T16:18:28.750670231+00:00 stderr F I1212 16:18:28.750648 12 plugins.go:83] "Registered admission plugin" plugin="storage.openshift.io/PerformantSecurityPolicy" 2025-12-12T16:18:28.750706782+00:00 stderr F I1212 16:18:28.750681 12 plugins.go:83] "Registered admission plugin" plugin="config.openshift.io/ValidateAPIServer" 2025-12-12T16:18:28.750737742+00:00 stderr F I1212 16:18:28.750704 12 plugins.go:83] "Registered admission plugin" plugin="config.openshift.io/ValidateAuthentication" 2025-12-12T16:18:28.750768843+00:00 stderr F I1212 16:18:28.750739 12 plugins.go:83] "Registered admission plugin" plugin="config.openshift.io/ValidateFeatureGate" 2025-12-12T16:18:28.750830515+00:00 stderr F I1212 16:18:28.750776 12 plugins.go:83] "Registered admission plugin" plugin="config.openshift.io/ValidateConsole" 2025-12-12T16:18:28.750841065+00:00 stderr F I1212 16:18:28.750816 12 plugins.go:83] "Registered admission plugin" plugin="operator.openshift.io/ValidateDNS" 2025-12-12T16:18:28.750878816+00:00 stderr F I1212 16:18:28.750849 12 plugins.go:83] "Registered admission plugin" plugin="config.openshift.io/ValidateImage" 2025-12-12T16:18:28.750907467+00:00 stderr F I1212 16:18:28.750878 12 plugins.go:83] "Registered admission plugin" plugin="config.openshift.io/ValidateOAuth" 2025-12-12T16:18:28.750935287+00:00 stderr F I1212 16:18:28.750906 12 plugins.go:83] "Registered admission plugin" plugin="config.openshift.io/ValidateProject" 2025-12-12T16:18:28.750969328+00:00 stderr F I1212 16:18:28.750932 12 plugins.go:83] "Registered admission plugin" plugin="config.openshift.io/DenyDeleteClusterConfiguration" 2025-12-12T16:18:28.750997669+00:00 stderr F I1212 16:18:28.750964 12 plugins.go:83] "Registered admission plugin" plugin="operator.openshift.io/DenyDeleteClusterOperators" 2025-12-12T16:18:28.751005569+00:00 stderr F I1212 16:18:28.750991 12 plugins.go:83] "Registered admission plugin" plugin="config.openshift.io/ValidateScheduler" 2025-12-12T16:18:28.751038840+00:00 stderr F I1212 16:18:28.751010 12 plugins.go:83] "Registered admission plugin" plugin="operator.openshift.io/ValidateKubeControllerManager" 2025-12-12T16:18:28.751067721+00:00 stderr F I1212 16:18:28.751039 12 plugins.go:83] "Registered admission plugin" plugin="quota.openshift.io/ValidateClusterResourceQuota" 2025-12-12T16:18:28.751098271+00:00 stderr F I1212 16:18:28.751068 12 plugins.go:83] "Registered admission plugin" plugin="security.openshift.io/ValidateSecurityContextConstraints" 2025-12-12T16:18:28.751126872+00:00 stderr F I1212 16:18:28.751097 12 plugins.go:83] "Registered admission plugin" plugin="authorization.openshift.io/ValidateRoleBindingRestriction" 2025-12-12T16:18:28.751159503+00:00 stderr F I1212 16:18:28.751126 12 plugins.go:83] 
"Registered admission plugin" plugin="config.openshift.io/ValidateNetwork" 2025-12-12T16:18:28.751212734+00:00 stderr F I1212 16:18:28.751164 12 plugins.go:83] "Registered admission plugin" plugin="config.openshift.io/ValidateAPIRequestCount" 2025-12-12T16:18:28.751246595+00:00 stderr F I1212 16:18:28.751212 12 plugins.go:83] "Registered admission plugin" plugin="config.openshift.io/ValidateConfigNodeV1" 2025-12-12T16:18:28.751278216+00:00 stderr F I1212 16:18:28.751248 12 plugins.go:83] "Registered admission plugin" plugin="security.openshift.io/DefaultSecurityContextConstraints" 2025-12-12T16:18:28.751305266+00:00 stderr F I1212 16:18:28.751277 12 plugins.go:83] "Registered admission plugin" plugin="route.openshift.io/ValidateRoute" 2025-12-12T16:18:28.751337377+00:00 stderr F I1212 16:18:28.751304 12 plugins.go:83] "Registered admission plugin" plugin="route.openshift.io/DefaultRoute" 2025-12-12T16:18:28.754976307+00:00 stderr F Flag --openshift-config has been deprecated, to be removed 2025-12-12T16:18:28.754976307+00:00 stderr F Flag --enable-logs-handler has been deprecated, Log handler functionality is deprecated 2025-12-12T16:18:28.754976307+00:00 stderr F Flag --kubelet-read-only-port has been deprecated, kubelet-read-only-port is deprecated and will be removed. 2025-12-12T16:18:28.754998118+00:00 stderr F I1212 16:18:28.754971 12 registry.go:355] setting kube:feature gate emulation version to 1.33 2025-12-12T16:18:28.755063429+00:00 stderr F I1212 16:18:28.755005 12 registry.go:375] setting kube:feature-gates=AdditionalRoutingCapabilities=true,AdminNetworkPolicy=true,AlibabaPlatform=true,AzureWorkloadIdentity=true,BuildCSIVolumes=true,CPMSMachineNamePrefix=true,ConsolePluginContentSecurityPolicy=true,GatewayAPI=true,GatewayAPIController=true,HighlyAvailableArbiter=true,ImageVolume=true,IngressControllerLBSubnetsAWS=true,KMSv1=true,MachineConfigNodes=true,ManagedBootImages=true,ManagedBootImagesAWS=true,MetricsCollectionProfiles=true,NetworkDiagnosticsConfig=true,NetworkLiveMigration=true,NetworkSegmentation=true,NewOLM=true,PinnedImages=true,ProcMountType=true,RouteAdvertisements=true,RouteExternalCertificate=true,ServiceAccountTokenNodeBinding=true,SetEIPForNLBIngressController=true,SigstoreImageVerification=true,StoragePerformantSecurityPolicy=true,UpgradeStatus=true,UserNamespacesPodSecurityStandards=true,UserNamespacesSupport=true,VSphereMultiDisk=true,VSphereMultiNetworks=true,AWSClusterHostedDNS=false,AWSClusterHostedDNSInstall=false,AWSDedicatedHosts=false,AWSServiceLBNetworkSecurityGroup=false,AutomatedEtcdBackup=false,AzureClusterHostedDNSInstall=false,AzureDedicatedHosts=false,AzureMultiDisk=false,BootImageSkewEnforcement=false,BootcNodeManagement=false,ClusterAPIInstall=false,ClusterAPIInstallIBMCloud=false,ClusterMonitoringConfig=false,ClusterVersionOperatorConfiguration=false,DNSNameResolver=false,DualReplica=false,DyanmicServiceEndpointIBMCloud=false,DynamicResourceAllocation=false,EtcdBackendQuota=false,EventedPLEG=false,Example=false,Example2=false,ExternalOIDC=false,ExternalOIDCWithUIDAndExtraClaimMappings=false,ExternalSnapshotMetadata=false,GCPClusterHostedDNS=false,GCPClusterHostedDNSInstall=false,GCPCustomAPIEndpoints=false,GCPCustomAPIEndpointsInstall=false,ImageModeStatusReporting=false,ImageStreamImportMode=false,IngressControllerDynamicConfigurationManager=false,InsightsConfig=false,InsightsConfigAPI=false,InsightsOnDemandDataGather=false,IrreconcilableMachineConfig=false,KMSEncryptionProvider=false,MachineAPIMigration=false,MachineAPIOperatorDisableMachin
eHealthCheckController=false,ManagedBootImagesAzure=false,ManagedBootImagesvSphere=false,MaxUnavailableStatefulSet=false,MinimumKubeletVersion=false,MixedCPUsAllocation=false,MultiArchInstallAzure=false,MultiDiskSetup=false,MutatingAdmissionPolicy=false,NewOLMCatalogdAPIV1Metas=false,NewOLMOwnSingleNamespace=false,NewOLMPreflightPermissionChecks=false,NewOLMWebhookProviderOpenshiftServiceCA=false,NoRegistryClusterOperations=false,NodeSwap=false,NutanixMultiSubnets=false,OVNObservability=false,OpenShiftPodSecurityAdmission=false,PreconfiguredUDNAddresses=false,SELinuxMount=false,ShortCertRotation=false,SignatureStores=false,SigstoreImageVerificationPKI=false,TranslateStreamCloseWebsocketRequests=false,VSphereConfigurableMaxAllowedBlockVolumesPerNode=false,VSphereHostVMGroupZonal=false,VSphereMixedNodeEnv=false,VolumeAttributesClass=false,VolumeGroupSnapshot=false 2025-12-12T16:18:28.755224743+00:00 stderr F W1212 16:18:28.755156 12 feature_gate.go:328] unrecognized feature gate: BuildCSIVolumes 2025-12-12T16:18:28.755241554+00:00 stderr F W1212 16:18:28.755217 12 feature_gate.go:328] unrecognized feature gate: IrreconcilableMachineConfig 2025-12-12T16:18:28.755270204+00:00 stderr F W1212 16:18:28.755241 12 feature_gate.go:328] unrecognized feature gate: MultiArchInstallAzure 2025-12-12T16:18:28.755295485+00:00 stderr F W1212 16:18:28.755269 12 feature_gate.go:328] unrecognized feature gate: AlibabaPlatform 2025-12-12T16:18:28.755321806+00:00 stderr F W1212 16:18:28.755295 12 feature_gate.go:328] unrecognized feature gate: AdminNetworkPolicy 2025-12-12T16:18:28.755350226+00:00 stderr F W1212 16:18:28.755321 12 feature_gate.go:328] unrecognized feature gate: MachineConfigNodes 2025-12-12T16:18:28.755389137+00:00 stderr F W1212 16:18:28.755356 12 feature_gate.go:328] unrecognized feature gate: NutanixMultiSubnets 2025-12-12T16:18:28.755399448+00:00 stderr F W1212 16:18:28.755384 12 feature_gate.go:328] unrecognized feature gate: IngressControllerLBSubnetsAWS 2025-12-12T16:18:28.755434098+00:00 stderr F W1212 16:18:28.755406 12 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstall 2025-12-12T16:18:28.755444289+00:00 stderr F W1212 16:18:28.755427 12 feature_gate.go:328] unrecognized feature gate: InsightsConfigAPI 2025-12-12T16:18:28.755633273+00:00 stderr F W1212 16:18:28.755443 12 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerificationPKI 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.755535 12 feature_gate.go:328] unrecognized feature gate: ExternalSnapshotMetadata 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756456 12 feature_gate.go:328] unrecognized feature gate: NewOLMWebhookProviderOpenshiftServiceCA 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756460 12 feature_gate.go:328] unrecognized feature gate: AWSServiceLBNetworkSecurityGroup 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756464 12 feature_gate.go:328] unrecognized feature gate: BootImageSkewEnforcement 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756467 12 feature_gate.go:328] unrecognized feature gate: SignatureStores 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756469 12 feature_gate.go:328] unrecognized feature gate: DyanmicServiceEndpointIBMCloud 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756473 12 feature_gate.go:328] unrecognized feature gate: CPMSMachineNamePrefix 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756476 12 feature_gate.go:328] unrecognized feature gate: 
NewOLMCatalogdAPIV1Metas 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756487 12 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNSInstall 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756490 12 feature_gate.go:328] unrecognized feature gate: ClusterVersionOperatorConfiguration 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756493 12 feature_gate.go:328] unrecognized feature gate: IngressControllerDynamicConfigurationManager 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756498 12 feature_gate.go:328] unrecognized feature gate: BootcNodeManagement 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756501 12 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNSInstall 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756503 12 feature_gate.go:328] unrecognized feature gate: NetworkDiagnosticsConfig 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756506 12 feature_gate.go:328] unrecognized feature gate: AzureMultiDisk 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756509 12 feature_gate.go:328] unrecognized feature gate: UpgradeStatus 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756512 12 feature_gate.go:328] unrecognized feature gate: Example 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756514 12 feature_gate.go:328] unrecognized feature gate: AdditionalRoutingCapabilities 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756516 12 feature_gate.go:328] unrecognized feature gate: MultiDiskSetup 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756519 12 feature_gate.go:328] unrecognized feature gate: AzureDedicatedHosts 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756521 12 feature_gate.go:328] unrecognized feature gate: ExternalOIDC 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756525 12 feature_gate.go:328] unrecognized feature gate: AzureWorkloadIdentity 2025-12-12T16:18:28.756538386+00:00 stderr F W1212 16:18:28.756528 12 feature_gate.go:328] unrecognized feature gate: ConsolePluginContentSecurityPolicy 2025-12-12T16:18:28.756571347+00:00 stderr F W1212 16:18:28.756531 12 feature_gate.go:328] unrecognized feature gate: ImageModeStatusReporting 2025-12-12T16:18:28.756571347+00:00 stderr F W1212 16:18:28.756535 12 feature_gate.go:328] unrecognized feature gate: ImageStreamImportMode 2025-12-12T16:18:28.756571347+00:00 stderr F W1212 16:18:28.756542 12 feature_gate.go:351] Setting GA feature gate ServiceAccountTokenNodeBinding=true. It will be removed in a future release. 
2025-12-12T16:18:28.756571347+00:00 stderr F W1212 16:18:28.756547 12 feature_gate.go:328] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController 2025-12-12T16:18:28.756571347+00:00 stderr F W1212 16:18:28.756553 12 feature_gate.go:328] unrecognized feature gate: ManagedBootImages 2025-12-12T16:18:28.756571347+00:00 stderr F W1212 16:18:28.756557 12 feature_gate.go:328] unrecognized feature gate: GatewayAPI 2025-12-12T16:18:28.756571347+00:00 stderr F W1212 16:18:28.756561 12 feature_gate.go:328] unrecognized feature gate: PreconfiguredUDNAddresses 2025-12-12T16:18:28.756571347+00:00 stderr F W1212 16:18:28.756564 12 feature_gate.go:328] unrecognized feature gate: NewOLMPreflightPermissionChecks 2025-12-12T16:18:28.756582547+00:00 stderr F W1212 16:18:28.756567 12 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAWS 2025-12-12T16:18:28.756582547+00:00 stderr F W1212 16:18:28.756570 12 feature_gate.go:328] unrecognized feature gate: OpenShiftPodSecurityAdmission 2025-12-12T16:18:28.756582547+00:00 stderr F W1212 16:18:28.756574 12 feature_gate.go:328] unrecognized feature gate: NetworkLiveMigration 2025-12-12T16:18:28.756590167+00:00 stderr F W1212 16:18:28.756578 12 feature_gate.go:328] unrecognized feature gate: VSphereMultiNetworks 2025-12-12T16:18:28.756597937+00:00 stderr F W1212 16:18:28.756583 12 feature_gate.go:349] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 2025-12-12T16:18:28.756597937+00:00 stderr F W1212 16:18:28.756588 12 feature_gate.go:328] unrecognized feature gate: VSphereMixedNodeEnv 2025-12-12T16:18:28.756597937+00:00 stderr F W1212 16:18:28.756591 12 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpoints 2025-12-12T16:18:28.756609638+00:00 stderr F W1212 16:18:28.756594 12 feature_gate.go:328] unrecognized feature gate: ClusterMonitoringConfig 2025-12-12T16:18:28.756609638+00:00 stderr F W1212 16:18:28.756597 12 feature_gate.go:328] unrecognized feature gate: KMSEncryptionProvider 2025-12-12T16:18:28.756609638+00:00 stderr F W1212 16:18:28.756603 12 feature_gate.go:328] unrecognized feature gate: VSphereConfigurableMaxAllowedBlockVolumesPerNode 2025-12-12T16:18:28.756617208+00:00 stderr F W1212 16:18:28.756607 12 feature_gate.go:328] unrecognized feature gate: HighlyAvailableArbiter 2025-12-12T16:18:28.756617208+00:00 stderr F W1212 16:18:28.756609 12 feature_gate.go:328] unrecognized feature gate: MachineAPIMigration 2025-12-12T16:18:28.756624488+00:00 stderr F W1212 16:18:28.756612 12 feature_gate.go:328] unrecognized feature gate: DualReplica 2025-12-12T16:18:28.756624488+00:00 stderr F W1212 16:18:28.756617 12 feature_gate.go:328] unrecognized feature gate: Example2 2025-12-12T16:18:28.756632298+00:00 stderr F W1212 16:18:28.756620 12 feature_gate.go:328] unrecognized feature gate: InsightsConfig 2025-12-12T16:18:28.756640708+00:00 stderr F W1212 16:18:28.756624 12 feature_gate.go:328] unrecognized feature gate: NewOLM 2025-12-12T16:18:28.756640708+00:00 stderr F W1212 16:18:28.756626 12 feature_gate.go:328] unrecognized feature gate: PinnedImages 2025-12-12T16:18:28.756640708+00:00 stderr F W1212 16:18:28.756629 12 feature_gate.go:328] unrecognized feature gate: NewOLMOwnSingleNamespace 2025-12-12T16:18:28.756640708+00:00 stderr F W1212 16:18:28.756632 12 feature_gate.go:328] unrecognized feature gate: ShortCertRotation 2025-12-12T16:18:28.756640708+00:00 stderr F W1212 16:18:28.756635 12 feature_gate.go:328] unrecognized feature gate: VSphereHostVMGroupZonal 
2025-12-12T16:18:28.756651279+00:00 stderr F W1212 16:18:28.756638 12 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesvSphere 2025-12-12T16:18:28.756651279+00:00 stderr F W1212 16:18:28.756641 12 feature_gate.go:328] unrecognized feature gate: NoRegistryClusterOperations 2025-12-12T16:18:28.756651279+00:00 stderr F W1212 16:18:28.756644 12 feature_gate.go:328] unrecognized feature gate: GatewayAPIController 2025-12-12T16:18:28.756660649+00:00 stderr F W1212 16:18:28.756646 12 feature_gate.go:328] unrecognized feature gate: NetworkSegmentation 2025-12-12T16:18:28.756660649+00:00 stderr F W1212 16:18:28.756649 12 feature_gate.go:328] unrecognized feature gate: OVNObservability 2025-12-12T16:18:28.756660649+00:00 stderr F W1212 16:18:28.756652 12 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstallIBMCloud 2025-12-12T16:18:28.756669839+00:00 stderr F W1212 16:18:28.756656 12 feature_gate.go:328] unrecognized feature gate: InsightsOnDemandDataGather 2025-12-12T16:18:28.756669839+00:00 stderr F W1212 16:18:28.756659 12 feature_gate.go:328] unrecognized feature gate: EtcdBackendQuota 2025-12-12T16:18:28.756669839+00:00 stderr F W1212 16:18:28.756663 12 feature_gate.go:328] unrecognized feature gate: VSphereMultiDisk 2025-12-12T16:18:28.756679249+00:00 stderr F W1212 16:18:28.756666 12 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerification 2025-12-12T16:18:28.756679249+00:00 stderr F W1212 16:18:28.756669 12 feature_gate.go:328] unrecognized feature gate: RouteAdvertisements 2025-12-12T16:18:28.756679249+00:00 stderr F W1212 16:18:28.756672 12 feature_gate.go:328] unrecognized feature gate: ExternalOIDCWithUIDAndExtraClaimMappings 2025-12-12T16:18:28.756688439+00:00 stderr F W1212 16:18:28.756675 12 feature_gate.go:328] unrecognized feature gate: DNSNameResolver 2025-12-12T16:18:28.756688439+00:00 stderr F W1212 16:18:28.756679 12 feature_gate.go:328] unrecognized feature gate: AzureClusterHostedDNSInstall 2025-12-12T16:18:28.756702120+00:00 stderr F W1212 16:18:28.756682 12 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpointsInstall 2025-12-12T16:18:28.756702120+00:00 stderr F W1212 16:18:28.756686 12 feature_gate.go:328] unrecognized feature gate: AutomatedEtcdBackup 2025-12-12T16:18:28.756702120+00:00 stderr F W1212 16:18:28.756691 12 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNS 2025-12-12T16:18:28.756702120+00:00 stderr F W1212 16:18:28.756694 12 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNS 2025-12-12T16:18:28.756710510+00:00 stderr F W1212 16:18:28.756699 12 feature_gate.go:328] unrecognized feature gate: VolumeGroupSnapshot 2025-12-12T16:18:28.756710510+00:00 stderr F W1212 16:18:28.756702 12 feature_gate.go:328] unrecognized feature gate: MetricsCollectionProfiles 2025-12-12T16:18:28.756710510+00:00 stderr F W1212 16:18:28.756704 12 feature_gate.go:328] unrecognized feature gate: MixedCPUsAllocation 2025-12-12T16:18:28.756718250+00:00 stderr F W1212 16:18:28.756707 12 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAzure 2025-12-12T16:18:28.756718250+00:00 stderr F W1212 16:18:28.756710 12 feature_gate.go:328] unrecognized feature gate: SetEIPForNLBIngressController 2025-12-12T16:18:28.756725410+00:00 stderr F W1212 16:18:28.756714 12 feature_gate.go:328] unrecognized feature gate: AWSDedicatedHosts 2025-12-12T16:18:28.756812123+00:00 stderr F I1212 16:18:28.756728 12 feature_gate.go:384] feature gates: {map[DynamicResourceAllocation:false EventedPLEG:false 
ImageVolume:true KMSv1:true MaxUnavailableStatefulSet:false MinimumKubeletVersion:false MutatingAdmissionPolicy:false NodeSwap:false ProcMountType:true RouteExternalCertificate:true SELinuxMount:false ServiceAccountTokenNodeBinding:true StoragePerformantSecurityPolicy:true TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:true UserNamespacesSupport:true VolumeAttributesClass:false]} 2025-12-12T16:18:28.756812123+00:00 stderr F I1212 16:18:28.756768 12 flags.go:64] FLAG: --admission-control="[]" 2025-12-12T16:18:28.756812123+00:00 stderr F I1212 16:18:28.756778 12 flags.go:64] FLAG: --admission-control-config-file="/tmp/kubeapiserver-admission-config.yaml1778629991" 2025-12-12T16:18:28.756812123+00:00 stderr F I1212 16:18:28.756785 12 flags.go:64] FLAG: --advertise-address="192.168.126.11" 2025-12-12T16:18:28.756836673+00:00 stderr F I1212 16:18:28.756800 12 flags.go:64] FLAG: --aggregator-reject-forwarding-redirect="true" 2025-12-12T16:18:28.756836673+00:00 stderr F I1212 16:18:28.756813 12 flags.go:64] FLAG: --allow-metric-labels="[]" 2025-12-12T16:18:28.756836673+00:00 stderr F I1212 16:18:28.756822 12 flags.go:64] FLAG: --allow-metric-labels-manifest="" 2025-12-12T16:18:28.756836673+00:00 stderr F I1212 16:18:28.756825 12 flags.go:64] FLAG: --allow-privileged="true" 2025-12-12T16:18:28.756836673+00:00 stderr F I1212 16:18:28.756828 12 flags.go:64] FLAG: --anonymous-auth="true" 2025-12-12T16:18:28.756847353+00:00 stderr F I1212 16:18:28.756832 12 flags.go:64] FLAG: --api-audiences="[https://kubernetes.default.svc]" 2025-12-12T16:18:28.756847353+00:00 stderr F I1212 16:18:28.756838 12 flags.go:64] FLAG: --apiserver-count="1" 2025-12-12T16:18:28.756856414+00:00 stderr F I1212 16:18:28.756842 12 flags.go:64] FLAG: --audit-log-batch-buffer-size="10000" 2025-12-12T16:18:28.756856414+00:00 stderr F I1212 16:18:28.756847 12 flags.go:64] FLAG: --audit-log-batch-max-size="1" 2025-12-12T16:18:28.756865054+00:00 stderr F I1212 16:18:28.756851 12 flags.go:64] FLAG: --audit-log-batch-max-wait="0s" 2025-12-12T16:18:28.756865054+00:00 stderr F I1212 16:18:28.756856 12 flags.go:64] FLAG: --audit-log-batch-throttle-burst="0" 2025-12-12T16:18:28.756873804+00:00 stderr F I1212 16:18:28.756860 12 flags.go:64] FLAG: --audit-log-batch-throttle-enable="false" 2025-12-12T16:18:28.756873804+00:00 stderr F I1212 16:18:28.756864 12 flags.go:64] FLAG: --audit-log-batch-throttle-qps="0" 2025-12-12T16:18:28.756889054+00:00 stderr F I1212 16:18:28.756869 12 flags.go:64] FLAG: --audit-log-compress="false" 2025-12-12T16:18:28.756889054+00:00 stderr F I1212 16:18:28.756872 12 flags.go:64] FLAG: --audit-log-format="json" 2025-12-12T16:18:28.756889054+00:00 stderr F I1212 16:18:28.756876 12 flags.go:64] FLAG: --audit-log-maxage="0" 2025-12-12T16:18:28.756889054+00:00 stderr F I1212 16:18:28.756879 12 flags.go:64] FLAG: --audit-log-maxbackup="10" 2025-12-12T16:18:28.756889054+00:00 stderr F I1212 16:18:28.756883 12 flags.go:64] FLAG: --audit-log-maxsize="200" 2025-12-12T16:18:28.756901805+00:00 stderr F I1212 16:18:28.756886 12 flags.go:64] FLAG: --audit-log-mode="blocking" 2025-12-12T16:18:28.756901805+00:00 stderr F I1212 16:18:28.756890 12 flags.go:64] FLAG: --audit-log-path="/var/log/kube-apiserver/audit.log" 2025-12-12T16:18:28.756910735+00:00 stderr F I1212 16:18:28.756893 12 flags.go:64] FLAG: --audit-log-truncate-enabled="false" 2025-12-12T16:18:28.756910735+00:00 stderr F I1212 16:18:28.756896 12 flags.go:64] FLAG: --audit-log-truncate-max-batch-size="10485760" 
2025-12-12T16:18:28.756943076+00:00 stderr F I1212 16:18:28.756901 12 flags.go:64] FLAG: --audit-log-truncate-max-event-size="102400" 2025-12-12T16:18:28.756943076+00:00 stderr F I1212 16:18:28.756905 12 flags.go:64] FLAG: --audit-log-version="audit.k8s.io/v1" 2025-12-12T16:18:28.756943076+00:00 stderr F I1212 16:18:28.756909 12 flags.go:64] FLAG: --audit-policy-file="/etc/kubernetes/static-pod-resources/configmaps/kube-apiserver-audit-policies/policy.yaml" 2025-12-12T16:18:28.756943076+00:00 stderr F I1212 16:18:28.756913 12 flags.go:64] FLAG: --audit-webhook-batch-buffer-size="10000" 2025-12-12T16:18:28.756943076+00:00 stderr F I1212 16:18:28.756916 12 flags.go:64] FLAG: --audit-webhook-batch-initial-backoff="10s" 2025-12-12T16:18:28.756943076+00:00 stderr F I1212 16:18:28.756920 12 flags.go:64] FLAG: --audit-webhook-batch-max-size="400" 2025-12-12T16:18:28.756943076+00:00 stderr F I1212 16:18:28.756923 12 flags.go:64] FLAG: --audit-webhook-batch-max-wait="30s" 2025-12-12T16:18:28.756943076+00:00 stderr F I1212 16:18:28.756926 12 flags.go:64] FLAG: --audit-webhook-batch-throttle-burst="15" 2025-12-12T16:18:28.756943076+00:00 stderr F I1212 16:18:28.756929 12 flags.go:64] FLAG: --audit-webhook-batch-throttle-enable="true" 2025-12-12T16:18:28.756943076+00:00 stderr F I1212 16:18:28.756933 12 flags.go:64] FLAG: --audit-webhook-batch-throttle-qps="10" 2025-12-12T16:18:28.756955726+00:00 stderr F I1212 16:18:28.756937 12 flags.go:64] FLAG: --audit-webhook-config-file="" 2025-12-12T16:18:28.756955726+00:00 stderr F I1212 16:18:28.756941 12 flags.go:64] FLAG: --audit-webhook-initial-backoff="10s" 2025-12-12T16:18:28.756955726+00:00 stderr F I1212 16:18:28.756944 12 flags.go:64] FLAG: --audit-webhook-mode="batch" 2025-12-12T16:18:28.756955726+00:00 stderr F I1212 16:18:28.756948 12 flags.go:64] FLAG: --audit-webhook-truncate-enabled="false" 2025-12-12T16:18:28.756965236+00:00 stderr F I1212 16:18:28.756953 12 flags.go:64] FLAG: --audit-webhook-truncate-max-batch-size="10485760" 2025-12-12T16:18:28.756965236+00:00 stderr F I1212 16:18:28.756958 12 flags.go:64] FLAG: --audit-webhook-truncate-max-event-size="102400" 2025-12-12T16:18:28.756974437+00:00 stderr F I1212 16:18:28.756961 12 flags.go:64] FLAG: --audit-webhook-version="audit.k8s.io/v1" 2025-12-12T16:18:28.756974437+00:00 stderr F I1212 16:18:28.756965 12 flags.go:64] FLAG: --authentication-config="" 2025-12-12T16:18:28.756983417+00:00 stderr F I1212 16:18:28.756968 12 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" 2025-12-12T16:18:28.756992177+00:00 stderr F I1212 16:18:28.756972 12 flags.go:64] FLAG: --authentication-token-webhook-config-file="/etc/kubernetes/static-pod-resources/secrets/webhook-authenticator/kubeConfig" 2025-12-12T16:18:28.756992177+00:00 stderr F I1212 16:18:28.756976 12 flags.go:64] FLAG: --authentication-token-webhook-version="v1" 2025-12-12T16:18:28.756992177+00:00 stderr F I1212 16:18:28.756979 12 flags.go:64] FLAG: --authorization-config="" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.756982 12 flags.go:64] FLAG: --authorization-mode="[Scope,SystemMasters,RBAC,Node]" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757019 12 flags.go:64] FLAG: --authorization-policy-file="" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757023 12 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757027 12 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" 
2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757031 12 flags.go:64] FLAG: --authorization-webhook-config-file="" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757035 12 flags.go:64] FLAG: --authorization-webhook-version="v1beta1" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757038 12 flags.go:64] FLAG: --bind-address="0.0.0.0" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757042 12 flags.go:64] FLAG: --cert-dir="/var/run/kubernetes" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757045 12 flags.go:64] FLAG: --client-ca-file="/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757049 12 flags.go:64] FLAG: --contention-profiling="false" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757053 12 flags.go:64] FLAG: --cors-allowed-origins="[//127\\.0\\.0\\.1(:|$),//localhost(:|$)]" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757057 12 flags.go:64] FLAG: --debug-socket-path="" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757061 12 flags.go:64] FLAG: --default-not-ready-toleration-seconds="300" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757066 12 flags.go:64] FLAG: --default-unreachable-toleration-seconds="300" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757070 12 flags.go:64] FLAG: --default-watch-cache-size="100" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757073 12 flags.go:64] FLAG: --delete-collection-workers="1" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757077 12 flags.go:64] FLAG: --disable-admission-plugins="[]" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757082 12 flags.go:64] FLAG: --disable-http2-serving="false" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757085 12 flags.go:64] FLAG: --disabled-metrics="[]" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757096 12 flags.go:64] FLAG: --egress-selector-config-file="" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757100 12 flags.go:64] FLAG: --emulated-version="[]" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757105 12 flags.go:64] FLAG: --emulation-forward-compatible="false" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757109 12 flags.go:64] FLAG: 
--enable-admission-plugins="[CertificateApproval,CertificateSigning,CertificateSubjectRestriction,DefaultIngressClass,DefaultStorageClass,DefaultTolerationSeconds,LimitRanger,MutatingAdmissionWebhook,NamespaceLifecycle,NodeRestriction,OwnerReferencesPermissionEnforcement,PersistentVolumeClaimResize,PodNodeSelector,PodTolerationRestriction,Priority,ResourceQuota,RuntimeClass,ServiceAccount,StorageObjectInUseProtection,TaintNodesByCondition,ValidatingAdmissionPolicy,ValidatingAdmissionWebhook,authorization.openshift.io/RestrictSubjectBindings,authorization.openshift.io/ValidateRoleBindingRestriction,config.openshift.io/DenyDeleteClusterConfiguration,config.openshift.io/ValidateAPIServer,config.openshift.io/ValidateAuthentication,config.openshift.io/ValidateConsole,config.openshift.io/ValidateFeatureGate,config.openshift.io/ValidateImage,config.openshift.io/ValidateOAuth,config.openshift.io/ValidateProject,config.openshift.io/ValidateScheduler,image.openshift.io/ImagePolicy,network.openshift.io/ExternalIPRanger,network.openshift.io/RestrictedEndpointsAdmission,quota.openshift.io/ClusterResourceQuota,quota.openshift.io/ValidateClusterResourceQuota,route.openshift.io/IngressAdmission,scheduling.openshift.io/OriginPodNodeEnvironment,security.openshift.io/DefaultSecurityContextConstraints,security.openshift.io/SCCExecRestrictions,security.openshift.io/SecurityContextConstraint,security.openshift.io/ValidateSecurityContextConstraints,storage.openshift.io/CSIInlineVolumeSecurity]" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757131 12 flags.go:64] FLAG: --enable-aggregator-routing="true" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757134 12 flags.go:64] FLAG: --enable-bootstrap-token-auth="false" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757137 12 flags.go:64] FLAG: --enable-garbage-collector="true" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757140 12 flags.go:64] FLAG: --enable-logs-handler="false" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757143 12 flags.go:64] FLAG: --enable-priority-and-fairness="true" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757145 12 flags.go:64] FLAG: --encryption-provider-config="" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757148 12 flags.go:64] FLAG: --encryption-provider-config-automatic-reload="false" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757153 12 flags.go:64] FLAG: --endpoint-reconciler-type="lease" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757155 12 flags.go:64] FLAG: --etcd-cafile="/etc/kubernetes/static-pod-resources/configmaps/etcd-serving-ca/ca-bundle.crt" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757159 12 flags.go:64] FLAG: --etcd-certfile="/etc/kubernetes/static-pod-resources/secrets/etcd-client/tls.crt" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757162 12 flags.go:64] FLAG: --etcd-compaction-interval="5m0s" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757165 12 flags.go:64] FLAG: --etcd-count-metric-poll-period="1m0s" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757168 12 flags.go:64] FLAG: --etcd-db-metric-poll-interval="30s" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757171 12 flags.go:64] FLAG: --etcd-healthcheck-timeout="9s" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757174 12 flags.go:64] FLAG: --etcd-keyfile="/etc/kubernetes/static-pod-resources/secrets/etcd-client/tls.key" 
2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757193 12 flags.go:64] FLAG: --etcd-prefix="kubernetes.io" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757197 12 flags.go:64] FLAG: --etcd-readycheck-timeout="9s" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757215 12 flags.go:64] FLAG: --etcd-servers="[https://192.168.126.11:2379,https://localhost:2379]" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757220 12 flags.go:64] FLAG: --etcd-servers-overrides="[]" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757224 12 flags.go:64] FLAG: --event-ttl="3h0m0s" 2025-12-12T16:18:28.757543911+00:00 stderr F I1212 16:18:28.757227 12 flags.go:64] FLAG: --external-hostname="" 2025-12-12T16:18:28.757543911+00:00 stderr P I1212 16:18:28.757230 12 flags.go:64] FLAG: --feature-gates=":AdditionalRoutingCapabilities=true,:AdminNetworkPolicy=true,:AlibabaPlatform=true,:AzureWorkloadIdentity=true,:BuildCSIVolumes=true,:CPMSMachineNamePrefix=true,:ConsolePluginContentSecurityPolicy=true,:GatewayAPI=true,:GatewayAPIController=true,:HighlyAvailableArbiter=true,:ImageVolume=true,:IngressControllerLBSubnetsAWS=true,:KMSv1=true,:MachineConfigNodes=true,:ManagedBootImages=true,:ManagedBootImagesAWS=true,:MetricsCollectionProfiles=true,:NetworkDiagnosticsConfig=true,:NetworkLiveMigration=true,:NetworkSegmentation=true,:NewOLM=true,:PinnedImages=true,:ProcMountType=true,:RouteAdvertisements=true,:RouteExternalCertificate=true,:ServiceAccountTokenNodeBinding=true,:SetEIPForNLBIngressController=true,:SigstoreImageVerification=true,:StoragePerformantSecurityPolicy=true,:UpgradeStatus=true,:UserNamespacesPodSecurityStandards=true,:UserNamespacesSupport=true,:VSphereMultiDisk=true,:VSphereMultiNetworks=true,:AWSClusterHostedDNS=false,:AWSClusterHostedDNSInstall=false,:AWSDedicatedHosts=false,:AWSServiceLBNetworkSecurityGroup=false,:AutomatedEtcdBackup=false,:AzureClusterHostedDNSInstall=false,:AzureDedicatedHosts=false,:AzureMultiDisk=false,:BootImageSkewEnforcement=false,:BootcNodeManagement=false,:ClusterAPIInstall=false,:ClusterAPIInstallIBMCloud=false,:ClusterMonitoringConfig=false,:ClusterVersionOperatorConfiguration=false,:DNSNameResolver=false,:DualReplica=false,:DyanmicServiceEndpointIBMCloud=false,:DynamicResourceAllocation=false,:EtcdBackendQuota=false,:EventedPLEG=false,:Example=false,:Example2=false,:ExternalOIDC=false,:ExternalOIDCWithUIDAndExtraClaimMappings=false,:ExternalSnapshotMetadata=false,:GCPClusterHostedDNS=false,:GCPClusterHostedDNSInstall=false,:GCPCustomAPIEndpoints=false,:GCPCustomAPIEndpointsInstall=false,:ImageModeStatusReporting=false,:ImageStreamImportMode=false,:IngressControllerDynamicConfigurationManager=false,:InsightsConfig=false,:InsightsConfigAPI=false,:InsightsOnDemandDataGather=false,:IrreconcilableMachineConfig=false,:KMSEncryptionProvider=false,:MachineAPIMigration=false,:MachineAPIOperatorDisableMachineHealthCheckController=false,:ManagedBootImagesAzure=false,:ManagedBootImagesvSphere=false,:MaxUnavailableStatefulSet=false,:MinimumKubeletVersion=false,:MixedCPUsAllocation=false,:MultiArchInstallAzure=false,:MultiDiskSetup=false,:MutatingAdmissionPolicy=false,:NewOLMCatalogdAPIV1Metas=false,:NewOLMOwnSingleNamespace=false,:NewOLMPreflightPermissionChecks=false,:NewOLMWebhookProviderOpenshiftServiceCA=false,:NoRegistryClusterOperations=false,:NodeSwap=false,:NutanixMultiSubnets=false,:OVNObservability=false,:OpenShiftPodSecurityAdmission=false,:PreconfiguredUDNAddresses=false,:SELinuxMount=false,:Sho 
2025-12-12T16:18:28.757597332+00:00 stderr F rtCertRotation=false,:SignatureStores=false,:SigstoreImageVerificationPKI=false,:TranslateStreamCloseWebsocketRequests=false,:VSphereConfigurableMaxAllowedBlockVolumesPerNode=false,:VSphereHostVMGroupZonal=false,:VSphereMixedNodeEnv=false,:VolumeAttributesClass=false,:VolumeGroupSnapshot=false" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757294 12 flags.go:64] FLAG: --goaway-chance="0" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757299 12 flags.go:64] FLAG: --help="false" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757301 12 flags.go:64] FLAG: --http2-max-streams-per-connection="2000" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757304 12 flags.go:64] FLAG: --kubelet-certificate-authority="/etc/kubernetes/static-pod-resources/configmaps/kubelet-serving-ca/ca-bundle.crt" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757307 12 flags.go:64] FLAG: --kubelet-client-certificate="/etc/kubernetes/static-pod-certs/secrets/kubelet-client/tls.crt" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757310 12 flags.go:64] FLAG: --kubelet-client-key="/etc/kubernetes/static-pod-certs/secrets/kubelet-client/tls.key" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757313 12 flags.go:64] FLAG: --kubelet-port="10250" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757316 12 flags.go:64] FLAG: --kubelet-preferred-address-types="[InternalIP]" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757321 12 flags.go:64] FLAG: --kubelet-read-only-port="0" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757324 12 flags.go:64] FLAG: --kubelet-timeout="5s" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757327 12 flags.go:64] FLAG: --kubernetes-service-node-port="0" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757331 12 flags.go:64] FLAG: --lease-reuse-duration-seconds="60" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757334 12 flags.go:64] FLAG: --livez-grace-period="0s" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757336 12 flags.go:64] FLAG: --log-flush-frequency="5s" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757339 12 flags.go:64] FLAG: --log-json-info-buffer-size="0" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757343 12 flags.go:64] FLAG: --log-json-split-stream="false" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757346 12 flags.go:64] FLAG: --log-text-info-buffer-size="0" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757349 12 flags.go:64] FLAG: --log-text-split-stream="false" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757351 12 flags.go:64] FLAG: --logging-format="text" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757354 12 flags.go:64] FLAG: --max-connection-bytes-per-sec="0" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757357 12 flags.go:64] FLAG: --max-mutating-requests-inflight="1000" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757360 12 flags.go:64] FLAG: --max-requests-inflight="3000" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757362 12 flags.go:64] FLAG: --min-request-timeout="3600" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757365 12 flags.go:64] FLAG: --oidc-ca-file="" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757369 12 flags.go:64] FLAG: --oidc-client-id="" 2025-12-12T16:18:28.757597332+00:00 
stderr F I1212 16:18:28.757371 12 flags.go:64] FLAG: --oidc-groups-claim="" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757374 12 flags.go:64] FLAG: --oidc-groups-prefix="" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757377 12 flags.go:64] FLAG: --oidc-issuer-url="" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757380 12 flags.go:64] FLAG: --oidc-required-claim="" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757383 12 flags.go:64] FLAG: --oidc-signing-algs="[RS256]" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757389 12 flags.go:64] FLAG: --oidc-username-claim="sub" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757392 12 flags.go:64] FLAG: --oidc-username-prefix="" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757395 12 flags.go:64] FLAG: --openshift-config="/etc/kubernetes/static-pod-resources/configmaps/config/config.yaml" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757398 12 flags.go:64] FLAG: --peer-advertise-ip="" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757402 12 flags.go:64] FLAG: --peer-advertise-port="" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757404 12 flags.go:64] FLAG: --peer-ca-file="" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757407 12 flags.go:64] FLAG: --permit-address-sharing="true" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757410 12 flags.go:64] FLAG: --permit-port-sharing="false" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757413 12 flags.go:64] FLAG: --profiling="true" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757416 12 flags.go:64] FLAG: --proxy-client-cert-file="/etc/kubernetes/static-pod-certs/secrets/aggregator-client/tls.crt" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757419 12 flags.go:64] FLAG: --proxy-client-key-file="/etc/kubernetes/static-pod-certs/secrets/aggregator-client/tls.key" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757423 12 flags.go:64] FLAG: --request-timeout="1m0s" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757426 12 flags.go:64] FLAG: --requestheader-allowed-names="[kube-apiserver-proxy,system:kube-apiserver-proxy,system:openshift-aggregator]" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757431 12 flags.go:64] FLAG: --requestheader-client-ca-file="/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757450 12 flags.go:64] FLAG: --requestheader-extra-headers-prefix="[X-Remote-Extra-]" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757454 12 flags.go:64] FLAG: --requestheader-group-headers="[X-Remote-Group]" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757460 12 flags.go:64] FLAG: --requestheader-uid-headers="[]" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757463 12 flags.go:64] FLAG: --requestheader-username-headers="[X-Remote-User]" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757468 12 flags.go:64] FLAG: --runtime-config="" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757472 12 flags.go:64] FLAG: --runtime-config-emulation-forward-compatible="false" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757474 12 flags.go:64] FLAG: --secure-port="6443" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757477 12 flags.go:64] FLAG: --send-retry-after-while-not-ready-once="true" 
2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757480 12 flags.go:64] FLAG: --service-account-extend-token-expiration="true" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757483 12 flags.go:64] FLAG: --service-account-issuer="[https://kubernetes.default.svc]" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757487 12 flags.go:64] FLAG: --service-account-jwks-uri="https://api.crc.testing:6443/openid/v1/jwks" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757490 12 flags.go:64] FLAG: --service-account-key-file="[/etc/kubernetes/static-pod-resources/configmaps/sa-token-signing-certs/service-account-001.pub,/etc/kubernetes/static-pod-resources/configmaps/sa-token-signing-certs/service-account-002.pub,/etc/kubernetes/static-pod-resources/configmaps/bound-sa-token-signing-certs/service-account-001.pub]" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757498 12 flags.go:64] FLAG: --service-account-lookup="true" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757501 12 flags.go:64] FLAG: --service-account-max-token-expiration="0s" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757504 12 flags.go:64] FLAG: --service-account-signing-endpoint="" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757508 12 flags.go:64] FLAG: --service-account-signing-key-file="/etc/kubernetes/static-pod-certs/secrets/bound-service-account-signing-key/service-account.key" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757512 12 flags.go:64] FLAG: --service-cluster-ip-range="10.217.4.0/23" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757515 12 flags.go:64] FLAG: --service-node-port-range="30000-32767" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757521 12 flags.go:64] FLAG: --show-hidden-metrics-for-version="" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757524 12 flags.go:64] FLAG: --shutdown-delay-duration="0s" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757528 12 flags.go:64] FLAG: --shutdown-send-retry-after="true" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757532 12 flags.go:64] FLAG: --shutdown-watch-termination-grace-period="0s" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757536 12 flags.go:64] FLAG: --storage-backend="etcd3" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757539 12 flags.go:64] FLAG: --storage-initialization-timeout="1m0s" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757543 12 flags.go:64] FLAG: --storage-media-type="application/vnd.kubernetes.protobuf" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757546 12 flags.go:64] FLAG: --strict-transport-security-directives="[max-age=31536000,includeSubDomains,preload]" 2025-12-12T16:18:28.757597332+00:00 stderr F I1212 16:18:28.757551 12 flags.go:64] FLAG: --tls-cert-file="/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.crt" 2025-12-12T16:18:28.757884659+00:00 stderr F I1212 16:18:28.757555 12 flags.go:64] FLAG: --tls-cipher-suites="[TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256]" 2025-12-12T16:18:28.757884659+00:00 stderr F I1212 16:18:28.757564 12 flags.go:64] FLAG: --tls-min-version="VersionTLS12" 
2025-12-12T16:18:28.757884659+00:00 stderr F I1212 16:18:28.757568 12 flags.go:64] FLAG: --tls-private-key-file="/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.key" 2025-12-12T16:18:28.757884659+00:00 stderr F I1212 16:18:28.757578 12 flags.go:64] FLAG: --tls-sni-cert-key="[/etc/kubernetes/static-pod-certs/secrets/localhost-serving-cert-certkey/tls.crt,/etc/kubernetes/static-pod-certs/secrets/localhost-serving-cert-certkey/tls.key;/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.crt,/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.key;/etc/kubernetes/static-pod-certs/secrets/external-loadbalancer-serving-certkey/tls.crt,/etc/kubernetes/static-pod-certs/secrets/external-loadbalancer-serving-certkey/tls.key;/etc/kubernetes/static-pod-certs/secrets/internal-loadbalancer-serving-certkey/tls.crt,/etc/kubernetes/static-pod-certs/secrets/internal-loadbalancer-serving-certkey/tls.key;/etc/kubernetes/static-pod-resources/secrets/localhost-recovery-serving-certkey/tls.crt,/etc/kubernetes/static-pod-resources/secrets/localhost-recovery-serving-certkey/tls.key]" 2025-12-12T16:18:28.757884659+00:00 stderr F I1212 16:18:28.757592 12 flags.go:64] FLAG: --token-auth-file="" 2025-12-12T16:18:28.757884659+00:00 stderr F I1212 16:18:28.757595 12 flags.go:64] FLAG: --tracing-config-file="" 2025-12-12T16:18:28.757884659+00:00 stderr F I1212 16:18:28.757598 12 flags.go:64] FLAG: --v="2" 2025-12-12T16:18:28.757884659+00:00 stderr F I1212 16:18:28.757602 12 flags.go:64] FLAG: --version="false" 2025-12-12T16:18:28.757884659+00:00 stderr F I1212 16:18:28.757607 12 flags.go:64] FLAG: --vmodule="" 2025-12-12T16:18:28.757884659+00:00 stderr F I1212 16:18:28.757613 12 flags.go:64] FLAG: --watch-cache="true" 2025-12-12T16:18:28.757884659+00:00 stderr F I1212 16:18:28.757617 12 flags.go:64] FLAG: --watch-cache-sizes="[]" 2025-12-12T16:18:28.757884659+00:00 stderr F I1212 16:18:28.757652 12 options.go:249] external host was not specified, using 192.168.126.11 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758319 12 feature_gate.go:328] unrecognized feature gate: DyanmicServiceEndpointIBMCloud 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758334 12 feature_gate.go:328] unrecognized feature gate: CPMSMachineNamePrefix 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758337 12 feature_gate.go:328] unrecognized feature gate: NewOLMCatalogdAPIV1Metas 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758341 12 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNSInstall 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758343 12 feature_gate.go:328] unrecognized feature gate: ClusterVersionOperatorConfiguration 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758346 12 feature_gate.go:328] unrecognized feature gate: IngressControllerDynamicConfigurationManager 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758349 12 feature_gate.go:328] unrecognized feature gate: BootcNodeManagement 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758352 12 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNSInstall 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758355 12 feature_gate.go:328] unrecognized feature gate: NetworkDiagnosticsConfig 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758358 12 feature_gate.go:328] unrecognized feature gate: AzureMultiDisk 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 
16:18:28.758360 12 feature_gate.go:328] unrecognized feature gate: UpgradeStatus 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758364 12 feature_gate.go:328] unrecognized feature gate: Example 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758366 12 feature_gate.go:328] unrecognized feature gate: AdditionalRoutingCapabilities 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758369 12 feature_gate.go:328] unrecognized feature gate: MultiDiskSetup 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758371 12 feature_gate.go:328] unrecognized feature gate: AzureDedicatedHosts 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758376 12 feature_gate.go:328] unrecognized feature gate: ExternalOIDC 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758378 12 feature_gate.go:328] unrecognized feature gate: AzureWorkloadIdentity 2025-12-12T16:18:28.758392622+00:00 stderr F W1212 16:18:28.758381 12 feature_gate.go:328] unrecognized feature gate: ConsolePluginContentSecurityPolicy 2025-12-12T16:18:28.758418472+00:00 stderr F W1212 16:18:28.758384 12 feature_gate.go:328] unrecognized feature gate: ImageModeStatusReporting 2025-12-12T16:18:28.758418472+00:00 stderr F W1212 16:18:28.758387 12 feature_gate.go:328] unrecognized feature gate: ImageStreamImportMode 2025-12-12T16:18:28.758418472+00:00 stderr F W1212 16:18:28.758390 12 feature_gate.go:351] Setting GA feature gate ServiceAccountTokenNodeBinding=true. It will be removed in a future release. 2025-12-12T16:18:28.758418472+00:00 stderr F W1212 16:18:28.758394 12 feature_gate.go:328] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController 2025-12-12T16:18:28.758418472+00:00 stderr F W1212 16:18:28.758398 12 feature_gate.go:328] unrecognized feature gate: ManagedBootImages 2025-12-12T16:18:28.758418472+00:00 stderr F W1212 16:18:28.758403 12 feature_gate.go:328] unrecognized feature gate: GatewayAPI 2025-12-12T16:18:28.758418472+00:00 stderr F W1212 16:18:28.758406 12 feature_gate.go:328] unrecognized feature gate: PreconfiguredUDNAddresses 2025-12-12T16:18:28.758428343+00:00 stderr F W1212 16:18:28.758410 12 feature_gate.go:328] unrecognized feature gate: NewOLMPreflightPermissionChecks 2025-12-12T16:18:28.758428343+00:00 stderr F W1212 16:18:28.758413 12 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAWS 2025-12-12T16:18:28.758428343+00:00 stderr F W1212 16:18:28.758415 12 feature_gate.go:328] unrecognized feature gate: OpenShiftPodSecurityAdmission 2025-12-12T16:18:28.758428343+00:00 stderr F W1212 16:18:28.758418 12 feature_gate.go:328] unrecognized feature gate: NetworkLiveMigration 2025-12-12T16:18:28.758436383+00:00 stderr F W1212 16:18:28.758421 12 feature_gate.go:328] unrecognized feature gate: VSphereMultiNetworks 2025-12-12T16:18:28.758436383+00:00 stderr F W1212 16:18:28.758425 12 feature_gate.go:349] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
2025-12-12T16:18:28.758436383+00:00 stderr F W1212 16:18:28.758429 12 feature_gate.go:328] unrecognized feature gate: VSphereMixedNodeEnv 2025-12-12T16:18:28.758444653+00:00 stderr F W1212 16:18:28.758432 12 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpoints 2025-12-12T16:18:28.758444653+00:00 stderr F W1212 16:18:28.758435 12 feature_gate.go:328] unrecognized feature gate: ClusterMonitoringConfig 2025-12-12T16:18:28.758444653+00:00 stderr F W1212 16:18:28.758438 12 feature_gate.go:328] unrecognized feature gate: KMSEncryptionProvider 2025-12-12T16:18:28.758455433+00:00 stderr F W1212 16:18:28.758442 12 feature_gate.go:328] unrecognized feature gate: VSphereConfigurableMaxAllowedBlockVolumesPerNode 2025-12-12T16:18:28.758455433+00:00 stderr F W1212 16:18:28.758445 12 feature_gate.go:328] unrecognized feature gate: HighlyAvailableArbiter 2025-12-12T16:18:28.758455433+00:00 stderr F W1212 16:18:28.758448 12 feature_gate.go:328] unrecognized feature gate: MachineAPIMigration 2025-12-12T16:18:28.758463053+00:00 stderr F W1212 16:18:28.758451 12 feature_gate.go:328] unrecognized feature gate: DualReplica 2025-12-12T16:18:28.758463053+00:00 stderr F W1212 16:18:28.758454 12 feature_gate.go:328] unrecognized feature gate: Example2 2025-12-12T16:18:28.758463053+00:00 stderr F W1212 16:18:28.758457 12 feature_gate.go:328] unrecognized feature gate: InsightsConfig 2025-12-12T16:18:28.758470654+00:00 stderr F W1212 16:18:28.758460 12 feature_gate.go:328] unrecognized feature gate: NewOLM 2025-12-12T16:18:28.758470654+00:00 stderr F W1212 16:18:28.758462 12 feature_gate.go:328] unrecognized feature gate: PinnedImages 2025-12-12T16:18:28.758479544+00:00 stderr F W1212 16:18:28.758465 12 feature_gate.go:328] unrecognized feature gate: NewOLMOwnSingleNamespace 2025-12-12T16:18:28.758479544+00:00 stderr F W1212 16:18:28.758469 12 feature_gate.go:328] unrecognized feature gate: ShortCertRotation 2025-12-12T16:18:28.758488994+00:00 stderr F W1212 16:18:28.758474 12 feature_gate.go:328] unrecognized feature gate: VSphereHostVMGroupZonal 2025-12-12T16:18:28.758488994+00:00 stderr F W1212 16:18:28.758478 12 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesvSphere 2025-12-12T16:18:28.758488994+00:00 stderr F W1212 16:18:28.758481 12 feature_gate.go:328] unrecognized feature gate: NoRegistryClusterOperations 2025-12-12T16:18:28.758497934+00:00 stderr F W1212 16:18:28.758485 12 feature_gate.go:328] unrecognized feature gate: GatewayAPIController 2025-12-12T16:18:28.758497934+00:00 stderr F W1212 16:18:28.758488 12 feature_gate.go:328] unrecognized feature gate: NetworkSegmentation 2025-12-12T16:18:28.758506354+00:00 stderr F W1212 16:18:28.758491 12 feature_gate.go:328] unrecognized feature gate: OVNObservability 2025-12-12T16:18:28.758506354+00:00 stderr F W1212 16:18:28.758496 12 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstallIBMCloud 2025-12-12T16:18:28.758506354+00:00 stderr F W1212 16:18:28.758499 12 feature_gate.go:328] unrecognized feature gate: InsightsOnDemandDataGather 2025-12-12T16:18:28.758514895+00:00 stderr F W1212 16:18:28.758502 12 feature_gate.go:328] unrecognized feature gate: EtcdBackendQuota 2025-12-12T16:18:28.758514895+00:00 stderr F W1212 16:18:28.758505 12 feature_gate.go:328] unrecognized feature gate: VSphereMultiDisk 2025-12-12T16:18:28.758514895+00:00 stderr F W1212 16:18:28.758508 12 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerification 2025-12-12T16:18:28.758523265+00:00 stderr F W1212 16:18:28.758511 12 
feature_gate.go:328] unrecognized feature gate: RouteAdvertisements 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758518 12 feature_gate.go:328] unrecognized feature gate: ExternalOIDCWithUIDAndExtraClaimMappings 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758526 12 feature_gate.go:328] unrecognized feature gate: DNSNameResolver 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758529 12 feature_gate.go:328] unrecognized feature gate: AzureClusterHostedDNSInstall 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758531 12 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpointsInstall 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758534 12 feature_gate.go:328] unrecognized feature gate: AutomatedEtcdBackup 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758537 12 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNS 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758539 12 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNS 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758542 12 feature_gate.go:328] unrecognized feature gate: VolumeGroupSnapshot 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758546 12 feature_gate.go:328] unrecognized feature gate: MetricsCollectionProfiles 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758549 12 feature_gate.go:328] unrecognized feature gate: MixedCPUsAllocation 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758552 12 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAzure 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758554 12 feature_gate.go:328] unrecognized feature gate: SetEIPForNLBIngressController 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758559 12 feature_gate.go:328] unrecognized feature gate: AWSDedicatedHosts 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758562 12 feature_gate.go:328] unrecognized feature gate: BuildCSIVolumes 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758566 12 feature_gate.go:328] unrecognized feature gate: IrreconcilableMachineConfig 2025-12-12T16:18:28.758576696+00:00 stderr F W1212 16:18:28.758570 12 feature_gate.go:328] unrecognized feature gate: MultiArchInstallAzure 2025-12-12T16:18:28.758592867+00:00 stderr F W1212 16:18:28.758573 12 feature_gate.go:328] unrecognized feature gate: AlibabaPlatform 2025-12-12T16:18:28.758592867+00:00 stderr F W1212 16:18:28.758576 12 feature_gate.go:328] unrecognized feature gate: AdminNetworkPolicy 2025-12-12T16:18:28.758592867+00:00 stderr F W1212 16:18:28.758581 12 feature_gate.go:328] unrecognized feature gate: MachineConfigNodes 2025-12-12T16:18:28.758592867+00:00 stderr F W1212 16:18:28.758584 12 feature_gate.go:328] unrecognized feature gate: NutanixMultiSubnets 2025-12-12T16:18:28.758592867+00:00 stderr F W1212 16:18:28.758587 12 feature_gate.go:328] unrecognized feature gate: IngressControllerLBSubnetsAWS 2025-12-12T16:18:28.758600937+00:00 stderr F W1212 16:18:28.758591 12 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstall 2025-12-12T16:18:28.758607927+00:00 stderr F W1212 16:18:28.758594 12 feature_gate.go:328] unrecognized feature gate: InsightsConfigAPI 2025-12-12T16:18:28.758607927+00:00 stderr F W1212 16:18:28.758598 12 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerificationPKI 2025-12-12T16:18:28.758607927+00:00 stderr F W1212 16:18:28.758602 12 feature_gate.go:328] unrecognized 
feature gate: ExternalSnapshotMetadata 2025-12-12T16:18:28.758616047+00:00 stderr F W1212 16:18:28.758605 12 feature_gate.go:328] unrecognized feature gate: NewOLMWebhookProviderOpenshiftServiceCA 2025-12-12T16:18:28.758616047+00:00 stderr F W1212 16:18:28.758607 12 feature_gate.go:328] unrecognized feature gate: AWSServiceLBNetworkSecurityGroup 2025-12-12T16:18:28.758623257+00:00 stderr F W1212 16:18:28.758610 12 feature_gate.go:328] unrecognized feature gate: BootImageSkewEnforcement 2025-12-12T16:18:28.758623257+00:00 stderr F W1212 16:18:28.758613 12 feature_gate.go:328] unrecognized feature gate: SignatureStores 2025-12-12T16:18:28.760745210+00:00 stderr F I1212 16:18:28.760631 12 server.go:184] Version: v1.33.5 2025-12-12T16:18:28.760745210+00:00 stderr F I1212 16:18:28.760657 12 server.go:186] "Golang settings" GOGC="100" GOMAXPROCS="" GOTRACEBACK="" 2025-12-12T16:18:28.761710224+00:00 stderr F I1212 16:18:28.761615 12 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.key" 2025-12-12T16:18:28.762028331+00:00 stderr F I1212 16:18:28.761940 12 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="sni-serving-cert::/etc/kubernetes/static-pod-certs/secrets/localhost-serving-cert-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/localhost-serving-cert-certkey/tls.key" 2025-12-12T16:18:28.762488103+00:00 stderr F I1212 16:18:28.762407 12 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="sni-serving-cert::/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.key" 2025-12-12T16:18:28.762908203+00:00 stderr F I1212 16:18:28.762824 12 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="sni-serving-cert::/etc/kubernetes/static-pod-certs/secrets/external-loadbalancer-serving-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/external-loadbalancer-serving-certkey/tls.key" 2025-12-12T16:18:28.763342124+00:00 stderr F I1212 16:18:28.763241 12 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="sni-serving-cert::/etc/kubernetes/static-pod-certs/secrets/internal-loadbalancer-serving-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/internal-loadbalancer-serving-certkey/tls.key" 2025-12-12T16:18:28.763645982+00:00 stderr F I1212 16:18:28.763582 12 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="sni-serving-cert::/etc/kubernetes/static-pod-resources/secrets/localhost-recovery-serving-certkey/tls.crt::/etc/kubernetes/static-pod-resources/secrets/localhost-recovery-serving-certkey/tls.key" 2025-12-12T16:18:29.420636164+00:00 stderr F I1212 16:18:29.420439 12 apf_controller.go:292] NewTestableController "Controller" with serverConcurrencyLimit=4000, name=Controller, asFieldManager="api-priority-and-fairness-config-consumer-v1" 2025-12-12T16:18:29.420723726+00:00 stderr F I1212 16:18:29.420565 12 apf_controller.go:898] Introducing queues for priority level "catch-all": config={"type":"Limited","limited":{"nominalConcurrencyShares":5,"limitResponse":{"type":"Reject"},"lendablePercent":0}}, nominalCL=4000, lendableCL=0, borrowingCL=4000, currentCL=4000, quiescing=false (shares=0xc00012118c, shareSum=5) 2025-12-12T16:18:29.420723726+00:00 stderr F I1212 16:18:29.420654 12 apf_controller.go:898] Introducing queues for 
priority level "exempt": config={"type":"Exempt","exempt":{"nominalConcurrencyShares":0,"lendablePercent":0}}, nominalCL=0, lendableCL=0, borrowingCL=4000, currentCL=0, quiescing=false (shares=0xc000536e50, shareSum=5) 2025-12-12T16:18:29.435053200+00:00 stderr F I1212 16:18:29.434889 12 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:18:29.437928131+00:00 stderr F I1212 16:18:29.437837 12 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" 2025-12-12T16:18:29.443754925+00:00 stderr F I1212 16:18:29.443659 12 shared_informer.go:350] "Waiting for caches to sync" controller="node_authorizer" 2025-12-12T16:18:29.444199657+00:00 stderr F I1212 16:18:29.444091 12 audit.go:340] Using audit backend: ignoreErrors 2025-12-12T16:18:29.455999168+00:00 stderr F I1212 16:18:29.455924 12 store.go:1663] "Monitoring resource count at path" resource="events" path="//events" 2025-12-12T16:18:29.456395158+00:00 stderr F I1212 16:18:29.456313 12 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:18:29.456487580+00:00 stderr F I1212 16:18:29.456425 12 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" 2025-12-12T16:18:29.464766565+00:00 stderr F I1212 16:18:29.464645 12 admission.go:47] Admission plugin "autoscaling.openshift.io/ClusterResourceOverride" is not configured so it will be disabled. 2025-12-12T16:18:29.464943269+00:00 stderr F I1212 16:18:29.464902 12 admission.go:33] Admission plugin "autoscaling.openshift.io/RunOnceDuration" is not configured so it will be disabled. 2025-12-12T16:18:29.464943269+00:00 stderr F I1212 16:18:29.464914 12 admission.go:32] Admission plugin "scheduling.openshift.io/PodNodeConstraints" is not configured so it will be disabled. 2025-12-12T16:18:29.474897165+00:00 stderr F I1212 16:18:29.474762 12 shared_informer.go:350] "Waiting for caches to sync" controller="*generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]" 2025-12-12T16:18:29.480470323+00:00 stderr F I1212 16:18:29.480383 12 plugins.go:157] Loaded 25 mutating admission controller(s) successfully in the following order: NamespaceLifecycle,LimitRanger,ServiceAccount,NodeRestriction,TaintNodesByCondition,PodNodeSelector,Priority,DefaultTolerationSeconds,PodTolerationRestriction,DefaultStorageClass,StorageObjectInUseProtection,RuntimeClass,DefaultIngressClass,PodTopologyLabels,MutatingAdmissionPolicy,autoscaling.openshift.io/ManagementCPUsOverride,scheduling.openshift.io/OriginPodNodeEnvironment,image.openshift.io/ImagePolicy,security.openshift.io/SecurityContextConstraint,route.openshift.io/RouteHostAssignment,autoscaling.openshift.io/MixedCPUs,storage.openshift.io/PerformantSecurityPolicy,route.openshift.io/DefaultRoute,security.openshift.io/DefaultSecurityContextConstraints,MutatingAdmissionWebhook. 
2025-12-12T16:18:29.480504744+00:00 stderr F I1212 16:18:29.480468 12 plugins.go:160] Loaded 47 validating admission controller(s) successfully in the following order: LimitRanger,ServiceAccount,PodSecurity,PodNodeSelector,Priority,PodTolerationRestriction,OwnerReferencesPermissionEnforcement,PersistentVolumeClaimResize,RuntimeClass,CertificateApproval,CertificateSigning,ClusterTrustBundleAttest,CertificateSubjectRestriction,autoscaling.openshift.io/ManagementCPUsOverride,authorization.openshift.io/RestrictSubjectBindings,scheduling.openshift.io/OriginPodNodeEnvironment,network.openshift.io/ExternalIPRanger,network.openshift.io/RestrictedEndpointsAdmission,image.openshift.io/ImagePolicy,security.openshift.io/SecurityContextConstraint,security.openshift.io/SCCExecRestrictions,route.openshift.io/IngressAdmission,storage.openshift.io/CSIInlineVolumeSecurity,autoscaling.openshift.io/ManagedNode,config.openshift.io/ValidateAPIServer,config.openshift.io/ValidateAuthentication,config.openshift.io/ValidateFeatureGate,config.openshift.io/ValidateConsole,operator.openshift.io/ValidateDNS,config.openshift.io/ValidateImage,config.openshift.io/ValidateOAuth,config.openshift.io/ValidateProject,config.openshift.io/DenyDeleteClusterConfiguration,operator.openshift.io/DenyDeleteClusterOperators,config.openshift.io/ValidateScheduler,quota.openshift.io/ValidateClusterResourceQuota,security.openshift.io/ValidateSecurityContextConstraints,authorization.openshift.io/ValidateRoleBindingRestriction,config.openshift.io/ValidateNetwork,config.openshift.io/ValidateAPIRequestCount,config.openshift.io/ValidateConfigNodeV1,route.openshift.io/ValidateRoute,operator.openshift.io/ValidateKubeControllerManager,ValidatingAdmissionPolicy,ValidatingAdmissionWebhook,ResourceQuota,quota.openshift.io/ClusterResourceQuota. 2025-12-12T16:18:29.480879783+00:00 stderr F I1212 16:18:29.480819 12 instance.go:233] Using reconciler: lease 2025-12-12T16:18:29.496986931+00:00 stderr F I1212 16:18:29.496822 12 store.go:1663] "Monitoring resource count at path" resource="customresourcedefinitions.apiextensions.k8s.io" path="//apiextensions.k8s.io/customresourcedefinitions" 2025-12-12T16:18:29.500859837+00:00 stderr F I1212 16:18:29.500725 12 handler.go:288] Adding GroupVersion apiextensions.k8s.io v1 to ResourceManager 2025-12-12T16:18:29.500859837+00:00 stderr F W1212 16:18:29.500756 12 genericapiserver.go:810] Skipping API apiextensions.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:29.515230563+00:00 stderr F I1212 16:18:29.515095 12 cidrallocator.go:197] starting ServiceCIDR Allocator Controller 2025-12-12T16:18:29.530905810+00:00 stderr F I1212 16:18:29.530727 12 deleted_kinds.go:96] NewResourceExpirationEvaluator with currentVersion: 1.33. 
2025-12-12T16:18:29.546976917+00:00 stderr F I1212 16:18:29.542950 12 store.go:1663] "Monitoring resource count at path" resource="events" path="//events" 2025-12-12T16:18:29.549562571+00:00 stderr F I1212 16:18:29.549451 12 store.go:1663] "Monitoring resource count at path" resource="resourcequotas" path="//resourcequotas" 2025-12-12T16:18:29.553598411+00:00 stderr F I1212 16:18:29.551405 12 cacher.go:469] cacher (resourcequotas): initialized 2025-12-12T16:18:29.553598411+00:00 stderr F I1212 16:18:29.551430 12 reflector.go:430] "Caches populated" type="*core.ResourceQuota" reflector="storage/cacher.go:/resourcequotas" 2025-12-12T16:18:29.559006885+00:00 stderr F I1212 16:18:29.558493 12 store.go:1663] "Monitoring resource count at path" resource="secrets" path="//secrets" 2025-12-12T16:18:29.565765632+00:00 stderr F I1212 16:18:29.565612 12 store.go:1663] "Monitoring resource count at path" resource="configmaps" path="//configmaps" 2025-12-12T16:18:29.573265927+00:00 stderr F I1212 16:18:29.572579 12 store.go:1663] "Monitoring resource count at path" resource="namespaces" path="//namespaces" 2025-12-12T16:18:29.580414973+00:00 stderr F I1212 16:18:29.580297 12 cacher.go:469] cacher (namespaces): initialized 2025-12-12T16:18:29.580414973+00:00 stderr F I1212 16:18:29.580372 12 reflector.go:430] "Caches populated" type="*core.Namespace" reflector="storage/cacher.go:/namespaces" 2025-12-12T16:18:29.586549385+00:00 stderr F I1212 16:18:29.586060 12 store.go:1663] "Monitoring resource count at path" resource="serviceaccounts" path="//serviceaccounts" 2025-12-12T16:18:29.593845035+00:00 stderr F I1212 16:18:29.593764 12 cacher.go:469] cacher (serviceaccounts): initialized 2025-12-12T16:18:29.593895936+00:00 stderr F I1212 16:18:29.593855 12 reflector.go:430] "Caches populated" type="*core.ServiceAccount" reflector="storage/cacher.go:/serviceaccounts" 2025-12-12T16:18:29.595861985+00:00 stderr F I1212 16:18:29.595794 12 store.go:1663] "Monitoring resource count at path" resource="podtemplates" path="//podtemplates" 2025-12-12T16:18:29.596950872+00:00 stderr F I1212 16:18:29.596901 12 cacher.go:469] cacher (podtemplates): initialized 2025-12-12T16:18:29.596988523+00:00 stderr F I1212 16:18:29.596958 12 reflector.go:430] "Caches populated" type="*core.PodTemplate" reflector="storage/cacher.go:/podtemplates" 2025-12-12T16:18:29.610351133+00:00 stderr F I1212 16:18:29.610151 12 store.go:1663] "Monitoring resource count at path" resource="limitranges" path="//limitranges" 2025-12-12T16:18:29.611396209+00:00 stderr F I1212 16:18:29.611326 12 cacher.go:469] cacher (limitranges): initialized 2025-12-12T16:18:29.611396209+00:00 stderr F I1212 16:18:29.611361 12 reflector.go:430] "Caches populated" type="*core.LimitRange" reflector="storage/cacher.go:/limitranges" 2025-12-12T16:18:29.614945377+00:00 stderr F I1212 16:18:29.614855 12 cacher.go:469] cacher (secrets): initialized 2025-12-12T16:18:29.614945377+00:00 stderr F I1212 16:18:29.614900 12 reflector.go:430] "Caches populated" type="*core.Secret" reflector="storage/cacher.go:/secrets" 2025-12-12T16:18:29.617731426+00:00 stderr F I1212 16:18:29.617666 12 store.go:1663] "Monitoring resource count at path" resource="persistentvolumes" path="//persistentvolumes" 2025-12-12T16:18:29.622096424+00:00 stderr F I1212 16:18:29.622017 12 cacher.go:469] cacher (persistentvolumes): initialized 2025-12-12T16:18:29.622096424+00:00 stderr F I1212 16:18:29.622041 12 reflector.go:430] "Caches populated" type="*core.PersistentVolume" 
reflector="storage/cacher.go:/persistentvolumes" 2025-12-12T16:18:29.629106947+00:00 stderr F I1212 16:18:29.629004 12 store.go:1663] "Monitoring resource count at path" resource="persistentvolumeclaims" path="//persistentvolumeclaims" 2025-12-12T16:18:29.631143887+00:00 stderr F I1212 16:18:29.631058 12 cacher.go:469] cacher (persistentvolumeclaims): initialized 2025-12-12T16:18:29.631261600+00:00 stderr F I1212 16:18:29.631172 12 reflector.go:430] "Caches populated" type="*core.PersistentVolumeClaim" reflector="storage/cacher.go:/persistentvolumeclaims" 2025-12-12T16:18:29.642870937+00:00 stderr F I1212 16:18:29.642760 12 store.go:1663] "Monitoring resource count at path" resource="endpoints" path="//services/endpoints" 2025-12-12T16:18:29.645280137+00:00 stderr F I1212 16:18:29.644500 12 cacher.go:469] cacher (configmaps): initialized 2025-12-12T16:18:29.645280137+00:00 stderr F I1212 16:18:29.644553 12 reflector.go:430] "Caches populated" type="*core.ConfigMap" reflector="storage/cacher.go:/configmaps" 2025-12-12T16:18:29.645988534+00:00 stderr F I1212 16:18:29.645927 12 cacher.go:469] cacher (endpoints): initialized 2025-12-12T16:18:29.645988534+00:00 stderr F I1212 16:18:29.645948 12 reflector.go:430] "Caches populated" type="*core.Endpoints" reflector="storage/cacher.go:/services/endpoints" 2025-12-12T16:18:29.659781225+00:00 stderr F I1212 16:18:29.657645 12 store.go:1663] "Monitoring resource count at path" resource="nodes" path="//minions" 2025-12-12T16:18:29.661393235+00:00 stderr F I1212 16:18:29.661327 12 cacher.go:469] cacher (nodes): initialized 2025-12-12T16:18:29.661393235+00:00 stderr F I1212 16:18:29.661363 12 reflector.go:430] "Caches populated" type="*core.Node" reflector="storage/cacher.go:/minions" 2025-12-12T16:18:29.668675475+00:00 stderr F I1212 16:18:29.668582 12 store.go:1663] "Monitoring resource count at path" resource="pods" path="//pods" 2025-12-12T16:18:29.678549749+00:00 stderr F I1212 16:18:29.675677 12 store.go:1663] "Monitoring resource count at path" resource="services" path="//services/specs" 2025-12-12T16:18:29.679679267+00:00 stderr F I1212 16:18:29.679614 12 cacher.go:469] cacher (services): initialized 2025-12-12T16:18:29.679679267+00:00 stderr F I1212 16:18:29.679651 12 reflector.go:430] "Caches populated" type="*core.Service" reflector="storage/cacher.go:/services/specs" 2025-12-12T16:18:29.682340183+00:00 stderr F I1212 16:18:29.682146 12 store.go:1663] "Monitoring resource count at path" resource="serviceaccounts" path="//serviceaccounts" 2025-12-12T16:18:29.685302216+00:00 stderr F I1212 16:18:29.684703 12 cacher.go:469] cacher (pods): initialized 2025-12-12T16:18:29.685302216+00:00 stderr F I1212 16:18:29.684747 12 reflector.go:430] "Caches populated" type="*core.Pod" reflector="storage/cacher.go:/pods" 2025-12-12T16:18:29.688355082+00:00 stderr F I1212 16:18:29.688280 12 store.go:1663] "Monitoring resource count at path" resource="replicationcontrollers" path="//controllers" 2025-12-12T16:18:29.689917790+00:00 stderr F I1212 16:18:29.689770 12 apis.go:128] Enabling API group "". 
2025-12-12T16:18:29.692068944+00:00 stderr F I1212 16:18:29.691981 12 cacher.go:469] cacher (replicationcontrollers): initialized 2025-12-12T16:18:29.692068944+00:00 stderr F I1212 16:18:29.692026 12 reflector.go:430] "Caches populated" type="*core.ReplicationController" reflector="storage/cacher.go:/controllers" 2025-12-12T16:18:29.702380019+00:00 stderr F I1212 16:18:29.702268 12 cacher.go:469] cacher (serviceaccounts): initialized 2025-12-12T16:18:29.702380019+00:00 stderr F I1212 16:18:29.702339 12 reflector.go:430] "Caches populated" type="*core.ServiceAccount" reflector="storage/cacher.go:/serviceaccounts" 2025-12-12T16:18:29.726847594+00:00 stderr F I1212 16:18:29.726692 12 handler.go:288] Adding GroupVersion v1 to ResourceManager 2025-12-12T16:18:29.727216783+00:00 stderr F I1212 16:18:29.727149 12 apis.go:112] API group "internal.apiserver.k8s.io" is not enabled, skipping. 2025-12-12T16:18:29.727355526+00:00 stderr F I1212 16:18:29.727305 12 apis.go:128] Enabling API group "authentication.k8s.io". 2025-12-12T16:18:29.727463199+00:00 stderr F I1212 16:18:29.727419 12 apis.go:128] Enabling API group "authorization.k8s.io". 2025-12-12T16:18:29.735530458+00:00 stderr F I1212 16:18:29.735428 12 store.go:1663] "Monitoring resource count at path" resource="horizontalpodautoscalers.autoscaling" path="//horizontalpodautoscalers" 2025-12-12T16:18:29.736851091+00:00 stderr F I1212 16:18:29.736746 12 cacher.go:469] cacher (horizontalpodautoscalers.autoscaling): initialized 2025-12-12T16:18:29.736851091+00:00 stderr F I1212 16:18:29.736787 12 reflector.go:430] "Caches populated" type="*autoscaling.HorizontalPodAutoscaler" reflector="storage/cacher.go:/horizontalpodautoscalers" 2025-12-12T16:18:29.743350052+00:00 stderr F I1212 16:18:29.743254 12 store.go:1663] "Monitoring resource count at path" resource="horizontalpodautoscalers.autoscaling" path="//horizontalpodautoscalers" 2025-12-12T16:18:29.743650299+00:00 stderr F I1212 16:18:29.743583 12 apis.go:128] Enabling API group "autoscaling". 2025-12-12T16:18:29.744281945+00:00 stderr F I1212 16:18:29.744205 12 cacher.go:469] cacher (horizontalpodautoscalers.autoscaling): initialized 2025-12-12T16:18:29.744281945+00:00 stderr F I1212 16:18:29.744241 12 reflector.go:430] "Caches populated" type="*autoscaling.HorizontalPodAutoscaler" reflector="storage/cacher.go:/horizontalpodautoscalers" 2025-12-12T16:18:29.750891458+00:00 stderr F I1212 16:18:29.750795 12 store.go:1663] "Monitoring resource count at path" resource="jobs.batch" path="//jobs" 2025-12-12T16:18:29.753577544+00:00 stderr F I1212 16:18:29.753488 12 cacher.go:469] cacher (jobs.batch): initialized 2025-12-12T16:18:29.753577544+00:00 stderr F I1212 16:18:29.753531 12 reflector.go:430] "Caches populated" type="*batch.Job" reflector="storage/cacher.go:/jobs" 2025-12-12T16:18:29.762024893+00:00 stderr F I1212 16:18:29.761916 12 store.go:1663] "Monitoring resource count at path" resource="cronjobs.batch" path="//cronjobs" 2025-12-12T16:18:29.762115675+00:00 stderr F I1212 16:18:29.762061 12 apis.go:128] Enabling API group "batch". 
2025-12-12T16:18:29.766949845+00:00 stderr F I1212 16:18:29.765546 12 cacher.go:469] cacher (cronjobs.batch): initialized 2025-12-12T16:18:29.766949845+00:00 stderr F I1212 16:18:29.765584 12 reflector.go:430] "Caches populated" type="*batch.CronJob" reflector="storage/cacher.go:/cronjobs" 2025-12-12T16:18:29.769894128+00:00 stderr F I1212 16:18:29.769389 12 cacher.go:469] cacher (customresourcedefinitions.apiextensions.k8s.io): initialized 2025-12-12T16:18:29.769894128+00:00 stderr F I1212 16:18:29.769476 12 reflector.go:430] "Caches populated" type="*apiextensions.CustomResourceDefinition" reflector="storage/cacher.go:/apiextensions.k8s.io/customresourcedefinitions" 2025-12-12T16:18:29.770687587+00:00 stderr F I1212 16:18:29.770614 12 store.go:1663] "Monitoring resource count at path" resource="certificatesigningrequests.certificates.k8s.io" path="//certificatesigningrequests" 2025-12-12T16:18:29.770755339+00:00 stderr F I1212 16:18:29.770707 12 apis.go:128] Enabling API group "certificates.k8s.io". 2025-12-12T16:18:29.772246086+00:00 stderr F I1212 16:18:29.772146 12 cacher.go:469] cacher (certificatesigningrequests.certificates.k8s.io): initialized 2025-12-12T16:18:29.772246086+00:00 stderr F I1212 16:18:29.772206 12 reflector.go:430] "Caches populated" type="*certificates.CertificateSigningRequest" reflector="storage/cacher.go:/certificatesigningrequests" 2025-12-12T16:18:29.778530651+00:00 stderr F I1212 16:18:29.778464 12 store.go:1663] "Monitoring resource count at path" resource="leases.coordination.k8s.io" path="//leases" 2025-12-12T16:18:29.778676025+00:00 stderr F I1212 16:18:29.778613 12 apis.go:128] Enabling API group "coordination.k8s.io". 2025-12-12T16:18:29.780886080+00:00 stderr F I1212 16:18:29.779877 12 cacher.go:469] cacher (leases.coordination.k8s.io): initialized 2025-12-12T16:18:29.780886080+00:00 stderr F I1212 16:18:29.779931 12 reflector.go:430] "Caches populated" type="*coordination.Lease" reflector="storage/cacher.go:/leases" 2025-12-12T16:18:29.784981601+00:00 stderr F I1212 16:18:29.784906 12 store.go:1663] "Monitoring resource count at path" resource="endpointslices.discovery.k8s.io" path="//endpointslices" 2025-12-12T16:18:29.785090443+00:00 stderr F I1212 16:18:29.785028 12 apis.go:128] Enabling API group "discovery.k8s.io". 
2025-12-12T16:18:29.789160114+00:00 stderr F I1212 16:18:29.786912 12 cacher.go:469] cacher (endpointslices.discovery.k8s.io): initialized 2025-12-12T16:18:29.789160114+00:00 stderr F I1212 16:18:29.786953 12 reflector.go:430] "Caches populated" type="*discovery.EndpointSlice" reflector="storage/cacher.go:/endpointslices" 2025-12-12T16:18:29.791138913+00:00 stderr F I1212 16:18:29.791014 12 store.go:1663] "Monitoring resource count at path" resource="networkpolicies.networking.k8s.io" path="//networkpolicies" 2025-12-12T16:18:29.792456896+00:00 stderr F I1212 16:18:29.792388 12 cacher.go:469] cacher (networkpolicies.networking.k8s.io): initialized 2025-12-12T16:18:29.792456896+00:00 stderr F I1212 16:18:29.792416 12 reflector.go:430] "Caches populated" type="*networking.NetworkPolicy" reflector="storage/cacher.go:/networkpolicies" 2025-12-12T16:18:29.798451784+00:00 stderr F I1212 16:18:29.798380 12 store.go:1663] "Monitoring resource count at path" resource="ingresses.networking.k8s.io" path="//ingress" 2025-12-12T16:18:29.799594292+00:00 stderr F I1212 16:18:29.799540 12 cacher.go:469] cacher (ingresses.networking.k8s.io): initialized 2025-12-12T16:18:29.799594292+00:00 stderr F I1212 16:18:29.799569 12 reflector.go:430] "Caches populated" type="*networking.Ingress" reflector="storage/cacher.go:/ingress" 2025-12-12T16:18:29.805233261+00:00 stderr F I1212 16:18:29.804701 12 store.go:1663] "Monitoring resource count at path" resource="ingressclasses.networking.k8s.io" path="//ingressclasses" 2025-12-12T16:18:29.806113863+00:00 stderr F I1212 16:18:29.806017 12 cacher.go:469] cacher (ingressclasses.networking.k8s.io): initialized 2025-12-12T16:18:29.806297628+00:00 stderr F I1212 16:18:29.806066 12 reflector.go:430] "Caches populated" type="*networking.IngressClass" reflector="storage/cacher.go:/ingressclasses" 2025-12-12T16:18:29.813109976+00:00 stderr F I1212 16:18:29.813026 12 store.go:1663] "Monitoring resource count at path" resource="ipaddresses.networking.k8s.io" path="//ipaddresses" 2025-12-12T16:18:29.817642028+00:00 stderr F I1212 16:18:29.815126 12 cacher.go:469] cacher (ipaddresses.networking.k8s.io): initialized 2025-12-12T16:18:29.817642028+00:00 stderr F I1212 16:18:29.815194 12 reflector.go:430] "Caches populated" type="*networking.IPAddress" reflector="storage/cacher.go:/ipaddresses" 2025-12-12T16:18:29.820668903+00:00 stderr F I1212 16:18:29.820555 12 store.go:1663] "Monitoring resource count at path" resource="servicecidrs.networking.k8s.io" path="//servicecidrs" 2025-12-12T16:18:29.820728255+00:00 stderr F I1212 16:18:29.820685 12 apis.go:128] Enabling API group "networking.k8s.io". 2025-12-12T16:18:29.822104219+00:00 stderr F I1212 16:18:29.822032 12 cacher.go:469] cacher (servicecidrs.networking.k8s.io): initialized 2025-12-12T16:18:29.822104219+00:00 stderr F I1212 16:18:29.822066 12 reflector.go:430] "Caches populated" type="*networking.ServiceCIDR" reflector="storage/cacher.go:/servicecidrs" 2025-12-12T16:18:29.828474956+00:00 stderr F I1212 16:18:29.828366 12 store.go:1663] "Monitoring resource count at path" resource="runtimeclasses.node.k8s.io" path="//runtimeclasses" 2025-12-12T16:18:29.828503197+00:00 stderr F I1212 16:18:29.828451 12 apis.go:128] Enabling API group "node.k8s.io". 
2025-12-12T16:18:29.838347750+00:00 stderr F I1212 16:18:29.837380 12 cacher.go:469] cacher (runtimeclasses.node.k8s.io): initialized 2025-12-12T16:18:29.838347750+00:00 stderr F I1212 16:18:29.837432 12 reflector.go:430] "Caches populated" type="*node.RuntimeClass" reflector="storage/cacher.go:/runtimeclasses" 2025-12-12T16:18:29.841910768+00:00 stderr F I1212 16:18:29.841804 12 store.go:1663] "Monitoring resource count at path" resource="poddisruptionbudgets.policy" path="//poddisruptionbudgets" 2025-12-12T16:18:29.842160114+00:00 stderr F I1212 16:18:29.841942 12 apis.go:128] Enabling API group "policy". 2025-12-12T16:18:29.843501848+00:00 stderr F I1212 16:18:29.843426 12 cacher.go:469] cacher (poddisruptionbudgets.policy): initialized 2025-12-12T16:18:29.843501848+00:00 stderr F I1212 16:18:29.843470 12 reflector.go:430] "Caches populated" type="*policy.PodDisruptionBudget" reflector="storage/cacher.go:/poddisruptionbudgets" 2025-12-12T16:18:29.851330431+00:00 stderr F I1212 16:18:29.851203 12 store.go:1663] "Monitoring resource count at path" resource="roles.rbac.authorization.k8s.io" path="//roles" 2025-12-12T16:18:29.857722239+00:00 stderr F I1212 16:18:29.857620 12 cacher.go:469] cacher (roles.rbac.authorization.k8s.io): initialized 2025-12-12T16:18:29.857722239+00:00 stderr F I1212 16:18:29.857662 12 reflector.go:430] "Caches populated" type="*rbac.Role" reflector="storage/cacher.go:/roles" 2025-12-12T16:18:29.860348454+00:00 stderr F I1212 16:18:29.860260 12 store.go:1663] "Monitoring resource count at path" resource="rolebindings.rbac.authorization.k8s.io" path="//rolebindings" 2025-12-12T16:18:29.875412566+00:00 stderr F I1212 16:18:29.874897 12 cacher.go:469] cacher (rolebindings.rbac.authorization.k8s.io): initialized 2025-12-12T16:18:29.875412566+00:00 stderr F I1212 16:18:29.874946 12 reflector.go:430] "Caches populated" type="*rbac.RoleBinding" reflector="storage/cacher.go:/rolebindings" 2025-12-12T16:18:29.876704448+00:00 stderr F I1212 16:18:29.875991 12 store.go:1663] "Monitoring resource count at path" resource="clusterroles.rbac.authorization.k8s.io" path="//clusterroles" 2025-12-12T16:18:29.882541063+00:00 stderr F I1212 16:18:29.882420 12 cacher.go:469] cacher (clusterroles.rbac.authorization.k8s.io): initialized 2025-12-12T16:18:29.882541063+00:00 stderr F I1212 16:18:29.882463 12 reflector.go:430] "Caches populated" type="*rbac.ClusterRole" reflector="storage/cacher.go:/clusterroles" 2025-12-12T16:18:29.882736487+00:00 stderr F I1212 16:18:29.882660 12 store.go:1663] "Monitoring resource count at path" resource="clusterrolebindings.rbac.authorization.k8s.io" path="//clusterrolebindings" 2025-12-12T16:18:29.882859210+00:00 stderr F I1212 16:18:29.882803 12 apis.go:128] Enabling API group "rbac.authorization.k8s.io". 2025-12-12T16:18:29.890328705+00:00 stderr F I1212 16:18:29.889112 12 cacher.go:469] cacher (clusterrolebindings.rbac.authorization.k8s.io): initialized 2025-12-12T16:18:29.890328705+00:00 stderr F I1212 16:18:29.889142 12 reflector.go:430] "Caches populated" type="*rbac.ClusterRoleBinding" reflector="storage/cacher.go:/clusterrolebindings" 2025-12-12T16:18:29.890866728+00:00 stderr F I1212 16:18:29.890815 12 store.go:1663] "Monitoring resource count at path" resource="priorityclasses.scheduling.k8s.io" path="//priorityclasses" 2025-12-12T16:18:29.890898419+00:00 stderr F I1212 16:18:29.890883 12 apis.go:128] Enabling API group "scheduling.k8s.io". 
2025-12-12T16:18:29.893323429+00:00 stderr F I1212 16:18:29.893281 12 cacher.go:469] cacher (priorityclasses.scheduling.k8s.io): initialized 2025-12-12T16:18:29.893323429+00:00 stderr F I1212 16:18:29.893300 12 reflector.go:430] "Caches populated" type="*scheduling.PriorityClass" reflector="storage/cacher.go:/priorityclasses" 2025-12-12T16:18:29.897056392+00:00 stderr F I1212 16:18:29.897012 12 store.go:1663] "Monitoring resource count at path" resource="storageclasses.storage.k8s.io" path="//storageclasses" 2025-12-12T16:18:29.899068781+00:00 stderr F I1212 16:18:29.899016 12 cacher.go:469] cacher (storageclasses.storage.k8s.io): initialized 2025-12-12T16:18:29.899106182+00:00 stderr F I1212 16:18:29.899056 12 reflector.go:430] "Caches populated" type="*storage.StorageClass" reflector="storage/cacher.go:/storageclasses" 2025-12-12T16:18:29.903279325+00:00 stderr F I1212 16:18:29.903239 12 store.go:1663] "Monitoring resource count at path" resource="volumeattachments.storage.k8s.io" path="//volumeattachments" 2025-12-12T16:18:29.904033194+00:00 stderr F I1212 16:18:29.903988 12 cacher.go:469] cacher (volumeattachments.storage.k8s.io): initialized 2025-12-12T16:18:29.904033194+00:00 stderr F I1212 16:18:29.904014 12 reflector.go:430] "Caches populated" type="*storage.VolumeAttachment" reflector="storage/cacher.go:/volumeattachments" 2025-12-12T16:18:29.908696799+00:00 stderr F I1212 16:18:29.908623 12 store.go:1663] "Monitoring resource count at path" resource="csinodes.storage.k8s.io" path="//csinodes" 2025-12-12T16:18:29.910077493+00:00 stderr F I1212 16:18:29.910003 12 cacher.go:469] cacher (csinodes.storage.k8s.io): initialized 2025-12-12T16:18:29.910077493+00:00 stderr F I1212 16:18:29.910052 12 reflector.go:430] "Caches populated" type="*storage.CSINode" reflector="storage/cacher.go:/csinodes" 2025-12-12T16:18:29.914557464+00:00 stderr F I1212 16:18:29.914400 12 store.go:1663] "Monitoring resource count at path" resource="csidrivers.storage.k8s.io" path="//csidrivers" 2025-12-12T16:18:29.915708033+00:00 stderr F I1212 16:18:29.915653 12 cacher.go:469] cacher (csidrivers.storage.k8s.io): initialized 2025-12-12T16:18:29.915708033+00:00 stderr F I1212 16:18:29.915684 12 reflector.go:430] "Caches populated" type="*storage.CSIDriver" reflector="storage/cacher.go:/csidrivers" 2025-12-12T16:18:29.921399503+00:00 stderr F I1212 16:18:29.921266 12 store.go:1663] "Monitoring resource count at path" resource="csistoragecapacities.storage.k8s.io" path="//csistoragecapacities" 2025-12-12T16:18:29.921446554+00:00 stderr F I1212 16:18:29.921420 12 apis.go:128] Enabling API group "storage.k8s.io". 2025-12-12T16:18:29.921515226+00:00 stderr F I1212 16:18:29.921466 12 apis.go:112] API group "storagemigration.k8s.io" is not enabled, skipping. 
2025-12-12T16:18:29.922682215+00:00 stderr F I1212 16:18:29.922623 12 cacher.go:469] cacher (csistoragecapacities.storage.k8s.io): initialized 2025-12-12T16:18:29.922734996+00:00 stderr F I1212 16:18:29.922674 12 reflector.go:430] "Caches populated" type="*storage.CSIStorageCapacity" reflector="storage/cacher.go:/csistoragecapacities" 2025-12-12T16:18:29.928779696+00:00 stderr F I1212 16:18:29.928508 12 store.go:1663] "Monitoring resource count at path" resource="flowschemas.flowcontrol.apiserver.k8s.io" path="//flowschemas" 2025-12-12T16:18:29.932617331+00:00 stderr F I1212 16:18:29.932523 12 cacher.go:469] cacher (flowschemas.flowcontrol.apiserver.k8s.io): initialized 2025-12-12T16:18:29.932617331+00:00 stderr F I1212 16:18:29.932551 12 reflector.go:430] "Caches populated" type="*flowcontrol.FlowSchema" reflector="storage/cacher.go:/flowschemas" 2025-12-12T16:18:29.934220170+00:00 stderr F I1212 16:18:29.934136 12 store.go:1663] "Monitoring resource count at path" resource="prioritylevelconfigurations.flowcontrol.apiserver.k8s.io" path="//prioritylevelconfigurations" 2025-12-12T16:18:29.934270762+00:00 stderr F I1212 16:18:29.934246 12 apis.go:128] Enabling API group "flowcontrol.apiserver.k8s.io". 2025-12-12T16:18:29.935602215+00:00 stderr F I1212 16:18:29.935526 12 cacher.go:469] cacher (prioritylevelconfigurations.flowcontrol.apiserver.k8s.io): initialized 2025-12-12T16:18:29.935602215+00:00 stderr F I1212 16:18:29.935574 12 reflector.go:430] "Caches populated" type="*flowcontrol.PriorityLevelConfiguration" reflector="storage/cacher.go:/prioritylevelconfigurations" 2025-12-12T16:18:29.941197713+00:00 stderr F I1212 16:18:29.941074 12 store.go:1663] "Monitoring resource count at path" resource="deployments.apps" path="//deployments" 2025-12-12T16:18:29.949003406+00:00 stderr F I1212 16:18:29.948334 12 store.go:1663] "Monitoring resource count at path" resource="statefulsets.apps" path="//statefulsets" 2025-12-12T16:18:29.949720674+00:00 stderr F I1212 16:18:29.949510 12 cacher.go:469] cacher (deployments.apps): initialized 2025-12-12T16:18:29.949720674+00:00 stderr F I1212 16:18:29.949544 12 reflector.go:430] "Caches populated" type="*apps.Deployment" reflector="storage/cacher.go:/deployments" 2025-12-12T16:18:29.950478302+00:00 stderr F I1212 16:18:29.950410 12 cacher.go:469] cacher (statefulsets.apps): initialized 2025-12-12T16:18:29.950478302+00:00 stderr F I1212 16:18:29.950444 12 reflector.go:430] "Caches populated" type="*apps.StatefulSet" reflector="storage/cacher.go:/statefulsets" 2025-12-12T16:18:29.957316091+00:00 stderr F I1212 16:18:29.957230 12 store.go:1663] "Monitoring resource count at path" resource="daemonsets.apps" path="//daemonsets" 2025-12-12T16:18:29.960549231+00:00 stderr F I1212 16:18:29.960423 12 cacher.go:469] cacher (daemonsets.apps): initialized 2025-12-12T16:18:29.960549231+00:00 stderr F I1212 16:18:29.960478 12 reflector.go:430] "Caches populated" type="*apps.DaemonSet" reflector="storage/cacher.go:/daemonsets" 2025-12-12T16:18:29.964271833+00:00 stderr F I1212 16:18:29.964206 12 store.go:1663] "Monitoring resource count at path" resource="replicasets.apps" path="//replicasets" 2025-12-12T16:18:29.970991729+00:00 stderr F I1212 16:18:29.970895 12 store.go:1663] "Monitoring resource count at path" resource="controllerrevisions.apps" path="//controllerrevisions" 2025-12-12T16:18:29.971156314+00:00 stderr F I1212 16:18:29.971119 12 apis.go:128] Enabling API group "apps". 
2025-12-12T16:18:29.972517437+00:00 stderr F I1212 16:18:29.972445 12 cacher.go:469] cacher (replicasets.apps): initialized 2025-12-12T16:18:29.972517437+00:00 stderr F I1212 16:18:29.972493 12 reflector.go:430] "Caches populated" type="*apps.ReplicaSet" reflector="storage/cacher.go:/replicasets" 2025-12-12T16:18:29.973778758+00:00 stderr F I1212 16:18:29.973721 12 cacher.go:469] cacher (controllerrevisions.apps): initialized 2025-12-12T16:18:29.973778758+00:00 stderr F I1212 16:18:29.973746 12 reflector.go:430] "Caches populated" type="*apps.ControllerRevision" reflector="storage/cacher.go:/controllerrevisions" 2025-12-12T16:18:29.977703495+00:00 stderr F I1212 16:18:29.977636 12 store.go:1663] "Monitoring resource count at path" resource="validatingwebhookconfigurations.admissionregistration.k8s.io" path="//validatingwebhookconfigurations" 2025-12-12T16:18:29.979650634+00:00 stderr F I1212 16:18:29.979584 12 cacher.go:469] cacher (validatingwebhookconfigurations.admissionregistration.k8s.io): initialized 2025-12-12T16:18:29.979650634+00:00 stderr F I1212 16:18:29.979619 12 reflector.go:430] "Caches populated" type="*admissionregistration.ValidatingWebhookConfiguration" reflector="storage/cacher.go:/validatingwebhookconfigurations" 2025-12-12T16:18:29.984228597+00:00 stderr F I1212 16:18:29.984136 12 store.go:1663] "Monitoring resource count at path" resource="mutatingwebhookconfigurations.admissionregistration.k8s.io" path="//mutatingwebhookconfigurations" 2025-12-12T16:18:29.986412991+00:00 stderr F I1212 16:18:29.986347 12 cacher.go:469] cacher (mutatingwebhookconfigurations.admissionregistration.k8s.io): initialized 2025-12-12T16:18:29.986461872+00:00 stderr F I1212 16:18:29.986423 12 reflector.go:430] "Caches populated" type="*admissionregistration.MutatingWebhookConfiguration" reflector="storage/cacher.go:/mutatingwebhookconfigurations" 2025-12-12T16:18:29.992267705+00:00 stderr F I1212 16:18:29.992147 12 store.go:1663] "Monitoring resource count at path" resource="validatingadmissionpolicies.admissionregistration.k8s.io" path="//validatingadmissionpolicies" 2025-12-12T16:18:29.993922446+00:00 stderr F I1212 16:18:29.993884 12 cacher.go:469] cacher (validatingadmissionpolicies.admissionregistration.k8s.io): initialized 2025-12-12T16:18:29.994006328+00:00 stderr F I1212 16:18:29.993985 12 reflector.go:430] "Caches populated" type="*admissionregistration.ValidatingAdmissionPolicy" reflector="storage/cacher.go:/validatingadmissionpolicies" 2025-12-12T16:18:29.999095954+00:00 stderr F I1212 16:18:29.999050 12 store.go:1663] "Monitoring resource count at path" resource="validatingadmissionpolicybindings.admissionregistration.k8s.io" path="//validatingadmissionpolicybindings" 2025-12-12T16:18:29.999208797+00:00 stderr F I1212 16:18:29.999161 12 apis.go:128] Enabling API group "admissionregistration.k8s.io". 2025-12-12T16:18:30.000755985+00:00 stderr F I1212 16:18:30.000703 12 cacher.go:469] cacher (validatingadmissionpolicybindings.admissionregistration.k8s.io): initialized 2025-12-12T16:18:30.000796486+00:00 stderr F I1212 16:18:30.000749 12 reflector.go:430] "Caches populated" type="*admissionregistration.ValidatingAdmissionPolicyBinding" reflector="storage/cacher.go:/validatingadmissionpolicybindings" 2025-12-12T16:18:30.007299707+00:00 stderr F I1212 16:18:30.007245 12 store.go:1663] "Monitoring resource count at path" resource="events" path="//events" 2025-12-12T16:18:30.007358029+00:00 stderr F I1212 16:18:30.007303 12 apis.go:128] Enabling API group "events.k8s.io". 
2025-12-12T16:18:30.007385099+00:00 stderr F I1212 16:18:30.007355 12 apis.go:112] API group "resource.k8s.io" is not enabled, skipping. 2025-12-12T16:18:30.034822088+00:00 stderr F I1212 16:18:30.029325 12 handler.go:288] Adding GroupVersion authentication.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.034822088+00:00 stderr F W1212 16:18:30.029360 12 genericapiserver.go:810] Skipping API authentication.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.034822088+00:00 stderr F W1212 16:18:30.029372 12 genericapiserver.go:810] Skipping API authentication.k8s.io/v1alpha1 because it has no resources. 2025-12-12T16:18:30.034822088+00:00 stderr F I1212 16:18:30.029973 12 handler.go:288] Adding GroupVersion authorization.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.034822088+00:00 stderr F W1212 16:18:30.029980 12 genericapiserver.go:810] Skipping API authorization.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.034822088+00:00 stderr F I1212 16:18:30.031774 12 handler.go:288] Adding GroupVersion autoscaling v2 to ResourceManager 2025-12-12T16:18:30.034822088+00:00 stderr F I1212 16:18:30.032790 12 handler.go:288] Adding GroupVersion autoscaling v1 to ResourceManager 2025-12-12T16:18:30.034822088+00:00 stderr F W1212 16:18:30.032800 12 genericapiserver.go:810] Skipping API autoscaling/v2beta1 because it has no resources. 2025-12-12T16:18:30.034822088+00:00 stderr F W1212 16:18:30.032809 12 genericapiserver.go:810] Skipping API autoscaling/v2beta2 because it has no resources. 2025-12-12T16:18:30.035150766+00:00 stderr F I1212 16:18:30.035107 12 handler.go:288] Adding GroupVersion batch v1 to ResourceManager 2025-12-12T16:18:30.035150766+00:00 stderr F W1212 16:18:30.035120 12 genericapiserver.go:810] Skipping API batch/v1beta1 because it has no resources. 2025-12-12T16:18:30.037505734+00:00 stderr F I1212 16:18:30.037452 12 handler.go:288] Adding GroupVersion certificates.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.037505734+00:00 stderr F W1212 16:18:30.037470 12 genericapiserver.go:810] Skipping API certificates.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.037505734+00:00 stderr F W1212 16:18:30.037476 12 genericapiserver.go:810] Skipping API certificates.k8s.io/v1alpha1 because it has no resources. 2025-12-12T16:18:30.038212551+00:00 stderr F I1212 16:18:30.038134 12 handler.go:288] Adding GroupVersion coordination.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.038212551+00:00 stderr F W1212 16:18:30.038148 12 genericapiserver.go:810] Skipping API coordination.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.038212551+00:00 stderr F W1212 16:18:30.038153 12 genericapiserver.go:810] Skipping API coordination.k8s.io/v1alpha2 because it has no resources. 2025-12-12T16:18:30.038983810+00:00 stderr F I1212 16:18:30.038946 12 handler.go:288] Adding GroupVersion discovery.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.038983810+00:00 stderr F W1212 16:18:30.038961 12 genericapiserver.go:810] Skipping API discovery.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.041966244+00:00 stderr F I1212 16:18:30.041911 12 handler.go:288] Adding GroupVersion networking.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.041966244+00:00 stderr F W1212 16:18:30.041928 12 genericapiserver.go:810] Skipping API networking.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.041966244+00:00 stderr F W1212 16:18:30.041933 12 genericapiserver.go:810] Skipping API networking.k8s.io/v1alpha1 because it has no resources. 
2025-12-12T16:18:30.042533678+00:00 stderr F I1212 16:18:30.042493 12 handler.go:288] Adding GroupVersion node.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.042533678+00:00 stderr F W1212 16:18:30.042508 12 genericapiserver.go:810] Skipping API node.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.042533678+00:00 stderr F W1212 16:18:30.042513 12 genericapiserver.go:810] Skipping API node.k8s.io/v1alpha1 because it has no resources. 2025-12-12T16:18:30.043491842+00:00 stderr F I1212 16:18:30.043451 12 handler.go:288] Adding GroupVersion policy v1 to ResourceManager 2025-12-12T16:18:30.043491842+00:00 stderr F W1212 16:18:30.043465 12 genericapiserver.go:810] Skipping API policy/v1beta1 because it has no resources. 2025-12-12T16:18:30.045482961+00:00 stderr F I1212 16:18:30.045440 12 handler.go:288] Adding GroupVersion rbac.authorization.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.045482961+00:00 stderr F W1212 16:18:30.045455 12 genericapiserver.go:810] Skipping API rbac.authorization.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.045482961+00:00 stderr F W1212 16:18:30.045460 12 genericapiserver.go:810] Skipping API rbac.authorization.k8s.io/v1alpha1 because it has no resources. 2025-12-12T16:18:30.045943313+00:00 stderr F I1212 16:18:30.045908 12 handler.go:288] Adding GroupVersion scheduling.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.045943313+00:00 stderr F W1212 16:18:30.045922 12 genericapiserver.go:810] Skipping API scheduling.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.045943313+00:00 stderr F W1212 16:18:30.045927 12 genericapiserver.go:810] Skipping API scheduling.k8s.io/v1alpha1 because it has no resources. 2025-12-12T16:18:30.048006503+00:00 stderr F I1212 16:18:30.047965 12 handler.go:288] Adding GroupVersion storage.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.048006503+00:00 stderr F W1212 16:18:30.047980 12 genericapiserver.go:810] Skipping API storage.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.048006503+00:00 stderr F W1212 16:18:30.047984 12 genericapiserver.go:810] Skipping API storage.k8s.io/v1alpha1 because it has no resources. 2025-12-12T16:18:30.048890225+00:00 stderr F I1212 16:18:30.048822 12 handler.go:288] Adding GroupVersion flowcontrol.apiserver.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.048890225+00:00 stderr F W1212 16:18:30.048838 12 genericapiserver.go:810] Skipping API flowcontrol.apiserver.k8s.io/v1beta3 because it has no resources. 2025-12-12T16:18:30.048890225+00:00 stderr F W1212 16:18:30.048843 12 genericapiserver.go:810] Skipping API flowcontrol.apiserver.k8s.io/v1beta2 because it has no resources. 2025-12-12T16:18:30.048890225+00:00 stderr F W1212 16:18:30.048847 12 genericapiserver.go:810] Skipping API flowcontrol.apiserver.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.051915190+00:00 stderr F I1212 16:18:30.051837 12 handler.go:288] Adding GroupVersion apps v1 to ResourceManager 2025-12-12T16:18:30.051915190+00:00 stderr F W1212 16:18:30.051868 12 genericapiserver.go:810] Skipping API apps/v1beta2 because it has no resources. 2025-12-12T16:18:30.051915190+00:00 stderr F W1212 16:18:30.051875 12 genericapiserver.go:810] Skipping API apps/v1beta1 because it has no resources. 
2025-12-12T16:18:30.053359086+00:00 stderr F I1212 16:18:30.053313 12 handler.go:288] Adding GroupVersion admissionregistration.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.053359086+00:00 stderr F W1212 16:18:30.053328 12 genericapiserver.go:810] Skipping API admissionregistration.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.053359086+00:00 stderr F W1212 16:18:30.053333 12 genericapiserver.go:810] Skipping API admissionregistration.k8s.io/v1alpha1 because it has no resources. 2025-12-12T16:18:30.053767666+00:00 stderr F I1212 16:18:30.053732 12 handler.go:288] Adding GroupVersion events.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.053767666+00:00 stderr F W1212 16:18:30.053745 12 genericapiserver.go:810] Skipping API events.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.064060270+00:00 stderr F I1212 16:18:30.063688 12 store.go:1663] "Monitoring resource count at path" resource="apiservices.apiregistration.k8s.io" path="//apiregistration.k8s.io/apiservices" 2025-12-12T16:18:30.064543912+00:00 stderr F I1212 16:18:30.064492 12 handler.go:288] Adding GroupVersion apiregistration.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.064543912+00:00 stderr F W1212 16:18:30.064509 12 genericapiserver.go:810] Skipping API apiregistration.k8s.io/v1beta1 because it has no resources. 2025-12-12T16:18:30.065099836+00:00 stderr F I1212 16:18:30.065052 12 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="aggregator-proxy-cert::/etc/kubernetes/static-pod-certs/secrets/aggregator-client/tls.crt::/etc/kubernetes/static-pod-certs/secrets/aggregator-client/tls.key" 2025-12-12T16:18:30.076407096+00:00 stderr F I1212 16:18:30.076286 12 cacher.go:469] cacher (apiservices.apiregistration.k8s.io): initialized 2025-12-12T16:18:30.076407096+00:00 stderr F I1212 16:18:30.076329 12 reflector.go:430] "Caches populated" type="*apiregistration.APIService" reflector="storage/cacher.go:/apiregistration.k8s.io/apiservices" 2025-12-12T16:18:30.396865169+00:00 stderr F I1212 16:18:30.396664 12 genericapiserver.go:599] "[graceful-termination] using HTTP Server shutdown timeout" shutdownTimeout="2s" 2025-12-12T16:18:30.397108925+00:00 stderr F I1212 16:18:30.397056 12 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:18:30.397108925+00:00 stderr F I1212 16:18:30.397066 12 dynamic_cafile_content.go:161] "Starting controller" name="request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" 2025-12-12T16:18:30.397651588+00:00 stderr F I1212 16:18:30.397582 12 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.key" 2025-12-12T16:18:30.397773041+00:00 stderr F I1212 16:18:30.397722 12 dynamic_serving_content.go:135] "Starting controller" name="sni-serving-cert::/etc/kubernetes/static-pod-certs/secrets/localhost-serving-cert-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/localhost-serving-cert-certkey/tls.key" 2025-12-12T16:18:30.397944925+00:00 stderr F I1212 16:18:30.397892 12 dynamic_serving_content.go:135] "Starting controller" name="sni-serving-cert::/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.key" 
2025-12-12T16:18:30.398726455+00:00 stderr F I1212 16:18:30.398650 12 dynamic_serving_content.go:135] "Starting controller" name="sni-serving-cert::/etc/kubernetes/static-pod-certs/secrets/external-loadbalancer-serving-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/external-loadbalancer-serving-certkey/tls.key" 2025-12-12T16:18:30.398961370+00:00 stderr F I1212 16:18:30.398903 12 dynamic_serving_content.go:135] "Starting controller" name="sni-serving-cert::/etc/kubernetes/static-pod-certs/secrets/internal-loadbalancer-serving-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/internal-loadbalancer-serving-certkey/tls.key" 2025-12-12T16:18:30.399254308+00:00 stderr F I1212 16:18:30.399202 12 dynamic_serving_content.go:135] "Starting controller" name="sni-serving-cert::/etc/kubernetes/static-pod-resources/secrets/localhost-recovery-serving-certkey/tls.crt::/etc/kubernetes/static-pod-resources/secrets/localhost-recovery-serving-certkey/tls.key" 2025-12-12T16:18:30.399368690+00:00 stderr F I1212 16:18:30.399305 12 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:18:30.399259028 +0000 UTC))" 2025-12-12T16:18:30.399391551+00:00 stderr F I1212 16:18:30.399341 12 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:18:30.39933417 +0000 UTC))" 2025-12-12T16:18:30.399412121+00:00 stderr F I1212 16:18:30.399378 12 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:18:30.39937262 +0000 UTC))" 2025-12-12T16:18:30.399412121+00:00 stderr F I1212 16:18:30.399393 12 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:18:30.399389621 +0000 UTC))" 2025-12-12T16:18:30.399456633+00:00 stderr F I1212 16:18:30.399407 12 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:18:30.399403211 +0000 UTC))" 
2025-12-12T16:18:30.399456633+00:00 stderr F I1212 16:18:30.399425 12 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:18:30.399421542 +0000 UTC))" 2025-12-12T16:18:30.399456633+00:00 stderr F I1212 16:18:30.399437 12 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:18:30.399433212 +0000 UTC))" 2025-12-12T16:18:30.399490323+00:00 stderr F I1212 16:18:30.399449 12 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:18:30.399445562 +0000 UTC))" 2025-12-12T16:18:30.399490323+00:00 stderr F I1212 16:18:30.399461 12 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:18:30.399457363 +0000 UTC))" 2025-12-12T16:18:30.399532184+00:00 stderr F I1212 16:18:30.399481 12 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:18:30.399476813 +0000 UTC))" 2025-12-12T16:18:30.399698279+00:00 stderr F I1212 16:18:30.399641 12 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.key" certDetail="\"10.217.4.1\" [serving] validServingFor=[10.217.4.1,kubernetes,kubernetes.default,kubernetes.default.svc,kubernetes.default.svc.cluster.local,openshift,openshift.default,openshift.default.svc,openshift.default.svc.cluster.local,10.217.4.1] issuer=\"kube-apiserver-service-network-signer\" (2025-11-02 07:51:36 +0000 UTC to 2026-11-02 07:51:37 +0000 UTC (now=2025-12-12 16:18:30.399633157 +0000 UTC))" 2025-12-12T16:18:30.399882283+00:00 stderr F I1212 16:18:30.399826 12 named_certificates.go:53] "Loaded SNI cert" index=5 
certName="sni-serving-cert::/etc/kubernetes/static-pod-resources/secrets/localhost-recovery-serving-certkey/tls.crt::/etc/kubernetes/static-pod-resources/secrets/localhost-recovery-serving-certkey/tls.key" certDetail="\"localhost-recovery\" [serving] validServingFor=[localhost-recovery] issuer=\"openshift-kube-apiserver-operator_localhost-recovery-serving-signer@1762069890\" (2025-11-02 07:51:36 +0000 UTC to 2035-10-31 07:51:30 +0000 UTC (now=2025-12-12 16:18:30.399816201 +0000 UTC))" 2025-12-12T16:18:30.400043727+00:00 stderr F I1212 16:18:30.400008 12 named_certificates.go:53] "Loaded SNI cert" index=4 certName="sni-serving-cert::/etc/kubernetes/static-pod-certs/secrets/internal-loadbalancer-serving-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/internal-loadbalancer-serving-certkey/tls.key" certDetail="\"api-int.crc.testing\" [serving] validServingFor=[api-int.crc.testing] issuer=\"kube-apiserver-lb-signer\" (2025-11-02 07:51:35 +0000 UTC to 2026-11-02 07:51:36 +0000 UTC (now=2025-12-12 16:18:30.399999906 +0000 UTC))" 2025-12-12T16:18:30.400219301+00:00 stderr F I1212 16:18:30.400167 12 named_certificates.go:53] "Loaded SNI cert" index=3 certName="sni-serving-cert::/etc/kubernetes/static-pod-certs/secrets/external-loadbalancer-serving-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/external-loadbalancer-serving-certkey/tls.key" certDetail="\"api.crc.testing\" [serving] validServingFor=[api.crc.testing] issuer=\"kube-apiserver-lb-signer\" (2025-11-02 07:51:35 +0000 UTC to 2026-11-02 07:51:36 +0000 UTC (now=2025-12-12 16:18:30.40015827 +0000 UTC))" 2025-12-12T16:18:30.400461607+00:00 stderr F I1212 16:18:30.400408 12 named_certificates.go:53] "Loaded SNI cert" index=2 certName="sni-serving-cert::/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/service-network-serving-certkey/tls.key" certDetail="\"10.217.4.1\" [serving] validServingFor=[10.217.4.1,kubernetes,kubernetes.default,kubernetes.default.svc,kubernetes.default.svc.cluster.local,openshift,openshift.default,openshift.default.svc,openshift.default.svc.cluster.local,10.217.4.1] issuer=\"kube-apiserver-service-network-signer\" (2025-11-02 07:51:36 +0000 UTC to 2026-11-02 07:51:37 +0000 UTC (now=2025-12-12 16:18:30.400375635 +0000 UTC))" 2025-12-12T16:18:30.400620621+00:00 stderr F I1212 16:18:30.400586 12 named_certificates.go:53] "Loaded SNI cert" index=1 certName="sni-serving-cert::/etc/kubernetes/static-pod-certs/secrets/localhost-serving-cert-certkey/tls.crt::/etc/kubernetes/static-pod-certs/secrets/localhost-serving-cert-certkey/tls.key" certDetail="\"127.0.0.1\" [serving] validServingFor=[127.0.0.1,localhost,127.0.0.1] issuer=\"kube-apiserver-localhost-signer\" (2025-11-02 07:51:35 +0000 UTC to 2026-11-02 07:51:36 +0000 UTC (now=2025-12-12 16:18:30.40057861 +0000 UTC))" 2025-12-12T16:18:30.400759605+00:00 stderr F I1212 16:18:30.400728 12 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556309\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556309\" (2025-12-12 15:18:28 +0000 UTC to 2028-12-12 15:18:28 +0000 UTC (now=2025-12-12 16:18:30.400722474 +0000 UTC))" 2025-12-12T16:18:30.400802916+00:00 stderr F I1212 16:18:30.400779 12 secure_serving.go:211] Serving securely on [::]:6443 2025-12-12T16:18:30.400857797+00:00 stderr F I1212 16:18:30.400827 12 genericapiserver.go:725] [graceful-termination] waiting for 
shutdown to be initiated 2025-12-12T16:18:30.400857797+00:00 stderr F I1212 16:18:30.400843 12 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:18:30.400898028+00:00 stderr F I1212 16:18:30.400868 12 controller.go:80] Starting OpenAPI V3 AggregationController 2025-12-12T16:18:30.400953910+00:00 stderr F I1212 16:18:30.400892 12 dynamic_serving_content.go:135] "Starting controller" name="aggregator-proxy-cert::/etc/kubernetes/static-pod-certs/secrets/aggregator-client/tls.crt::/etc/kubernetes/static-pod-certs/secrets/aggregator-client/tls.key" 2025-12-12T16:18:30.401056282+00:00 stderr F I1212 16:18:30.400988 12 aggregator.go:169] waiting for initial CRD sync... 2025-12-12T16:18:30.401056282+00:00 stderr F I1212 16:18:30.400997 12 apf_controller.go:377] Starting API Priority and Fairness config controller 2025-12-12T16:18:30.401056282+00:00 stderr F I1212 16:18:30.400999 12 controller.go:78] Starting OpenAPI AggregationController 2025-12-12T16:18:30.401096303+00:00 stderr F I1212 16:18:30.401052 12 clusterquotamapping.go:127] Starting ClusterQuotaMappingController controller 2025-12-12T16:18:30.401096303+00:00 stderr F I1212 16:18:30.401068 12 system_namespaces_controller.go:66] Starting system namespaces controller 2025-12-12T16:18:30.401126774+00:00 stderr F I1212 16:18:30.401083 12 repairip.go:200] Starting ipallocator-repair-controller 2025-12-12T16:18:30.401126774+00:00 stderr F I1212 16:18:30.401105 12 local_available_controller.go:156] Starting LocalAvailability controller 2025-12-12T16:18:30.401163485+00:00 stderr F I1212 16:18:30.401117 12 cache.go:32] Waiting for caches to sync for LocalAvailability controller 2025-12-12T16:18:30.401163485+00:00 stderr F I1212 16:18:30.401020 12 apiaccess_count_controller.go:89] Starting APIRequestCount controller. 
2025-12-12T16:18:30.401370520+00:00 stderr F I1212 16:18:30.401333 12 apiservice_controller.go:100] Starting APIServiceRegistrationController 2025-12-12T16:18:30.401370520+00:00 stderr F I1212 16:18:30.401349 12 cache.go:32] Waiting for caches to sync for APIServiceRegistrationController controller 2025-12-12T16:18:30.401518374+00:00 stderr F I1212 16:18:30.401471 12 crdregistration_controller.go:115] Starting crd-autoregister controller 2025-12-12T16:18:30.401518374+00:00 stderr F I1212 16:18:30.401487 12 shared_informer.go:350] "Waiting for caches to sync" controller="crd-autoregister" 2025-12-12T16:18:30.401558725+00:00 stderr F I1212 16:18:30.401539 12 cluster_authentication_trust_controller.go:459] Starting cluster_authentication_trust_controller controller 2025-12-12T16:18:30.401595575+00:00 stderr F I1212 16:18:30.401548 12 shared_informer.go:350] "Waiting for caches to sync" controller="cluster_authentication_trust_controller" 2025-12-12T16:18:30.401715768+00:00 stderr F I1212 16:18:30.401672 12 customresource_discovery_controller.go:294] Starting DiscoveryController 2025-12-12T16:18:30.401753629+00:00 stderr F I1212 16:18:30.401726 12 default_servicecidr_controller.go:110] Starting kubernetes-service-cidr-controller 2025-12-12T16:18:30.401753629+00:00 stderr F I1212 16:18:30.401734 12 shared_informer.go:350] "Waiting for caches to sync" controller="kubernetes-service-cidr-controller" 2025-12-12T16:18:30.401779020+00:00 stderr F I1212 16:18:30.401089 12 gc_controller.go:78] Starting apiserver lease garbage collector 2025-12-12T16:18:30.401813621+00:00 stderr F I1212 16:18:30.401773 12 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:18:30.401846092+00:00 stderr F I1212 16:18:30.401105 12 shared_informer.go:350] "Waiting for caches to sync" controller="ipallocator-repair-controller" 2025-12-12T16:18:30.401869362+00:00 stderr F I1212 16:18:30.401853 12 dynamic_cafile_content.go:161] "Starting controller" name="request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" 2025-12-12T16:18:30.402218001+00:00 stderr F I1212 16:18:30.402147 12 controller.go:142] Starting OpenAPI controller 2025-12-12T16:18:30.402262142+00:00 stderr F I1212 16:18:30.402201 12 controller.go:90] Starting OpenAPI V3 controller 2025-12-12T16:18:30.402262142+00:00 stderr F I1212 16:18:30.402229 12 naming_controller.go:299] Starting NamingConditionController 2025-12-12T16:18:30.402285863+00:00 stderr F I1212 16:18:30.402257 12 establishing_controller.go:81] Starting EstablishingController 2025-12-12T16:18:30.402285863+00:00 stderr F I1212 16:18:30.402272 12 nonstructuralschema_controller.go:195] Starting NonStructuralSchemaConditionController 2025-12-12T16:18:30.402322634+00:00 stderr F I1212 16:18:30.402292 12 apiapproval_controller.go:189] Starting KubernetesAPIApprovalPolicyConformantConditionController 2025-12-12T16:18:30.402355224+00:00 stderr F I1212 16:18:30.402313 12 crd_finalizer.go:269] Starting CRDFinalizer 2025-12-12T16:18:30.404243871+00:00 stderr F I1212 16:18:30.404103 12 remote_available_controller.go:433] Starting RemoteAvailability controller 2025-12-12T16:18:30.404243871+00:00 stderr F I1212 16:18:30.404117 12 cache.go:32] Waiting for caches to sync for RemoteAvailability controller 2025-12-12T16:18:30.405807550+00:00 stderr F I1212 16:18:30.404768 12 controller.go:119] Starting legacy_token_tracking_controller 2025-12-12T16:18:30.405807550+00:00 
stderr F I1212 16:18:30.404782 12 shared_informer.go:350] "Waiting for caches to sync" controller="configmaps" 2025-12-12T16:18:30.406422065+00:00 stderr F W1212 16:18:30.406288 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-cluster-version/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.407257555+00:00 stderr F W1212 16:18:30.407221 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-machine-config-operator/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.407824269+00:00 stderr F W1212 16:18:30.407474 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-kube-storage-version-migrator/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.407824269+00:00 stderr F W1212 16:18:30.407728 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-image-registry/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.407972853+00:00 stderr F W1212 16:18:30.407943 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-kube-storage-version-migrator/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.408061935+00:00 stderr F W1212 16:18:30.408010 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/kube-system/configmaps" (source IP 38.102.83.180:58496, user agent "cluster-network-operator/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.408225029+00:00 stderr F W1212 16:18:30.408174 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-cluster-samples-operator/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.408675591+00:00 stderr F W1212 16:18:30.408645 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-cluster-version/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.408896136+00:00 stderr F W1212 16:18:30.408869 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-kube-controller-manager-operator/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.409460680+00:00 stderr F W1212 16:18:30.409101 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-ingress/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 
2025-12-12T16:18:30.409460680+00:00 stderr F W1212 16:18:30.409421 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-multus/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.409568033+00:00 stderr F I1212 16:18:30.409531 12 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="runtime/asm_amd64.s:1700" 2025-12-12T16:18:30.409716586+00:00 stderr F W1212 16:18:30.409686 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-console-operator/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.409953322+00:00 stderr F W1212 16:18:30.409907 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-66458b6674-brfdj" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.411785477+00:00 stderr F W1212 16:18:30.411720 12 patch_genericapiserver.go:245] Request to "/apis/operator.openshift.io/v1/networks/cluster" (source IP 38.102.83.180:58496, user agent "network-operator/4.20.0-202510211040.p2.gb0393aa.assembly.stream.el9-b0393aa") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.411880730+00:00 stderr F W1212 16:18:30.411825 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-apiserver/endpoints" (source IP 38.102.83.180:58496, user agent "network-operator/4.20.0-202510211040.p2.gb0393aa.assembly.stream.el9-b0393aa") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.413163701+00:00 stderr F I1212 16:18:30.413113 12 patch_genericapiserver.go:241] Loopback request to "/apis/config.openshift.io/v1/clusterversions" (user agent "cluster-kube-controller-manager-operator/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready. This client probably does not watch /readyz and might get inconsistent answers. 2025-12-12T16:18:30.413642153+00:00 stderr F I1212 16:18:30.413539 12 patch_genericapiserver.go:241] Loopback request to "/api/v1/namespaces/openshift-kube-controller-manager-operator/configmaps/csr-signer-ca" (user agent "cluster-kube-controller-manager-operator/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready. This client probably does not watch /readyz and might get inconsistent answers. 2025-12-12T16:18:30.413708085+00:00 stderr F I1212 16:18:30.413633 12 patch_genericapiserver.go:241] Loopback request to "/api/v1/namespaces/openshift-config/configmaps" (user agent "cluster-kube-controller-manager-operator/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready. This client probably does not watch /readyz and might get inconsistent answers. 2025-12-12T16:18:30.415002377+00:00 stderr F W1212 16:18:30.414950 12 patch_genericapiserver.go:245] Request to "/apis/monitoring.coreos.com/v1/servicemonitors" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 
2025-12-12T16:18:30.416854043+00:00 stderr F I1212 16:18:30.416805 12 reflector.go:430] "Caches populated" type="*v1.ResourceQuota" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.418137944+00:00 stderr F I1212 16:18:30.418005 12 reflector.go:430] "Caches populated" type="*v1.PriorityClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.418656117+00:00 stderr F W1212 16:18:30.418588 12 patch_genericapiserver.go:245] Request to "/apis/whereabouts.cni.cncf.io/v1alpha1/ippools" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.419805466+00:00 stderr F W1212 16:18:30.419644 12 patch_genericapiserver.go:245] Request to "/apis/config.openshift.io/v1/clusterversions" (source IP 38.102.83.180:58622, user agent "machine-config-daemon/v0.0.0 (linux/amd64) kubernetes/$Format/config-shared-informer") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.425582029+00:00 stderr F I1212 16:18:30.425478 12 patch_genericapiserver.go:241] Loopback request to "/api/v1/namespaces/openshift-kube-apiserver/pods" (user agent "cluster-kube-apiserver-operator/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready. This client probably does not watch /readyz and might get inconsistent answers. 2025-12-12T16:18:30.426088361+00:00 stderr F W1212 16:18:30.426023 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.428752007+00:00 stderr F I1212 16:18:30.428697 12 patch_genericapiserver.go:241] Loopback request to "/api/v1/namespaces/openshift-kube-apiserver/pods" (user agent "cluster-kube-apiserver-operator/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready. This client probably does not watch /readyz and might get inconsistent answers. 2025-12-12T16:18:30.428805338+00:00 stderr F I1212 16:18:30.428747 12 patch_genericapiserver.go:241] Loopback request to "/api/v1/namespaces/openshift-kube-apiserver/secrets" (user agent "cluster-kube-apiserver-operator/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready. This client probably does not watch /readyz and might get inconsistent answers. 2025-12-12T16:18:30.428908421+00:00 stderr F I1212 16:18:30.428868 12 patch_genericapiserver.go:241] Loopback request to "/api/v1/namespaces/openshift-kube-apiserver/configmaps" (user agent "cluster-kube-apiserver-operator/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready. This client probably does not watch /readyz and might get inconsistent answers. 
2025-12-12T16:18:30.429434864+00:00 stderr F I1212 16:18:30.429380 12 reflector.go:430] "Caches populated" type="*v1.Lease" reflector="runtime/asm_amd64.s:1700" 2025-12-12T16:18:30.429815233+00:00 stderr F I1212 16:18:30.429761 12 reflector.go:430] "Caches populated" type="*v1.VolumeAttachment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.430136701+00:00 stderr F I1212 16:18:30.430092 12 reflector.go:430] "Caches populated" type="*v1.StorageClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.432595292+00:00 stderr F I1212 16:18:30.432544 12 reflector.go:430] "Caches populated" type="*v1.IngressClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.432949541+00:00 stderr F I1212 16:18:30.432888 12 reflector.go:430] "Caches populated" type="*v1.PriorityLevelConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.433126975+00:00 stderr F I1212 16:18:30.433094 12 reflector.go:430] "Caches populated" type="*v1.LimitRange" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.433250928+00:00 stderr F I1212 16:18:30.433151 12 reflector.go:430] "Caches populated" type="*v1.MutatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.433436213+00:00 stderr F I1212 16:18:30.433398 12 reflector.go:430] "Caches populated" type="*v1.RuntimeClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.433614307+00:00 stderr F E1212 16:18:30.433575 12 sdn_readyz_wait.go:100] "Unhandled Error" err="api-openshift-apiserver-available did not find any IPs for kubernetes.default.svc endpoint" logger="UnhandledError" 2025-12-12T16:18:30.433834733+00:00 stderr F I1212 16:18:30.433798 12 reflector.go:430] "Caches populated" type="*v1.CSIDriver" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.434049908+00:00 stderr F I1212 16:18:30.434013 12 reflector.go:430] "Caches populated" type="*v1.PersistentVolume" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.435321329+00:00 stderr F I1212 16:18:30.435270 12 reflector.go:430] "Caches populated" type="*v1.ServiceCIDR" reflector="runtime/asm_amd64.s:1700" 2025-12-12T16:18:30.436590801+00:00 stderr F I1212 16:18:30.436549 12 reflector.go:430] "Caches populated" type="*v1.PersistentVolumeClaim" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.436748135+00:00 stderr F I1212 16:18:30.436716 12 reflector.go:430] "Caches populated" type="*v1.ServiceCIDR" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.437001871+00:00 stderr F I1212 16:18:30.436947 12 reflector.go:430] "Caches populated" type="*v1.ValidatingAdmissionPolicy" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.437264377+00:00 stderr F I1212 16:18:30.437227 12 reflector.go:430] "Caches populated" type="*v1.ValidatingAdmissionPolicyBinding" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.437591825+00:00 stderr F I1212 16:18:30.437538 12 reflector.go:430] "Caches populated" type="*v1.ValidatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.438925728+00:00 stderr F I1212 16:18:30.438882 12 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.439113853+00:00 stderr F I1212 16:18:30.439078 12 reflector.go:430] "Caches populated" type="*v1.ConfigMap" 
reflector="runtime/asm_amd64.s:1700" 2025-12-12T16:18:30.444441435+00:00 stderr F E1212 16:18:30.444318 12 sdn_readyz_wait.go:100] "Unhandled Error" err="api-openshift-oauth-apiserver-available did not find any IPs for kubernetes.default.svc endpoint" logger="UnhandledError" 2025-12-12T16:18:30.444592078+00:00 stderr F I1212 16:18:30.444535 12 reflector.go:430] "Caches populated" type="*v1.IPAddress" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.445279666+00:00 stderr F I1212 16:18:30.445226 12 reflector.go:430] "Caches populated" type="*v1.FlowSchema" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.445497711+00:00 stderr F I1212 16:18:30.445455 12 reflector.go:430] "Caches populated" type="*v1.APIService" reflector="pkg/client/informers/externalversions/factory.go:141" 2025-12-12T16:18:30.449515400+00:00 stderr F I1212 16:18:30.449414 12 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.455840287+00:00 stderr F I1212 16:18:30.455735 12 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.464242884+00:00 stderr F I1212 16:18:30.464134 12 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.467683989+00:00 stderr F I1212 16:18:30.467582 12 reflector.go:430] "Caches populated" type="*v1.ClusterRoleBinding" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.470332505+00:00 stderr F I1212 16:18:30.470217 12 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.475770269+00:00 stderr F I1212 16:18:30.475662 12 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.476593770+00:00 stderr F I1212 16:18:30.476546 12 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.479501622+00:00 stderr F I1212 16:18:30.479388 12 shared_informer.go:357] "Caches are synced" controller="*generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]" 2025-12-12T16:18:30.479501622+00:00 stderr F I1212 16:18:30.479435 12 policy_source.go:240] refreshing policies 2025-12-12T16:18:30.479654495+00:00 stderr F I1212 16:18:30.479607 12 policy_source.go:435] informer started for config.openshift.io/v1, Kind=Infrastructure 2025-12-12T16:18:30.482598378+00:00 stderr F I1212 16:18:30.482511 12 reflector.go:430] "Caches populated" type="*v1.ClusterRole" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.502401768+00:00 stderr F I1212 16:18:30.502254 12 shared_informer.go:357] "Caches are synced" controller="ipallocator-repair-controller" 2025-12-12T16:18:30.503346111+00:00 stderr F I1212 16:18:30.503270 12 cache.go:39] Caches are synced for LocalAvailability controller 2025-12-12T16:18:30.504033188+00:00 stderr F I1212 16:18:30.503967 12 shared_informer.go:357] "Caches are synced" controller="cluster_authentication_trust_controller" 2025-12-12T16:18:30.504325095+00:00 stderr F I1212 16:18:30.504276 12 cache.go:39] Caches are synced for RemoteAvailability controller 2025-12-12T16:18:30.504751496+00:00 
stderr F I1212 16:18:30.504076 12 apf_controller.go:382] Running API Priority and Fairness config worker 2025-12-12T16:18:30.504791507+00:00 stderr F I1212 16:18:30.504732 12 cache.go:39] Caches are synced for APIServiceRegistrationController controller 2025-12-12T16:18:30.506099509+00:00 stderr F I1212 16:18:30.506025 12 shared_informer.go:357] "Caches are synced" controller="kubernetes-service-cidr-controller" 2025-12-12T16:18:30.506138990+00:00 stderr F I1212 16:18:30.506095 12 default_servicecidr_controller.go:136] Shutting down kubernetes-service-cidr-controller 2025-12-12T16:18:30.506396046+00:00 stderr F I1212 16:18:30.506345 12 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.507027032+00:00 stderr F I1212 16:18:30.506973 12 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process 2025-12-12T16:18:30.507106804+00:00 stderr F I1212 16:18:30.507035 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=52 seatDemandAvg=2.0749444946739195 seatDemandStdev=7.436355270488581 seatDemandSmoothed=9.5112997651625 fairFrac=0 currentCL=52 concurrencyDenominator=52 backstop=false 2025-12-12T16:18:30.509934244+00:00 stderr F I1212 16:18:30.509886 12 shared_informer.go:357] "Caches are synced" controller="configmaps" 2025-12-12T16:18:30.511385010+00:00 stderr F I1212 16:18:30.511282 12 healthz.go:280] informer-sync,poststarthook/start-apiextensions-controllers,poststarthook/crd-informer-synced,poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes,poststarthook/apiservice-registration-controller,poststarthook/apiservice-discovery-controller check failed: readyz 2025-12-12T16:18:30.511385010+00:00 stderr F [-]informer-sync failed: 2 informers not started yet: [*v1.Secret *v1.ConfigMap] 2025-12-12T16:18:30.511385010+00:00 stderr F [-]poststarthook/start-apiextensions-controllers failed: not finished 2025-12-12T16:18:30.511385010+00:00 stderr F [-]poststarthook/crd-informer-synced failed: not finished 2025-12-12T16:18:30.511385010+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:30.511385010+00:00 stderr F [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished 2025-12-12T16:18:30.511385010+00:00 stderr F [-]poststarthook/apiservice-registration-controller failed: not finished 2025-12-12T16:18:30.511385010+00:00 stderr F [-]poststarthook/apiservice-discovery-controller failed: not finished 2025-12-12T16:18:30.513160664+00:00 stderr F I1212 16:18:30.512919 12 apf_controller.go:898] Introducing queues for priority level "system": config={"type":"Limited","limited":{"nominalConcurrencyShares":30,"limitResponse":{"type":"Queue","queuing":{"queues":64,"handSize":6,"queueLengthLimit":50}},"lendablePercent":33}}, nominalCL=471, lendableCL=155, borrowingCL=4000, currentCL=394, quiescing=false (shares=0xc00a713b58, shareSum=255) 2025-12-12T16:18:30.513160664+00:00 stderr F I1212 16:18:30.512980 12 apf_controller.go:898] Introducing queues for priority level "global-default": config={"type":"Limited","limited":{"nominalConcurrencyShares":20,"limitResponse":{"type":"Queue","queuing":{"queues":128,"handSize":6,"queueLengthLimit":50}},"lendablePercent":50}}, nominalCL=314, lendableCL=157, borrowingCL=4000, currentCL=236, quiescing=false (shares=0xc00a7139f0, shareSum=255) 2025-12-12T16:18:30.513160664+00:00 stderr F I1212 16:18:30.512991 12 apf_controller.go:898] 
Introducing queues for priority level "workload-low": config={"type":"Limited","limited":{"nominalConcurrencyShares":100,"limitResponse":{"type":"Queue","queuing":{"queues":128,"handSize":6,"queueLengthLimit":50}},"lendablePercent":90}}, nominalCL=1569, lendableCL=1412, borrowingCL=4000, currentCL=863, quiescing=false (shares=0xc00a713c10, shareSum=255) 2025-12-12T16:18:30.513160664+00:00 stderr F I1212 16:18:30.513005 12 apf_controller.go:906] Retaining queues for priority level "catch-all": config={"type":"Limited","limited":{"nominalConcurrencyShares":5,"limitResponse":{"type":"Reject"},"lendablePercent":0}}, nominalCL=79, lendableCL=0, borrowingCL=4000, currentCL=4000, quiescing=false, numPending=0 (shares=0xc00a713978, shareSum=255) 2025-12-12T16:18:30.513160664+00:00 stderr F I1212 16:18:30.513016 12 apf_controller.go:898] Introducing queues for priority level "leader-election": config={"type":"Limited","limited":{"nominalConcurrencyShares":10,"limitResponse":{"type":"Queue","queuing":{"queues":16,"handSize":4,"queueLengthLimit":50}},"lendablePercent":0}}, nominalCL=157, lendableCL=0, borrowingCL=4000, currentCL=157, quiescing=false (shares=0xc00a713a50, shareSum=255) 2025-12-12T16:18:30.513160664+00:00 stderr F I1212 16:18:30.513024 12 apf_controller.go:898] Introducing queues for priority level "node-high": config={"type":"Limited","limited":{"nominalConcurrencyShares":40,"limitResponse":{"type":"Queue","queuing":{"queues":64,"handSize":6,"queueLengthLimit":50}},"lendablePercent":25}}, nominalCL=628, lendableCL=157, borrowingCL=4000, currentCL=550, quiescing=false (shares=0xc00a713a98, shareSum=255) 2025-12-12T16:18:30.513160664+00:00 stderr F I1212 16:18:30.513034 12 apf_controller.go:898] Introducing queues for priority level "workload-high": config={"type":"Limited","limited":{"nominalConcurrencyShares":40,"limitResponse":{"type":"Queue","queuing":{"queues":128,"handSize":6,"queueLengthLimit":50}},"lendablePercent":50}}, nominalCL=628, lendableCL=314, borrowingCL=4000, currentCL=471, quiescing=false (shares=0xc00a713bc0, shareSum=255) 2025-12-12T16:18:30.513160664+00:00 stderr F I1212 16:18:30.513044 12 apf_controller.go:898] Introducing queues for priority level "openshift-control-plane-operators": config={"type":"Limited","limited":{"nominalConcurrencyShares":10,"limitResponse":{"type":"Queue","queuing":{"queues":128,"handSize":6,"queueLengthLimit":50}},"lendablePercent":33}}, nominalCL=157, lendableCL=52, borrowingCL=4000, currentCL=131, quiescing=false (shares=0xc00a713b08, shareSum=255) 2025-12-12T16:18:30.513160664+00:00 stderr F I1212 16:18:30.513083 12 apf_controller.go:493] "Update CurrentCL" plName="openshift-control-plane-operators" seatDemandHighWatermark=0 seatDemandAvg=0 seatDemandStdev=0 seatDemandSmoothed=0 fairFrac=2.2796127562642368 currentCL=239 concurrencyDenominator=239 backstop=false 2025-12-12T16:18:30.513246266+00:00 stderr F I1212 16:18:30.513159 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=15 seatDemandAvg=14.135252905327862 seatDemandStdev=0.34199350421940017 seatDemandSmoothed=14.477246409547263 fairFrac=2.2796127562642368 currentCL=15 concurrencyDenominator=15 backstop=false 2025-12-12T16:18:30.513275856+00:00 stderr F I1212 16:18:30.513242 12 apf_controller.go:493] "Update CurrentCL" plName="system" seatDemandHighWatermark=0 seatDemandAvg=0 seatDemandStdev=0 seatDemandSmoothed=0 fairFrac=2.2796127562642368 currentCL=720 concurrencyDenominator=720 backstop=false 2025-12-12T16:18:30.513323078+00:00 stderr F 
I1212 16:18:30.513281 12 apf_controller.go:493] "Update CurrentCL" plName="global-default" seatDemandHighWatermark=0 seatDemandAvg=0 seatDemandStdev=0 seatDemandSmoothed=0 fairFrac=2.2796127562642368 currentCL=358 concurrencyDenominator=358 backstop=false 2025-12-12T16:18:30.513398930+00:00 stderr F I1212 16:18:30.513345 12 apf_controller.go:493] "Update CurrentCL" plName="workload-low" seatDemandHighWatermark=0 seatDemandAvg=0 seatDemandStdev=0 seatDemandSmoothed=0 fairFrac=2.2796127562642368 currentCL=358 concurrencyDenominator=358 backstop=false 2025-12-12T16:18:30.513576314+00:00 stderr F I1212 16:18:30.513484 12 apf_controller.go:493] "Update CurrentCL" plName="catch-all" seatDemandHighWatermark=0 seatDemandAvg=0 seatDemandStdev=0 seatDemandSmoothed=0 fairFrac=2.2796127562642368 currentCL=180 concurrencyDenominator=180 backstop=false 2025-12-12T16:18:30.513576314+00:00 stderr F I1212 16:18:30.513517 12 apf_controller.go:493] "Update CurrentCL" plName="leader-election" seatDemandHighWatermark=0 seatDemandAvg=0 seatDemandStdev=0 seatDemandSmoothed=0 fairFrac=2.2796127562642368 currentCL=358 concurrencyDenominator=358 backstop=false 2025-12-12T16:18:30.513616715+00:00 stderr F I1212 16:18:30.513559 12 apf_controller.go:493] "Update CurrentCL" plName="node-high" seatDemandHighWatermark=0 seatDemandAvg=0 seatDemandStdev=0 seatDemandSmoothed=0 fairFrac=2.2796127562642368 currentCL=1074 concurrencyDenominator=1074 backstop=false 2025-12-12T16:18:30.513674946+00:00 stderr F I1212 16:18:30.513631 12 apf_controller.go:493] "Update CurrentCL" plName="workload-high" seatDemandHighWatermark=0 seatDemandAvg=0 seatDemandStdev=0 seatDemandSmoothed=0 fairFrac=2.2796127562642368 currentCL=716 concurrencyDenominator=716 backstop=false 2025-12-12T16:18:30.518639249+00:00 stderr F I1212 16:18:30.518560 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:18:30.518791563+00:00 stderr F I1212 16:18:30.518732 12 cidrallocator.go:301] created ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:18:30.518791563+00:00 stderr F I1212 16:18:30.518744 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 190.775µs 2025-12-12T16:18:30.537394643+00:00 stderr F W1212 16:18:30.536709 12 patch_genericapiserver.go:245] Request to "/apis/network.operator.openshift.io/v1/egressrouters" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.539291210+00:00 stderr F W1212 16:18:30.539239 12 patch_genericapiserver.go:245] Request to "/apis/k8s.ovn.org/v1/egressservices" (source IP 38.102.83.180:58664, user agent "crc/ovnkube@23bb8b679668 (linux/amd64) kubernetes/v0.33.3") before server is ready, possibly a sign for a broken load balancer setup. 
2025-12-12T16:18:30.544008856+00:00 stderr F I1212 16:18:30.543791 12 shared_informer.go:357] "Caches are synced" controller="node_authorizer" 2025-12-12T16:18:30.547976164+00:00 stderr F I1212 16:18:30.547885 12 handler_discovery.go:451] Starting ResourceDiscoveryManager 2025-12-12T16:18:30.555757867+00:00 stderr F I1212 16:18:30.555665 12 handler.go:288] Adding GroupVersion user.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.555934941+00:00 stderr F I1212 16:18:30.555899 12 handler.go:288] Adding GroupVersion packages.operators.coreos.com v1 to ResourceManager 2025-12-12T16:18:30.556993327+00:00 stderr F I1212 16:18:30.556946 12 handler.go:288] Adding GroupVersion oauth.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.559042458+00:00 stderr F W1212 16:18:30.558968 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-authentication-operator/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.561251843+00:00 stderr F I1212 16:18:30.561126 12 handler.go:288] Adding GroupVersion template.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.561536790+00:00 stderr F I1212 16:18:30.561471 12 handler.go:288] Adding GroupVersion image.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.562909274+00:00 stderr F I1212 16:18:30.562815 12 handler.go:288] Adding GroupVersion security.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.562909274+00:00 stderr F I1212 16:18:30.562856 12 handler.go:288] Adding GroupVersion quota.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.562909274+00:00 stderr F I1212 16:18:30.562864 12 handler.go:288] Adding GroupVersion project.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.563064057+00:00 stderr F I1212 16:18:30.562969 12 handler.go:288] Adding GroupVersion build.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.563114409+00:00 stderr F I1212 16:18:30.563053 12 handler.go:288] Adding GroupVersion route.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.563354355+00:00 stderr F I1212 16:18:30.563190 12 handler.go:288] Adding GroupVersion apps.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.563964420+00:00 stderr F I1212 16:18:30.563907 12 handler.go:288] Adding GroupVersion authorization.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.567249181+00:00 stderr F E1212 16:18:30.567170 12 controller.go:146] "Unhandled Error" err=< 2025-12-12T16:18:30.567249181+00:00 stderr F Error updating APIService "v1.apps.openshift.io" with err: failed to download v1.apps.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.567249181+00:00 stderr F , Header: map[Audit-Id:[d6b27e7c-cf55-4689-9fbc-8627085d0eac] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:30 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:30.567249181+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:30.568732808+00:00 stderr F I1212 16:18:30.568685 12 policy_source.go:240] refreshing policies 2025-12-12T16:18:30.575067014+00:00 stderr F E1212 16:18:30.574942 12 controller.go:146] "Unhandled Error" err=< 2025-12-12T16:18:30.575067014+00:00 stderr F Error updating 
APIService "v1.authorization.openshift.io" with err: failed to download v1.authorization.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.575067014+00:00 stderr F , Header: map[Audit-Id:[b65ada75-16b1-4cf3-b690-66bddd0804af] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:30 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:30.575067014+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:30.583869202+00:00 stderr F W1212 16:18:30.583799 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-etcd-operator/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.586708032+00:00 stderr F E1212 16:18:30.586666 12 controller.go:146] "Unhandled Error" err=< 2025-12-12T16:18:30.586708032+00:00 stderr F Error updating APIService "v1.build.openshift.io" with err: failed to download v1.build.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.586708032+00:00 stderr F , Header: map[Audit-Id:[46f0fdb3-5007-4d2c-a24a-2aa33a094e5e] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:30 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:30.586708032+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:30.598149955+00:00 stderr F E1212 16:18:30.598030 12 controller.go:146] "Unhandled Error" err=< 2025-12-12T16:18:30.598149955+00:00 stderr F Error updating APIService "v1.image.openshift.io" with err: failed to download v1.image.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.598149955+00:00 stderr F , Header: map[Audit-Id:[3a9e8406-40b8-4257-9301-3f740f4bf372] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:30 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:30.598149955+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:30.610959242+00:00 stderr F I1212 16:18:30.610857 12 healthz.go:280] informer-sync,poststarthook/start-apiextensions-controllers,poststarthook/crd-informer-synced,poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes check failed: readyz 2025-12-12T16:18:30.610959242+00:00 stderr F [-]informer-sync failed: 2 informers not started yet: [*v1.Secret *v1.ConfigMap] 2025-12-12T16:18:30.610959242+00:00 stderr F [-]poststarthook/start-apiextensions-controllers failed: not finished 2025-12-12T16:18:30.610959242+00:00 stderr F [-]poststarthook/crd-informer-synced failed: not finished 2025-12-12T16:18:30.610959242+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:30.610959242+00:00 stderr F 
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished 2025-12-12T16:18:30.614707174+00:00 stderr F E1212 16:18:30.614566 12 controller.go:146] "Unhandled Error" err=< 2025-12-12T16:18:30.614707174+00:00 stderr F Error updating APIService "v1.oauth.openshift.io" with err: failed to download v1.oauth.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.614707174+00:00 stderr F , Header: map[Audit-Id:[1720dbba-ac79-45eb-a6ce-e4862bcb4127] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:30 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:30.614707174+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:30.623948443+00:00 stderr F I1212 16:18:30.623812 12 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.625703726+00:00 stderr F W1212 16:18:30.625647 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-apiserver/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.629404478+00:00 stderr F I1212 16:18:30.627063 12 controller.go:667] quota admission added evaluator for: leases.coordination.k8s.io 2025-12-12T16:18:30.630751271+00:00 stderr F E1212 16:18:30.630150 12 controller.go:146] "Unhandled Error" err=< 2025-12-12T16:18:30.630751271+00:00 stderr F Error updating APIService "v1.packages.operators.coreos.com" with err: failed to download v1.packages.operators.coreos.com: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.630751271+00:00 stderr F , Header: map[Audit-Id:[63ad71b2-818c-41d6-9022-9702829ee704] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:30 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:30.630751271+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:30.635543989+00:00 stderr F E1212 16:18:30.635471 12 controller.go:146] "Unhandled Error" err=< 2025-12-12T16:18:30.635543989+00:00 stderr F Error updating APIService "v1.project.openshift.io" with err: failed to download v1.project.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.635543989+00:00 stderr F , Header: map[Audit-Id:[eb26f6ac-2c1b-449b-bfd4-448bd399e14c] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:30 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:30.635543989+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:30.641442055+00:00 stderr F E1212 16:18:30.641271 12 controller.go:146] "Unhandled Error" err=< 2025-12-12T16:18:30.641442055+00:00 stderr F Error updating APIService 
"v1.quota.openshift.io" with err: failed to download v1.quota.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.641442055+00:00 stderr F , Header: map[Audit-Id:[dea18e32-038e-4049-874b-ab0fff04b661] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:30 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:30.641442055+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:30.645607768+00:00 stderr F I1212 16:18:30.645483 12 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:30.646214393+00:00 stderr F E1212 16:18:30.646116 12 controller.go:146] "Unhandled Error" err=< 2025-12-12T16:18:30.646214393+00:00 stderr F Error updating APIService "v1.route.openshift.io" with err: failed to download v1.route.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.646214393+00:00 stderr F , Header: map[Audit-Id:[b678463d-5d87-494b-ad9f-d8f54520a21e] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:30 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:30.646214393+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:30.650124430+00:00 stderr F E1212 16:18:30.650011 12 controller.go:146] "Unhandled Error" err=< 2025-12-12T16:18:30.650124430+00:00 stderr F Error updating APIService "v1.security.openshift.io" with err: failed to download v1.security.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.650124430+00:00 stderr F , Header: map[Audit-Id:[b3e89703-d29b-4d83-98b9-64065692b4f7] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:30 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:30.650124430+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:30.650655123+00:00 stderr F I1212 16:18:30.650579 12 reflector.go:430] "Caches populated" type="*v1.CustomResourceDefinition" reflector="pkg/client/informers/externalversions/factory.go:141" 2025-12-12T16:18:30.653462002+00:00 stderr F E1212 16:18:30.653388 12 controller.go:146] "Unhandled Error" err=< 2025-12-12T16:18:30.653462002+00:00 stderr F Error updating APIService "v1.template.openshift.io" with err: failed to download v1.template.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.653462002+00:00 stderr F , Header: map[Audit-Id:[4588b236-44fb-4f0a-bf8c-4df5520c8fd1] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:30 GMT] 
X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:30.653462002+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:30.658498367+00:00 stderr F E1212 16:18:30.658424 12 controller.go:146] "Unhandled Error" err=< 2025-12-12T16:18:30.658498367+00:00 stderr F Error updating APIService "v1.user.openshift.io" with err: failed to download v1.user.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.658498367+00:00 stderr F , Header: map[Audit-Id:[5d646c8d-60cb-4378-99c7-dbd1489a5f94] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:30 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:30.658498367+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:30.691041211+00:00 stderr F W1212 16:18:30.690854 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-marketplace/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.701795647+00:00 stderr F I1212 16:18:30.701608 12 shared_informer.go:357] "Caches are synced" controller="crd-autoregister" 2025-12-12T16:18:30.701795647+00:00 stderr F I1212 16:18:30.701660 12 genericapiserver.go:550] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:18:30.701967712+00:00 stderr F I1212 16:18:30.701911 12 handler.go:288] Adding GroupVersion config.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.702002812+00:00 stderr F I1212 16:18:30.701968 12 handler.go:288] Adding GroupVersion operators.coreos.com v1alpha1 to ResourceManager 2025-12-12T16:18:30.702049124+00:00 stderr F I1212 16:18:30.702008 12 handler.go:288] Adding GroupVersion k8s.ovn.org v1 to ResourceManager 2025-12-12T16:18:30.702122545+00:00 stderr F I1212 16:18:30.702073 12 handler.go:288] Adding GroupVersion operator.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.702150196+00:00 stderr F I1212 16:18:30.702105 12 handler.go:288] Adding GroupVersion gateway.networking.k8s.io v1 to ResourceManager 2025-12-12T16:18:30.702150196+00:00 stderr F I1212 16:18:30.702134 12 handler.go:288] Adding GroupVersion gateway.networking.k8s.io v1beta1 to ResourceManager 2025-12-12T16:18:30.702582497+00:00 stderr F I1212 16:18:30.702299 12 handler.go:288] Adding GroupVersion ipam.cluster.x-k8s.io v1alpha1 to ResourceManager 2025-12-12T16:18:30.702582497+00:00 stderr F I1212 16:18:30.702332 12 handler.go:288] Adding GroupVersion ipam.cluster.x-k8s.io v1beta1 to ResourceManager 2025-12-12T16:18:30.702626368+00:00 stderr F I1212 16:18:30.702549 12 handler.go:288] Adding GroupVersion machineconfiguration.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.702626368+00:00 stderr F I1212 16:18:30.702592 12 handler.go:288] Adding GroupVersion machine.openshift.io v1beta1 to ResourceManager 2025-12-12T16:18:30.702675259+00:00 stderr F I1212 16:18:30.702632 12 handler.go:288] Adding GroupVersion monitoring.coreos.com v1 to ResourceManager 2025-12-12T16:18:30.702675259+00:00 stderr F I1212 16:18:30.702653 12 handler.go:288] Adding GroupVersion authorization.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.702728610+00:00 stderr F I1212 16:18:30.702691 12 handler.go:288] Adding 
GroupVersion console.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.702852453+00:00 stderr F I1212 16:18:30.702806 12 aggregator.go:171] initial CRD sync complete... 2025-12-12T16:18:30.702852453+00:00 stderr F I1212 16:18:30.702836 12 handler.go:288] Adding GroupVersion security.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.702886474+00:00 stderr F I1212 16:18:30.702836 12 autoregister_controller.go:144] Starting autoregister controller 2025-12-12T16:18:30.702886474+00:00 stderr F I1212 16:18:30.702850 12 cache.go:32] Waiting for caches to sync for autoregister controller 2025-12-12T16:18:30.702886474+00:00 stderr F I1212 16:18:30.702857 12 cache.go:39] Caches are synced for autoregister controller 2025-12-12T16:18:30.702886474+00:00 stderr F I1212 16:18:30.702864 12 handler.go:288] Adding GroupVersion monitoring.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.702908645+00:00 stderr F I1212 16:18:30.702878 12 controller.go:231] Updating CRD OpenAPI spec because adminnetworkpolicies.policy.networking.k8s.io changed 2025-12-12T16:18:30.702927315+00:00 stderr F I1212 16:18:30.702909 12 controller.go:231] Updating CRD OpenAPI spec because adminpolicybasedexternalroutes.k8s.ovn.org changed 2025-12-12T16:18:30.702957856+00:00 stderr F I1212 16:18:30.702921 12 controller.go:231] Updating CRD OpenAPI spec because alertingrules.monitoring.openshift.io changed 2025-12-12T16:18:30.702957856+00:00 stderr F I1212 16:18:30.702930 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.coreos.com changed 2025-12-12T16:18:30.702957856+00:00 stderr F I1212 16:18:30.702942 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagers.monitoring.coreos.com changed 2025-12-12T16:18:30.702985457+00:00 stderr F I1212 16:18:30.702953 12 controller.go:231] Updating CRD OpenAPI spec because alertrelabelconfigs.monitoring.openshift.io changed 2025-12-12T16:18:30.702985457+00:00 stderr F I1212 16:18:30.702962 12 controller.go:231] Updating CRD OpenAPI spec because apirequestcounts.apiserver.openshift.io changed 2025-12-12T16:18:30.702985457+00:00 stderr F I1212 16:18:30.702969 12 controller.go:231] Updating CRD OpenAPI spec because apiservers.config.openshift.io changed 2025-12-12T16:18:30.703006207+00:00 stderr F I1212 16:18:30.702977 12 controller.go:231] Updating CRD OpenAPI spec because authentications.config.openshift.io changed 2025-12-12T16:18:30.703006207+00:00 stderr F I1212 16:18:30.702991 12 controller.go:231] Updating CRD OpenAPI spec because authentications.operator.openshift.io changed 2025-12-12T16:18:30.703026018+00:00 stderr F I1212 16:18:30.702999 12 controller.go:231] Updating CRD OpenAPI spec because baselineadminnetworkpolicies.policy.networking.k8s.io changed 2025-12-12T16:18:30.703026018+00:00 stderr F I1212 16:18:30.703008 12 controller.go:231] Updating CRD OpenAPI spec because builds.config.openshift.io changed 2025-12-12T16:18:30.703026018+00:00 stderr F I1212 16:18:30.703015 12 controller.go:231] Updating CRD OpenAPI spec because catalogsources.operators.coreos.com changed 2025-12-12T16:18:30.703055778+00:00 stderr F I1212 16:18:30.703022 12 controller.go:231] Updating CRD OpenAPI spec because clusterautoscalers.autoscaling.openshift.io changed 2025-12-12T16:18:30.703055778+00:00 stderr F I1212 16:18:30.703030 12 controller.go:231] Updating CRD OpenAPI spec because clustercsidrivers.operator.openshift.io changed 2025-12-12T16:18:30.703055778+00:00 stderr F I1212 16:18:30.703038 12 controller.go:231] Updating CRD OpenAPI spec 
because clusterimagepolicies.config.openshift.io changed 2025-12-12T16:18:30.703128680+00:00 stderr F I1212 16:18:30.703046 12 controller.go:231] Updating CRD OpenAPI spec because clusteroperators.config.openshift.io changed 2025-12-12T16:18:30.703128680+00:00 stderr F I1212 16:18:30.703055 12 controller.go:231] Updating CRD OpenAPI spec because clusterresourcequotas.quota.openshift.io changed 2025-12-12T16:18:30.703128680+00:00 stderr F I1212 16:18:30.703062 12 controller.go:231] Updating CRD OpenAPI spec because clusterserviceversions.operators.coreos.com changed 2025-12-12T16:18:30.703128680+00:00 stderr F I1212 16:18:30.703070 12 controller.go:231] Updating CRD OpenAPI spec because clusteruserdefinednetworks.k8s.ovn.org changed 2025-12-12T16:18:30.703128680+00:00 stderr F I1212 16:18:30.703078 12 controller.go:231] Updating CRD OpenAPI spec because clusterversions.config.openshift.io changed 2025-12-12T16:18:30.703128680+00:00 stderr F I1212 16:18:30.703086 12 controller.go:231] Updating CRD OpenAPI spec because configs.imageregistry.operator.openshift.io changed 2025-12-12T16:18:30.703170251+00:00 stderr F I1212 16:18:30.703092 12 handler.go:288] Adding GroupVersion policy.networking.k8s.io v1alpha1 to ResourceManager 2025-12-12T16:18:30.703170251+00:00 stderr F I1212 16:18:30.703095 12 controller.go:231] Updating CRD OpenAPI spec because configs.operator.openshift.io changed 2025-12-12T16:18:30.703170251+00:00 stderr F I1212 16:18:30.703104 12 controller.go:231] Updating CRD OpenAPI spec because configs.samples.operator.openshift.io changed 2025-12-12T16:18:30.703170251+00:00 stderr F I1212 16:18:30.703112 12 controller.go:231] Updating CRD OpenAPI spec because consoleclidownloads.console.openshift.io changed 2025-12-12T16:18:30.703170251+00:00 stderr F I1212 16:18:30.703114 12 handler.go:288] Adding GroupVersion samples.operator.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.703170251+00:00 stderr F I1212 16:18:30.703119 12 controller.go:231] Updating CRD OpenAPI spec because consoleexternalloglinks.console.openshift.io changed 2025-12-12T16:18:30.703170251+00:00 stderr F I1212 16:18:30.703128 12 controller.go:231] Updating CRD OpenAPI spec because consolelinks.console.openshift.io changed 2025-12-12T16:18:30.703170251+00:00 stderr F I1212 16:18:30.703137 12 controller.go:231] Updating CRD OpenAPI spec because consolenotifications.console.openshift.io changed 2025-12-12T16:18:30.703170251+00:00 stderr F I1212 16:18:30.703145 12 controller.go:231] Updating CRD OpenAPI spec because consoleplugins.console.openshift.io changed 2025-12-12T16:18:30.703224573+00:00 stderr F I1212 16:18:30.703155 12 controller.go:231] Updating CRD OpenAPI spec because consolequickstarts.console.openshift.io changed 2025-12-12T16:18:30.703224573+00:00 stderr F I1212 16:18:30.703164 12 controller.go:231] Updating CRD OpenAPI spec because consoles.config.openshift.io changed 2025-12-12T16:18:30.703249463+00:00 stderr F I1212 16:18:30.703223 12 controller.go:231] Updating CRD OpenAPI spec because consoles.operator.openshift.io changed 2025-12-12T16:18:30.703249463+00:00 stderr F I1212 16:18:30.703236 12 controller.go:231] Updating CRD OpenAPI spec because consolesamples.console.openshift.io changed 2025-12-12T16:18:30.703278474+00:00 stderr F I1212 16:18:30.703244 12 controller.go:231] Updating CRD OpenAPI spec because consoleyamlsamples.console.openshift.io changed 2025-12-12T16:18:30.703278474+00:00 stderr F I1212 16:18:30.703254 12 controller.go:231] Updating CRD OpenAPI spec because 
containerruntimeconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:18:30.703308315+00:00 stderr F I1212 16:18:30.703263 12 controller.go:231] Updating CRD OpenAPI spec because controllerconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:18:30.703308315+00:00 stderr F I1212 16:18:30.703272 12 controller.go:231] Updating CRD OpenAPI spec because controlplanemachinesets.machine.openshift.io changed 2025-12-12T16:18:30.703308315+00:00 stderr F I1212 16:18:30.703281 12 controller.go:231] Updating CRD OpenAPI spec because csisnapshotcontrollers.operator.openshift.io changed 2025-12-12T16:18:30.703308315+00:00 stderr F I1212 16:18:30.703289 12 controller.go:231] Updating CRD OpenAPI spec because dnses.config.openshift.io changed 2025-12-12T16:18:30.703348196+00:00 stderr F I1212 16:18:30.703298 12 controller.go:231] Updating CRD OpenAPI spec because dnses.operator.openshift.io changed 2025-12-12T16:18:30.703348196+00:00 stderr F I1212 16:18:30.703314 12 controller.go:231] Updating CRD OpenAPI spec because dnsrecords.ingress.operator.openshift.io changed 2025-12-12T16:18:30.703348196+00:00 stderr F I1212 16:18:30.703323 12 controller.go:231] Updating CRD OpenAPI spec because egressfirewalls.k8s.ovn.org changed 2025-12-12T16:18:30.703382617+00:00 stderr F I1212 16:18:30.703334 12 controller.go:231] Updating CRD OpenAPI spec because egressips.k8s.ovn.org changed 2025-12-12T16:18:30.703382617+00:00 stderr F I1212 16:18:30.703343 12 controller.go:231] Updating CRD OpenAPI spec because egressqoses.k8s.ovn.org changed 2025-12-12T16:18:30.703382617+00:00 stderr F I1212 16:18:30.703353 12 controller.go:231] Updating CRD OpenAPI spec because egressrouters.network.operator.openshift.io changed 2025-12-12T16:18:30.703382617+00:00 stderr F I1212 16:18:30.703361 12 controller.go:231] Updating CRD OpenAPI spec because egressservices.k8s.ovn.org changed 2025-12-12T16:18:30.703382617+00:00 stderr F I1212 16:18:30.703370 12 controller.go:231] Updating CRD OpenAPI spec because etcds.operator.openshift.io changed 2025-12-12T16:18:30.703426268+00:00 stderr F I1212 16:18:30.703382 12 controller.go:231] Updating CRD OpenAPI spec because featuregates.config.openshift.io changed 2025-12-12T16:18:30.703426268+00:00 stderr F I1212 16:18:30.703393 12 controller.go:231] Updating CRD OpenAPI spec because gatewayclasses.gateway.networking.k8s.io changed 2025-12-12T16:18:30.703452798+00:00 stderr F I1212 16:18:30.703405 12 controller.go:231] Updating CRD OpenAPI spec because gateways.gateway.networking.k8s.io changed 2025-12-12T16:18:30.703452798+00:00 stderr F I1212 16:18:30.703415 12 controller.go:231] Updating CRD OpenAPI spec because grpcroutes.gateway.networking.k8s.io changed 2025-12-12T16:18:30.703452798+00:00 stderr F I1212 16:18:30.703422 12 controller.go:231] Updating CRD OpenAPI spec because helmchartrepositories.helm.openshift.io changed 2025-12-12T16:18:30.703452798+00:00 stderr F I1212 16:18:30.703430 12 controller.go:231] Updating CRD OpenAPI spec because httproutes.gateway.networking.k8s.io changed 2025-12-12T16:18:30.703452798+00:00 stderr F I1212 16:18:30.703439 12 controller.go:231] Updating CRD OpenAPI spec because imagecontentpolicies.config.openshift.io changed 2025-12-12T16:18:30.703490929+00:00 stderr F I1212 16:18:30.703448 12 controller.go:231] Updating CRD OpenAPI spec because imagecontentsourcepolicies.operator.openshift.io changed 2025-12-12T16:18:30.703490929+00:00 stderr F I1212 16:18:30.703458 12 controller.go:231] Updating CRD OpenAPI spec because 
imagedigestmirrorsets.config.openshift.io changed 2025-12-12T16:18:30.703490929+00:00 stderr F I1212 16:18:30.703466 12 controller.go:231] Updating CRD OpenAPI spec because imagepolicies.config.openshift.io changed 2025-12-12T16:18:30.703518600+00:00 stderr F I1212 16:18:30.703480 12 controller.go:231] Updating CRD OpenAPI spec because imagepruners.imageregistry.operator.openshift.io changed 2025-12-12T16:18:30.703518600+00:00 stderr F I1212 16:18:30.703488 12 controller.go:231] Updating CRD OpenAPI spec because images.config.openshift.io changed 2025-12-12T16:18:30.703518600+00:00 stderr F I1212 16:18:30.703496 12 controller.go:231] Updating CRD OpenAPI spec because imagetagmirrorsets.config.openshift.io changed 2025-12-12T16:18:30.703518600+00:00 stderr F I1212 16:18:30.703503 12 controller.go:231] Updating CRD OpenAPI spec because infrastructures.config.openshift.io changed 2025-12-12T16:18:30.703543841+00:00 stderr F I1212 16:18:30.703511 12 controller.go:231] Updating CRD OpenAPI spec because ingresscontrollers.operator.openshift.io changed 2025-12-12T16:18:30.703543841+00:00 stderr F I1212 16:18:30.703519 12 controller.go:231] Updating CRD OpenAPI spec because ingresses.config.openshift.io changed 2025-12-12T16:18:30.703543841+00:00 stderr F I1212 16:18:30.703527 12 controller.go:231] Updating CRD OpenAPI spec because installplans.operators.coreos.com changed 2025-12-12T16:18:30.703566551+00:00 stderr F I1212 16:18:30.703534 12 controller.go:231] Updating CRD OpenAPI spec because ipaddressclaims.ipam.cluster.x-k8s.io changed 2025-12-12T16:18:30.703566551+00:00 stderr F I1212 16:18:30.703542 12 controller.go:231] Updating CRD OpenAPI spec because ipaddresses.ipam.cluster.x-k8s.io changed 2025-12-12T16:18:30.703566551+00:00 stderr F I1212 16:18:30.703549 12 controller.go:231] Updating CRD OpenAPI spec because ipamclaims.k8s.cni.cncf.io changed 2025-12-12T16:18:30.703609622+00:00 stderr F I1212 16:18:30.703556 12 controller.go:231] Updating CRD OpenAPI spec because ippools.whereabouts.cni.cncf.io changed 2025-12-12T16:18:30.703609622+00:00 stderr F I1212 16:18:30.703564 12 controller.go:231] Updating CRD OpenAPI spec because kubeapiservers.operator.openshift.io changed 2025-12-12T16:18:30.703609622+00:00 stderr F I1212 16:18:30.703571 12 controller.go:231] Updating CRD OpenAPI spec because kubecontrollermanagers.operator.openshift.io changed 2025-12-12T16:18:30.703609622+00:00 stderr F I1212 16:18:30.703579 12 controller.go:231] Updating CRD OpenAPI spec because kubeletconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:18:30.703609622+00:00 stderr F I1212 16:18:30.703586 12 controller.go:231] Updating CRD OpenAPI spec because kubeschedulers.operator.openshift.io changed 2025-12-12T16:18:30.703609622+00:00 stderr F I1212 16:18:30.703595 12 controller.go:231] Updating CRD OpenAPI spec because kubestorageversionmigrators.operator.openshift.io changed 2025-12-12T16:18:30.703634643+00:00 stderr F I1212 16:18:30.703603 12 controller.go:231] Updating CRD OpenAPI spec because machineautoscalers.autoscaling.openshift.io changed 2025-12-12T16:18:30.703634643+00:00 stderr F I1212 16:18:30.703614 12 controller.go:231] Updating CRD OpenAPI spec because machineconfignodes.machineconfiguration.openshift.io changed 2025-12-12T16:18:30.703654983+00:00 stderr F I1212 16:18:30.703625 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigpools.machineconfiguration.openshift.io changed 2025-12-12T16:18:30.703654983+00:00 stderr F I1212 16:18:30.703633 12 controller.go:231] 
Updating CRD OpenAPI spec because machineconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:18:30.703654983+00:00 stderr F I1212 16:18:30.703641 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigurations.operator.openshift.io changed 2025-12-12T16:18:30.703706615+00:00 stderr F I1212 16:18:30.703649 12 handler.go:288] Adding GroupVersion whereabouts.cni.cncf.io v1alpha1 to ResourceManager 2025-12-12T16:18:30.703706615+00:00 stderr F I1212 16:18:30.703654 12 controller.go:231] Updating CRD OpenAPI spec because machinehealthchecks.machine.openshift.io changed 2025-12-12T16:18:30.703706615+00:00 stderr F I1212 16:18:30.703664 12 controller.go:231] Updating CRD OpenAPI spec because machineosbuilds.machineconfiguration.openshift.io changed 2025-12-12T16:18:30.703706615+00:00 stderr F I1212 16:18:30.703673 12 controller.go:231] Updating CRD OpenAPI spec because machineosconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:18:30.703734045+00:00 stderr F I1212 16:18:30.703681 12 controller.go:231] Updating CRD OpenAPI spec because machines.machine.openshift.io changed 2025-12-12T16:18:30.703734045+00:00 stderr F I1212 16:18:30.703690 12 controller.go:231] Updating CRD OpenAPI spec because machinesets.machine.openshift.io changed 2025-12-12T16:18:30.703734045+00:00 stderr F I1212 16:18:30.703699 12 controller.go:231] Updating CRD OpenAPI spec because metal3remediations.infrastructure.cluster.x-k8s.io changed 2025-12-12T16:18:30.703734045+00:00 stderr F I1212 16:18:30.703709 12 controller.go:231] Updating CRD OpenAPI spec because metal3remediationtemplates.infrastructure.cluster.x-k8s.io changed 2025-12-12T16:18:30.703734045+00:00 stderr F I1212 16:18:30.703717 12 controller.go:231] Updating CRD OpenAPI spec because network-attachment-definitions.k8s.cni.cncf.io changed 2025-12-12T16:18:30.703780786+00:00 stderr F I1212 16:18:30.703725 12 controller.go:231] Updating CRD OpenAPI spec because networks.config.openshift.io changed 2025-12-12T16:18:30.703780786+00:00 stderr F I1212 16:18:30.703750 12 controller.go:231] Updating CRD OpenAPI spec because networks.operator.openshift.io changed 2025-12-12T16:18:30.703780786+00:00 stderr F I1212 16:18:30.703761 12 controller.go:231] Updating CRD OpenAPI spec because nodes.config.openshift.io changed 2025-12-12T16:18:30.703803737+00:00 stderr F I1212 16:18:30.703771 12 controller.go:231] Updating CRD OpenAPI spec because nodeslicepools.whereabouts.cni.cncf.io changed 2025-12-12T16:18:30.703803737+00:00 stderr F I1212 16:18:30.703782 12 controller.go:231] Updating CRD OpenAPI spec because oauths.config.openshift.io changed 2025-12-12T16:18:30.703803737+00:00 stderr F I1212 16:18:30.703790 12 controller.go:231] Updating CRD OpenAPI spec because olmconfigs.operators.coreos.com changed 2025-12-12T16:18:30.703826538+00:00 stderr F I1212 16:18:30.703799 12 controller.go:231] Updating CRD OpenAPI spec because openshiftapiservers.operator.openshift.io changed 2025-12-12T16:18:30.703826538+00:00 stderr F I1212 16:18:30.703808 12 controller.go:231] Updating CRD OpenAPI spec because openshiftcontrollermanagers.operator.openshift.io changed 2025-12-12T16:18:30.703867159+00:00 stderr F I1212 16:18:30.703816 12 controller.go:231] Updating CRD OpenAPI spec because operatorconditions.operators.coreos.com changed 2025-12-12T16:18:30.703867159+00:00 stderr F I1212 16:18:30.703825 12 controller.go:231] Updating CRD OpenAPI spec because operatorgroups.operators.coreos.com changed 2025-12-12T16:18:30.703867159+00:00 stderr F I1212 
16:18:30.703833 12 controller.go:231] Updating CRD OpenAPI spec because operatorhubs.config.openshift.io changed 2025-12-12T16:18:30.703867159+00:00 stderr F I1212 16:18:30.703841 12 controller.go:231] Updating CRD OpenAPI spec because operatorpkis.network.operator.openshift.io changed 2025-12-12T16:18:30.703867159+00:00 stderr F I1212 16:18:30.703849 12 controller.go:231] Updating CRD OpenAPI spec because operators.operators.coreos.com changed 2025-12-12T16:18:30.703889809+00:00 stderr F I1212 16:18:30.703857 12 controller.go:231] Updating CRD OpenAPI spec because overlappingrangeipreservations.whereabouts.cni.cncf.io changed 2025-12-12T16:18:30.703889809+00:00 stderr F I1212 16:18:30.703867 12 controller.go:231] Updating CRD OpenAPI spec because pinnedimagesets.machineconfiguration.openshift.io changed 2025-12-12T16:18:30.703889809+00:00 stderr F I1212 16:18:30.703876 12 controller.go:231] Updating CRD OpenAPI spec because podmonitors.monitoring.coreos.com changed 2025-12-12T16:18:30.703910220+00:00 stderr F I1212 16:18:30.703884 12 controller.go:231] Updating CRD OpenAPI spec because podnetworkconnectivitychecks.controlplane.operator.openshift.io changed 2025-12-12T16:18:30.703910220+00:00 stderr F I1212 16:18:30.703892 12 handler.go:288] Adding GroupVersion operators.coreos.com v1 to ResourceManager 2025-12-12T16:18:30.703945500+00:00 stderr F I1212 16:18:30.703932 12 handler.go:288] Adding GroupVersion operators.coreos.com v1alpha2 to ResourceManager 2025-12-12T16:18:30.704491184+00:00 stderr F I1212 16:18:30.703893 12 controller.go:231] Updating CRD OpenAPI spec because probes.monitoring.coreos.com changed 2025-12-12T16:18:30.704628327+00:00 stderr F I1212 16:18:30.704602 12 controller.go:231] Updating CRD OpenAPI spec because projecthelmchartrepositories.helm.openshift.io changed 2025-12-12T16:18:30.704687859+00:00 stderr F I1212 16:18:30.704671 12 controller.go:231] Updating CRD OpenAPI spec because projects.config.openshift.io changed 2025-12-12T16:18:30.704759801+00:00 stderr F I1212 16:18:30.704739 12 controller.go:231] Updating CRD OpenAPI spec because prometheuses.monitoring.coreos.com changed 2025-12-12T16:18:30.704824442+00:00 stderr F I1212 16:18:30.704807 12 controller.go:231] Updating CRD OpenAPI spec because prometheusrules.monitoring.coreos.com changed 2025-12-12T16:18:30.704889694+00:00 stderr F I1212 16:18:30.704869 12 controller.go:231] Updating CRD OpenAPI spec because proxies.config.openshift.io changed 2025-12-12T16:18:30.704956435+00:00 stderr F I1212 16:18:30.704934 12 controller.go:231] Updating CRD OpenAPI spec because rangeallocations.security.internal.openshift.io changed 2025-12-12T16:18:30.705030577+00:00 stderr F I1212 16:18:30.705010 12 controller.go:231] Updating CRD OpenAPI spec because referencegrants.gateway.networking.k8s.io changed 2025-12-12T16:18:30.705083319+00:00 stderr F I1212 16:18:30.705066 12 controller.go:231] Updating CRD OpenAPI spec because rolebindingrestrictions.authorization.openshift.io changed 2025-12-12T16:18:30.705166361+00:00 stderr F I1212 16:18:30.705131 12 controller.go:231] Updating CRD OpenAPI spec because schedulers.config.openshift.io changed 2025-12-12T16:18:30.705249723+00:00 stderr F I1212 16:18:30.705228 12 controller.go:231] Updating CRD OpenAPI spec because securitycontextconstraints.security.openshift.io changed 2025-12-12T16:18:30.705329445+00:00 stderr F I1212 16:18:30.705310 12 controller.go:231] Updating CRD OpenAPI spec because servicecas.operator.openshift.io changed 2025-12-12T16:18:30.705393526+00:00 stderr 
F I1212 16:18:30.705371 12 controller.go:231] Updating CRD OpenAPI spec because servicemonitors.monitoring.coreos.com changed 2025-12-12T16:18:30.705460588+00:00 stderr F I1212 16:18:30.705442 12 controller.go:231] Updating CRD OpenAPI spec because storages.operator.openshift.io changed 2025-12-12T16:18:30.705521009+00:00 stderr F I1212 16:18:30.705502 12 controller.go:231] Updating CRD OpenAPI spec because storagestates.migration.k8s.io changed 2025-12-12T16:18:30.705587551+00:00 stderr F I1212 16:18:30.705561 12 controller.go:231] Updating CRD OpenAPI spec because storageversionmigrations.migration.k8s.io changed 2025-12-12T16:18:30.705663733+00:00 stderr F I1212 16:18:30.704749 12 handler.go:288] Adding GroupVersion helm.openshift.io v1beta1 to ResourceManager 2025-12-12T16:18:30.705744425+00:00 stderr F I1212 16:18:30.705630 12 controller.go:231] Updating CRD OpenAPI spec because subscriptions.operators.coreos.com changed 2025-12-12T16:18:30.705829237+00:00 stderr F I1212 16:18:30.705809 12 controller.go:231] Updating CRD OpenAPI spec because thanosrulers.monitoring.coreos.com changed 2025-12-12T16:18:30.705891589+00:00 stderr F I1212 16:18:30.705874 12 controller.go:231] Updating CRD OpenAPI spec because userdefinednetworks.k8s.ovn.org changed 2025-12-12T16:18:30.705959610+00:00 stderr F I1212 16:18:30.705730 12 handler.go:288] Adding GroupVersion security.internal.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.706158725+00:00 stderr F I1212 16:18:30.706129 12 handler.go:288] Adding GroupVersion quota.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.706556555+00:00 stderr F I1212 16:18:30.706523 12 handler.go:288] Adding GroupVersion network.operator.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.706772310+00:00 stderr F I1212 16:18:30.706745 12 handler.go:288] Adding GroupVersion k8s.cni.cncf.io v1 to ResourceManager 2025-12-12T16:18:30.706967645+00:00 stderr F I1212 16:18:30.706938 12 handler.go:288] Adding GroupVersion operators.coreos.com v2 to ResourceManager 2025-12-12T16:18:30.707354805+00:00 stderr F I1212 16:18:30.707322 12 handler.go:288] Adding GroupVersion monitoring.coreos.com v1alpha1 to ResourceManager 2025-12-12T16:18:30.707492938+00:00 stderr F I1212 16:18:30.707466 12 handler.go:288] Adding GroupVersion monitoring.coreos.com v1beta1 to ResourceManager 2025-12-12T16:18:30.708380530+00:00 stderr F I1212 16:18:30.708343 12 handler.go:288] Adding GroupVersion autoscaling.openshift.io v1beta1 to ResourceManager 2025-12-12T16:18:30.708864982+00:00 stderr F I1212 16:18:30.708835 12 handler.go:288] Adding GroupVersion infrastructure.cluster.x-k8s.io v1alpha5 to ResourceManager 2025-12-12T16:18:30.708968205+00:00 stderr F I1212 16:18:30.708934 12 handler.go:288] Adding GroupVersion infrastructure.cluster.x-k8s.io v1beta1 to ResourceManager 2025-12-12T16:18:30.709257712+00:00 stderr F I1212 16:18:30.709231 12 handler.go:288] Adding GroupVersion ingress.operator.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.710559244+00:00 stderr F I1212 16:18:30.710527 12 handler.go:288] Adding GroupVersion controlplane.operator.openshift.io v1alpha1 to ResourceManager 2025-12-12T16:18:30.710657296+00:00 stderr F I1212 16:18:30.710624 12 handler.go:288] Adding GroupVersion apiserver.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.710955624+00:00 stderr F I1212 16:18:30.710905 12 healthz.go:280] poststarthook/start-apiextensions-controllers,poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes check failed: readyz 
2025-12-12T16:18:30.710955624+00:00 stderr F [-]poststarthook/start-apiextensions-controllers failed: not finished 2025-12-12T16:18:30.710955624+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:30.710955624+00:00 stderr F [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished 2025-12-12T16:18:30.712780069+00:00 stderr F I1212 16:18:30.712724 12 handler.go:288] Adding GroupVersion machine.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.713083206+00:00 stderr F I1212 16:18:30.713058 12 handler.go:288] Adding GroupVersion migration.k8s.io v1alpha1 to ResourceManager 2025-12-12T16:18:30.713544518+00:00 stderr F I1212 16:18:30.713498 12 handler.go:288] Adding GroupVersion operator.openshift.io v1alpha1 to ResourceManager 2025-12-12T16:18:30.713765333+00:00 stderr F I1212 16:18:30.713741 12 handler.go:288] Adding GroupVersion k8s.cni.cncf.io v1alpha1 to ResourceManager 2025-12-12T16:18:30.713985789+00:00 stderr F I1212 16:18:30.713963 12 handler.go:288] Adding GroupVersion imageregistry.operator.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.714413879+00:00 stderr F I1212 16:18:30.714372 12 handler.go:288] Adding GroupVersion autoscaling.openshift.io v1 to ResourceManager 2025-12-12T16:18:30.716472660+00:00 stderr F W1212 16:18:30.716419 12 patch_genericapiserver.go:245] Request to "/api/v1/secrets" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.725520894+00:00 stderr F W1212 16:18:30.725415 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-console/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.758009517+00:00 stderr F I1212 16:18:30.757889 12 patch_genericapiserver.go:241] Loopback request to "/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc" (user agent "cluster-kube-apiserver-operator/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready. This client probably does not watch /readyz and might get inconsistent answers. 2025-12-12T16:18:30.772819133+00:00 stderr F W1212 16:18:30.772703 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-dns-operator/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.811123260+00:00 stderr F I1212 16:18:30.811020 12 healthz.go:280] poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes check failed: readyz 2025-12-12T16:18:30.811123260+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:30.811123260+00:00 stderr F [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished 2025-12-12T16:18:30.867420112+00:00 stderr F W1212 16:18:30.867317 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/kube-system/configmaps" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 
2025-12-12T16:18:30.882463134+00:00 stderr F W1212 16:18:30.882337 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-network-operator/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.906440357+00:00 stderr F W1212 16:18:30.906328 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-apiserver/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.911114702+00:00 stderr F I1212 16:18:30.911068 12 healthz.go:280] poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes check failed: readyz 2025-12-12T16:18:30.911114702+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:30.911114702+00:00 stderr F [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished 2025-12-12T16:18:30.925718763+00:00 stderr F W1212 16:18:30.925677 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-machine-config-operator/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.945904902+00:00 stderr F W1212 16:18:30.945832 12 patch_genericapiserver.go:245] Request to "/apis/config.openshift.io/v1/clusterversions/version/status" (source IP 38.102.83.180:58678, user agent "cluster-version-operator/v0.0.0 (linux/amd64) kubernetes/$Format/openshift-cluster-version") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.952040744+00:00 stderr F W1212 16:18:30.951995 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-ovn-kubernetes/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.967142217+00:00 stderr F W1212 16:18:30.967084 12 patch_genericapiserver.go:245] Request to "/api/v1/nodes" (source IP 38.102.83.180:58622, user agent "machine-config-daemon/v0.0.0 (linux/amd64) kubernetes/$Format/node-scoped-informer") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.982755853+00:00 stderr F W1212 16:18:30.982633 12 patch_genericapiserver.go:245] Request to "/api/v1/replicationcontrollers" (source IP 38.102.83.180:58682, user agent "kube-scheduler/v1.33.5 (linux/amd64) kubernetes/27f72e0/scheduler") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:30.989533161+00:00 stderr F W1212 16:18:30.989431 12 patch_genericapiserver.go:245] Request to "/apis/config.openshift.io/v1/proxies" (source IP 38.102.83.180:58496, user agent "network-operator/4.20.0-202510211040.p2.gb0393aa.assembly.stream.el9-b0393aa") before server is ready, possibly a sign for a broken load balancer setup. 
2025-12-12T16:18:30.994941455+00:00 stderr F W1212 16:18:30.994838 12 patch_genericapiserver.go:245] Request to "/apis/controlplane.operator.openshift.io/v1alpha1/podnetworkconnectivitychecks" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.011432612+00:00 stderr F I1212 16:18:31.011305 12 healthz.go:280] poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes check failed: readyz 2025-12-12T16:18:31.011432612+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:31.011432612+00:00 stderr F [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished 2025-12-12T16:18:31.037543938+00:00 stderr F W1212 16:18:31.037428 12 patch_genericapiserver.go:245] Request to "/apis/storage.k8s.io/v1/volumeattachments" (source IP 38.102.83.180:58682, user agent "kube-scheduler/v1.33.5 (linux/amd64) kubernetes/27f72e0/scheduler") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.042772737+00:00 stderr F W1212 16:18:31.042664 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-controller-manager-operator/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.053296187+00:00 stderr F W1212 16:18:31.053142 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-kube-apiserver-operator/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.064699609+00:00 stderr F W1212 16:18:31.064613 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.073967148+00:00 stderr F W1212 16:18:31.073852 12 patch_genericapiserver.go:245] Request to "/api/v1/pods" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.102290049+00:00 stderr F W1212 16:18:31.102155 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-service-ca-operator/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 
2025-12-12T16:18:31.111124817+00:00 stderr F I1212 16:18:31.111010 12 healthz.go:280] poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes check failed: readyz 2025-12-12T16:18:31.111124817+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:31.111124817+00:00 stderr F [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished 2025-12-12T16:18:31.138226517+00:00 stderr F W1212 16:18:31.138076 12 patch_genericapiserver.go:245] Request to "/apis/coordination.k8s.io/v1/namespaces/openshift-cluster-version/leases/version" (source IP 38.102.83.180:58678, user agent "cluster-version-operator/v0.0.0 (linux/amd64) kubernetes/$Format/leader-election") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.145917167+00:00 stderr F W1212 16:18:31.145805 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-authentication-operator/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.181449786+00:00 stderr F W1212 16:18:31.181355 12 patch_genericapiserver.go:245] Request to "/apis/k8s.cni.cncf.io/v1/network-attachment-definitions" (source IP 38.102.83.180:58690, user agent "crc/ovnkube@23bb8b679668 (linux/amd64) kubernetes/v0.33.3") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.206710050+00:00 stderr F W1212 16:18:31.206557 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/kube-system/configmaps" (source IP 38.102.83.180:58496, user agent "cluster-network-operator/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.211148070+00:00 stderr F I1212 16:18:31.211088 12 healthz.go:280] poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes check failed: readyz 2025-12-12T16:18:31.211148070+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:31.211148070+00:00 stderr F [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished 2025-12-12T16:18:31.215022796+00:00 stderr F W1212 16:18:31.214973 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-machine-config-operator/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.250506943+00:00 stderr F W1212 16:18:31.250359 12 patch_genericapiserver.go:245] Request to "/apis/network.operator.openshift.io/v1/operatorpkis" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.252868741+00:00 stderr F W1212 16:18:31.252799 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-dns/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 
2025-12-12T16:18:31.254979174+00:00 stderr F W1212 16:18:31.254934 12 patch_genericapiserver.go:245] Request to "/apis/config.openshift.io/v1/networks" (source IP 38.102.83.180:58496, user agent "network-operator/4.20.0-202510211040.p2.gb0393aa.assembly.stream.el9-b0393aa") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.285428556+00:00 stderr F W1212 16:18:31.285364 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-oauth-apiserver/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.311488891+00:00 stderr F I1212 16:18:31.311369 12 healthz.go:280] poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes check failed: readyz 2025-12-12T16:18:31.311488891+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:31.311488891+00:00 stderr F [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished 2025-12-12T16:18:31.318765091+00:00 stderr F W1212 16:18:31.318696 12 patch_genericapiserver.go:245] Request to "/apis/k8s.cni.cncf.io/v1alpha1/ipamclaims" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.333473664+00:00 stderr F W1212 16:18:31.333410 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-authentication-operator/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.345544873+00:00 stderr F W1212 16:18:31.345458 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-ingress-operator/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.386131126+00:00 stderr F W1212 16:18:31.385936 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-ingress/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.401427594+00:00 stderr F W1212 16:18:31.401321 12 patch_genericapiserver.go:245] Request to "/apis/gateway.networking.k8s.io/v1beta1/referencegrants" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.418387843+00:00 stderr F W1212 16:18:31.418235 12 patch_genericapiserver.go:245] Request to "/apis/storage.k8s.io/v1/storageclasses" (source IP 38.102.83.180:58682, user agent "kube-scheduler/v1.33.5 (linux/amd64) kubernetes/27f72e0/scheduler") before server is ready, possibly a sign for a broken load balancer setup. 
2025-12-12T16:18:31.419363028+00:00 stderr F I1212 16:18:31.419310 12 healthz.go:280] poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes check failed: readyz 2025-12-12T16:18:31.419363028+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:31.419363028+00:00 stderr F [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished 2025-12-12T16:18:31.479400622+00:00 stderr F W1212 16:18:31.478847 12 patch_genericapiserver.go:245] Request to "/apis/helm.openshift.io/v1beta1/projecthelmchartrepositories" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.484160910+00:00 stderr F W1212 16:18:31.483702 12 patch_genericapiserver.go:245] Request to "/apis/k8s.ovn.org/v1/egressips" (source IP 38.102.83.180:58664, user agent "crc/ovnkube@23bb8b679668 (linux/amd64) kubernetes/v0.33.3") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.495753476+00:00 stderr F W1212 16:18:31.495641 12 patch_genericapiserver.go:245] Request to "/apis/rbac.authorization.k8s.io/v1/roles" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.503103158+00:00 stderr F W1212 16:18:31.502990 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-kube-scheduler-operator/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.504106643+00:00 stderr F I1212 16:18:31.504056 12 storage_scheduling.go:111] all system priority classes are created successfully or already exist. 2025-12-12T16:18:31.511064925+00:00 stderr F I1212 16:18:31.510941 12 healthz.go:280] poststarthook/rbac/bootstrap-roles check failed: readyz 2025-12-12T16:18:31.511064925+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:31.516869388+00:00 stderr F E1212 16:18:31.516760 12 controller.go:102] "Unhandled Error" err=< 2025-12-12T16:18:31.516869388+00:00 stderr F loading OpenAPI spec for "v1.image.openshift.io" failed with: failed to download v1.image.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.516869388+00:00 stderr F , Header: map[Audit-Id:[eefa4b25-34b5-4c7e-9406-60991b4cbf84] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:31 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:31.516869388+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:31.518307934+00:00 stderr F I1212 16:18:31.517925 12 controller.go:109] OpenAPI AggregationController: action for item v1.image.openshift.io: Rate Limited Requeue. 
2025-12-12T16:18:31.524219340+00:00 stderr F E1212 16:18:31.524094 12 controller.go:102] "Unhandled Error" err=< 2025-12-12T16:18:31.524219340+00:00 stderr F loading OpenAPI spec for "v1.authorization.openshift.io" failed with: failed to download v1.authorization.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.524219340+00:00 stderr F , Header: map[Audit-Id:[ab1e2bff-91f3-42b2-a96f-a2b148af665e] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:31 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:31.524219340+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:31.525395449+00:00 stderr F I1212 16:18:31.525345 12 controller.go:109] OpenAPI AggregationController: action for item v1.authorization.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.530715561+00:00 stderr F W1212 16:18:31.530603 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-marketplace/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.530970607+00:00 stderr F E1212 16:18:31.530917 12 controller.go:102] "Unhandled Error" err=< 2025-12-12T16:18:31.530970607+00:00 stderr F loading OpenAPI spec for "v1.user.openshift.io" failed with: failed to download v1.user.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.530970607+00:00 stderr F , Header: map[Audit-Id:[99b48849-5dd5-49ed-9c9f-9f5cb4caf60c] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:31 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:31.530970607+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:31.532278479+00:00 stderr F I1212 16:18:31.532079 12 controller.go:109] OpenAPI AggregationController: action for item v1.user.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.535773636+00:00 stderr F W1212 16:18:31.535689 12 patch_genericapiserver.go:245] Request to "/apis/monitoring.openshift.io/v1/alertrelabelconfigs" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 
2025-12-12T16:18:31.536101494+00:00 stderr F E1212 16:18:31.536042 12 controller.go:102] "Unhandled Error" err=< 2025-12-12T16:18:31.536101494+00:00 stderr F loading OpenAPI spec for "v1.security.openshift.io" failed with: failed to download v1.security.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.536101494+00:00 stderr F , Header: map[Audit-Id:[0971f08c-354d-45c0-886e-a4f184007e0c] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:31 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:31.536101494+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:31.539333674+00:00 stderr F I1212 16:18:31.537170 12 controller.go:109] OpenAPI AggregationController: action for item v1.security.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.541152479+00:00 stderr F W1212 16:18:31.541082 12 patch_genericapiserver.go:245] Request to "/apis/gateway.networking.k8s.io/v1/httproutes" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.541821885+00:00 stderr F E1212 16:18:31.541756 12 controller.go:102] "Unhandled Error" err=< 2025-12-12T16:18:31.541821885+00:00 stderr F loading OpenAPI spec for "v1.project.openshift.io" failed with: failed to download v1.project.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.541821885+00:00 stderr F , Header: map[Audit-Id:[6658387e-b2d1-4086-a045-84a85e451ff0] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:31 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:31.541821885+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:31.543007334+00:00 stderr F I1212 16:18:31.542945 12 controller.go:109] OpenAPI AggregationController: action for item v1.project.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.549463514+00:00 stderr F E1212 16:18:31.547594 12 controller.go:102] "Unhandled Error" err=< 2025-12-12T16:18:31.549463514+00:00 stderr F loading OpenAPI spec for "v1.build.openshift.io" failed with: failed to download v1.build.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.549463514+00:00 stderr F , Header: map[Audit-Id:[df40bb53-69e0-4986-a0d0-95a5a27c6f25] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:31 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:31.549463514+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:31.549463514+00:00 stderr F I1212 16:18:31.548645 12 controller.go:109] OpenAPI AggregationController: action for item v1.build.openshift.io: Rate Limited Requeue. 
2025-12-12T16:18:31.553116874+00:00 stderr F E1212 16:18:31.553011 12 controller.go:102] "Unhandled Error" err=< 2025-12-12T16:18:31.553116874+00:00 stderr F loading OpenAPI spec for "v1.template.openshift.io" failed with: failed to download v1.template.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.553116874+00:00 stderr F , Header: map[Audit-Id:[08cc2df8-9876-427c-94e9-5ee70788c6d5] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:31 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:31.553116874+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:31.553247228+00:00 stderr F E1212 16:18:31.553175 12 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1.image.openshift.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError" 2025-12-12T16:18:31.554668413+00:00 stderr F I1212 16:18:31.554596 12 controller.go:126] OpenAPI AggregationController: action for item v1.image.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.554693863+00:00 stderr F I1212 16:18:31.554662 12 controller.go:109] OpenAPI AggregationController: action for item v1.template.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.558558099+00:00 stderr F E1212 16:18:31.558484 12 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1.authorization.openshift.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError" 2025-12-12T16:18:31.558578359+00:00 stderr F E1212 16:18:31.558532 12 controller.go:102] "Unhandled Error" err=< 2025-12-12T16:18:31.558578359+00:00 stderr F loading OpenAPI spec for "v1.quota.openshift.io" failed with: failed to download v1.quota.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.558578359+00:00 stderr F , Header: map[Audit-Id:[592618bd-33ac-4e0f-98ca-3d14e447a2d6] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:31 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:31.558578359+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:31.560125828+00:00 stderr F I1212 16:18:31.560066 12 controller.go:109] OpenAPI AggregationController: action for item v1.quota.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.560194079+00:00 stderr F I1212 16:18:31.560091 12 controller.go:126] OpenAPI AggregationController: action for item v1.authorization.openshift.io: Rate Limited Requeue. 
2025-12-12T16:18:31.563448360+00:00 stderr F E1212 16:18:31.563338 12 controller.go:102] "Unhandled Error" err=< 2025-12-12T16:18:31.563448360+00:00 stderr F loading OpenAPI spec for "v1.oauth.openshift.io" failed with: failed to download v1.oauth.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.563448360+00:00 stderr F , Header: map[Audit-Id:[6de5f773-db98-4d35-9712-c1b4edc001da] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:31 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:31.563448360+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:31.563882751+00:00 stderr F E1212 16:18:31.563830 12 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1.security.openshift.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError" 2025-12-12T16:18:31.564499136+00:00 stderr F I1212 16:18:31.564424 12 controller.go:109] OpenAPI AggregationController: action for item v1.oauth.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.564934897+00:00 stderr F I1212 16:18:31.564878 12 controller.go:126] OpenAPI AggregationController: action for item v1.security.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.568263929+00:00 stderr F E1212 16:18:31.568136 12 controller.go:102] "Unhandled Error" err=< 2025-12-12T16:18:31.568263929+00:00 stderr F loading OpenAPI spec for "v1.packages.operators.coreos.com" failed with: failed to download v1.packages.operators.coreos.com: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.568263929+00:00 stderr F , Header: map[Audit-Id:[8dc374a8-8df7-4750-9171-b17a1327210a] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:31 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:31.568263929+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:31.568594487+00:00 stderr F E1212 16:18:31.568510 12 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1.user.openshift.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError" 2025-12-12T16:18:31.568854663+00:00 stderr F I1212 16:18:31.568820 12 policy_source.go:240] refreshing policies 2025-12-12T16:18:31.569296794+00:00 stderr F I1212 16:18:31.569196 12 controller.go:109] OpenAPI AggregationController: action for item v1.packages.operators.coreos.com: Rate Limited Requeue. 2025-12-12T16:18:31.569634333+00:00 stderr F I1212 16:18:31.569577 12 controller.go:126] OpenAPI AggregationController: action for item v1.user.openshift.io: Rate Limited Requeue. 
2025-12-12T16:18:31.572774900+00:00 stderr F E1212 16:18:31.572652 12 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1.project.openshift.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError" 2025-12-12T16:18:31.573332864+00:00 stderr F E1212 16:18:31.573285 12 controller.go:102] "Unhandled Error" err=< 2025-12-12T16:18:31.573332864+00:00 stderr F loading OpenAPI spec for "v1.apps.openshift.io" failed with: failed to download v1.apps.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.573332864+00:00 stderr F , Header: map[Audit-Id:[50db1f63-6a61-4082-9b5c-d2dc2f0f24a1] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:31 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:31.573332864+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:31.573777365+00:00 stderr F I1212 16:18:31.573725 12 controller.go:126] OpenAPI AggregationController: action for item v1.project.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.574449292+00:00 stderr F I1212 16:18:31.574367 12 controller.go:109] OpenAPI AggregationController: action for item v1.apps.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.578376239+00:00 stderr F E1212 16:18:31.578023 12 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1.build.openshift.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError" 2025-12-12T16:18:31.578376239+00:00 stderr F E1212 16:18:31.578104 12 controller.go:102] "Unhandled Error" err=< 2025-12-12T16:18:31.578376239+00:00 stderr F loading OpenAPI spec for "v1.route.openshift.io" failed with: failed to download v1.route.openshift.io: failed to retrieve openAPI spec, http error: ResponseCode: 500, Body: Internal Server Error: "/openapi/v2": Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.578376239+00:00 stderr F , Header: map[Audit-Id:[f895ced1-22f4-4e03-a9f9-fa1bd99f11ec] Cache-Control:[no-cache, private] Content-Length:[184] Content-Type:[text/plain; charset=utf-8] Date:[Fri, 12 Dec 2025 16:18:31 GMT] X-Content-Type-Options:[nosniff]] 2025-12-12T16:18:31.578376239+00:00 stderr F > logger="UnhandledError" 2025-12-12T16:18:31.578576114+00:00 stderr F W1212 16:18:31.578515 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.579144188+00:00 stderr F I1212 16:18:31.579073 12 controller.go:126] OpenAPI AggregationController: action for item v1.build.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.579238240+00:00 stderr F I1212 16:18:31.579166 12 controller.go:109] OpenAPI AggregationController: action for item v1.route.openshift.io: Rate Limited Requeue. 
2025-12-12T16:18:31.582245344+00:00 stderr F E1212 16:18:31.582154 12 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1.template.openshift.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError" 2025-12-12T16:18:31.583082915+00:00 stderr F W1212 16:18:31.583017 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-config/configmaps" (source IP 38.102.83.180:58678, user agent "cluster-version-operator/v0.0.0 (linux/amd64) kubernetes/$Format/openshift-config") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.583386083+00:00 stderr F I1212 16:18:31.583335 12 controller.go:126] OpenAPI AggregationController: action for item v1.template.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.586203422+00:00 stderr F E1212 16:18:31.586095 12 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1.quota.openshift.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError" 2025-12-12T16:18:31.587203267+00:00 stderr F I1212 16:18:31.587137 12 controller.go:126] OpenAPI AggregationController: action for item v1.quota.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.590189221+00:00 stderr F E1212 16:18:31.590097 12 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1.oauth.openshift.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError" 2025-12-12T16:18:31.591206226+00:00 stderr F I1212 16:18:31.591135 12 controller.go:126] OpenAPI AggregationController: action for item v1.oauth.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.595150414+00:00 stderr F E1212 16:18:31.595087 12 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1.packages.operators.coreos.com\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError" 2025-12-12T16:18:31.596211370+00:00 stderr F I1212 16:18:31.596148 12 controller.go:126] OpenAPI AggregationController: action for item v1.packages.operators.coreos.com: Rate Limited Requeue. 2025-12-12T16:18:31.608417672+00:00 stderr F E1212 16:18:31.608265 12 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1.apps.openshift.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError" 2025-12-12T16:18:31.610672897+00:00 stderr F I1212 16:18:31.610594 12 controller.go:126] OpenAPI AggregationController: action for item v1.apps.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.611128649+00:00 stderr F I1212 16:18:31.611068 12 healthz.go:280] poststarthook/rbac/bootstrap-roles check failed: readyz 2025-12-12T16:18:31.611128649+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:31.619995468+00:00 stderr F W1212 16:18:31.619869 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-dns-operator/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 
2025-12-12T16:18:31.622114420+00:00 stderr F E1212 16:18:31.622020 12 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1.route.openshift.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError" 2025-12-12T16:18:31.623320130+00:00 stderr F I1212 16:18:31.623238 12 controller.go:126] OpenAPI AggregationController: action for item v1.route.openshift.io: Rate Limited Requeue. 2025-12-12T16:18:31.627421651+00:00 stderr F W1212 16:18:31.627277 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-image-registry/secrets" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.709732166+00:00 stderr F W1212 16:18:31.709556 12 patch_genericapiserver.go:245] Request to "/apis/route.openshift.io/v1/routes" (source IP 38.102.83.180:58592, user agent "cluster-policy-controller/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.710538356+00:00 stderr F I1212 16:18:31.710390 12 healthz.go:280] poststarthook/rbac/bootstrap-roles check failed: readyz 2025-12-12T16:18:31.710538356+00:00 stderr F [-]poststarthook/rbac/bootstrap-roles failed: not finished 2025-12-12T16:18:31.738230901+00:00 stderr F W1212 16:18:31.738062 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-kube-apiserver/endpoints" (source IP 38.102.83.180:58496, user agent "network-operator/4.20.0-202510211040.p2.gb0393aa.assembly.stream.el9-b0393aa") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.779368698+00:00 stderr F W1212 16:18:31.779230 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-network-diagnostics/services" (source IP 38.102.83.180:58496, user agent "network-operator/4.20.0-202510211040.p2.gb0393aa.assembly.stream.el9-b0393aa") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.793839996+00:00 stderr F W1212 16:18:31.793686 12 patch_genericapiserver.go:245] Request to "/api/v1/namespaces/openshift-image-registry/configmaps" (source IP 38.102.83.180:58542, user agent "kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0") before server is ready, possibly a sign for a broken load balancer setup. 2025-12-12T16:18:31.797849075+00:00 stderr F W1212 16:18:31.797749 12 patch_genericapiserver.go:245] Request to "/apis/k8s.cni.cncf.io/v1/network-attachment-definitions" (source IP 38.102.83.180:60414, user agent "multus-daemon/v0.0.0 (linux/amd64) kubernetes/$Format") before server is ready, possibly a sign for a broken load balancer setup. 
2025-12-12T16:18:31.811569444+00:00 stderr F I1212 16:18:31.811413 12 patch_genericapiserver.go:97] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-apiserver", Name:"kube-apiserver-crc", UID:"", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'KubeAPIReadyz' readyz=true 2025-12-12T16:18:31.819142291+00:00 stderr F W1212 16:18:31.819001 12 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.126.11] 2025-12-12T16:18:31.821669654+00:00 stderr F I1212 16:18:31.821576 12 controller.go:667] quota admission added evaluator for: endpoints 2025-12-12T16:18:31.826305268+00:00 stderr F I1212 16:18:31.826164 12 controller.go:667] quota admission added evaluator for: endpointslices.discovery.k8s.io 2025-12-12T16:18:31.922612919+00:00 stderr F I1212 16:18:31.922474 12 store.go:1663] "Monitoring resource count at path" resource="grpcroutes.gateway.networking.k8s.io" path="//gateway.networking.k8s.io/grpcroutes" 2025-12-12T16:18:31.923539912+00:00 stderr F I1212 16:18:31.923449 12 cacher.go:469] cacher (grpcroutes.gateway.networking.k8s.io): initialized 2025-12-12T16:18:31.923539912+00:00 stderr F I1212 16:18:31.923499 12 reflector.go:430] "Caches populated" type="gateway.networking.k8s.io/v1, Kind=GRPCRoute" reflector="storage/cacher.go:/gateway.networking.k8s.io/grpcroutes" 2025-12-12T16:18:31.930544575+00:00 stderr F I1212 16:18:31.930426 12 store.go:1663] "Monitoring resource count at path" resource="machineconfigs.machineconfiguration.openshift.io" path="//machineconfiguration.openshift.io/machineconfigs" 2025-12-12T16:18:31.948627163+00:00 stderr F I1212 16:18:31.948464 12 cacher.go:469] cacher (machineconfigs.machineconfiguration.openshift.io): initialized 2025-12-12T16:18:31.948627163+00:00 stderr F I1212 16:18:31.948513 12 reflector.go:430] "Caches populated" type="machineconfiguration.openshift.io/v1, Kind=MachineConfig" reflector="storage/cacher.go:/machineconfiguration.openshift.io/machineconfigs" 2025-12-12T16:18:32.040294889+00:00 stderr F I1212 16:18:32.040127 12 store.go:1663] "Monitoring resource count at path" resource="imagepolicies.config.openshift.io" path="//config.openshift.io/imagepolicies" 2025-12-12T16:18:32.041332224+00:00 stderr F I1212 16:18:32.041266 12 cacher.go:469] cacher (imagepolicies.config.openshift.io): initialized 2025-12-12T16:18:32.041332224+00:00 stderr F I1212 16:18:32.041300 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=ImagePolicy" reflector="storage/cacher.go:/config.openshift.io/imagepolicies" 2025-12-12T16:18:32.052050189+00:00 stderr F I1212 16:18:32.051942 12 store.go:1663] "Monitoring resource count at path" resource="machineautoscalers.autoscaling.openshift.io" path="//autoscaling.openshift.io/machineautoscalers" 2025-12-12T16:18:32.053080715+00:00 stderr F I1212 16:18:32.053009 12 cacher.go:469] cacher (machineautoscalers.autoscaling.openshift.io): initialized 2025-12-12T16:18:32.053080715+00:00 stderr F I1212 16:18:32.053052 12 reflector.go:430] "Caches populated" type="autoscaling.openshift.io/v1beta1, Kind=MachineAutoscaler" reflector="storage/cacher.go:/autoscaling.openshift.io/machineautoscalers" 2025-12-12T16:18:32.112787811+00:00 stderr F I1212 16:18:32.112600 12 store.go:1663] "Monitoring resource count at path" resource="userdefinednetworks.k8s.ovn.org" path="//k8s.ovn.org/userdefinednetworks" 2025-12-12T16:18:32.114077133+00:00 stderr F I1212 16:18:32.113754 12 cacher.go:469] cacher (userdefinednetworks.k8s.ovn.org): initialized 
2025-12-12T16:18:32.114077133+00:00 stderr F I1212 16:18:32.113790 12 reflector.go:430] "Caches populated" type="k8s.ovn.org/v1, Kind=UserDefinedNetwork" reflector="storage/cacher.go:/k8s.ovn.org/userdefinednetworks" 2025-12-12T16:18:32.128588452+00:00 stderr F I1212 16:18:32.128448 12 store.go:1663] "Monitoring resource count at path" resource="egressqoses.k8s.ovn.org" path="//k8s.ovn.org/egressqoses" 2025-12-12T16:18:32.130134340+00:00 stderr F I1212 16:18:32.130078 12 cacher.go:469] cacher (egressqoses.k8s.ovn.org): initialized 2025-12-12T16:18:32.130134340+00:00 stderr F I1212 16:18:32.130103 12 reflector.go:430] "Caches populated" type="k8s.ovn.org/v1, Kind=EgressQoS" reflector="storage/cacher.go:/k8s.ovn.org/egressqoses" 2025-12-12T16:18:32.236564071+00:00 stderr F I1212 16:18:32.235719 12 store.go:1663] "Monitoring resource count at path" resource="adminpolicybasedexternalroutes.k8s.ovn.org" path="//k8s.ovn.org/adminpolicybasedexternalroutes" 2025-12-12T16:18:32.237064403+00:00 stderr F I1212 16:18:32.236923 12 cacher.go:469] cacher (adminpolicybasedexternalroutes.k8s.ovn.org): initialized 2025-12-12T16:18:32.237095864+00:00 stderr F I1212 16:18:32.237034 12 reflector.go:430] "Caches populated" type="k8s.ovn.org/v1, Kind=AdminPolicyBasedExternalRoute" reflector="storage/cacher.go:/k8s.ovn.org/adminpolicybasedexternalroutes" 2025-12-12T16:18:32.302822929+00:00 stderr F I1212 16:18:32.302455 12 store.go:1663] "Monitoring resource count at path" resource="ipaddresses.ipam.cluster.x-k8s.io" path="//ipam.cluster.x-k8s.io/ipaddresses" 2025-12-12T16:18:32.304424499+00:00 stderr F I1212 16:18:32.304304 12 cacher.go:469] cacher (ipaddresses.ipam.cluster.x-k8s.io): initialized 2025-12-12T16:18:32.304424499+00:00 stderr F I1212 16:18:32.304334 12 reflector.go:430] "Caches populated" type="ipam.cluster.x-k8s.io/v1alpha1, Kind=IPAddress" reflector="storage/cacher.go:/ipam.cluster.x-k8s.io/ipaddresses" 2025-12-12T16:18:32.308889919+00:00 stderr F I1212 16:18:32.308745 12 store.go:1663] "Monitoring resource count at path" resource="ipaddresses.ipam.cluster.x-k8s.io" path="//ipam.cluster.x-k8s.io/ipaddresses" 2025-12-12T16:18:32.310493909+00:00 stderr F I1212 16:18:32.309777 12 cacher.go:469] cacher (ipaddresses.ipam.cluster.x-k8s.io): initialized 2025-12-12T16:18:32.310493909+00:00 stderr F I1212 16:18:32.309794 12 reflector.go:430] "Caches populated" type="ipam.cluster.x-k8s.io/v1beta1, Kind=IPAddress" reflector="storage/cacher.go:/ipam.cluster.x-k8s.io/ipaddresses" 2025-12-12T16:18:32.341653459+00:00 stderr F I1212 16:18:32.341479 12 store.go:1663] "Monitoring resource count at path" resource="machines.machine.openshift.io" path="//machine.openshift.io/machines" 2025-12-12T16:18:32.343388172+00:00 stderr F I1212 16:18:32.343289 12 cacher.go:469] cacher (machines.machine.openshift.io): initialized 2025-12-12T16:18:32.343388172+00:00 stderr F I1212 16:18:32.343324 12 reflector.go:430] "Caches populated" type="machine.openshift.io/v1beta1, Kind=Machine" reflector="storage/cacher.go:/machine.openshift.io/machines" 2025-12-12T16:18:32.446196304+00:00 stderr F I1212 16:18:32.446043 12 store.go:1663] "Monitoring resource count at path" resource="operatorconditions.operators.coreos.com" path="//operators.coreos.com/operatorconditions" 2025-12-12T16:18:32.447686491+00:00 stderr F I1212 16:18:32.447620 12 cacher.go:469] cacher (operatorconditions.operators.coreos.com): initialized 2025-12-12T16:18:32.447686491+00:00 stderr F I1212 16:18:32.447653 12 reflector.go:430] "Caches populated" 
type="operators.coreos.com/v1, Kind=OperatorCondition" reflector="storage/cacher.go:/operators.coreos.com/operatorconditions" 2025-12-12T16:18:32.453404872+00:00 stderr F I1212 16:18:32.453291 12 store.go:1663] "Monitoring resource count at path" resource="operatorconditions.operators.coreos.com" path="//operators.coreos.com/operatorconditions" 2025-12-12T16:18:32.454798777+00:00 stderr F I1212 16:18:32.454727 12 cacher.go:469] cacher (operatorconditions.operators.coreos.com): initialized 2025-12-12T16:18:32.454798777+00:00 stderr F I1212 16:18:32.454753 12 reflector.go:430] "Caches populated" type="operators.coreos.com/v2, Kind=OperatorCondition" reflector="storage/cacher.go:/operators.coreos.com/operatorconditions" 2025-12-12T16:18:32.464705881+00:00 stderr F I1212 16:18:32.464581 12 store.go:1663] "Monitoring resource count at path" resource="operatorgroups.operators.coreos.com" path="//operators.coreos.com/operatorgroups" 2025-12-12T16:18:32.467006778+00:00 stderr F I1212 16:18:32.466902 12 cacher.go:469] cacher (operatorgroups.operators.coreos.com): initialized 2025-12-12T16:18:32.467006778+00:00 stderr F I1212 16:18:32.466929 12 reflector.go:430] "Caches populated" type="operators.coreos.com/v1, Kind=OperatorGroup" reflector="storage/cacher.go:/operators.coreos.com/operatorgroups" 2025-12-12T16:18:32.471102470+00:00 stderr F I1212 16:18:32.470989 12 store.go:1663] "Monitoring resource count at path" resource="operatorgroups.operators.coreos.com" path="//operators.coreos.com/operatorgroups" 2025-12-12T16:18:32.474896053+00:00 stderr F I1212 16:18:32.474767 12 cacher.go:469] cacher (operatorgroups.operators.coreos.com): initialized 2025-12-12T16:18:32.474896053+00:00 stderr F I1212 16:18:32.474801 12 reflector.go:430] "Caches populated" type="operators.coreos.com/v1alpha2, Kind=OperatorGroup" reflector="storage/cacher.go:/operators.coreos.com/operatorgroups" 2025-12-12T16:18:32.532268682+00:00 stderr F I1212 16:18:32.532082 12 store.go:1663] "Monitoring resource count at path" resource="podnetworkconnectivitychecks.controlplane.operator.openshift.io" path="//controlplane.operator.openshift.io/podnetworkconnectivitychecks" 2025-12-12T16:18:32.536816004+00:00 stderr F I1212 16:18:32.536705 12 cacher.go:469] cacher (podnetworkconnectivitychecks.controlplane.operator.openshift.io): initialized 2025-12-12T16:18:32.536816004+00:00 stderr F I1212 16:18:32.536739 12 reflector.go:430] "Caches populated" type="controlplane.operator.openshift.io/v1alpha1, Kind=PodNetworkConnectivityCheck" reflector="storage/cacher.go:/controlplane.operator.openshift.io/podnetworkconnectivitychecks" 2025-12-12T16:18:32.621548029+00:00 stderr F I1212 16:18:32.620553 12 store.go:1663] "Monitoring resource count at path" resource="egressfirewalls.k8s.ovn.org" path="//k8s.ovn.org/egressfirewalls" 2025-12-12T16:18:32.622920723+00:00 stderr F I1212 16:18:32.622787 12 cacher.go:469] cacher (egressfirewalls.k8s.ovn.org): initialized 2025-12-12T16:18:32.622920723+00:00 stderr F I1212 16:18:32.622880 12 reflector.go:430] "Caches populated" type="k8s.ovn.org/v1, Kind=EgressFirewall" reflector="storage/cacher.go:/k8s.ovn.org/egressfirewalls" 2025-12-12T16:18:32.767589770+00:00 stderr F I1212 16:18:32.767344 12 store.go:1663] "Monitoring resource count at path" resource="thanosrulers.monitoring.coreos.com" path="//monitoring.coreos.com/thanosrulers" 2025-12-12T16:18:32.768768819+00:00 stderr F I1212 16:18:32.768691 12 cacher.go:469] cacher (thanosrulers.monitoring.coreos.com): initialized 2025-12-12T16:18:32.768768819+00:00 
stderr F I1212 16:18:32.768713 12 reflector.go:430] "Caches populated" type="monitoring.coreos.com/v1, Kind=ThanosRuler" reflector="storage/cacher.go:/monitoring.coreos.com/thanosrulers" 2025-12-12T16:18:32.814398387+00:00 stderr F I1212 16:18:32.814272 12 store.go:1663] "Monitoring resource count at path" resource="clusterversions.config.openshift.io" path="//config.openshift.io/clusterversions" 2025-12-12T16:18:32.816413437+00:00 stderr F I1212 16:18:32.816300 12 cacher.go:469] cacher (clusterversions.config.openshift.io): initialized 2025-12-12T16:18:32.816413437+00:00 stderr F I1212 16:18:32.816321 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=ClusterVersion" reflector="storage/cacher.go:/config.openshift.io/clusterversions" 2025-12-12T16:18:32.878194904+00:00 stderr F I1212 16:18:32.878044 12 store.go:1663] "Monitoring resource count at path" resource="infrastructures.config.openshift.io" path="//config.openshift.io/infrastructures" 2025-12-12T16:18:32.879630590+00:00 stderr F I1212 16:18:32.879561 12 cacher.go:469] cacher (infrastructures.config.openshift.io): initialized 2025-12-12T16:18:32.879630590+00:00 stderr F I1212 16:18:32.879599 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=Infrastructure" reflector="storage/cacher.go:/config.openshift.io/infrastructures" 2025-12-12T16:18:32.886160731+00:00 stderr F I1212 16:18:32.886007 12 store.go:1663] "Monitoring resource count at path" resource="egressservices.k8s.ovn.org" path="//k8s.ovn.org/egressservices" 2025-12-12T16:18:32.887138875+00:00 stderr F I1212 16:18:32.887056 12 cacher.go:469] cacher (egressservices.k8s.ovn.org): initialized 2025-12-12T16:18:32.887138875+00:00 stderr F I1212 16:18:32.887090 12 reflector.go:430] "Caches populated" type="k8s.ovn.org/v1, Kind=EgressService" reflector="storage/cacher.go:/k8s.ovn.org/egressservices" 2025-12-12T16:18:32.938273470+00:00 stderr F I1212 16:18:32.938082 12 store.go:1663] "Monitoring resource count at path" resource="clusteruserdefinednetworks.k8s.ovn.org" path="//k8s.ovn.org/clusteruserdefinednetworks" 2025-12-12T16:18:32.939348306+00:00 stderr F I1212 16:18:32.938945 12 cacher.go:469] cacher (clusteruserdefinednetworks.k8s.ovn.org): initialized 2025-12-12T16:18:32.939348306+00:00 stderr F I1212 16:18:32.938981 12 reflector.go:430] "Caches populated" type="k8s.ovn.org/v1, Kind=ClusterUserDefinedNetwork" reflector="storage/cacher.go:/k8s.ovn.org/clusteruserdefinednetworks" 2025-12-12T16:18:33.446475333+00:00 stderr F I1212 16:18:33.446298 12 store.go:1663] "Monitoring resource count at path" resource="probes.monitoring.coreos.com" path="//monitoring.coreos.com/probes" 2025-12-12T16:18:33.451011575+00:00 stderr F I1212 16:18:33.450307 12 cacher.go:469] cacher (probes.monitoring.coreos.com): initialized 2025-12-12T16:18:33.451011575+00:00 stderr F I1212 16:18:33.450351 12 reflector.go:430] "Caches populated" type="monitoring.coreos.com/v1, Kind=Probe" reflector="storage/cacher.go:/monitoring.coreos.com/probes" 2025-12-12T16:18:33.474656350+00:00 stderr F I1212 16:18:33.474410 12 store.go:1663] "Monitoring resource count at path" resource="prometheusrules.monitoring.coreos.com" path="//monitoring.coreos.com/prometheusrules" 2025-12-12T16:18:33.484876552+00:00 stderr F I1212 16:18:33.484688 12 cacher.go:469] cacher (prometheusrules.monitoring.coreos.com): initialized 2025-12-12T16:18:33.484876552+00:00 stderr F I1212 16:18:33.484737 12 reflector.go:430] "Caches populated" type="monitoring.coreos.com/v1, Kind=PrometheusRule" 
reflector="storage/cacher.go:/monitoring.coreos.com/prometheusrules" 2025-12-12T16:18:33.533710039+00:00 stderr F I1212 16:18:33.533557 12 store.go:1663] "Monitoring resource count at path" resource="machineconfigpools.machineconfiguration.openshift.io" path="//machineconfiguration.openshift.io/machineconfigpools" 2025-12-12T16:18:33.537133694+00:00 stderr F I1212 16:18:33.537006 12 cacher.go:469] cacher (machineconfigpools.machineconfiguration.openshift.io): initialized 2025-12-12T16:18:33.537133694+00:00 stderr F I1212 16:18:33.537049 12 reflector.go:430] "Caches populated" type="machineconfiguration.openshift.io/v1, Kind=MachineConfigPool" reflector="storage/cacher.go:/machineconfiguration.openshift.io/machineconfigpools" 2025-12-12T16:18:33.543675706+00:00 stderr F I1212 16:18:33.543606 12 store.go:1663] "Monitoring resource count at path" resource="securitycontextconstraints.security.openshift.io" path="//security.openshift.io/securitycontextconstraints" 2025-12-12T16:18:33.552018632+00:00 stderr F I1212 16:18:33.551852 12 cacher.go:469] cacher (securitycontextconstraints.security.openshift.io): initialized 2025-12-12T16:18:33.552018632+00:00 stderr F I1212 16:18:33.551897 12 reflector.go:430] "Caches populated" type="security.openshift.io/v1, Kind=SecurityContextConstraints" reflector="storage/cacher.go:/security.openshift.io/securitycontextconstraints" 2025-12-12T16:18:33.582263720+00:00 stderr F I1212 16:18:33.582117 12 store.go:1663] "Monitoring resource count at path" resource="kubeschedulers.operator.openshift.io" path="//operator.openshift.io/kubeschedulers" 2025-12-12T16:18:33.585654724+00:00 stderr F I1212 16:18:33.585088 12 cacher.go:469] cacher (kubeschedulers.operator.openshift.io): initialized 2025-12-12T16:18:33.585654724+00:00 stderr F I1212 16:18:33.585128 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=KubeScheduler" reflector="storage/cacher.go:/operator.openshift.io/kubeschedulers" 2025-12-12T16:18:33.591089768+00:00 stderr F I1212 16:18:33.590956 12 store.go:1663] "Monitoring resource count at path" resource="egressips.k8s.ovn.org" path="//k8s.ovn.org/egressips" 2025-12-12T16:18:33.593848636+00:00 stderr F I1212 16:18:33.593763 12 cacher.go:469] cacher (egressips.k8s.ovn.org): initialized 2025-12-12T16:18:33.593887697+00:00 stderr F I1212 16:18:33.593804 12 reflector.go:430] "Caches populated" type="k8s.ovn.org/v1, Kind=EgressIP" reflector="storage/cacher.go:/k8s.ovn.org/egressips" 2025-12-12T16:18:33.603213288+00:00 stderr F I1212 16:18:33.603041 12 store.go:1663] "Monitoring resource count at path" resource="featuregates.config.openshift.io" path="//config.openshift.io/featuregates" 2025-12-12T16:18:33.605301599+00:00 stderr F I1212 16:18:33.605231 12 cacher.go:469] cacher (featuregates.config.openshift.io): initialized 2025-12-12T16:18:33.605318390+00:00 stderr F I1212 16:18:33.605279 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=FeatureGate" reflector="storage/cacher.go:/config.openshift.io/featuregates" 2025-12-12T16:18:33.718056857+00:00 stderr F I1212 16:18:33.717882 12 store.go:1663] "Monitoring resource count at path" resource="network-attachment-definitions.k8s.cni.cncf.io" path="//k8s.cni.cncf.io/network-attachment-definitions" 2025-12-12T16:18:33.720230931+00:00 stderr F I1212 16:18:33.720102 12 cacher.go:469] cacher (network-attachment-definitions.k8s.cni.cncf.io): initialized 2025-12-12T16:18:33.720230931+00:00 stderr F I1212 16:18:33.720164 12 reflector.go:430] "Caches populated" 
type="k8s.cni.cncf.io/v1, Kind=NetworkAttachmentDefinition" reflector="storage/cacher.go:/k8s.cni.cncf.io/network-attachment-definitions" 2025-12-12T16:18:33.775625000+00:00 stderr F I1212 16:18:33.775473 12 store.go:1663] "Monitoring resource count at path" resource="clusteroperators.config.openshift.io" path="//config.openshift.io/clusteroperators" 2025-12-12T16:18:33.789957535+00:00 stderr F I1212 16:18:33.789816 12 cacher.go:469] cacher (clusteroperators.config.openshift.io): initialized 2025-12-12T16:18:33.789957535+00:00 stderr F I1212 16:18:33.789863 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=ClusterOperator" reflector="storage/cacher.go:/config.openshift.io/clusteroperators" 2025-12-12T16:18:33.833094011+00:00 stderr F I1212 16:18:33.832931 12 store.go:1663] "Monitoring resource count at path" resource="metal3remediationtemplates.infrastructure.cluster.x-k8s.io" path="//infrastructure.cluster.x-k8s.io/metal3remediationtemplates" 2025-12-12T16:18:33.834850665+00:00 stderr F I1212 16:18:33.834681 12 cacher.go:469] cacher (metal3remediationtemplates.infrastructure.cluster.x-k8s.io): initialized 2025-12-12T16:18:33.834850665+00:00 stderr F I1212 16:18:33.834763 12 reflector.go:430] "Caches populated" type="infrastructure.cluster.x-k8s.io/v1alpha5, Kind=Metal3RemediationTemplate" reflector="storage/cacher.go:/infrastructure.cluster.x-k8s.io/metal3remediationtemplates" 2025-12-12T16:18:33.840475024+00:00 stderr F I1212 16:18:33.840342 12 store.go:1663] "Monitoring resource count at path" resource="metal3remediationtemplates.infrastructure.cluster.x-k8s.io" path="//infrastructure.cluster.x-k8s.io/metal3remediationtemplates" 2025-12-12T16:18:33.841524430+00:00 stderr F I1212 16:18:33.841465 12 cacher.go:469] cacher (metal3remediationtemplates.infrastructure.cluster.x-k8s.io): initialized 2025-12-12T16:18:33.841524430+00:00 stderr F I1212 16:18:33.841488 12 reflector.go:430] "Caches populated" type="infrastructure.cluster.x-k8s.io/v1beta1, Kind=Metal3RemediationTemplate" reflector="storage/cacher.go:/infrastructure.cluster.x-k8s.io/metal3remediationtemplates" 2025-12-12T16:18:33.960255825+00:00 stderr F I1212 16:18:33.960099 12 store.go:1663] "Monitoring resource count at path" resource="ingresscontrollers.operator.openshift.io" path="//operator.openshift.io/ingresscontrollers" 2025-12-12T16:18:33.963337341+00:00 stderr F I1212 16:18:33.963256 12 cacher.go:469] cacher (ingresscontrollers.operator.openshift.io): initialized 2025-12-12T16:18:33.963337341+00:00 stderr F I1212 16:18:33.963284 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=IngressController" reflector="storage/cacher.go:/operator.openshift.io/ingresscontrollers" 2025-12-12T16:18:34.032776578+00:00 stderr F I1212 16:18:34.031916 12 store.go:1663] "Monitoring resource count at path" resource="overlappingrangeipreservations.whereabouts.cni.cncf.io" path="//whereabouts.cni.cncf.io/overlappingrangeipreservations" 2025-12-12T16:18:34.033404153+00:00 stderr F I1212 16:18:34.033345 12 cacher.go:469] cacher (overlappingrangeipreservations.whereabouts.cni.cncf.io): initialized 2025-12-12T16:18:34.033404153+00:00 stderr F I1212 16:18:34.033376 12 reflector.go:430] "Caches populated" type="whereabouts.cni.cncf.io/v1alpha1, Kind=OverlappingRangeIPReservation" reflector="storage/cacher.go:/whereabouts.cni.cncf.io/overlappingrangeipreservations" 2025-12-12T16:18:34.173429615+00:00 stderr F I1212 16:18:34.173290 12 store.go:1663] "Monitoring resource count at path" 
resource="catalogsources.operators.coreos.com" path="//operators.coreos.com/catalogsources" 2025-12-12T16:18:34.176637614+00:00 stderr F I1212 16:18:34.176542 12 cacher.go:469] cacher (catalogsources.operators.coreos.com): initialized 2025-12-12T16:18:34.176637614+00:00 stderr F I1212 16:18:34.176587 12 reflector.go:430] "Caches populated" type="operators.coreos.com/v1alpha1, Kind=CatalogSource" reflector="storage/cacher.go:/operators.coreos.com/catalogsources" 2025-12-12T16:18:34.228959148+00:00 stderr F I1212 16:18:34.228803 12 store.go:1663] "Monitoring resource count at path" resource="clusterresourcequotas.quota.openshift.io" path="//quota.openshift.io/clusterresourcequotas" 2025-12-12T16:18:34.230765693+00:00 stderr F I1212 16:18:34.230598 12 cacher.go:469] cacher (clusterresourcequotas.quota.openshift.io): initialized 2025-12-12T16:18:34.230765693+00:00 stderr F I1212 16:18:34.230693 12 reflector.go:430] "Caches populated" type="quota.openshift.io/v1, Kind=ClusterResourceQuota" reflector="storage/cacher.go:/quota.openshift.io/clusterresourcequotas" 2025-12-12T16:18:34.254260373+00:00 stderr F I1212 16:18:34.254114 12 store.go:1663] "Monitoring resource count at path" resource="nodeslicepools.whereabouts.cni.cncf.io" path="//whereabouts.cni.cncf.io/nodeslicepools" 2025-12-12T16:18:34.255945315+00:00 stderr F I1212 16:18:34.255639 12 cacher.go:469] cacher (nodeslicepools.whereabouts.cni.cncf.io): initialized 2025-12-12T16:18:34.255945315+00:00 stderr F I1212 16:18:34.255686 12 reflector.go:430] "Caches populated" type="whereabouts.cni.cncf.io/v1alpha1, Kind=NodeSlicePool" reflector="storage/cacher.go:/whereabouts.cni.cncf.io/nodeslicepools" 2025-12-12T16:18:34.430646634+00:00 stderr F I1212 16:18:34.430432 12 store.go:1663] "Monitoring resource count at path" resource="kubecontrollermanagers.operator.openshift.io" path="//operator.openshift.io/kubecontrollermanagers" 2025-12-12T16:18:34.434811967+00:00 stderr F I1212 16:18:34.434662 12 cacher.go:469] cacher (kubecontrollermanagers.operator.openshift.io): initialized 2025-12-12T16:18:34.434811967+00:00 stderr F I1212 16:18:34.434711 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=KubeControllerManager" reflector="storage/cacher.go:/operator.openshift.io/kubecontrollermanagers" 2025-12-12T16:18:34.506147511+00:00 stderr F I1212 16:18:34.506015 12 store.go:1663] "Monitoring resource count at path" resource="alertmanagerconfigs.monitoring.coreos.com" path="//monitoring.coreos.com/alertmanagerconfigs" 2025-12-12T16:18:34.507480804+00:00 stderr F I1212 16:18:34.507403 12 cacher.go:469] cacher (alertmanagerconfigs.monitoring.coreos.com): initialized 2025-12-12T16:18:34.507480804+00:00 stderr F I1212 16:18:34.507432 12 reflector.go:430] "Caches populated" type="monitoring.coreos.com/v1alpha1, Kind=AlertmanagerConfig" reflector="storage/cacher.go:/monitoring.coreos.com/alertmanagerconfigs" 2025-12-12T16:18:34.517781059+00:00 stderr F I1212 16:18:34.517678 12 store.go:1663] "Monitoring resource count at path" resource="alertmanagerconfigs.monitoring.coreos.com" path="//monitoring.coreos.com/alertmanagerconfigs" 2025-12-12T16:18:34.518969868+00:00 stderr F I1212 16:18:34.518885 12 cacher.go:469] cacher (alertmanagerconfigs.monitoring.coreos.com): initialized 2025-12-12T16:18:34.518969868+00:00 stderr F I1212 16:18:34.518937 12 reflector.go:430] "Caches populated" type="monitoring.coreos.com/v1beta1, Kind=AlertmanagerConfig" reflector="storage/cacher.go:/monitoring.coreos.com/alertmanagerconfigs" 
2025-12-12T16:18:34.529142489+00:00 stderr F I1212 16:18:34.528947 12 store.go:1663] "Monitoring resource count at path" resource="machinesets.machine.openshift.io" path="//machine.openshift.io/machinesets" 2025-12-12T16:18:34.530539624+00:00 stderr F I1212 16:18:34.530229 12 cacher.go:469] cacher (machinesets.machine.openshift.io): initialized 2025-12-12T16:18:34.530539624+00:00 stderr F I1212 16:18:34.530290 12 reflector.go:430] "Caches populated" type="machine.openshift.io/v1beta1, Kind=MachineSet" reflector="storage/cacher.go:/machine.openshift.io/machinesets" 2025-12-12T16:18:34.664845105+00:00 stderr F I1212 16:18:34.664685 12 store.go:1663] "Monitoring resource count at path" resource="networks.operator.openshift.io" path="//operator.openshift.io/networks" 2025-12-12T16:18:34.672094174+00:00 stderr F I1212 16:18:34.671944 12 cacher.go:469] cacher (networks.operator.openshift.io): initialized 2025-12-12T16:18:34.672094174+00:00 stderr F I1212 16:18:34.671989 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=Network" reflector="storage/cacher.go:/operator.openshift.io/networks" 2025-12-12T16:18:34.845243725+00:00 stderr F I1212 16:18:34.845027 12 store.go:1663] "Monitoring resource count at path" resource="subscriptions.operators.coreos.com" path="//operators.coreos.com/subscriptions" 2025-12-12T16:18:34.847027039+00:00 stderr F I1212 16:18:34.846366 12 cacher.go:469] cacher (subscriptions.operators.coreos.com): initialized 2025-12-12T16:18:34.847027039+00:00 stderr F I1212 16:18:34.846418 12 reflector.go:430] "Caches populated" type="operators.coreos.com/v1alpha1, Kind=Subscription" reflector="storage/cacher.go:/operators.coreos.com/subscriptions" 2025-12-12T16:18:34.862257725+00:00 stderr F I1212 16:18:34.862134 12 store.go:1663] "Monitoring resource count at path" resource="adminnetworkpolicies.policy.networking.k8s.io" path="//policy.networking.k8s.io/adminnetworkpolicies" 2025-12-12T16:18:34.864302386+00:00 stderr F I1212 16:18:34.864167 12 cacher.go:469] cacher (adminnetworkpolicies.policy.networking.k8s.io): initialized 2025-12-12T16:18:34.864355887+00:00 stderr F I1212 16:18:34.864273 12 reflector.go:430] "Caches populated" type="policy.networking.k8s.io/v1alpha1, Kind=AdminNetworkPolicy" reflector="storage/cacher.go:/policy.networking.k8s.io/adminnetworkpolicies" 2025-12-12T16:18:34.930861911+00:00 stderr F I1212 16:18:34.930691 12 store.go:1663] "Monitoring resource count at path" resource="gateways.gateway.networking.k8s.io" path="//gateway.networking.k8s.io/gateways" 2025-12-12T16:18:34.932291497+00:00 stderr F I1212 16:18:34.932207 12 cacher.go:469] cacher (gateways.gateway.networking.k8s.io): initialized 2025-12-12T16:18:34.932349868+00:00 stderr F I1212 16:18:34.932288 12 reflector.go:430] "Caches populated" type="gateway.networking.k8s.io/v1, Kind=Gateway" reflector="storage/cacher.go:/gateway.networking.k8s.io/gateways" 2025-12-12T16:18:34.949436220+00:00 stderr F I1212 16:18:34.949303 12 store.go:1663] "Monitoring resource count at path" resource="gateways.gateway.networking.k8s.io" path="//gateway.networking.k8s.io/gateways" 2025-12-12T16:18:34.951208424+00:00 stderr F I1212 16:18:34.950943 12 cacher.go:469] cacher (gateways.gateway.networking.k8s.io): initialized 2025-12-12T16:18:34.951208424+00:00 stderr F I1212 16:18:34.951051 12 reflector.go:430] "Caches populated" type="gateway.networking.k8s.io/v1beta1, Kind=Gateway" reflector="storage/cacher.go:/gateway.networking.k8s.io/gateways" 2025-12-12T16:18:35.029893140+00:00 stderr F I1212 
16:18:35.029732 12 store.go:1663] "Monitoring resource count at path" resource="controllerconfigs.machineconfiguration.openshift.io" path="//machineconfiguration.openshift.io/controllerconfigs" 2025-12-12T16:18:35.032705619+00:00 stderr F I1212 16:18:35.032629 12 cacher.go:469] cacher (controllerconfigs.machineconfiguration.openshift.io): initialized 2025-12-12T16:18:35.032763180+00:00 stderr F I1212 16:18:35.032719 12 reflector.go:430] "Caches populated" type="machineconfiguration.openshift.io/v1, Kind=ControllerConfig" reflector="storage/cacher.go:/machineconfiguration.openshift.io/controllerconfigs" 2025-12-12T16:18:35.040299207+00:00 stderr F I1212 16:18:35.040136 12 store.go:1663] "Monitoring resource count at path" resource="pinnedimagesets.machineconfiguration.openshift.io" path="//machineconfiguration.openshift.io/pinnedimagesets" 2025-12-12T16:18:35.041070296+00:00 stderr F I1212 16:18:35.041006 12 cacher.go:469] cacher (pinnedimagesets.machineconfiguration.openshift.io): initialized 2025-12-12T16:18:35.041099177+00:00 stderr F I1212 16:18:35.041035 12 reflector.go:430] "Caches populated" type="machineconfiguration.openshift.io/v1, Kind=PinnedImageSet" reflector="storage/cacher.go:/machineconfiguration.openshift.io/pinnedimagesets" 2025-12-12T16:18:35.219556719+00:00 stderr F I1212 16:18:35.219426 12 store.go:1663] "Monitoring resource count at path" resource="alertingrules.monitoring.openshift.io" path="//monitoring.openshift.io/alertingrules" 2025-12-12T16:18:35.220814570+00:00 stderr F I1212 16:18:35.220733 12 cacher.go:469] cacher (alertingrules.monitoring.openshift.io): initialized 2025-12-12T16:18:35.220814570+00:00 stderr F I1212 16:18:35.220769 12 reflector.go:430] "Caches populated" type="monitoring.openshift.io/v1, Kind=AlertingRule" reflector="storage/cacher.go:/monitoring.openshift.io/alertingrules" 2025-12-12T16:18:35.254154654+00:00 stderr F I1212 16:18:35.254001 12 store.go:1663] "Monitoring resource count at path" resource="clusterserviceversions.operators.coreos.com" path="//operators.coreos.com/clusterserviceversions" 2025-12-12T16:18:35.317119991+00:00 stderr F I1212 16:18:35.316928 12 cacher.go:469] cacher (clusterserviceversions.operators.coreos.com): initialized 2025-12-12T16:18:35.317119991+00:00 stderr F I1212 16:18:35.316970 12 reflector.go:430] "Caches populated" type="operators.coreos.com/v1alpha1, Kind=ClusterServiceVersion" reflector="storage/cacher.go:/operators.coreos.com/clusterserviceversions" 2025-12-12T16:18:35.428775931+00:00 stderr F I1212 16:18:35.428649 12 store.go:1663] "Monitoring resource count at path" resource="servicemonitors.monitoring.coreos.com" path="//monitoring.coreos.com/servicemonitors" 2025-12-12T16:18:35.436782599+00:00 stderr F I1212 16:18:35.436647 12 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/config/informers/externalversions/factory.go:125" 2025-12-12T16:18:35.439368633+00:00 stderr F I1212 16:18:35.439290 12 reflector.go:430] "Caches populated" type="*v1.ClusterResourceQuota" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/quota/informers/externalversions/factory.go:125" 2025-12-12T16:18:35.441388343+00:00 stderr F I1212 16:18:35.441250 12 reflector.go:430] "Caches populated" type="*v1.Group" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/user/informers/externalversions/factory.go:125" 2025-12-12T16:18:35.443429614+00:00 stderr F I1212 16:18:35.443067 12 store.go:1663] 
"Monitoring resource count at path" resource="ippools.whereabouts.cni.cncf.io" path="//whereabouts.cni.cncf.io/ippools" 2025-12-12T16:18:35.444477519+00:00 stderr F I1212 16:18:35.444362 12 cacher.go:469] cacher (ippools.whereabouts.cni.cncf.io): initialized 2025-12-12T16:18:35.444477519+00:00 stderr F I1212 16:18:35.444408 12 reflector.go:430] "Caches populated" type="whereabouts.cni.cncf.io/v1alpha1, Kind=IPPool" reflector="storage/cacher.go:/whereabouts.cni.cncf.io/ippools" 2025-12-12T16:18:35.445905905+00:00 stderr F I1212 16:18:35.444676 12 cacher.go:469] cacher (servicemonitors.monitoring.coreos.com): initialized 2025-12-12T16:18:35.445905905+00:00 stderr F I1212 16:18:35.444721 12 reflector.go:430] "Caches populated" type="monitoring.coreos.com/v1, Kind=ServiceMonitor" reflector="storage/cacher.go:/monitoring.coreos.com/servicemonitors" 2025-12-12T16:18:35.445905905+00:00 stderr F I1212 16:18:35.445139 12 reflector.go:430] "Caches populated" type="*v1.SecurityContextConstraints" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/security/informers/externalversions/factory.go:125" 2025-12-12T16:18:35.459593903+00:00 stderr F I1212 16:18:35.459380 12 store.go:1663] "Monitoring resource count at path" resource="apirequestcounts.apiserver.openshift.io" path="//apiserver.openshift.io/apirequestcounts" 2025-12-12T16:18:35.460435164+00:00 stderr F I1212 16:18:35.460361 12 cacher.go:469] cacher (apirequestcounts.apiserver.openshift.io): initialized 2025-12-12T16:18:35.460435164+00:00 stderr F I1212 16:18:35.460408 12 reflector.go:430] "Caches populated" type="apiserver.openshift.io/v1, Kind=APIRequestCount" reflector="storage/cacher.go:/apiserver.openshift.io/apirequestcounts" 2025-12-12T16:18:35.484741595+00:00 stderr F I1212 16:18:35.484570 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Resource=infrastructures" reflector="runtime/asm_amd64.s:1700" 2025-12-12T16:18:35.506364980+00:00 stderr F I1212 16:18:35.505822 12 store.go:1663] "Monitoring resource count at path" resource="kubeapiservers.operator.openshift.io" path="//operator.openshift.io/kubeapiservers" 2025-12-12T16:18:35.511716292+00:00 stderr F I1212 16:18:35.511571 12 cacher.go:469] cacher (kubeapiservers.operator.openshift.io): initialized 2025-12-12T16:18:35.511716292+00:00 stderr F I1212 16:18:35.511626 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=KubeAPIServer" reflector="storage/cacher.go:/operator.openshift.io/kubeapiservers" 2025-12-12T16:18:35.516037189+00:00 stderr F I1212 16:18:35.515928 12 controller.go:667] quota admission added evaluator for: leases.coordination.k8s.io 2025-12-12T16:18:35.517594577+00:00 stderr F I1212 16:18:35.517510 12 trace.go:236] Trace[1927991403]: "Update" accept:application/vnd.kubernetes.protobuf,application/json,audit-id:f98becf6-9261-432d-a399-5fac0a74b9de,client:38.102.83.180,api-group:coordination.k8s.io,api-version:v1,name:crc,subresource:,namespace:kube-node-lease,protocol:HTTP/2.0,resource:leases,scope:resource,url:/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc,user-agent:kubelet/v1.33.5 (linux/amd64) kubernetes/27f72e0,verb:PUT (12-Dec-2025 16:18:34.195) (total time: 1322ms): 2025-12-12T16:18:35.517594577+00:00 stderr F Trace[1927991403]: ["GuaranteedUpdate etcd3" audit-id:f98becf6-9261-432d-a399-5fac0a74b9de,key:/leases/kube-node-lease/crc,type:*coordination.Lease,resource:leases.coordination.k8s.io 1322ms (16:18:34.195) 2025-12-12T16:18:35.517594577+00:00 stderr F Trace[1927991403]: 
---"About to Encode" 1320ms (16:18:35.515)] 2025-12-12T16:18:35.517594577+00:00 stderr F Trace[1927991403]: [1.322458276s] [1.322458276s] END 2025-12-12T16:18:35.518952161+00:00 stderr F I1212 16:18:35.518877 12 store.go:1663] "Monitoring resource count at path" resource="networks.config.openshift.io" path="//config.openshift.io/networks" 2025-12-12T16:18:35.521017902+00:00 stderr F I1212 16:18:35.520858 12 cacher.go:469] cacher (networks.config.openshift.io): initialized 2025-12-12T16:18:35.521017902+00:00 stderr F I1212 16:18:35.520884 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=Network" reflector="storage/cacher.go:/config.openshift.io/networks" 2025-12-12T16:18:35.528846615+00:00 stderr F I1212 16:18:35.528674 12 store.go:1663] "Monitoring resource count at path" resource="ipamclaims.k8s.cni.cncf.io" path="//k8s.cni.cncf.io/ipamclaims" 2025-12-12T16:18:35.531471700+00:00 stderr F I1212 16:18:35.531402 12 cacher.go:469] cacher (ipamclaims.k8s.cni.cncf.io): initialized 2025-12-12T16:18:35.531471700+00:00 stderr F I1212 16:18:35.531441 12 reflector.go:430] "Caches populated" type="k8s.cni.cncf.io/v1alpha1, Kind=IPAMClaim" reflector="storage/cacher.go:/k8s.cni.cncf.io/ipamclaims" 2025-12-12T16:18:35.551888545+00:00 stderr F I1212 16:18:35.551786 12 store.go:1663] "Monitoring resource count at path" resource="egressrouters.network.operator.openshift.io" path="//network.operator.openshift.io/egressrouters" 2025-12-12T16:18:35.553388082+00:00 stderr F I1212 16:18:35.553281 12 cacher.go:469] cacher (egressrouters.network.operator.openshift.io): initialized 2025-12-12T16:18:35.553388082+00:00 stderr F I1212 16:18:35.553326 12 reflector.go:430] "Caches populated" type="network.operator.openshift.io/v1, Kind=EgressRouter" reflector="storage/cacher.go:/network.operator.openshift.io/egressrouters" 2025-12-12T16:18:35.563277777+00:00 stderr F I1212 16:18:35.561244 12 store.go:1663] "Monitoring resource count at path" resource="dnsrecords.ingress.operator.openshift.io" path="//ingress.operator.openshift.io/dnsrecords" 2025-12-12T16:18:35.563277777+00:00 stderr F I1212 16:18:35.562385 12 cacher.go:469] cacher (dnsrecords.ingress.operator.openshift.io): initialized 2025-12-12T16:18:35.563277777+00:00 stderr F I1212 16:18:35.562456 12 reflector.go:430] "Caches populated" type="ingress.operator.openshift.io/v1, Kind=DNSRecord" reflector="storage/cacher.go:/ingress.operator.openshift.io/dnsrecords" 2025-12-12T16:18:35.572267829+00:00 stderr F I1212 16:18:35.572098 12 store.go:1663] "Monitoring resource count at path" resource="rolebindingrestrictions.authorization.openshift.io" path="//authorization.openshift.io/rolebindingrestrictions" 2025-12-12T16:18:35.573836838+00:00 stderr F I1212 16:18:35.573747 12 cacher.go:469] cacher (rolebindingrestrictions.authorization.openshift.io): initialized 2025-12-12T16:18:35.573836838+00:00 stderr F I1212 16:18:35.573800 12 reflector.go:430] "Caches populated" type="authorization.openshift.io/v1, Kind=RoleBindingRestriction" reflector="storage/cacher.go:/authorization.openshift.io/rolebindingrestrictions" 2025-12-12T16:18:35.634447776+00:00 stderr F I1212 16:18:35.633965 12 store.go:1663] "Monitoring resource count at path" resource="alertmanagers.monitoring.coreos.com" path="//monitoring.coreos.com/alertmanagers" 2025-12-12T16:18:35.643316225+00:00 stderr F I1212 16:18:35.640488 12 cacher.go:469] cacher (alertmanagers.monitoring.coreos.com): initialized 2025-12-12T16:18:35.643316225+00:00 stderr F I1212 16:18:35.640546 12 
reflector.go:430] "Caches populated" type="monitoring.coreos.com/v1, Kind=Alertmanager" reflector="storage/cacher.go:/monitoring.coreos.com/alertmanagers" 2025-12-12T16:18:35.655492486+00:00 stderr F I1212 16:18:35.655328 12 store.go:1663] "Monitoring resource count at path" resource="installplans.operators.coreos.com" path="//operators.coreos.com/installplans" 2025-12-12T16:18:35.656985893+00:00 stderr F I1212 16:18:35.656716 12 cacher.go:469] cacher (installplans.operators.coreos.com): initialized 2025-12-12T16:18:35.656985893+00:00 stderr F I1212 16:18:35.656748 12 reflector.go:430] "Caches populated" type="operators.coreos.com/v1alpha1, Kind=InstallPlan" reflector="storage/cacher.go:/operators.coreos.com/installplans" 2025-12-12T16:18:35.819765618+00:00 stderr F I1212 16:18:35.819605 12 store.go:1663] "Monitoring resource count at path" resource="ipaddressclaims.ipam.cluster.x-k8s.io" path="//ipam.cluster.x-k8s.io/ipaddressclaims" 2025-12-12T16:18:35.822318861+00:00 stderr F I1212 16:18:35.821354 12 cacher.go:469] cacher (ipaddressclaims.ipam.cluster.x-k8s.io): initialized 2025-12-12T16:18:35.822318861+00:00 stderr F I1212 16:18:35.821384 12 reflector.go:430] "Caches populated" type="ipam.cluster.x-k8s.io/v1alpha1, Kind=IPAddressClaim" reflector="storage/cacher.go:/ipam.cluster.x-k8s.io/ipaddressclaims" 2025-12-12T16:18:35.825738275+00:00 stderr F I1212 16:18:35.825651 12 store.go:1663] "Monitoring resource count at path" resource="ipaddressclaims.ipam.cluster.x-k8s.io" path="//ipam.cluster.x-k8s.io/ipaddressclaims" 2025-12-12T16:18:35.826575546+00:00 stderr F I1212 16:18:35.826526 12 cacher.go:469] cacher (ipaddressclaims.ipam.cluster.x-k8s.io): initialized 2025-12-12T16:18:35.826589726+00:00 stderr F I1212 16:18:35.826571 12 reflector.go:430] "Caches populated" type="ipam.cluster.x-k8s.io/v1beta1, Kind=IPAddressClaim" reflector="storage/cacher.go:/ipam.cluster.x-k8s.io/ipaddressclaims" 2025-12-12T16:18:35.933984471+00:00 stderr F I1212 16:18:35.933777 12 store.go:1663] "Monitoring resource count at path" resource="machinehealthchecks.machine.openshift.io" path="//machine.openshift.io/machinehealthchecks" 2025-12-12T16:18:35.936344920+00:00 stderr F I1212 16:18:35.936246 12 cacher.go:469] cacher (machinehealthchecks.machine.openshift.io): initialized 2025-12-12T16:18:35.936344920+00:00 stderr F I1212 16:18:35.936270 12 reflector.go:430] "Caches populated" type="machine.openshift.io/v1beta1, Kind=MachineHealthCheck" reflector="storage/cacher.go:/machine.openshift.io/machinehealthchecks" 2025-12-12T16:18:36.004773142+00:00 stderr F I1212 16:18:36.004598 12 store.go:1663] "Monitoring resource count at path" resource="proxies.config.openshift.io" path="//config.openshift.io/proxies" 2025-12-12T16:18:36.006323690+00:00 stderr F I1212 16:18:36.006249 12 cacher.go:469] cacher (proxies.config.openshift.io): initialized 2025-12-12T16:18:36.006323690+00:00 stderr F I1212 16:18:36.006279 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=Proxy" reflector="storage/cacher.go:/config.openshift.io/proxies" 2025-12-12T16:18:36.072469225+00:00 stderr F I1212 16:18:36.072344 12 store.go:1663] "Monitoring resource count at path" resource="controlplanemachinesets.machine.openshift.io" path="//machine.openshift.io/controlplanemachinesets" 2025-12-12T16:18:36.073788258+00:00 stderr F I1212 16:18:36.073694 12 cacher.go:469] cacher (controlplanemachinesets.machine.openshift.io): initialized 2025-12-12T16:18:36.073808968+00:00 stderr F I1212 16:18:36.073774 12 reflector.go:430] "Caches 
populated" type="machine.openshift.io/v1, Kind=ControlPlaneMachineSet" reflector="storage/cacher.go:/machine.openshift.io/controlplanemachinesets" 2025-12-12T16:18:36.115223362+00:00 stderr F I1212 16:18:36.115066 12 store.go:1663] "Monitoring resource count at path" resource="machineconfigurations.operator.openshift.io" path="//operator.openshift.io/machineconfigurations" 2025-12-12T16:18:36.117525579+00:00 stderr F I1212 16:18:36.117434 12 cacher.go:469] cacher (machineconfigurations.operator.openshift.io): initialized 2025-12-12T16:18:36.117525579+00:00 stderr F I1212 16:18:36.117475 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=MachineConfiguration" reflector="storage/cacher.go:/operator.openshift.io/machineconfigurations" 2025-12-12T16:18:36.147450729+00:00 stderr F I1212 16:18:36.147318 12 store.go:1663] "Monitoring resource count at path" resource="prometheuses.monitoring.coreos.com" path="//monitoring.coreos.com/prometheuses" 2025-12-12T16:18:36.148369132+00:00 stderr F I1212 16:18:36.148304 12 cacher.go:469] cacher (prometheuses.monitoring.coreos.com): initialized 2025-12-12T16:18:36.148369132+00:00 stderr F I1212 16:18:36.148328 12 reflector.go:430] "Caches populated" type="monitoring.coreos.com/v1, Kind=Prometheus" reflector="storage/cacher.go:/monitoring.coreos.com/prometheuses" 2025-12-12T16:18:36.264701248+00:00 stderr F I1212 16:18:36.264414 12 store.go:1663] "Monitoring resource count at path" resource="operatorpkis.network.operator.openshift.io" path="//network.operator.openshift.io/operatorpkis" 2025-12-12T16:18:36.266797070+00:00 stderr F I1212 16:18:36.266671 12 cacher.go:469] cacher (operatorpkis.network.operator.openshift.io): initialized 2025-12-12T16:18:36.266797070+00:00 stderr F I1212 16:18:36.266707 12 reflector.go:430] "Caches populated" type="network.operator.openshift.io/v1, Kind=OperatorPKI" reflector="storage/cacher.go:/network.operator.openshift.io/operatorpkis" 2025-12-12T16:18:36.413304062+00:00 stderr F I1212 16:18:36.413191 12 store.go:1663] "Monitoring resource count at path" resource="referencegrants.gateway.networking.k8s.io" path="//gateway.networking.k8s.io/referencegrants" 2025-12-12T16:18:36.414535792+00:00 stderr F I1212 16:18:36.414461 12 cacher.go:469] cacher (referencegrants.gateway.networking.k8s.io): initialized 2025-12-12T16:18:36.414598244+00:00 stderr F I1212 16:18:36.414563 12 reflector.go:430] "Caches populated" type="gateway.networking.k8s.io/v1beta1, Kind=ReferenceGrant" reflector="storage/cacher.go:/gateway.networking.k8s.io/referencegrants" 2025-12-12T16:18:36.561415293+00:00 stderr F I1212 16:18:36.561281 12 store.go:1663] "Monitoring resource count at path" resource="openshiftapiservers.operator.openshift.io" path="//operator.openshift.io/openshiftapiservers" 2025-12-12T16:18:36.564816987+00:00 stderr F I1212 16:18:36.564719 12 cacher.go:469] cacher (openshiftapiservers.operator.openshift.io): initialized 2025-12-12T16:18:36.564816987+00:00 stderr F I1212 16:18:36.564756 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=OpenShiftAPIServer" reflector="storage/cacher.go:/operator.openshift.io/openshiftapiservers" 2025-12-12T16:18:36.632779768+00:00 stderr F I1212 16:18:36.632654 12 store.go:1663] "Monitoring resource count at path" resource="podmonitors.monitoring.coreos.com" path="//monitoring.coreos.com/podmonitors" 2025-12-12T16:18:36.633935786+00:00 stderr F I1212 16:18:36.633732 12 cacher.go:469] cacher (podmonitors.monitoring.coreos.com): initialized 
2025-12-12T16:18:36.633935786+00:00 stderr F I1212 16:18:36.633763 12 reflector.go:430] "Caches populated" type="monitoring.coreos.com/v1, Kind=PodMonitor" reflector="storage/cacher.go:/monitoring.coreos.com/podmonitors" 2025-12-12T16:18:36.935327807+00:00 stderr F I1212 16:18:36.935151 12 store.go:1663] "Monitoring resource count at path" resource="projecthelmchartrepositories.helm.openshift.io" path="//helm.openshift.io/projecthelmchartrepositories" 2025-12-12T16:18:36.937129891+00:00 stderr F I1212 16:18:36.937001 12 cacher.go:469] cacher (projecthelmchartrepositories.helm.openshift.io): initialized 2025-12-12T16:18:36.937129891+00:00 stderr F I1212 16:18:36.937026 12 reflector.go:430] "Caches populated" type="helm.openshift.io/v1beta1, Kind=ProjectHelmChartRepository" reflector="storage/cacher.go:/helm.openshift.io/projecthelmchartrepositories" 2025-12-12T16:18:37.139454553+00:00 stderr F I1212 16:18:37.138848 12 store.go:1663] "Monitoring resource count at path" resource="alertrelabelconfigs.monitoring.openshift.io" path="//monitoring.openshift.io/alertrelabelconfigs" 2025-12-12T16:18:37.140634762+00:00 stderr F I1212 16:18:37.140517 12 cacher.go:469] cacher (alertrelabelconfigs.monitoring.openshift.io): initialized 2025-12-12T16:18:37.140634762+00:00 stderr F I1212 16:18:37.140569 12 reflector.go:430] "Caches populated" type="monitoring.openshift.io/v1, Kind=AlertRelabelConfig" reflector="storage/cacher.go:/monitoring.openshift.io/alertrelabelconfigs" 2025-12-12T16:18:37.253998545+00:00 stderr F I1212 16:18:37.253829 12 store.go:1663] "Monitoring resource count at path" resource="metal3remediations.infrastructure.cluster.x-k8s.io" path="//infrastructure.cluster.x-k8s.io/metal3remediations" 2025-12-12T16:18:37.255636796+00:00 stderr F I1212 16:18:37.255465 12 cacher.go:469] cacher (metal3remediations.infrastructure.cluster.x-k8s.io): initialized 2025-12-12T16:18:37.255636796+00:00 stderr F I1212 16:18:37.255521 12 reflector.go:430] "Caches populated" type="infrastructure.cluster.x-k8s.io/v1alpha5, Kind=Metal3Remediation" reflector="storage/cacher.go:/infrastructure.cluster.x-k8s.io/metal3remediations" 2025-12-12T16:18:37.262339361+00:00 stderr F I1212 16:18:37.262208 12 store.go:1663] "Monitoring resource count at path" resource="metal3remediations.infrastructure.cluster.x-k8s.io" path="//infrastructure.cluster.x-k8s.io/metal3remediations" 2025-12-12T16:18:37.263603853+00:00 stderr F I1212 16:18:37.263531 12 cacher.go:469] cacher (metal3remediations.infrastructure.cluster.x-k8s.io): initialized 2025-12-12T16:18:37.263603853+00:00 stderr F I1212 16:18:37.263565 12 reflector.go:430] "Caches populated" type="infrastructure.cluster.x-k8s.io/v1beta1, Kind=Metal3Remediation" reflector="storage/cacher.go:/infrastructure.cluster.x-k8s.io/metal3remediations" 2025-12-12T16:18:37.390607772+00:00 stderr F I1212 16:18:37.390463 12 store.go:1663] "Monitoring resource count at path" resource="httproutes.gateway.networking.k8s.io" path="//gateway.networking.k8s.io/httproutes" 2025-12-12T16:18:37.391769051+00:00 stderr F I1212 16:18:37.391605 12 cacher.go:469] cacher (httproutes.gateway.networking.k8s.io): initialized 2025-12-12T16:18:37.391769051+00:00 stderr F I1212 16:18:37.391713 12 reflector.go:430] "Caches populated" type="gateway.networking.k8s.io/v1, Kind=HTTPRoute" reflector="storage/cacher.go:/gateway.networking.k8s.io/httproutes" 2025-12-12T16:18:37.446321980+00:00 stderr F I1212 16:18:37.444898 12 store.go:1663] "Monitoring resource count at path" 
resource="httproutes.gateway.networking.k8s.io" path="//gateway.networking.k8s.io/httproutes" 2025-12-12T16:18:37.446669468+00:00 stderr F I1212 16:18:37.446582 12 cacher.go:469] cacher (httproutes.gateway.networking.k8s.io): initialized 2025-12-12T16:18:37.446669468+00:00 stderr F I1212 16:18:37.446642 12 reflector.go:430] "Caches populated" type="gateway.networking.k8s.io/v1beta1, Kind=HTTPRoute" reflector="storage/cacher.go:/gateway.networking.k8s.io/httproutes" 2025-12-12T16:18:38.342781962+00:00 stderr F I1212 16:18:38.341409 12 store.go:1663] "Monitoring resource count at path" resource="baselineadminnetworkpolicies.policy.networking.k8s.io" path="//policy.networking.k8s.io/baselineadminnetworkpolicies" 2025-12-12T16:18:38.343700815+00:00 stderr F I1212 16:18:38.343592 12 cacher.go:469] cacher (baselineadminnetworkpolicies.policy.networking.k8s.io): initialized 2025-12-12T16:18:38.343862929+00:00 stderr F I1212 16:18:38.343774 12 reflector.go:430] "Caches populated" type="policy.networking.k8s.io/v1alpha1, Kind=BaselineAdminNetworkPolicy" reflector="storage/cacher.go:/policy.networking.k8s.io/baselineadminnetworkpolicies" 2025-12-12T16:18:40.508618838+00:00 stderr F I1212 16:18:40.508332 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=14 seatDemandAvg=0.17096130919038166 seatDemandStdev=1.330799170024677 seatDemandSmoothed=14.178810233149623 fairFrac=2.2796127562642368 currentCL=14 concurrencyDenominator=14 backstop=false 2025-12-12T16:18:46.323943271+00:00 stderr F I1212 16:18:46.323757 12 controller.go:667] quota admission added evaluator for: podnetworkconnectivitychecks.controlplane.operator.openshift.io 2025-12-12T16:18:46.323943271+00:00 stderr F I1212 16:18:46.323811 12 controller.go:667] quota admission added evaluator for: podnetworkconnectivitychecks.controlplane.operator.openshift.io 2025-12-12T16:18:50.509231143+00:00 stderr F I1212 16:18:50.509059 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0009931112857613736 seatDemandStdev=0.031498016060308734 seatDemandSmoothed=13.85344489371614 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:18:52.226939539+00:00 stderr F I1212 16:18:52.226803 12 controller.go:667] quota admission added evaluator for: serviceaccounts 2025-12-12T16:18:53.341304699+00:00 stderr F I1212 16:18:53.341116 12 store.go:1663] "Monitoring resource count at path" resource="servicecas.operator.openshift.io" path="//operator.openshift.io/servicecas" 2025-12-12T16:18:53.343055302+00:00 stderr F I1212 16:18:53.342991 12 cacher.go:469] cacher (servicecas.operator.openshift.io): initialized 2025-12-12T16:18:53.343055302+00:00 stderr F I1212 16:18:53.343021 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=ServiceCA" reflector="storage/cacher.go:/operator.openshift.io/servicecas" 2025-12-12T16:18:54.190897173+00:00 stderr F I1212 16:18:54.190741 12 store.go:1663] "Monitoring resource count at path" resource="authentications.operator.openshift.io" path="//operator.openshift.io/authentications" 2025-12-12T16:18:54.196292117+00:00 stderr F I1212 16:18:54.196091 12 cacher.go:469] cacher (authentications.operator.openshift.io): initialized 2025-12-12T16:18:54.196292117+00:00 stderr F I1212 16:18:54.196139 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=Authentication" reflector="storage/cacher.go:/operator.openshift.io/authentications" 2025-12-12T16:18:54.746611031+00:00 stderr 
F I1212 16:18:54.746451 12 store.go:1663] "Monitoring resource count at path" resource="etcds.operator.openshift.io" path="//operator.openshift.io/etcds" 2025-12-12T16:18:54.750842816+00:00 stderr F I1212 16:18:54.750695 12 cacher.go:469] cacher (etcds.operator.openshift.io): initialized 2025-12-12T16:18:54.750842816+00:00 stderr F I1212 16:18:54.750723 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=Etcd" reflector="storage/cacher.go:/operator.openshift.io/etcds" 2025-12-12T16:18:56.090480465+00:00 stderr F I1212 16:18:56.090159 12 store.go:1663] "Monitoring resource count at path" resource="kubestorageversionmigrators.operator.openshift.io" path="//operator.openshift.io/kubestorageversionmigrators" 2025-12-12T16:18:56.093266834+00:00 stderr F I1212 16:18:56.093110 12 cacher.go:469] cacher (kubestorageversionmigrators.operator.openshift.io): initialized 2025-12-12T16:18:56.093266834+00:00 stderr F I1212 16:18:56.093146 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=KubeStorageVersionMigrator" reflector="storage/cacher.go:/operator.openshift.io/kubestorageversionmigrators" 2025-12-12T16:18:56.816697179+00:00 stderr F I1212 16:18:56.816547 12 store.go:1663] "Monitoring resource count at path" resource="imagetagmirrorsets.config.openshift.io" path="//config.openshift.io/imagetagmirrorsets" 2025-12-12T16:18:56.818570206+00:00 stderr F I1212 16:18:56.818496 12 cacher.go:469] cacher (imagetagmirrorsets.config.openshift.io): initialized 2025-12-12T16:18:56.818570206+00:00 stderr F I1212 16:18:56.818542 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=ImageTagMirrorSet" reflector="storage/cacher.go:/config.openshift.io/imagetagmirrorsets" 2025-12-12T16:18:56.877052022+00:00 stderr F I1212 16:18:56.876908 12 store.go:1663] "Monitoring resource count at path" resource="storageversionmigrations.migration.k8s.io" path="//migration.k8s.io/storageversionmigrations" 2025-12-12T16:18:56.881165783+00:00 stderr F I1212 16:18:56.881056 12 cacher.go:469] cacher (storageversionmigrations.migration.k8s.io): initialized 2025-12-12T16:18:56.881165783+00:00 stderr F I1212 16:18:56.881090 12 reflector.go:430] "Caches populated" type="migration.k8s.io/v1alpha1, Kind=StorageVersionMigration" reflector="storage/cacher.go:/migration.k8s.io/storageversionmigrations" 2025-12-12T16:18:57.174391363+00:00 stderr F I1212 16:18:57.174215 12 controller.go:667] quota admission added evaluator for: csistoragecapacities.storage.k8s.io 2025-12-12T16:18:57.174391363+00:00 stderr F I1212 16:18:57.174258 12 controller.go:667] quota admission added evaluator for: csistoragecapacities.storage.k8s.io 2025-12-12T16:18:57.470376190+00:00 stderr F I1212 16:18:57.470241 12 store.go:1663] "Monitoring resource count at path" resource="schedulers.config.openshift.io" path="//config.openshift.io/schedulers" 2025-12-12T16:18:57.472053132+00:00 stderr F I1212 16:18:57.471999 12 cacher.go:469] cacher (schedulers.config.openshift.io): initialized 2025-12-12T16:18:57.472087773+00:00 stderr F I1212 16:18:57.472060 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=Scheduler" reflector="storage/cacher.go:/config.openshift.io/schedulers" 2025-12-12T16:18:57.496793193+00:00 stderr F I1212 16:18:57.496633 12 store.go:1663] "Monitoring resource count at path" resource="apiservers.config.openshift.io" path="//config.openshift.io/apiservers" 2025-12-12T16:18:57.498541067+00:00 stderr F I1212 16:18:57.498449 12 cacher.go:469] cacher 
(apiservers.config.openshift.io): initialized 2025-12-12T16:18:57.498541067+00:00 stderr F I1212 16:18:57.498491 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=APIServer" reflector="storage/cacher.go:/config.openshift.io/apiservers" 2025-12-12T16:18:57.597708728+00:00 stderr F I1212 16:18:57.597582 12 store.go:1663] "Monitoring resource count at path" resource="consoles.config.openshift.io" path="//config.openshift.io/consoles" 2025-12-12T16:18:57.600121298+00:00 stderr F I1212 16:18:57.600038 12 cacher.go:469] cacher (consoles.config.openshift.io): initialized 2025-12-12T16:18:57.600121298+00:00 stderr F I1212 16:18:57.600087 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=Console" reflector="storage/cacher.go:/config.openshift.io/consoles" 2025-12-12T16:18:57.608485055+00:00 stderr F I1212 16:18:57.608372 12 store.go:1663] "Monitoring resource count at path" resource="imagedigestmirrorsets.config.openshift.io" path="//config.openshift.io/imagedigestmirrorsets" 2025-12-12T16:18:57.609885469+00:00 stderr F I1212 16:18:57.609825 12 cacher.go:469] cacher (imagedigestmirrorsets.config.openshift.io): initialized 2025-12-12T16:18:57.609923580+00:00 stderr F I1212 16:18:57.609868 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=ImageDigestMirrorSet" reflector="storage/cacher.go:/config.openshift.io/imagedigestmirrorsets" 2025-12-12T16:18:57.944377859+00:00 stderr F I1212 16:18:57.944244 12 store.go:1663] "Monitoring resource count at path" resource="imagecontentsourcepolicies.operator.openshift.io" path="//operator.openshift.io/imagecontentsourcepolicies" 2025-12-12T16:18:57.946610014+00:00 stderr F I1212 16:18:57.946550 12 cacher.go:469] cacher (imagecontentsourcepolicies.operator.openshift.io): initialized 2025-12-12T16:18:57.946654275+00:00 stderr F I1212 16:18:57.946611 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1alpha1, Kind=ImageContentSourcePolicy" reflector="storage/cacher.go:/operator.openshift.io/imagecontentsourcepolicies" 2025-12-12T16:18:58.334806870+00:00 stderr F I1212 16:18:58.334705 12 store.go:1663] "Monitoring resource count at path" resource="openshiftcontrollermanagers.operator.openshift.io" path="//operator.openshift.io/openshiftcontrollermanagers" 2025-12-12T16:18:58.337452376+00:00 stderr F I1212 16:18:58.337357 12 cacher.go:469] cacher (openshiftcontrollermanagers.operator.openshift.io): initialized 2025-12-12T16:18:58.337452376+00:00 stderr F I1212 16:18:58.337408 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=OpenShiftControllerManager" reflector="storage/cacher.go:/operator.openshift.io/openshiftcontrollermanagers" 2025-12-12T16:18:58.411815474+00:00 stderr F I1212 16:18:58.411687 12 store.go:1663] "Monitoring resource count at path" resource="builds.config.openshift.io" path="//config.openshift.io/builds" 2025-12-12T16:18:58.413396723+00:00 stderr F I1212 16:18:58.413337 12 cacher.go:469] cacher (builds.config.openshift.io): initialized 2025-12-12T16:18:58.413430624+00:00 stderr F I1212 16:18:58.413383 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=Build" reflector="storage/cacher.go:/config.openshift.io/builds" 2025-12-12T16:18:58.983566700+00:00 stderr F I1212 16:18:58.983357 12 controller.go:667] quota admission added evaluator for: namespaces 2025-12-12T16:18:59.102607753+00:00 stderr F I1212 16:18:59.102474 12 store.go:1663] "Monitoring resource count at path" resource="dnses.config.openshift.io" 
path="//config.openshift.io/dnses" 2025-12-12T16:18:59.103776872+00:00 stderr F I1212 16:18:59.103718 12 cacher.go:469] cacher (dnses.config.openshift.io): initialized 2025-12-12T16:18:59.103821193+00:00 stderr F I1212 16:18:59.103753 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=DNS" reflector="storage/cacher.go:/config.openshift.io/dnses" 2025-12-12T16:18:59.554390502+00:00 stderr F I1212 16:18:59.554280 12 store.go:1663] "Monitoring resource count at path" resource="configs.imageregistry.operator.openshift.io" path="//imageregistry.operator.openshift.io/configs" 2025-12-12T16:18:59.557148430+00:00 stderr F I1212 16:18:59.557100 12 cacher.go:469] cacher (configs.imageregistry.operator.openshift.io): initialized 2025-12-12T16:18:59.557243693+00:00 stderr F I1212 16:18:59.557221 12 reflector.go:430] "Caches populated" type="imageregistry.operator.openshift.io/v1, Kind=Config" reflector="storage/cacher.go:/imageregistry.operator.openshift.io/configs" 2025-12-12T16:19:00.104311148+00:00 stderr F I1212 16:19:00.104171 12 store.go:1663] "Monitoring resource count at path" resource="ingresses.config.openshift.io" path="//config.openshift.io/ingresses" 2025-12-12T16:19:00.107054435+00:00 stderr F I1212 16:19:00.106945 12 cacher.go:469] cacher (ingresses.config.openshift.io): initialized 2025-12-12T16:19:00.107054435+00:00 stderr F I1212 16:19:00.106993 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=Ingress" reflector="storage/cacher.go:/config.openshift.io/ingresses" 2025-12-12T16:19:00.393481867+00:00 stderr F I1212 16:19:00.393273 12 store.go:1663] "Monitoring resource count at path" resource="operators.operators.coreos.com" path="//operators.coreos.com/operators" 2025-12-12T16:19:00.395389634+00:00 stderr F I1212 16:19:00.395328 12 cacher.go:469] cacher (operators.operators.coreos.com): initialized 2025-12-12T16:19:00.395438615+00:00 stderr F I1212 16:19:00.395376 12 reflector.go:430] "Caches populated" type="operators.coreos.com/v1, Kind=Operator" reflector="storage/cacher.go:/operators.coreos.com/operators" 2025-12-12T16:19:00.449870011+00:00 stderr F I1212 16:19:00.449739 12 store.go:1663] "Monitoring resource count at path" resource="nodes.config.openshift.io" path="//config.openshift.io/nodes" 2025-12-12T16:19:00.453669285+00:00 stderr F I1212 16:19:00.453592 12 cacher.go:469] cacher (nodes.config.openshift.io): initialized 2025-12-12T16:19:00.453669285+00:00 stderr F I1212 16:19:00.453629 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=Node" reflector="storage/cacher.go:/config.openshift.io/nodes" 2025-12-12T16:19:00.510330286+00:00 stderr F I1212 16:19:00.510010 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.009005592456148606 seatDemandStdev=0.14740609555664974 seatDemandSmoothed=13.538413129984963 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:19:01.157282950+00:00 stderr F I1212 16:19:01.157149 12 store.go:1663] "Monitoring resource count at path" resource="consolenotifications.console.openshift.io" path="//console.openshift.io/consolenotifications" 2025-12-12T16:19:01.159390452+00:00 stderr F I1212 16:19:01.159319 12 cacher.go:469] cacher (consolenotifications.console.openshift.io): initialized 2025-12-12T16:19:01.159438633+00:00 stderr F I1212 16:19:01.159364 12 reflector.go:430] "Caches populated" type="console.openshift.io/v1, Kind=ConsoleNotification" 
reflector="storage/cacher.go:/console.openshift.io/consolenotifications" 2025-12-12T16:19:01.330231956+00:00 stderr F I1212 16:19:01.330091 12 store.go:1663] "Monitoring resource count at path" resource="images.config.openshift.io" path="//config.openshift.io/images" 2025-12-12T16:19:01.331393475+00:00 stderr F I1212 16:19:01.331322 12 cacher.go:469] cacher (images.config.openshift.io): initialized 2025-12-12T16:19:01.331393475+00:00 stderr F I1212 16:19:01.331359 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=Image" reflector="storage/cacher.go:/config.openshift.io/images" 2025-12-12T16:19:01.477117077+00:00 stderr F I1212 16:19:01.476992 12 store.go:1663] "Monitoring resource count at path" resource="gatewayclasses.gateway.networking.k8s.io" path="//gateway.networking.k8s.io/gatewayclasses" 2025-12-12T16:19:01.478525032+00:00 stderr F I1212 16:19:01.478458 12 cacher.go:469] cacher (gatewayclasses.gateway.networking.k8s.io): initialized 2025-12-12T16:19:01.478567923+00:00 stderr F I1212 16:19:01.478508 12 reflector.go:430] "Caches populated" type="gateway.networking.k8s.io/v1, Kind=GatewayClass" reflector="storage/cacher.go:/gateway.networking.k8s.io/gatewayclasses" 2025-12-12T16:19:01.485620508+00:00 stderr F I1212 16:19:01.485536 12 store.go:1663] "Monitoring resource count at path" resource="gatewayclasses.gateway.networking.k8s.io" path="//gateway.networking.k8s.io/gatewayclasses" 2025-12-12T16:19:01.488046158+00:00 stderr F I1212 16:19:01.487564 12 cacher.go:469] cacher (gatewayclasses.gateway.networking.k8s.io): initialized 2025-12-12T16:19:01.488046158+00:00 stderr F I1212 16:19:01.487607 12 reflector.go:430] "Caches populated" type="gateway.networking.k8s.io/v1beta1, Kind=GatewayClass" reflector="storage/cacher.go:/gateway.networking.k8s.io/gatewayclasses" 2025-12-12T16:19:01.677532312+00:00 stderr F I1212 16:19:01.677379 12 store.go:1663] "Monitoring resource count at path" resource="oauths.config.openshift.io" path="//config.openshift.io/oauths" 2025-12-12T16:19:01.679724596+00:00 stderr F I1212 16:19:01.679670 12 cacher.go:469] cacher (oauths.config.openshift.io): initialized 2025-12-12T16:19:01.679724596+00:00 stderr F I1212 16:19:01.679691 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=OAuth" reflector="storage/cacher.go:/config.openshift.io/oauths" 2025-12-12T16:19:01.696310717+00:00 stderr F I1212 16:19:01.696143 12 store.go:1663] "Monitoring resource count at path" resource="olmconfigs.operators.coreos.com" path="//operators.coreos.com/olmconfigs" 2025-12-12T16:19:01.697590838+00:00 stderr F I1212 16:19:01.697526 12 cacher.go:469] cacher (olmconfigs.operators.coreos.com): initialized 2025-12-12T16:19:01.697635539+00:00 stderr F I1212 16:19:01.697558 12 reflector.go:430] "Caches populated" type="operators.coreos.com/v1, Kind=OLMConfig" reflector="storage/cacher.go:/operators.coreos.com/olmconfigs" 2025-12-12T16:19:01.991473863+00:00 stderr F I1212 16:19:01.990936 12 store.go:1663] "Monitoring resource count at path" resource="configs.operator.openshift.io" path="//operator.openshift.io/configs" 2025-12-12T16:19:01.994432496+00:00 stderr F I1212 16:19:01.993987 12 cacher.go:469] cacher (configs.operator.openshift.io): initialized 2025-12-12T16:19:01.994432496+00:00 stderr F I1212 16:19:01.994020 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=Config" reflector="storage/cacher.go:/operator.openshift.io/configs" 2025-12-12T16:19:02.167221468+00:00 stderr F I1212 16:19:02.166409 12 
store.go:1663] "Monitoring resource count at path" resource="consoleclidownloads.console.openshift.io" path="//console.openshift.io/consoleclidownloads" 2025-12-12T16:19:02.169234137+00:00 stderr F I1212 16:19:02.169054 12 cacher.go:469] cacher (consoleclidownloads.console.openshift.io): initialized 2025-12-12T16:19:02.169234137+00:00 stderr F I1212 16:19:02.169089 12 reflector.go:430] "Caches populated" type="console.openshift.io/v1, Kind=ConsoleCLIDownload" reflector="storage/cacher.go:/console.openshift.io/consoleclidownloads" 2025-12-12T16:19:02.455228928+00:00 stderr F I1212 16:19:02.455069 12 node_authorizer.go:224] "NODE DENY" err="node 'crc' cannot get unknown pod openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 2025-12-12T16:19:02.637474534+00:00 stderr F I1212 16:19:02.637361 12 node_authorizer.go:224] "NODE DENY" err="node 'crc' cannot get unknown pod openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 2025-12-12T16:19:03.289822872+00:00 stderr F I1212 16:19:03.289716 12 store.go:1663] "Monitoring resource count at path" resource="operatorhubs.config.openshift.io" path="//config.openshift.io/operatorhubs" 2025-12-12T16:19:03.292213171+00:00 stderr F I1212 16:19:03.292116 12 cacher.go:469] cacher (operatorhubs.config.openshift.io): initialized 2025-12-12T16:19:03.292213171+00:00 stderr F I1212 16:19:03.292159 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=OperatorHub" reflector="storage/cacher.go:/config.openshift.io/operatorhubs" 2025-12-12T16:19:05.575616672+00:00 stderr F I1212 16:19:05.573863 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:19:05.575616672+00:00 stderr F I1212 16:19:05.573894 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:19:05.575616672+00:00 stderr F I1212 16:19:05.573900 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 44.601µs 2025-12-12T16:19:05.594411327+00:00 stderr F I1212 16:19:05.594198 12 store.go:1663] "Monitoring resource count at path" resource="clustercsidrivers.operator.openshift.io" path="//operator.openshift.io/clustercsidrivers" 2025-12-12T16:19:05.595549965+00:00 stderr F I1212 16:19:05.595483 12 cacher.go:469] cacher (clustercsidrivers.operator.openshift.io): initialized 2025-12-12T16:19:05.595586456+00:00 stderr F I1212 16:19:05.595527 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=ClusterCSIDriver" reflector="storage/cacher.go:/operator.openshift.io/clustercsidrivers" 2025-12-12T16:19:05.603545023+00:00 stderr F I1212 16:19:05.603468 12 store.go:1663] "Monitoring resource count at path" resource="authentications.config.openshift.io" path="//config.openshift.io/authentications" 2025-12-12T16:19:05.605334687+00:00 stderr F I1212 16:19:05.605246 12 cacher.go:469] cacher (authentications.config.openshift.io): initialized 2025-12-12T16:19:05.605334687+00:00 stderr F I1212 16:19:05.605291 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=Authentication" reflector="storage/cacher.go:/config.openshift.io/authentications" 2025-12-12T16:19:05.628380137+00:00 stderr F I1212 16:19:05.627996 12 store.go:1663] "Monitoring resource count at path" resource="clusterimagepolicies.config.openshift.io" path="//config.openshift.io/clusterimagepolicies" 2025-12-12T16:19:05.637210365+00:00 stderr F I1212 16:19:05.637020 12 cacher.go:469] cacher (clusterimagepolicies.config.openshift.io): initialized 2025-12-12T16:19:05.637210365+00:00 stderr F I1212 16:19:05.637082 12 
reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=ClusterImagePolicy" reflector="storage/cacher.go:/config.openshift.io/clusterimagepolicies" 2025-12-12T16:19:05.639149523+00:00 stderr F I1212 16:19:05.639063 12 controller.go:667] quota admission added evaluator for: replicasets.apps 2025-12-12T16:19:05.645411858+00:00 stderr F I1212 16:19:05.645313 12 store.go:1663] "Monitoring resource count at path" resource="kubeletconfigs.machineconfiguration.openshift.io" path="//machineconfiguration.openshift.io/kubeletconfigs" 2025-12-12T16:19:05.650588696+00:00 stderr F I1212 16:19:05.650482 12 cacher.go:469] cacher (kubeletconfigs.machineconfiguration.openshift.io): initialized 2025-12-12T16:19:05.650588696+00:00 stderr F I1212 16:19:05.650520 12 reflector.go:430] "Caches populated" type="machineconfiguration.openshift.io/v1, Kind=KubeletConfig" reflector="storage/cacher.go:/machineconfiguration.openshift.io/kubeletconfigs" 2025-12-12T16:19:05.658876271+00:00 stderr F I1212 16:19:05.658781 12 store.go:1663] "Monitoring resource count at path" resource="storagestates.migration.k8s.io" path="//migration.k8s.io/storagestates" 2025-12-12T16:19:05.663160577+00:00 stderr F I1212 16:19:05.663073 12 cacher.go:469] cacher (storagestates.migration.k8s.io): initialized 2025-12-12T16:19:05.663160577+00:00 stderr F I1212 16:19:05.663109 12 reflector.go:430] "Caches populated" type="migration.k8s.io/v1alpha1, Kind=StorageState" reflector="storage/cacher.go:/migration.k8s.io/storagestates" 2025-12-12T16:19:05.669938344+00:00 stderr F I1212 16:19:05.669843 12 store.go:1663] "Monitoring resource count at path" resource="dnses.operator.openshift.io" path="//operator.openshift.io/dnses" 2025-12-12T16:19:05.673015800+00:00 stderr F I1212 16:19:05.672786 12 cacher.go:469] cacher (dnses.operator.openshift.io): initialized 2025-12-12T16:19:05.673015800+00:00 stderr F I1212 16:19:05.672839 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=DNS" reflector="storage/cacher.go:/operator.openshift.io/dnses" 2025-12-12T16:19:05.677314977+00:00 stderr F I1212 16:19:05.677233 12 controller.go:667] quota admission added evaluator for: deployments.apps 2025-12-12T16:19:05.686835862+00:00 stderr F I1212 16:19:05.679918 12 store.go:1663] "Monitoring resource count at path" resource="consolelinks.console.openshift.io" path="//console.openshift.io/consolelinks" 2025-12-12T16:19:05.686835862+00:00 stderr F I1212 16:19:05.682262 12 cacher.go:469] cacher (consolelinks.console.openshift.io): initialized 2025-12-12T16:19:05.686835862+00:00 stderr F I1212 16:19:05.682306 12 reflector.go:430] "Caches populated" type="console.openshift.io/v1, Kind=ConsoleLink" reflector="storage/cacher.go:/console.openshift.io/consolelinks" 2025-12-12T16:19:05.686835862+00:00 stderr F I1212 16:19:05.684857 12 controller.go:667] quota admission added evaluator for: endpointslices.discovery.k8s.io 2025-12-12T16:19:05.686835862+00:00 stderr F I1212 16:19:05.685391 12 controller.go:667] quota admission added evaluator for: endpoints 2025-12-12T16:19:05.687890678+00:00 stderr F I1212 16:19:05.687780 12 cacher.go:847] cacher (endpoints): 1 objects queued in incoming channel. 2025-12-12T16:19:05.687890678+00:00 stderr F I1212 16:19:05.687835 12 cacher.go:847] cacher (endpoints): 2 objects queued in incoming channel. 2025-12-12T16:19:05.687984340+00:00 stderr F I1212 16:19:05.687931 12 cacher.go:847] cacher (endpointslices.discovery.k8s.io): 1 objects queued in incoming channel. 
2025-12-12T16:19:05.687984340+00:00 stderr F I1212 16:19:05.687948 12 cacher.go:847] cacher (endpointslices.discovery.k8s.io): 2 objects queued in incoming channel. 2025-12-12T16:19:05.691275512+00:00 stderr F I1212 16:19:05.690707 12 store.go:1663] "Monitoring resource count at path" resource="containerruntimeconfigs.machineconfiguration.openshift.io" path="//machineconfiguration.openshift.io/containerruntimeconfigs" 2025-12-12T16:19:05.692143403+00:00 stderr F I1212 16:19:05.692068 12 cacher.go:469] cacher (containerruntimeconfigs.machineconfiguration.openshift.io): initialized 2025-12-12T16:19:05.692143403+00:00 stderr F I1212 16:19:05.692114 12 reflector.go:430] "Caches populated" type="machineconfiguration.openshift.io/v1, Kind=ContainerRuntimeConfig" reflector="storage/cacher.go:/machineconfiguration.openshift.io/containerruntimeconfigs" 2025-12-12T16:19:05.703563116+00:00 stderr F I1212 16:19:05.703376 12 store.go:1663] "Monitoring resource count at path" resource="storages.operator.openshift.io" path="//operator.openshift.io/storages" 2025-12-12T16:19:05.707258327+00:00 stderr F I1212 16:19:05.707081 12 cacher.go:469] cacher (storages.operator.openshift.io): initialized 2025-12-12T16:19:05.707258327+00:00 stderr F I1212 16:19:05.707116 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=Storage" reflector="storage/cacher.go:/operator.openshift.io/storages" 2025-12-12T16:19:05.716146587+00:00 stderr F I1212 16:19:05.714823 12 store.go:1663] "Monitoring resource count at path" resource="consoleyamlsamples.console.openshift.io" path="//console.openshift.io/consoleyamlsamples" 2025-12-12T16:19:05.716489755+00:00 stderr F I1212 16:19:05.716102 12 cacher.go:469] cacher (consoleyamlsamples.console.openshift.io): initialized 2025-12-12T16:19:05.716489755+00:00 stderr F I1212 16:19:05.716147 12 reflector.go:430] "Caches populated" type="console.openshift.io/v1, Kind=ConsoleYAMLSample" reflector="storage/cacher.go:/console.openshift.io/consoleyamlsamples" 2025-12-12T16:19:05.743271517+00:00 stderr F I1212 16:19:05.738209 12 store.go:1663] "Monitoring resource count at path" resource="machineosconfigs.machineconfiguration.openshift.io" path="//machineconfiguration.openshift.io/machineosconfigs" 2025-12-12T16:19:05.743271517+00:00 stderr F I1212 16:19:05.739241 12 cacher.go:469] cacher (machineosconfigs.machineconfiguration.openshift.io): initialized 2025-12-12T16:19:05.743271517+00:00 stderr F I1212 16:19:05.739283 12 reflector.go:430] "Caches populated" type="machineconfiguration.openshift.io/v1, Kind=MachineOSConfig" reflector="storage/cacher.go:/machineconfiguration.openshift.io/machineosconfigs" 2025-12-12T16:19:05.758021912+00:00 stderr F I1212 16:19:05.757902 12 store.go:1663] "Monitoring resource count at path" resource="machineconfignodes.machineconfiguration.openshift.io" path="//machineconfiguration.openshift.io/machineconfignodes" 2025-12-12T16:19:05.760404261+00:00 stderr F I1212 16:19:05.760215 12 cacher.go:469] cacher (machineconfignodes.machineconfiguration.openshift.io): initialized 2025-12-12T16:19:05.760404261+00:00 stderr F I1212 16:19:05.760250 12 reflector.go:430] "Caches populated" type="machineconfiguration.openshift.io/v1, Kind=MachineConfigNode" reflector="storage/cacher.go:/machineconfiguration.openshift.io/machineconfignodes" 2025-12-12T16:19:05.766820690+00:00 stderr F I1212 16:19:05.766687 12 store.go:1663] "Monitoring resource count at path" resource="consolequickstarts.console.openshift.io" 
path="//console.openshift.io/consolequickstarts" 2025-12-12T16:19:05.777730029+00:00 stderr F I1212 16:19:05.777139 12 cacher.go:469] cacher (consolequickstarts.console.openshift.io): initialized 2025-12-12T16:19:05.777730029+00:00 stderr F I1212 16:19:05.777275 12 reflector.go:430] "Caches populated" type="console.openshift.io/v1, Kind=ConsoleQuickStart" reflector="storage/cacher.go:/console.openshift.io/consolequickstarts" 2025-12-12T16:19:05.778427847+00:00 stderr F I1212 16:19:05.778353 12 store.go:1663] "Monitoring resource count at path" resource="csisnapshotcontrollers.operator.openshift.io" path="//operator.openshift.io/csisnapshotcontrollers" 2025-12-12T16:19:05.782473476+00:00 stderr F I1212 16:19:05.782347 12 cacher.go:469] cacher (csisnapshotcontrollers.operator.openshift.io): initialized 2025-12-12T16:19:05.782473476+00:00 stderr F I1212 16:19:05.782374 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=CSISnapshotController" reflector="storage/cacher.go:/operator.openshift.io/csisnapshotcontrollers" 2025-12-12T16:19:05.792332980+00:00 stderr F I1212 16:19:05.788880 12 store.go:1663] "Monitoring resource count at path" resource="configs.samples.operator.openshift.io" path="//samples.operator.openshift.io/configs" 2025-12-12T16:19:05.794056043+00:00 stderr F I1212 16:19:05.793932 12 cacher.go:469] cacher (configs.samples.operator.openshift.io): initialized 2025-12-12T16:19:05.794056043+00:00 stderr F I1212 16:19:05.793973 12 reflector.go:430] "Caches populated" type="samples.operator.openshift.io/v1, Kind=Config" reflector="storage/cacher.go:/samples.operator.openshift.io/configs" 2025-12-12T16:19:05.798795400+00:00 stderr F I1212 16:19:05.798676 12 store.go:1663] "Monitoring resource count at path" resource="clusterautoscalers.autoscaling.openshift.io" path="//autoscaling.openshift.io/clusterautoscalers" 2025-12-12T16:19:05.800816280+00:00 stderr F I1212 16:19:05.800715 12 cacher.go:469] cacher (clusterautoscalers.autoscaling.openshift.io): initialized 2025-12-12T16:19:05.800816280+00:00 stderr F I1212 16:19:05.800758 12 reflector.go:430] "Caches populated" type="autoscaling.openshift.io/v1, Kind=ClusterAutoscaler" reflector="storage/cacher.go:/autoscaling.openshift.io/clusterautoscalers" 2025-12-12T16:19:05.822046095+00:00 stderr F I1212 16:19:05.821921 12 store.go:1663] "Monitoring resource count at path" resource="consoles.operator.openshift.io" path="//operator.openshift.io/consoles" 2025-12-12T16:19:05.826064164+00:00 stderr F I1212 16:19:05.825967 12 cacher.go:469] cacher (consoles.operator.openshift.io): initialized 2025-12-12T16:19:05.826064164+00:00 stderr F I1212 16:19:05.826000 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Kind=Console" reflector="storage/cacher.go:/operator.openshift.io/consoles" 2025-12-12T16:19:05.837146768+00:00 stderr F I1212 16:19:05.836200 12 store.go:1663] "Monitoring resource count at path" resource="consoleplugins.console.openshift.io" path="//console.openshift.io/consoleplugins" 2025-12-12T16:19:05.837973549+00:00 stderr F I1212 16:19:05.837909 12 cacher.go:469] cacher (consoleplugins.console.openshift.io): initialized 2025-12-12T16:19:05.838033990+00:00 stderr F I1212 16:19:05.837987 12 reflector.go:430] "Caches populated" type="console.openshift.io/v1, Kind=ConsolePlugin" reflector="storage/cacher.go:/console.openshift.io/consoleplugins" 2025-12-12T16:19:05.843577347+00:00 stderr F I1212 16:19:05.843519 12 store.go:1663] "Monitoring resource count at path" 
resource="consoleplugins.console.openshift.io" path="//console.openshift.io/consoleplugins" 2025-12-12T16:19:05.844881879+00:00 stderr F I1212 16:19:05.844830 12 cacher.go:469] cacher (consoleplugins.console.openshift.io): initialized 2025-12-12T16:19:05.844881879+00:00 stderr F I1212 16:19:05.844854 12 reflector.go:430] "Caches populated" type="console.openshift.io/v1alpha1, Kind=ConsolePlugin" reflector="storage/cacher.go:/console.openshift.io/consoleplugins" 2025-12-12T16:19:05.870252387+00:00 stderr F I1212 16:19:05.869517 12 store.go:1663] "Monitoring resource count at path" resource="machineosbuilds.machineconfiguration.openshift.io" path="//machineconfiguration.openshift.io/machineosbuilds" 2025-12-12T16:19:05.871628301+00:00 stderr F I1212 16:19:05.871544 12 cacher.go:469] cacher (machineosbuilds.machineconfiguration.openshift.io): initialized 2025-12-12T16:19:05.871628301+00:00 stderr F I1212 16:19:05.871582 12 reflector.go:430] "Caches populated" type="machineconfiguration.openshift.io/v1, Kind=MachineOSBuild" reflector="storage/cacher.go:/machineconfiguration.openshift.io/machineosbuilds" 2025-12-12T16:19:05.880642754+00:00 stderr F I1212 16:19:05.880515 12 store.go:1663] "Monitoring resource count at path" resource="consoleexternalloglinks.console.openshift.io" path="//console.openshift.io/consoleexternalloglinks" 2025-12-12T16:19:05.881773711+00:00 stderr F I1212 16:19:05.881710 12 cacher.go:469] cacher (consoleexternalloglinks.console.openshift.io): initialized 2025-12-12T16:19:05.881796172+00:00 stderr F I1212 16:19:05.881770 12 reflector.go:430] "Caches populated" type="console.openshift.io/v1, Kind=ConsoleExternalLogLink" reflector="storage/cacher.go:/console.openshift.io/consoleexternalloglinks" 2025-12-12T16:19:05.888761724+00:00 stderr F I1212 16:19:05.888666 12 store.go:1663] "Monitoring resource count at path" resource="rangeallocations.security.internal.openshift.io" path="//security.internal.openshift.io/rangeallocations" 2025-12-12T16:19:05.890076167+00:00 stderr F I1212 16:19:05.890000 12 cacher.go:469] cacher (rangeallocations.security.internal.openshift.io): initialized 2025-12-12T16:19:05.890076167+00:00 stderr F I1212 16:19:05.890039 12 reflector.go:430] "Caches populated" type="security.internal.openshift.io/v1, Kind=RangeAllocation" reflector="storage/cacher.go:/security.internal.openshift.io/rangeallocations" 2025-12-12T16:19:05.898552916+00:00 stderr F I1212 16:19:05.898442 12 store.go:1663] "Monitoring resource count at path" resource="helmchartrepositories.helm.openshift.io" path="//helm.openshift.io/helmchartrepositories" 2025-12-12T16:19:05.899893959+00:00 stderr F I1212 16:19:05.899782 12 cacher.go:469] cacher (helmchartrepositories.helm.openshift.io): initialized 2025-12-12T16:19:05.899893959+00:00 stderr F I1212 16:19:05.899805 12 reflector.go:430] "Caches populated" type="helm.openshift.io/v1beta1, Kind=HelmChartRepository" reflector="storage/cacher.go:/helm.openshift.io/helmchartrepositories" 2025-12-12T16:19:05.907820865+00:00 stderr F I1212 16:19:05.907731 12 store.go:1663] "Monitoring resource count at path" resource="consolesamples.console.openshift.io" path="//console.openshift.io/consolesamples" 2025-12-12T16:19:05.908944533+00:00 stderr F I1212 16:19:05.908830 12 cacher.go:469] cacher (consolesamples.console.openshift.io): initialized 2025-12-12T16:19:05.908944533+00:00 stderr F I1212 16:19:05.908861 12 reflector.go:430] "Caches populated" type="console.openshift.io/v1, Kind=ConsoleSample" 
reflector="storage/cacher.go:/console.openshift.io/consolesamples" 2025-12-12T16:19:05.917812502+00:00 stderr F I1212 16:19:05.917711 12 store.go:1663] "Monitoring resource count at path" resource="projects.config.openshift.io" path="//config.openshift.io/projects" 2025-12-12T16:19:05.919293189+00:00 stderr F I1212 16:19:05.919218 12 cacher.go:469] cacher (projects.config.openshift.io): initialized 2025-12-12T16:19:05.919293189+00:00 stderr F I1212 16:19:05.919270 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=Project" reflector="storage/cacher.go:/config.openshift.io/projects" 2025-12-12T16:19:05.926226660+00:00 stderr F I1212 16:19:05.926070 12 store.go:1663] "Monitoring resource count at path" resource="imagecontentpolicies.config.openshift.io" path="//config.openshift.io/imagecontentpolicies" 2025-12-12T16:19:05.926924718+00:00 stderr F I1212 16:19:05.926852 12 cacher.go:469] cacher (imagecontentpolicies.config.openshift.io): initialized 2025-12-12T16:19:05.926924718+00:00 stderr F I1212 16:19:05.926887 12 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Kind=ImageContentPolicy" reflector="storage/cacher.go:/config.openshift.io/imagecontentpolicies" 2025-12-12T16:19:05.936648538+00:00 stderr F I1212 16:19:05.936542 12 store.go:1663] "Monitoring resource count at path" resource="imagepruners.imageregistry.operator.openshift.io" path="//imageregistry.operator.openshift.io/imagepruners" 2025-12-12T16:19:05.938095064+00:00 stderr F I1212 16:19:05.937942 12 cacher.go:469] cacher (imagepruners.imageregistry.operator.openshift.io): initialized 2025-12-12T16:19:05.938095064+00:00 stderr F I1212 16:19:05.937976 12 reflector.go:430] "Caches populated" type="imageregistry.operator.openshift.io/v1, Kind=ImagePruner" reflector="storage/cacher.go:/imageregistry.operator.openshift.io/imagepruners" 2025-12-12T16:19:06.388213352+00:00 stderr F I1212 16:19:06.387972 12 controller.go:667] quota admission added evaluator for: deployments.apps 2025-12-12T16:19:06.409940179+00:00 stderr F I1212 16:19:06.409464 12 controller.go:667] quota admission added evaluator for: replicasets.apps 2025-12-12T16:19:06.466657672+00:00 stderr F E1212 16:19:06.466473 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/build.openshift.io/v1/builds?allowWatchBookmarks=true&resourceVersion=39122&timeout=8m12s&timeoutSeconds=492&watch=true" auditID="a21c20fc-9242-4d58-b776-c153ff663345" 2025-12-12T16:19:06.467160304+00:00 stderr F E1212 16:19:06.467095 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/build.openshift.io/v1/buildconfigs?allowWatchBookmarks=true&resourceVersion=39122&timeout=9m20s&timeoutSeconds=560&watch=true" auditID="7a078bfe-94de-4777-81b1-339be805ca89" 2025-12-12T16:19:06.468267141+00:00 stderr F E1212 16:19:06.468159 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/route.openshift.io/v1/routes?allowWatchBookmarks=true&resourceVersion=39059&timeout=6m37s&timeoutSeconds=397&watch=true" auditID="6aa29f3b-7617-40cd-9b6c-b073c4262bfc" 2025-12-12T16:19:10.511519481+00:00 stderr F I1212 16:19:10.511276 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=3 seatDemandAvg=0.01842657594885261 seatDemandStdev=0.14217812161142587 seatDemandSmoothed=13.230723536039195 fairFrac=2.2796127562642368 currentCL=3 concurrencyDenominator=3 backstop=false 2025-12-12T16:19:18.328500668+00:00 stderr F E1212 
16:19:18.328360 12 authentication.go:75] "Unable to authenticate the request" err="[invalid bearer token, context canceled]" 2025-12-12T16:19:18.328760414+00:00 stderr F E1212 16:19:18.328686 12 writers.go:123] "Unhandled Error" err="apiserver was unable to write a JSON response: http: Handler timeout" logger="UnhandledError" 2025-12-12T16:19:18.330373294+00:00 stderr F E1212 16:19:18.330019 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"http: Handler timeout\"}: http: Handler timeout" logger="UnhandledError" 2025-12-12T16:19:18.331334558+00:00 stderr F E1212 16:19:18.331258 12 writers.go:136] "Unhandled Error" err="apiserver was unable to write a fallback JSON response: http: Handler timeout" logger="UnhandledError" 2025-12-12T16:19:18.334823874+00:00 stderr F E1212 16:19:18.334712 12 timeout.go:140] "Post-timeout activity" logger="UnhandledError" timeElapsed="6.395999ms" method="GET" path="/apis/coordination.k8s.io/v1/namespaces/openshift-marketplace/leases/marketplace-operator-lock" result=null 2025-12-12T16:19:26.372067212+00:00 stderr F I1212 16:19:26.371906 12 cacher.go:847] cacher (deployments.apps): 1 objects queued in incoming channel. 2025-12-12T16:19:26.408162558+00:00 stderr F E1212 16:19:26.408016 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/image.openshift.io/v1/images?allowWatchBookmarks=true&resourceVersion=39399&timeout=6m10s&timeoutSeconds=370&watch=true" auditID="de8e3508-a299-4f99-9c22-6dd50738272e" 2025-12-12T16:19:26.408162558+00:00 stderr F E1212 16:19:26.408052 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/build.openshift.io/v1/buildconfigs?allowWatchBookmarks=true&resourceVersion=39399&timeout=8m27s&timeoutSeconds=507&watch=true" auditID="d0b36527-d628-4181-8259-f67d462cf9e2" 2025-12-12T16:19:26.408250810+00:00 stderr F E1212 16:19:26.408193 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/image.openshift.io/v1/imagestreams?allowWatchBookmarks=true&resourceVersion=39369&timeout=9m51s&timeoutSeconds=591&watch=true" auditID="7dc5bb66-8a0f-46a0-a367-0f133b5287b0" 2025-12-12T16:19:26.408391494+00:00 stderr F E1212 16:19:26.408331 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/apps.openshift.io/v1/deploymentconfigs?allowWatchBookmarks=true&resourceVersion=39399&timeout=6m17s&timeoutSeconds=377&watch=true" auditID="fc366d18-9ee6-4a44-8ddf-08b6c75c4e08" 2025-12-12T16:19:26.408467266+00:00 stderr F E1212 16:19:26.408413 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/build.openshift.io/v1/builds?allowWatchBookmarks=true&resourceVersion=39395&timeout=9m33s&timeoutSeconds=573&watch=true" auditID="522d6a0c-9235-4a25-a37c-b3399862e7d8" 2025-12-12T16:19:26.408608989+00:00 stderr F E1212 16:19:26.408549 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/template.openshift.io/v1/templateinstances?allowWatchBookmarks=true&resourceVersion=39399&timeout=7m42s&timeoutSeconds=462&watch=true" auditID="fd06195d-500a-4f76-82c2-136ad5b200d5" 2025-12-12T16:19:26.427087903+00:00 stderr F E1212 16:19:26.425707 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/route.openshift.io/v1/routes?allowWatchBookmarks=true&resourceVersion=39278&timeout=7m0s&timeoutSeconds=420&watch=true" 
auditID="b88c5ac6-5499-4eb1-a333-316c9e0cd101" 2025-12-12T16:19:26.429961955+00:00 stderr F I1212 16:19:26.429786 12 cacher.go:847] cacher (replicasets.apps): 1 objects queued in incoming channel. 2025-12-12T16:19:26.429961955+00:00 stderr F I1212 16:19:26.429813 12 cacher.go:847] cacher (replicasets.apps): 2 objects queued in incoming channel. 2025-12-12T16:19:26.456093381+00:00 stderr F I1212 16:19:26.455933 12 cacher.go:847] cacher (deployments.apps): 2 objects queued in incoming channel. 2025-12-12T16:19:30.513230777+00:00 stderr F I1212 16:19:30.512952 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=4 seatDemandAvg=0.003275247882810948 seatDemandStdev=0.07019013131338302 seatDemandSmoothed=12.63260739683854 fairFrac=2.2796127562642368 currentCL=4 concurrencyDenominator=4 backstop=false 2025-12-12T16:19:33.231277552+00:00 stderr F I1212 16:19:33.231117 12 controller.go:667] quota admission added evaluator for: serviceaccounts 2025-12-12T16:19:33.241294094+00:00 stderr F I1212 16:19:33.241104 12 controller.go:667] quota admission added evaluator for: catalogsources.operators.coreos.com 2025-12-12T16:19:40.514537810+00:00 stderr F I1212 16:19:40.514279 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.011359940633373247 seatDemandStdev=0.1744840579581174 seatDemandSmoothed=12.346331838678857 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:19:46.406369241+00:00 stderr F E1212 16:19:46.404794 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/image.openshift.io/v1/imagestreams?allowWatchBookmarks=true&resourceVersion=39533&timeout=7m42s&timeoutSeconds=462&watch=true" auditID="60778b1b-21a8-4d19-88bc-3dc646beb9f6" 2025-12-12T16:19:46.406369241+00:00 stderr F E1212 16:19:46.404818 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/image.openshift.io/v1/images?allowWatchBookmarks=true&resourceVersion=39568&timeout=7m7s&timeoutSeconds=427&watch=true" auditID="88e33892-264b-441a-bb68-dcb15a4be20b" 2025-12-12T16:19:46.406369241+00:00 stderr F E1212 16:19:46.404981 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/template.openshift.io/v1/templateinstances?allowWatchBookmarks=true&resourceVersion=39568&timeout=9m55s&timeoutSeconds=595&watch=true" auditID="4df3352d-9af9-4ca9-882a-bf1a4908bb9e" 2025-12-12T16:19:46.406369241+00:00 stderr F E1212 16:19:46.405004 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/build.openshift.io/v1/builds?allowWatchBookmarks=true&resourceVersion=39561&timeout=9m18s&timeoutSeconds=558&watch=true" auditID="b11ce602-134d-4f1a-8786-f5a4040f2431" 2025-12-12T16:19:46.406369241+00:00 stderr F E1212 16:19:46.405172 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/build.openshift.io/v1/buildconfigs?allowWatchBookmarks=true&resourceVersion=39534&timeout=5m33s&timeoutSeconds=333&watch=true" auditID="1a6179f0-f269-4c17-bb89-caac078fa8b2" 2025-12-12T16:19:46.406369241+00:00 stderr F E1212 16:19:46.405249 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/apps.openshift.io/v1/deploymentconfigs?allowWatchBookmarks=true&resourceVersion=39534&timeout=8m24s&timeoutSeconds=504&watch=true" auditID="bc8a8790-9e30-434d-b5c9-fb90bb41ee28" 2025-12-12T16:20:04.104300408+00:00 stderr 
F I1212 16:20:04.104095 12 controller.go:667] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io 2025-12-12T16:20:04.503591893+00:00 stderr F I1212 16:20:04.503436 12 controller.go:667] quota admission added evaluator for: roles.rbac.authorization.k8s.io 2025-12-12T16:20:05.711881041+00:00 stderr F I1212 16:20:05.711679 12 controller.go:667] quota admission added evaluator for: daemonsets.apps 2025-12-12T16:20:06.903962942+00:00 stderr F I1212 16:20:06.903819 12 controller.go:667] quota admission added evaluator for: servicemonitors.monitoring.coreos.com 2025-12-12T16:20:09.504365763+00:00 stderr F I1212 16:20:09.504167 12 controller.go:667] quota admission added evaluator for: prometheusrules.monitoring.coreos.com 2025-12-12T16:20:14.502824383+00:00 stderr F I1212 16:20:14.502647 12 controller.go:667] quota admission added evaluator for: operatorpkis.network.operator.openshift.io 2025-12-12T16:20:16.984313768+00:00 stderr F I1212 16:20:16.983851 12 controller.go:667] quota admission added evaluator for: daemonsets.apps 2025-12-12T16:20:19.304057303+00:00 stderr F I1212 16:20:19.303909 12 controller.go:667] quota admission added evaluator for: roles.rbac.authorization.k8s.io 2025-12-12T16:20:19.503890640+00:00 stderr F I1212 16:20:19.503605 12 controller.go:667] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io 2025-12-12T16:20:20.703722075+00:00 stderr F I1212 16:20:20.703545 12 controller.go:667] quota admission added evaluator for: servicemonitors.monitoring.coreos.com 2025-12-12T16:20:23.703385481+00:00 stderr F I1212 16:20:23.703243 12 controller.go:667] quota admission added evaluator for: operatorpkis.network.operator.openshift.io 2025-12-12T16:20:30.518322825+00:00 stderr F I1212 16:20:30.518078 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0008151955373238845 seatDemandStdev=0.028539989375607897 seatDemandSmoothed=11.00209650033895 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:20:46.434861571+00:00 stderr F E1212 16:20:46.434461 12 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/apis/route.openshift.io/v1/routes?allowWatchBookmarks=true&resourceVersion=39520&timeout=9m58s&timeoutSeconds=598&watch=true" auditID="f619d915-0fc3-4ded-a913-e98d8c6e6719" 2025-12-12T16:20:50.523550066+00:00 stderr F I1212 16:20:50.519751 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.009380996918353507 seatDemandStdev=0.1519131680168506 seatDemandSmoothed=10.506976003713866 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:21:00.521506939+00:00 stderr F I1212 16:21:00.521340 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0007151982188461138 seatDemandStdev=0.02673362508815206 seatDemandSmoothed=10.265946878564508 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:21:50.527130686+00:00 stderr F I1212 16:21:50.525616 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.007516367552070065 seatDemandStdev=0.1580585446916516 seatDemandSmoothed=9.14587417905764 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:22:00.526044193+00:00 stderr F I1212 16:22:00.525871 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" 
seatDemandHighWatermark=1 seatDemandAvg=0.000925913769504485 seatDemandStdev=0.03041474072215522 seatDemandSmoothed=8.936239907992622 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:22:10.530329243+00:00 stderr F I1212 16:22:10.528820 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0012951623968060772 seatDemandStdev=0.04045587573715754 seatDemandSmoothed=8.731666663985873 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:22:20.531211962+00:00 stderr F I1212 16:22:20.529692 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0024873177829915286 seatDemandStdev=0.049810952944487445 seatDemandSmoothed=8.53204119094093 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:22:40.532225502+00:00 stderr F I1212 16:22:40.531969 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0009967898420712845 seatDemandStdev=0.03348098612769787 seatDemandSmoothed=8.145497205022055 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:22:50.533481243+00:00 stderr F I1212 16:22:50.532739 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.006732340552369596 seatDemandStdev=0.14428033149439295 seatDemandSmoothed=7.961624060763624 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:23:00.534797412+00:00 stderr F I1212 16:23:00.533805 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0009054781362701299 seatDemandStdev=0.030077537226556077 seatDemandSmoothed=7.779219316719405 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:23:10.534609774+00:00 stderr F I1212 16:23:10.534366 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.001378528033566604 seatDemandStdev=0.04403798206686641 seatDemandSmoothed=7.601341852167168 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:23:20.535622863+00:00 stderr F I1212 16:23:20.535000 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0008544988530002765 seatDemandStdev=0.02921932040124304 seatDemandSmoothed=7.42720268741017 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:23:30.651864920+00:00 stderr F I1212 16:23:30.651595 12 controller.go:231] Updating CRD OpenAPI spec because clusterimagepolicies.config.openshift.io changed 2025-12-12T16:23:30.651864920+00:00 stderr F I1212 16:23:30.651747 12 controller.go:231] Updating CRD OpenAPI spec because consoles.config.openshift.io changed 2025-12-12T16:23:30.652144167+00:00 stderr F I1212 16:23:30.651860 12 controller.go:231] Updating CRD OpenAPI spec because machineosconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:23:30.652144167+00:00 stderr F I1212 16:23:30.652042 12 controller.go:231] Updating CRD OpenAPI spec because imagetagmirrorsets.config.openshift.io changed 2025-12-12T16:23:30.652144167+00:00 stderr F I1212 16:23:30.652096 12 controller.go:231] Updating CRD OpenAPI spec because ippools.whereabouts.cni.cncf.io changed 2025-12-12T16:23:30.652345762+00:00 stderr F I1212 16:23:30.652273 12 controller.go:231] 
Updating CRD OpenAPI spec because operators.operators.coreos.com changed 2025-12-12T16:23:30.652361532+00:00 stderr F I1212 16:23:30.652322 12 controller.go:231] Updating CRD OpenAPI spec because probes.monitoring.coreos.com changed 2025-12-12T16:23:30.652436194+00:00 stderr F I1212 16:23:30.652377 12 controller.go:231] Updating CRD OpenAPI spec because projecthelmchartrepositories.helm.openshift.io changed 2025-12-12T16:23:30.652558757+00:00 stderr F I1212 16:23:30.652480 12 controller.go:231] Updating CRD OpenAPI spec because rangeallocations.security.internal.openshift.io changed 2025-12-12T16:23:30.653165343+00:00 stderr F I1212 16:23:30.653058 12 controller.go:231] Updating CRD OpenAPI spec because adminnetworkpolicies.policy.networking.k8s.io changed 2025-12-12T16:23:30.653308566+00:00 stderr F I1212 16:23:30.653211 12 controller.go:231] Updating CRD OpenAPI spec because clusterresourcequotas.quota.openshift.io changed 2025-12-12T16:23:30.653308566+00:00 stderr F I1212 16:23:30.653243 12 controller.go:231] Updating CRD OpenAPI spec because configs.operator.openshift.io changed 2025-12-12T16:23:30.653308566+00:00 stderr F I1212 16:23:30.653253 12 controller.go:231] Updating CRD OpenAPI spec because egressfirewalls.k8s.ovn.org changed 2025-12-12T16:23:30.653308566+00:00 stderr F I1212 16:23:30.653263 12 controller.go:231] Updating CRD OpenAPI spec because egressrouters.network.operator.openshift.io changed 2025-12-12T16:23:30.653542972+00:00 stderr F I1212 16:23:30.653447 12 controller.go:231] Updating CRD OpenAPI spec because gatewayclasses.gateway.networking.k8s.io changed 2025-12-12T16:23:30.653785218+00:00 stderr F I1212 16:23:30.653695 12 controller.go:231] Updating CRD OpenAPI spec because network-attachment-definitions.k8s.cni.cncf.io changed 2025-12-12T16:23:30.653785218+00:00 stderr F I1212 16:23:30.653721 12 controller.go:231] Updating CRD OpenAPI spec because operatorconditions.operators.coreos.com changed 2025-12-12T16:23:30.653785218+00:00 stderr F I1212 16:23:30.653727 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.coreos.com changed 2025-12-12T16:23:30.653785218+00:00 stderr F I1212 16:23:30.653734 12 controller.go:231] Updating CRD OpenAPI spec because consolenotifications.console.openshift.io changed 2025-12-12T16:23:30.653869330+00:00 stderr F I1212 16:23:30.653796 12 controller.go:231] Updating CRD OpenAPI spec because consolesamples.console.openshift.io changed 2025-12-12T16:23:30.654024844+00:00 stderr F I1212 16:23:30.653964 12 controller.go:231] Updating CRD OpenAPI spec because imagecontentpolicies.config.openshift.io changed 2025-12-12T16:23:30.654024844+00:00 stderr F I1212 16:23:30.653984 12 controller.go:231] Updating CRD OpenAPI spec because imagepolicies.config.openshift.io changed 2025-12-12T16:23:30.654024844+00:00 stderr F I1212 16:23:30.653999 12 controller.go:231] Updating CRD OpenAPI spec because operatorpkis.network.operator.openshift.io changed 2025-12-12T16:23:30.654271950+00:00 stderr F I1212 16:23:30.654153 12 controller.go:231] Updating CRD OpenAPI spec because prometheusrules.monitoring.coreos.com changed 2025-12-12T16:23:30.654271950+00:00 stderr F I1212 16:23:30.654215 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigurations.operator.openshift.io changed 2025-12-12T16:23:30.654441605+00:00 stderr F I1212 16:23:30.654381 12 controller.go:231] Updating CRD OpenAPI spec because kubeletconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:23:30.654451145+00:00 stderr F I1212 
16:23:30.654428 12 controller.go:231] Updating CRD OpenAPI spec because machineautoscalers.autoscaling.openshift.io changed 2025-12-12T16:23:30.654735302+00:00 stderr F I1212 16:23:30.654656 12 controller.go:231] Updating CRD OpenAPI spec because egressservices.k8s.ovn.org changed 2025-12-12T16:23:30.654735302+00:00 stderr F I1212 16:23:30.654675 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigpools.machineconfiguration.openshift.io changed 2025-12-12T16:23:30.654735302+00:00 stderr F I1212 16:23:30.654701 12 controller.go:231] Updating CRD OpenAPI spec because thanosrulers.monitoring.coreos.com changed 2025-12-12T16:23:30.654966038+00:00 stderr F I1212 16:23:30.654833 12 controller.go:231] Updating CRD OpenAPI spec because userdefinednetworks.k8s.ovn.org changed 2025-12-12T16:23:30.654966038+00:00 stderr F I1212 16:23:30.654864 12 controller.go:231] Updating CRD OpenAPI spec because subscriptions.operators.coreos.com changed 2025-12-12T16:23:30.655505241+00:00 stderr F I1212 16:23:30.655404 12 controller.go:231] Updating CRD OpenAPI spec because consoleclidownloads.console.openshift.io changed 2025-12-12T16:23:30.655505241+00:00 stderr F I1212 16:23:30.655422 12 controller.go:231] Updating CRD OpenAPI spec because consoleplugins.console.openshift.io changed 2025-12-12T16:23:30.655675796+00:00 stderr F I1212 16:23:30.655582 12 controller.go:231] Updating CRD OpenAPI spec because metal3remediationtemplates.infrastructure.cluster.x-k8s.io changed 2025-12-12T16:23:30.655690196+00:00 stderr F I1212 16:23:30.655652 12 controller.go:231] Updating CRD OpenAPI spec because overlappingrangeipreservations.whereabouts.cni.cncf.io changed 2025-12-12T16:23:30.655690196+00:00 stderr F I1212 16:23:30.655669 12 controller.go:231] Updating CRD OpenAPI spec because schedulers.config.openshift.io changed 2025-12-12T16:23:30.655769918+00:00 stderr F I1212 16:23:30.655709 12 controller.go:231] Updating CRD OpenAPI spec because containerruntimeconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:23:30.655902041+00:00 stderr F I1212 16:23:30.655812 12 controller.go:231] Updating CRD OpenAPI spec because dnsrecords.ingress.operator.openshift.io changed 2025-12-12T16:23:30.656019024+00:00 stderr F I1212 16:23:30.655966 12 controller.go:231] Updating CRD OpenAPI spec because networks.operator.openshift.io changed 2025-12-12T16:23:30.656052835+00:00 stderr F I1212 16:23:30.656020 12 controller.go:231] Updating CRD OpenAPI spec because httproutes.gateway.networking.k8s.io changed 2025-12-12T16:23:30.656599049+00:00 stderr F I1212 16:23:30.656518 12 controller.go:231] Updating CRD OpenAPI spec because clusterversions.config.openshift.io changed 2025-12-12T16:23:30.656599049+00:00 stderr F I1212 16:23:30.656546 12 controller.go:231] Updating CRD OpenAPI spec because consolequickstarts.console.openshift.io changed 2025-12-12T16:23:30.656655150+00:00 stderr F I1212 16:23:30.656612 12 controller.go:231] Updating CRD OpenAPI spec because ingresscontrollers.operator.openshift.io changed 2025-12-12T16:23:30.657198734+00:00 stderr F I1212 16:23:30.657097 12 controller.go:231] Updating CRD OpenAPI spec because machines.machine.openshift.io changed 2025-12-12T16:23:30.657748998+00:00 stderr F I1212 16:23:30.657671 12 controller.go:231] Updating CRD OpenAPI spec because apiservers.config.openshift.io changed 2025-12-12T16:23:30.657748998+00:00 stderr F I1212 16:23:30.657694 12 controller.go:231] Updating CRD OpenAPI spec because authentications.config.openshift.io changed 
2025-12-12T16:23:30.657860561+00:00 stderr F I1212 16:23:30.657779 12 controller.go:231] Updating CRD OpenAPI spec because egressqoses.k8s.ovn.org changed 2025-12-12T16:23:30.658028615+00:00 stderr F I1212 16:23:30.657939 12 controller.go:231] Updating CRD OpenAPI spec because helmchartrepositories.helm.openshift.io changed 2025-12-12T16:23:30.658329013+00:00 stderr F I1212 16:23:30.658073 12 controller.go:231] Updating CRD OpenAPI spec because kubecontrollermanagers.operator.openshift.io changed 2025-12-12T16:23:30.658329013+00:00 stderr F I1212 16:23:30.658101 12 controller.go:231] Updating CRD OpenAPI spec because podnetworkconnectivitychecks.controlplane.operator.openshift.io changed 2025-12-12T16:23:30.658715142+00:00 stderr F I1212 16:23:30.658629 12 controller.go:231] Updating CRD OpenAPI spec because apirequestcounts.apiserver.openshift.io changed 2025-12-12T16:23:30.658715142+00:00 stderr F I1212 16:23:30.658661 12 controller.go:231] Updating CRD OpenAPI spec because featuregates.config.openshift.io changed 2025-12-12T16:23:30.658715142+00:00 stderr F I1212 16:23:30.658690 12 controller.go:231] Updating CRD OpenAPI spec because nodes.config.openshift.io changed 2025-12-12T16:23:30.658794254+00:00 stderr F I1212 16:23:30.658726 12 controller.go:231] Updating CRD OpenAPI spec because controllerconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:23:30.658794254+00:00 stderr F I1212 16:23:30.658762 12 controller.go:231] Updating CRD OpenAPI spec because csisnapshotcontrollers.operator.openshift.io changed 2025-12-12T16:23:30.658794254+00:00 stderr F I1212 16:23:30.658771 12 controller.go:231] Updating CRD OpenAPI spec because installplans.operators.coreos.com changed 2025-12-12T16:23:30.658883887+00:00 stderr F I1212 16:23:30.658800 12 controller.go:231] Updating CRD OpenAPI spec because oauths.config.openshift.io changed 2025-12-12T16:23:30.658883887+00:00 stderr F I1212 16:23:30.658826 12 controller.go:231] Updating CRD OpenAPI spec because referencegrants.gateway.networking.k8s.io changed 2025-12-12T16:23:30.658883887+00:00 stderr F I1212 16:23:30.658848 12 controller.go:231] Updating CRD OpenAPI spec because images.config.openshift.io changed 2025-12-12T16:23:30.658883887+00:00 stderr F I1212 16:23:30.658865 12 controller.go:231] Updating CRD OpenAPI spec because machineosbuilds.machineconfiguration.openshift.io changed 2025-12-12T16:23:30.658927278+00:00 stderr F I1212 16:23:30.658886 12 controller.go:231] Updating CRD OpenAPI spec because openshiftapiservers.operator.openshift.io changed 2025-12-12T16:23:30.658951508+00:00 stderr F I1212 16:23:30.658918 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagers.monitoring.coreos.com changed 2025-12-12T16:23:30.658951508+00:00 stderr F I1212 16:23:30.658938 12 controller.go:231] Updating CRD OpenAPI spec because consolelinks.console.openshift.io changed 2025-12-12T16:23:30.659034760+00:00 stderr F I1212 16:23:30.658979 12 controller.go:231] Updating CRD OpenAPI spec because controlplanemachinesets.machine.openshift.io changed 2025-12-12T16:23:30.659034760+00:00 stderr F I1212 16:23:30.659001 12 controller.go:231] Updating CRD OpenAPI spec because etcds.operator.openshift.io changed 2025-12-12T16:23:30.659067461+00:00 stderr F I1212 16:23:30.659034 12 controller.go:231] Updating CRD OpenAPI spec because machinehealthchecks.machine.openshift.io changed 2025-12-12T16:23:30.659067461+00:00 stderr F I1212 16:23:30.659051 12 controller.go:231] Updating CRD OpenAPI spec because storagestates.migration.k8s.io 
changed 2025-12-12T16:23:30.659151703+00:00 stderr F I1212 16:23:30.659079 12 controller.go:231] Updating CRD OpenAPI spec because builds.config.openshift.io changed 2025-12-12T16:23:30.659175064+00:00 stderr F I1212 16:23:30.659138 12 controller.go:231] Updating CRD OpenAPI spec because egressips.k8s.ovn.org changed 2025-12-12T16:23:30.659175064+00:00 stderr F I1212 16:23:30.659154 12 controller.go:231] Updating CRD OpenAPI spec because metal3remediations.infrastructure.cluster.x-k8s.io changed 2025-12-12T16:23:30.659203445+00:00 stderr F I1212 16:23:30.659165 12 controller.go:231] Updating CRD OpenAPI spec because storageversionmigrations.migration.k8s.io changed 2025-12-12T16:23:30.659246236+00:00 stderr F I1212 16:23:30.659207 12 controller.go:231] Updating CRD OpenAPI spec because imagecontentsourcepolicies.operator.openshift.io changed 2025-12-12T16:23:30.659268086+00:00 stderr F I1212 16:23:30.659236 12 controller.go:231] Updating CRD OpenAPI spec because olmconfigs.operators.coreos.com changed 2025-12-12T16:23:30.659268086+00:00 stderr F I1212 16:23:30.659246 12 controller.go:231] Updating CRD OpenAPI spec because alertingrules.monitoring.openshift.io changed 2025-12-12T16:23:30.659315007+00:00 stderr F I1212 16:23:30.659277 12 controller.go:231] Updating CRD OpenAPI spec because ipamclaims.k8s.cni.cncf.io changed 2025-12-12T16:23:30.659417510+00:00 stderr F I1212 16:23:30.659369 12 controller.go:231] Updating CRD OpenAPI spec because servicemonitors.monitoring.coreos.com changed 2025-12-12T16:23:30.659417510+00:00 stderr F I1212 16:23:30.659386 12 controller.go:231] Updating CRD OpenAPI spec because storages.operator.openshift.io changed 2025-12-12T16:23:30.659426400+00:00 stderr F I1212 16:23:30.659408 12 controller.go:231] Updating CRD OpenAPI spec because configs.imageregistry.operator.openshift.io changed 2025-12-12T16:23:30.659620605+00:00 stderr F I1212 16:23:30.659549 12 controller.go:231] Updating CRD OpenAPI spec because dnses.config.openshift.io changed 2025-12-12T16:23:30.659620605+00:00 stderr F I1212 16:23:30.659571 12 controller.go:231] Updating CRD OpenAPI spec because prometheuses.monitoring.coreos.com changed 2025-12-12T16:23:30.659655286+00:00 stderr F I1212 16:23:30.659612 12 controller.go:231] Updating CRD OpenAPI spec because proxies.config.openshift.io changed 2025-12-12T16:23:30.659655286+00:00 stderr F I1212 16:23:30.659639 12 controller.go:231] Updating CRD OpenAPI spec because clusterautoscalers.autoscaling.openshift.io changed 2025-12-12T16:23:30.659713387+00:00 stderr F I1212 16:23:30.659671 12 controller.go:231] Updating CRD OpenAPI spec because imagepruners.imageregistry.operator.openshift.io changed 2025-12-12T16:23:30.659713387+00:00 stderr F I1212 16:23:30.659699 12 controller.go:231] Updating CRD OpenAPI spec because adminpolicybasedexternalroutes.k8s.ovn.org changed 2025-12-12T16:23:30.659789249+00:00 stderr F I1212 16:23:30.659726 12 controller.go:231] Updating CRD OpenAPI spec because clusteroperators.config.openshift.io changed 2025-12-12T16:23:30.659813650+00:00 stderr F I1212 16:23:30.659777 12 controller.go:231] Updating CRD OpenAPI spec because clusterserviceversions.operators.coreos.com changed 2025-12-12T16:23:30.659866831+00:00 stderr F I1212 16:23:30.659828 12 controller.go:231] Updating CRD OpenAPI spec because clusteruserdefinednetworks.k8s.ovn.org changed 2025-12-12T16:23:30.659896702+00:00 stderr F I1212 16:23:30.659858 12 controller.go:231] Updating CRD OpenAPI spec because dnses.operator.openshift.io changed 
2025-12-12T16:23:30.659929623+00:00 stderr F I1212 16:23:30.659886 12 controller.go:231] Updating CRD OpenAPI spec because gateways.gateway.networking.k8s.io changed 2025-12-12T16:23:30.659970864+00:00 stderr F I1212 16:23:30.659932 12 controller.go:231] Updating CRD OpenAPI spec because infrastructures.config.openshift.io changed 2025-12-12T16:23:30.660026485+00:00 stderr F I1212 16:23:30.659977 12 controller.go:231] Updating CRD OpenAPI spec because ipaddresses.ipam.cluster.x-k8s.io changed 2025-12-12T16:23:30.660026485+00:00 stderr F I1212 16:23:30.659997 12 controller.go:231] Updating CRD OpenAPI spec because ingresses.config.openshift.io changed 2025-12-12T16:23:30.660076247+00:00 stderr F I1212 16:23:30.660028 12 controller.go:231] Updating CRD OpenAPI spec because kubestorageversionmigrators.operator.openshift.io changed 2025-12-12T16:23:30.660132618+00:00 stderr F I1212 16:23:30.660090 12 controller.go:231] Updating CRD OpenAPI spec because machineconfignodes.machineconfiguration.openshift.io changed 2025-12-12T16:23:30.660132618+00:00 stderr F I1212 16:23:30.660105 12 controller.go:231] Updating CRD OpenAPI spec because machinesets.machine.openshift.io changed 2025-12-12T16:23:30.660132618+00:00 stderr F I1212 16:23:30.660118 12 controller.go:231] Updating CRD OpenAPI spec because podmonitors.monitoring.coreos.com changed 2025-12-12T16:23:30.660232961+00:00 stderr F I1212 16:23:30.660170 12 controller.go:231] Updating CRD OpenAPI spec because rolebindingrestrictions.authorization.openshift.io changed 2025-12-12T16:23:30.660232961+00:00 stderr F I1212 16:23:30.660218 12 controller.go:231] Updating CRD OpenAPI spec because consoleyamlsamples.console.openshift.io changed 2025-12-12T16:23:30.660286982+00:00 stderr F I1212 16:23:30.660257 12 controller.go:231] Updating CRD OpenAPI spec because ipaddressclaims.ipam.cluster.x-k8s.io changed 2025-12-12T16:23:30.660286982+00:00 stderr F I1212 16:23:30.660279 12 controller.go:231] Updating CRD OpenAPI spec because openshiftcontrollermanagers.operator.openshift.io changed 2025-12-12T16:23:30.660580279+00:00 stderr F I1212 16:23:30.660497 12 controller.go:231] Updating CRD OpenAPI spec because securitycontextconstraints.security.openshift.io changed 2025-12-12T16:23:30.660580279+00:00 stderr F I1212 16:23:30.660522 12 controller.go:231] Updating CRD OpenAPI spec because alertrelabelconfigs.monitoring.openshift.io changed 2025-12-12T16:23:30.660786594+00:00 stderr F I1212 16:23:30.660679 12 controller.go:231] Updating CRD OpenAPI spec because authentications.operator.openshift.io changed 2025-12-12T16:23:30.660786594+00:00 stderr F I1212 16:23:30.660728 12 controller.go:231] Updating CRD OpenAPI spec because imagedigestmirrorsets.config.openshift.io changed 2025-12-12T16:23:30.660786594+00:00 stderr F I1212 16:23:30.660739 12 controller.go:231] Updating CRD OpenAPI spec because pinnedimagesets.machineconfiguration.openshift.io changed 2025-12-12T16:23:30.660979899+00:00 stderr F I1212 16:23:30.660807 12 controller.go:231] Updating CRD OpenAPI spec because baselineadminnetworkpolicies.policy.networking.k8s.io changed 2025-12-12T16:23:30.660979899+00:00 stderr F I1212 16:23:30.660847 12 controller.go:231] Updating CRD OpenAPI spec because configs.samples.operator.openshift.io changed 2025-12-12T16:23:30.660979899+00:00 stderr F I1212 16:23:30.660901 12 controller.go:231] Updating CRD OpenAPI spec because consoles.operator.openshift.io changed 2025-12-12T16:23:30.660979899+00:00 stderr F I1212 16:23:30.660946 12 controller.go:231] Updating CRD 
OpenAPI spec because machineconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:23:30.661070122+00:00 stderr F I1212 16:23:30.660958 12 controller.go:231] Updating CRD OpenAPI spec because kubeapiservers.operator.openshift.io changed 2025-12-12T16:23:30.661070122+00:00 stderr F I1212 16:23:30.660975 12 controller.go:231] Updating CRD OpenAPI spec because projects.config.openshift.io changed 2025-12-12T16:23:30.661070122+00:00 stderr F I1212 16:23:30.660985 12 controller.go:231] Updating CRD OpenAPI spec because kubeschedulers.operator.openshift.io changed 2025-12-12T16:23:30.661083562+00:00 stderr F I1212 16:23:30.661030 12 controller.go:231] Updating CRD OpenAPI spec because networks.config.openshift.io changed 2025-12-12T16:23:30.661083562+00:00 stderr F I1212 16:23:30.661064 12 controller.go:231] Updating CRD OpenAPI spec because nodeslicepools.whereabouts.cni.cncf.io changed 2025-12-12T16:23:30.661092902+00:00 stderr F I1212 16:23:30.661080 12 controller.go:231] Updating CRD OpenAPI spec because clustercsidrivers.operator.openshift.io changed 2025-12-12T16:23:30.661264996+00:00 stderr F I1212 16:23:30.661193 12 controller.go:231] Updating CRD OpenAPI spec because consoleexternalloglinks.console.openshift.io changed 2025-12-12T16:23:30.661264996+00:00 stderr F I1212 16:23:30.661218 12 controller.go:231] Updating CRD OpenAPI spec because grpcroutes.gateway.networking.k8s.io changed 2025-12-12T16:23:30.661303117+00:00 stderr F I1212 16:23:30.661254 12 controller.go:231] Updating CRD OpenAPI spec because catalogsources.operators.coreos.com changed 2025-12-12T16:23:30.661371509+00:00 stderr F I1212 16:23:30.661322 12 controller.go:231] Updating CRD OpenAPI spec because operatorgroups.operators.coreos.com changed 2025-12-12T16:23:30.661408880+00:00 stderr F I1212 16:23:30.661363 12 controller.go:231] Updating CRD OpenAPI spec because operatorhubs.config.openshift.io changed 2025-12-12T16:23:30.661408880+00:00 stderr F I1212 16:23:30.661384 12 controller.go:231] Updating CRD OpenAPI spec because servicecas.operator.openshift.io changed 2025-12-12T16:23:40.536493450+00:00 stderr F I1212 16:23:40.536265 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0012641363290150122 seatDemandStdev=0.03721897817359884 seatDemandSmoothed=7.091079600742691 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:23:50.536771688+00:00 stderr F I1212 16:23:50.536562 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.00802448826960451 seatDemandStdev=0.1585612868327276 seatDemandSmoothed=6.931816242752963 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:24:00.538034789+00:00 stderr F I1212 16:24:00.537823 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0009330054085432804 seatDemandStdev=0.030530884517991113 seatDemandSmoothed=6.773108138637955 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:24:10.539466413+00:00 stderr F I1212 16:24:10.539252 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.001534502540156092 seatDemandStdev=0.041755628806620766 seatDemandSmoothed=6.618322324470258 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:24:20.541438901+00:00 stderr F I1212 16:24:20.540601 12 apf_controller.go:493] "Update 
CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0013298730812668354 seatDemandStdev=0.03644316834270255 seatDemandSmoothed=6.466969690960193 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:24:40.543655895+00:00 stderr F I1212 16:24:40.542827 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.001759834094244392 seatDemandStdev=0.043882207107266716 seatDemandSmoothed=6.174853191189736 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:24:50.543926092+00:00 stderr F I1212 16:24:50.543374 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.008153767398789702 seatDemandStdev=0.15844495187397975 seatDemandSmoothed=6.036663338335646 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:25:00.544721089+00:00 stderr F I1212 16:25:00.544410 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.004969982187849873 seatDemandStdev=0.07032269523349008 seatDemandSmoothed=5.899551813134616 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:25:04.330488631+00:00 stderr F I1212 16:25:04.330032 12 cacher.go:847] cacher (secrets): 1 objects queued in incoming channel. 2025-12-12T16:25:04.330488631+00:00 stderr F I1212 16:25:04.330069 12 cacher.go:847] cacher (secrets): 2 objects queued in incoming channel. 2025-12-12T16:25:04.426953424+00:00 stderr F I1212 16:25:04.426770 12 cacher.go:847] cacher (rolebindings.rbac.authorization.k8s.io): 1 objects queued in incoming channel. 2025-12-12T16:25:04.426953424+00:00 stderr F I1212 16:25:04.426810 12 cacher.go:847] cacher (rolebindings.rbac.authorization.k8s.io): 2 objects queued in incoming channel. 
2025-12-12T16:25:10.545301376+00:00 stderr F I1212 16:25:10.545006 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.023583704978021265 seatDemandStdev=0.20749475695557995 seatDemandSmoothed=5.769176926056993 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false
2025-12-12T16:25:20.546682693+00:00 stderr F I1212 16:25:20.546345 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=3 seatDemandAvg=0.014051506704205447 seatDemandStdev=0.11952230473801344 seatDemandSmoothed=5.639558054420853 fairFrac=2.2796127562642368 currentCL=3 concurrencyDenominator=3 backstop=false
2025-12-12T16:25:27.080913546+00:00 stderr F I1212 16:25:27.080757 12 controller.go:667] quota admission added evaluator for: controllerrevisions.apps
2025-12-12T16:25:30.547105052+00:00 stderr F I1212 16:25:30.546897 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0011768994968583788 seatDemandStdev=0.034285775540778905 seatDemandSmoothed=5.510663860695039 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false
2025-12-12T16:25:40.548510088+00:00 stderr F I1212 16:25:40.548290 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0031262869439543043 seatDemandStdev=0.0564649382838896 seatDemandSmoothed=5.385289190079293 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false
2025-12-12T16:25:50.549300630+00:00 stderr F I1212 16:25:50.549079 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.008788185336799958 seatDemandStdev=0.1701545496438442 seatDemandSmoothed=5.265543221612024 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false
2025-12-12T16:26:00.551025392+00:00 stderr F I1212 16:26:00.549788 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0011959739920522695 seatDemandStdev=0.034562170624291004 seatDemandSmoothed=5.145258164841123 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false
2025-12-12T16:26:09.225221992+00:00 stderr F I1212 16:26:09.224992 12 cacher.go:847] cacher (leases.coordination.k8s.io): 1 objects queued in incoming channel.
2025-12-12T16:26:09.225221992+00:00 stderr F I1212 16:26:09.225037 12 cacher.go:847] cacher (leases.coordination.k8s.io): 2 objects queued in incoming channel.
2025-12-12T16:26:15.056361886+00:00 stderr F I1212 16:26:15.056161 12 cacher.go:847] cacher (serviceaccounts): 1 objects queued in incoming channel.
2025-12-12T16:26:15.056361886+00:00 stderr F I1212 16:26:15.056213 12 cacher.go:847] cacher (serviceaccounts): 2 objects queued in incoming channel.
2025-12-12T16:26:20.552053696+00:00 stderr F I1212 16:26:20.551869 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.011706723740290415 seatDemandStdev=0.10906083393420049 seatDemandSmoothed=4.9150829862982786 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false
2025-12-12T16:26:24.857503537+00:00 stderr F I1212 16:26:24.856616 12 cacher.go:847] cacher (machineconfignodes.machineconfiguration.openshift.io): 1 objects queued in incoming channel.
2025-12-12T16:26:30.553471276+00:00 stderr F I1212 16:26:30.553244 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.002348370101018507 seatDemandStdev=0.048403050099008735 seatDemandSmoothed=4.803203360278019 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false
2025-12-12T16:26:38.919452827+00:00 stderr F I1212 16:26:38.918494 12 cidrallocator.go:241] syncing ServiceCIDR allocators
2025-12-12T16:26:38.919452827+00:00 stderr F I1212 16:26:38.918529 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23
2025-12-12T16:26:38.919452827+00:00 stderr F I1212 16:26:38.918536 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 52.241µs
2025-12-12T16:26:39.822565843+00:00 stderr F I1212 16:26:39.822415 12 controller.go:667] quota admission added evaluator for: operatorgroups.operators.coreos.com
2025-12-12T16:26:39.822692177+00:00 stderr F I1212 16:26:39.822509 12 controller.go:667] quota admission added evaluator for: operatorgroups.operators.coreos.com
2025-12-12T16:26:39.953941241+00:00 stderr F I1212 16:26:39.948791 12 controller.go:667] quota admission added evaluator for: poddisruptionbudgets.policy
2025-12-12T16:26:40.940116066+00:00 stderr F I1212 16:26:40.939958 12 controller.go:667] quota admission added evaluator for: subscriptions.operators.coreos.com
2025-12-12T16:26:40.940116066+00:00 stderr F I1212 16:26:40.940064 12 controller.go:667] quota admission added evaluator for: subscriptions.operators.coreos.com
2025-12-12T16:26:42.032986686+00:00 stderr F I1212 16:26:42.032810 12 cacher.go:847] cacher (configmaps): 1 objects queued in incoming channel.
2025-12-12T16:26:42.032986686+00:00 stderr F I1212 16:26:42.032840 12 cacher.go:847] cacher (configmaps): 2 objects queued in incoming channel.
2025-12-12T16:26:42.794426703+00:00 stderr F I1212 16:26:42.794268 12 controller.go:667] quota admission added evaluator for: jobs.batch
2025-12-12T16:26:42.794426703+00:00 stderr F I1212 16:26:42.794368 12 controller.go:667] quota admission added evaluator for: jobs.batch
2025-12-12T16:26:49.556112106+00:00 stderr F W1212 16:26:49.555896 12 watcher.go:338] watch chan error: etcdserver: mvcc: required revision has been compacted
2025-12-12T16:26:50.555864723+00:00 stderr F I1212 16:26:50.555624 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.013112020092940253 seatDemandStdev=0.17511230824097976 seatDemandSmoothed=4.590429203489103 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false
2025-12-12T16:26:51.593997929+00:00 stderr F I1212 16:26:51.593781 12 controller.go:667] quota admission added evaluator for: installplans.operators.coreos.com
2025-12-12T16:26:51.594072391+00:00 stderr F I1212 16:26:51.593966 12 controller.go:667] quota admission added evaluator for: installplans.operators.coreos.com
2025-12-12T16:26:52.358948465+00:00 stderr F I1212 16:26:52.358725 12 controller.go:667] quota admission added evaluator for: clusterserviceversions.operators.coreos.com
2025-12-12T16:26:52.358948465+00:00 stderr F I1212 16:26:52.358839 12 controller.go:667] quota admission added evaluator for: clusterserviceversions.operators.coreos.com
2025-12-12T16:26:52.381697939+00:00 stderr F I1212 16:26:52.377277 12 controller.go:667] quota admission added evaluator for: operatorconditions.operators.coreos.com
2025-12-12T16:26:52.381697939+00:00 stderr F I1212 16:26:52.377335 12 controller.go:667] quota admission added evaluator for: operatorconditions.operators.coreos.com
2025-12-12T16:26:53.422985116+00:00 stderr F I1212 16:26:53.422769 12 controller.go:237] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.rhobs changed
2025-12-12T16:26:53.422985116+00:00 stderr F I1212 16:26:53.422941 12 handler.go:288] Adding GroupVersion monitoring.rhobs v1alpha1 to ResourceManager
2025-12-12T16:26:53.958911986+00:00 stderr F I1212 16:26:53.958010 12 controller.go:237] Updating CRD OpenAPI spec because alertmanagers.monitoring.rhobs changed
2025-12-12T16:26:53.958911986+00:00 stderr F I1212 16:26:53.958114 12 handler.go:288] Adding GroupVersion monitoring.rhobs v1 to ResourceManager
2025-12-12T16:26:53.962823905+00:00 stderr F I1212 16:26:53.962716 12 controller.go:237] Updating CRD OpenAPI spec because monitoringstacks.monitoring.rhobs changed
2025-12-12T16:26:53.963915462+00:00 stderr F I1212 16:26:53.963017 12 handler.go:288] Adding GroupVersion monitoring.rhobs v1alpha1 to ResourceManager
2025-12-12T16:26:53.988354560+00:00 stderr F I1212 16:26:53.982097 12 controller.go:237] Updating CRD OpenAPI spec because podmonitors.monitoring.rhobs changed
2025-12-12T16:26:53.988354560+00:00 stderr F I1212 16:26:53.982401 12 handler.go:288] Adding GroupVersion monitoring.rhobs v1 to ResourceManager
2025-12-12T16:26:54.041310247+00:00 stderr F I1212 16:26:54.038781 12 controller.go:237] Updating CRD OpenAPI spec because probes.monitoring.rhobs changed
2025-12-12T16:26:54.041310247+00:00 stderr F I1212 16:26:54.038826 12 handler.go:288] Adding GroupVersion monitoring.rhobs v1 to ResourceManager
2025-12-12T16:26:54.311559124+00:00 stderr F I1212 16:26:54.310534 12 cacher.go:847] cacher (customresourcedefinitions.apiextensions.k8s.io): 1 objects queued in incoming channel.
2025-12-12T16:26:54.311559124+00:00 stderr F I1212 16:26:54.310936 12 cacher.go:847] cacher (customresourcedefinitions.apiextensions.k8s.io): 2 objects queued in incoming channel. 2025-12-12T16:26:54.614553869+00:00 stderr F I1212 16:26:54.613540 12 trace.go:236] Trace[2007362765]: "Update" accept:application/vnd.kubernetes.protobuf, */*,audit-id:4768b696-fd3a-4ef3-9e9c-55fcc58c0c57,client:::1,api-group:apiextensions.k8s.io,api-version:v1,name:prometheusagents.monitoring.rhobs,subresource:status,namespace:,protocol:HTTP/2.0,resource:customresourcedefinitions,scope:resource,url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/prometheusagents.monitoring.rhobs/status,user-agent:kube-apiserver/v1.33.5 (linux/amd64) kubernetes/27f72e0,verb:PUT (12-Dec-2025 16:26:54.042) (total time: 571ms): 2025-12-12T16:26:54.614553869+00:00 stderr F Trace[2007362765]: ---"Conversion done" 18ms (16:26:54.068) 2025-12-12T16:26:54.614553869+00:00 stderr F Trace[2007362765]: ["GuaranteedUpdate etcd3" audit-id:4768b696-fd3a-4ef3-9e9c-55fcc58c0c57,key:/apiextensions.k8s.io/customresourcedefinitions/prometheusagents.monitoring.rhobs,type:*apiextensions.CustomResourceDefinition,resource:customresourcedefinitions.apiextensions.k8s.io 529ms (16:26:54.083) 2025-12-12T16:26:54.614553869+00:00 stderr F Trace[2007362765]: ---"About to Encode" 342ms (16:26:54.444)] 2025-12-12T16:26:54.614553869+00:00 stderr F Trace[2007362765]: [571.383805ms] [571.383805ms] END 2025-12-12T16:26:54.617893393+00:00 stderr F I1212 16:26:54.617520 12 trace.go:236] Trace[732011009]: "Patch" accept:application/vnd.kubernetes.protobuf;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json,audit-id:5ad6e490-292d-4b74-b509-9ec983be3799,client:10.217.0.21,api-group:apiextensions.k8s.io,api-version:v1,name:alertmanagerconfigs.monitoring.rhobs,subresource:,namespace:,protocol:HTTP/2.0,resource:customresourcedefinitions,scope:resource,url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/alertmanagerconfigs.monitoring.rhobs,user-agent:olm/v0.0.0 (linux/amd64) kubernetes/$Format,verb:PATCH (12-Dec-2025 16:26:52.553) (total time: 2064ms): 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ["GuaranteedUpdate etcd3" audit-id:5ad6e490-292d-4b74-b509-9ec983be3799,key:/apiextensions.k8s.io/customresourcedefinitions/alertmanagerconfigs.monitoring.rhobs,type:*apiextensions.CustomResourceDefinition,resource:customresourcedefinitions.apiextensions.k8s.io 2057ms (16:26:52.559) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"initial value restored" 29ms (16:26:52.589) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"About to Encode" 433ms (16:26:53.022) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"Encode succeeded" len:354069 25ms (16:26:53.048) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"Retry value restored" 73ms (16:26:53.130) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"About to Encode" 401ms (16:26:53.531) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"Retry value restored" 68ms (16:26:53.621) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"About to Encode" 771ms (16:26:54.392) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"Encode succeeded" len:355066 104ms (16:26:54.496) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"Txn call completed" 34ms (16:26:54.530) 
2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"decode succeeded" len:355066 86ms (16:26:54.617)] 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"About to check admission control" 256ms (16:26:52.845) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"About to apply patch" 284ms (16:26:53.130) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"About to check admission control" 264ms (16:26:53.394) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"About to apply patch" 226ms (16:26:53.621) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"About to check admission control" 444ms (16:26:54.066) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: ---"Object stored in database" 551ms (16:26:54.617) 2025-12-12T16:26:54.617893393+00:00 stderr F Trace[732011009]: [2.064136516s] [2.064136516s] END 2025-12-12T16:26:54.661047383+00:00 stderr F I1212 16:26:54.660906 12 controller.go:237] Updating CRD OpenAPI spec because prometheusagents.monitoring.rhobs changed 2025-12-12T16:26:54.661380722+00:00 stderr F I1212 16:26:54.661332 12 handler.go:288] Adding GroupVersion monitoring.rhobs v1alpha1 to ResourceManager 2025-12-12T16:26:54.668887011+00:00 stderr F I1212 16:26:54.668710 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.rhobs changed 2025-12-12T16:26:54.775973257+00:00 stderr F I1212 16:26:54.775656 12 trace.go:236] Trace[1220683174]: "Update" accept:application/vnd.kubernetes.protobuf, */*,audit-id:ba592853-fe53-46ab-b5d3-4f2607852459,client:::1,api-group:apiextensions.k8s.io,api-version:v1,name:prometheuses.monitoring.rhobs,subresource:status,namespace:,protocol:HTTP/2.0,resource:customresourcedefinitions,scope:resource,url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/prometheuses.monitoring.rhobs/status,user-agent:kube-apiserver/v1.33.5 (linux/amd64) kubernetes/27f72e0,verb:PUT (12-Dec-2025 16:26:54.271) (total time: 504ms): 2025-12-12T16:26:54.775973257+00:00 stderr F Trace[1220683174]: ---"limitedReadBody succeeded" len:461376 34ms (16:26:54.305) 2025-12-12T16:26:54.775973257+00:00 stderr F Trace[1220683174]: ---"Conversion done" 122ms (16:26:54.428) 2025-12-12T16:26:54.775973257+00:00 stderr F Trace[1220683174]: ---"About to store object in database" 58ms (16:26:54.487) 2025-12-12T16:26:54.775973257+00:00 stderr F Trace[1220683174]: [504.085635ms] [504.085635ms] END 2025-12-12T16:26:54.783620620+00:00 stderr F I1212 16:26:54.783170 12 controller.go:231] Updating CRD OpenAPI spec because podmonitors.monitoring.rhobs changed 2025-12-12T16:26:54.793147461+00:00 stderr F I1212 16:26:54.793037 12 controller.go:231] Updating CRD OpenAPI spec because monitoringstacks.monitoring.rhobs changed 2025-12-12T16:26:54.793147461+00:00 stderr F I1212 16:26:54.793073 12 controller.go:231] Updating CRD OpenAPI spec because probes.monitoring.rhobs changed 2025-12-12T16:26:55.193549066+00:00 stderr F I1212 16:26:55.193257 12 handler.go:288] Adding GroupVersion monitoring.rhobs v1 to ResourceManager 2025-12-12T16:26:55.193549066+00:00 stderr F I1212 16:26:55.193324 12 controller.go:237] Updating CRD OpenAPI spec because prometheuses.monitoring.rhobs changed 2025-12-12T16:26:55.194603353+00:00 stderr F I1212 16:26:55.194530 12 controller.go:237] Updating CRD OpenAPI spec because prometheusrules.monitoring.rhobs changed 2025-12-12T16:26:55.229208937+00:00 stderr F I1212 16:26:55.228880 12 trace.go:236] Trace[451929101]: "Patch" 
accept:application/vnd.kubernetes.protobuf;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json,audit-id:571fb8f9-58be-4dbe-986a-d7532eca800c,client:10.217.0.21,api-group:apiextensions.k8s.io,api-version:v1,name:alertmanagers.monitoring.rhobs,subresource:,namespace:,protocol:HTTP/2.0,resource:customresourcedefinitions,scope:resource,url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/alertmanagers.monitoring.rhobs,user-agent:olm/v0.0.0 (linux/amd64) kubernetes/$Format,verb:PATCH (12-Dec-2025 16:26:54.671) (total time: 557ms): 2025-12-12T16:26:55.229208937+00:00 stderr F Trace[451929101]: ["GuaranteedUpdate etcd3" audit-id:571fb8f9-58be-4dbe-986a-d7532eca800c,key:/apiextensions.k8s.io/customresourcedefinitions/alertmanagers.monitoring.rhobs,type:*apiextensions.CustomResourceDefinition,resource:customresourcedefinitions.apiextensions.k8s.io 550ms (16:26:54.678) 2025-12-12T16:26:55.229208937+00:00 stderr F Trace[451929101]: ---"About to Encode" 490ms (16:26:55.187)] 2025-12-12T16:26:55.229208937+00:00 stderr F Trace[451929101]: ---"About to check admission control" 184ms (16:26:54.881) 2025-12-12T16:26:55.229208937+00:00 stderr F Trace[451929101]: ---"Object stored in database" 347ms (16:26:55.228) 2025-12-12T16:26:55.229208937+00:00 stderr F Trace[451929101]: [557.229097ms] [557.229097ms] END 2025-12-12T16:26:55.245083198+00:00 stderr F I1212 16:26:55.244926 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagers.monitoring.rhobs changed 2025-12-12T16:26:55.292390623+00:00 stderr F I1212 16:26:55.292235 12 trace.go:236] Trace[1614406269]: "Update" accept:application/vnd.kubernetes.protobuf, */*,audit-id:afd2d92c-08b6-47a5-9433-52d4b049ab75,client:::1,api-group:apiextensions.k8s.io,api-version:v1,name:scrapeconfigs.monitoring.rhobs,subresource:status,namespace:,protocol:HTTP/2.0,resource:customresourcedefinitions,scope:resource,url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/scrapeconfigs.monitoring.rhobs/status,user-agent:kube-apiserver/v1.33.5 (linux/amd64) kubernetes/27f72e0,verb:PUT (12-Dec-2025 16:26:54.790) (total time: 501ms): 2025-12-12T16:26:55.292390623+00:00 stderr F Trace[1614406269]: ---"Conversion done" 27ms (16:26:54.819) 2025-12-12T16:26:55.292390623+00:00 stderr F Trace[1614406269]: ---"About to store object in database" 112ms (16:26:54.932) 2025-12-12T16:26:55.292390623+00:00 stderr F Trace[1614406269]: [501.387846ms] [501.387846ms] END 2025-12-12T16:26:55.298098318+00:00 stderr F I1212 16:26:55.297937 12 trace.go:236] Trace[800419275]: "Patch" accept:application/vnd.kubernetes.protobuf;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json,audit-id:bac8c4fc-4aa1-4446-bcf4-c2b248b6d9ee,client:10.217.0.21,api-group:apiextensions.k8s.io,api-version:v1,name:prometheusagents.monitoring.rhobs,subresource:,namespace:,protocol:HTTP/2.0,resource:customresourcedefinitions,scope:resource,url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/prometheusagents.monitoring.rhobs,user-agent:olm/v0.0.0 (linux/amd64) kubernetes/$Format,verb:PATCH (12-Dec-2025 16:26:54.671) (total time: 625ms): 2025-12-12T16:26:55.298098318+00:00 stderr F Trace[800419275]: ["GuaranteedUpdate etcd3" audit-id:bac8c4fc-4aa1-4446-bcf4-c2b248b6d9ee,key:/apiextensions.k8s.io/customresourcedefinitions/prometheusagents.monitoring.rhobs,type:*apiextensions.CustomResourceDefinition,resource:customresourcedefinitions.apiextensions.k8s.io 611ms 
(16:26:54.686) 2025-12-12T16:26:55.298098318+00:00 stderr F Trace[800419275]: ---"About to Encode" 561ms (16:26:55.263)] 2025-12-12T16:26:55.298098318+00:00 stderr F Trace[800419275]: ---"About to check admission control" 266ms (16:26:54.968) 2025-12-12T16:26:55.298098318+00:00 stderr F Trace[800419275]: ---"Object stored in database" 329ms (16:26:55.297) 2025-12-12T16:26:55.298098318+00:00 stderr F Trace[800419275]: [625.951124ms] [625.951124ms] END 2025-12-12T16:26:55.426289156+00:00 stderr F I1212 16:26:55.426140 12 controller.go:667] quota admission added evaluator for: poddisruptionbudgets.policy 2025-12-12T16:26:55.461878115+00:00 stderr F I1212 16:26:55.461343 12 trace.go:236] Trace[1253293719]: "Patch" accept:application/vnd.kubernetes.protobuf;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json,audit-id:09cd9ddd-0f3e-4a97-9d2b-45f0516212d2,client:10.217.0.21,api-group:apiextensions.k8s.io,api-version:v1,name:alertmanagerconfigs.monitoring.rhobs,subresource:,namespace:,protocol:HTTP/2.0,resource:customresourcedefinitions,scope:resource,url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/alertmanagerconfigs.monitoring.rhobs,user-agent:olm/v0.0.0 (linux/amd64) kubernetes/$Format,verb:PATCH (12-Dec-2025 16:26:54.671) (total time: 790ms): 2025-12-12T16:26:55.461878115+00:00 stderr F Trace[1253293719]: ["GuaranteedUpdate etcd3" audit-id:09cd9ddd-0f3e-4a97-9d2b-45f0516212d2,key:/apiextensions.k8s.io/customresourcedefinitions/alertmanagerconfigs.monitoring.rhobs,type:*apiextensions.CustomResourceDefinition,resource:customresourcedefinitions.apiextensions.k8s.io 786ms (16:26:54.675) 2025-12-12T16:26:55.461878115+00:00 stderr F Trace[1253293719]: ---"About to Encode" 634ms (16:26:55.325)] 2025-12-12T16:26:55.461878115+00:00 stderr F Trace[1253293719]: ---"About to check admission control" 470ms (16:26:55.162) 2025-12-12T16:26:55.461878115+00:00 stderr F Trace[1253293719]: ---"Object stored in database" 296ms (16:26:55.458) 2025-12-12T16:26:55.461878115+00:00 stderr F Trace[1253293719]: [790.212603ms] [790.212603ms] END 2025-12-12T16:26:55.469472887+00:00 stderr F I1212 16:26:55.468569 12 controller.go:231] Updating CRD OpenAPI spec because prometheusagents.monitoring.rhobs changed 2025-12-12T16:26:55.734882302+00:00 stderr F I1212 16:26:55.734582 12 handler.go:288] Adding GroupVersion monitoring.rhobs v1alpha1 to ResourceManager 2025-12-12T16:26:55.734882302+00:00 stderr F I1212 16:26:55.734648 12 controller.go:237] Updating CRD OpenAPI spec because scrapeconfigs.monitoring.rhobs changed 2025-12-12T16:26:55.770475412+00:00 stderr F I1212 16:26:55.770345 12 handler.go:288] Adding GroupVersion monitoring.rhobs v1 to ResourceManager 2025-12-12T16:26:55.770475412+00:00 stderr F I1212 16:26:55.770377 12 controller.go:237] Updating CRD OpenAPI spec because servicemonitors.monitoring.rhobs changed 2025-12-12T16:26:55.776433372+00:00 stderr F I1212 16:26:55.776310 12 controller.go:237] Updating CRD OpenAPI spec because thanosqueriers.monitoring.rhobs changed 2025-12-12T16:26:55.776797411+00:00 stderr F I1212 16:26:55.776690 12 handler.go:288] Adding GroupVersion monitoring.rhobs v1alpha1 to ResourceManager 2025-12-12T16:26:55.907712099+00:00 stderr F I1212 16:26:55.907565 12 trace.go:236] Trace[1099985697]: "Patch" 
accept:application/vnd.kubernetes.protobuf;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json,audit-id:0e5ac0af-b8b6-4422-aac3-78d0df852ccf,client:10.217.0.21,api-group:apiextensions.k8s.io,api-version:v1,name:prometheuses.monitoring.rhobs,subresource:,namespace:,protocol:HTTP/2.0,resource:customresourcedefinitions,scope:resource,url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/prometheuses.monitoring.rhobs,user-agent:olm/v0.0.0 (linux/amd64) kubernetes/$Format,verb:PATCH (12-Dec-2025 16:26:54.672) (total time: 1235ms): 2025-12-12T16:26:55.907712099+00:00 stderr F Trace[1099985697]: ---"limitedReadBody succeeded" len:102 12ms (16:26:54.684) 2025-12-12T16:26:55.907712099+00:00 stderr F Trace[1099985697]: ["GuaranteedUpdate etcd3" audit-id:0e5ac0af-b8b6-4422-aac3-78d0df852ccf,key:/apiextensions.k8s.io/customresourcedefinitions/prometheuses.monitoring.rhobs,type:*apiextensions.CustomResourceDefinition,resource:customresourcedefinitions.apiextensions.k8s.io 1194ms (16:26:54.712) 2025-12-12T16:26:55.907712099+00:00 stderr F Trace[1099985697]: ---"About to Encode" 687ms (16:26:55.425) 2025-12-12T16:26:55.907712099+00:00 stderr F Trace[1099985697]: ---"Txn call completed" 34ms (16:26:55.485) 2025-12-12T16:26:55.907712099+00:00 stderr F Trace[1099985697]: ---"About to Encode" 324ms (16:26:55.837) 2025-12-12T16:26:55.907712099+00:00 stderr F Trace[1099985697]: ---"decode succeeded" len:475803 53ms (16:26:55.907)] 2025-12-12T16:26:55.907712099+00:00 stderr F Trace[1099985697]: ---"About to check admission control" 415ms (16:26:55.153) 2025-12-12T16:26:55.907712099+00:00 stderr F Trace[1099985697]: ---"About to apply patch" 359ms (16:26:55.512) 2025-12-12T16:26:55.907712099+00:00 stderr F Trace[1099985697]: ---"About to check admission control" 196ms (16:26:55.709) 2025-12-12T16:26:55.907712099+00:00 stderr F Trace[1099985697]: ---"Object stored in database" 197ms (16:26:55.907) 2025-12-12T16:26:55.907712099+00:00 stderr F Trace[1099985697]: [1.235140594s] [1.235140594s] END 2025-12-12T16:26:55.938340483+00:00 stderr F I1212 16:26:55.934474 12 controller.go:231] Updating CRD OpenAPI spec because prometheuses.monitoring.rhobs changed 2025-12-12T16:26:56.006201627+00:00 stderr F I1212 16:26:56.006039 12 controller.go:237] Updating CRD OpenAPI spec because thanosrulers.monitoring.rhobs changed 2025-12-12T16:26:56.006477634+00:00 stderr F I1212 16:26:56.006438 12 handler.go:288] Adding GroupVersion monitoring.rhobs v1 to ResourceManager 2025-12-12T16:26:56.028427508+00:00 stderr F I1212 16:26:56.028271 12 alloc.go:328] "allocated clusterIPs" service="openshift-operators/obo-prometheus-operator-admission-webhook" clusterIPs={"IPv4":"10.217.5.77"} 2025-12-12T16:26:56.034315227+00:00 stderr F I1212 16:26:56.034050 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:26:56.034315227+00:00 stderr F I1212 16:26:56.034084 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:26:56.034315227+00:00 stderr F I1212 16:26:56.034089 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 48.701µs 2025-12-12T16:26:56.263349783+00:00 stderr F I1212 16:26:56.263134 12 trace.go:236] Trace[2125088978]: "Patch" 
accept:application/vnd.kubernetes.protobuf;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json,audit-id:e919c154-527c-45e3-8a46-38b2af69b8a1,client:10.217.0.21,api-group:apiextensions.k8s.io,api-version:v1,name:scrapeconfigs.monitoring.rhobs,subresource:,namespace:,protocol:HTTP/2.0,resource:customresourcedefinitions,scope:resource,url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/scrapeconfigs.monitoring.rhobs,user-agent:olm/v0.0.0 (linux/amd64) kubernetes/$Format,verb:PATCH (12-Dec-2025 16:26:54.676) (total time: 1586ms): 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ---"limitedReadBody succeeded" len:102 34ms (16:26:54.710) 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ["GuaranteedUpdate etcd3" audit-id:e919c154-527c-45e3-8a46-38b2af69b8a1,key:/apiextensions.k8s.io/customresourcedefinitions/scrapeconfigs.monitoring.rhobs,type:*apiextensions.CustomResourceDefinition,resource:customresourcedefinitions.apiextensions.k8s.io 1549ms (16:26:54.713) 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ---"initial value restored" 33ms (16:26:54.747) 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ---"About to Encode" 538ms (16:26:55.285) 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ---"Retry value restored" 27ms (16:26:55.338) 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ---"About to Encode" 472ms (16:26:55.810) 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ---"About to Encode" 383ms (16:26:56.230)] 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ---"About to check admission control" 281ms (16:26:55.028) 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ---"About to apply patch" 310ms (16:26:55.338) 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ---"About to check admission control" 353ms (16:26:55.691) 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ---"About to apply patch" 155ms (16:26:55.847) 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ---"About to check admission control" 278ms (16:26:56.125) 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: ---"Object stored in database" 137ms (16:26:56.262) 2025-12-12T16:26:56.263349783+00:00 stderr F Trace[2125088978]: [1.586979093s] [1.586979093s] END 2025-12-12T16:26:56.286104418+00:00 stderr F I1212 16:26:56.285741 12 controller.go:231] Updating CRD OpenAPI spec because scrapeconfigs.monitoring.rhobs changed 2025-12-12T16:26:56.518460559+00:00 stderr F I1212 16:26:56.518330 12 controller.go:231] Updating CRD OpenAPI spec because thanosrulers.monitoring.rhobs changed 2025-12-12T16:26:57.004500398+00:00 stderr F I1212 16:26:57.004255 12 controller.go:667] quota admission added evaluator for: prometheusrules.monitoring.coreos.com 2025-12-12T16:26:57.417590084+00:00 stderr F I1212 16:26:57.415100 12 alloc.go:328] "allocated clusterIPs" service="openshift-operators/observability-operator" clusterIPs={"IPv4":"10.217.4.212"} 2025-12-12T16:26:57.420406825+00:00 stderr F I1212 16:26:57.420036 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:26:57.420406825+00:00 stderr F I1212 16:26:57.420068 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:26:57.420406825+00:00 stderr F I1212 16:26:57.420076 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 44.372µs 2025-12-12T16:26:57.474509252+00:00 
stderr F I1212 16:26:57.474347 12 handler.go:288] Adding GroupVersion observability.openshift.io v1alpha1 to ResourceManager 2025-12-12T16:26:57.474509252+00:00 stderr F I1212 16:26:57.474394 12 controller.go:237] Updating CRD OpenAPI spec because observabilityinstallers.observability.openshift.io changed 2025-12-12T16:26:57.500302814+00:00 stderr F I1212 16:26:57.500139 12 handler.go:288] Adding GroupVersion observability.openshift.io v1alpha1 to ResourceManager 2025-12-12T16:26:57.500302814+00:00 stderr F I1212 16:26:57.500165 12 controller.go:237] Updating CRD OpenAPI spec because uiplugins.observability.openshift.io changed 2025-12-12T16:26:57.519292603+00:00 stderr F I1212 16:26:57.517840 12 controller.go:231] Updating CRD OpenAPI spec because uiplugins.observability.openshift.io changed 2025-12-12T16:26:57.530681191+00:00 stderr F I1212 16:26:57.530011 12 controller.go:231] Updating CRD OpenAPI spec because observabilityinstallers.observability.openshift.io changed 2025-12-12T16:26:57.716235579+00:00 stderr F I1212 16:26:57.715171 12 controller.go:237] Updating CRD OpenAPI spec because perses.perses.dev changed 2025-12-12T16:26:57.716235579+00:00 stderr F I1212 16:26:57.715357 12 handler.go:288] Adding GroupVersion perses.dev v1alpha1 to ResourceManager 2025-12-12T16:26:57.724216470+00:00 stderr F I1212 16:26:57.724112 12 controller.go:237] Updating CRD OpenAPI spec because persesdatasources.perses.dev changed 2025-12-12T16:26:57.724377424+00:00 stderr F I1212 16:26:57.724313 12 handler.go:288] Adding GroupVersion perses.dev v1alpha1 to ResourceManager 2025-12-12T16:26:57.739637600+00:00 stderr F I1212 16:26:57.738809 12 controller.go:237] Updating CRD OpenAPI spec because persesdashboards.perses.dev changed 2025-12-12T16:26:57.739637600+00:00 stderr F I1212 16:26:57.738917 12 handler.go:288] Adding GroupVersion perses.dev v1alpha1 to ResourceManager 2025-12-12T16:26:58.010333438+00:00 stderr F I1212 16:26:58.009666 12 controller.go:231] Updating CRD OpenAPI spec because perses.perses.dev changed 2025-12-12T16:27:00.202750860+00:00 stderr F I1212 16:27:00.202603 12 alloc.go:328] "allocated clusterIPs" service="openshift-operators/obo-prometheus-operator-admission-webhook-service" clusterIPs={"IPv4":"10.217.4.232"} 2025-12-12T16:27:00.207207143+00:00 stderr F I1212 16:27:00.206700 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:27:00.207207143+00:00 stderr F I1212 16:27:00.206723 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:27:00.207207143+00:00 stderr F I1212 16:27:00.206728 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 33.83µs 2025-12-12T16:27:00.557623071+00:00 stderr F I1212 16:27:00.557336 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=9 seatDemandAvg=1.2150154043849344 seatDemandStdev=1.7223210628943246 seatDemandSmoothed=4.552408070556277 fairFrac=2.2796127562642368 currentCL=9 concurrencyDenominator=9 backstop=false 2025-12-12T16:27:02.449241345+00:00 stderr F I1212 16:27:02.449047 12 store.go:1663] "Monitoring resource count at path" resource="alertmanagerconfigs.monitoring.rhobs" path="//monitoring.rhobs/alertmanagerconfigs" 2025-12-12T16:27:02.453230496+00:00 stderr F I1212 16:27:02.451407 12 cacher.go:469] cacher (alertmanagerconfigs.monitoring.rhobs): initialized 2025-12-12T16:27:02.453230496+00:00 stderr F I1212 16:27:02.451451 12 reflector.go:430] "Caches populated" type="monitoring.rhobs/v1alpha1, Kind=AlertmanagerConfig" 
reflector="storage/cacher.go:/monitoring.rhobs/alertmanagerconfigs" 2025-12-12T16:27:02.913889355+00:00 stderr F I1212 16:27:02.913733 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.rhobs changed 2025-12-12T16:27:03.158266770+00:00 stderr F I1212 16:27:03.158034 12 store.go:1663] "Monitoring resource count at path" resource="alertmanagers.monitoring.rhobs" path="//monitoring.rhobs/alertmanagers" 2025-12-12T16:27:03.159139702+00:00 stderr F I1212 16:27:03.159045 12 cacher.go:469] cacher (alertmanagers.monitoring.rhobs): initialized 2025-12-12T16:27:03.159139702+00:00 stderr F I1212 16:27:03.159095 12 reflector.go:430] "Caches populated" type="monitoring.rhobs/v1, Kind=Alertmanager" reflector="storage/cacher.go:/monitoring.rhobs/alertmanagers" 2025-12-12T16:27:03.328167310+00:00 stderr F I1212 16:27:03.328010 12 cacher.go:847] cacher (apiservices.apiregistration.k8s.io): 1 objects queued in incoming channel. 2025-12-12T16:27:03.328167310+00:00 stderr F I1212 16:27:03.328042 12 cacher.go:847] cacher (apiservices.apiregistration.k8s.io): 2 objects queued in incoming channel. 2025-12-12T16:27:03.386812724+00:00 stderr F I1212 16:27:03.386675 12 controller.go:237] Updating CRD OpenAPI spec because agents.agent.k8s.elastic.co changed 2025-12-12T16:27:03.387082521+00:00 stderr F I1212 16:27:03.387036 12 handler.go:288] Adding GroupVersion agent.k8s.elastic.co v1alpha1 to ResourceManager 2025-12-12T16:27:03.525407762+00:00 stderr F I1212 16:27:03.525264 12 controller.go:237] Updating CRD OpenAPI spec because apmservers.apm.k8s.elastic.co changed 2025-12-12T16:27:03.527636288+00:00 stderr F I1212 16:27:03.527494 12 handler.go:288] Adding GroupVersion apm.k8s.elastic.co v1 to ResourceManager 2025-12-12T16:27:03.527870964+00:00 stderr F I1212 16:27:03.527827 12 handler.go:288] Adding GroupVersion apm.k8s.elastic.co v1beta1 to ResourceManager 2025-12-12T16:27:03.575813498+00:00 stderr F I1212 16:27:03.575629 12 trace.go:236] Trace[1872777795]: "Patch" accept:application/vnd.kubernetes.protobuf;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json,audit-id:5dbfd456-ada2-4da4-a066-a2dd92c9b819,client:10.217.0.21,api-group:apiextensions.k8s.io,api-version:v1,name:alertmanagerconfigs.monitoring.rhobs,subresource:,namespace:,protocol:HTTP/2.0,resource:customresourcedefinitions,scope:resource,url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/alertmanagerconfigs.monitoring.rhobs,user-agent:olm/v0.0.0 (linux/amd64) kubernetes/$Format,verb:PATCH (12-Dec-2025 16:27:02.964) (total time: 611ms): 2025-12-12T16:27:03.575813498+00:00 stderr F Trace[1872777795]: ["GuaranteedUpdate etcd3" audit-id:5dbfd456-ada2-4da4-a066-a2dd92c9b819,key:/apiextensions.k8s.io/customresourcedefinitions/alertmanagerconfigs.monitoring.rhobs,type:*apiextensions.CustomResourceDefinition,resource:customresourcedefinitions.apiextensions.k8s.io 608ms (16:27:02.966) 2025-12-12T16:27:03.575813498+00:00 stderr F Trace[1872777795]: ---"About to Encode" 470ms (16:27:03.454) 2025-12-12T16:27:03.575813498+00:00 stderr F Trace[1872777795]: ---"decode succeeded" len:355066 59ms (16:27:03.574)] 2025-12-12T16:27:03.575813498+00:00 stderr F Trace[1872777795]: ---"About to check admission control" 188ms (16:27:03.171) 2025-12-12T16:27:03.575813498+00:00 stderr F Trace[1872777795]: ---"Object stored in database" 402ms (16:27:03.574) 2025-12-12T16:27:03.575813498+00:00 stderr F Trace[1872777795]: [611.502407ms] [611.502407ms] END 
2025-12-12T16:27:03.616988720+00:00 stderr F I1212 16:27:03.611260 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.rhobs changed 2025-12-12T16:27:03.617565144+00:00 stderr F I1212 16:27:03.617402 12 controller.go:237] Updating CRD OpenAPI spec because beats.beat.k8s.elastic.co changed 2025-12-12T16:27:03.617767539+00:00 stderr F I1212 16:27:03.617698 12 handler.go:288] Adding GroupVersion beat.k8s.elastic.co v1beta1 to ResourceManager 2025-12-12T16:27:03.618864497+00:00 stderr F I1212 16:27:03.618806 12 controller.go:237] Updating CRD OpenAPI spec because elasticmapsservers.maps.k8s.elastic.co changed 2025-12-12T16:27:03.619194216+00:00 stderr F I1212 16:27:03.619138 12 handler.go:288] Adding GroupVersion maps.k8s.elastic.co v1alpha1 to ResourceManager 2025-12-12T16:27:03.697987420+00:00 stderr F I1212 16:27:03.697825 12 trace.go:236] Trace[902586381]: "Update" accept:application/vnd.kubernetes.protobuf,application/json,audit-id:1af35470-0fc6-4e70-8e1f-c733f10fe052,client:10.217.0.16,api-group:apiextensions.k8s.io,api-version:v1,name:alertmanagers.monitoring.rhobs,subresource:,namespace:,protocol:HTTP/2.0,resource:customresourcedefinitions,scope:resource,url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/alertmanagers.monitoring.rhobs,user-agent:catalog/v0.0.0 (linux/amd64) kubernetes/$Format,verb:PUT (12-Dec-2025 16:27:03.173) (total time: 524ms): 2025-12-12T16:27:03.697987420+00:00 stderr F Trace[902586381]: [524.475664ms] [524.475664ms] END 2025-12-12T16:27:03.718440487+00:00 stderr F I1212 16:27:03.718311 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagers.monitoring.rhobs changed 2025-12-12T16:27:03.718757835+00:00 stderr F I1212 16:27:03.718708 12 controller.go:237] Updating CRD OpenAPI spec because elasticsearchautoscalers.autoscaling.k8s.elastic.co changed 2025-12-12T16:27:03.718887159+00:00 stderr F I1212 16:27:03.718478 12 handler.go:288] Adding GroupVersion autoscaling.k8s.elastic.co v1alpha1 to ResourceManager 2025-12-12T16:27:03.752656393+00:00 stderr F I1212 16:27:03.752502 12 handler.go:288] Adding GroupVersion elasticsearch.k8s.elastic.co v1 to ResourceManager 2025-12-12T16:27:03.752656393+00:00 stderr F I1212 16:27:03.752564 12 handler.go:288] Adding GroupVersion elasticsearch.k8s.elastic.co v1beta1 to ResourceManager 2025-12-12T16:27:03.752731535+00:00 stderr F I1212 16:27:03.752637 12 controller.go:237] Updating CRD OpenAPI spec because elasticsearches.elasticsearch.k8s.elastic.co changed 2025-12-12T16:27:03.765383895+00:00 stderr F I1212 16:27:03.765220 12 controller.go:237] Updating CRD OpenAPI spec because enterprisesearches.enterprisesearch.k8s.elastic.co changed 2025-12-12T16:27:03.765524269+00:00 stderr F I1212 16:27:03.765474 12 handler.go:288] Adding GroupVersion enterprisesearch.k8s.elastic.co v1 to ResourceManager 2025-12-12T16:27:03.765561550+00:00 stderr F I1212 16:27:03.765520 12 handler.go:288] Adding GroupVersion enterprisesearch.k8s.elastic.co v1beta1 to ResourceManager 2025-12-12T16:27:03.770754011+00:00 stderr F I1212 16:27:03.770660 12 store.go:1663] "Monitoring resource count at path" resource="monitoringstacks.monitoring.rhobs" path="//monitoring.rhobs/monitoringstacks" 2025-12-12T16:27:03.773107771+00:00 stderr F I1212 16:27:03.772729 12 cacher.go:469] cacher (monitoringstacks.monitoring.rhobs): initialized 2025-12-12T16:27:03.773107771+00:00 stderr F I1212 16:27:03.772767 12 reflector.go:430] "Caches populated" type="monitoring.rhobs/v1alpha1, Kind=MonitoringStack" 
reflector="storage/cacher.go:/monitoring.rhobs/monitoringstacks" 2025-12-12T16:27:03.794198655+00:00 stderr F I1212 16:27:03.794049 12 handler.go:288] Adding GroupVersion kibana.k8s.elastic.co v1 to ResourceManager 2025-12-12T16:27:03.794198655+00:00 stderr F I1212 16:27:03.794098 12 handler.go:288] Adding GroupVersion kibana.k8s.elastic.co v1beta1 to ResourceManager 2025-12-12T16:27:03.794433211+00:00 stderr F I1212 16:27:03.794307 12 controller.go:237] Updating CRD OpenAPI spec because kibanas.kibana.k8s.elastic.co changed 2025-12-12T16:27:03.824909312+00:00 stderr F I1212 16:27:03.824004 12 controller.go:237] Updating CRD OpenAPI spec because logstashes.logstash.k8s.elastic.co changed 2025-12-12T16:27:03.824909312+00:00 stderr F I1212 16:27:03.824306 12 handler.go:288] Adding GroupVersion logstash.k8s.elastic.co v1alpha1 to ResourceManager 2025-12-12T16:27:03.845568135+00:00 stderr F I1212 16:27:03.844386 12 controller.go:231] Updating CRD OpenAPI spec because monitoringstacks.monitoring.rhobs changed 2025-12-12T16:27:03.845568135+00:00 stderr F I1212 16:27:03.844732 12 handler.go:288] Adding GroupVersion stackconfigpolicy.k8s.elastic.co v1alpha1 to ResourceManager 2025-12-12T16:27:03.845568135+00:00 stderr F I1212 16:27:03.844773 12 controller.go:237] Updating CRD OpenAPI spec because stackconfigpolicies.stackconfigpolicy.k8s.elastic.co changed 2025-12-12T16:27:03.921283361+00:00 stderr F I1212 16:27:03.921128 12 store.go:1663] "Monitoring resource count at path" resource="podmonitors.monitoring.rhobs" path="//monitoring.rhobs/podmonitors" 2025-12-12T16:27:03.922579774+00:00 stderr F I1212 16:27:03.922505 12 cacher.go:469] cacher (podmonitors.monitoring.rhobs): initialized 2025-12-12T16:27:03.922667776+00:00 stderr F I1212 16:27:03.922557 12 reflector.go:430] "Caches populated" type="monitoring.rhobs/v1, Kind=PodMonitor" reflector="storage/cacher.go:/monitoring.rhobs/podmonitors" 2025-12-12T16:27:04.028585947+00:00 stderr F I1212 16:27:04.028458 12 controller.go:231] Updating CRD OpenAPI spec because podmonitors.monitoring.rhobs changed 2025-12-12T16:27:04.084521373+00:00 stderr F I1212 16:27:04.084389 12 cacher.go:469] cacher (probes.monitoring.rhobs): initialized 2025-12-12T16:27:04.084521373+00:00 stderr F I1212 16:27:04.084452 12 reflector.go:430] "Caches populated" type="monitoring.rhobs/v1, Kind=Probe" reflector="storage/cacher.go:/monitoring.rhobs/probes" 2025-12-12T16:27:04.096360432+00:00 stderr F I1212 16:27:04.096224 12 store.go:1663] "Monitoring resource count at path" resource="probes.monitoring.rhobs" path="//monitoring.rhobs/probes" 2025-12-12T16:27:04.166404465+00:00 stderr F I1212 16:27:04.166264 12 controller.go:231] Updating CRD OpenAPI spec because probes.monitoring.rhobs changed 2025-12-12T16:27:04.273905336+00:00 stderr F I1212 16:27:04.273767 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagers.monitoring.rhobs changed 2025-12-12T16:27:04.377461347+00:00 stderr F I1212 16:27:04.377315 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearchautoscalers.autoscaling.k8s.elastic.co changed 2025-12-12T16:27:04.384245578+00:00 stderr F I1212 16:27:04.384094 12 controller.go:231] Updating CRD OpenAPI spec because stackconfigpolicies.stackconfigpolicy.k8s.elastic.co changed 2025-12-12T16:27:04.396269543+00:00 stderr F I1212 16:27:04.396077 12 controller.go:231] Updating CRD OpenAPI spec because beats.beat.k8s.elastic.co changed 2025-12-12T16:27:04.396598961+00:00 stderr F I1212 16:27:04.396391 12 controller.go:231] Updating CRD OpenAPI spec 
because elasticmapsservers.maps.k8s.elastic.co changed 2025-12-12T16:27:04.412564645+00:00 stderr F I1212 16:27:04.412425 12 controller.go:231] Updating CRD OpenAPI spec because apmservers.apm.k8s.elastic.co changed 2025-12-12T16:27:04.422276311+00:00 stderr F I1212 16:27:04.421267 12 controller.go:231] Updating CRD OpenAPI spec because enterprisesearches.enterprisesearch.k8s.elastic.co changed 2025-12-12T16:27:04.423295246+00:00 stderr F I1212 16:27:04.423220 12 controller.go:231] Updating CRD OpenAPI spec because agents.agent.k8s.elastic.co changed 2025-12-12T16:27:04.423295246+00:00 stderr F I1212 16:27:04.423268 12 controller.go:231] Updating CRD OpenAPI spec because kibanas.kibana.k8s.elastic.co changed 2025-12-12T16:27:04.439931218+00:00 stderr F I1212 16:27:04.439773 12 controller.go:231] Updating CRD OpenAPI spec because logstashes.logstash.k8s.elastic.co changed 2025-12-12T16:27:04.485251105+00:00 stderr F I1212 16:27:04.484101 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearches.elasticsearch.k8s.elastic.co changed 2025-12-12T16:27:04.528757506+00:00 stderr F I1212 16:27:04.528602 12 controller.go:231] Updating CRD OpenAPI spec because probes.monitoring.rhobs changed 2025-12-12T16:27:04.547436788+00:00 stderr F I1212 16:27:04.547286 12 store.go:1663] "Monitoring resource count at path" resource="prometheusagents.monitoring.rhobs" path="//monitoring.rhobs/prometheusagents" 2025-12-12T16:27:04.548241539+00:00 stderr F I1212 16:27:04.548133 12 controller.go:231] Updating CRD OpenAPI spec because podmonitors.monitoring.rhobs changed 2025-12-12T16:27:04.550248570+00:00 stderr F I1212 16:27:04.549234 12 cacher.go:469] cacher (prometheusagents.monitoring.rhobs): initialized 2025-12-12T16:27:04.550248570+00:00 stderr F I1212 16:27:04.549341 12 reflector.go:430] "Caches populated" type="monitoring.rhobs/v1alpha1, Kind=PrometheusAgent" reflector="storage/cacher.go:/monitoring.rhobs/prometheusagents" 2025-12-12T16:27:04.577139610+00:00 stderr F I1212 16:27:04.576956 12 controller.go:231] Updating CRD OpenAPI spec because monitoringstacks.monitoring.rhobs changed 2025-12-12T16:27:04.943249286+00:00 stderr F I1212 16:27:04.942262 12 alloc.go:328] "allocated clusterIPs" service="service-telemetry/elastic-operator-service" clusterIPs={"IPv4":"10.217.4.51"} 2025-12-12T16:27:04.946301413+00:00 stderr F I1212 16:27:04.946158 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:27:04.946301413+00:00 stderr F I1212 16:27:04.946207 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:27:04.946301413+00:00 stderr F I1212 16:27:04.946216 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 70.082µs 2025-12-12T16:27:04.983553815+00:00 stderr F I1212 16:27:04.983427 12 controller.go:231] Updating CRD OpenAPI spec because prometheusagents.monitoring.rhobs changed 2025-12-12T16:27:05.398538888+00:00 stderr F I1212 16:27:05.398386 12 store.go:1663] "Monitoring resource count at path" resource="prometheuses.monitoring.rhobs" path="//monitoring.rhobs/prometheuses" 2025-12-12T16:27:05.399867981+00:00 stderr F I1212 16:27:05.399792 12 cacher.go:469] cacher (prometheuses.monitoring.rhobs): initialized 2025-12-12T16:27:05.399867981+00:00 stderr F I1212 16:27:05.399831 12 reflector.go:430] "Caches populated" type="monitoring.rhobs/v1, Kind=Prometheus" reflector="storage/cacher.go:/monitoring.rhobs/prometheuses" 2025-12-12T16:27:05.601274809+00:00 stderr F I1212 16:27:05.600337 12 controller.go:231] Updating CRD OpenAPI spec 
because prometheusagents.monitoring.rhobs changed 2025-12-12T16:27:05.875597171+00:00 stderr F I1212 16:27:05.872276 12 store.go:1663] "Monitoring resource count at path" resource="beats.beat.k8s.elastic.co" path="//beat.k8s.elastic.co/beats" 2025-12-12T16:27:05.882686311+00:00 stderr F I1212 16:27:05.880491 12 cacher.go:469] cacher (beats.beat.k8s.elastic.co): initialized 2025-12-12T16:27:05.882686311+00:00 stderr F I1212 16:27:05.880527 12 reflector.go:430] "Caches populated" type="beat.k8s.elastic.co/v1beta1, Kind=Beat" reflector="storage/cacher.go:/beat.k8s.elastic.co/beats" 2025-12-12T16:27:05.898020159+00:00 stderr F I1212 16:27:05.897859 12 store.go:1663] "Monitoring resource count at path" resource="enterprisesearches.enterprisesearch.k8s.elastic.co" path="//enterprisesearch.k8s.elastic.co/enterprisesearches" 2025-12-12T16:27:05.899439325+00:00 stderr F I1212 16:27:05.899367 12 cacher.go:469] cacher (enterprisesearches.enterprisesearch.k8s.elastic.co): initialized 2025-12-12T16:27:05.899439325+00:00 stderr F I1212 16:27:05.899393 12 reflector.go:430] "Caches populated" type="enterprisesearch.k8s.elastic.co/v1, Kind=EnterpriseSearch" reflector="storage/cacher.go:/enterprisesearch.k8s.elastic.co/enterprisesearches" 2025-12-12T16:27:05.908937185+00:00 stderr F I1212 16:27:05.908619 12 store.go:1663] "Monitoring resource count at path" resource="enterprisesearches.enterprisesearch.k8s.elastic.co" path="//enterprisesearch.k8s.elastic.co/enterprisesearches" 2025-12-12T16:27:05.915784979+00:00 stderr F I1212 16:27:05.915643 12 cacher.go:469] cacher (enterprisesearches.enterprisesearch.k8s.elastic.co): initialized 2025-12-12T16:27:05.915784979+00:00 stderr F I1212 16:27:05.915699 12 reflector.go:430] "Caches populated" type="enterprisesearch.k8s.elastic.co/v1beta1, Kind=EnterpriseSearch" reflector="storage/cacher.go:/enterprisesearch.k8s.elastic.co/enterprisesearches" 2025-12-12T16:27:05.929522276+00:00 stderr F I1212 16:27:05.926391 12 controller.go:231] Updating CRD OpenAPI spec because prometheuses.monitoring.rhobs changed 2025-12-12T16:27:05.968591145+00:00 stderr F I1212 16:27:05.968446 12 store.go:1663] "Monitoring resource count at path" resource="elasticsearches.elasticsearch.k8s.elastic.co" path="//elasticsearch.k8s.elastic.co/elasticsearches" 2025-12-12T16:27:05.974449323+00:00 stderr F I1212 16:27:05.971841 12 cacher.go:469] cacher (elasticsearches.elasticsearch.k8s.elastic.co): initialized 2025-12-12T16:27:05.974449323+00:00 stderr F I1212 16:27:05.971910 12 reflector.go:430] "Caches populated" type="elasticsearch.k8s.elastic.co/v1, Kind=Elasticsearch" reflector="storage/cacher.go:/elasticsearch.k8s.elastic.co/elasticsearches" 2025-12-12T16:27:05.983877892+00:00 stderr F I1212 16:27:05.983758 12 store.go:1663] "Monitoring resource count at path" resource="elasticsearches.elasticsearch.k8s.elastic.co" path="//elasticsearch.k8s.elastic.co/elasticsearches" 2025-12-12T16:27:05.990738246+00:00 stderr F I1212 16:27:05.990624 12 cacher.go:469] cacher (elasticsearches.elasticsearch.k8s.elastic.co): initialized 2025-12-12T16:27:05.990738246+00:00 stderr F I1212 16:27:05.990679 12 reflector.go:430] "Caches populated" type="elasticsearch.k8s.elastic.co/v1beta1, Kind=Elasticsearch" reflector="storage/cacher.go:/elasticsearch.k8s.elastic.co/elasticsearches" 2025-12-12T16:27:05.993601998+00:00 stderr F I1212 16:27:05.993521 12 store.go:1663] "Monitoring resource count at path" resource="elasticsearches.elasticsearch.k8s.elastic.co" path="//elasticsearch.k8s.elastic.co/elasticsearches" 
2025-12-12T16:27:06.000038221+00:00 stderr F I1212 16:27:05.997557 12 cacher.go:469] cacher (elasticsearches.elasticsearch.k8s.elastic.co): initialized 2025-12-12T16:27:06.000038221+00:00 stderr F I1212 16:27:05.997600 12 reflector.go:430] "Caches populated" type="elasticsearch.k8s.elastic.co/v1alpha1, Kind=Elasticsearch" reflector="storage/cacher.go:/elasticsearch.k8s.elastic.co/elasticsearches" 2025-12-12T16:27:06.013898692+00:00 stderr F I1212 16:27:06.012257 12 store.go:1663] "Monitoring resource count at path" resource="kibanas.kibana.k8s.elastic.co" path="//kibana.k8s.elastic.co/kibanas" 2025-12-12T16:27:06.015112403+00:00 stderr F I1212 16:27:06.013463 12 cacher.go:469] cacher (kibanas.kibana.k8s.elastic.co): initialized 2025-12-12T16:27:06.015112403+00:00 stderr F I1212 16:27:06.013520 12 reflector.go:430] "Caches populated" type="kibana.k8s.elastic.co/v1, Kind=Kibana" reflector="storage/cacher.go:/kibana.k8s.elastic.co/kibanas" 2025-12-12T16:27:06.022217122+00:00 stderr F I1212 16:27:06.020027 12 store.go:1663] "Monitoring resource count at path" resource="kibanas.kibana.k8s.elastic.co" path="//kibana.k8s.elastic.co/kibanas" 2025-12-12T16:27:06.025658739+00:00 stderr F I1212 16:27:06.022521 12 cacher.go:469] cacher (kibanas.kibana.k8s.elastic.co): initialized 2025-12-12T16:27:06.025658739+00:00 stderr F I1212 16:27:06.022555 12 reflector.go:430] "Caches populated" type="kibana.k8s.elastic.co/v1beta1, Kind=Kibana" reflector="storage/cacher.go:/kibana.k8s.elastic.co/kibanas" 2025-12-12T16:27:06.032196935+00:00 stderr F I1212 16:27:06.032004 12 store.go:1663] "Monitoring resource count at path" resource="kibanas.kibana.k8s.elastic.co" path="//kibana.k8s.elastic.co/kibanas" 2025-12-12T16:27:06.035459017+00:00 stderr F I1212 16:27:06.035123 12 cacher.go:469] cacher (kibanas.kibana.k8s.elastic.co): initialized 2025-12-12T16:27:06.035459017+00:00 stderr F I1212 16:27:06.035160 12 reflector.go:430] "Caches populated" type="kibana.k8s.elastic.co/v1alpha1, Kind=Kibana" reflector="storage/cacher.go:/kibana.k8s.elastic.co/kibanas" 2025-12-12T16:27:06.063321373+00:00 stderr F I1212 16:27:06.062670 12 store.go:1663] "Monitoring resource count at path" resource="elasticmapsservers.maps.k8s.elastic.co" path="//maps.k8s.elastic.co/elasticmapsservers" 2025-12-12T16:27:06.068321219+00:00 stderr F I1212 16:27:06.065536 12 cacher.go:469] cacher (elasticmapsservers.maps.k8s.elastic.co): initialized 2025-12-12T16:27:06.068321219+00:00 stderr F I1212 16:27:06.065581 12 reflector.go:430] "Caches populated" type="maps.k8s.elastic.co/v1alpha1, Kind=ElasticMapsServer" reflector="storage/cacher.go:/maps.k8s.elastic.co/elasticmapsservers" 2025-12-12T16:27:06.099637612+00:00 stderr F I1212 16:27:06.099505 12 store.go:1663] "Monitoring resource count at path" resource="thanosrulers.monitoring.rhobs" path="//monitoring.rhobs/thanosrulers" 2025-12-12T16:27:06.100610066+00:00 stderr F I1212 16:27:06.100530 12 cacher.go:469] cacher (thanosrulers.monitoring.rhobs): initialized 2025-12-12T16:27:06.100610066+00:00 stderr F I1212 16:27:06.100578 12 reflector.go:430] "Caches populated" type="monitoring.rhobs/v1, Kind=ThanosRuler" reflector="storage/cacher.go:/monitoring.rhobs/thanosrulers" 2025-12-12T16:27:06.117424012+00:00 stderr F I1212 16:27:06.115231 12 store.go:1663] "Monitoring resource count at path" resource="persesdatasources.perses.dev" path="//perses.dev/persesdatasources" 2025-12-12T16:27:06.117424012+00:00 stderr F I1212 16:27:06.116529 12 cacher.go:469] cacher (persesdatasources.perses.dev): initialized 
2025-12-12T16:27:06.117424012+00:00 stderr F I1212 16:27:06.116570 12 reflector.go:430] "Caches populated" type="perses.dev/v1alpha1, Kind=PersesDatasource" reflector="storage/cacher.go:/perses.dev/persesdatasources" 2025-12-12T16:27:06.127793914+00:00 stderr F I1212 16:27:06.127578 12 store.go:1663] "Monitoring resource count at path" resource="elasticsearchautoscalers.autoscaling.k8s.elastic.co" path="//autoscaling.k8s.elastic.co/elasticsearchautoscalers" 2025-12-12T16:27:06.129701813+00:00 stderr F I1212 16:27:06.129630 12 cacher.go:469] cacher (elasticsearchautoscalers.autoscaling.k8s.elastic.co): initialized 2025-12-12T16:27:06.129763124+00:00 stderr F I1212 16:27:06.129720 12 reflector.go:430] "Caches populated" type="autoscaling.k8s.elastic.co/v1alpha1, Kind=ElasticsearchAutoscaler" reflector="storage/cacher.go:/autoscaling.k8s.elastic.co/elasticsearchautoscalers" 2025-12-12T16:27:06.139668505+00:00 stderr F I1212 16:27:06.139543 12 store.go:1663] "Monitoring resource count at path" resource="agents.agent.k8s.elastic.co" path="//agent.k8s.elastic.co/agents" 2025-12-12T16:27:06.141047950+00:00 stderr F I1212 16:27:06.140965 12 cacher.go:469] cacher (agents.agent.k8s.elastic.co): initialized 2025-12-12T16:27:06.141062700+00:00 stderr F I1212 16:27:06.141025 12 reflector.go:430] "Caches populated" type="agent.k8s.elastic.co/v1alpha1, Kind=Agent" reflector="storage/cacher.go:/agent.k8s.elastic.co/agents" 2025-12-12T16:27:06.150274183+00:00 stderr F I1212 16:27:06.150071 12 store.go:1663] "Monitoring resource count at path" resource="prometheusrules.monitoring.rhobs" path="//monitoring.rhobs/prometheusrules" 2025-12-12T16:27:06.153285069+00:00 stderr F I1212 16:27:06.152010 12 cacher.go:469] cacher (prometheusrules.monitoring.rhobs): initialized 2025-12-12T16:27:06.153285069+00:00 stderr F I1212 16:27:06.152056 12 reflector.go:430] "Caches populated" type="monitoring.rhobs/v1, Kind=PrometheusRule" reflector="storage/cacher.go:/monitoring.rhobs/prometheusrules" 2025-12-12T16:27:06.165299614+00:00 stderr F I1212 16:27:06.165047 12 store.go:1663] "Monitoring resource count at path" resource="perses.perses.dev" path="//perses.dev/perses" 2025-12-12T16:27:06.166439012+00:00 stderr F I1212 16:27:06.166351 12 cacher.go:469] cacher (perses.perses.dev): initialized 2025-12-12T16:27:06.166521944+00:00 stderr F I1212 16:27:06.166448 12 reflector.go:430] "Caches populated" type="perses.dev/v1alpha1, Kind=Perses" reflector="storage/cacher.go:/perses.dev/perses" 2025-12-12T16:27:06.179985695+00:00 stderr F I1212 16:27:06.179844 12 store.go:1663] "Monitoring resource count at path" resource="servicemonitors.monitoring.rhobs" path="//monitoring.rhobs/servicemonitors" 2025-12-12T16:27:06.181398811+00:00 stderr F I1212 16:27:06.181031 12 cacher.go:469] cacher (servicemonitors.monitoring.rhobs): initialized 2025-12-12T16:27:06.181398811+00:00 stderr F I1212 16:27:06.181058 12 reflector.go:430] "Caches populated" type="monitoring.rhobs/v1, Kind=ServiceMonitor" reflector="storage/cacher.go:/monitoring.rhobs/servicemonitors" 2025-12-12T16:27:06.186741676+00:00 stderr F I1212 16:27:06.186632 12 controller.go:231] Updating CRD OpenAPI spec because agents.agent.k8s.elastic.co changed 2025-12-12T16:27:06.203581122+00:00 stderr F I1212 16:27:06.203367 12 store.go:1663] "Monitoring resource count at path" resource="observabilityinstallers.observability.openshift.io" path="//observability.openshift.io/observabilityinstallers" 2025-12-12T16:27:06.204682840+00:00 stderr F I1212 16:27:06.204602 12 cacher.go:469] cacher 
(observabilityinstallers.observability.openshift.io): initialized 2025-12-12T16:27:06.204682840+00:00 stderr F I1212 16:27:06.204636 12 reflector.go:430] "Caches populated" type="observability.openshift.io/v1alpha1, Kind=ObservabilityInstaller" reflector="storage/cacher.go:/observability.openshift.io/observabilityinstallers" 2025-12-12T16:27:06.259393695+00:00 stderr F I1212 16:27:06.256143 12 store.go:1663] "Monitoring resource count at path" resource="scrapeconfigs.monitoring.rhobs" path="//monitoring.rhobs/scrapeconfigs" 2025-12-12T16:27:06.261114399+00:00 stderr F I1212 16:27:06.260635 12 cacher.go:469] cacher (scrapeconfigs.monitoring.rhobs): initialized 2025-12-12T16:27:06.261114399+00:00 stderr F I1212 16:27:06.260691 12 reflector.go:430] "Caches populated" type="monitoring.rhobs/v1alpha1, Kind=ScrapeConfig" reflector="storage/cacher.go:/monitoring.rhobs/scrapeconfigs" 2025-12-12T16:27:06.286673346+00:00 stderr F I1212 16:27:06.282067 12 store.go:1663] "Monitoring resource count at path" resource="logstashes.logstash.k8s.elastic.co" path="//logstash.k8s.elastic.co/logstashes" 2025-12-12T16:27:06.286673346+00:00 stderr F I1212 16:27:06.283835 12 cacher.go:469] cacher (logstashes.logstash.k8s.elastic.co): initialized 2025-12-12T16:27:06.286673346+00:00 stderr F I1212 16:27:06.283866 12 reflector.go:430] "Caches populated" type="logstash.k8s.elastic.co/v1alpha1, Kind=Logstash" reflector="storage/cacher.go:/logstash.k8s.elastic.co/logstashes" 2025-12-12T16:27:06.298843894+00:00 stderr F I1212 16:27:06.298704 12 store.go:1663] "Monitoring resource count at path" resource="stackconfigpolicies.stackconfigpolicy.k8s.elastic.co" path="//stackconfigpolicy.k8s.elastic.co/stackconfigpolicies" 2025-12-12T16:27:06.299844979+00:00 stderr F I1212 16:27:06.299789 12 cacher.go:469] cacher (stackconfigpolicies.stackconfigpolicy.k8s.elastic.co): initialized 2025-12-12T16:27:06.299844979+00:00 stderr F I1212 16:27:06.299819 12 reflector.go:430] "Caches populated" type="stackconfigpolicy.k8s.elastic.co/v1alpha1, Kind=StackConfigPolicy" reflector="storage/cacher.go:/stackconfigpolicy.k8s.elastic.co/stackconfigpolicies" 2025-12-12T16:27:06.312452638+00:00 stderr F I1212 16:27:06.312313 12 store.go:1663] "Monitoring resource count at path" resource="persesdashboards.perses.dev" path="//perses.dev/persesdashboards" 2025-12-12T16:27:06.314584092+00:00 stderr F I1212 16:27:06.314486 12 cacher.go:469] cacher (persesdashboards.perses.dev): initialized 2025-12-12T16:27:06.314584092+00:00 stderr F I1212 16:27:06.314537 12 reflector.go:430] "Caches populated" type="perses.dev/v1alpha1, Kind=PersesDashboard" reflector="storage/cacher.go:/perses.dev/persesdashboards" 2025-12-12T16:27:06.337128743+00:00 stderr F I1212 16:27:06.333714 12 store.go:1663] "Monitoring resource count at path" resource="apmservers.apm.k8s.elastic.co" path="//apm.k8s.elastic.co/apmservers" 2025-12-12T16:27:06.337128743+00:00 stderr F I1212 16:27:06.335742 12 cacher.go:469] cacher (apmservers.apm.k8s.elastic.co): initialized 2025-12-12T16:27:06.337128743+00:00 stderr F I1212 16:27:06.335784 12 reflector.go:430] "Caches populated" type="apm.k8s.elastic.co/v1, Kind=ApmServer" reflector="storage/cacher.go:/apm.k8s.elastic.co/apmservers" 2025-12-12T16:27:06.342570740+00:00 stderr F I1212 16:27:06.342443 12 store.go:1663] "Monitoring resource count at path" resource="apmservers.apm.k8s.elastic.co" path="//apm.k8s.elastic.co/apmservers" 2025-12-12T16:27:06.346929831+00:00 stderr F I1212 16:27:06.346779 12 cacher.go:469] cacher 
(apmservers.apm.k8s.elastic.co): initialized 2025-12-12T16:27:06.346929831+00:00 stderr F I1212 16:27:06.346840 12 reflector.go:430] "Caches populated" type="apm.k8s.elastic.co/v1beta1, Kind=ApmServer" reflector="storage/cacher.go:/apm.k8s.elastic.co/apmservers" 2025-12-12T16:27:06.355454676+00:00 stderr F I1212 16:27:06.354568 12 store.go:1663] "Monitoring resource count at path" resource="apmservers.apm.k8s.elastic.co" path="//apm.k8s.elastic.co/apmservers" 2025-12-12T16:27:06.356239886+00:00 stderr F I1212 16:27:06.356163 12 cacher.go:469] cacher (apmservers.apm.k8s.elastic.co): initialized 2025-12-12T16:27:06.356256597+00:00 stderr F I1212 16:27:06.356227 12 reflector.go:430] "Caches populated" type="apm.k8s.elastic.co/v1alpha1, Kind=ApmServer" reflector="storage/cacher.go:/apm.k8s.elastic.co/apmservers" 2025-12-12T16:27:06.367123542+00:00 stderr F I1212 16:27:06.366977 12 store.go:1663] "Monitoring resource count at path" resource="thanosqueriers.monitoring.rhobs" path="//monitoring.rhobs/thanosqueriers" 2025-12-12T16:27:06.368679881+00:00 stderr F I1212 16:27:06.368596 12 cacher.go:847] cacher (pods): 1 objects queued in incoming channel. 2025-12-12T16:27:06.368679881+00:00 stderr F I1212 16:27:06.368615 12 cacher.go:847] cacher (pods): 2 objects queued in incoming channel. 2025-12-12T16:27:06.369526842+00:00 stderr F I1212 16:27:06.369444 12 cacher.go:469] cacher (thanosqueriers.monitoring.rhobs): initialized 2025-12-12T16:27:06.369526842+00:00 stderr F I1212 16:27:06.369487 12 reflector.go:430] "Caches populated" type="monitoring.rhobs/v1alpha1, Kind=ThanosQuerier" reflector="storage/cacher.go:/monitoring.rhobs/thanosqueriers" 2025-12-12T16:27:06.417492916+00:00 stderr F I1212 16:27:06.417352 12 store.go:1663] "Monitoring resource count at path" resource="uiplugins.observability.openshift.io" path="//observability.openshift.io/uiplugins" 2025-12-12T16:27:06.439907254+00:00 stderr F I1212 16:27:06.423345 12 cacher.go:469] cacher (uiplugins.observability.openshift.io): initialized 2025-12-12T16:27:06.439907254+00:00 stderr F I1212 16:27:06.423396 12 reflector.go:430] "Caches populated" type="observability.openshift.io/v1alpha1, Kind=UIPlugin" reflector="storage/cacher.go:/observability.openshift.io/uiplugins" 2025-12-12T16:27:06.439907254+00:00 stderr F I1212 16:27:06.424576 12 controller.go:231] Updating CRD OpenAPI spec because prometheusrules.monitoring.rhobs changed 2025-12-12T16:27:06.521442777+00:00 stderr F I1212 16:27:06.521253 12 controller.go:231] Updating CRD OpenAPI spec because apmservers.apm.k8s.elastic.co changed 2025-12-12T16:27:06.630713863+00:00 stderr F I1212 16:27:06.630609 12 controller.go:231] Updating CRD OpenAPI spec because beats.beat.k8s.elastic.co changed 2025-12-12T16:27:06.682433032+00:00 stderr F I1212 16:27:06.681271 12 controller.go:231] Updating CRD OpenAPI spec because elasticmapsservers.maps.k8s.elastic.co changed 2025-12-12T16:27:06.836645335+00:00 stderr F I1212 16:27:06.836486 12 trace.go:236] Trace[595155319]: "Patch" accept:application/vnd.kubernetes.protobuf;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json;as=PartialObjectMetadata;g=meta.k8s.io;v=v1,application/json,audit-id:4463e663-dec0-43a1-bbbe-8efe3d34d434,client:10.217.0.21,api-group:apiextensions.k8s.io,api-version:v1,name:prometheuses.monitoring.rhobs,subresource:,namespace:,protocol:HTTP/2.0,resource:customresourcedefinitions,scope:resource,url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/prometheuses.monitoring.rhobs,user-agent:olm/v0.0.0 (linux/amd64) 
kubernetes/$Format,verb:PATCH (12-Dec-2025 16:27:06.142) (total time: 694ms): 2025-12-12T16:27:06.836645335+00:00 stderr F Trace[595155319]: ["GuaranteedUpdate etcd3" audit-id:4463e663-dec0-43a1-bbbe-8efe3d34d434,key:/apiextensions.k8s.io/customresourcedefinitions/prometheuses.monitoring.rhobs,type:*apiextensions.CustomResourceDefinition,resource:customresourcedefinitions.apiextensions.k8s.io 690ms (16:27:06.145) 2025-12-12T16:27:06.836645335+00:00 stderr F Trace[595155319]: ---"About to Encode" 623ms (16:27:06.784)] 2025-12-12T16:27:06.836645335+00:00 stderr F Trace[595155319]: ---"About to check admission control" 236ms (16:27:06.397) 2025-12-12T16:27:06.836645335+00:00 stderr F Trace[595155319]: ---"Object stored in database" 439ms (16:27:06.836) 2025-12-12T16:27:06.836645335+00:00 stderr F Trace[595155319]: [694.219171ms] [694.219171ms] END 2025-12-12T16:27:06.858278993+00:00 stderr F I1212 16:27:06.857978 12 controller.go:231] Updating CRD OpenAPI spec because prometheuses.monitoring.rhobs changed 2025-12-12T16:27:06.932757967+00:00 stderr F I1212 16:27:06.932625 12 controller.go:231] Updating CRD OpenAPI spec because elasticmapsservers.maps.k8s.elastic.co changed 2025-12-12T16:27:06.939472297+00:00 stderr F I1212 16:27:06.937793 12 controller.go:231] Updating CRD OpenAPI spec because apmservers.apm.k8s.elastic.co changed 2025-12-12T16:27:06.941301504+00:00 stderr F I1212 16:27:06.941164 12 controller.go:231] Updating CRD OpenAPI spec because agents.agent.k8s.elastic.co changed 2025-12-12T16:27:06.941922719+00:00 stderr F I1212 16:27:06.941656 12 controller.go:231] Updating CRD OpenAPI spec because beats.beat.k8s.elastic.co changed 2025-12-12T16:27:06.973653653+00:00 stderr F I1212 16:27:06.973513 12 controller.go:231] Updating CRD OpenAPI spec because prometheusrules.monitoring.rhobs changed 2025-12-12T16:27:07.130333418+00:00 stderr F I1212 16:27:07.130108 12 controller.go:231] Updating CRD OpenAPI spec because scrapeconfigs.monitoring.rhobs changed 2025-12-12T16:27:07.140836534+00:00 stderr F I1212 16:27:07.140514 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearchautoscalers.autoscaling.k8s.elastic.co changed 2025-12-12T16:27:07.208390313+00:00 stderr F I1212 16:27:07.207245 12 controller.go:231] Updating CRD OpenAPI spec because servicemonitors.monitoring.rhobs changed 2025-12-12T16:27:07.241930742+00:00 stderr F I1212 16:27:07.241116 12 controller.go:231] Updating CRD OpenAPI spec because thanosqueriers.monitoring.rhobs changed 2025-12-12T16:27:07.257091886+00:00 stderr F I1212 16:27:07.256913 12 controller.go:231] Updating CRD OpenAPI spec because servicemonitors.monitoring.rhobs changed 2025-12-12T16:27:07.664076726+00:00 stderr F I1212 16:27:07.657703 12 controller.go:231] Updating CRD OpenAPI spec because scrapeconfigs.monitoring.rhobs changed 2025-12-12T16:27:07.747813575+00:00 stderr F I1212 16:27:07.747520 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearchautoscalers.autoscaling.k8s.elastic.co changed 2025-12-12T16:27:07.795097142+00:00 stderr F I1212 16:27:07.793995 12 controller.go:231] Updating CRD OpenAPI spec because thanosqueriers.monitoring.rhobs changed 2025-12-12T16:27:07.807787353+00:00 stderr F I1212 16:27:07.806964 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearches.elasticsearch.k8s.elastic.co changed 2025-12-12T16:27:07.921826150+00:00 stderr F I1212 16:27:07.921699 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearches.elasticsearch.k8s.elastic.co changed 
2025-12-12T16:27:08.001529937+00:00 stderr F I1212 16:27:08.001396 12 controller.go:231] Updating CRD OpenAPI spec because thanosrulers.monitoring.rhobs changed 2025-12-12T16:27:08.347975525+00:00 stderr F I1212 16:27:08.343019 12 controller.go:231] Updating CRD OpenAPI spec because thanosrulers.monitoring.rhobs changed 2025-12-12T16:27:08.360161913+00:00 stderr F I1212 16:27:08.359335 12 controller.go:231] Updating CRD OpenAPI spec because enterprisesearches.enterprisesearch.k8s.elastic.co changed 2025-12-12T16:27:08.411590785+00:00 stderr F E1212 16:27:08.411407 12 repairip.go:523] "Unhandled Error" err="the IPAddress: 10.217.4.177 for Service obo-prometheus-operator-admission-webhook/openshift-operators has a wrong reference &v1.ParentReference{Group:\"\", Resource:\"services\", Namespace:\"openshift-operators\", Name:\"obo-prometheus-operator-admission-webhook\"}; cleaning up" logger="UnhandledError" 2025-12-12T16:27:08.420447779+00:00 stderr F I1212 16:27:08.420258 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:27:08.420447779+00:00 stderr F I1212 16:27:08.420308 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:27:08.420447779+00:00 stderr F I1212 16:27:08.420317 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 71.922µs 2025-12-12T16:27:08.428596705+00:00 stderr F I1212 16:27:08.428463 12 ipallocator.go:374] error releasing ip 10.217.4.177 : ipaddresses.networking.k8s.io "10.217.4.177" not found 2025-12-12T16:27:08.482624243+00:00 stderr F I1212 16:27:08.482472 12 controller.go:231] Updating CRD OpenAPI spec because enterprisesearches.enterprisesearch.k8s.elastic.co changed 2025-12-12T16:27:08.951344214+00:00 stderr F I1212 16:27:08.950540 12 controller.go:231] Updating CRD OpenAPI spec because kibanas.kibana.k8s.elastic.co changed 2025-12-12T16:27:09.012883432+00:00 stderr F E1212 16:27:09.012473 12 repairip.go:523] "Unhandled Error" err="the IPAddress: 10.217.5.230 for Service observability-operator/openshift-operators has a wrong reference &v1.ParentReference{Group:\"\", Resource:\"services\", Namespace:\"openshift-operators\", Name:\"observability-operator\"}; cleaning up" logger="UnhandledError" 2025-12-12T16:27:09.017734985+00:00 stderr F I1212 16:27:09.016923 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:27:09.017734985+00:00 stderr F I1212 16:27:09.016950 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:27:09.017734985+00:00 stderr F I1212 16:27:09.016955 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 38.471µs 2025-12-12T16:27:09.025392618+00:00 stderr F I1212 16:27:09.025261 12 ipallocator.go:374] error releasing ip 10.217.5.230 : ipaddresses.networking.k8s.io "10.217.5.230" not found 2025-12-12T16:27:09.058654450+00:00 stderr F I1212 16:27:09.057663 12 controller.go:231] Updating CRD OpenAPI spec because kibanas.kibana.k8s.elastic.co changed 2025-12-12T16:27:09.183752076+00:00 stderr F I1212 16:27:09.183579 12 controller.go:231] Updating CRD OpenAPI spec because observabilityinstallers.observability.openshift.io changed 2025-12-12T16:27:09.278016452+00:00 stderr F I1212 16:27:09.277868 12 controller.go:231] Updating CRD OpenAPI spec because observabilityinstallers.observability.openshift.io changed 2025-12-12T16:27:09.302077751+00:00 stderr F I1212 16:27:09.301939 12 controller.go:231] Updating CRD OpenAPI spec because uiplugins.observability.openshift.io changed 2025-12-12T16:27:09.353559824+00:00 stderr 
F I1212 16:27:09.353395 12 controller.go:231] Updating CRD OpenAPI spec because uiplugins.observability.openshift.io changed 2025-12-12T16:27:09.577331867+00:00 stderr F I1212 16:27:09.575453 12 controller.go:231] Updating CRD OpenAPI spec because logstashes.logstash.k8s.elastic.co changed 2025-12-12T16:27:09.606542247+00:00 stderr F I1212 16:27:09.604105 12 controller.go:231] Updating CRD OpenAPI spec because perses.perses.dev changed 2025-12-12T16:27:09.687653790+00:00 stderr F I1212 16:27:09.686723 12 controller.go:231] Updating CRD OpenAPI spec because logstashes.logstash.k8s.elastic.co changed 2025-12-12T16:27:09.792429941+00:00 stderr F I1212 16:27:09.791843 12 controller.go:231] Updating CRD OpenAPI spec because perses.perses.dev changed 2025-12-12T16:27:10.121733666+00:00 stderr F I1212 16:27:10.121419 12 controller.go:231] Updating CRD OpenAPI spec because persesdashboards.perses.dev changed 2025-12-12T16:27:10.145380344+00:00 stderr F I1212 16:27:10.143219 12 controller.go:231] Updating CRD OpenAPI spec because stackconfigpolicies.stackconfigpolicy.k8s.elastic.co changed 2025-12-12T16:27:10.171323571+00:00 stderr F I1212 16:27:10.170767 12 controller.go:231] Updating CRD OpenAPI spec because persesdashboards.perses.dev changed 2025-12-12T16:27:10.261131684+00:00 stderr F I1212 16:27:10.260971 12 controller.go:231] Updating CRD OpenAPI spec because stackconfigpolicies.stackconfigpolicy.k8s.elastic.co changed 2025-12-12T16:27:10.612405554+00:00 stderr F I1212 16:27:10.612261 12 controller.go:231] Updating CRD OpenAPI spec because agents.agent.k8s.elastic.co changed 2025-12-12T16:27:10.681208635+00:00 stderr F I1212 16:27:10.681054 12 controller.go:231] Updating CRD OpenAPI spec because apmservers.apm.k8s.elastic.co changed 2025-12-12T16:27:10.710830145+00:00 stderr F I1212 16:27:10.710085 12 controller.go:231] Updating CRD OpenAPI spec because agents.agent.k8s.elastic.co changed 2025-12-12T16:27:10.724549822+00:00 stderr F I1212 16:27:10.724356 12 controller.go:231] Updating CRD OpenAPI spec because persesdatasources.perses.dev changed 2025-12-12T16:27:10.728298787+00:00 stderr F I1212 16:27:10.728150 12 controller.go:231] Updating CRD OpenAPI spec because beats.beat.k8s.elastic.co changed 2025-12-12T16:27:10.794278137+00:00 stderr F I1212 16:27:10.792374 12 controller.go:231] Updating CRD OpenAPI spec because beats.beat.k8s.elastic.co changed 2025-12-12T16:27:10.808655301+00:00 stderr F I1212 16:27:10.807793 12 controller.go:231] Updating CRD OpenAPI spec because apmservers.apm.k8s.elastic.co changed 2025-12-12T16:27:10.840049985+00:00 stderr F I1212 16:27:10.838463 12 controller.go:231] Updating CRD OpenAPI spec because persesdatasources.perses.dev changed 2025-12-12T16:27:10.958671047+00:00 stderr F I1212 16:27:10.957811 12 controller.go:231] Updating CRD OpenAPI spec because elasticmapsservers.maps.k8s.elastic.co changed 2025-12-12T16:27:11.023388345+00:00 stderr F I1212 16:27:11.023221 12 controller.go:231] Updating CRD OpenAPI spec because elasticmapsservers.maps.k8s.elastic.co changed 2025-12-12T16:27:11.560068048+00:00 stderr F I1212 16:27:11.559921 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearchautoscalers.autoscaling.k8s.elastic.co changed 2025-12-12T16:27:11.647694116+00:00 stderr F I1212 16:27:11.647572 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearchautoscalers.autoscaling.k8s.elastic.co changed 2025-12-12T16:27:12.211626887+00:00 stderr F I1212 16:27:12.211456 12 controller.go:231] Updating CRD OpenAPI spec because 
elasticsearches.elasticsearch.k8s.elastic.co changed 2025-12-12T16:27:12.333880111+00:00 stderr F I1212 16:27:12.333715 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearches.elasticsearch.k8s.elastic.co changed 2025-12-12T16:27:12.769690341+00:00 stderr F I1212 16:27:12.769516 12 controller.go:231] Updating CRD OpenAPI spec because enterprisesearches.enterprisesearch.k8s.elastic.co changed 2025-12-12T16:27:12.867423955+00:00 stderr F I1212 16:27:12.867255 12 controller.go:231] Updating CRD OpenAPI spec because enterprisesearches.enterprisesearch.k8s.elastic.co changed 2025-12-12T16:27:13.382259655+00:00 stderr F I1212 16:27:13.382091 12 controller.go:231] Updating CRD OpenAPI spec because kibanas.kibana.k8s.elastic.co changed 2025-12-12T16:27:13.478548711+00:00 stderr F I1212 16:27:13.478386 12 controller.go:231] Updating CRD OpenAPI spec because kibanas.kibana.k8s.elastic.co changed 2025-12-12T16:27:13.987288277+00:00 stderr F I1212 16:27:13.987095 12 controller.go:231] Updating CRD OpenAPI spec because logstashes.logstash.k8s.elastic.co changed 2025-12-12T16:27:14.084913728+00:00 stderr F I1212 16:27:14.084105 12 controller.go:231] Updating CRD OpenAPI spec because logstashes.logstash.k8s.elastic.co changed 2025-12-12T16:27:14.558650288+00:00 stderr F I1212 16:27:14.558444 12 controller.go:231] Updating CRD OpenAPI spec because stackconfigpolicies.stackconfigpolicy.k8s.elastic.co changed 2025-12-12T16:27:14.619905328+00:00 stderr F I1212 16:27:14.619766 12 controller.go:231] Updating CRD OpenAPI spec because stackconfigpolicies.stackconfigpolicy.k8s.elastic.co changed 2025-12-12T16:27:17.356166868+00:00 stderr F I1212 16:27:17.355978 12 handler.go:288] Adding GroupVersion acme.cert-manager.io v1 to ResourceManager 2025-12-12T16:27:17.356166868+00:00 stderr F I1212 16:27:17.356045 12 controller.go:237] Updating CRD OpenAPI spec because challenges.acme.cert-manager.io changed 2025-12-12T16:27:17.366848959+00:00 stderr F I1212 16:27:17.366453 12 controller.go:237] Updating CRD OpenAPI spec because orders.acme.cert-manager.io changed 2025-12-12T16:27:17.366848959+00:00 stderr F I1212 16:27:17.366717 12 handler.go:288] Adding GroupVersion acme.cert-manager.io v1 to ResourceManager 2025-12-12T16:27:18.005910403+00:00 stderr F I1212 16:27:18.005765 12 alloc.go:328] "allocated clusterIPs" service="cert-manager-operator/cert-manager-operator-controller-manager-metrics-service" clusterIPs={"IPv4":"10.217.4.158"} 2025-12-12T16:27:18.013612678+00:00 stderr F I1212 16:27:18.012281 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:27:18.013612678+00:00 stderr F I1212 16:27:18.012365 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:27:18.013612678+00:00 stderr F I1212 16:27:18.012373 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 108.363µs 2025-12-12T16:27:18.038591330+00:00 stderr F I1212 16:27:18.038460 12 controller.go:237] Updating CRD OpenAPI spec because certificaterequests.cert-manager.io changed 2025-12-12T16:27:18.038695162+00:00 stderr F I1212 16:27:18.038638 12 handler.go:288] Adding GroupVersion cert-manager.io v1 to ResourceManager 2025-12-12T16:27:18.074002186+00:00 stderr F I1212 16:27:18.072165 12 controller.go:237] Updating CRD OpenAPI spec because certificates.cert-manager.io changed 2025-12-12T16:27:18.074002186+00:00 stderr F I1212 16:27:18.072701 12 handler.go:288] Adding GroupVersion cert-manager.io v1 to ResourceManager 2025-12-12T16:27:18.291854500+00:00 stderr F I1212 
16:27:18.291649 12 handler.go:288] Adding GroupVersion cert-manager.io v1 to ResourceManager 2025-12-12T16:27:18.291854500+00:00 stderr F I1212 16:27:18.291731 12 controller.go:237] Updating CRD OpenAPI spec because clusterissuers.cert-manager.io changed 2025-12-12T16:27:18.451617513+00:00 stderr F I1212 16:27:18.451469 12 controller.go:237] Updating CRD OpenAPI spec because issuers.cert-manager.io changed 2025-12-12T16:27:18.451989142+00:00 stderr F I1212 16:27:18.451879 12 handler.go:288] Adding GroupVersion cert-manager.io v1 to ResourceManager 2025-12-12T16:27:18.485949282+00:00 stderr F I1212 16:27:18.485429 12 controller.go:237] Updating CRD OpenAPI spec because certmanagers.operator.openshift.io changed 2025-12-12T16:27:18.485949282+00:00 stderr F I1212 16:27:18.485788 12 handler.go:288] Adding GroupVersion operator.openshift.io v1alpha1 to ResourceManager 2025-12-12T16:27:18.518345962+00:00 stderr F I1212 16:27:18.509913 12 controller.go:237] Updating CRD OpenAPI spec because istiocsrs.operator.openshift.io changed 2025-12-12T16:27:18.518345962+00:00 stderr F I1212 16:27:18.509975 12 handler.go:288] Adding GroupVersion operator.openshift.io v1alpha1 to ResourceManager 2025-12-12T16:27:18.591386651+00:00 stderr F W1212 16:27:18.591236 12 dispatcher.go:205] Failed calling webhook, failing open elastic-es-validation-v1.k8s.elastic.co: failed calling webhook "elastic-es-validation-v1.k8s.elastic.co": failed to call webhook: Post "https://elastic-operator-service.service-telemetry.svc:443/validate-elasticsearch-k8s-elastic-co-v1-elasticsearch?timeout=10s": no endpoints available for service "elastic-operator-service" 2025-12-12T16:27:18.591426762+00:00 stderr F E1212 16:27:18.591385 12 dispatcher.go:213] "Unhandled Error" err="failed calling webhook \"elastic-es-validation-v1.k8s.elastic.co\": failed to call webhook: Post \"https://elastic-operator-service.service-telemetry.svc:443/validate-elasticsearch-k8s-elastic-co-v1-elasticsearch?timeout=10s\": no endpoints available for service \"elastic-operator-service\"" logger="UnhandledError" 2025-12-12T16:27:18.593103834+00:00 stderr F I1212 16:27:18.592838 12 controller.go:667] quota admission added evaluator for: elasticsearches.elasticsearch.k8s.elastic.co 2025-12-12T16:27:18.593103834+00:00 stderr F I1212 16:27:18.592932 12 controller.go:667] quota admission added evaluator for: elasticsearches.elasticsearch.k8s.elastic.co 2025-12-12T16:27:22.087554523+00:00 stderr F I1212 16:27:22.083661 12 controller.go:231] Updating CRD OpenAPI spec because certificaterequests.cert-manager.io changed 2025-12-12T16:27:22.111671794+00:00 stderr F I1212 16:27:22.107080 12 controller.go:231] Updating CRD OpenAPI spec because certificates.cert-manager.io changed 2025-12-12T16:27:22.123393640+00:00 stderr F I1212 16:27:22.122335 12 controller.go:231] Updating CRD OpenAPI spec because orders.acme.cert-manager.io changed 2025-12-12T16:27:22.143255233+00:00 stderr F I1212 16:27:22.142563 12 controller.go:231] Updating CRD OpenAPI spec because certmanagers.operator.openshift.io changed 2025-12-12T16:27:22.168443970+00:00 stderr F I1212 16:27:22.167171 12 controller.go:231] Updating CRD OpenAPI spec because istiocsrs.operator.openshift.io changed 2025-12-12T16:27:22.193469844+00:00 stderr F I1212 16:27:22.193304 12 controller.go:231] Updating CRD OpenAPI spec because challenges.acme.cert-manager.io changed 2025-12-12T16:27:22.207383356+00:00 stderr F I1212 16:27:22.207222 12 controller.go:231] Updating CRD OpenAPI spec because issuers.cert-manager.io changed 
2025-12-12T16:27:22.216292921+00:00 stderr F I1212 16:27:22.216109 12 controller.go:231] Updating CRD OpenAPI spec because clusterissuers.cert-manager.io changed 2025-12-12T16:27:30.351119161+00:00 stderr F I1212 16:27:30.350965 12 alloc.go:328] "allocated clusterIPs" service="service-telemetry/elasticsearch-es-http" clusterIPs={"IPv4":"10.217.4.107"} 2025-12-12T16:27:30.352398914+00:00 stderr F I1212 16:27:30.352323 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:27:30.352398914+00:00 stderr F I1212 16:27:30.352362 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:27:30.352398914+00:00 stderr F I1212 16:27:30.352369 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 56.852µs 2025-12-12T16:27:30.365864504+00:00 stderr F I1212 16:27:30.365656 12 alloc.go:328] "allocated clusterIPs" service="service-telemetry/elasticsearch-es-internal-http" clusterIPs={"IPv4":"10.217.5.46"} 2025-12-12T16:27:30.559338191+00:00 stderr F I1212 16:27:30.559115 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.003690989790793707 seatDemandStdev=0.06064129273983169 seatDemandSmoothed=4.2888668947693915 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:27:31.287011108+00:00 stderr F I1212 16:27:31.286253 12 controller.go:667] quota admission added evaluator for: statefulsets.apps 2025-12-12T16:27:31.287011108+00:00 stderr F I1212 16:27:31.286344 12 controller.go:667] quota admission added evaluator for: statefulsets.apps 2025-12-12T16:27:31.321124331+00:00 stderr F I1212 16:27:31.320872 12 controller.go:667] quota admission added evaluator for: controllerrevisions.apps 2025-12-12T16:27:32.942755243+00:00 stderr F I1212 16:27:32.938285 12 cacher.go:847] cacher (clusterroles.rbac.authorization.k8s.io): 1 objects queued in incoming channel. 2025-12-12T16:27:32.942755243+00:00 stderr F I1212 16:27:32.938313 12 cacher.go:847] cacher (clusterroles.rbac.authorization.k8s.io): 2 objects queued in incoming channel. 2025-12-12T16:27:32.942755243+00:00 stderr F I1212 16:27:32.938324 12 cacher.go:847] cacher (clusterroles.rbac.authorization.k8s.io): 3 objects queued in incoming channel. 2025-12-12T16:27:32.942755243+00:00 stderr F I1212 16:27:32.938328 12 cacher.go:847] cacher (clusterroles.rbac.authorization.k8s.io): 4 objects queued in incoming channel. 2025-12-12T16:27:32.942755243+00:00 stderr F I1212 16:27:32.938334 12 cacher.go:847] cacher (clusterroles.rbac.authorization.k8s.io): 5 objects queued in incoming channel. 2025-12-12T16:27:32.942755243+00:00 stderr F I1212 16:27:32.938338 12 cacher.go:847] cacher (clusterroles.rbac.authorization.k8s.io): 6 objects queued in incoming channel. 
2025-12-12T16:27:36.106321308+00:00 stderr F I1212 16:27:36.106172 12 store.go:1663] "Monitoring resource count at path" resource="certmanagers.operator.openshift.io" path="//operator.openshift.io/certmanagers" 2025-12-12T16:27:36.109204801+00:00 stderr F I1212 16:27:36.108913 12 cacher.go:469] cacher (certmanagers.operator.openshift.io): initialized 2025-12-12T16:27:36.109204801+00:00 stderr F I1212 16:27:36.108949 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1alpha1, Kind=CertManager" reflector="storage/cacher.go:/operator.openshift.io/certmanagers" 2025-12-12T16:27:36.159489933+00:00 stderr F I1212 16:27:36.156799 12 store.go:1663] "Monitoring resource count at path" resource="istiocsrs.operator.openshift.io" path="//operator.openshift.io/istiocsrs" 2025-12-12T16:27:36.159489933+00:00 stderr F I1212 16:27:36.158779 12 cacher.go:469] cacher (istiocsrs.operator.openshift.io): initialized 2025-12-12T16:27:36.159489933+00:00 stderr F I1212 16:27:36.158820 12 reflector.go:430] "Caches populated" type="operator.openshift.io/v1alpha1, Kind=IstioCSR" reflector="storage/cacher.go:/operator.openshift.io/istiocsrs" 2025-12-12T16:27:36.183926522+00:00 stderr F I1212 16:27:36.183776 12 store.go:1663] "Monitoring resource count at path" resource="certificates.cert-manager.io" path="//cert-manager.io/certificates" 2025-12-12T16:27:36.186234930+00:00 stderr F I1212 16:27:36.185742 12 cacher.go:469] cacher (certificates.cert-manager.io): initialized 2025-12-12T16:27:36.186234930+00:00 stderr F I1212 16:27:36.185799 12 reflector.go:430] "Caches populated" type="cert-manager.io/v1, Kind=Certificate" reflector="storage/cacher.go:/cert-manager.io/certificates" 2025-12-12T16:27:36.253376109+00:00 stderr F I1212 16:27:36.252984 12 cacher.go:847] cacher (mutatingwebhookconfigurations.admissionregistration.k8s.io): 1 objects queued in incoming channel. 
2025-12-12T16:27:36.487592077+00:00 stderr F I1212 16:27:36.487429 12 store.go:1663] "Monitoring resource count at path" resource="orders.acme.cert-manager.io" path="//acme.cert-manager.io/orders" 2025-12-12T16:27:36.491308121+00:00 stderr F I1212 16:27:36.491221 12 cacher.go:469] cacher (orders.acme.cert-manager.io): initialized 2025-12-12T16:27:36.491308121+00:00 stderr F I1212 16:27:36.491264 12 reflector.go:430] "Caches populated" type="acme.cert-manager.io/v1, Kind=Order" reflector="storage/cacher.go:/acme.cert-manager.io/orders" 2025-12-12T16:27:36.515995596+00:00 stderr F I1212 16:27:36.515674 12 store.go:1663] "Monitoring resource count at path" resource="challenges.acme.cert-manager.io" path="//acme.cert-manager.io/challenges" 2025-12-12T16:27:36.517808002+00:00 stderr F I1212 16:27:36.517690 12 cacher.go:469] cacher (challenges.acme.cert-manager.io): initialized 2025-12-12T16:27:36.517808002+00:00 stderr F I1212 16:27:36.517749 12 reflector.go:430] "Caches populated" type="acme.cert-manager.io/v1, Kind=Challenge" reflector="storage/cacher.go:/acme.cert-manager.io/challenges" 2025-12-12T16:27:36.546844577+00:00 stderr F I1212 16:27:36.546375 12 store.go:1663] "Monitoring resource count at path" resource="certificaterequests.cert-manager.io" path="//cert-manager.io/certificaterequests" 2025-12-12T16:27:36.552769727+00:00 stderr F I1212 16:27:36.552644 12 cacher.go:469] cacher (certificaterequests.cert-manager.io): initialized 2025-12-12T16:27:36.552769727+00:00 stderr F I1212 16:27:36.552684 12 reflector.go:430] "Caches populated" type="cert-manager.io/v1, Kind=CertificateRequest" reflector="storage/cacher.go:/cert-manager.io/certificaterequests" 2025-12-12T16:27:36.608801185+00:00 stderr F I1212 16:27:36.608640 12 store.go:1663] "Monitoring resource count at path" resource="issuers.cert-manager.io" path="//cert-manager.io/issuers" 2025-12-12T16:27:36.614324305+00:00 stderr F I1212 16:27:36.614165 12 cacher.go:469] cacher (issuers.cert-manager.io): initialized 2025-12-12T16:27:36.614324305+00:00 stderr F I1212 16:27:36.614261 12 reflector.go:430] "Caches populated" type="cert-manager.io/v1, Kind=Issuer" reflector="storage/cacher.go:/cert-manager.io/issuers" 2025-12-12T16:27:36.642576450+00:00 stderr F I1212 16:27:36.642067 12 store.go:1663] "Monitoring resource count at path" resource="clusterissuers.cert-manager.io" path="//cert-manager.io/clusterissuers" 2025-12-12T16:27:36.643922824+00:00 stderr F I1212 16:27:36.643299 12 cacher.go:469] cacher (clusterissuers.cert-manager.io): initialized 2025-12-12T16:27:36.643922824+00:00 stderr F I1212 16:27:36.643343 12 reflector.go:430] "Caches populated" type="cert-manager.io/v1, Kind=ClusterIssuer" reflector="storage/cacher.go:/cert-manager.io/clusterissuers" 2025-12-12T16:27:39.726739595+00:00 stderr F I1212 16:27:39.726297 12 alloc.go:328] "allocated clusterIPs" service="cert-manager/cert-manager-cainjector" clusterIPs={"IPv4":"10.217.5.98"} 2025-12-12T16:27:39.732959033+00:00 stderr F I1212 16:27:39.732819 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:27:39.732959033+00:00 stderr F I1212 16:27:39.732843 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:27:39.732959033+00:00 stderr F I1212 16:27:39.732849 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 38.681µs 2025-12-12T16:27:40.117912475+00:00 stderr F I1212 16:27:40.117769 12 alloc.go:328] "allocated clusterIPs" service="cert-manager/cert-manager-webhook" clusterIPs={"IPv4":"10.217.5.200"} 
2025-12-12T16:27:40.124079561+00:00 stderr F I1212 16:27:40.123081 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:27:40.124079561+00:00 stderr F I1212 16:27:40.123115 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:27:40.124079561+00:00 stderr F I1212 16:27:40.123121 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 50.301µs 2025-12-12T16:27:40.559982683+00:00 stderr F I1212 16:27:40.559743 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=3 seatDemandAvg=0.01928787508805532 seatDemandStdev=0.14873133112722742 seatDemandSmoothed=4.194087397932647 fairFrac=2.2796127562642368 currentCL=3 concurrencyDenominator=3 backstop=false 2025-12-12T16:27:47.128156437+00:00 stderr F I1212 16:27:47.127871 12 alloc.go:328] "allocated clusterIPs" service="cert-manager/cert-manager" clusterIPs={"IPv4":"10.217.5.198"} 2025-12-12T16:27:47.135816361+00:00 stderr F I1212 16:27:47.133079 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:27:47.135816361+00:00 stderr F I1212 16:27:47.133118 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:27:47.135816361+00:00 stderr F I1212 16:27:47.133125 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 57.582µs 2025-12-12T16:27:50.562230440+00:00 stderr F I1212 16:27:50.560724 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.014000835494546726 seatDemandStdev=0.17069127297069633 seatDemandSmoothed=4.101871306274897 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:28:00.561944200+00:00 stderr F I1212 16:28:00.561646 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0014707010589282944 seatDemandStdev=0.038321509590875484 seatDemandSmoothed=4.00844348707552 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:28:10.562828807+00:00 stderr F I1212 16:28:10.562233 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.003894641694736285 seatDemandStdev=0.06610476969060498 seatDemandSmoothed=3.917859273334646 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:28:20.564360901+00:00 stderr F I1212 16:28:20.563466 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=4 seatDemandAvg=0.006820459929208591 seatDemandStdev=0.09345145603953167 seatDemandSmoothed=3.83005476411523 fairFrac=2.2796127562642368 currentCL=4 concurrencyDenominator=4 backstop=false 2025-12-12T16:28:24.480270059+00:00 stderr F E1212 16:28:24.480050 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:28:30.437374905+00:00 stderr F I1212 16:28:30.437210 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:28:30.437374905+00:00 stderr F I1212 16:28:30.437258 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:28:30.437374905+00:00 stderr F I1212 16:28:30.437265 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 71.502µs 2025-12-12T16:28:30.564791340+00:00 stderr F I1212 16:28:30.564566 12 apf_controller.go:493] 
"Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0016918505533858625 seatDemandStdev=0.04109730155485723 seatDemandSmoothed=3.742947655039069 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:28:30.652863239+00:00 stderr F I1212 16:28:30.652710 12 controller.go:231] Updating CRD OpenAPI spec because imagecontentsourcepolicies.operator.openshift.io changed 2025-12-12T16:28:30.653993257+00:00 stderr F I1212 16:28:30.653667 12 controller.go:231] Updating CRD OpenAPI spec because olmconfigs.operators.coreos.com changed 2025-12-12T16:28:30.653993257+00:00 stderr F I1212 16:28:30.653702 12 controller.go:231] Updating CRD OpenAPI spec because alertingrules.monitoring.openshift.io changed 2025-12-12T16:28:30.653993257+00:00 stderr F I1212 16:28:30.653864 12 controller.go:231] Updating CRD OpenAPI spec because ipamclaims.k8s.cni.cncf.io changed 2025-12-12T16:28:30.654477500+00:00 stderr F I1212 16:28:30.654157 12 controller.go:231] Updating CRD OpenAPI spec because servicemonitors.monitoring.coreos.com changed 2025-12-12T16:28:30.654614333+00:00 stderr F I1212 16:28:30.654547 12 controller.go:231] Updating CRD OpenAPI spec because storages.operator.openshift.io changed 2025-12-12T16:28:30.655053064+00:00 stderr F I1212 16:28:30.654970 12 controller.go:231] Updating CRD OpenAPI spec because configs.imageregistry.operator.openshift.io changed 2025-12-12T16:28:30.656200153+00:00 stderr F I1212 16:28:30.656034 12 controller.go:231] Updating CRD OpenAPI spec because dnses.config.openshift.io changed 2025-12-12T16:28:30.656200153+00:00 stderr F I1212 16:28:30.656063 12 controller.go:231] Updating CRD OpenAPI spec because prometheuses.monitoring.coreos.com changed 2025-12-12T16:28:30.656456640+00:00 stderr F I1212 16:28:30.656338 12 controller.go:231] Updating CRD OpenAPI spec because proxies.config.openshift.io changed 2025-12-12T16:28:30.656456640+00:00 stderr F I1212 16:28:30.656392 12 controller.go:231] Updating CRD OpenAPI spec because clusterautoscalers.autoscaling.openshift.io changed 2025-12-12T16:28:30.656567583+00:00 stderr F I1212 16:28:30.656488 12 controller.go:231] Updating CRD OpenAPI spec because imagepruners.imageregistry.operator.openshift.io changed 2025-12-12T16:28:30.656734887+00:00 stderr F I1212 16:28:30.656580 12 controller.go:231] Updating CRD OpenAPI spec because thanosrulers.monitoring.rhobs changed 2025-12-12T16:28:30.656734887+00:00 stderr F I1212 16:28:30.656610 12 controller.go:231] Updating CRD OpenAPI spec because logstashes.logstash.k8s.elastic.co changed 2025-12-12T16:28:30.656877130+00:00 stderr F I1212 16:28:30.656815 12 controller.go:231] Updating CRD OpenAPI spec because adminpolicybasedexternalroutes.k8s.ovn.org changed 2025-12-12T16:28:30.656948572+00:00 stderr F I1212 16:28:30.656859 12 controller.go:231] Updating CRD OpenAPI spec because clusteroperators.config.openshift.io changed 2025-12-12T16:28:30.657099916+00:00 stderr F I1212 16:28:30.657050 12 controller.go:231] Updating CRD OpenAPI spec because clusterserviceversions.operators.coreos.com changed 2025-12-12T16:28:30.657099916+00:00 stderr F I1212 16:28:30.657067 12 controller.go:231] Updating CRD OpenAPI spec because clusteruserdefinednetworks.k8s.ovn.org changed 2025-12-12T16:28:30.657099916+00:00 stderr F I1212 16:28:30.657075 12 controller.go:231] Updating CRD OpenAPI spec because dnses.operator.openshift.io changed 2025-12-12T16:28:30.657312251+00:00 stderr F I1212 16:28:30.657163 12 controller.go:231] Updating CRD OpenAPI spec 
because gateways.gateway.networking.k8s.io changed 2025-12-12T16:28:30.657312251+00:00 stderr F I1212 16:28:30.657239 12 controller.go:231] Updating CRD OpenAPI spec because infrastructures.config.openshift.io changed 2025-12-12T16:28:30.657330682+00:00 stderr F I1212 16:28:30.657308 12 controller.go:231] Updating CRD OpenAPI spec because ipaddresses.ipam.cluster.x-k8s.io changed 2025-12-12T16:28:30.657755363+00:00 stderr F I1212 16:28:30.657481 12 controller.go:231] Updating CRD OpenAPI spec because ingresses.config.openshift.io changed 2025-12-12T16:28:30.657755363+00:00 stderr F I1212 16:28:30.657524 12 controller.go:231] Updating CRD OpenAPI spec because kubestorageversionmigrators.operator.openshift.io changed 2025-12-12T16:28:30.657755363+00:00 stderr F I1212 16:28:30.657533 12 controller.go:231] Updating CRD OpenAPI spec because machineconfignodes.machineconfiguration.openshift.io changed 2025-12-12T16:28:30.657755363+00:00 stderr F I1212 16:28:30.657540 12 controller.go:231] Updating CRD OpenAPI spec because machinesets.machine.openshift.io changed 2025-12-12T16:28:30.657755363+00:00 stderr F I1212 16:28:30.657696 12 controller.go:231] Updating CRD OpenAPI spec because podmonitors.monitoring.coreos.com changed 2025-12-12T16:28:30.657755363+00:00 stderr F I1212 16:28:30.657716 12 controller.go:231] Updating CRD OpenAPI spec because rolebindingrestrictions.authorization.openshift.io changed 2025-12-12T16:28:30.657755363+00:00 stderr F I1212 16:28:30.657726 12 controller.go:231] Updating CRD OpenAPI spec because monitoringstacks.monitoring.rhobs changed 2025-12-12T16:28:30.657856925+00:00 stderr F I1212 16:28:30.657803 12 controller.go:231] Updating CRD OpenAPI spec because persesdatasources.perses.dev changed 2025-12-12T16:28:30.657945337+00:00 stderr F I1212 16:28:30.657903 12 controller.go:231] Updating CRD OpenAPI spec because consoleyamlsamples.console.openshift.io changed 2025-12-12T16:28:30.657994759+00:00 stderr F I1212 16:28:30.657953 12 controller.go:231] Updating CRD OpenAPI spec because ipaddressclaims.ipam.cluster.x-k8s.io changed 2025-12-12T16:28:30.658092991+00:00 stderr F I1212 16:28:30.658053 12 controller.go:231] Updating CRD OpenAPI spec because openshiftcontrollermanagers.operator.openshift.io changed 2025-12-12T16:28:30.658092991+00:00 stderr F I1212 16:28:30.658065 12 controller.go:231] Updating CRD OpenAPI spec because securitycontextconstraints.security.openshift.io changed 2025-12-12T16:28:30.658163683+00:00 stderr F I1212 16:28:30.658127 12 controller.go:231] Updating CRD OpenAPI spec because certificates.cert-manager.io changed 2025-12-12T16:28:30.658212484+00:00 stderr F I1212 16:28:30.658154 12 controller.go:231] Updating CRD OpenAPI spec because alertrelabelconfigs.monitoring.openshift.io changed 2025-12-12T16:28:30.658376558+00:00 stderr F I1212 16:28:30.658247 12 controller.go:231] Updating CRD OpenAPI spec because authentications.operator.openshift.io changed 2025-12-12T16:28:30.658376558+00:00 stderr F I1212 16:28:30.658292 12 controller.go:231] Updating CRD OpenAPI spec because imagedigestmirrorsets.config.openshift.io changed 2025-12-12T16:28:30.658623565+00:00 stderr F I1212 16:28:30.658356 12 controller.go:231] Updating CRD OpenAPI spec because pinnedimagesets.machineconfiguration.openshift.io changed 2025-12-12T16:28:30.659339373+00:00 stderr F I1212 16:28:30.659100 12 controller.go:231] Updating CRD OpenAPI spec because probes.monitoring.rhobs changed 2025-12-12T16:28:30.659339373+00:00 stderr F I1212 16:28:30.659230 12 controller.go:231] 
Updating CRD OpenAPI spec because enterprisesearches.enterprisesearch.k8s.elastic.co changed 2025-12-12T16:28:30.659339373+00:00 stderr F I1212 16:28:30.659283 12 controller.go:231] Updating CRD OpenAPI spec because certmanagers.operator.openshift.io changed 2025-12-12T16:28:30.659339373+00:00 stderr F I1212 16:28:30.659297 12 controller.go:231] Updating CRD OpenAPI spec because baselineadminnetworkpolicies.policy.networking.k8s.io changed 2025-12-12T16:28:30.659339373+00:00 stderr F I1212 16:28:30.659305 12 controller.go:231] Updating CRD OpenAPI spec because configs.samples.operator.openshift.io changed 2025-12-12T16:28:30.659339373+00:00 stderr F I1212 16:28:30.659315 12 controller.go:231] Updating CRD OpenAPI spec because consoles.operator.openshift.io changed 2025-12-12T16:28:30.659339373+00:00 stderr F I1212 16:28:30.659322 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:28:30.659339373+00:00 stderr F I1212 16:28:30.659329 12 controller.go:231] Updating CRD OpenAPI spec because servicemonitors.monitoring.rhobs changed 2025-12-12T16:28:30.659411485+00:00 stderr F I1212 16:28:30.659340 12 controller.go:231] Updating CRD OpenAPI spec because kubeapiservers.operator.openshift.io changed 2025-12-12T16:28:30.659411485+00:00 stderr F I1212 16:28:30.659396 12 controller.go:231] Updating CRD OpenAPI spec because projects.config.openshift.io changed 2025-12-12T16:28:30.659411485+00:00 stderr F I1212 16:28:30.659404 12 controller.go:231] Updating CRD OpenAPI spec because kubeschedulers.operator.openshift.io changed 2025-12-12T16:28:30.659444735+00:00 stderr F I1212 16:28:30.659412 12 controller.go:231] Updating CRD OpenAPI spec because networks.config.openshift.io changed 2025-12-12T16:28:30.659605900+00:00 stderr F I1212 16:28:30.659533 12 controller.go:231] Updating CRD OpenAPI spec because nodeslicepools.whereabouts.cni.cncf.io changed 2025-12-12T16:28:30.659774714+00:00 stderr F I1212 16:28:30.659704 12 controller.go:231] Updating CRD OpenAPI spec because clustercsidrivers.operator.openshift.io changed 2025-12-12T16:28:30.659774714+00:00 stderr F I1212 16:28:30.659750 12 controller.go:231] Updating CRD OpenAPI spec because consoleexternalloglinks.console.openshift.io changed 2025-12-12T16:28:30.659787364+00:00 stderr F I1212 16:28:30.659763 12 controller.go:231] Updating CRD OpenAPI spec because grpcroutes.gateway.networking.k8s.io changed 2025-12-12T16:28:30.659821785+00:00 stderr F I1212 16:28:30.659780 12 controller.go:231] Updating CRD OpenAPI spec because istiocsrs.operator.openshift.io changed 2025-12-12T16:28:30.659821785+00:00 stderr F I1212 16:28:30.659790 12 controller.go:231] Updating CRD OpenAPI spec because catalogsources.operators.coreos.com changed 2025-12-12T16:28:30.659821785+00:00 stderr F I1212 16:28:30.659799 12 controller.go:231] Updating CRD OpenAPI spec because operatorgroups.operators.coreos.com changed 2025-12-12T16:28:30.659821785+00:00 stderr F I1212 16:28:30.659808 12 controller.go:231] Updating CRD OpenAPI spec because operatorhubs.config.openshift.io changed 2025-12-12T16:28:30.659871526+00:00 stderr F I1212 16:28:30.659817 12 controller.go:231] Updating CRD OpenAPI spec because servicecas.operator.openshift.io changed 2025-12-12T16:28:30.659871526+00:00 stderr F I1212 16:28:30.659830 12 controller.go:231] Updating CRD OpenAPI spec because clusterimagepolicies.config.openshift.io changed 2025-12-12T16:28:30.660099972+00:00 stderr F I1212 16:28:30.660042 12 controller.go:231] Updating 
CRD OpenAPI spec because consoles.config.openshift.io changed 2025-12-12T16:28:30.660422360+00:00 stderr F I1212 16:28:30.660310 12 controller.go:231] Updating CRD OpenAPI spec because machineosconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:28:30.660422360+00:00 stderr F I1212 16:28:30.660331 12 controller.go:231] Updating CRD OpenAPI spec because issuers.cert-manager.io changed 2025-12-12T16:28:30.660791549+00:00 stderr F I1212 16:28:30.660536 12 controller.go:231] Updating CRD OpenAPI spec because imagetagmirrorsets.config.openshift.io changed 2025-12-12T16:28:30.660791549+00:00 stderr F I1212 16:28:30.660612 12 controller.go:231] Updating CRD OpenAPI spec because ippools.whereabouts.cni.cncf.io changed 2025-12-12T16:28:30.660791549+00:00 stderr F I1212 16:28:30.660698 12 controller.go:231] Updating CRD OpenAPI spec because operators.operators.coreos.com changed 2025-12-12T16:28:30.660938683+00:00 stderr F I1212 16:28:30.660778 12 controller.go:231] Updating CRD OpenAPI spec because probes.monitoring.coreos.com changed 2025-12-12T16:28:30.660979474+00:00 stderr F I1212 16:28:30.660898 12 controller.go:231] Updating CRD OpenAPI spec because projecthelmchartrepositories.helm.openshift.io changed 2025-12-12T16:28:30.660979474+00:00 stderr F I1212 16:28:30.660952 12 controller.go:231] Updating CRD OpenAPI spec because rangeallocations.security.internal.openshift.io changed 2025-12-12T16:28:30.661284162+00:00 stderr F I1212 16:28:30.661008 12 controller.go:231] Updating CRD OpenAPI spec because persesdashboards.perses.dev changed 2025-12-12T16:28:30.661284162+00:00 stderr F I1212 16:28:30.661137 12 controller.go:231] Updating CRD OpenAPI spec because beats.beat.k8s.elastic.co changed 2025-12-12T16:28:30.661284162+00:00 stderr F I1212 16:28:30.661150 12 controller.go:231] Updating CRD OpenAPI spec because challenges.acme.cert-manager.io changed 2025-12-12T16:28:30.661284162+00:00 stderr F I1212 16:28:30.661174 12 controller.go:231] Updating CRD OpenAPI spec because adminnetworkpolicies.policy.networking.k8s.io changed 2025-12-12T16:28:30.661303882+00:00 stderr F I1212 16:28:30.661275 12 controller.go:231] Updating CRD OpenAPI spec because clusterresourcequotas.quota.openshift.io changed 2025-12-12T16:28:30.661361964+00:00 stderr F I1212 16:28:30.661305 12 controller.go:231] Updating CRD OpenAPI spec because configs.operator.openshift.io changed 2025-12-12T16:28:30.661427476+00:00 stderr F I1212 16:28:30.661384 12 controller.go:231] Updating CRD OpenAPI spec because egressfirewalls.k8s.ovn.org changed 2025-12-12T16:28:30.662107713+00:00 stderr F I1212 16:28:30.662008 12 controller.go:231] Updating CRD OpenAPI spec because egressrouters.network.operator.openshift.io changed 2025-12-12T16:28:30.662107713+00:00 stderr F I1212 16:28:30.662053 12 controller.go:231] Updating CRD OpenAPI spec because gatewayclasses.gateway.networking.k8s.io changed 2025-12-12T16:28:30.662107713+00:00 stderr F I1212 16:28:30.662064 12 controller.go:231] Updating CRD OpenAPI spec because network-attachment-definitions.k8s.cni.cncf.io changed 2025-12-12T16:28:30.662107713+00:00 stderr F I1212 16:28:30.662077 12 controller.go:231] Updating CRD OpenAPI spec because operatorconditions.operators.coreos.com changed 2025-12-12T16:28:30.662159044+00:00 stderr F I1212 16:28:30.662104 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.coreos.com changed 2025-12-12T16:28:30.662529454+00:00 stderr F I1212 16:28:30.662253 12 controller.go:231] Updating CRD OpenAPI spec because 
consolenotifications.console.openshift.io changed 2025-12-12T16:28:30.662529454+00:00 stderr F I1212 16:28:30.662298 12 controller.go:231] Updating CRD OpenAPI spec because consolesamples.console.openshift.io changed 2025-12-12T16:28:30.662529454+00:00 stderr F I1212 16:28:30.662399 12 controller.go:231] Updating CRD OpenAPI spec because imagecontentpolicies.config.openshift.io changed 2025-12-12T16:28:30.662555834+00:00 stderr F I1212 16:28:30.662513 12 controller.go:231] Updating CRD OpenAPI spec because imagepolicies.config.openshift.io changed 2025-12-12T16:28:30.662555834+00:00 stderr F I1212 16:28:30.662534 12 controller.go:231] Updating CRD OpenAPI spec because operatorpkis.network.operator.openshift.io changed 2025-12-12T16:28:30.662611306+00:00 stderr F I1212 16:28:30.662564 12 controller.go:231] Updating CRD OpenAPI spec because prometheusrules.monitoring.coreos.com changed 2025-12-12T16:28:30.662706918+00:00 stderr F I1212 16:28:30.662633 12 controller.go:231] Updating CRD OpenAPI spec because prometheusrules.monitoring.rhobs changed 2025-12-12T16:28:30.662706918+00:00 stderr F I1212 16:28:30.662668 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigurations.operator.openshift.io changed 2025-12-12T16:28:30.662922863+00:00 stderr F I1212 16:28:30.662842 12 controller.go:231] Updating CRD OpenAPI spec because agents.agent.k8s.elastic.co changed 2025-12-12T16:28:30.662922863+00:00 stderr F I1212 16:28:30.662875 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearches.elasticsearch.k8s.elastic.co changed 2025-12-12T16:28:30.662969495+00:00 stderr F I1212 16:28:30.662919 12 controller.go:231] Updating CRD OpenAPI spec because kibanas.kibana.k8s.elastic.co changed 2025-12-12T16:28:30.663043017+00:00 stderr F I1212 16:28:30.662980 12 controller.go:231] Updating CRD OpenAPI spec because kubeletconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:28:30.663043017+00:00 stderr F I1212 16:28:30.662999 12 controller.go:231] Updating CRD OpenAPI spec because machineautoscalers.autoscaling.openshift.io changed 2025-12-12T16:28:30.663165140+00:00 stderr F I1212 16:28:30.663101 12 controller.go:231] Updating CRD OpenAPI spec because apmservers.apm.k8s.elastic.co changed 2025-12-12T16:28:30.663165140+00:00 stderr F I1212 16:28:30.663133 12 controller.go:231] Updating CRD OpenAPI spec because egressservices.k8s.ovn.org changed 2025-12-12T16:28:30.663239771+00:00 stderr F I1212 16:28:30.663169 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigpools.machineconfiguration.openshift.io changed 2025-12-12T16:28:30.663239771+00:00 stderr F I1212 16:28:30.663207 12 controller.go:231] Updating CRD OpenAPI spec because thanosrulers.monitoring.coreos.com changed 2025-12-12T16:28:30.663328454+00:00 stderr F I1212 16:28:30.663280 12 controller.go:231] Updating CRD OpenAPI spec because userdefinednetworks.k8s.ovn.org changed 2025-12-12T16:28:30.663411316+00:00 stderr F I1212 16:28:30.663363 12 controller.go:231] Updating CRD OpenAPI spec because certificaterequests.cert-manager.io changed 2025-12-12T16:28:30.663451757+00:00 stderr F I1212 16:28:30.663399 12 controller.go:231] Updating CRD OpenAPI spec because subscriptions.operators.coreos.com changed 2025-12-12T16:28:30.663451757+00:00 stderr F I1212 16:28:30.663425 12 controller.go:231] Updating CRD OpenAPI spec because scrapeconfigs.monitoring.rhobs changed 2025-12-12T16:28:30.663610221+00:00 stderr F I1212 16:28:30.663541 12 controller.go:231] Updating CRD OpenAPI spec because 
clusterissuers.cert-manager.io changed 2025-12-12T16:28:30.663788865+00:00 stderr F I1212 16:28:30.663720 12 controller.go:231] Updating CRD OpenAPI spec because consoleclidownloads.console.openshift.io changed 2025-12-12T16:28:30.663882048+00:00 stderr F I1212 16:28:30.663823 12 controller.go:231] Updating CRD OpenAPI spec because consoleplugins.console.openshift.io changed 2025-12-12T16:28:30.664039862+00:00 stderr F I1212 16:28:30.663988 12 controller.go:231] Updating CRD OpenAPI spec because metal3remediationtemplates.infrastructure.cluster.x-k8s.io changed 2025-12-12T16:28:30.664242237+00:00 stderr F I1212 16:28:30.664158 12 controller.go:231] Updating CRD OpenAPI spec because overlappingrangeipreservations.whereabouts.cni.cncf.io changed 2025-12-12T16:28:30.664242237+00:00 stderr F I1212 16:28:30.664206 12 controller.go:231] Updating CRD OpenAPI spec because schedulers.config.openshift.io changed 2025-12-12T16:28:30.666428972+00:00 stderr F I1212 16:28:30.666315 12 controller.go:231] Updating CRD OpenAPI spec because containerruntimeconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:28:30.666428972+00:00 stderr F I1212 16:28:30.666400 12 controller.go:231] Updating CRD OpenAPI spec because dnsrecords.ingress.operator.openshift.io changed 2025-12-12T16:28:30.666639648+00:00 stderr F I1212 16:28:30.666568 12 controller.go:231] Updating CRD OpenAPI spec because networks.operator.openshift.io changed 2025-12-12T16:28:30.666809992+00:00 stderr F I1212 16:28:30.666736 12 controller.go:231] Updating CRD OpenAPI spec because prometheuses.monitoring.rhobs changed 2025-12-12T16:28:30.666820262+00:00 stderr F I1212 16:28:30.666793 12 controller.go:231] Updating CRD OpenAPI spec because thanosqueriers.monitoring.rhobs changed 2025-12-12T16:28:30.666820262+00:00 stderr F I1212 16:28:30.666813 12 controller.go:231] Updating CRD OpenAPI spec because httproutes.gateway.networking.k8s.io changed 2025-12-12T16:28:30.667173131+00:00 stderr F I1212 16:28:30.667067 12 controller.go:231] Updating CRD OpenAPI spec because clusterversions.config.openshift.io changed 2025-12-12T16:28:30.667173131+00:00 stderr F I1212 16:28:30.667097 12 controller.go:231] Updating CRD OpenAPI spec because consolequickstarts.console.openshift.io changed 2025-12-12T16:28:30.668099474+00:00 stderr F I1212 16:28:30.667986 12 controller.go:231] Updating CRD OpenAPI spec because ingresscontrollers.operator.openshift.io changed 2025-12-12T16:28:30.668099474+00:00 stderr F I1212 16:28:30.668053 12 controller.go:231] Updating CRD OpenAPI spec because machines.machine.openshift.io changed 2025-12-12T16:28:30.668314370+00:00 stderr F I1212 16:28:30.668103 12 controller.go:231] Updating CRD OpenAPI spec because uiplugins.observability.openshift.io changed 2025-12-12T16:28:30.668314370+00:00 stderr F I1212 16:28:30.668281 12 controller.go:231] Updating CRD OpenAPI spec because orders.acme.cert-manager.io changed 2025-12-12T16:28:30.668600677+00:00 stderr F I1212 16:28:30.668520 12 controller.go:231] Updating CRD OpenAPI spec because apiservers.config.openshift.io changed 2025-12-12T16:28:30.669065729+00:00 stderr F I1212 16:28:30.668980 12 controller.go:231] Updating CRD OpenAPI spec because authentications.config.openshift.io changed 2025-12-12T16:28:30.669065729+00:00 stderr F I1212 16:28:30.669042 12 controller.go:231] Updating CRD OpenAPI spec because egressqoses.k8s.ovn.org changed 2025-12-12T16:28:30.670212068+00:00 stderr F I1212 16:28:30.670091 12 controller.go:231] Updating CRD OpenAPI spec because 
helmchartrepositories.helm.openshift.io changed 2025-12-12T16:28:30.670212068+00:00 stderr F I1212 16:28:30.670146 12 controller.go:231] Updating CRD OpenAPI spec because kubecontrollermanagers.operator.openshift.io changed 2025-12-12T16:28:30.670212068+00:00 stderr F I1212 16:28:30.670154 12 controller.go:231] Updating CRD OpenAPI spec because podnetworkconnectivitychecks.controlplane.operator.openshift.io changed 2025-12-12T16:28:30.670212068+00:00 stderr F I1212 16:28:30.670164 12 controller.go:231] Updating CRD OpenAPI spec because stackconfigpolicies.stackconfigpolicy.k8s.elastic.co changed 2025-12-12T16:28:30.670212068+00:00 stderr F I1212 16:28:30.670174 12 controller.go:231] Updating CRD OpenAPI spec because apirequestcounts.apiserver.openshift.io changed 2025-12-12T16:28:30.670274840+00:00 stderr F I1212 16:28:30.670219 12 controller.go:231] Updating CRD OpenAPI spec because featuregates.config.openshift.io changed 2025-12-12T16:28:30.670274840+00:00 stderr F I1212 16:28:30.670238 12 controller.go:231] Updating CRD OpenAPI spec because nodes.config.openshift.io changed 2025-12-12T16:28:30.670274840+00:00 stderr F I1212 16:28:30.670249 12 controller.go:231] Updating CRD OpenAPI spec because prometheusagents.monitoring.rhobs changed 2025-12-12T16:28:30.670274840+00:00 stderr F I1212 16:28:30.670258 12 controller.go:231] Updating CRD OpenAPI spec because observabilityinstallers.observability.openshift.io changed 2025-12-12T16:28:30.670285630+00:00 stderr F I1212 16:28:30.670268 12 controller.go:231] Updating CRD OpenAPI spec because controllerconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:28:30.670285630+00:00 stderr F I1212 16:28:30.670275 12 controller.go:231] Updating CRD OpenAPI spec because csisnapshotcontrollers.operator.openshift.io changed 2025-12-12T16:28:30.670302140+00:00 stderr F I1212 16:28:30.670281 12 controller.go:231] Updating CRD OpenAPI spec because installplans.operators.coreos.com changed 2025-12-12T16:28:30.670302140+00:00 stderr F I1212 16:28:30.670288 12 controller.go:231] Updating CRD OpenAPI spec because oauths.config.openshift.io changed 2025-12-12T16:28:30.670309960+00:00 stderr F I1212 16:28:30.670295 12 controller.go:231] Updating CRD OpenAPI spec because referencegrants.gateway.networking.k8s.io changed 2025-12-12T16:28:30.670309960+00:00 stderr F I1212 16:28:30.670302 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.rhobs changed 2025-12-12T16:28:30.670350021+00:00 stderr F I1212 16:28:30.670310 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagers.monitoring.rhobs changed 2025-12-12T16:28:30.670978907+00:00 stderr F I1212 16:28:30.670898 12 controller.go:231] Updating CRD OpenAPI spec because podmonitors.monitoring.rhobs changed 2025-12-12T16:28:30.670978907+00:00 stderr F I1212 16:28:30.670939 12 controller.go:231] Updating CRD OpenAPI spec because images.config.openshift.io changed 2025-12-12T16:28:30.671117131+00:00 stderr F I1212 16:28:30.671062 12 controller.go:231] Updating CRD OpenAPI spec because machineosbuilds.machineconfiguration.openshift.io changed 2025-12-12T16:28:30.671147322+00:00 stderr F I1212 16:28:30.671108 12 controller.go:231] Updating CRD OpenAPI spec because openshiftapiservers.operator.openshift.io changed 2025-12-12T16:28:30.671281875+00:00 stderr F I1212 16:28:30.671206 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagers.monitoring.coreos.com changed 2025-12-12T16:28:30.671436389+00:00 stderr F I1212 16:28:30.671376 12 controller.go:231] 
Updating CRD OpenAPI spec because consolelinks.console.openshift.io changed 2025-12-12T16:28:30.671436389+00:00 stderr F I1212 16:28:30.671398 12 controller.go:231] Updating CRD OpenAPI spec because controlplanemachinesets.machine.openshift.io changed 2025-12-12T16:28:30.671436389+00:00 stderr F I1212 16:28:30.671407 12 controller.go:231] Updating CRD OpenAPI spec because etcds.operator.openshift.io changed 2025-12-12T16:28:30.671563432+00:00 stderr F I1212 16:28:30.671480 12 controller.go:231] Updating CRD OpenAPI spec because machinehealthchecks.machine.openshift.io changed 2025-12-12T16:28:30.671563432+00:00 stderr F I1212 16:28:30.671545 12 controller.go:231] Updating CRD OpenAPI spec because storagestates.migration.k8s.io changed 2025-12-12T16:28:30.671716076+00:00 stderr F I1212 16:28:30.671646 12 controller.go:231] Updating CRD OpenAPI spec because elasticmapsservers.maps.k8s.elastic.co changed 2025-12-12T16:28:30.671784058+00:00 stderr F I1212 16:28:30.671739 12 controller.go:231] Updating CRD OpenAPI spec because builds.config.openshift.io changed 2025-12-12T16:28:30.672111386+00:00 stderr F I1212 16:28:30.672025 12 controller.go:231] Updating CRD OpenAPI spec because perses.perses.dev changed 2025-12-12T16:28:30.672111386+00:00 stderr F I1212 16:28:30.672059 12 controller.go:231] Updating CRD OpenAPI spec because egressips.k8s.ovn.org changed 2025-12-12T16:28:30.672111386+00:00 stderr F I1212 16:28:30.672068 12 controller.go:231] Updating CRD OpenAPI spec because metal3remediations.infrastructure.cluster.x-k8s.io changed 2025-12-12T16:28:30.672453265+00:00 stderr F I1212 16:28:30.672281 12 controller.go:231] Updating CRD OpenAPI spec because storageversionmigrations.migration.k8s.io changed 2025-12-12T16:28:30.672453265+00:00 stderr F I1212 16:28:30.672314 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearchautoscalers.autoscaling.k8s.elastic.co changed 2025-12-12T16:28:40.566782119+00:00 stderr F I1212 16:28:40.565687 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0028424989228933175 seatDemandStdev=0.055931293798082954 seatDemandSmoothed=3.6582116562057525 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:28:49.672802170+00:00 stderr F E1212 16:28:49.672552 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:28:50.566983011+00:00 stderr F I1212 16:28:50.566675 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.008961979995349294 seatDemandStdev=0.15964942091732484 seatDemandSmoothed=3.5779508503340116 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:29:00.568138875+00:00 stderr F I1212 16:29:00.567953 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0011795685521461248 seatDemandStdev=0.03432458550626523 seatDemandSmoothed=3.4964745763196725 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:29:05.491960102+00:00 stderr F I1212 16:29:05.490427 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:29:05.491960102+00:00 stderr F I1212 16:29:05.490468 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 
2025-12-12T16:29:05.491960102+00:00 stderr F I1212 16:29:05.490474 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 56.931µs 2025-12-12T16:29:05.799782392+00:00 stderr F E1212 16:29:05.799646 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:29:10.569060087+00:00 stderr F I1212 16:29:10.568843 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0036323201776126694 seatDemandStdev=0.06166032309133895 seatDemandSmoothed=3.4175573918595057 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:29:20.569757062+00:00 stderr F I1212 16:29:20.569388 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0019973536906627014 seatDemandStdev=0.04464710818067725 seatDemandSmoothed=3.340026394469778 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:29:20.974040814+00:00 stderr F E1212 16:29:20.973801 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:29:23.026545918+00:00 stderr F I1212 16:29:23.024935 12 controller.go:667] quota admission added evaluator for: catalogsources.operators.coreos.com 2025-12-12T16:29:23.036910450+00:00 stderr F I1212 16:29:23.036755 12 controller.go:667] quota admission added evaluator for: networkpolicies.networking.k8s.io 2025-12-12T16:29:23.036944461+00:00 stderr F I1212 16:29:23.036881 12 controller.go:667] quota admission added evaluator for: networkpolicies.networking.k8s.io 2025-12-12T16:29:23.640308882+00:00 stderr F I1212 16:29:23.640130 12 alloc.go:328] "allocated clusterIPs" service="service-telemetry/infrawatch-operators" clusterIPs={"IPv4":"10.217.5.116"} 2025-12-12T16:29:23.645089242+00:00 stderr F I1212 16:29:23.644415 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:29:23.645089242+00:00 stderr F I1212 16:29:23.644442 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:29:23.645089242+00:00 stderr F I1212 16:29:23.644446 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 39.301µs 2025-12-12T16:29:30.570199579+00:00 stderr F I1212 16:29:30.569700 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.003372905608306247 seatDemandStdev=0.07921749897658249 seatDemandSmoothed=3.2651053667024255 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:30:00.143156889+00:00 stderr F I1212 16:30:00.142957 12 controller.go:667] quota admission added evaluator for: cronjobs.batch 2025-12-12T16:30:00.572508712+00:00 stderr F I1212 16:30:00.571680 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=4 seatDemandAvg=0.0030942254538791135 seatDemandStdev=0.0765937854204665 seatDemandSmoothed=3.053902189400224 fairFrac=2.2796127562642368 currentCL=4 concurrencyDenominator=4 backstop=false 2025-12-12T16:30:10.573507764+00:00 stderr F I1212 16:30:10.572071 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 
seatDemandAvg=0.0029815347460057993 seatDemandStdev=0.08945298809082951 seatDemandSmoothed=2.985788433069266 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:30:30.573364532+00:00 stderr F I1212 16:30:30.573146 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0014663304697485665 seatDemandStdev=0.038264609558991365 seatDemandSmoothed=2.8549853753457044 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:30:40.573963187+00:00 stderr F I1212 16:30:40.573657 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.001454632489291397 seatDemandStdev=0.04021385810034307 seatDemandSmoothed=2.790279086996315 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:30:50.574862397+00:00 stderr F I1212 16:30:50.574691 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.008593007610369772 seatDemandStdev=0.1696021798772666 seatDemandSmoothed=2.7302011573076155 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:31:00.575235005+00:00 stderr F I1212 16:31:00.575004 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.001288753477515563 seatDemandStdev=0.035876072694621335 seatDemandSmoothed=2.6682613216914994 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:31:10.575997468+00:00 stderr F I1212 16:31:10.575770 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0012379757574712705 seatDemandStdev=0.039290509298977074 seatDemandSmoothed=2.607823466448893 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:31:20.577228469+00:00 stderr F I1212 16:31:20.576349 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0013929639747742258 seatDemandStdev=0.03729642913388904 seatDemandSmoothed=2.5487333827620677 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:31:40.578471458+00:00 stderr F I1212 16:31:40.577466 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0020237913173432693 seatDemandStdev=0.04831021034157873 seatDemandSmoothed=2.43485126999795 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:31:50.578905459+00:00 stderr F I1212 16:31:50.578695 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.009097487011271594 seatDemandStdev=0.15702414666691494 seatDemandSmoothed=2.382670488362595 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:32:00.579580591+00:00 stderr F I1212 16:32:00.579214 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0011203801049935302 seatDemandStdev=0.03345332350326145 seatDemandSmoothed=2.328664262313245 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:32:10.580230853+00:00 stderr F I1212 16:32:10.579962 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.007370584437719272 seatDemandStdev=0.13379242340802464 seatDemandSmoothed=2.2783517334604926 
fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:32:20.581094919+00:00 stderr F I1212 16:32:20.580331 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0011013814463601743 seatDemandStdev=0.03316878661135779 seatDemandSmoothed=2.226737857456229 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:32:50.588540057+00:00 stderr F I1212 16:32:50.587561 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.008908104599736115 seatDemandStdev=0.1598170150133574 seatDemandSmoothed=2.082324316292238 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:33:00.588365548+00:00 stderr F I1212 16:33:00.588120 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0014601589431136723 seatDemandStdev=0.03818411815106534 seatDemandSmoothed=2.0353426753906825 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:33:10.589922397+00:00 stderr F I1212 16:33:10.588933 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0015712477924895143 seatDemandStdev=0.04629288968185894 seatDemandSmoothed=1.9896306690186067 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:33:20.590165273+00:00 stderr F I1212 16:33:20.589945 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0008005983802655645 seatDemandStdev=0.028283518566456344 seatDemandSmoothed=1.9445380983209533 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:33:30.654235819+00:00 stderr F I1212 16:33:30.653247 12 controller.go:231] Updating CRD OpenAPI spec because clustercsidrivers.operator.openshift.io changed 2025-12-12T16:33:30.654235819+00:00 stderr F I1212 16:33:30.653313 12 controller.go:231] Updating CRD OpenAPI spec because consoleexternalloglinks.console.openshift.io changed 2025-12-12T16:33:30.654235819+00:00 stderr F I1212 16:33:30.653940 12 controller.go:231] Updating CRD OpenAPI spec because grpcroutes.gateway.networking.k8s.io changed 2025-12-12T16:33:30.654235819+00:00 stderr F I1212 16:33:30.654083 12 controller.go:231] Updating CRD OpenAPI spec because istiocsrs.operator.openshift.io changed 2025-12-12T16:33:30.654235819+00:00 stderr F I1212 16:33:30.654097 12 controller.go:231] Updating CRD OpenAPI spec because catalogsources.operators.coreos.com changed 2025-12-12T16:33:30.654433064+00:00 stderr F I1212 16:33:30.654281 12 controller.go:231] Updating CRD OpenAPI spec because operatorgroups.operators.coreos.com changed 2025-12-12T16:33:30.654433064+00:00 stderr F I1212 16:33:30.654330 12 controller.go:231] Updating CRD OpenAPI spec because operatorhubs.config.openshift.io changed 2025-12-12T16:33:30.654433064+00:00 stderr F I1212 16:33:30.654339 12 controller.go:231] Updating CRD OpenAPI spec because servicecas.operator.openshift.io changed 2025-12-12T16:33:30.654433064+00:00 stderr F I1212 16:33:30.654405 12 controller.go:231] Updating CRD OpenAPI spec because clusterimagepolicies.config.openshift.io changed 2025-12-12T16:33:30.654465615+00:00 stderr F I1212 16:33:30.654430 12 controller.go:231] Updating CRD OpenAPI spec because consoles.config.openshift.io changed 2025-12-12T16:33:30.654564927+00:00 stderr F I1212 16:33:30.654520 12 
controller.go:231] Updating CRD OpenAPI spec because machineosconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:33:30.654564927+00:00 stderr F I1212 16:33:30.654541 12 controller.go:231] Updating CRD OpenAPI spec because issuers.cert-manager.io changed 2025-12-12T16:33:30.654644689+00:00 stderr F I1212 16:33:30.654613 12 controller.go:231] Updating CRD OpenAPI spec because imagetagmirrorsets.config.openshift.io changed 2025-12-12T16:33:30.654670140+00:00 stderr F I1212 16:33:30.654646 12 controller.go:231] Updating CRD OpenAPI spec because ippools.whereabouts.cni.cncf.io changed 2025-12-12T16:33:30.654677810+00:00 stderr F I1212 16:33:30.654664 12 controller.go:231] Updating CRD OpenAPI spec because operators.operators.coreos.com changed 2025-12-12T16:33:30.654741282+00:00 stderr F I1212 16:33:30.654702 12 controller.go:231] Updating CRD OpenAPI spec because probes.monitoring.coreos.com changed 2025-12-12T16:33:30.654803303+00:00 stderr F I1212 16:33:30.654776 12 controller.go:231] Updating CRD OpenAPI spec because projecthelmchartrepositories.helm.openshift.io changed 2025-12-12T16:33:30.654876395+00:00 stderr F I1212 16:33:30.654826 12 controller.go:231] Updating CRD OpenAPI spec because rangeallocations.security.internal.openshift.io changed 2025-12-12T16:33:30.654913286+00:00 stderr F I1212 16:33:30.654875 12 controller.go:231] Updating CRD OpenAPI spec because persesdashboards.perses.dev changed 2025-12-12T16:33:30.655071800+00:00 stderr F I1212 16:33:30.655023 12 controller.go:231] Updating CRD OpenAPI spec because beats.beat.k8s.elastic.co changed 2025-12-12T16:33:30.655140692+00:00 stderr F I1212 16:33:30.655094 12 controller.go:231] Updating CRD OpenAPI spec because challenges.acme.cert-manager.io changed 2025-12-12T16:33:30.655303706+00:00 stderr F I1212 16:33:30.655253 12 controller.go:231] Updating CRD OpenAPI spec because adminnetworkpolicies.policy.networking.k8s.io changed 2025-12-12T16:33:30.655303706+00:00 stderr F I1212 16:33:30.655269 12 controller.go:231] Updating CRD OpenAPI spec because clusterresourcequotas.quota.openshift.io changed 2025-12-12T16:33:30.655354047+00:00 stderr F I1212 16:33:30.655324 12 controller.go:231] Updating CRD OpenAPI spec because configs.operator.openshift.io changed 2025-12-12T16:33:30.655375798+00:00 stderr F I1212 16:33:30.655349 12 controller.go:231] Updating CRD OpenAPI spec because egressfirewalls.k8s.ovn.org changed 2025-12-12T16:33:30.655466490+00:00 stderr F I1212 16:33:30.655427 12 controller.go:231] Updating CRD OpenAPI spec because egressrouters.network.operator.openshift.io changed 2025-12-12T16:33:30.655656945+00:00 stderr F I1212 16:33:30.655613 12 controller.go:231] Updating CRD OpenAPI spec because gatewayclasses.gateway.networking.k8s.io changed 2025-12-12T16:33:30.655656945+00:00 stderr F I1212 16:33:30.655630 12 controller.go:231] Updating CRD OpenAPI spec because network-attachment-definitions.k8s.cni.cncf.io changed 2025-12-12T16:33:30.655656945+00:00 stderr F I1212 16:33:30.655637 12 controller.go:231] Updating CRD OpenAPI spec because operatorconditions.operators.coreos.com changed 2025-12-12T16:33:30.655691586+00:00 stderr F I1212 16:33:30.655665 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.coreos.com changed 2025-12-12T16:33:30.655824009+00:00 stderr F I1212 16:33:30.655786 12 controller.go:231] Updating CRD OpenAPI spec because consolenotifications.console.openshift.io changed 2025-12-12T16:33:30.655863050+00:00 stderr F I1212 16:33:30.655833 12 
controller.go:231] Updating CRD OpenAPI spec because consolesamples.console.openshift.io changed 2025-12-12T16:33:30.655943722+00:00 stderr F I1212 16:33:30.655903 12 controller.go:231] Updating CRD OpenAPI spec because imagecontentpolicies.config.openshift.io changed 2025-12-12T16:33:30.655992493+00:00 stderr F I1212 16:33:30.655959 12 controller.go:231] Updating CRD OpenAPI spec because imagepolicies.config.openshift.io changed 2025-12-12T16:33:30.656065385+00:00 stderr F I1212 16:33:30.656028 12 controller.go:231] Updating CRD OpenAPI spec because operatorpkis.network.operator.openshift.io changed 2025-12-12T16:33:30.656104096+00:00 stderr F I1212 16:33:30.656063 12 controller.go:231] Updating CRD OpenAPI spec because prometheusrules.monitoring.coreos.com changed 2025-12-12T16:33:30.656104096+00:00 stderr F I1212 16:33:30.656087 12 controller.go:231] Updating CRD OpenAPI spec because prometheusrules.monitoring.rhobs changed 2025-12-12T16:33:30.656167137+00:00 stderr F I1212 16:33:30.656124 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigurations.operator.openshift.io changed 2025-12-12T16:33:30.656281740+00:00 stderr F I1212 16:33:30.656235 12 controller.go:231] Updating CRD OpenAPI spec because agents.agent.k8s.elastic.co changed 2025-12-12T16:33:30.656281740+00:00 stderr F I1212 16:33:30.656269 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearches.elasticsearch.k8s.elastic.co changed 2025-12-12T16:33:30.656385363+00:00 stderr F I1212 16:33:30.656349 12 controller.go:231] Updating CRD OpenAPI spec because kibanas.kibana.k8s.elastic.co changed 2025-12-12T16:33:30.656449835+00:00 stderr F I1212 16:33:30.656413 12 controller.go:231] Updating CRD OpenAPI spec because kubeletconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:33:30.656492846+00:00 stderr F I1212 16:33:30.656454 12 controller.go:231] Updating CRD OpenAPI spec because machineautoscalers.autoscaling.openshift.io changed 2025-12-12T16:33:30.656526276+00:00 stderr F I1212 16:33:30.656493 12 controller.go:231] Updating CRD OpenAPI spec because apmservers.apm.k8s.elastic.co changed 2025-12-12T16:33:30.656569378+00:00 stderr F I1212 16:33:30.656543 12 controller.go:231] Updating CRD OpenAPI spec because egressservices.k8s.ovn.org changed 2025-12-12T16:33:30.656643909+00:00 stderr F I1212 16:33:30.656614 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigpools.machineconfiguration.openshift.io changed 2025-12-12T16:33:30.656714371+00:00 stderr F I1212 16:33:30.656689 12 controller.go:231] Updating CRD OpenAPI spec because thanosrulers.monitoring.coreos.com changed 2025-12-12T16:33:30.656866405+00:00 stderr F I1212 16:33:30.656824 12 controller.go:231] Updating CRD OpenAPI spec because userdefinednetworks.k8s.ovn.org changed 2025-12-12T16:33:30.656866405+00:00 stderr F I1212 16:33:30.656838 12 controller.go:231] Updating CRD OpenAPI spec because certificaterequests.cert-manager.io changed 2025-12-12T16:33:30.656949347+00:00 stderr F I1212 16:33:30.656912 12 controller.go:231] Updating CRD OpenAPI spec because subscriptions.operators.coreos.com changed 2025-12-12T16:33:30.656998748+00:00 stderr F I1212 16:33:30.656966 12 controller.go:231] Updating CRD OpenAPI spec because scrapeconfigs.monitoring.rhobs changed 2025-12-12T16:33:30.657067170+00:00 stderr F I1212 16:33:30.657038 12 controller.go:231] Updating CRD OpenAPI spec because clusterissuers.cert-manager.io changed 2025-12-12T16:33:30.657123301+00:00 stderr F I1212 16:33:30.657076 12 controller.go:231] Updating 
CRD OpenAPI spec because consoleclidownloads.console.openshift.io changed 2025-12-12T16:33:30.657214084+00:00 stderr F I1212 16:33:30.657159 12 controller.go:231] Updating CRD OpenAPI spec because consoleplugins.console.openshift.io changed 2025-12-12T16:33:30.657272285+00:00 stderr F I1212 16:33:30.657230 12 controller.go:231] Updating CRD OpenAPI spec because metal3remediationtemplates.infrastructure.cluster.x-k8s.io changed 2025-12-12T16:33:30.657333997+00:00 stderr F I1212 16:33:30.657285 12 controller.go:231] Updating CRD OpenAPI spec because overlappingrangeipreservations.whereabouts.cni.cncf.io changed 2025-12-12T16:33:30.657390218+00:00 stderr F I1212 16:33:30.657357 12 controller.go:231] Updating CRD OpenAPI spec because schedulers.config.openshift.io changed 2025-12-12T16:33:30.657517731+00:00 stderr F I1212 16:33:30.657448 12 controller.go:231] Updating CRD OpenAPI spec because containerruntimeconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:33:30.657517731+00:00 stderr F I1212 16:33:30.657495 12 controller.go:231] Updating CRD OpenAPI spec because dnsrecords.ingress.operator.openshift.io changed 2025-12-12T16:33:30.657533082+00:00 stderr F I1212 16:33:30.657507 12 controller.go:231] Updating CRD OpenAPI spec because networks.operator.openshift.io changed 2025-12-12T16:33:30.657579263+00:00 stderr F I1212 16:33:30.657550 12 controller.go:231] Updating CRD OpenAPI spec because prometheuses.monitoring.rhobs changed 2025-12-12T16:33:30.657635204+00:00 stderr F I1212 16:33:30.657603 12 controller.go:231] Updating CRD OpenAPI spec because thanosqueriers.monitoring.rhobs changed 2025-12-12T16:33:30.657682295+00:00 stderr F I1212 16:33:30.657654 12 controller.go:231] Updating CRD OpenAPI spec because httproutes.gateway.networking.k8s.io changed 2025-12-12T16:33:30.657710826+00:00 stderr F I1212 16:33:30.657680 12 controller.go:231] Updating CRD OpenAPI spec because clusterversions.config.openshift.io changed 2025-12-12T16:33:30.657759667+00:00 stderr F I1212 16:33:30.657729 12 controller.go:231] Updating CRD OpenAPI spec because consolequickstarts.console.openshift.io changed 2025-12-12T16:33:30.657862540+00:00 stderr F I1212 16:33:30.657832 12 controller.go:231] Updating CRD OpenAPI spec because ingresscontrollers.operator.openshift.io changed 2025-12-12T16:33:30.657919831+00:00 stderr F I1212 16:33:30.657858 12 controller.go:231] Updating CRD OpenAPI spec because machines.machine.openshift.io changed 2025-12-12T16:33:30.657982603+00:00 stderr F I1212 16:33:30.657950 12 controller.go:231] Updating CRD OpenAPI spec because uiplugins.observability.openshift.io changed 2025-12-12T16:33:30.658008764+00:00 stderr F I1212 16:33:30.657979 12 controller.go:231] Updating CRD OpenAPI spec because orders.acme.cert-manager.io changed 2025-12-12T16:33:30.658106356+00:00 stderr F I1212 16:33:30.658070 12 controller.go:231] Updating CRD OpenAPI spec because apiservers.config.openshift.io changed 2025-12-12T16:33:30.658135927+00:00 stderr F I1212 16:33:30.658104 12 controller.go:231] Updating CRD OpenAPI spec because authentications.config.openshift.io changed 2025-12-12T16:33:30.658168278+00:00 stderr F I1212 16:33:30.658136 12 controller.go:231] Updating CRD OpenAPI spec because egressqoses.k8s.ovn.org changed 2025-12-12T16:33:30.658253590+00:00 stderr F I1212 16:33:30.658216 12 controller.go:231] Updating CRD OpenAPI spec because helmchartrepositories.helm.openshift.io changed 2025-12-12T16:33:30.658288041+00:00 stderr F I1212 16:33:30.658254 12 controller.go:231] Updating CRD 
OpenAPI spec because kubecontrollermanagers.operator.openshift.io changed 2025-12-12T16:33:30.658288041+00:00 stderr F I1212 16:33:30.658274 12 controller.go:231] Updating CRD OpenAPI spec because podnetworkconnectivitychecks.controlplane.operator.openshift.io changed 2025-12-12T16:33:30.658375393+00:00 stderr F I1212 16:33:30.658338 12 controller.go:231] Updating CRD OpenAPI spec because stackconfigpolicies.stackconfigpolicy.k8s.elastic.co changed 2025-12-12T16:33:30.658448805+00:00 stderr F I1212 16:33:30.658404 12 controller.go:231] Updating CRD OpenAPI spec because apirequestcounts.apiserver.openshift.io changed 2025-12-12T16:33:30.658586468+00:00 stderr F I1212 16:33:30.658534 12 controller.go:231] Updating CRD OpenAPI spec because featuregates.config.openshift.io changed 2025-12-12T16:33:30.658691911+00:00 stderr F I1212 16:33:30.658653 12 controller.go:231] Updating CRD OpenAPI spec because nodes.config.openshift.io changed 2025-12-12T16:33:30.658752822+00:00 stderr F I1212 16:33:30.658718 12 controller.go:231] Updating CRD OpenAPI spec because prometheusagents.monitoring.rhobs changed 2025-12-12T16:33:30.658841964+00:00 stderr F I1212 16:33:30.658806 12 controller.go:231] Updating CRD OpenAPI spec because observabilityinstallers.observability.openshift.io changed 2025-12-12T16:33:30.658898866+00:00 stderr F I1212 16:33:30.658864 12 controller.go:231] Updating CRD OpenAPI spec because controllerconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:33:30.659089460+00:00 stderr F I1212 16:33:30.659028 12 controller.go:231] Updating CRD OpenAPI spec because csisnapshotcontrollers.operator.openshift.io changed 2025-12-12T16:33:30.659089460+00:00 stderr F I1212 16:33:30.659052 12 controller.go:231] Updating CRD OpenAPI spec because installplans.operators.coreos.com changed 2025-12-12T16:33:30.659149542+00:00 stderr F I1212 16:33:30.659117 12 controller.go:231] Updating CRD OpenAPI spec because oauths.config.openshift.io changed 2025-12-12T16:33:30.659236094+00:00 stderr F I1212 16:33:30.659169 12 controller.go:231] Updating CRD OpenAPI spec because referencegrants.gateway.networking.k8s.io changed 2025-12-12T16:33:30.659368727+00:00 stderr F I1212 16:33:30.659334 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.rhobs changed 2025-12-12T16:33:30.659395608+00:00 stderr F I1212 16:33:30.659372 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagers.monitoring.rhobs changed 2025-12-12T16:33:30.659492171+00:00 stderr F I1212 16:33:30.659450 12 controller.go:231] Updating CRD OpenAPI spec because podmonitors.monitoring.rhobs changed 2025-12-12T16:33:30.659570503+00:00 stderr F I1212 16:33:30.659538 12 controller.go:231] Updating CRD OpenAPI spec because images.config.openshift.io changed 2025-12-12T16:33:30.659634664+00:00 stderr F I1212 16:33:30.659597 12 controller.go:231] Updating CRD OpenAPI spec because machineosbuilds.machineconfiguration.openshift.io changed 2025-12-12T16:33:30.659686775+00:00 stderr F I1212 16:33:30.659656 12 controller.go:231] Updating CRD OpenAPI spec because openshiftapiservers.operator.openshift.io changed 2025-12-12T16:33:30.659750617+00:00 stderr F I1212 16:33:30.659716 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagers.monitoring.coreos.com changed 2025-12-12T16:33:30.659820549+00:00 stderr F I1212 16:33:30.659791 12 controller.go:231] Updating CRD OpenAPI spec because consolelinks.console.openshift.io changed 2025-12-12T16:33:30.659900071+00:00 stderr F I1212 16:33:30.659867 12 
controller.go:231] Updating CRD OpenAPI spec because controlplanemachinesets.machine.openshift.io changed 2025-12-12T16:33:30.659935282+00:00 stderr F I1212 16:33:30.659904 12 controller.go:231] Updating CRD OpenAPI spec because etcds.operator.openshift.io changed 2025-12-12T16:33:30.659935282+00:00 stderr F I1212 16:33:30.659916 12 controller.go:231] Updating CRD OpenAPI spec because machinehealthchecks.machine.openshift.io changed 2025-12-12T16:33:30.660035884+00:00 stderr F I1212 16:33:30.660002 12 controller.go:231] Updating CRD OpenAPI spec because storagestates.migration.k8s.io changed 2025-12-12T16:33:30.660108726+00:00 stderr F I1212 16:33:30.660078 12 controller.go:231] Updating CRD OpenAPI spec because elasticmapsservers.maps.k8s.elastic.co changed 2025-12-12T16:33:30.660151597+00:00 stderr F I1212 16:33:30.660121 12 controller.go:231] Updating CRD OpenAPI spec because builds.config.openshift.io changed 2025-12-12T16:33:30.660214419+00:00 stderr F I1212 16:33:30.660149 12 controller.go:231] Updating CRD OpenAPI spec because perses.perses.dev changed 2025-12-12T16:33:30.662543187+00:00 stderr F I1212 16:33:30.662457 12 controller.go:231] Updating CRD OpenAPI spec because egressips.k8s.ovn.org changed 2025-12-12T16:33:30.663219114+00:00 stderr F I1212 16:33:30.663132 12 controller.go:231] Updating CRD OpenAPI spec because metal3remediations.infrastructure.cluster.x-k8s.io changed 2025-12-12T16:33:30.663219114+00:00 stderr F I1212 16:33:30.663161 12 controller.go:231] Updating CRD OpenAPI spec because storageversionmigrations.migration.k8s.io changed 2025-12-12T16:33:30.663219114+00:00 stderr F I1212 16:33:30.663172 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearchautoscalers.autoscaling.k8s.elastic.co changed 2025-12-12T16:33:30.663466330+00:00 stderr F I1212 16:33:30.663389 12 controller.go:231] Updating CRD OpenAPI spec because imagecontentsourcepolicies.operator.openshift.io changed 2025-12-12T16:33:30.663675005+00:00 stderr F I1212 16:33:30.663611 12 controller.go:231] Updating CRD OpenAPI spec because olmconfigs.operators.coreos.com changed 2025-12-12T16:33:30.663675005+00:00 stderr F I1212 16:33:30.663647 12 controller.go:231] Updating CRD OpenAPI spec because alertingrules.monitoring.openshift.io changed 2025-12-12T16:33:30.663738197+00:00 stderr F I1212 16:33:30.663692 12 controller.go:231] Updating CRD OpenAPI spec because ipamclaims.k8s.cni.cncf.io changed 2025-12-12T16:33:30.664108886+00:00 stderr F I1212 16:33:30.664062 12 controller.go:231] Updating CRD OpenAPI spec because servicemonitors.monitoring.coreos.com changed 2025-12-12T16:33:30.664352972+00:00 stderr F I1212 16:33:30.664290 12 controller.go:231] Updating CRD OpenAPI spec because storages.operator.openshift.io changed 2025-12-12T16:33:30.664810043+00:00 stderr F I1212 16:33:30.664752 12 controller.go:231] Updating CRD OpenAPI spec because configs.imageregistry.operator.openshift.io changed 2025-12-12T16:33:30.664810043+00:00 stderr F I1212 16:33:30.664786 12 controller.go:231] Updating CRD OpenAPI spec because dnses.config.openshift.io changed 2025-12-12T16:33:30.664915466+00:00 stderr F I1212 16:33:30.664870 12 controller.go:231] Updating CRD OpenAPI spec because prometheuses.monitoring.coreos.com changed 2025-12-12T16:33:30.665214554+00:00 stderr F I1212 16:33:30.665143 12 controller.go:231] Updating CRD OpenAPI spec because proxies.config.openshift.io changed 2025-12-12T16:33:30.665366157+00:00 stderr F I1212 16:33:30.665315 12 controller.go:231] Updating CRD OpenAPI spec because 
clusterautoscalers.autoscaling.openshift.io changed 2025-12-12T16:33:30.665473000+00:00 stderr F I1212 16:33:30.665419 12 controller.go:231] Updating CRD OpenAPI spec because imagepruners.imageregistry.operator.openshift.io changed 2025-12-12T16:33:30.665473000+00:00 stderr F I1212 16:33:30.665435 12 controller.go:231] Updating CRD OpenAPI spec because thanosrulers.monitoring.rhobs changed 2025-12-12T16:33:30.665473000+00:00 stderr F I1212 16:33:30.665443 12 controller.go:231] Updating CRD OpenAPI spec because logstashes.logstash.k8s.elastic.co changed 2025-12-12T16:33:30.665629244+00:00 stderr F I1212 16:33:30.665576 12 controller.go:231] Updating CRD OpenAPI spec because adminpolicybasedexternalroutes.k8s.ovn.org changed 2025-12-12T16:33:30.665697976+00:00 stderr F I1212 16:33:30.665657 12 controller.go:231] Updating CRD OpenAPI spec because clusteroperators.config.openshift.io changed 2025-12-12T16:33:30.665886620+00:00 stderr F I1212 16:33:30.665789 12 controller.go:231] Updating CRD OpenAPI spec because clusterserviceversions.operators.coreos.com changed 2025-12-12T16:33:30.665886620+00:00 stderr F I1212 16:33:30.665817 12 controller.go:231] Updating CRD OpenAPI spec because clusteruserdefinednetworks.k8s.ovn.org changed 2025-12-12T16:33:30.665954642+00:00 stderr F I1212 16:33:30.665911 12 controller.go:231] Updating CRD OpenAPI spec because dnses.operator.openshift.io changed 2025-12-12T16:33:30.666024874+00:00 stderr F I1212 16:33:30.665990 12 controller.go:231] Updating CRD OpenAPI spec because gateways.gateway.networking.k8s.io changed 2025-12-12T16:33:30.666111486+00:00 stderr F I1212 16:33:30.666078 12 controller.go:231] Updating CRD OpenAPI spec because infrastructures.config.openshift.io changed 2025-12-12T16:33:30.666207628+00:00 stderr F I1212 16:33:30.666165 12 controller.go:231] Updating CRD OpenAPI spec because ipaddresses.ipam.cluster.x-k8s.io changed 2025-12-12T16:33:30.666221059+00:00 stderr F I1212 16:33:30.666199 12 controller.go:231] Updating CRD OpenAPI spec because ingresses.config.openshift.io changed 2025-12-12T16:33:30.666301911+00:00 stderr F I1212 16:33:30.666269 12 controller.go:231] Updating CRD OpenAPI spec because kubestorageversionmigrators.operator.openshift.io changed 2025-12-12T16:33:30.666373133+00:00 stderr F I1212 16:33:30.666336 12 controller.go:231] Updating CRD OpenAPI spec because machineconfignodes.machineconfiguration.openshift.io changed 2025-12-12T16:33:30.666434874+00:00 stderr F I1212 16:33:30.666398 12 controller.go:231] Updating CRD OpenAPI spec because machinesets.machine.openshift.io changed 2025-12-12T16:33:30.666492596+00:00 stderr F I1212 16:33:30.666460 12 controller.go:231] Updating CRD OpenAPI spec because podmonitors.monitoring.coreos.com changed 2025-12-12T16:33:30.666492596+00:00 stderr F I1212 16:33:30.666480 12 controller.go:231] Updating CRD OpenAPI spec because rolebindingrestrictions.authorization.openshift.io changed 2025-12-12T16:33:30.666579378+00:00 stderr F I1212 16:33:30.666550 12 controller.go:231] Updating CRD OpenAPI spec because monitoringstacks.monitoring.rhobs changed 2025-12-12T16:33:30.666622939+00:00 stderr F I1212 16:33:30.666587 12 controller.go:231] Updating CRD OpenAPI spec because persesdatasources.perses.dev changed 2025-12-12T16:33:30.666697451+00:00 stderr F I1212 16:33:30.666669 12 controller.go:231] Updating CRD OpenAPI spec because consoleyamlsamples.console.openshift.io changed 2025-12-12T16:33:30.666763242+00:00 stderr F I1212 16:33:30.666737 12 controller.go:231] Updating CRD OpenAPI spec 
because ipaddressclaims.ipam.cluster.x-k8s.io changed 2025-12-12T16:33:30.666831934+00:00 stderr F I1212 16:33:30.666794 12 controller.go:231] Updating CRD OpenAPI spec because openshiftcontrollermanagers.operator.openshift.io changed 2025-12-12T16:33:30.666831934+00:00 stderr F I1212 16:33:30.666809 12 controller.go:231] Updating CRD OpenAPI spec because securitycontextconstraints.security.openshift.io changed 2025-12-12T16:33:30.666881755+00:00 stderr F I1212 16:33:30.666846 12 controller.go:231] Updating CRD OpenAPI spec because certificates.cert-manager.io changed 2025-12-12T16:33:30.666919516+00:00 stderr F I1212 16:33:30.666881 12 controller.go:231] Updating CRD OpenAPI spec because alertrelabelconfigs.monitoring.openshift.io changed 2025-12-12T16:33:30.667005608+00:00 stderr F I1212 16:33:30.666963 12 controller.go:231] Updating CRD OpenAPI spec because authentications.operator.openshift.io changed 2025-12-12T16:33:30.667056060+00:00 stderr F I1212 16:33:30.667019 12 controller.go:231] Updating CRD OpenAPI spec because imagedigestmirrorsets.config.openshift.io changed 2025-12-12T16:33:30.667099191+00:00 stderr F I1212 16:33:30.667068 12 controller.go:231] Updating CRD OpenAPI spec because pinnedimagesets.machineconfiguration.openshift.io changed 2025-12-12T16:33:30.667207533+00:00 stderr F I1212 16:33:30.667164 12 controller.go:231] Updating CRD OpenAPI spec because probes.monitoring.rhobs changed 2025-12-12T16:33:30.667255715+00:00 stderr F I1212 16:33:30.667209 12 controller.go:231] Updating CRD OpenAPI spec because enterprisesearches.enterprisesearch.k8s.elastic.co changed 2025-12-12T16:33:30.667335427+00:00 stderr F I1212 16:33:30.667296 12 controller.go:231] Updating CRD OpenAPI spec because certmanagers.operator.openshift.io changed 2025-12-12T16:33:30.667487800+00:00 stderr F I1212 16:33:30.667448 12 controller.go:231] Updating CRD OpenAPI spec because baselineadminnetworkpolicies.policy.networking.k8s.io changed 2025-12-12T16:33:30.667487800+00:00 stderr F I1212 16:33:30.667470 12 controller.go:231] Updating CRD OpenAPI spec because configs.samples.operator.openshift.io changed 2025-12-12T16:33:30.667722056+00:00 stderr F I1212 16:33:30.667673 12 controller.go:231] Updating CRD OpenAPI spec because consoles.operator.openshift.io changed 2025-12-12T16:33:30.667917571+00:00 stderr F I1212 16:33:30.667872 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:33:30.668089365+00:00 stderr F I1212 16:33:30.668028 12 controller.go:231] Updating CRD OpenAPI spec because servicemonitors.monitoring.rhobs changed 2025-12-12T16:33:30.668168657+00:00 stderr F I1212 16:33:30.668123 12 controller.go:231] Updating CRD OpenAPI spec because kubeapiservers.operator.openshift.io changed 2025-12-12T16:33:30.668282250+00:00 stderr F I1212 16:33:30.668140 12 controller.go:231] Updating CRD OpenAPI spec because projects.config.openshift.io changed 2025-12-12T16:33:30.668312241+00:00 stderr F I1212 16:33:30.668260 12 controller.go:231] Updating CRD OpenAPI spec because kubeschedulers.operator.openshift.io changed 2025-12-12T16:33:30.668384333+00:00 stderr F I1212 16:33:30.668347 12 controller.go:231] Updating CRD OpenAPI spec because networks.config.openshift.io changed 2025-12-12T16:33:30.668449604+00:00 stderr F I1212 16:33:30.668408 12 controller.go:231] Updating CRD OpenAPI spec because nodeslicepools.whereabouts.cni.cncf.io changed 2025-12-12T16:33:40.594897162+00:00 stderr F I1212 16:33:40.594483 12 apf_controller.go:493] 
"Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0017841128457560872 seatDemandStdev=0.04640891490475326 seatDemandSmoothed=1.8582055181335029 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:33:50.595439430+00:00 stderr F I1212 16:33:50.595284 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.00977018850182231 seatDemandStdev=0.17397964796296458 seatDemandSmoothed=1.8196930374551223 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:34:00.595929632+00:00 stderr F I1212 16:34:00.595683 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0017844920941986681 seatDemandStdev=0.04220554089411023 seatDemandSmoothed=1.7788518683523857 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:34:04.617326869+00:00 stderr F W1212 16:34:04.616928 12 watcher.go:338] watch chan error: etcdserver: mvcc: required revision has been compacted 2025-12-12T16:34:10.596582394+00:00 stderr F I1212 16:34:10.596333 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0021963237671310098 seatDemandStdev=0.05171827701177123 seatDemandSmoothed=1.7391783111981955 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:34:20.597429610+00:00 stderr F I1212 16:34:20.597231 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0009726317712507073 seatDemandStdev=0.031171874481465838 seatDemandSmoothed=1.6999165336844495 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:34:50.601582322+00:00 stderr F I1212 16:34:50.600673 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.010246357167958 seatDemandStdev=0.17461304623537216 seatDemandSmoothed=1.591376596113272 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:35:00.601322561+00:00 stderr F I1212 16:35:00.601069 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.001256290800779231 seatDemandStdev=0.035421921661636435 seatDemandSmoothed=1.5556185332893022 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:35:10.601998438+00:00 stderr F I1212 16:35:10.601803 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0022410006175833694 seatDemandStdev=0.05487254534034357 seatDemandSmoothed=1.5211529185806805 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:35:20.602961904+00:00 stderr F I1212 16:35:20.602745 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0009898196733621645 seatDemandStdev=0.03144582532509506 seatDemandSmoothed=1.4869124212882892 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:35:31.735915714+00:00 stderr F W1212 16:35:31.735710 12 watcher.go:338] watch chan error: etcdserver: mvcc: required revision has been compacted 2025-12-12T16:35:50.606846341+00:00 stderr F I1212 16:35:50.606142 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.00977227682669343 seatDemandStdev=0.18257688942926714 
seatDemandSmoothed=1.393281635499089 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:36:00.607499058+00:00 stderr F I1212 16:36:00.607315 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0013817888621380783 seatDemandStdev=0.03714672962292306 seatDemandSmoothed=1.3621223138077663 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:36:10.609675414+00:00 stderr F I1212 16:36:10.608272 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0015178488962501995 seatDemandStdev=0.046060445442921504 seatDemandSmoothed=1.3318878013599886 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:36:20.609659188+00:00 stderr F I1212 16:36:20.609452 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0007131242932304411 seatDemandStdev=0.026694863681480857 seatDemandSmoothed=1.301884765652127 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:36:50.613287864+00:00 stderr F I1212 16:36:50.612341 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.008259704228792557 seatDemandStdev=0.1659864977773264 seatDemandSmoothed=1.219982695213844 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:37:00.614853588+00:00 stderr F I1212 16:37:00.614655 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0018814789284118298 seatDemandStdev=0.04333519315122262 seatDemandSmoothed=1.1929630766817572 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:37:10.616114249+00:00 stderr F I1212 16:37:10.615927 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.0016628555341027169 seatDemandStdev=0.04396113246013065 seatDemandSmoothed=1.166574277641944 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:37:20.616671387+00:00 stderr F I1212 16:37:20.616393 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.001170798561645748 seatDemandStdev=0.034196897408592435 seatDemandSmoothed=1.140556526263495 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:37:50.620500644+00:00 stderr F I1212 16:37:50.619341 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.008386846902040074 seatDemandStdev=0.16067262732182938 seatDemandSmoothed=1.069341541075622 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:38:00.621836968+00:00 stderr F I1212 16:38:00.621649 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.001097739724554055 seatDemandStdev=0.03311396521184364 seatDemandSmoothed=1.0455335548444198 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:38:30.439121914+00:00 stderr F I1212 16:38:30.437695 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:38:30.439121914+00:00 stderr F I1212 16:38:30.437743 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:38:30.439121914+00:00 stderr F I1212 16:38:30.437750 12 
cidrallocator.go:243] syncing ServiceCIDR allocators took: 70.542µs 2025-12-12T16:38:30.572648009+00:00 stderr F I1212 16:38:30.572515 12 policy_source.go:240] refreshing policies 2025-12-12T16:38:30.654525557+00:00 stderr F I1212 16:38:30.654354 12 controller.go:231] Updating CRD OpenAPI spec because ingresscontrollers.operator.openshift.io changed 2025-12-12T16:38:30.654525557+00:00 stderr F I1212 16:38:30.654410 12 controller.go:231] Updating CRD OpenAPI spec because machines.machine.openshift.io changed 2025-12-12T16:38:30.655665935+00:00 stderr F I1212 16:38:30.655490 12 controller.go:231] Updating CRD OpenAPI spec because uiplugins.observability.openshift.io changed 2025-12-12T16:38:30.655665935+00:00 stderr F I1212 16:38:30.655564 12 controller.go:231] Updating CRD OpenAPI spec because orders.acme.cert-manager.io changed 2025-12-12T16:38:30.655946622+00:00 stderr F I1212 16:38:30.655880 12 controller.go:231] Updating CRD OpenAPI spec because apiservers.config.openshift.io changed 2025-12-12T16:38:30.655983913+00:00 stderr F I1212 16:38:30.655934 12 controller.go:231] Updating CRD OpenAPI spec because authentications.config.openshift.io changed 2025-12-12T16:38:30.655983913+00:00 stderr F I1212 16:38:30.655958 12 controller.go:231] Updating CRD OpenAPI spec because egressqoses.k8s.ovn.org changed 2025-12-12T16:38:30.656009264+00:00 stderr F I1212 16:38:30.655993 12 controller.go:231] Updating CRD OpenAPI spec because helmchartrepositories.helm.openshift.io changed 2025-12-12T16:38:30.656188708+00:00 stderr F I1212 16:38:30.656115 12 controller.go:231] Updating CRD OpenAPI spec because kubecontrollermanagers.operator.openshift.io changed 2025-12-12T16:38:30.656413304+00:00 stderr F I1212 16:38:30.656366 12 controller.go:231] Updating CRD OpenAPI spec because podnetworkconnectivitychecks.controlplane.operator.openshift.io changed 2025-12-12T16:38:30.656441265+00:00 stderr F I1212 16:38:30.656410 12 controller.go:231] Updating CRD OpenAPI spec because stackconfigpolicies.stackconfigpolicy.k8s.elastic.co changed 2025-12-12T16:38:30.656691601+00:00 stderr F I1212 16:38:30.656650 12 controller.go:231] Updating CRD OpenAPI spec because apirequestcounts.apiserver.openshift.io changed 2025-12-12T16:38:30.656712212+00:00 stderr F I1212 16:38:30.656681 12 controller.go:231] Updating CRD OpenAPI spec because featuregates.config.openshift.io changed 2025-12-12T16:38:30.656844555+00:00 stderr F I1212 16:38:30.656813 12 controller.go:231] Updating CRD OpenAPI spec because nodes.config.openshift.io changed 2025-12-12T16:38:30.658007304+00:00 stderr F I1212 16:38:30.657950 12 controller.go:231] Updating CRD OpenAPI spec because prometheusagents.monitoring.rhobs changed 2025-12-12T16:38:30.658007304+00:00 stderr F I1212 16:38:30.657986 12 controller.go:231] Updating CRD OpenAPI spec because observabilityinstallers.observability.openshift.io changed 2025-12-12T16:38:30.658035805+00:00 stderr F I1212 16:38:30.657998 12 controller.go:231] Updating CRD OpenAPI spec because controllerconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:38:30.658035805+00:00 stderr F I1212 16:38:30.658007 12 controller.go:231] Updating CRD OpenAPI spec because csisnapshotcontrollers.operator.openshift.io changed 2025-12-12T16:38:30.658416114+00:00 stderr F I1212 16:38:30.658366 12 controller.go:231] Updating CRD OpenAPI spec because installplans.operators.coreos.com changed 2025-12-12T16:38:30.658416114+00:00 stderr F I1212 16:38:30.658394 12 controller.go:231] Updating CRD OpenAPI spec because 
oauths.config.openshift.io changed 2025-12-12T16:38:30.658440285+00:00 stderr F I1212 16:38:30.658404 12 controller.go:231] Updating CRD OpenAPI spec because referencegrants.gateway.networking.k8s.io changed 2025-12-12T16:38:30.658702052+00:00 stderr F I1212 16:38:30.658656 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.rhobs changed 2025-12-12T16:38:30.658743103+00:00 stderr F I1212 16:38:30.658722 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagers.monitoring.rhobs changed 2025-12-12T16:38:30.658761703+00:00 stderr F I1212 16:38:30.658735 12 controller.go:231] Updating CRD OpenAPI spec because podmonitors.monitoring.rhobs changed 2025-12-12T16:38:30.658864146+00:00 stderr F I1212 16:38:30.658823 12 controller.go:231] Updating CRD OpenAPI spec because images.config.openshift.io changed 2025-12-12T16:38:30.658930147+00:00 stderr F I1212 16:38:30.658895 12 controller.go:231] Updating CRD OpenAPI spec because machineosbuilds.machineconfiguration.openshift.io changed 2025-12-12T16:38:30.659045130+00:00 stderr F I1212 16:38:30.659005 12 controller.go:231] Updating CRD OpenAPI spec because openshiftapiservers.operator.openshift.io changed 2025-12-12T16:38:30.659045130+00:00 stderr F I1212 16:38:30.659025 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagers.monitoring.coreos.com changed 2025-12-12T16:38:30.659198714+00:00 stderr F I1212 16:38:30.659143 12 controller.go:231] Updating CRD OpenAPI spec because consolelinks.console.openshift.io changed 2025-12-12T16:38:30.659283016+00:00 stderr F I1212 16:38:30.659255 12 controller.go:231] Updating CRD OpenAPI spec because controlplanemachinesets.machine.openshift.io changed 2025-12-12T16:38:30.659307147+00:00 stderr F I1212 16:38:30.659283 12 controller.go:231] Updating CRD OpenAPI spec because etcds.operator.openshift.io changed 2025-12-12T16:38:30.659438030+00:00 stderr F I1212 16:38:30.659397 12 controller.go:231] Updating CRD OpenAPI spec because machinehealthchecks.machine.openshift.io changed 2025-12-12T16:38:30.659469511+00:00 stderr F I1212 16:38:30.659453 12 controller.go:231] Updating CRD OpenAPI spec because storagestates.migration.k8s.io changed 2025-12-12T16:38:30.659492221+00:00 stderr F I1212 16:38:30.659476 12 controller.go:231] Updating CRD OpenAPI spec because elasticmapsservers.maps.k8s.elastic.co changed 2025-12-12T16:38:30.659680696+00:00 stderr F I1212 16:38:30.659632 12 controller.go:231] Updating CRD OpenAPI spec because builds.config.openshift.io changed 2025-12-12T16:38:30.659680696+00:00 stderr F I1212 16:38:30.659651 12 controller.go:231] Updating CRD OpenAPI spec because perses.perses.dev changed 2025-12-12T16:38:30.659741058+00:00 stderr F I1212 16:38:30.659714 12 controller.go:231] Updating CRD OpenAPI spec because egressips.k8s.ovn.org changed 2025-12-12T16:38:30.659829040+00:00 stderr F I1212 16:38:30.659796 12 controller.go:231] Updating CRD OpenAPI spec because metal3remediations.infrastructure.cluster.x-k8s.io changed 2025-12-12T16:38:30.659829040+00:00 stderr F I1212 16:38:30.659810 12 controller.go:231] Updating CRD OpenAPI spec because storageversionmigrations.migration.k8s.io changed 2025-12-12T16:38:30.659848730+00:00 stderr F I1212 16:38:30.659818 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearchautoscalers.autoscaling.k8s.elastic.co changed 2025-12-12T16:38:30.660008914+00:00 stderr F I1212 16:38:30.659977 12 controller.go:231] Updating CRD OpenAPI spec because imagecontentsourcepolicies.operator.openshift.io 
changed 2025-12-12T16:38:30.660049755+00:00 stderr F I1212 16:38:30.660019 12 controller.go:231] Updating CRD OpenAPI spec because olmconfigs.operators.coreos.com changed 2025-12-12T16:38:30.660068756+00:00 stderr F I1212 16:38:30.660043 12 controller.go:231] Updating CRD OpenAPI spec because alertingrules.monitoring.openshift.io changed 2025-12-12T16:38:30.660169508+00:00 stderr F I1212 16:38:30.660137 12 controller.go:231] Updating CRD OpenAPI spec because ipamclaims.k8s.cni.cncf.io changed 2025-12-12T16:38:30.660293071+00:00 stderr F I1212 16:38:30.660258 12 controller.go:231] Updating CRD OpenAPI spec because servicemonitors.monitoring.coreos.com changed 2025-12-12T16:38:30.660293071+00:00 stderr F I1212 16:38:30.660275 12 controller.go:231] Updating CRD OpenAPI spec because storages.operator.openshift.io changed 2025-12-12T16:38:30.660372293+00:00 stderr F I1212 16:38:30.660343 12 controller.go:231] Updating CRD OpenAPI spec because configs.imageregistry.operator.openshift.io changed 2025-12-12T16:38:30.660392444+00:00 stderr F I1212 16:38:30.660366 12 controller.go:231] Updating CRD OpenAPI spec because dnses.config.openshift.io changed 2025-12-12T16:38:30.660646120+00:00 stderr F I1212 16:38:30.660591 12 controller.go:231] Updating CRD OpenAPI spec because prometheuses.monitoring.coreos.com changed 2025-12-12T16:38:30.660666421+00:00 stderr F I1212 16:38:30.660649 12 controller.go:231] Updating CRD OpenAPI spec because proxies.config.openshift.io changed 2025-12-12T16:38:30.660685241+00:00 stderr F I1212 16:38:30.660660 12 controller.go:231] Updating CRD OpenAPI spec because clusterautoscalers.autoscaling.openshift.io changed 2025-12-12T16:38:30.660703932+00:00 stderr F I1212 16:38:30.660681 12 controller.go:231] Updating CRD OpenAPI spec because imagepruners.imageregistry.operator.openshift.io changed 2025-12-12T16:38:30.660703932+00:00 stderr F I1212 16:38:30.660692 12 controller.go:231] Updating CRD OpenAPI spec because thanosrulers.monitoring.rhobs changed 2025-12-12T16:38:30.660722882+00:00 stderr F I1212 16:38:30.660706 12 controller.go:231] Updating CRD OpenAPI spec because logstashes.logstash.k8s.elastic.co changed 2025-12-12T16:38:30.660827935+00:00 stderr F I1212 16:38:30.660791 12 controller.go:231] Updating CRD OpenAPI spec because adminpolicybasedexternalroutes.k8s.ovn.org changed 2025-12-12T16:38:30.660827935+00:00 stderr F I1212 16:38:30.660815 12 controller.go:231] Updating CRD OpenAPI spec because clusteroperators.config.openshift.io changed 2025-12-12T16:38:30.660896287+00:00 stderr F I1212 16:38:30.660860 12 controller.go:231] Updating CRD OpenAPI spec because clusterserviceversions.operators.coreos.com changed 2025-12-12T16:38:30.661045470+00:00 stderr F I1212 16:38:30.660981 12 controller.go:231] Updating CRD OpenAPI spec because clusteruserdefinednetworks.k8s.ovn.org changed 2025-12-12T16:38:30.661064971+00:00 stderr F I1212 16:38:30.661051 12 controller.go:231] Updating CRD OpenAPI spec because dnses.operator.openshift.io changed 2025-12-12T16:38:30.661083391+00:00 stderr F I1212 16:38:30.661065 12 controller.go:231] Updating CRD OpenAPI spec because gateways.gateway.networking.k8s.io changed 2025-12-12T16:38:30.661101732+00:00 stderr F I1212 16:38:30.661080 12 controller.go:231] Updating CRD OpenAPI spec because infrastructures.config.openshift.io changed 2025-12-12T16:38:30.661221045+00:00 stderr F I1212 16:38:30.661165 12 controller.go:231] Updating CRD OpenAPI spec because ipaddresses.ipam.cluster.x-k8s.io changed 2025-12-12T16:38:30.661250986+00:00 stderr 
F I1212 16:38:30.661215 12 controller.go:231] Updating CRD OpenAPI spec because ingresses.config.openshift.io changed 2025-12-12T16:38:30.661250986+00:00 stderr F I1212 16:38:30.661230 12 controller.go:231] Updating CRD OpenAPI spec because kubestorageversionmigrators.operator.openshift.io changed 2025-12-12T16:38:30.661300037+00:00 stderr F I1212 16:38:30.661268 12 controller.go:231] Updating CRD OpenAPI spec because machineconfignodes.machineconfiguration.openshift.io changed 2025-12-12T16:38:30.661321697+00:00 stderr F I1212 16:38:30.661291 12 controller.go:231] Updating CRD OpenAPI spec because machinesets.machine.openshift.io changed 2025-12-12T16:38:30.661392669+00:00 stderr F I1212 16:38:30.661357 12 controller.go:231] Updating CRD OpenAPI spec because podmonitors.monitoring.coreos.com changed 2025-12-12T16:38:30.661416800+00:00 stderr F I1212 16:38:30.661383 12 controller.go:231] Updating CRD OpenAPI spec because rolebindingrestrictions.authorization.openshift.io changed 2025-12-12T16:38:30.661439730+00:00 stderr F I1212 16:38:30.661414 12 controller.go:231] Updating CRD OpenAPI spec because monitoringstacks.monitoring.rhobs changed 2025-12-12T16:38:30.661466741+00:00 stderr F I1212 16:38:30.661435 12 controller.go:231] Updating CRD OpenAPI spec because persesdatasources.perses.dev changed 2025-12-12T16:38:30.661489752+00:00 stderr F I1212 16:38:30.661477 12 controller.go:231] Updating CRD OpenAPI spec because consoleyamlsamples.console.openshift.io changed 2025-12-12T16:38:30.661567194+00:00 stderr F I1212 16:38:30.661532 12 controller.go:231] Updating CRD OpenAPI spec because ipaddressclaims.ipam.cluster.x-k8s.io changed 2025-12-12T16:38:30.661590954+00:00 stderr F I1212 16:38:30.661557 12 controller.go:231] Updating CRD OpenAPI spec because openshiftcontrollermanagers.operator.openshift.io changed 2025-12-12T16:38:30.661590954+00:00 stderr F I1212 16:38:30.661571 12 controller.go:231] Updating CRD OpenAPI spec because securitycontextconstraints.security.openshift.io changed 2025-12-12T16:38:30.661627465+00:00 stderr F I1212 16:38:30.661603 12 controller.go:231] Updating CRD OpenAPI spec because certificates.cert-manager.io changed 2025-12-12T16:38:30.661732538+00:00 stderr F I1212 16:38:30.661685 12 controller.go:231] Updating CRD OpenAPI spec because alertrelabelconfigs.monitoring.openshift.io changed 2025-12-12T16:38:30.661732538+00:00 stderr F I1212 16:38:30.661713 12 controller.go:231] Updating CRD OpenAPI spec because authentications.operator.openshift.io changed 2025-12-12T16:38:30.661763688+00:00 stderr F I1212 16:38:30.661747 12 controller.go:231] Updating CRD OpenAPI spec because imagedigestmirrorsets.config.openshift.io changed 2025-12-12T16:38:30.661820960+00:00 stderr F I1212 16:38:30.661785 12 controller.go:231] Updating CRD OpenAPI spec because pinnedimagesets.machineconfiguration.openshift.io changed 2025-12-12T16:38:30.661846661+00:00 stderr F I1212 16:38:30.661814 12 controller.go:231] Updating CRD OpenAPI spec because probes.monitoring.rhobs changed 2025-12-12T16:38:30.661846661+00:00 stderr F I1212 16:38:30.661832 12 controller.go:231] Updating CRD OpenAPI spec because enterprisesearches.enterprisesearch.k8s.elastic.co changed 2025-12-12T16:38:30.661924183+00:00 stderr F I1212 16:38:30.661897 12 controller.go:231] Updating CRD OpenAPI spec because certmanagers.operator.openshift.io changed 2025-12-12T16:38:30.661982964+00:00 stderr F I1212 16:38:30.661955 12 controller.go:231] Updating CRD OpenAPI spec because 
baselineadminnetworkpolicies.policy.networking.k8s.io changed 2025-12-12T16:38:30.662002134+00:00 stderr F I1212 16:38:30.661985 12 controller.go:231] Updating CRD OpenAPI spec because configs.samples.operator.openshift.io changed 2025-12-12T16:38:30.662051416+00:00 stderr F I1212 16:38:30.662023 12 controller.go:231] Updating CRD OpenAPI spec because consoles.operator.openshift.io changed 2025-12-12T16:38:30.662070556+00:00 stderr F I1212 16:38:30.662058 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:38:30.662200079+00:00 stderr F I1212 16:38:30.662156 12 controller.go:231] Updating CRD OpenAPI spec because servicemonitors.monitoring.rhobs changed 2025-12-12T16:38:30.662268521+00:00 stderr F I1212 16:38:30.662239 12 controller.go:231] Updating CRD OpenAPI spec because kubeapiservers.operator.openshift.io changed 2025-12-12T16:38:30.662298842+00:00 stderr F I1212 16:38:30.662279 12 controller.go:231] Updating CRD OpenAPI spec because projects.config.openshift.io changed 2025-12-12T16:38:30.662363354+00:00 stderr F I1212 16:38:30.662338 12 controller.go:231] Updating CRD OpenAPI spec because kubeschedulers.operator.openshift.io changed 2025-12-12T16:38:30.662407515+00:00 stderr F I1212 16:38:30.662371 12 controller.go:231] Updating CRD OpenAPI spec because networks.config.openshift.io changed 2025-12-12T16:38:30.662426845+00:00 stderr F I1212 16:38:30.662406 12 controller.go:231] Updating CRD OpenAPI spec because nodeslicepools.whereabouts.cni.cncf.io changed 2025-12-12T16:38:30.662448016+00:00 stderr F I1212 16:38:30.662421 12 controller.go:231] Updating CRD OpenAPI spec because clustercsidrivers.operator.openshift.io changed 2025-12-12T16:38:30.662521528+00:00 stderr F I1212 16:38:30.662493 12 controller.go:231] Updating CRD OpenAPI spec because consoleexternalloglinks.console.openshift.io changed 2025-12-12T16:38:30.662564099+00:00 stderr F I1212 16:38:30.662539 12 controller.go:231] Updating CRD OpenAPI spec because grpcroutes.gateway.networking.k8s.io changed 2025-12-12T16:38:30.662598049+00:00 stderr F I1212 16:38:30.662576 12 controller.go:231] Updating CRD OpenAPI spec because istiocsrs.operator.openshift.io changed 2025-12-12T16:38:30.662667981+00:00 stderr F I1212 16:38:30.662642 12 controller.go:231] Updating CRD OpenAPI spec because catalogsources.operators.coreos.com changed 2025-12-12T16:38:30.662686942+00:00 stderr F I1212 16:38:30.662664 12 controller.go:231] Updating CRD OpenAPI spec because operatorgroups.operators.coreos.com changed 2025-12-12T16:38:30.662828745+00:00 stderr F I1212 16:38:30.662791 12 controller.go:231] Updating CRD OpenAPI spec because operatorhubs.config.openshift.io changed 2025-12-12T16:38:30.662885057+00:00 stderr F I1212 16:38:30.662859 12 controller.go:231] Updating CRD OpenAPI spec because servicecas.operator.openshift.io changed 2025-12-12T16:38:30.662904067+00:00 stderr F I1212 16:38:30.662876 12 controller.go:231] Updating CRD OpenAPI spec because clusterimagepolicies.config.openshift.io changed 2025-12-12T16:38:30.662946298+00:00 stderr F I1212 16:38:30.662918 12 controller.go:231] Updating CRD OpenAPI spec because consoles.config.openshift.io changed 2025-12-12T16:38:30.663039571+00:00 stderr F I1212 16:38:30.663009 12 controller.go:231] Updating CRD OpenAPI spec because machineosconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:38:30.663058791+00:00 stderr F I1212 16:38:30.663039 12 controller.go:231] Updating CRD OpenAPI spec because 
issuers.cert-manager.io changed 2025-12-12T16:38:30.663166424+00:00 stderr F I1212 16:38:30.663138 12 controller.go:231] Updating CRD OpenAPI spec because imagetagmirrorsets.config.openshift.io changed 2025-12-12T16:38:30.663200985+00:00 stderr F I1212 16:38:30.663168 12 controller.go:231] Updating CRD OpenAPI spec because ippools.whereabouts.cni.cncf.io changed 2025-12-12T16:38:30.663292327+00:00 stderr F I1212 16:38:30.663262 12 controller.go:231] Updating CRD OpenAPI spec because operators.operators.coreos.com changed 2025-12-12T16:38:30.663374309+00:00 stderr F I1212 16:38:30.663346 12 controller.go:231] Updating CRD OpenAPI spec because probes.monitoring.coreos.com changed 2025-12-12T16:38:30.663440751+00:00 stderr F I1212 16:38:30.663409 12 controller.go:231] Updating CRD OpenAPI spec because projecthelmchartrepositories.helm.openshift.io changed 2025-12-12T16:38:30.663460791+00:00 stderr F I1212 16:38:30.663432 12 controller.go:231] Updating CRD OpenAPI spec because rangeallocations.security.internal.openshift.io changed 2025-12-12T16:38:30.663507702+00:00 stderr F I1212 16:38:30.663481 12 controller.go:231] Updating CRD OpenAPI spec because persesdashboards.perses.dev changed 2025-12-12T16:38:30.663604785+00:00 stderr F I1212 16:38:30.663576 12 controller.go:231] Updating CRD OpenAPI spec because beats.beat.k8s.elastic.co changed 2025-12-12T16:38:30.663624105+00:00 stderr F I1212 16:38:30.663606 12 controller.go:231] Updating CRD OpenAPI spec because challenges.acme.cert-manager.io changed 2025-12-12T16:38:30.663688277+00:00 stderr F I1212 16:38:30.663660 12 controller.go:231] Updating CRD OpenAPI spec because adminnetworkpolicies.policy.networking.k8s.io changed 2025-12-12T16:38:30.663745928+00:00 stderr F I1212 16:38:30.663722 12 controller.go:231] Updating CRD OpenAPI spec because clusterresourcequotas.quota.openshift.io changed 2025-12-12T16:38:30.663809600+00:00 stderr F I1212 16:38:30.663781 12 controller.go:231] Updating CRD OpenAPI spec because configs.operator.openshift.io changed 2025-12-12T16:38:30.663865671+00:00 stderr F I1212 16:38:30.663837 12 controller.go:231] Updating CRD OpenAPI spec because egressfirewalls.k8s.ovn.org changed 2025-12-12T16:38:30.663890032+00:00 stderr F I1212 16:38:30.663860 12 controller.go:231] Updating CRD OpenAPI spec because egressrouters.network.operator.openshift.io changed 2025-12-12T16:38:30.663890032+00:00 stderr F I1212 16:38:30.663875 12 controller.go:231] Updating CRD OpenAPI spec because gatewayclasses.gateway.networking.k8s.io changed 2025-12-12T16:38:30.663973194+00:00 stderr F I1212 16:38:30.663945 12 controller.go:231] Updating CRD OpenAPI spec because network-attachment-definitions.k8s.cni.cncf.io changed 2025-12-12T16:38:30.664012675+00:00 stderr F I1212 16:38:30.663987 12 controller.go:231] Updating CRD OpenAPI spec because operatorconditions.operators.coreos.com changed 2025-12-12T16:38:30.664032276+00:00 stderr F I1212 16:38:30.664022 12 controller.go:231] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.coreos.com changed 2025-12-12T16:38:30.664117168+00:00 stderr F I1212 16:38:30.664091 12 controller.go:231] Updating CRD OpenAPI spec because consolenotifications.console.openshift.io changed 2025-12-12T16:38:30.664136438+00:00 stderr F I1212 16:38:30.664119 12 controller.go:231] Updating CRD OpenAPI spec because consolesamples.console.openshift.io changed 2025-12-12T16:38:30.664230231+00:00 stderr F I1212 16:38:30.664170 12 controller.go:231] Updating CRD OpenAPI spec because 
imagecontentpolicies.config.openshift.io changed 2025-12-12T16:38:30.664253441+00:00 stderr F I1212 16:38:30.664233 12 controller.go:231] Updating CRD OpenAPI spec because imagepolicies.config.openshift.io changed 2025-12-12T16:38:30.664272332+00:00 stderr F I1212 16:38:30.664252 12 controller.go:231] Updating CRD OpenAPI spec because operatorpkis.network.operator.openshift.io changed 2025-12-12T16:38:30.664325143+00:00 stderr F I1212 16:38:30.664299 12 controller.go:231] Updating CRD OpenAPI spec because prometheusrules.monitoring.coreos.com changed 2025-12-12T16:38:30.664367264+00:00 stderr F I1212 16:38:30.664342 12 controller.go:231] Updating CRD OpenAPI spec because prometheusrules.monitoring.rhobs changed 2025-12-12T16:38:30.664386514+00:00 stderr F I1212 16:38:30.664374 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigurations.operator.openshift.io changed 2025-12-12T16:38:30.664405165+00:00 stderr F I1212 16:38:30.664388 12 controller.go:231] Updating CRD OpenAPI spec because agents.agent.k8s.elastic.co changed 2025-12-12T16:38:30.664457406+00:00 stderr F I1212 16:38:30.664431 12 controller.go:231] Updating CRD OpenAPI spec because elasticsearches.elasticsearch.k8s.elastic.co changed 2025-12-12T16:38:30.664611990+00:00 stderr F I1212 16:38:30.664586 12 controller.go:231] Updating CRD OpenAPI spec because kibanas.kibana.k8s.elastic.co changed 2025-12-12T16:38:30.664631491+00:00 stderr F I1212 16:38:30.664614 12 controller.go:231] Updating CRD OpenAPI spec because kubeletconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:38:30.664649801+00:00 stderr F I1212 16:38:30.664632 12 controller.go:231] Updating CRD OpenAPI spec because machineautoscalers.autoscaling.openshift.io changed 2025-12-12T16:38:30.664746293+00:00 stderr F I1212 16:38:30.664715 12 controller.go:231] Updating CRD OpenAPI spec because apmservers.apm.k8s.elastic.co changed 2025-12-12T16:38:30.664901097+00:00 stderr F I1212 16:38:30.664846 12 controller.go:231] Updating CRD OpenAPI spec because egressservices.k8s.ovn.org changed 2025-12-12T16:38:30.664969499+00:00 stderr F I1212 16:38:30.664941 12 controller.go:231] Updating CRD OpenAPI spec because machineconfigpools.machineconfiguration.openshift.io changed 2025-12-12T16:38:30.664988619+00:00 stderr F I1212 16:38:30.664963 12 controller.go:231] Updating CRD OpenAPI spec because thanosrulers.monitoring.coreos.com changed 2025-12-12T16:38:30.664988619+00:00 stderr F I1212 16:38:30.664975 12 controller.go:231] Updating CRD OpenAPI spec because userdefinednetworks.k8s.ovn.org changed 2025-12-12T16:38:30.665063891+00:00 stderr F I1212 16:38:30.665032 12 controller.go:231] Updating CRD OpenAPI spec because certificaterequests.cert-manager.io changed 2025-12-12T16:38:30.665128883+00:00 stderr F I1212 16:38:30.665094 12 controller.go:231] Updating CRD OpenAPI spec because subscriptions.operators.coreos.com changed 2025-12-12T16:38:30.665261416+00:00 stderr F I1212 16:38:30.665222 12 controller.go:231] Updating CRD OpenAPI spec because scrapeconfigs.monitoring.rhobs changed 2025-12-12T16:38:30.665289967+00:00 stderr F I1212 16:38:30.665253 12 controller.go:231] Updating CRD OpenAPI spec because clusterissuers.cert-manager.io changed 2025-12-12T16:38:30.665309128+00:00 stderr F I1212 16:38:30.665278 12 controller.go:231] Updating CRD OpenAPI spec because consoleclidownloads.console.openshift.io changed 2025-12-12T16:38:30.665309128+00:00 stderr F I1212 16:38:30.665289 12 controller.go:231] Updating CRD OpenAPI spec because 
consoleplugins.console.openshift.io changed 2025-12-12T16:38:30.665327898+00:00 stderr F I1212 16:38:30.665299 12 controller.go:231] Updating CRD OpenAPI spec because metal3remediationtemplates.infrastructure.cluster.x-k8s.io changed 2025-12-12T16:38:30.665433371+00:00 stderr F I1212 16:38:30.665404 12 controller.go:231] Updating CRD OpenAPI spec because overlappingrangeipreservations.whereabouts.cni.cncf.io changed 2025-12-12T16:38:30.665476862+00:00 stderr F I1212 16:38:30.665449 12 controller.go:231] Updating CRD OpenAPI spec because schedulers.config.openshift.io changed 2025-12-12T16:38:30.665546924+00:00 stderr F I1212 16:38:30.665511 12 controller.go:231] Updating CRD OpenAPI spec because containerruntimeconfigs.machineconfiguration.openshift.io changed 2025-12-12T16:38:30.665566864+00:00 stderr F I1212 16:38:30.665543 12 controller.go:231] Updating CRD OpenAPI spec because dnsrecords.ingress.operator.openshift.io changed 2025-12-12T16:38:30.665692007+00:00 stderr F I1212 16:38:30.665661 12 controller.go:231] Updating CRD OpenAPI spec because networks.operator.openshift.io changed 2025-12-12T16:38:30.665837631+00:00 stderr F I1212 16:38:30.665800 12 controller.go:231] Updating CRD OpenAPI spec because prometheuses.monitoring.rhobs changed 2025-12-12T16:38:30.666033566+00:00 stderr F I1212 16:38:30.665997 12 controller.go:231] Updating CRD OpenAPI spec because thanosqueriers.monitoring.rhobs changed 2025-12-12T16:38:30.666033566+00:00 stderr F I1212 16:38:30.666021 12 controller.go:231] Updating CRD OpenAPI spec because httproutes.gateway.networking.k8s.io changed 2025-12-12T16:38:30.666053686+00:00 stderr F I1212 16:38:30.666030 12 controller.go:231] Updating CRD OpenAPI spec because clusterversions.config.openshift.io changed 2025-12-12T16:38:30.666072357+00:00 stderr F I1212 16:38:30.666047 12 controller.go:231] Updating CRD OpenAPI spec because consolequickstarts.console.openshift.io changed 2025-12-12T16:38:50.625690058+00:00 stderr F I1212 16:38:50.625418 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.01232272482478099 seatDemandStdev=0.1932955281029901 seatDemandSmoothed=0.9387876232247523 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:39:00.625967259+00:00 stderr F I1212 16:39:00.625710 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0011474969762691068 seatDemandStdev=0.03385528358998873 seatDemandSmoothed=0.918000571843607 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:39:05.493258079+00:00 stderr F I1212 16:39:05.493030 12 cidrallocator.go:241] syncing ServiceCIDR allocators 2025-12-12T16:39:05.493258079+00:00 stderr F I1212 16:39:05.493068 12 cidrallocator.go:277] updated ClusterIP allocator for Service CIDR 10.217.4.0/23 2025-12-12T16:39:05.493258079+00:00 stderr F I1212 16:39:05.493074 12 cidrallocator.go:243] syncing ServiceCIDR allocators took: 53.651µs 2025-12-12T16:39:17.565748803+00:00 stderr F W1212 16:39:17.565470 12 watcher.go:338] watch chan error: etcdserver: mvcc: required revision has been compacted 2025-12-12T16:39:30.628348326+00:00 stderr F I1212 16:39:30.628153 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.0036045581827339997 seatDemandStdev=0.08315260350154635 seatDemandSmoothed=0.8601125885015038 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 
2025-12-12T16:40:30.632776935+00:00 stderr F I1212 16:40:30.632481 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0012174114749914555 seatDemandStdev=0.03487017901147074 seatDemandSmoothed=0.7637201097400107 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:40:38.282206040+00:00 stderr F I1212 16:40:38.279634 12 cacher.go:847] cacher (rolebindings.rbac.authorization.k8s.io): 3 objects queued in incoming channel. 2025-12-12T16:40:38.282206040+00:00 stderr F I1212 16:40:38.279665 12 cacher.go:847] cacher (rolebindings.rbac.authorization.k8s.io): 4 objects queued in incoming channel. 2025-12-12T16:40:40.633669302+00:00 stderr F I1212 16:40:40.633472 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=3 seatDemandAvg=0.004594712032171221 seatDemandStdev=0.0869704312400425 seatDemandSmoothed=0.7482605455112513 fairFrac=2.2796127562642368 currentCL=3 concurrencyDenominator=3 backstop=false 2025-12-12T16:40:48.229541371+00:00 stderr F E1212 16:40:48.228880 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-must-gather-2sjxj/must-gather-v4h5l/gather?follow=true&timestamps=true\", Err:(*tls.permanentError)(0xc03901a730)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-must-gather-2sjxj/must-gather-v4h5l/gather?follow=true&timestamps=true\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:40:50.634056436+00:00 stderr F I1212 16:40:50.633827 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.008657000376422409 seatDemandStdev=0.1627751593625244 seatDemandSmoothed=0.7349934926384882 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:41:00.635293601+00:00 stderr F I1212 16:41:00.635089 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.0014101974053042983 seatDemandStdev=0.037526107559702634 seatDemandSmoothed=0.7189841773219982 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:41:25.072439532+00:00 stderr F E1212 16:41:25.072282 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-machine-api/control-plane-machine-set-operator-75ffdb6fcd-m8gw7/control-plane-machine-set-operator\", Err:(*tls.permanentError)(0xc024a91c10)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-machine-api/control-plane-machine-set-operator-75ffdb6fcd-m8gw7/control-plane-machine-set-operator\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:25.184704433+00:00 stderr F E1212 16:41:25.184278 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-machine-api/machine-api-operator-755bb95488-dmjfw/kube-rbac-proxy\", Err:(*tls.permanentError)(0xc02a18d560)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-machine-api/machine-api-operator-755bb95488-dmjfw/kube-rbac-proxy\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:25.189617856+00:00 stderr F E1212 16:41:25.189504 12 status.go:71] "Unhandled Error" err="apiserver received an error 
that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-machine-api/machine-api-operator-755bb95488-dmjfw/machine-api-operator\", Err:(*tls.permanentError)(0xc02fae4170)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-machine-api/machine-api-operator-755bb95488-dmjfw/machine-api-operator\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:37.047228934+00:00 stderr F E1212 16:41:37.047032 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/cert-manager/cert-manager-858d87f86b-r7f8q/cert-manager-controller\", Err:(*tls.permanentError)(0xc051dc5120)}: Get \"https://192.168.126.11:10250/containerLogs/cert-manager/cert-manager-858d87f86b-r7f8q/cert-manager-controller\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:37.237323580+00:00 stderr F E1212 16:41:37.237067 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/cert-manager/cert-manager-cainjector-7dbf76d5c8-lv2hl/cert-manager-cainjector\", Err:(*tls.permanentError)(0xc0530bdf70)}: Get \"https://192.168.126.11:10250/containerLogs/cert-manager/cert-manager-cainjector-7dbf76d5c8-lv2hl/cert-manager-cainjector\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:37.249617289+00:00 stderr F E1212 16:41:37.249479 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/cert-manager/cert-manager-webhook-7894b5b9b4-2kmrt/cert-manager-webhook\", Err:(*tls.permanentError)(0xc0542bafb0)}: Get \"https://192.168.126.11:10250/containerLogs/cert-manager/cert-manager-webhook-7894b5b9b4-2kmrt/cert-manager-webhook\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:50.639520465+00:00 stderr F I1212 16:41:50.638875 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.00869732521559669 seatDemandStdev=0.16371188705727774 seatDemandSmoothed=0.6485453029782781 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:41:52.903660933+00:00 stderr F E1212 16:41:52.902911 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/util\", Err:(*tls.permanentError)(0xc05b5ce710)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/util\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:53.105640907+00:00 stderr F E1212 16:41:53.105486 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/util\", Err:(*tls.permanentError)(0xc063e7dc90)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/util\": remote error: tls: internal error" logger="UnhandledError" 
2025-12-12T16:41:53.124143062+00:00 stderr F E1212 16:41:53.123994 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/pull\", Err:(*tls.permanentError)(0xc0641f46e0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/pull\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:53.144872043+00:00 stderr F E1212 16:41:53.144691 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/pull\", Err:(*tls.permanentError)(0xc051ce3d00)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/pull\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:53.357517856+00:00 stderr F E1212 16:41:53.356802 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/extract\", Err:(*tls.permanentError)(0xc059f9c490)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/extract\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:53.365755893+00:00 stderr F E1212 16:41:53.363421 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/pull\", Err:(*tls.permanentError)(0xc06246a5a0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/pull\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:53.365755893+00:00 stderr F E1212 16:41:53.365120 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/util\", Err:(*tls.permanentError)(0xc06246a670)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5/util\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:53.564455665+00:00 stderr F E1212 16:41:53.564274 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/util\", Err:(*tls.permanentError)(0xc06527bea0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/util\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:53.735111393+00:00 stderr F E1212 16:41:53.702731 12 status.go:71] "Unhandled 
Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/pull\", Err:(*tls.permanentError)(0xc066bcde80)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/pull\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:53.735111393+00:00 stderr F E1212 16:41:53.703268 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/pull\", Err:(*tls.permanentError)(0xc066bcdf10)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/pull\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:53.735111393+00:00 stderr F E1212 16:41:53.728256 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/util\", Err:(*tls.permanentError)(0xc0673db120)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/util\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:53.904902970+00:00 stderr F E1212 16:41:53.904696 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/pull\", Err:(*tls.permanentError)(0xc06737a6e0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/pull\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:53.930088662+00:00 stderr F E1212 16:41:53.929909 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/util\", Err:(*tls.permanentError)(0xc067321880)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/util\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:53.957898501+00:00 stderr F E1212 16:41:53.957739 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/extract\", Err:(*tls.permanentError)(0xc0678980f0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85/extract\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:54.094378530+00:00 stderr F E1212 16:41:54.094162 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", 
URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/util\", Err:(*tls.permanentError)(0xc067636860)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/util\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:54.277241565+00:00 stderr F E1212 16:41:54.277074 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/util\", Err:(*tls.permanentError)(0xc067899250)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/util\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:54.277821249+00:00 stderr F E1212 16:41:54.277752 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/pull\", Err:(*tls.permanentError)(0xc068889610)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/pull\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:54.309749232+00:00 stderr F E1212 16:41:54.309590 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/pull\", Err:(*tls.permanentError)(0xc0688fa940)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/pull\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:54.475782893+00:00 stderr F E1212 16:41:54.475629 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/pull\", Err:(*tls.permanentError)(0xc067899e30)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/pull\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:54.479166128+00:00 stderr F E1212 16:41:54.479065 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/util\", Err:(*tls.permanentError)(0xc065f50210)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/util\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:54.509817228+00:00 stderr F E1212 16:41:54.509667 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", 
URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/extract\", Err:(*tls.permanentError)(0xc03e3c8ed0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx/extract\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:54.690887768+00:00 stderr F E1212 16:41:54.690211 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/extract-utilities\", Err:(*tls.permanentError)(0xc02fafccb0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/extract-utilities\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:54.868817739+00:00 stderr F E1212 16:41:54.868660 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/extract-utilities\", Err:(*tls.permanentError)(0xc0299fd870)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/extract-utilities\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:54.916887306+00:00 stderr F E1212 16:41:54.916706 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/extract-content\", Err:(*tls.permanentError)(0xc0664e2270)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/extract-content\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:54.919787039+00:00 stderr F E1212 16:41:54.919682 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/extract-content\", Err:(*tls.permanentError)(0xc053157070)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/extract-content\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:55.098715205+00:00 stderr F E1212 16:41:55.098550 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/extract-content\", Err:(*tls.permanentError)(0xc01ac52a20)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/extract-content\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:55.118543063+00:00 stderr F E1212 16:41:55.118284 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/extract-utilities\", Err:(*tls.permanentError)(0xc03344e650)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/extract-utilities\": remote error: tls: internal error" logger="UnhandledError" 
2025-12-12T16:41:55.126599796+00:00 stderr F E1212 16:41:55.126503 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/registry-server\", Err:(*tls.permanentError)(0xc063d70f20)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/certified-operators-psnw2/registry-server\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:55.280649696+00:00 stderr F E1212 16:41:55.280521 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/extract-utilities\", Err:(*tls.permanentError)(0xc054e128a0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/extract-utilities\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:55.454496754+00:00 stderr F E1212 16:41:55.454353 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/extract-content\", Err:(*tls.permanentError)(0xc054e13380)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/extract-content\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:55.476263121+00:00 stderr F E1212 16:41:55.476111 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/extract-utilities\", Err:(*tls.permanentError)(0xc05b09e870)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/extract-utilities\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:55.513740133+00:00 stderr F E1212 16:41:55.513579 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/extract-content\", Err:(*tls.permanentError)(0xc03e357720)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/extract-content\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:55.654044388+00:00 stderr F E1212 16:41:55.653895 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/registry-server\", Err:(*tls.permanentError)(0xc037258ca0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/registry-server\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:55.660492360+00:00 stderr F E1212 16:41:55.660385 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/extract-content\", Err:(*tls.permanentError)(0xc03ed62d60)}: Get 
\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/extract-content\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:55.663264380+00:00 stderr F E1212 16:41:55.663158 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/extract-utilities\", Err:(*tls.permanentError)(0xc03ed920d0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/community-operators-6jgv5/extract-utilities\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:55.713720507+00:00 stderr F E1212 16:41:55.713582 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/marketplace-operator-547dbd544d-4vhrb/marketplace-operator\", Err:(*tls.permanentError)(0xc03344fa70)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/marketplace-operator-547dbd544d-4vhrb/marketplace-operator\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:55.857632993+00:00 stderr F E1212 16:41:55.857495 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/extract-utilities\", Err:(*tls.permanentError)(0xc03ebe8020)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/extract-utilities\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:56.013429328+00:00 stderr F E1212 16:41:56.013248 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/extract-content\", Err:(*tls.permanentError)(0xc03ed673c0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/extract-content\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:56.031753538+00:00 stderr F E1212 16:41:56.031615 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/extract-content\", Err:(*tls.permanentError)(0xc03f962290)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/extract-content\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:56.031913702+00:00 stderr F E1212 16:41:56.031852 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/extract-utilities\", Err:(*tls.permanentError)(0xc03f962320)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/extract-utilities\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:56.181088130+00:00 stderr F E1212 16:41:56.180921 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", 
URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/extract-utilities\", Err:(*tls.permanentError)(0xc03ee826f0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/extract-utilities\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:56.184022224+00:00 stderr F E1212 16:41:56.183913 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/extract-content\", Err:(*tls.permanentError)(0xc03ee82c00)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/extract-content\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:41:56.186474716+00:00 stderr F E1212 16:41:56.186359 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/registry-server\", Err:(*tls.permanentError)(0xc03fb6ad60)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-marketplace/redhat-operators-wqdb8/registry-server\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:00.641656193+00:00 stderr F I1212 16:42:00.639873 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.002087562637258634 seatDemandStdev=0.04564213754300028 seatDemandSmoothed=0.6347265441139236 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:42:02.964675351+00:00 stderr F W1212 16:42:02.964515 12 watcher.go:338] watch chan error: etcdserver: mvcc: required revision has been compacted 2025-12-12T16:42:08.344084890+00:00 stderr F E1212 16:42:08.343924 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-operators/obo-prometheus-operator-86648f486b-wbj29/prometheus-operator\", Err:(*tls.permanentError)(0xc06275e4b0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-operators/obo-prometheus-operator-86648f486b-wbj29/prometheus-operator\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:08.520814761+00:00 stderr F E1212 16:42:08.520650 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-operators/obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g/prometheus-operator-admission-webhook\", Err:(*tls.permanentError)(0xc062959570)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-operators/obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g/prometheus-operator-admission-webhook\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:08.563281768+00:00 stderr F E1212 16:42:08.563113 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-operators/obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr/prometheus-operator-admission-webhook\", Err:(*tls.permanentError)(0xc062e1c750)}: Get 
\"https://192.168.126.11:10250/containerLogs/openshift-operators/obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr/prometheus-operator-admission-webhook\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:08.708098986+00:00 stderr F E1212 16:42:08.707921 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-operators/observability-operator-78c97476f4-qxqmn/operator\", Err:(*tls.permanentError)(0xc062c01700)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-operators/observability-operator-78c97476f4-qxqmn/operator\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:08.776955846+00:00 stderr F E1212 16:42:08.776783 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-operators/perses-operator-68bdb49cbf-nqtp8/perses-operator\", Err:(*tls.permanentError)(0xc062f7b9f0)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-operators/perses-operator-68bdb49cbf-nqtp8/perses-operator\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:10.641059282+00:00 stderr F I1212 16:42:10.640893 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=2 seatDemandAvg=0.003504073441817935 seatDemandStdev=0.05954390805593493 seatDemandSmoothed=0.6215779371737516 fairFrac=2.2796127562642368 currentCL=2 concurrencyDenominator=2 backstop=false 2025-12-12T16:42:20.643409227+00:00 stderr F I1212 16:42:20.641961 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.001152349616094886 seatDemandStdev=0.03392671081105818 seatDemandSmoothed=0.6080884630085799 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:42:50.644817354+00:00 stderr F I1212 16:42:50.644300 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=5 seatDemandAvg=0.00590933262084574 seatDemandStdev=0.1154246776851872 seatDemandSmoothed=0.5719977201937618 fairFrac=2.2796127562642368 currentCL=5 concurrencyDenominator=5 backstop=false 2025-12-12T16:42:58.247819284+00:00 stderr F E1212 16:42:58.247616 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &url.Error{Op:\"Get\", URL:\"https://192.168.126.11:10250/containerLogs/openshift-must-gather-2sjxj/must-gather-v4h5l/gather?timestamps=true\", Err:(*tls.permanentError)(0xc034524880)}: Get \"https://192.168.126.11:10250/containerLogs/openshift-must-gather-2sjxj/must-gather-v4h5l/gather?timestamps=true\": remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.396845439+00:00 stderr F E1212 16:42:58.396675 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.410532533+00:00 stderr F E1212 16:42:58.410369 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.433433078+00:00 stderr F 
E1212 16:42:58.433280 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.447346558+00:00 stderr F E1212 16:42:58.447130 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.463460743+00:00 stderr F E1212 16:42:58.463313 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.474630273+00:00 stderr F E1212 16:42:58.474493 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.486344178+00:00 stderr F E1212 16:42:58.486217 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.497021186+00:00 stderr F E1212 16:42:58.496906 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.692843886+00:00 stderr F E1212 16:42:58.692657 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.706390246+00:00 stderr F E1212 16:42:58.706236 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.733407425+00:00 stderr F E1212 16:42:58.733252 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.746238517+00:00 stderr F E1212 16:42:58.746053 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.761785208+00:00 stderr F E1212 16:42:58.761625 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote 
error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.774977560+00:00 stderr F E1212 16:42:58.774750 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.789211807+00:00 stderr F E1212 16:42:58.788862 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:42:58.801050375+00:00 stderr F E1212 16:42:58.800917 12 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"error dialing backend: remote error: tls: internal error\"}: error dialing backend: remote error: tls: internal error" logger="UnhandledError" 2025-12-12T16:43:00.645346072+00:00 stderr F I1212 16:43:00.645139 12 apf_controller.go:493] "Update CurrentCL" plName="exempt" seatDemandHighWatermark=1 seatDemandAvg=0.002841449040995989 seatDemandStdev=0.05322945808801187 seatDemandSmoothed=0.5601314034932725 fairFrac=2.2796127562642368 currentCL=1 concurrencyDenominator=1 backstop=false 2025-12-12T16:43:03.850866361+00:00 stderr F I1212 16:43:03.848412 12 node_authorizer.go:224] "NODE DENY" err="node 'crc' cannot get unknown pod openshift-must-gather-2sjxj/must-gather-v4h5l" 2025-12-12T16:43:04.013662852+00:00 stderr F I1212 16:43:04.013502 12 httplog.go:93] system:serviceaccount:openshift-apiserver:openshift-apiserver-sa[system:serviceaccounts,system:serviceaccounts:openshift-apiserver,system:authenticated] is impersonating system:serviceaccount:kube-system:namespace-controller[system:serviceaccounts,system:serviceaccounts:kube-system,system:authenticated] 2025-12-12T16:43:04.040634819+00:00 stderr F I1212 16:43:04.040492 12 httplog.go:93] system:serviceaccount:openshift-apiserver:openshift-apiserver-sa[system:serviceaccounts,system:serviceaccounts:openshift-apiserver,system:authenticated] is impersonating system:serviceaccount:kube-system:namespace-controller[system:serviceaccounts,system:serviceaccounts:kube-system,system:authenticated] 2025-12-12T16:43:04.243767162+00:00 stderr F I1212 16:43:04.243617 12 node_authorizer.go:224] "NODE DENY" err="node 'crc' cannot get unknown pod openshift-must-gather-2sjxj/must-gather-v4h5l" 2025-12-12T16:43:04.329047095+00:00 stderr F I1212 16:43:04.328519 12 node_authorizer.go:224] "NODE DENY" err="node 'crc' cannot get unknown pod openshift-must-gather-2sjxj/must-gather-v4h5l" 2025-12-12T16:43:04.469377361+00:00 stderr F I1212 16:43:04.469198 12 httplog.go:93] system:serviceaccount:openshift-apiserver:openshift-apiserver-sa[system:serviceaccounts,system:serviceaccounts:openshift-apiserver,system:authenticated] is impersonating system:serviceaccount:kube-system:namespace-controller[system:serviceaccounts,system:serviceaccounts:kube-system,system:authenticated] 2025-12-12T16:43:04.473859864+00:00 stderr F I1212 16:43:04.473741 12 httplog.go:93] system:serviceaccount:openshift-apiserver:openshift-apiserver-sa[system:serviceaccounts,system:serviceaccounts:openshift-apiserver,system:authenticated] is impersonating system:serviceaccount:kube-system:namespace-controller[system:serviceaccounts,system:serviceaccounts:kube-system,system:authenticated] 
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/setup/0.log
2025-12-12T16:18:27.690575582+00:00 stdout F Fixing audit permissions ...
2025-12-12T16:18:27.698758304+00:00 stdout F Acquiring exclusive lock /var/log/kube-apiserver/.lock ...
2025-12-12T16:18:27.699991745+00:00 stdout F flock: getting lock took 0.000006 seconds
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-check-endpoints/0.log
2025-12-12T16:18:30.190670221+00:00 stderr F W1212 16:18:30.190455 1 cmd.go:257] Using insecure, self-signed certificates
2025-12-12T16:18:30.190815694+00:00 stderr F I1212 16:18:30.190755 1 crypto.go:594] Generating new CA for check-endpoints-signer@1765556310 cert, and key in /tmp/serving-cert-1704176655/serving-signer.crt, /tmp/serving-cert-1704176655/serving-signer.key
2025-12-12T16:18:30.190815694+00:00 stderr F Validity period of the certificate for "check-endpoints-signer@1765556310" is unset, resetting to 43800h0m0s!
2025-12-12T16:18:30.744085433+00:00 stderr F I1212 16:18:30.744018 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:18:30.744085433+00:00 stderr F I1212 16:18:30.744041 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:18:30.744085433+00:00 stderr F I1212 16:18:30.744046 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:18:30.744085433+00:00 stderr F I1212 16:18:30.744053 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:18:30.744085433+00:00 stderr F I1212 16:18:30.744057 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:18:30.744326489+00:00 stderr F I1212 16:18:30.744290 1 observer_polling.go:159] Starting file observer 2025-12-12T16:18:35.767935956+00:00 stderr F I1212 16:18:35.767720 1 builder.go:304] check-endpoints version v0.0.0-unknown-c3d9642-c3d9642 2025-12-12T16:18:35.768797028+00:00 stderr F I1212 16:18:35.768630 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/tmp/serving-cert-1704176655/tls.crt::/tmp/serving-cert-1704176655/tls.key" 2025-12-12T16:18:36.130642103+00:00 stderr F I1212 16:18:36.130586 1 requestheader_controller.go:255] Loaded a new request header values for RequestHeaderAuthRequestController 2025-12-12T16:18:36.132169841+00:00 stderr F I1212 16:18:36.132132 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400 2025-12-12T16:18:36.132169841+00:00 stderr F I1212 16:18:36.132153 1 maxinflight.go:145] "Initialized mutatingChan" len=200 2025-12-12T16:18:36.132234463+00:00 stderr F I1212 16:18:36.132216 1 maxinflight.go:116] "Set denominator for readonly requests" limit=400 2025-12-12T16:18:36.132234463+00:00 stderr F I1212 16:18:36.132226 1 maxinflight.go:120] "Set denominator for mutating requests" limit=200 2025-12-12T16:18:36.142728952+00:00 stderr F I1212 16:18:36.142641 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:18:36.142728952+00:00 stderr F W1212 16:18:36.142681 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:18:36.142728952+00:00 stderr F W1212 16:18:36.142687 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:18:36.142728952+00:00 stderr F I1212 16:18:36.142677 1 genericapiserver.go:546] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:18:36.142728952+00:00 stderr F W1212 16:18:36.142693 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:18:36.142728952+00:00 stderr F W1212 16:18:36.142720 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:18:36.142784714+00:00 stderr F W1212 16:18:36.142724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:18:36.142784714+00:00 stderr F W1212 16:18:36.142728 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 
2025-12-12T16:18:36.145347057+00:00 stderr F I1212 16:18:36.145205 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:18:36.145347057+00:00 stderr F I1212 16:18:36.145209 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:18:36.145347057+00:00 stderr F I1212 16:18:36.145246 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:18:36.145347057+00:00 stderr F I1212 16:18:36.145286 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:18:36.145347057+00:00 stderr F I1212 16:18:36.145298 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:18:36.145565442+00:00 stderr F I1212 16:18:36.145301 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:18:36.145601493+00:00 stderr F I1212 16:18:36.145569 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-1704176655/tls.crt::/tmp/serving-cert-1704176655/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"check-endpoints-signer@1765556310\" (2025-12-12 16:18:29 +0000 UTC to 2025-12-12 16:18:30 +0000 UTC (now=2025-12-12 16:18:36.145523621 +0000 UTC))" 2025-12-12T16:18:36.145760097+00:00 stderr F I1212 16:18:36.145733 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556316\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556315\" (2025-12-12 15:18:35 +0000 UTC to 2028-12-12 15:18:35 +0000 UTC (now=2025-12-12 16:18:36.145718036 +0000 UTC))" 2025-12-12T16:18:36.145760097+00:00 stderr F I1212 16:18:36.145752 1 secure_serving.go:211] Serving securely on [::]:17697 2025-12-12T16:18:36.145781588+00:00 stderr F I1212 16:18:36.145768 1 genericapiserver.go:696] [graceful-termination] waiting for shutdown to be initiated 2025-12-12T16:18:36.145802358+00:00 stderr F I1212 16:18:36.145786 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/tmp/serving-cert-1704176655/tls.crt::/tmp/serving-cert-1704176655/tls.key" 2025-12-12T16:18:36.145875150+00:00 stderr F I1212 16:18:36.145845 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:18:36.147109901+00:00 stderr F I1212 16:18:36.147062 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:36.147267564+00:00 stderr F I1212 16:18:36.147232 1 base_controller.go:76] Waiting for caches to sync for CheckEndpointsTimeToStart 2025-12-12T16:18:36.147523701+00:00 stderr F I1212 16:18:36.147490 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:36.147707455+00:00 stderr F I1212 16:18:36.147676 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:36.245490003+00:00 stderr F I1212 16:18:36.245415 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 
2025-12-12T16:18:36.245726309+00:00 stderr F I1212 16:18:36.245671 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:18:36.245895623+00:00 stderr F I1212 16:18:36.245862 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:18:36.246405155+00:00 stderr F I1212 16:18:36.246366 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:18:36.246285992 +0000 UTC))" 2025-12-12T16:18:36.246773735+00:00 stderr F I1212 16:18:36.246750 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-1704176655/tls.crt::/tmp/serving-cert-1704176655/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"check-endpoints-signer@1765556310\" (2025-12-12 16:18:29 +0000 UTC to 2025-12-12 16:18:30 +0000 UTC (now=2025-12-12 16:18:36.246725853 +0000 UTC))" 2025-12-12T16:18:36.247054622+00:00 stderr F I1212 16:18:36.247035 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556316\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556315\" (2025-12-12 15:18:35 +0000 UTC to 2028-12-12 15:18:35 +0000 UTC (now=2025-12-12 16:18:36.24701105 +0000 UTC))" 2025-12-12T16:18:36.247440791+00:00 stderr F I1212 16:18:36.247417 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:18:36.24739064 +0000 UTC))" 2025-12-12T16:18:36.247817370+00:00 stderr F I1212 16:18:36.247791 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:18:36.247739208 +0000 UTC))" 2025-12-12T16:18:36.247873652+00:00 stderr F I1212 16:18:36.247861 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:18:36.247844211 +0000 UTC))" 2025-12-12T16:18:36.247920993+00:00 stderr F I1212 16:18:36.247910 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 
07:34:08 +0000 UTC (now=2025-12-12 16:18:36.247895142 +0000 UTC))" 2025-12-12T16:18:36.248001865+00:00 stderr F I1212 16:18:36.247980 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:18:36.247937033 +0000 UTC))" 2025-12-12T16:18:36.248061676+00:00 stderr F I1212 16:18:36.248047 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:18:36.248026125 +0000 UTC))" 2025-12-12T16:18:36.248114808+00:00 stderr F I1212 16:18:36.248101 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:18:36.248080537 +0000 UTC))" 2025-12-12T16:18:36.248191590+00:00 stderr F I1212 16:18:36.248160 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:18:36.248141108 +0000 UTC))" 2025-12-12T16:18:36.248253401+00:00 stderr F I1212 16:18:36.248240 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:18:36.24821935 +0000 UTC))" 2025-12-12T16:18:36.248308972+00:00 stderr F I1212 16:18:36.248296 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:18:36.248279122 +0000 UTC))" 2025-12-12T16:18:36.248355364+00:00 stderr F I1212 16:18:36.248343 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 
16:18:36.248326223 +0000 UTC))" 2025-12-12T16:18:36.248594170+00:00 stderr F I1212 16:18:36.248576 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-1704176655/tls.crt::/tmp/serving-cert-1704176655/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"check-endpoints-signer@1765556310\" (2025-12-12 16:18:29 +0000 UTC to 2025-12-12 16:18:30 +0000 UTC (now=2025-12-12 16:18:36.248556549 +0000 UTC))" 2025-12-12T16:18:36.248822585+00:00 stderr F I1212 16:18:36.248796 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556316\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556315\" (2025-12-12 15:18:35 +0000 UTC to 2028-12-12 15:18:35 +0000 UTC (now=2025-12-12 16:18:36.248776794 +0000 UTC))" 2025-12-12T16:18:36.278360335+00:00 stderr F I1212 16:18:36.278273 1 reflector.go:430] "Caches populated" type="*v1.CustomResourceDefinition" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:36.347843543+00:00 stderr F I1212 16:18:36.347781 1 base_controller.go:82] Caches are synced for CheckEndpointsTimeToStart 2025-12-12T16:18:36.347922225+00:00 stderr F I1212 16:18:36.347910 1 base_controller.go:119] Starting #1 worker of CheckEndpointsTimeToStart controller ... 2025-12-12T16:18:36.348035178+00:00 stderr F I1212 16:18:36.348021 1 base_controller.go:76] Waiting for caches to sync for CheckEndpointsStop 2025-12-12T16:18:36.348063099+00:00 stderr F I1212 16:18:36.348053 1 base_controller.go:82] Caches are synced for CheckEndpointsStop 2025-12-12T16:18:36.348122000+00:00 stderr F I1212 16:18:36.348111 1 base_controller.go:119] Starting #1 worker of CheckEndpointsStop controller ... 2025-12-12T16:18:36.348167861+00:00 stderr F I1212 16:18:36.348158 1 base_controller.go:181] Shutting down CheckEndpointsTimeToStart ... 2025-12-12T16:18:36.348477519+00:00 stderr F I1212 16:18:36.348427 1 base_controller.go:76] Waiting for caches to sync for check-endpoints 2025-12-12T16:18:36.348534970+00:00 stderr F I1212 16:18:36.348467 1 base_controller.go:123] Shutting down worker of CheckEndpointsTimeToStart controller ... 2025-12-12T16:18:36.348559861+00:00 stderr F I1212 16:18:36.348537 1 base_controller.go:113] All CheckEndpointsTimeToStart workers have been terminated 2025-12-12T16:18:36.350716154+00:00 stderr F I1212 16:18:36.350678 1 reflector.go:430] "Caches populated" type="*v1alpha1.PodNetworkConnectivityCheck" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:36.356073217+00:00 stderr F I1212 16:18:36.355974 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:36.448737998+00:00 stderr F I1212 16:18:36.448672 1 base_controller.go:82] Caches are synced for check-endpoints 2025-12-12T16:18:36.448737998+00:00 stderr F I1212 16:18:36.448701 1 base_controller.go:119] Starting #1 worker of check-endpoints controller ... 
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0/webhook/0.log
2025-12-12T16:16:22.852457702+00:00 stderr F + [[ -f /env/_master ]]
2025-12-12T16:16:22.852457702+00:00 stderr F + ho_enable=--enable-hybrid-overlay
2025-12-12T16:16:22.852968134+00:00 stderr F ++ date '+%m%d %H:%M:%S.%N'
2025-12-12T16:16:22.858822577+00:00 stdout F I1212 16:16:22.857994347 - network-node-identity - start webhook
2025-12-12T16:16:22.858859678+00:00 stderr F + echo 'I1212 16:16:22.857994347 - network-node-identity - start webhook'
2025-12-12T16:16:22.858923900+00:00 stderr F + exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 --webhook-cert-dir=/etc/webhook-cert --webhook-host=127.0.0.1 --webhook-port=9743 --enable-hybrid-overlay --enable-interconnect --disable-approver --extra-allowed-user=system:serviceaccount:openshift-ovn-kubernetes:ovn-kubernetes-control-plane --wait-for-kubernetes-api=200s --pod-admission-conditions=/var/run/ovnkube-identity-config/additional-pod-admission-cond.json --loglevel=2
2025-12-12T16:16:23.272093127+00:00 stderr F I1212 16:16:23.271913 1 ovnkubeidentity.go:133] Config: {kubeconfig: apiServer:https://api-int.crc.testing:6443 logLevel:2 port:9743 host:127.0.0.1 certDir:/etc/webhook-cert metricsAddress:0 leaseNamespace: enableInterconnect:true enableHybridOverlay:true disableWebhook:false disableApprover:true waitForKAPIDuration:200000000000 localKAPIPort:6443 extraAllowedUsers:{slice:[system:serviceaccount:openshift-ovn-kubernetes:ovn-kubernetes-control-plane] separator:{sep: disabled:false customized:false} hasBeenSet:true keepSpace:false} csrAcceptanceConditionFile: csrAcceptanceConditions:[] podAdmissionConditionFile:/var/run/ovnkube-identity-config/additional-pod-admission-cond.json podAdmissionConditions:[]}
2025-12-12T16:16:23.272093127+00:00 stderr F W1212 16:16:23.272034 1 client_config.go:667] Neither --kubeconfig nor --master was specified. Using the inClusterConfig. This might not work.
2025-12-12T16:16:23.275454659+00:00 stderr F I1212 16:16:23.275322 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
2025-12-12T16:16:23.275454659+00:00 stderr F I1212 16:16:23.275355 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true
2025-12-12T16:16:23.275454659+00:00 stderr F I1212 16:16:23.275361 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
2025-12-12T16:16:23.275454659+00:00 stderr F I1212 16:16:23.275366 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
2025-12-12T16:16:23.275454659+00:00 stderr F I1212 16:16:23.275370 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
2025-12-12T16:16:23.297565839+00:00 stderr F I1212 16:16:23.297481 1 ovnkubeidentity.go:352] Waiting for caches to sync
2025-12-12T16:16:23.315358373+00:00 stderr F I1212 16:16:23.315277 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160"
2025-12-12T16:16:23.399217341+00:00 stderr F I1212 16:16:23.399065 1 certwatcher.go:211] "Updated current TLS certificate" logger="controller-runtime.certwatcher"
2025-12-12T16:16:23.399578720+00:00 stderr F I1212 16:16:23.399507 1 certwatcher.go:133] "Starting certificate poll+watcher" logger="controller-runtime.certwatcher" interval="10s"
2025-12-12T16:16:23.399655171+00:00 stderr F I1212 16:16:23.399627 1 ovnkubeidentity.go:431] Starting the webhook server
2025-12-12T16:18:58.415000593+00:00 stderr F I1212 16:18:58.414878 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160"
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0/approver/0.log
2025-12-12T16:16:23.021283184+00:00 stderr F + [[ -f /env/_master ]]
2025-12-12T16:16:23.021283184+00:00 stderr F ++ date '+%m%d %H:%M:%S.%N'
2025-12-12T16:16:23.023695933+00:00 stdout F I1212 16:16:23.022964985 - network-node-identity - start approver
2025-12-12T16:16:23.024007130+00:00 stderr F + echo 'I1212 16:16:23.022964985 - network-node-identity - start approver'
2025-12-12T16:16:23.024007130+00:00 stderr F + exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 --disable-webhook --csr-acceptance-conditions=/var/run/ovnkube-identity-config/additional-cert-acceptance-cond.json --loglevel=4
2025-12-12T16:16:23.270712853+00:00 stderr F I1212 16:16:23.270545 1 ovnkubeidentity.go:133] Config: {kubeconfig: apiServer:https://api-int.crc.testing:6443 logLevel:4 port:9443 host:localhost certDir: metricsAddress:0 leaseNamespace: enableInterconnect:false enableHybridOverlay:false disableWebhook:true disableApprover:false waitForKAPIDuration:0
localKAPIPort:6443 extraAllowedUsers:{slice:[] separator:{sep: disabled:false customized:false} hasBeenSet:false keepSpace:false} csrAcceptanceConditionFile:/var/run/ovnkube-identity-config/additional-cert-acceptance-cond.json csrAcceptanceConditions:[] podAdmissionConditionFile: podAdmissionConditions:[]} 2025-12-12T16:16:23.270712853+00:00 stderr F W1212 16:16:23.270676 1 client_config.go:667] Neither --kubeconfig nor --master was specified. Using the inClusterConfig. This might not work. 2025-12-12T16:16:23.272981359+00:00 stderr F I1212 16:16:23.272908 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:23.272981359+00:00 stderr F I1212 16:16:23.272935 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:23.272981359+00:00 stderr F I1212 16:16:23.272940 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:23.272981359+00:00 stderr F I1212 16:16:23.272945 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:23.272981359+00:00 stderr F I1212 16:16:23.272949 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:23.297255851+00:00 stderr F I1212 16:16:23.297147 1 ovnkubeidentity.go:472] Starting certificate signing request approver 2025-12-12T16:16:23.297458386+00:00 stderr F I1212 16:16:23.297346 1 leaderelection.go:257] attempting to acquire leader lease openshift-network-node-identity/ovnkube-identity... 2025-12-12T16:16:23.316074511+00:00 stderr F I1212 16:16:23.316007 1 leaderelection.go:271] successfully acquired lease openshift-network-node-identity/ovnkube-identity 2025-12-12T16:16:23.316686376+00:00 stderr F I1212 16:16:23.316504 1 recorder.go:104] "crc_d9a46633-c672-48e9-9ec1-edd6009b044b became leader" logger="events" type="Normal" object={"kind":"Lease","namespace":"openshift-network-node-identity","name":"ovnkube-identity","uid":"8b910161-69c6-4366-ab04-08567be3701e","apiVersion":"coordination.k8s.io/v1","resourceVersion":"36385"} reason="LeaderElection" 2025-12-12T16:16:23.320252183+00:00 stderr F I1212 16:16:23.317014 1 controller.go:246] "Starting EventSource" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" source="kind source: *v1.CertificateSigningRequest" 2025-12-12T16:16:23.330611756+00:00 stderr F I1212 16:16:23.330513 1 reflector.go:357] "Starting reflector" logger="controller-runtime.cache" type="*v1.CertificateSigningRequest" resyncPeriod="9h2m36.961632939s" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:16:23.330713508+00:00 stderr F I1212 16:16:23.330698 1 reflector.go:403] "Listing and watching" logger="controller-runtime.cache" type="*v1.CertificateSigningRequest" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:16:23.333830654+00:00 stderr F I1212 16:16:23.333784 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.CertificateSigningRequest" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:16:23.432000951+00:00 stderr F I1212 16:16:23.431886 1 controller.go:186] "Starting Controller" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" 2025-12-12T16:16:23.432132334+00:00 stderr F I1212 16:16:23.432082 1 
controller.go:195] "Starting workers" controller="certificatesigningrequest" controllerGroup="certificates.k8s.io" controllerKind="CertificateSigningRequest" worker count=1 2025-12-12T16:16:23.434049631+00:00 stderr F I1212 16:16:23.433965 1 approver.go:230] Finished syncing CSR csr-rlqr8 for unknown node in 1.642431ms 2025-12-12T16:16:23.435657050+00:00 stderr F I1212 16:16:23.435590 1 approver.go:230] Finished syncing CSR csr-852t7 for unknown node in 83.622µs 2025-12-12T16:16:23.435885016+00:00 stderr F I1212 16:16:23.435841 1 approver.go:230] Finished syncing CSR csr-5pdpj for unknown node in 14.931µs 2025-12-12T16:16:23.436146742+00:00 stderr F I1212 16:16:23.436118 1 approver.go:230] Finished syncing CSR csr-qcbqt for unknown node in 19.621µs 2025-12-12T16:16:31.112032513+00:00 stderr F I1212 16:16:31.111949 1 recorder.go:104] "CSR \"csr-r6hlx\" has been approved" logger="events" type="Normal" object={"kind":"CertificateSigningRequest","name":"csr-r6hlx"} reason="CSRApproved" 2025-12-12T16:16:31.116543343+00:00 stderr F I1212 16:16:31.116487 1 approver.go:230] Finished syncing CSR csr-r6hlx for crc node in 5.138146ms 2025-12-12T16:16:31.116673527+00:00 stderr F I1212 16:16:31.116642 1 approver.go:230] Finished syncing CSR csr-r6hlx for unknown node in 82.442µs 2025-12-12T16:16:31.125782619+00:00 stderr F I1212 16:16:31.125733 1 approver.go:230] Finished syncing CSR csr-r6hlx for unknown node in 80.202µs 2025-12-12T16:16:37.722123433+00:00 stderr F I1212 16:16:37.721959 1 recorder.go:104] "CSR \"csr-b5xrg\" has been approved" logger="events" type="Normal" object={"kind":"CertificateSigningRequest","name":"csr-b5xrg"} reason="CSRApproved" 2025-12-12T16:16:37.727235538+00:00 stderr F I1212 16:16:37.727170 1 approver.go:230] Finished syncing CSR csr-b5xrg for crc node in 5.531815ms 2025-12-12T16:16:37.727339001+00:00 stderr F I1212 16:16:37.727315 1 approver.go:230] Finished syncing CSR csr-b5xrg for unknown node in 61.221µs 2025-12-12T16:16:37.733793038+00:00 stderr F I1212 16:16:37.733687 1 approver.go:230] Finished syncing CSR csr-b5xrg for unknown node in 88.612µs 2025-12-12T16:18:15.999825405+00:00 stderr F I1212 16:18:15.999761 1 reflector.go:946] "Watch close" logger="controller-runtime.cache" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" type="*v1.CertificateSigningRequest" totalItems=11 2025-12-12T16:18:16.001194489+00:00 stderr F I1212 16:18:16.001152 1 reflector.go:518] "Watch failed - backing off" logger="controller-runtime.cache" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" type="*v1.CertificateSigningRequest" err="Get \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests?allowWatchBookmarks=true&resourceVersion=38489&timeoutSeconds=337&watch=true\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:17.148729230+00:00 stderr F I1212 16:18:17.148632 1 reflector.go:518] "Watch failed - backing off" logger="controller-runtime.cache" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" type="*v1.CertificateSigningRequest" err="Get \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests?allowWatchBookmarks=true&resourceVersion=38489&timeoutSeconds=560&watch=true\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:19.451001728+00:00 stderr F I1212 16:18:19.450881 1 reflector.go:518] "Watch failed - backing off" logger="controller-runtime.cache" 
reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" type="*v1.CertificateSigningRequest" err="Get \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests?allowWatchBookmarks=true&resourceVersion=38489&timeoutSeconds=319&watch=true\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:23.371791391+00:00 stderr F E1212 16:18:23.371728 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-network-node-identity/leases/ovnkube-identity?timeout=15s": dial tcp 38.102.83.180:6443: connect: connection refused, falling back to slow path 2025-12-12T16:18:23.372073137+00:00 stderr F E1212 16:18:23.372049 1 leaderelection.go:436] error retrieving resource lock openshift-network-node-identity/ovnkube-identity: Get "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-network-node-identity/leases/ovnkube-identity?timeout=15s": dial tcp 38.102.83.180:6443: connect: connection refused 2025-12-12T16:18:24.376872449+00:00 stderr F I1212 16:18:24.376822 1 reflector.go:518] "Watch failed - backing off" logger="controller-runtime.cache" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" type="*v1.CertificateSigningRequest" err="Get \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests?allowWatchBookmarks=true&resourceVersion=38489&timeoutSeconds=586&watch=true\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:35.327791455+00:00 stderr F I1212 16:18:35.327199 1 reflector.go:543] "Watch closed" logger="controller-runtime.cache" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" type="*v1.CertificateSigningRequest" err="too old resource version: 38489 (39133)" 2025-12-12T16:18:56.790802399+00:00 stderr F I1212 16:18:56.790736 1 reflector.go:403] "Listing and watching" logger="controller-runtime.cache" type="*v1.CertificateSigningRequest" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:18:56.793764522+00:00 stderr F I1212 16:18:56.793728 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.CertificateSigningRequest" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:18:56.794146202+00:00 stderr F I1212 16:18:56.794126 1 approver.go:230] Finished syncing CSR csr-5pdpj for unknown node in 90.922µs 2025-12-12T16:18:56.794255115+00:00 stderr F I1212 16:18:56.794237 1 approver.go:230] Finished syncing CSR csr-852t7 for unknown node in 26.751µs 2025-12-12T16:18:56.794324986+00:00 stderr F I1212 16:18:56.794311 1 approver.go:230] Finished syncing CSR csr-b5xrg for unknown node in 21.141µs 2025-12-12T16:18:56.794471890+00:00 stderr F I1212 16:18:56.794456 1 approver.go:230] Finished syncing CSR csr-qcbqt for unknown node in 16.541µs 2025-12-12T16:18:56.794584893+00:00 stderr F I1212 16:18:56.794568 1 approver.go:230] Finished syncing CSR csr-r6hlx for unknown node in 23.5µs 2025-12-12T16:18:56.794659165+00:00 stderr F I1212 16:18:56.794645 1 approver.go:230] Finished syncing CSR csr-rlqr8 for unknown node in 21.17µs 2025-12-12T16:25:51.795983479+00:00 stderr F I1212 16:25:51.795890 1 reflector.go:946] "Watch close" logger="controller-runtime.cache" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" type="*v1.CertificateSigningRequest" totalItems=7 
2025-12-12T16:32:19.799350118+00:00 stderr F I1212 16:32:19.799150 1 reflector.go:946] "Watch close" logger="controller-runtime.cache" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" type="*v1.CertificateSigningRequest" totalItems=8
2025-12-12T16:38:26.803154430+00:00 stderr F I1212 16:38:26.803052 1 reflector.go:946] "Watch close" logger="controller-runtime.cache" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" type="*v1.CertificateSigningRequest" totalItems=7
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-console_networking-console-plugin-5ff7774fd9-nljh6_6a9ae5f6-97bd-46ac-bafa-ca1b4452a141/networking-console-plugin/0.log
2025-12-12T16:16:56.014532917+00:00 stdout F 2025/12/12 16:16:55 [notice] 1#1: using the "epoll" event method
2025-12-12T16:16:56.014532917+00:00 stdout F 2025/12/12 16:16:55 [notice] 1#1: nginx/1.24.0
2025-12-12T16:16:56.014532917+00:00 stdout F 2025/12/12 16:16:55 [notice] 1#1: built by gcc 11.4.1 20231218 (Red Hat 11.4.1-4) (GCC)
2025-12-12T16:16:56.014532917+00:00 stdout F 2025/12/12 16:16:55 [notice] 1#1: OS: Linux 5.14.0-570.57.1.el9_6.x86_64
2025-12-12T16:16:56.014532917+00:00 stdout F 2025/12/12 16:16:55 [notice] 1#1: getrlimit(RLIMIT_NOFILE): 1048576:1048576
2025-12-12T16:16:56.014532917+00:00 stdout F 2025/12/12 16:16:55 [notice] 1#1: start worker processes
2025-12-12T16:16:56.014532917+00:00 stdout F 2025/12/12 16:16:55 [notice] 1#1: start worker process 5
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca_service-ca-74545575db-gsm6t_6baa2db5-b688-47dd-8d81-7dadbbbd3759/service-ca-controller/0.log
2025-12-12T16:16:45.029350803+00:00 stderr F W1212 16:16:45.009316 1 cmd.go:257] Using insecure, self-signed certificates
2025-12-12T16:16:45.061664732+00:00 stderr F I1212 16:16:45.058529 1 crypto.go:601] Generating new CA for service-ca-controller-signer@1765556205 cert, and key in /tmp/serving-cert-1758180674/serving-signer.crt, /tmp/serving-cert-1758180674/serving-signer.key
2025-12-12T16:16:45.061664732+00:00 stderr F Validity period of the certificate for "service-ca-controller-signer@1765556205" is unset, resetting to 157680000000000000 years!
2025-12-12T16:16:46.507056820+00:00 stderr F I1212 16:16:46.503103 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}.
2025-12-12T16:16:46.507056820+00:00 stderr F I1212 16:16:46.504284 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
2025-12-12T16:16:46.507056820+00:00 stderr F I1212 16:16:46.504299 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
2025-12-12T16:16:46.507056820+00:00 stderr F I1212 16:16:46.504303 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
2025-12-12T16:16:46.507056820+00:00 stderr F I1212 16:16:46.504308 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
2025-12-12T16:16:46.507056820+00:00 stderr F I1212 16:16:46.505050 1 observer_polling.go:159] Starting file observer
2025-12-12T16:16:46.593983602+00:00 stderr F I1212 16:16:46.593815 1 builder.go:304] service-ca-controller version -
2025-12-12T16:16:46.597240291+00:00 stderr F I1212 16:16:46.594951 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/tmp/serving-cert-1758180674/tls.crt::/tmp/serving-cert-1758180674/tls.key"
2025-12-12T16:16:47.518293998+00:00 stderr F I1212 16:16:47.514992 1 requestheader_controller.go:255] Loaded a new request header values for RequestHeaderAuthRequestController
2025-12-12T16:16:47.532375062+00:00 stderr F I1212 16:16:47.530928 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400
2025-12-12T16:16:47.532375062+00:00 stderr F I1212 16:16:47.530953 1 maxinflight.go:145] "Initialized mutatingChan" len=200
2025-12-12T16:16:47.532375062+00:00 stderr F I1212 16:16:47.530984 1 maxinflight.go:116] "Set denominator for readonly requests" limit=400
2025-12-12T16:16:47.532375062+00:00 stderr F I1212 16:16:47.530990 1 maxinflight.go:120] "Set denominator for mutating requests" limit=200
2025-12-12T16:16:47.536295738+00:00 stderr F I1212 16:16:47.535773 1 secure_serving.go:57] Forcing use of http/1.1 only
2025-12-12T16:16:47.536295738+00:00 stderr F W1212 16:16:47.536125 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.
2025-12-12T16:16:47.536295738+00:00 stderr F W1212 16:16:47.536133 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.
2025-12-12T16:16:47.536295738+00:00 stderr F W1212 16:16:47.536138 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:47.536295738+00:00 stderr F W1212 16:16:47.536143 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:47.536295738+00:00 stderr F W1212 16:16:47.536147 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:47.536295738+00:00 stderr F W1212 16:16:47.536151 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 2025-12-12T16:16:47.536419731+00:00 stderr F I1212 16:16:47.536390 1 genericapiserver.go:535] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.549699 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-1758180674/tls.crt::/tmp/serving-cert-1758180674/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"service-ca-controller-signer@1765556205\" (2025-12-12 16:16:45 +0000 UTC to 2025-12-12 16:16:46 +0000 UTC (now=2025-12-12 16:16:47.549659884 +0000 UTC))" 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550117 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550252 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556207\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556207\" (2025-12-12 15:16:46 +0000 UTC to 2026-12-12 15:16:46 +0000 UTC (now=2025-12-12 16:16:47.550232988 +0000 UTC))" 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550271 1 secure_serving.go:213] Serving securely on [::]:8443 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550304 1 genericapiserver.go:685] [graceful-termination] waiting for shutdown to be initiated 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550335 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550399 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/tmp/serving-cert-1758180674/tls.crt::/tmp/serving-cert-1758180674/tls.key" 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550529 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550660 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550676 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550702 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550707 1 shared_informer.go:313] Waiting for caches to sync 
for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file 2025-12-12T16:16:47.552161905+00:00 stderr F I1212 16:16:47.550948 1 leaderelection.go:257] attempting to acquire leader lease openshift-service-ca/service-ca-controller-lock... 2025-12-12T16:16:47.560260033+00:00 stderr F I1212 16:16:47.554806 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:47.560260033+00:00 stderr F I1212 16:16:47.555097 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:47.566351962+00:00 stderr F I1212 16:16:47.564563 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:47.569349625+00:00 stderr F I1212 16:16:47.569056 1 leaderelection.go:271] successfully acquired lease openshift-service-ca/service-ca-controller-lock 2025-12-12T16:16:47.569917879+00:00 stderr F I1212 16:16:47.569876 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-service-ca", Name:"service-ca-controller-lock", UID:"974bbcc4-e7c5-4725-ac83-e4cea409d063", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"37326", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' service-ca-74545575db-gsm6t_e029f1e5-ab98-4708-8e07-63b6639a94b3 became leader 2025-12-12T16:16:47.592212143+00:00 stderr F I1212 16:16:47.583513 1 base_controller.go:76] Waiting for caches to sync for APIServiceCABundleInjector 2025-12-12T16:16:47.592212143+00:00 stderr F I1212 16:16:47.583551 1 base_controller.go:76] Waiting for caches to sync for CRDCABundleInjector 2025-12-12T16:16:47.592212143+00:00 stderr F I1212 16:16:47.583603 1 base_controller.go:76] Waiting for caches to sync for ConfigMapCABundleInjector 2025-12-12T16:16:47.592212143+00:00 stderr F I1212 16:16:47.583819 1 base_controller.go:76] Waiting for caches to sync for MutatingWebhookCABundleInjector 2025-12-12T16:16:47.592212143+00:00 stderr F I1212 16:16:47.583868 1 base_controller.go:76] Waiting for caches to sync for ValidatingWebhookCABundleInjector 2025-12-12T16:16:47.592212143+00:00 stderr F I1212 16:16:47.584043 1 base_controller.go:76] Waiting for caches to sync for LegacyVulnerableConfigMapCABundleInjector 2025-12-12T16:16:47.596368015+00:00 stderr F I1212 16:16:47.595637 1 starter.go:62] Setting certificate lifetime to 17520h0m0s, refresh certificate at 1h0m0s 2025-12-12T16:16:47.599281976+00:00 stderr F I1212 16:16:47.598135 1 base_controller.go:76] Waiting for caches to sync for ServiceServingCertUpdateController 2025-12-12T16:16:47.599281976+00:00 stderr F I1212 16:16:47.598792 1 base_controller.go:76] Waiting for caches to sync for ServiceServingCertController 2025-12-12T16:16:47.601730536+00:00 stderr F I1212 16:16:47.600154 1 reflector.go:376] Caches populated for *v1.MutatingWebhookConfiguration from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:47.602121105+00:00 stderr F I1212 16:16:47.600892 1 reflector.go:376] Caches populated for *v1.APIService from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:47.602362671+00:00 stderr F I1212 16:16:47.601421 1 reflector.go:376] Caches populated for *v1.ValidatingWebhookConfiguration from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:47.603659503+00:00 stderr F I1212 16:16:47.603624 1 reflector.go:376] Caches populated for *v1.Service from 
k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:47.650980358+00:00 stderr F I1212 16:16:47.650905 1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file 2025-12-12T16:16:47.651092781+00:00 stderr F I1212 16:16:47.651082 1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController 2025-12-12T16:16:47.651270585+00:00 stderr F I1212 16:16:47.651258 1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 2025-12-12T16:16:47.651532141+00:00 stderr F I1212 16:16:47.651512 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:47.65147214 +0000 UTC))" 2025-12-12T16:16:47.651761607+00:00 stderr F I1212 16:16:47.651748 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-1758180674/tls.crt::/tmp/serving-cert-1758180674/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"service-ca-controller-signer@1765556205\" (2025-12-12 16:16:45 +0000 UTC to 2025-12-12 16:16:46 +0000 UTC (now=2025-12-12 16:16:47.651732726 +0000 UTC))" 2025-12-12T16:16:47.651944371+00:00 stderr F I1212 16:16:47.651932 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556207\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556207\" (2025-12-12 15:16:46 +0000 UTC to 2026-12-12 15:16:46 +0000 UTC (now=2025-12-12 16:16:47.651915621 +0000 UTC))" 2025-12-12T16:16:47.655851917+00:00 stderr F I1212 16:16:47.653759 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:47.653699684 +0000 UTC))" 2025-12-12T16:16:47.655851917+00:00 stderr F I1212 16:16:47.653798 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:47.653786996 +0000 UTC))" 2025-12-12T16:16:47.655851917+00:00 stderr F I1212 16:16:47.653817 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:47.653804767 +0000 UTC))" 2025-12-12T16:16:47.655851917+00:00 stderr F I1212 16:16:47.653831 1 tlsconfig.go:181] "Loaded client CA" index=3 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:47.653821747 +0000 UTC))" 2025-12-12T16:16:47.655851917+00:00 stderr F I1212 16:16:47.653845 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:47.653836158 +0000 UTC))" 2025-12-12T16:16:47.655851917+00:00 stderr F I1212 16:16:47.653865 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:47.653854778 +0000 UTC))" 2025-12-12T16:16:47.655851917+00:00 stderr F I1212 16:16:47.653880 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:47.653869468 +0000 UTC))" 2025-12-12T16:16:47.655851917+00:00 stderr F I1212 16:16:47.653897 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:47.653885369 +0000 UTC))" 2025-12-12T16:16:47.655851917+00:00 stderr F I1212 16:16:47.655755 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-1758180674/tls.crt::/tmp/serving-cert-1758180674/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"service-ca-controller-signer@1765556205\" (2025-12-12 16:16:45 +0000 UTC to 2025-12-12 16:16:46 +0000 UTC (now=2025-12-12 16:16:47.655731694 +0000 UTC))" 2025-12-12T16:16:47.657099957+00:00 stderr F I1212 16:16:47.656912 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556207\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556207\" (2025-12-12 15:16:46 +0000 UTC to 2026-12-12 15:16:46 +0000 UTC (now=2025-12-12 16:16:47.656892982 +0000 UTC))" 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684135 1 base_controller.go:82] Caches are synced for APIServiceCABundleInjector 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684191 1 base_controller.go:119] Starting #1 worker of APIServiceCABundleInjector controller ... 
2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684204 1 base_controller.go:119] Starting #2 worker of APIServiceCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684209 1 base_controller.go:119] Starting #3 worker of APIServiceCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684213 1 base_controller.go:119] Starting #4 worker of APIServiceCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684229 1 base_controller.go:119] Starting #5 worker of APIServiceCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684313 1 base_controller.go:82] Caches are synced for MutatingWebhookCABundleInjector 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684323 1 base_controller.go:119] Starting #1 worker of MutatingWebhookCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684327 1 base_controller.go:119] Starting #2 worker of MutatingWebhookCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684330 1 base_controller.go:119] Starting #3 worker of MutatingWebhookCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684333 1 base_controller.go:119] Starting #4 worker of MutatingWebhookCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684336 1 base_controller.go:119] Starting #5 worker of MutatingWebhookCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684351 1 base_controller.go:82] Caches are synced for ValidatingWebhookCABundleInjector 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684355 1 base_controller.go:119] Starting #1 worker of ValidatingWebhookCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684362 1 base_controller.go:119] Starting #2 worker of ValidatingWebhookCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684366 1 base_controller.go:119] Starting #3 worker of ValidatingWebhookCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684369 1 base_controller.go:119] Starting #4 worker of ValidatingWebhookCABundleInjector controller ... 2025-12-12T16:16:47.688008402+00:00 stderr F I1212 16:16:47.684374 1 base_controller.go:119] Starting #5 worker of ValidatingWebhookCABundleInjector controller ... 2025-12-12T16:16:48.118282787+00:00 stderr F I1212 16:16:48.117487 1 reflector.go:376] Caches populated for *v1.Secret from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:48.215046499+00:00 stderr F I1212 16:16:48.214363 1 base_controller.go:82] Caches are synced for ServiceServingCertUpdateController 2025-12-12T16:16:48.215046499+00:00 stderr F I1212 16:16:48.214997 1 base_controller.go:119] Starting #1 worker of ServiceServingCertUpdateController controller ... 2025-12-12T16:16:48.215046499+00:00 stderr F I1212 16:16:48.215009 1 base_controller.go:119] Starting #2 worker of ServiceServingCertUpdateController controller ... 2025-12-12T16:16:48.215046499+00:00 stderr F I1212 16:16:48.215020 1 base_controller.go:119] Starting #3 worker of ServiceServingCertUpdateController controller ... 2025-12-12T16:16:48.215046499+00:00 stderr F I1212 16:16:48.215024 1 base_controller.go:119] Starting #4 worker of ServiceServingCertUpdateController controller ... 
2025-12-12T16:16:48.215046499+00:00 stderr F I1212 16:16:48.215029 1 base_controller.go:119] Starting #5 worker of ServiceServingCertUpdateController controller ... 2025-12-12T16:16:48.218161735+00:00 stderr F I1212 16:16:48.218102 1 base_controller.go:82] Caches are synced for ServiceServingCertController 2025-12-12T16:16:48.218161735+00:00 stderr F I1212 16:16:48.218151 1 base_controller.go:119] Starting #1 worker of ServiceServingCertController controller ... 2025-12-12T16:16:48.218482843+00:00 stderr F I1212 16:16:48.218162 1 base_controller.go:119] Starting #2 worker of ServiceServingCertController controller ... 2025-12-12T16:16:48.218482843+00:00 stderr F I1212 16:16:48.218170 1 base_controller.go:119] Starting #3 worker of ServiceServingCertController controller ... 2025-12-12T16:16:48.218482843+00:00 stderr F I1212 16:16:48.218282 1 base_controller.go:119] Starting #4 worker of ServiceServingCertController controller ... 2025-12-12T16:16:48.218482843+00:00 stderr F I1212 16:16:48.218291 1 base_controller.go:119] Starting #5 worker of ServiceServingCertController controller ... 2025-12-12T16:16:48.342254355+00:00 stderr F I1212 16:16:48.338093 1 reflector.go:376] Caches populated for *v1.CustomResourceDefinition from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:48.384479146+00:00 stderr F I1212 16:16:48.384316 1 base_controller.go:82] Caches are synced for CRDCABundleInjector 2025-12-12T16:16:48.384479146+00:00 stderr F I1212 16:16:48.384358 1 base_controller.go:119] Starting #1 worker of CRDCABundleInjector controller ... 2025-12-12T16:16:48.384479146+00:00 stderr F I1212 16:16:48.384371 1 base_controller.go:119] Starting #2 worker of CRDCABundleInjector controller ... 2025-12-12T16:16:48.384479146+00:00 stderr F I1212 16:16:48.384375 1 base_controller.go:119] Starting #3 worker of CRDCABundleInjector controller ... 2025-12-12T16:16:48.384479146+00:00 stderr F I1212 16:16:48.384379 1 base_controller.go:119] Starting #4 worker of CRDCABundleInjector controller ... 2025-12-12T16:16:48.384479146+00:00 stderr F I1212 16:16:48.384383 1 base_controller.go:119] Starting #5 worker of CRDCABundleInjector controller ... 2025-12-12T16:16:48.384554978+00:00 stderr F W1212 16:16:48.384465 1 crd.go:61] customresourcedefinition consoleplugins.console.openshift.io is annotated for ca bundle injection but does not use strategy "Webhook" 2025-12-12T16:16:48.492243857+00:00 stderr F I1212 16:16:48.490736 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:48.587084012+00:00 stderr F I1212 16:16:48.586738 1 base_controller.go:82] Caches are synced for LegacyVulnerableConfigMapCABundleInjector 2025-12-12T16:16:48.587084012+00:00 stderr F I1212 16:16:48.586771 1 base_controller.go:119] Starting #1 worker of LegacyVulnerableConfigMapCABundleInjector controller ... 2025-12-12T16:16:48.587084012+00:00 stderr F I1212 16:16:48.586784 1 base_controller.go:119] Starting #2 worker of LegacyVulnerableConfigMapCABundleInjector controller ... 2025-12-12T16:16:48.587084012+00:00 stderr F I1212 16:16:48.586789 1 base_controller.go:119] Starting #3 worker of LegacyVulnerableConfigMapCABundleInjector controller ... 2025-12-12T16:16:48.587084012+00:00 stderr F I1212 16:16:48.586793 1 base_controller.go:119] Starting #4 worker of LegacyVulnerableConfigMapCABundleInjector controller ... 
2025-12-12T16:16:48.587084012+00:00 stderr F I1212 16:16:48.586797 1 base_controller.go:119] Starting #5 worker of LegacyVulnerableConfigMapCABundleInjector controller ... 2025-12-12T16:16:48.590102886+00:00 stderr F I1212 16:16:48.589658 1 base_controller.go:82] Caches are synced for ConfigMapCABundleInjector 2025-12-12T16:16:48.590102886+00:00 stderr F I1212 16:16:48.589692 1 base_controller.go:119] Starting #1 worker of ConfigMapCABundleInjector controller ... 2025-12-12T16:16:48.590102886+00:00 stderr F I1212 16:16:48.589701 1 base_controller.go:119] Starting #2 worker of ConfigMapCABundleInjector controller ... 2025-12-12T16:16:48.590102886+00:00 stderr F I1212 16:16:48.589705 1 base_controller.go:119] Starting #3 worker of ConfigMapCABundleInjector controller ... 2025-12-12T16:16:48.590102886+00:00 stderr F I1212 16:16:48.589709 1 base_controller.go:119] Starting #4 worker of ConfigMapCABundleInjector controller ... 2025-12-12T16:16:48.590102886+00:00 stderr F I1212 16:16:48.589712 1 base_controller.go:119] Starting #5 worker of ConfigMapCABundleInjector controller ... 2025-12-12T16:16:55.906667184+00:00 stderr F I1212 16:16:55.903947 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.903904216 +0000 UTC))" 2025-12-12T16:16:55.906667184+00:00 stderr F I1212 16:16:55.904123 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.904097791 +0000 UTC))" 2025-12-12T16:16:55.906667184+00:00 stderr F I1212 16:16:55.904138 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.904129122 +0000 UTC))" 2025-12-12T16:16:55.906667184+00:00 stderr F I1212 16:16:55.904149 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.904142292 +0000 UTC))" 2025-12-12T16:16:55.906667184+00:00 stderr F I1212 16:16:55.904165 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.904157813 +0000 UTC))" 2025-12-12T16:16:55.906667184+00:00 stderr F I1212 16:16:55.904729 1 tlsconfig.go:181] "Loaded client CA" index=5 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.904169403 +0000 UTC))" 2025-12-12T16:16:55.906667184+00:00 stderr F I1212 16:16:55.904754 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.904739077 +0000 UTC))" 2025-12-12T16:16:55.906667184+00:00 stderr F I1212 16:16:55.904767 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.904758467 +0000 UTC))" 2025-12-12T16:16:55.906667184+00:00 stderr F I1212 16:16:55.904794 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.904770728 +0000 UTC))" 2025-12-12T16:16:55.906667184+00:00 stderr F I1212 16:16:55.904817 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.904804178 +0000 UTC))" 2025-12-12T16:16:55.906667184+00:00 stderr F I1212 16:16:55.905004 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-1758180674/tls.crt::/tmp/serving-cert-1758180674/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"service-ca-controller-signer@1765556205\" (2025-12-12 16:16:45 +0000 UTC to 2025-12-12 16:16:46 +0000 UTC (now=2025-12-12 16:16:55.904993993 +0000 UTC))" 2025-12-12T16:16:55.906667184+00:00 stderr F I1212 16:16:55.905139 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556207\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556207\" (2025-12-12 15:16:46 +0000 UTC to 2026-12-12 15:16:46 +0000 UTC (now=2025-12-12 16:16:55.905128556 +0000 UTC))" 2025-12-12T16:17:46.318021172+00:00 stderr F I1212 16:17:46.313502 1 tlsconfig.go:181] "Loaded client CA" index=0 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.313437449 +0000 UTC))" 2025-12-12T16:17:46.318021172+00:00 stderr F I1212 16:17:46.317642 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.317567061 +0000 UTC))" 2025-12-12T16:17:46.318021172+00:00 stderr F I1212 16:17:46.317679 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.317664403 +0000 UTC))" 2025-12-12T16:17:46.318021172+00:00 stderr F I1212 16:17:46.317719 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.317706124 +0000 UTC))" 2025-12-12T16:17:46.318021172+00:00 stderr F I1212 16:17:46.317744 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.317725855 +0000 UTC))" 2025-12-12T16:17:46.318021172+00:00 stderr F I1212 16:17:46.317770 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.317756195 +0000 UTC))" 2025-12-12T16:17:46.318021172+00:00 stderr F I1212 16:17:46.317791 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.317777176 +0000 UTC))" 2025-12-12T16:17:46.318021172+00:00 stderr F I1212 16:17:46.317814 1 tlsconfig.go:181] "Loaded client CA" index=7 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.317798676 +0000 UTC))" 2025-12-12T16:17:46.318021172+00:00 stderr F I1212 16:17:46.317853 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.317825017 +0000 UTC))" 2025-12-12T16:17:46.318021172+00:00 stderr F I1212 16:17:46.317879 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.317864868 +0000 UTC))" 2025-12-12T16:17:46.318021172+00:00 stderr F I1212 16:17:46.317902 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.317888629 +0000 UTC))" 2025-12-12T16:17:46.320004751+00:00 stderr F I1212 16:17:46.318246 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/tmp/serving-cert-1758180674/tls.crt::/tmp/serving-cert-1758180674/tls.key" certDetail="\"localhost\" [serving] validServingFor=[localhost] issuer=\"service-ca-controller-signer@1765556205\" (2025-12-12 16:16:45 +0000 UTC to 2025-12-12 16:16:46 +0000 UTC (now=2025-12-12 16:17:46.318224437 +0000 UTC))" 2025-12-12T16:17:46.320004751+00:00 stderr F I1212 16:17:46.318462 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556207\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556207\" (2025-12-12 15:16:46 +0000 UTC to 2026-12-12 15:16:46 +0000 UTC (now=2025-12-12 16:17:46.318441532 +0000 UTC))" 2025-12-12T16:18:47.585126300+00:00 stderr F E1212 16:18:47.584482 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-service-ca/leases/service-ca-controller-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:47.586264628+00:00 stderr F E1212 16:18:47.586207 1 leaderelection.go:436] error retrieving resource lock openshift-service-ca/service-ca-controller-lock: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-service-ca/leases/service-ca-controller-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:19:30.750359540+00:00 stderr F I1212 16:19:30.749378 1 reflector.go:376] Caches populated for *v1.Service 
from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:19:36.916144272+00:00 stderr F I1212 16:19:36.915147 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:19:42.662782707+00:00 stderr F I1212 16:19:42.661875 1 reflector.go:376] Caches populated for *v1.MutatingWebhookConfiguration from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:19:45.382246517+00:00 stderr F I1212 16:19:45.381904 1 reflector.go:376] Caches populated for *v1.Secret from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:19:53.967535855+00:00 stderr F I1212 16:19:53.966998 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:06.470107848+00:00 stderr F I1212 16:20:06.469558 1 reflector.go:376] Caches populated for *v1.CustomResourceDefinition from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:06.470157660+00:00 stderr F W1212 16:20:06.469967 1 crd.go:61] customresourcedefinition consoleplugins.console.openshift.io is annotated for ca bundle injection but does not use strategy "Webhook" 2025-12-12T16:20:08.015475119+00:00 stderr F I1212 16:20:08.015398 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:12.658692661+00:00 stderr F I1212 16:20:12.658113 1 reflector.go:376] Caches populated for *v1.APIService from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:19.509500841+00:00 stderr F I1212 16:20:19.508654 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:25.159858149+00:00 stderr F I1212 16:20:25.158799 1 reflector.go:376] Caches populated for *v1.ValidatingWebhookConfiguration from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:25:04.299959579+00:00 stderr F I1212 16:25:04.298999 1 configmap.go:109] updating configmap openstack/openshift-service-ca.crt with the service signing CA bundle 2025-12-12T16:25:05.006090502+00:00 stderr F I1212 16:25:05.002809 1 configmap.go:109] updating configmap openstack-operators/openshift-service-ca.crt with the service signing CA bundle 2025-12-12T16:26:15.055631197+00:00 stderr F I1212 16:26:15.055035 1 configmap.go:109] updating configmap service-telemetry/openshift-service-ca.crt with the service signing CA bundle 2025-12-12T16:26:42.039394838+00:00 stderr F I1212 16:26:42.038324 1 configmap.go:109] updating configmap cert-manager-operator/openshift-service-ca.crt with the service signing CA bundle 2025-12-12T16:26:58.252410563+00:00 stderr F E1212 16:26:58.251435 1 base_controller.go:277] "Unhandled Error" err="\"ServiceServingCertController\" controller failed to sync \"openshift-operators/observability-operator\", err: Operation cannot be fulfilled on services \"observability-operator\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:36.277247094+00:00 stderr F I1212 16:27:36.274210 1 configmap.go:109] updating configmap cert-manager/openshift-service-ca.crt with the service signing CA bundle 2025-12-12T16:40:06.473282698+00:00 stderr F W1212 16:40:06.472382 1 crd.go:61] customresourcedefinition consoleplugins.console.openshift.io is annotated for ca bundle injection but does not use strategy "Webhook" 2025-12-12T16:40:38.203551274+00:00 stderr F I1212 16:40:38.202172 1 
configmap.go:109] updating configmap openshift-must-gather-2sjxj/openshift-service-ca.crt with the service signing CA bundle ././@LongLink0000644000000000000000000000026600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage0000755000175000017500000000000015117043043033036 5ustar zuulzuul././@LongLink0000644000000000000000000000027700000000000011611 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/migrator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage0000755000175000017500000000000015117043062033037 5ustar zuulzuul././@LongLink0000644000000000000000000000030400000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/migrator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage0000644000175000017500000000352315117043043033043 0ustar zuulzuul2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.565032 2 migrator.go:18] FLAG: --add_dir_header="false" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569291 2 migrator.go:18] FLAG: --alsologtostderr="true" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569310 2 migrator.go:18] FLAG: --kube-api-burst="1000" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569320 2 migrator.go:18] FLAG: --kube-api-qps="40" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569328 2 migrator.go:18] FLAG: --kubeconfig="" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569333 2 migrator.go:18] FLAG: --log_backtrace_at=":0" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569341 2 migrator.go:18] FLAG: --log_dir="" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569347 2 migrator.go:18] FLAG: --log_file="" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569350 2 migrator.go:18] FLAG: --log_file_max_size="1800" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569353 2 migrator.go:18] FLAG: --logtostderr="true" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569357 2 migrator.go:18] FLAG: --one_output="false" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569359 2 migrator.go:18] FLAG: --skip_headers="false" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569362 2 migrator.go:18] FLAG: --skip_log_headers="false" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569365 2 migrator.go:18] FLAG: --stderrthreshold="2" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569368 2 migrator.go:18] FLAG: --v="2" 2025-12-12T16:16:47.574588023+00:00 stderr F I1212 16:16:47.569371 2 migrator.go:18] FLAG: --vmodule="" ././@LongLink0000644000000000000000000000031300000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/graceful-termination/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage0000755000175000017500000000000015117043062033037 5ustar zuulzuul././@LongLink0000644000000000000000000000032000000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/graceful-termination/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage0000644000175000017500000000011015117043043033030 0ustar zuulzuul2025-12-12T16:16:47.741531869+00:00 stdout F Waiting for termination... ././@LongLink0000644000000000000000000000024600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-5jnd7_428b39f5-eb1c-4f65-b7a4-eeb6e84860cc/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-oper0000755000175000017500000000000015117043043033102 5ustar zuulzuul././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-5jnd7_428b39f5-eb1c-4f65-b7a4-eeb6e84860cc/iptables-alerter/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-oper0000755000175000017500000000000015117043063033104 5ustar zuulzuul././@LongLink0000644000000000000000000000027400000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-5jnd7_428b39f5-eb1c-4f65-b7a4-eeb6e84860cc/iptables-alerter/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-oper0000644000175000017500000000017015117043043033102 0ustar zuulzuul2025-12-12T16:16:26.696441890+00:00 stdout F Fri Dec 12 16:16:26 UTC 2025 2025-12-12T16:16:36.196892476+00:00 stdout F ././@LongLink0000644000000000000000000000027600000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-86c45576b9-sfm9v_5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-regist0000755000175000017500000000000015117043043033023 5ustar zuulzuul././@LongLink0000644000000000000000000000033600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-86c45576b9-sfm9v_5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9/cluster-image-registry-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-regist0000755000175000017500000000000015117043062033024 5ustar zuulzuul././@LongLink0000644000000000000000000000034300000000000011603 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-86c45576b9-sfm9v_5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9/cluster-image-registry-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-regist0000644000175000017500000012703715117043043033037 0ustar zuulzuul2025-12-12T16:16:43.995142293+00:00 stdout F Overwriting root TLS certificate authority trust store 2025-12-12T16:16:44.512113715+00:00 stderr F I1212 16:16:44.510907 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 2025-12-12T16:16:44.514038142+00:00 stderr F I1212 16:16:44.513465 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:44.566118444+00:00 stderr F I1212 16:16:44.566036 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:44.567291022+00:00 stderr F I1212 16:16:44.567273 1 leaderelection.go:254] attempting to acquire leader lease openshift-image-registry/openshift-master-controllers... 2025-12-12T16:16:44.580020383+00:00 stderr F I1212 16:16:44.579626 1 leaderelection.go:268] successfully acquired lease openshift-image-registry/openshift-master-controllers 2025-12-12T16:16:44.580219188+00:00 stderr F I1212 16:16:44.580166 1 main.go:34] Cluster Image Registry Operator Version: 4a2f646ef 2025-12-12T16:16:44.580258559+00:00 stderr F I1212 16:16:44.580246 1 main.go:35] Go Version: go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime 2025-12-12T16:16:44.580299300+00:00 stderr F I1212 16:16:44.580287 1 main.go:36] Go OS/Arch: linux/amd64 2025-12-12T16:16:44.580375252+00:00 stderr F I1212 16:16:44.580350 1 main.go:67] Watching files [/var/run/configmaps/trusted-ca/tls-ca-bundle.pem /etc/secrets/tls.crt /etc/secrets/tls.key]... 
2025-12-12T16:16:44.585641100+00:00 stderr F I1212 16:16:44.581406 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-image-registry", Name:"openshift-master-controllers", UID:"6f2de6c7-5c77-4d4f-8684-b1971700a7c7", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"37037", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' cluster-image-registry-operator-86c45576b9-sfm9v_de745503-dbe2-4d5f-8369-29d6b1a85d4d became leader 2025-12-12T16:16:44.620252415+00:00 stderr F I1212 16:16:44.619433 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:44.622882539+00:00 stderr F I1212 16:16:44.622437 1 starter.go:89] FeatureGates initialized: knownFeatureGates=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:16:44.625133984+00:00 stderr F I1212 16:16:44.624452 1 metrics.go:88] Starting MetricsController 2025-12-12T16:16:44.625133984+00:00 stderr F I1212 16:16:44.624484 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-image-registry", Name:"cluster-image-registry-operator", UID:"a4c18a44-787c-4851-97ac-f3da87e8d0e3", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to 
featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:44.626871707+00:00 stderr F I1212 16:16:44.626367 1 clusteroperator.go:143] Starting ClusterOperatorStatusController 2025-12-12T16:16:44.626871707+00:00 stderr F I1212 16:16:44.626392 1 nodecadaemon.go:204] Starting NodeCADaemonController 2025-12-12T16:16:44.626871707+00:00 stderr F I1212 16:16:44.626461 1 imageregistrycertificates.go:211] Starting ImageRegistryCertificatesController 2025-12-12T16:16:44.626871707+00:00 stderr F I1212 16:16:44.626486 1 imageconfig.go:105] Starting ImageConfigController 2025-12-12T16:16:44.626871707+00:00 stderr F I1212 16:16:44.626621 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer 2025-12-12T16:16:44.626871707+00:00 stderr F I1212 16:16:44.626649 1 azurestackcloud.go:174] Starting AzureStackCloudController 2025-12-12T16:16:44.626871707+00:00 stderr F I1212 16:16:44.626669 
1 azurepathfixcontroller.go:202] Starting AzurePathFixController 2025-12-12T16:16:44.626871707+00:00 stderr F I1212 16:16:44.626690 1 awstagcontroller.go:160] Starting AWS Tag Controller 2025-12-12T16:16:44.632931005+00:00 stderr F W1212 16:16:44.631975 1 reflector.go:561] github.com/openshift/client-go/image/informers/externalversions/factory.go:125: failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io) 2025-12-12T16:16:44.632931005+00:00 stderr F E1212 16:16:44.632065 1 reflector.go:158] "Unhandled Error" err="github.com/openshift/client-go/image/informers/externalversions/factory.go:125: Failed to watch *v1.ImageStream: failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" logger="UnhandledError" 2025-12-12T16:16:44.671075596+00:00 stderr F W1212 16:16:44.670466 1 reflector.go:561] github.com/openshift/client-go/route/informers/externalversions/factory.go:125: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io) 2025-12-12T16:16:44.671075596+00:00 stderr F E1212 16:16:44.670566 1 reflector.go:158] "Unhandled Error" err="github.com/openshift/client-go/route/informers/externalversions/factory.go:125: Failed to watch *v1.Route: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io)" logger="UnhandledError" 2025-12-12T16:16:44.730242070+00:00 stderr F I1212 16:16:44.726989 1 awstagcontroller.go:167] Started AWS Tag Controller 2025-12-12T16:16:44.730242070+00:00 stderr F I1212 16:16:44.727063 1 clusteroperator.go:150] Started ClusterOperatorStatusController 2025-12-12T16:16:44.730242070+00:00 stderr F I1212 16:16:44.727288 1 base_controller.go:82] Caches are synced for LoggingSyncer 2025-12-12T16:16:44.730242070+00:00 stderr F I1212 16:16:44.727338 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ... 
2025-12-12T16:16:44.730242070+00:00 stderr F I1212 16:16:44.727567 1 nodecadaemon.go:211] Started NodeCADaemonController 2025-12-12T16:16:44.730242070+00:00 stderr F I1212 16:16:44.728409 1 azurestackcloud.go:181] Started AzureStackCloudController 2025-12-12T16:16:44.730242070+00:00 stderr F I1212 16:16:44.728990 1 azurepathfixcontroller.go:209] Started AzurePathFixController 2025-12-12T16:16:44.777972646+00:00 stderr F I1212 16:16:44.777002 1 reflector.go:368] Caches populated for *v1.ClusterRole from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:16:44.786159306+00:00 stderr F I1212 16:16:44.786045 1 generator.go:63] object *v1.ClusterOperator, Name=image-registry updated: removed:apiVersion="config.openshift.io/v1", removed:kind="ClusterOperator", changed:metadata.managedFields.2.time={"2025-11-03T09:40:47Z" -> "2025-12-12T16:16:44Z"}, changed:metadata.resourceVersion={"34624" -> "37072"}, changed:status.conditions.0.message={"Available: The deployment does not have available replicas\nNodeCADaemonAvailable: The daemon set node-ca does not have available replicas\nImagePrunerAvailable: Pruner CronJob has been created" -> "Available: The deployment does not have available replicas\nNodeCADaemonAvailable: The daemon set node-ca has available replicas\nImagePrunerAvailable: Pruner CronJob has been created"}, changed:status.conditions.0.reason={"NoReplicasAvailable::NodeCADaemonNoAvailableReplicas" -> "NoReplicasAvailable"}, changed:status.conditions.1.message={"Progressing: The deployment has not completed\nNodeCADaemonProgressing: The daemon set node-ca is deploying node pods" -> "Progressing: The deployment has not completed\nNodeCADaemonProgressing: The daemon set node-ca is deployed"}, changed:status.conditions.1.reason={"DeploymentNotCompleted::NodeCADaemonUnavailable" -> "DeploymentNotCompleted"} 2025-12-12T16:16:44.797701858+00:00 stderr F I1212 16:16:44.797557 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:16:44.827154727+00:00 stderr F I1212 16:16:44.826683 1 controllerimagepruner.go:386] Starting ImagePrunerController 2025-12-12T16:16:44.827154727+00:00 stderr F I1212 16:16:44.826892 1 imageregistrycertificates.go:218] Started ImageRegistryCertificatesController 2025-12-12T16:16:45.830828450+00:00 stderr F W1212 16:16:45.827075 1 reflector.go:561] github.com/openshift/client-go/image/informers/externalversions/factory.go:125: failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io) 2025-12-12T16:16:45.830828450+00:00 stderr F E1212 16:16:45.827973 1 reflector.go:158] "Unhandled Error" err="github.com/openshift/client-go/image/informers/externalversions/factory.go:125: Failed to watch *v1.ImageStream: failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" logger="UnhandledError" 2025-12-12T16:16:45.845327754+00:00 stderr F W1212 16:16:45.842955 1 reflector.go:561] github.com/openshift/client-go/route/informers/externalversions/factory.go:125: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io) 2025-12-12T16:16:45.845327754+00:00 stderr F E1212 16:16:45.843014 1 reflector.go:158] "Unhandled Error" err="github.com/openshift/client-go/route/informers/externalversions/factory.go:125: Failed to watch *v1.Route: failed to list *v1.Route: the server is currently unable to handle the request (get 
routes.route.openshift.io)" logger="UnhandledError" 2025-12-12T16:16:47.578964450+00:00 stderr F W1212 16:16:47.578167 1 reflector.go:561] github.com/openshift/client-go/image/informers/externalversions/factory.go:125: failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io) 2025-12-12T16:16:47.578964450+00:00 stderr F E1212 16:16:47.578702 1 reflector.go:158] "Unhandled Error" err="github.com/openshift/client-go/image/informers/externalversions/factory.go:125: Failed to watch *v1.ImageStream: failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" logger="UnhandledError" 2025-12-12T16:16:47.812309737+00:00 stderr F W1212 16:16:47.811531 1 reflector.go:561] github.com/openshift/client-go/route/informers/externalversions/factory.go:125: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io) 2025-12-12T16:16:47.812309737+00:00 stderr F E1212 16:16:47.811582 1 reflector.go:158] "Unhandled Error" err="github.com/openshift/client-go/route/informers/externalversions/factory.go:125: Failed to watch *v1.Route: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io)" logger="UnhandledError" 2025-12-12T16:16:52.597832352+00:00 stderr F W1212 16:16:52.597211 1 reflector.go:561] github.com/openshift/client-go/image/informers/externalversions/factory.go:125: failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io) 2025-12-12T16:16:52.597880533+00:00 stderr F E1212 16:16:52.597830 1 reflector.go:158] "Unhandled Error" err="github.com/openshift/client-go/image/informers/externalversions/factory.go:125: Failed to watch *v1.ImageStream: failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" logger="UnhandledError" 2025-12-12T16:16:53.846729972+00:00 stderr F W1212 16:16:53.845530 1 reflector.go:561] github.com/openshift/client-go/route/informers/externalversions/factory.go:125: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io) 2025-12-12T16:16:53.846787343+00:00 stderr F E1212 16:16:53.846714 1 reflector.go:158] "Unhandled Error" err="github.com/openshift/client-go/route/informers/externalversions/factory.go:125: Failed to watch *v1.Route: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io)" logger="UnhandledError" 2025-12-12T16:16:59.335929027+00:00 stderr F I1212 16:16:59.334941 1 reflector.go:368] Caches populated for *v1.ImageStream from github.com/openshift/client-go/image/informers/externalversions/factory.go:125 2025-12-12T16:16:59.428631050+00:00 stderr F I1212 16:16:59.425307 1 metrics.go:94] Started MetricsController 2025-12-12T16:17:03.765203983+00:00 stderr F I1212 16:17:03.764328 1 reflector.go:368] Caches populated for *v1.Route from github.com/openshift/client-go/route/informers/externalversions/factory.go:125 2025-12-12T16:17:03.827481384+00:00 stderr F I1212 16:17:03.827413 1 imageconfig.go:112] Started ImageConfigController 2025-12-12T16:17:03.827524195+00:00 stderr F I1212 16:17:03.827474 1 controller.go:454] Starting Controller 2025-12-12T16:17:03.869808887+00:00 stderr F I1212 16:17:03.869698 1 controller.go:340] object changed: *v1.Config, Name=cluster (status=true): 
changed:status.conditions.3.lastTransitionTime={"2025-11-02T08:08:51Z" -> "2025-12-12T16:17:03Z"}, added:status.conditions.3.message="The deployment does not have available replicas", added:status.conditions.3.reason="Unavailable", changed:status.conditions.3.status={"False" -> "True"} 2025-12-12T16:17:03.889451697+00:00 stderr F I1212 16:17:03.889371 1 generator.go:63] object *v1.ClusterOperator, Name=image-registry updated: changed:metadata.managedFields.2.time={"2025-12-12T16:16:44Z" -> "2025-12-12T16:17:03Z"}, changed:metadata.resourceVersion={"37072" -> "38323"}, changed:status.conditions.2.lastTransitionTime={"2025-11-03T08:44:28Z" -> "2025-12-12T16:17:03Z"}, added:status.conditions.2.message="Degraded: The deployment does not have available replicas", changed:status.conditions.2.reason={"AsExpected" -> "Unavailable"}, changed:status.conditions.2.status={"False" -> "True"} 2025-12-12T16:17:17.393786617+00:00 stderr F I1212 16:17:17.393703 1 controller.go:340] object changed: *v1.Config, Name=cluster (status=true): changed:status.conditions.1.lastTransitionTime={"2025-11-03T09:40:46Z" -> "2025-12-12T16:17:17Z"}, changed:status.conditions.1.message={"The deployment has not completed" -> "The registry is ready"}, changed:status.conditions.1.reason={"DeploymentNotCompleted" -> "Ready"}, changed:status.conditions.1.status={"True" -> "False"}, changed:status.conditions.2.lastTransitionTime={"2025-11-03T09:40:46Z" -> "2025-12-12T16:17:17Z"}, changed:status.conditions.2.message={"The deployment does not have available replicas" -> "The registry is ready"}, changed:status.conditions.2.reason={"NoReplicasAvailable" -> "Ready"}, changed:status.conditions.2.status={"False" -> "True"}, changed:status.conditions.3.lastTransitionTime={"2025-12-12T16:17:03Z" -> "2025-12-12T16:17:17Z"}, removed:status.conditions.3.message="The deployment does not have available replicas", removed:status.conditions.3.reason="Unavailable", changed:status.conditions.3.status={"True" -> "False"}, changed:status.readyReplicas={"0.000000" -> "1.000000"} 2025-12-12T16:17:17.416620922+00:00 stderr F I1212 16:17:17.416524 1 generator.go:63] object *v1.ClusterOperator, Name=image-registry updated: changed:metadata.managedFields.2.time={"2025-12-12T16:17:03Z" -> "2025-12-12T16:17:17Z"}, changed:metadata.resourceVersion={"38323" -> "38465"}, changed:status.conditions.0.lastTransitionTime={"2025-11-03T09:40:46Z" -> "2025-12-12T16:17:17Z"}, changed:status.conditions.0.message={"Available: The deployment does not have available replicas\nNodeCADaemonAvailable: The daemon set node-ca has available replicas\nImagePrunerAvailable: Pruner CronJob has been created" -> "Available: The registry is ready\nNodeCADaemonAvailable: The daemon set node-ca has available replicas\nImagePrunerAvailable: Pruner CronJob has been created"}, changed:status.conditions.0.reason={"NoReplicasAvailable" -> "Ready"}, changed:status.conditions.0.status={"False" -> "True"}, changed:status.conditions.1.lastTransitionTime={"2025-11-03T09:40:46Z" -> "2025-12-12T16:17:17Z"}, changed:status.conditions.1.message={"Progressing: The deployment has not completed\nNodeCADaemonProgressing: The daemon set node-ca is deployed" -> "Progressing: The registry is ready\nNodeCADaemonProgressing: The daemon set node-ca is deployed"}, changed:status.conditions.1.reason={"DeploymentNotCompleted" -> "Ready"}, changed:status.conditions.1.status={"True" -> "False"}, changed:status.conditions.2.lastTransitionTime={"2025-12-12T16:17:03Z" -> "2025-12-12T16:17:17Z"}, 
removed:status.conditions.2.message="Degraded: The deployment does not have available replicas", changed:status.conditions.2.reason={"Unavailable" -> "AsExpected"}, changed:status.conditions.2.status={"True" -> "False"} 2025-12-12T16:17:17.492706015+00:00 stderr F I1212 16:17:17.492599 1 controller.go:340] object changed: *v1.Config, Name=cluster (status=true): changed:status.conditions.1.lastTransitionTime={"2025-11-03T09:40:46Z" -> "2025-12-12T16:17:17Z"}, changed:status.conditions.1.message={"The deployment has not completed" -> "The registry is ready"}, changed:status.conditions.1.reason={"DeploymentNotCompleted" -> "Ready"}, changed:status.conditions.1.status={"True" -> "False"}, changed:status.conditions.2.lastTransitionTime={"2025-11-03T09:40:46Z" -> "2025-12-12T16:17:17Z"}, changed:status.conditions.2.message={"The deployment does not have available replicas" -> "The registry is ready"}, changed:status.conditions.2.reason={"NoReplicasAvailable" -> "Ready"}, changed:status.conditions.2.status={"False" -> "True"}, changed:status.conditions.3.lastTransitionTime={"2025-12-12T16:17:03Z" -> "2025-12-12T16:17:17Z"}, removed:status.conditions.3.message="The deployment does not have available replicas", removed:status.conditions.3.reason="Unavailable", changed:status.conditions.3.status={"True" -> "False"}, changed:status.readyReplicas={"0.000000" -> "1.000000"} 2025-12-12T16:17:17.499928292+00:00 stderr F E1212 16:17:17.499822 1 controller.go:379] unable to sync: Operation cannot be fulfilled on configs.imageregistry.operator.openshift.io "cluster": the object has been modified; please apply your changes to the latest version and try again, requeuing 2025-12-12T16:18:44.594868502+00:00 stderr F E1212 16:18:44.594766 1 leaderelection.go:429] Failed to update lock optimitically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-image-registry/leases/openshift-master-controllers?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:44.595650951+00:00 stderr F E1212 16:18:44.595599 1 leaderelection.go:436] error retrieving resource lock openshift-image-registry/openshift-master-controllers: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-image-registry/leases/openshift-master-controllers?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:19:27.251600364+00:00 stderr F I1212 16:19:27.251026 1 reflector.go:368] Caches populated for *v1.ClusterRole from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:19:29.794301747+00:00 stderr F I1212 16:19:29.793739 1 reflector.go:368] Caches populated for *v1.Secret from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:19:32.422558816+00:00 stderr F I1212 16:19:32.422478 1 reflector.go:368] Caches populated for *v1.Deployment from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:19:35.625618269+00:00 stderr F I1212 16:19:35.625526 1 reflector.go:368] Caches populated for *v1.Infrastructure from github.com/openshift/client-go/config/informers/externalversions/factory.go:125 2025-12-12T16:19:38.369912262+00:00 stderr F I1212 16:19:38.369839 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:19:39.289261945+00:00 stderr F I1212 16:19:39.287656 1 reflector.go:368] Caches populated for *v1.Config from github.com/openshift/client-go/imageregistry/informers/externalversions/factory.go:125 2025-12-12T16:19:50.848388301+00:00 stderr F I1212 
16:19:50.847628 1 reflector.go:368] Caches populated for *v1.FeatureGate from github.com/openshift/client-go/config/informers/externalversions/factory.go:125 2025-12-12T16:19:56.660080219+00:00 stderr F I1212 16:19:56.659799 1 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:19:57.581649968+00:00 stderr F I1212 16:19:57.581441 1 reflector.go:368] Caches populated for *v1.ClusterVersion from github.com/openshift/client-go/config/informers/externalversions/factory.go:125 2025-12-12T16:19:59.014695629+00:00 stderr F I1212 16:19:59.014618 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:20:02.901718833+00:00 stderr F I1212 16:20:02.901019 1 reflector.go:368] Caches populated for *v1.DaemonSet from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:20:03.920682178+00:00 stderr F I1212 16:20:03.920622 1 reflector.go:368] Caches populated for *v1.ClusterOperator from github.com/openshift/client-go/config/informers/externalversions/factory.go:125 2025-12-12T16:20:04.310485575+00:00 stderr F I1212 16:20:04.309998 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:20:08.890418738+00:00 stderr F I1212 16:20:08.890340 1 reflector.go:368] Caches populated for *v1.PodDisruptionBudget from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:20:09.383576910+00:00 stderr F I1212 16:20:09.383474 1 reflector.go:368] Caches populated for *v1.ClusterRoleBinding from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:20:12.490855817+00:00 stderr F I1212 16:20:12.490787 1 reflector.go:368] Caches populated for *v1.ImagePruner from github.com/openshift/client-go/imageregistry/informers/externalversions/factory.go:125 2025-12-12T16:20:13.462130943+00:00 stderr F I1212 16:20:13.461642 1 reflector.go:368] Caches populated for *v1.ServiceAccount from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:20:20.174333544+00:00 stderr F I1212 16:20:20.173745 1 reflector.go:368] Caches populated for *v1.Proxy from github.com/openshift/client-go/config/informers/externalversions/factory.go:125 2025-12-12T16:20:23.937692374+00:00 stderr F I1212 16:20:23.937615 1 reflector.go:368] Caches populated for *v1.Job from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:20:28.292432991+00:00 stderr F I1212 16:20:28.291779 1 reflector.go:368] Caches populated for *v1.CronJob from k8s.io/client-go/informers/factory.go:160 2025-12-12T16:20:39.648594979+00:00 stderr F I1212 16:20:39.647986 1 reflector.go:368] Caches populated for *v1.Image from github.com/openshift/client-go/config/informers/externalversions/factory.go:125 2025-12-12T16:26:39.865407436+00:00 stderr F I1212 16:26:39.864484 1 generator.go:63] object *v1.Secret, Namespace=openshift-image-registry, Name=installation-pull-secrets updated: changed:data..dockerconfigjson={ -> }, changed:metadata.annotations.imageregistry.operator.openshift.io/checksum={"sha256:085fdb2709b57d501872b4e20b38e3618d21be40f24851b4fad2074469e1fa6d" -> "sha256:134d2023417aa99dc70c099f12731fc3d94cb8fe5fef3d499d5c1ff70d124cfb"}, changed:metadata.managedFields.0.time={"2025-11-03T09:38:57Z" -> "2025-12-12T16:26:39Z"}, changed:metadata.resourceVersion={"33773" -> "40982"} 2025-12-12T16:26:39.884608411+00:00 stderr F I1212 16:26:39.884472 1 apps.go:155] Deployment "openshift-image-registry/image-registry" changes: 
{"metadata":{"annotations":{"imageregistry.operator.openshift.io/checksum":"sha256:d23fe596b7d9fc259fb157109543a2f06ea79d075813138c3877cc58b09f333c","operator.openshift.io/spec-hash":"e94145b8bfa5fed31d44791402fd166c99d8742aaf3aa863300dac7192876016"}},"spec":{"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"imageregistry.operator.openshift.io/dependencies-checksum":"sha256:e847f40829a71bf8250456acdd010e475b1519e05c57e70ed4f1b28e3fea8414"}},"spec":{"containers":[{"command":["/bin/sh","-c","mkdir -p /etc/pki/ca-trust/extracted/edk2 /etc/pki/ca-trust/extracted/java /etc/pki/ca-trust/extracted/openssl /etc/pki/ca-trust/extracted/pem \u0026\u0026 update-ca-trust extract --output /etc/pki/ca-trust/extracted/ \u0026\u0026 exec /usr/bin/dockerregistry"],"env":[{"name":"REGISTRY_STORAGE","value":"filesystem"},{"name":"REGISTRY_STORAGE_FILESYSTEM_ROOTDIRECTORY","value":"/registry"},{"name":"REGISTRY_HTTP_ADDR","value":":5000"},{"name":"REGISTRY_HTTP_NET","value":"tcp"},{"name":"REGISTRY_HTTP_SECRET","value":"15ea86fe7fd20108cc09cc69d7f57ed1b7e1d87f5a6e0fa46fc90f41636c8647af8785432e6c579448e463c0f1a63039c63836565af1e25fb2e1809cad0a283b"},{"name":"REGISTRY_LOG_LEVEL","value":"info"},{"name":"REGISTRY_OPENSHIFT_QUOTA_ENABLED","value":"true"},{"name":"REGISTRY_STORAGE_CACHE_BLOBDESCRIPTOR","value":"inmemory"},{"name":"REGISTRY_STORAGE_DELETE_ENABLED","value":"true"},{"name":"REGISTRY_HEALTH_STORAGEDRIVER_ENABLED","value":"true"},{"name":"REGISTRY_HEALTH_STORAGEDRIVER_INTERVAL","value":"10s"},{"name":"REGISTRY_HEALTH_STORAGEDRIVER_THRESHOLD","value":"1"},{"name":"REGISTRY_OPENSHIFT_METRICS_ENABLED","value":"true"},{"name":"REGISTRY_OPENSHIFT_SERVER_ADDR","value":"image-registry.openshift-image-registry.svc:5000"},{"name":"REGISTRY_HTTP_TLS_CERTIFICATE","value":"/etc/secrets/tls.crt"},{"name":"REGISTRY_HTTP_TLS_KEY","value":"/etc/secrets/tls.key"}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dcb03ccba25366bbdf74cbab6738e7ef1f97f62760886ec445a40cdf29b60418","lifecycle":{"preStop":{"exec":{"command":["sleep","25"]}}},"livenessProbe":{"httpGet":{"path":"/healthz","port":5000,"scheme":"HTTPS"},"initialDelaySeconds":5,"timeoutSeconds":5},"name":"registry","ports":[{"containerPort":5000,"protocol":"TCP"}],"readinessProbe":{"httpGet":{"path":"/healthz","port":5000,"scheme":"HTTPS"},"initialDelaySeconds":15,"timeoutSeconds":5},"resources":{"requests":{"cpu":"100m","memory":"256Mi"}},"securityContext":{"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/registry","name":"registry-storage"},{"mountPath":"/etc/secrets","name":"registry-tls"},{"mountPath":"/etc/pki/ca-trust/extracted","name":"ca-trust-extracted"},{"mountPath":"/etc/pki/ca-trust/source/anchors","name":"registry-certificates"},{"mountPath":"/usr/share/pki/ca-trust-source","name":"trusted-ca"},{"mountPath":"/var/lib/kubelet/","name":"installation-pull-secrets"},{"mountPath":"/var/run/secrets/openshift/serviceaccount","name":"bound-sa-token","readOnly":true}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"volumes":[{"name":"registry-storage","persistentVolumeClaim":{"claimName":"crc-image-registry-storage"}},{"name":"registry-tls","projected":{"sources":[{"secret":{"name":"image-registry-tls"}}]}},{"emptyDir":{},"name":"ca-trust-extracted"},{"configMap":{"name":"image-registry-certificates"},"name":"registry-certificates"},{"configMap":{"items":[{"key":"ca-bundle.crt","path":"anchors/ca-bundle.crt"}],"name":"trusted-ca","
optional":true},"name":"trusted-ca"},{"name":"installation-pull-secrets","secret":{"items":[{"key":".dockerconfigjson","path":"config.json"}],"optional":true,"secretName":"installation-pull-secrets"}},{"name":"bound-sa-token","projected":{"sources":[{"serviceAccountToken":{"audience":"openshift","path":"token"}}]}}]}}}} 2025-12-12T16:26:39.899101807+00:00 stderr F I1212 16:26:39.898977 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-image-registry", Name:"cluster-image-registry-operator", UID:"a4c18a44-787c-4851-97ac-f3da87e8d0e3", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/image-registry -n openshift-image-registry because it changed 2025-12-12T16:26:39.901886967+00:00 stderr F I1212 16:26:39.900700 1 generator.go:63] object *v1.Deployment, Namespace=openshift-image-registry, Name=image-registry updated: changed:metadata.annotations.imageregistry.operator.openshift.io/checksum={"sha256:139986d456a8523e223a7bdbae4bf75e50ee56eb6703e01854cd742422c172ed" -> "sha256:d23fe596b7d9fc259fb157109543a2f06ea79d075813138c3877cc58b09f333c"}, changed:metadata.annotations.operator.openshift.io/spec-hash={"0da3aa3810d8130bdf08d8a57b4039d1da5044977d20c11a075f665d12963ac1" -> "e94145b8bfa5fed31d44791402fd166c99d8742aaf3aa863300dac7192876016"}, changed:metadata.generation={"4.000000" -> "5.000000"}, changed:metadata.managedFields.0.manager={"cluster-image-registry-operator" -> "kube-controller-manager"}, added:metadata.managedFields.0.subresource="status", changed:metadata.managedFields.0.time={"2025-11-03T09:38:57Z" -> "2025-12-12T16:17:17Z"}, changed:metadata.managedFields.1.manager={"kube-controller-manager" -> "cluster-image-registry-operator"}, removed:metadata.managedFields.1.subresource="status", changed:metadata.managedFields.1.time={"2025-12-12T16:17:17Z" -> "2025-12-12T16:26:39Z"}, changed:metadata.resourceVersion={"38463" -> "40983"}, changed:spec.template.metadata.annotations.imageregistry.operator.openshift.io/dependencies-checksum={"sha256:e5e688ac594438d0527a62648f5fb19a6628fa965fb9dde9880c9f9462e2cb93" -> "sha256:e847f40829a71bf8250456acdd010e475b1519e05c57e70ed4f1b28e3fea8414"} 2025-12-12T16:26:39.901886967+00:00 stderr F I1212 16:26:39.901651 1 controller.go:340] object changed: *v1.Config, Name=cluster (status=true): changed:status.conditions.1.lastTransitionTime={"2025-12-12T16:17:17Z" -> "2025-12-12T16:26:39Z"}, changed:status.conditions.1.message={"The registry is ready" -> "The deployment has not completed"}, changed:status.conditions.1.reason={"Ready" -> "DeploymentNotCompleted"}, changed:status.conditions.1.status={"False" -> "True"}, changed:status.conditions.2.message={"The registry is ready" -> "The registry has minimum availability"}, changed:status.conditions.2.reason={"Ready" -> "MinimumAvailability"}, changed:status.generations.1.lastGeneration={"4.000000" -> "5.000000"} 2025-12-12T16:26:39.927287948+00:00 stderr F I1212 16:26:39.927163 1 generator.go:63] object *v1.ClusterOperator, Name=image-registry updated: removed:apiVersion="config.openshift.io/v1", removed:kind="ClusterOperator", changed:metadata.managedFields.2.time={"2025-12-12T16:17:17Z" -> "2025-12-12T16:26:39Z"}, changed:metadata.resourceVersion={"38465" -> "40989"}, changed:status.conditions.0.message={"Available: The registry is ready\nNodeCADaemonAvailable: The daemon set node-ca has available replicas\nImagePrunerAvailable: Pruner CronJob has been created" -> "Available: The registry has minimum 
availability\nNodeCADaemonAvailable: The daemon set node-ca has available replicas\nImagePrunerAvailable: Pruner CronJob has been created"}, changed:status.conditions.0.reason={"Ready" -> "MinimumAvailability"}, changed:status.conditions.1.lastTransitionTime={"2025-12-12T16:17:17Z" -> "2025-12-12T16:26:39Z"}, changed:status.conditions.1.message={"Progressing: The registry is ready\nNodeCADaemonProgressing: The daemon set node-ca is deployed" -> "Progressing: The deployment has not completed\nNodeCADaemonProgressing: The daemon set node-ca is deployed"}, changed:status.conditions.1.reason={"Ready" -> "DeploymentNotCompleted"}, changed:status.conditions.1.status={"False" -> "True"} 2025-12-12T16:26:39.984963815+00:00 stderr F I1212 16:26:39.984893 1 apps.go:155] Deployment "openshift-image-registry/image-registry" changes: {"spec":{"revisionHistoryLimit":null,"template":{"spec":{"containers":[{"command":["/bin/sh","-c","mkdir -p /etc/pki/ca-trust/extracted/edk2 /etc/pki/ca-trust/extracted/java /etc/pki/ca-trust/extracted/openssl /etc/pki/ca-trust/extracted/pem \u0026\u0026 update-ca-trust extract --output /etc/pki/ca-trust/extracted/ \u0026\u0026 exec /usr/bin/dockerregistry"],"env":[{"name":"REGISTRY_STORAGE","value":"filesystem"},{"name":"REGISTRY_STORAGE_FILESYSTEM_ROOTDIRECTORY","value":"/registry"},{"name":"REGISTRY_HTTP_ADDR","value":":5000"},{"name":"REGISTRY_HTTP_NET","value":"tcp"},{"name":"REGISTRY_HTTP_SECRET","value":"15ea86fe7fd20108cc09cc69d7f57ed1b7e1d87f5a6e0fa46fc90f41636c8647af8785432e6c579448e463c0f1a63039c63836565af1e25fb2e1809cad0a283b"},{"name":"REGISTRY_LOG_LEVEL","value":"info"},{"name":"REGISTRY_OPENSHIFT_QUOTA_ENABLED","value":"true"},{"name":"REGISTRY_STORAGE_CACHE_BLOBDESCRIPTOR","value":"inmemory"},{"name":"REGISTRY_STORAGE_DELETE_ENABLED","value":"true"},{"name":"REGISTRY_HEALTH_STORAGEDRIVER_ENABLED","value":"true"},{"name":"REGISTRY_HEALTH_STORAGEDRIVER_INTERVAL","value":"10s"},{"name":"REGISTRY_HEALTH_STORAGEDRIVER_THRESHOLD","value":"1"},{"name":"REGISTRY_OPENSHIFT_METRICS_ENABLED","value":"true"},{"name":"REGISTRY_OPENSHIFT_SERVER_ADDR","value":"image-registry.openshift-image-registry.svc:5000"},{"name":"REGISTRY_HTTP_TLS_CERTIFICATE","value":"/etc/secrets/tls.crt"},{"name":"REGISTRY_HTTP_TLS_KEY","value":"/etc/secrets/tls.key"}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dcb03ccba25366bbdf74cbab6738e7ef1f97f62760886ec445a40cdf29b60418","lifecycle":{"preStop":{"exec":{"command":["sleep","25"]}}},"livenessProbe":{"httpGet":{"path":"/healthz","port":5000,"scheme":"HTTPS"},"initialDelaySeconds":5,"timeoutSeconds":5},"name":"registry","ports":[{"containerPort":5000,"protocol":"TCP"}],"readinessProbe":{"httpGet":{"path":"/healthz","port":5000,"scheme":"HTTPS"},"initialDelaySeconds":15,"timeoutSeconds":5},"resources":{"requests":{"cpu":"100m","memory":"256Mi"}},"securityContext":{"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/registry","name":"registry-storage"},{"mountPath":"/etc/secrets","name":"registry-tls"},{"mountPath":"/etc/pki/ca-trust/extracted","name":"ca-trust-extracted"},{"mountPath":"/etc/pki/ca-trust/source/anchors","name":"registry-certificates"},{"mountPath":"/usr/share/pki/ca-trust-source","name":"trusted-ca"},{"mountPath":"/var/lib/kubelet/","name":"installation-pull-secrets"},{"mountPath":"/var/run/secrets/openshift/serviceaccount","name":"bound-sa-token","readOnly":true}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"volum
es":[{"name":"registry-storage","persistentVolumeClaim":{"claimName":"crc-image-registry-storage"}},{"name":"registry-tls","projected":{"sources":[{"secret":{"name":"image-registry-tls"}}]}},{"emptyDir":{},"name":"ca-trust-extracted"},{"configMap":{"name":"image-registry-certificates"},"name":"registry-certificates"},{"configMap":{"items":[{"key":"ca-bundle.crt","path":"anchors/ca-bundle.crt"}],"name":"trusted-ca","optional":true},"name":"trusted-ca"},{"name":"installation-pull-secrets","secret":{"items":[{"key":".dockerconfigjson","path":"config.json"}],"optional":true,"secretName":"installation-pull-secrets"}},{"name":"bound-sa-token","projected":{"sources":[{"serviceAccountToken":{"audience":"openshift","path":"token"}}]}}]}}}} 2025-12-12T16:26:39.996950808+00:00 stderr F I1212 16:26:39.996845 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-image-registry", Name:"cluster-image-registry-operator", UID:"a4c18a44-787c-4851-97ac-f3da87e8d0e3", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/image-registry -n openshift-image-registry because it changed 2025-12-12T16:26:39.998517738+00:00 stderr F I1212 16:26:39.998478 1 generator.go:63] object *v1.Deployment, Namespace=openshift-image-registry, Name=image-registry updated: changed:metadata.resourceVersion={"40988" -> "40999"}, changed:status.conditions.1.message={"Created new replica set \"image-registry-5d9d95bf5b\"" -> "ReplicaSet \"image-registry-5d9d95bf5b\" is progressing."}, changed:status.conditions.1.reason={"NewReplicaSetCreated" -> "ReplicaSetUpdated"}, changed:status.observedGeneration={"4.000000" -> "5.000000"}, changed:status.replicas={"1.000000" -> "2.000000"}, added:status.unavailableReplicas="1.000000" 2025-12-12T16:26:39.999490722+00:00 stderr F I1212 16:26:39.999472 1 controller.go:340] object changed: *v1.Config, Name=cluster (status=true): changed:status.conditions.1.lastTransitionTime={"2025-12-12T16:17:17Z" -> "2025-12-12T16:26:39Z"}, changed:status.conditions.1.message={"The registry is ready" -> "The deployment has not completed"}, changed:status.conditions.1.reason={"Ready" -> "DeploymentNotCompleted"}, changed:status.conditions.1.status={"False" -> "True"}, changed:status.conditions.2.message={"The registry is ready" -> "The registry has minimum availability"}, changed:status.conditions.2.reason={"Ready" -> "MinimumAvailability"}, changed:status.generations.1.lastGeneration={"4.000000" -> "5.000000"} 2025-12-12T16:26:40.009677249+00:00 stderr F E1212 16:26:40.009603 1 controller.go:379] unable to sync: Operation cannot be fulfilled on configs.imageregistry.operator.openshift.io "cluster": the object has been modified; please apply your changes to the latest version and try again, requeuing 2025-12-12T16:27:03.094058125+00:00 stderr F I1212 16:27:03.093993 1 controller.go:340] object changed: *v1.Config, Name=cluster (status=true): changed:status.readyReplicas={"1.000000" -> "2.000000"} 2025-12-12T16:27:03.161813980+00:00 stderr F I1212 16:27:03.161734 1 controller.go:340] object changed: *v1.Config, Name=cluster (status=true): changed:status.conditions.1.lastTransitionTime={"2025-12-12T16:26:39Z" -> "2025-12-12T16:27:03Z"}, changed:status.conditions.1.message={"The deployment has not completed" -> "The registry is ready"}, changed:status.conditions.1.reason={"DeploymentNotCompleted" -> "Ready"}, changed:status.conditions.1.status={"True" -> "False"}, changed:status.conditions.2.message={"The registry has 
minimum availability" -> "The registry is ready"}, changed:status.conditions.2.reason={"MinimumAvailability" -> "Ready"}, changed:status.readyReplicas={"2.000000" -> "1.000000"} 2025-12-12T16:27:03.192239610+00:00 stderr F I1212 16:27:03.188748 1 generator.go:63] object *v1.ClusterOperator, Name=image-registry updated: changed:metadata.managedFields.2.time={"2025-12-12T16:26:39Z" -> "2025-12-12T16:27:03Z"}, changed:metadata.resourceVersion={"40989" -> "41810"}, changed:status.conditions.0.message={"Available: The registry has minimum availability\nNodeCADaemonAvailable: The daemon set node-ca has available replicas\nImagePrunerAvailable: Pruner CronJob has been created" -> "Available: The registry is ready\nNodeCADaemonAvailable: The daemon set node-ca has available replicas\nImagePrunerAvailable: Pruner CronJob has been created"}, changed:status.conditions.0.reason={"MinimumAvailability" -> "Ready"}, changed:status.conditions.1.lastTransitionTime={"2025-12-12T16:26:39Z" -> "2025-12-12T16:27:03Z"}, changed:status.conditions.1.message={"Progressing: The deployment has not completed\nNodeCADaemonProgressing: The daemon set node-ca is deployed" -> "Progressing: The registry is ready\nNodeCADaemonProgressing: The daemon set node-ca is deployed"}, changed:status.conditions.1.reason={"DeploymentNotCompleted" -> "Ready"}, changed:status.conditions.1.status={"True" -> "False"} ././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-5cdf44d969-kcw92_124ec2f9-0e23-47da-b25f-66a13947465e/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015117043043033051 5ustar zuulzuul././@LongLink0000644000000000000000000000030400000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-5cdf44d969-kcw92_124ec2f9-0e23-47da-b25f-66a13947465e/olm-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015117043063033053 5ustar zuulzuul././@LongLink0000644000000000000000000000031100000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-5cdf44d969-kcw92_124ec2f9-0e23-47da-b25f-66a13947465e/olm-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000644000175000017500000110043215117043043033054 0ustar zuulzuul2025-12-12T16:16:45.264124865+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="log level info" 2025-12-12T16:16:45.264124865+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="TLS keys set, using https for metrics" 2025-12-12T16:16:45.433229354+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="skipping irrelevant gvr" gvr="rbac.authorization.k8s.io/v1, Resource=rolebindings" 2025-12-12T16:16:45.433229354+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="skipping irrelevant gvr" gvr="rbac.authorization.k8s.io/v1, Resource=roles" 2025-12-12T16:16:45.433229354+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="skipping irrelevant gvr" gvr="/v1, Resource=serviceaccounts" 2025-12-12T16:16:45.433229354+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="skipping irrelevant gvr" gvr="/v1, 
Resource=configmaps" 2025-12-12T16:16:45.433229354+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="skipping irrelevant gvr" gvr="/v1, Resource=services" 2025-12-12T16:16:45.433229354+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="skipping irrelevant gvr" gvr="/v1, Resource=pods" 2025-12-12T16:16:45.433229354+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="skipping irrelevant gvr" gvr="batch/v1, Resource=jobs" 2025-12-12T16:16:45.472229276+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="detected ability to filter informers" canFilter=true 2025-12-12T16:16:45.506836911+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="OpenShift Proxy API available - setting up watch for Proxy type" 2025-12-12T16:16:45.506836911+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="OpenShift Proxy query will be used to fetch cluster proxy configuration" 2025-12-12T16:16:45.506836911+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="[CSV NS Plug-in] setting up csv namespace plug-in for namespaces: []" 2025-12-12T16:16:45.506836911+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="[CSV NS Plug-in] registering namespace informer" 2025-12-12T16:16:45.506836911+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="[CSV NS Plug-in] setting up namespace: " 2025-12-12T16:16:45.506836911+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="[CSV NS Plug-in] registered csv queue informer for: " 2025-12-12T16:16:45.506836911+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="[CSV NS Plug-in] finished setting up csv namespace labeler plugin" 2025-12-12T16:16:45.512217752+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="connection established. cluster-version: v1.33.5" 2025-12-12T16:16:45.512217752+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="operator ready" 2025-12-12T16:16:45.512217752+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="starting informers..." 2025-12-12T16:16:45.512217752+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="informers started" 2025-12-12T16:16:45.512217752+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="waiting for caches to sync..." 2025-12-12T16:16:45.613261319+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="starting workers..." 
2025-12-12T16:16:45.613261319+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="Initializing cluster operator monitor for package server" 2025-12-12T16:16:45.613261319+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="monitoring the following components [operator-lifecycle-manager-packageserver]" monitor=clusteroperator 2025-12-12T16:16:45.625396545+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="starting clusteroperator monitor loop" monitor=clusteroperator 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","source":"kind source: *v1.Deployment"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","source":"kind source: *v1.Role"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","source":"kind source: *v1.RoleBinding"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","source":"kind source: *v2.OperatorCondition"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1.Operator"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator-condition-generator","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v2.OperatorCondition"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1.Deployment"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1.Namespace"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"subscription","controllerGroup":"operators.coreos.com","controllerKind":"Subscription","source":"kind source: *v1alpha1.InstallPlan"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting 
EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1.CustomResourceDefinition"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1.APIService"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusteroperator","controllerGroup":"config.openshift.io","controllerKind":"ClusterOperator","source":"channel source: 0xc0003c10a0"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1alpha1.Subscription"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1alpha1.ClusterServiceVersion"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1.Deployment"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"subscription","controllerGroup":"operators.coreos.com","controllerKind":"Subscription","source":"kind source: *v1alpha1.Subscription"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1alpha1.ClusterServiceVersion"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting 
EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1.Namespace"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"subscription","controllerGroup":"operators.coreos.com","controllerKind":"Subscription","source":"kind source: *v1alpha1.ClusterServiceVersion"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1.Service"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v2.OperatorCondition"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1.CustomResourceDefinition"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1.APIService"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1alpha1.Subscription"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v2.OperatorCondition"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625396545+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625396545+00:00 stderr P {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusteroperator","controllerGroup":"config.openshift.io","controllerKind":"ClusterOperator","sour 2025-12-12T16:16:45.625478927+00:00 stderr F ce":"kind source: *v1.ClusterOperator"} 2025-12-12T16:16:45.625478927+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1.PartialObjectMetadata"} 
2025-12-12T16:16:45.625478927+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusteroperator","controllerGroup":"config.openshift.io","controllerKind":"ClusterOperator","source":"kind source: *v1alpha1.ClusterServiceVersion"} 2025-12-12T16:16:45.625478927+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625478927+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625478927+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1.PartialObjectMetadata"} 2025-12-12T16:16:45.625643931+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator-condition-generator","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","source":"kind source: *v1alpha1.ClusterServiceVersion"} 2025-12-12T16:16:45.625730433+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting EventSource","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","source":"kind source: *v1alpha1.InstallPlan"} 2025-12-12T16:16:45.671099371+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="ClusterOperator api is present" monitor=clusteroperator 2025-12-12T16:16:45.671099371+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="initializing clusteroperator resource(s) for [operator-lifecycle-manager-packageserver]" monitor=clusteroperator 2025-12-12T16:16:45.739931942+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="initialized cluster resource - operator-lifecycle-manager-packageserver" monitor=clusteroperator 2025-12-12T16:16:45.744224726+00:00 stderr F time="2025-12-12T16:16:45Z" level=warning msg="install timed out" csv=packageserver id=6seyg namespace=openshift-operator-lifecycle-manager phase=Installing 2025-12-12T16:16:45.751233118+00:00 stderr F I1212 16:16:45.746531 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operator-lifecycle-manager", Name:"packageserver", UID:"09b3d4b2-fc47-4ee0-a331-67a39502cf21", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"34842", FieldPath:""}): type: 'Warning' reason: 'InstallCheckFailed' install timeout 2025-12-12T16:16:45.868224623+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting Controller","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition"} 2025-12-12T16:16:45.868224623+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting workers","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","worker count":1} 2025-12-12T16:16:45.868224623+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting Controller","controller":"operator-condition-generator","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion"} 
2025-12-12T16:16:45.868224623+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting workers","controller":"operator-condition-generator","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","worker count":1} 2025-12-12T16:16:45.868224623+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting Controller","controller":"subscription","controllerGroup":"operators.coreos.com","controllerKind":"Subscription"} 2025-12-12T16:16:45.868224623+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting workers","controller":"subscription","controllerGroup":"operators.coreos.com","controllerKind":"Subscription","worker count":1} 2025-12-12T16:16:45.868224623+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting Controller","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion"} 2025-12-12T16:16:45.868224623+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting workers","controller":"clusterserviceversion","controllerGroup":"operators.coreos.com","controllerKind":"ClusterServiceVersion","worker count":1} 2025-12-12T16:16:45.869216627+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting Controller","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator"} 2025-12-12T16:16:45.869216627+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting workers","controller":"operator","controllerGroup":"operators.coreos.com","controllerKind":"Operator","worker count":1} 2025-12-12T16:16:45.869216627+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting Controller","controller":"clusteroperator","controllerGroup":"config.openshift.io","controllerKind":"ClusterOperator"} 2025-12-12T16:16:45.869216627+00:00 stderr F {"level":"info","ts":"2025-12-12T16:16:45Z","msg":"Starting workers","controller":"clusteroperator","controllerGroup":"config.openshift.io","controllerKind":"ClusterOperator","worker count":1} 2025-12-12T16:16:45.911377806+00:00 stderr F time="2025-12-12T16:16:45Z" level=warning msg="needs reinstall: apiServices not installed" csv=packageserver id=88RAa namespace=openshift-operator-lifecycle-manager phase=Failed strategy=deployment 2025-12-12T16:16:45.911377806+00:00 stderr F I1212 16:16:45.905512 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operator-lifecycle-manager", Name:"packageserver", UID:"09b3d4b2-fc47-4ee0-a331-67a39502cf21", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"37150", FieldPath:""}): type: 'Normal' reason: 'NeedsReinstall' apiServices not installed 2025-12-12T16:16:45.973043532+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="scheduling ClusterServiceVersion for install" csv=packageserver id=lmcgM namespace=openshift-operator-lifecycle-manager phase=Pending 2025-12-12T16:16:45.973043532+00:00 stderr F I1212 16:16:45.972357 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operator-lifecycle-manager", Name:"packageserver", UID:"09b3d4b2-fc47-4ee0-a331-67a39502cf21", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"37169", FieldPath:""}): type: 'Normal' reason: 'AllRequirementsMet' all requirements found, attempting install 2025-12-12T16:16:46.031241193+00:00 stderr F time="2025-12-12T16:16:46Z" level=warning msg="reusing existing cert packageserver-service-cert" 
2025-12-12T16:16:46.123352122+00:00 stderr F I1212 16:16:46.123152 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:16:46.206805029+00:00 stderr F I1212 16:16:46.204159 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operator-lifecycle-manager", Name:"packageserver", UID:"09b3d4b2-fc47-4ee0-a331-67a39502cf21", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"37182", FieldPath:""}): type: 'Normal' reason: 'InstallSucceeded' waiting for install components to report healthy 2025-12-12T16:16:46.266256351+00:00 stderr F time="2025-12-12T16:16:46Z" level=warning msg="reusing existing cert packageserver-service-cert" 2025-12-12T16:16:46.430167862+00:00 stderr F I1212 16:16:46.428688 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:16:46.602223343+00:00 stderr F I1212 16:16:46.602147 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operator-lifecycle-manager", Name:"packageserver", UID:"09b3d4b2-fc47-4ee0-a331-67a39502cf21", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"37182", FieldPath:""}): type: 'Normal' reason: 'InstallSucceeded' waiting for install components to report healthy 2025-12-12T16:16:46.640019286+00:00 stderr F time="2025-12-12T16:16:46Z" level=info msg="error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"packageserver\": the object has been modified; please apply your changes to the latest version and try again" csv=packageserver id=/o8iz namespace=openshift-operator-lifecycle-manager phase=InstallReady 2025-12-12T16:16:46.640070077+00:00 stderr F E1212 16:16:46.640045 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operator-lifecycle-manager/packageserver\" failed: error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"packageserver\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:16:46.734851111+00:00 stderr F time="2025-12-12T16:16:46Z" level=info msg="install strategy successful" csv=packageserver id=24A9l namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:46.735592539+00:00 stderr F I1212 16:16:46.734996 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operator-lifecycle-manager", Name:"packageserver", UID:"09b3d4b2-fc47-4ee0-a331-67a39502cf21", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"37207", FieldPath:""}): type: 'Normal' reason: 'InstallWaiting' apiServices not installed 2025-12-12T16:16:46.890249875+00:00 stderr F time="2025-12-12T16:16:46Z" level=info msg="install strategy successful" csv=packageserver id=dyhbd namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:46.989219931+00:00 stderr F time="2025-12-12T16:16:46Z" level=info msg="install strategy successful" csv=packageserver id=wFZUd namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:47.098023328+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="install strategy successful" csv=packageserver id=cLX1H 
namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:47.190294161+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="install strategy successful" csv=packageserver id=1U+m/ namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:47.295855878+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="install strategy successful" csv=packageserver id=XRJ6K namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:47.399479168+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="install strategy successful" csv=packageserver id=SoKBR namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:47.506596413+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="install strategy successful" csv=packageserver id=yR/mp namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:47.549353537+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="install strategy successful" csv=packageserver id=JlFoM namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:47.600978377+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="install strategy successful" csv=packageserver id=yOizm namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:47.699893202+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="install strategy successful" csv=packageserver id=JMQ+2 namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:47.812225185+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="install strategy successful" csv=packageserver id=sryjS namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:47.913570929+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="install strategy successful" csv=packageserver id=W2ZWu namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:48.003215608+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="install strategy successful" csv=packageserver id=M9zlr namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:48.117048887+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="install strategy successful" csv=packageserver id=+H8lg namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:48.209836182+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="install strategy successful" csv=packageserver id=LsBUS namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:48.310881929+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="install strategy successful" csv=packageserver id=aHLwF namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:48.410345717+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="install strategy successful" csv=packageserver id=BmSIj namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:48.511298562+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="install strategy successful" csv=packageserver id=rxyOq namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:48.610241478+00:00 
stderr F time="2025-12-12T16:16:48Z" level=info msg="install strategy successful" csv=packageserver id=Ukkk7 namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:48.716621745+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="install strategy successful" csv=packageserver id=ZtU5N namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:48.824441687+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="install strategy successful" csv=packageserver id=JxD0H namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:48.916329541+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="install strategy successful" csv=packageserver id=HLIVm namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:49.016376453+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="install strategy successful" csv=packageserver id=AX/hn namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:49.116136269+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="install strategy successful" csv=packageserver id=wYA6/ namespace=openshift-operator-lifecycle-manager phase=Installing strategy=deployment 2025-12-12T16:16:49.117989414+00:00 stderr F I1212 16:16:49.116441 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operator-lifecycle-manager", Name:"packageserver", UID:"09b3d4b2-fc47-4ee0-a331-67a39502cf21", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"37269", FieldPath:""}): type: 'Normal' reason: 'InstallSucceeded' install strategy completed with no errors 2025-12-12T16:26:39.832729860+00:00 stderr F time="2025-12-12T16:26:39Z" level=info msg="creating cluster role: olm.og.service-telemetry-operator-group.admin-6XinD3qDPRoufFZY9j0Gesfq6cfP7qeV80bzMU owned by operator group: service-telemetry/service-telemetry-operator-group" 2025-12-12T16:26:39.837860430+00:00 stderr F time="2025-12-12T16:26:39Z" level=info msg="creating cluster role: olm.og.service-telemetry-operator-group.edit-5FCh2FwFebR0ucQCOrpRprVt227zr6pDCh4P9I owned by operator group: service-telemetry/service-telemetry-operator-group" 2025-12-12T16:26:39.844293622+00:00 stderr F time="2025-12-12T16:26:39Z" level=info msg="creating cluster role: olm.og.service-telemetry-operator-group.view-19H0oeQZasak9kBFyGYeMcbw9IgDiyNKbAOJvC owned by operator group: service-telemetry/service-telemetry-operator-group" 2025-12-12T16:26:43.012715627+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="creating cluster role: olm.og.cert-manager-operator.admin-cNnRMe4AZ6jFumaJsnwdwxTA3LASLEavu7WVnK owned by operator group: cert-manager-operator/cert-manager-operator" 2025-12-12T16:26:43.017384665+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="creating cluster role: olm.og.cert-manager-operator.edit-kLXRGib6xzHLCUDYHM29CXWY892FmGafLchZd owned by operator group: cert-manager-operator/cert-manager-operator" 2025-12-12T16:26:43.021740555+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="creating cluster role: olm.og.cert-manager-operator.view-7O1zTAUZYYK1ZGD76vxclCLn1JYZ9KPomiDmSV owned by operator group: cert-manager-operator/cert-manager-operator" 2025-12-12T16:26:52.380912870+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="[CSV NS Plug-in] applied security.openshift.io/scc.podSecurityLabelSync=true label to namespace " 
2025-12-12T16:26:52.384643844+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:52Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"d88ab97c-315a-4b9d-90ca-aa495a508885","error":"resource name may not be empty","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:52.393548199+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:52Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"4236bada-5dd4-4544-93b3-cb0cbc6ccd16","error":"resource name may not be empty","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:52.393664822+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:52Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"c0589743-0729-4955-8368-742d3a6ff84b","error":"Deployment.apps \"obo-prometheus-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:52.406002513+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:52Z","msg":"Reconciler 
error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"96437e26-bfde-4174-b025-92b186f9ced6","error":"Deployment.apps \"obo-prometheus-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:52.420669084+00:00 stderr F time="2025-12-12T16:26:52Z" level=warning msg="error adding operatorgroup annotations" csv=cluster-observability-operator.v1.3.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cluster-observability-operator.v1.3.0\": the object has been modified; please apply your changes to the latest version and try again" id=rHtme namespace=openshift-operators opgroup=global-operators phase= 2025-12-12T16:26:52.420755646+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="operatorgroup incorrect" csv=cluster-observability-operator.v1.3.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cluster-observability-operator.v1.3.0\": the object has been modified; please apply your changes to the latest version and try again" id=rHtme namespace=openshift-operators phase= 2025-12-12T16:26:52.420785637+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="not in operatorgroup namespace" csv=cluster-observability-operator.v1.3.0 id=nMo0I namespace=openshift-operators phase= 2025-12-12T16:26:52.420895920+00:00 stderr F E1212 16:26:52.420880 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cluster-observability-operator.v1.3.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:26:52.447569814+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:52Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"864b0eb8-c5a0-463b-b21c-19165414ed92","error":"Deployment.apps \"obo-prometheus-operator\" not 
found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:52.449110733+00:00 stderr F time="2025-12-12T16:26:52Z" level=warning msg="error adding operatorgroup annotations" csv=cluster-observability-operator.v1.3.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cluster-observability-operator.v1.3.0\": the object has been modified; please apply your changes to the latest version and try again" id=I2RzL namespace=openshift-operators opgroup=global-operators phase= 2025-12-12T16:26:52.449110733+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="operatorgroup incorrect" csv=cluster-observability-operator.v1.3.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cluster-observability-operator.v1.3.0\": the object has been modified; please apply your changes to the latest version and try again" id=I2RzL namespace=openshift-operators phase= 2025-12-12T16:26:52.449110733+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="not in operatorgroup namespace" csv=cluster-observability-operator.v1.3.0 id=ZH2wY namespace=openshift-operators phase= 2025-12-12T16:26:52.449240606+00:00 stderr F E1212 16:26:52.449191 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cluster-observability-operator.v1.3.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:26:52.476494844+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="operatorgroup incorrect" csv=cluster-observability-operator.v1.3.0 error="" id=VTw0e namespace=openshift-operators phase= 2025-12-12T16:26:52.476494844+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="not in operatorgroup namespace" csv=cluster-observability-operator.v1.3.0 id=7sUS/ namespace=openshift-operators phase= 2025-12-12T16:26:52.494026197+00:00 stderr F time="2025-12-12T16:26:52Z" level=warning msg="error adding operatorgroup annotations" csv=cluster-observability-operator.v1.3.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cluster-observability-operator.v1.3.0\": the object has been modified; please apply your changes to the latest version and try again" id=xV99o namespace=openshift-operators opgroup=global-operators phase= 2025-12-12T16:26:52.494026197+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="operatorgroup incorrect" csv=cluster-observability-operator.v1.3.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cluster-observability-operator.v1.3.0\": the object has been modified; please apply your changes to the latest version and try again" id=xV99o namespace=openshift-operators phase= 2025-12-12T16:26:52.494026197+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="not in operatorgroup namespace" 
csv=cluster-observability-operator.v1.3.0 id=OfOb/ namespace=openshift-operators phase= 2025-12-12T16:26:52.494026197+00:00 stderr F E1212 16:26:52.493632 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cluster-observability-operator.v1.3.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:26:52.494026197+00:00 stderr F time="2025-12-12T16:26:52Z" level=warning msg="error adding operatorgroup annotations" csv=cluster-observability-operator.v1.3.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cluster-observability-operator.v1.3.0\": the object has been modified; please apply your changes to the latest version and try again" namespace=openshift-operators operatorGroup=global-operators 2025-12-12T16:26:52.494026197+00:00 stderr F time="2025-12-12T16:26:52Z" level=warning msg="failed to annotate CSVs in operatorgroup after group change" error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cluster-observability-operator.v1.3.0\": the object has been modified; please apply your changes to the latest version and try again" namespace=openshift-operators operatorGroup=global-operators 2025-12-12T16:26:52.494026197+00:00 stderr F E1212 16:26:52.493826 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/global-operators\" failed: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cluster-observability-operator.v1.3.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:26:52.527459162+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="couldn't ensure RBAC in target namespaces" csv=cluster-observability-operator.v1.3.0 error="no owned roles found" id=3IJ5e namespace=openshift-operators phase= 2025-12-12T16:26:52.527536514+00:00 stderr F E1212 16:26:52.527450 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: no owned roles found" logger="UnhandledError" 2025-12-12T16:26:52.528012366+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:52Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"32292abb-18c2-4852-9ad2-263ccde6b6d4","error":"Deployment.apps \"obo-prometheus-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:52.529576715+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="scheduling ClusterServiceVersion for requirement 
verification" csv=cluster-observability-operator.v1.3.0 id=69dBO namespace=openshift-operators phase= 2025-12-12T16:26:52.529996136+00:00 stderr F I1212 16:26:52.529953 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operators", Name:"cluster-observability-operator.v1.3.0", UID:"facdaaa3-7f6a-4748-a258-3458617687f9", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41182", FieldPath:""}): type: 'Normal' reason: 'RequirementsUnknown' requirements not yet checked 2025-12-12T16:26:52.570657253+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:52Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"d0ceac00-b693-4316-92b0-8e7af73bd7d1","error":"Deployment.apps \"obo-prometheus-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:52.625195171+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="couldn't ensure RBAC in target namespaces" csv=cluster-observability-operator.v1.3.0 error="no owned roles found" id=19rp5 namespace=openshift-operators phase= 2025-12-12T16:26:52.625195171+00:00 stderr F E1212 16:26:52.625085 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: no owned roles found" logger="UnhandledError" 2025-12-12T16:26:52.691275711+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:52Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"d219ffa6-eda0-4a72-a13f-e0982af2ea15","error":"Deployment.apps \"obo-prometheus-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:52.918723207+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="requirements were not met" csv=cluster-observability-operator.v1.3.0 id=XJtla namespace=openshift-operators phase=Pending 2025-12-12T16:26:52.920321857+00:00 stderr F I1212 16:26:52.919385 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", 
Namespace:"openshift-operators", Name:"cluster-observability-operator.v1.3.0", UID:"facdaaa3-7f6a-4748-a258-3458617687f9", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41192", FieldPath:""}): type: 'Normal' reason: 'RequirementsNotMet' one or more requirements couldn't be found 2025-12-12T16:26:53.046232158+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="couldn't ensure RBAC in target namespaces" csv=cluster-observability-operator.v1.3.0 error="no owned roles found" id=SYPQm namespace=openshift-operators phase=Pending 2025-12-12T16:26:53.046232158+00:00 stderr F E1212 16:26:53.043427 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: no owned roles found" logger="UnhandledError" 2025-12-12T16:26:53.331734071+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:53Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"00c3deb6-b0ee-4e43-a7fc-65128b6ef8de","error":"Deployment.apps \"obo-prometheus-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:53.490287417+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="requirements were not met" csv=cluster-observability-operator.v1.3.0 id=eh411 namespace=openshift-operators phase=Pending 2025-12-12T16:26:53.506334982+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="couldn't ensure RBAC in target namespaces" csv=cluster-observability-operator.v1.3.0 error="no owned roles found" id=HI0sq namespace=openshift-operators phase=Pending 2025-12-12T16:26:53.506334982+00:00 stderr F E1212 16:26:53.505738 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: no owned roles found" logger="UnhandledError" 2025-12-12T16:26:54.195976445+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="requirements were not met" csv=cluster-observability-operator.v1.3.0 id=9lZkv namespace=openshift-operators phase=Pending 2025-12-12T16:26:54.238255893+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="couldn't ensure RBAC in target namespaces" csv=cluster-observability-operator.v1.3.0 error="no owned roles found" id=SUiXJ namespace=openshift-operators phase=Pending 2025-12-12T16:26:54.238322495+00:00 stderr F E1212 16:26:54.238277 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: no owned roles found" logger="UnhandledError" 2025-12-12T16:26:54.618454907+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:54Z","msg":"Reconciler 
error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"68879590-444f-49d6-b1e0-653c3e2efc68","error":"Deployment.apps \"obo-prometheus-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:55.496153611+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:55Z","logger":"controllers.operator","msg":"Could not update Operator status","request":{"name":"cluster-observability-operator.openshift-operators"},"error":"Operation cannot be fulfilled on operators.operators.coreos.com \"cluster-observability-operator.openshift-operators\": the object has been modified; please apply your changes to the latest version and try again","stacktrace":"github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators.(*OperatorReconciler).Reconcile\n\t/build/vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators/operator_controller.go:157\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:55.567238437+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="requirements were not met" csv=cluster-observability-operator.v1.3.0 id=BMAxa namespace=openshift-operators phase=Pending 2025-12-12T16:26:55.581678322+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="couldn't ensure RBAC in target namespaces" csv=cluster-observability-operator.v1.3.0 error="no owned roles found" id=OM8RF namespace=openshift-operators phase=Pending 2025-12-12T16:26:55.581678322+00:00 stderr F E1212 16:26:55.580346 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: no owned roles found" logger="UnhandledError" 2025-12-12T16:26:56.179833903+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="requirements were not met" csv=cluster-observability-operator.v1.3.0 id=ul6wP namespace=openshift-operators phase=Pending 2025-12-12T16:26:56.194847683+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="couldn't ensure RBAC in target namespaces" csv=cluster-observability-operator.v1.3.0 error="no owned roles found" id=A5/Ol 
namespace=openshift-operators phase=Pending 2025-12-12T16:26:56.194923465+00:00 stderr F E1212 16:26:56.194890 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: no owned roles found" logger="UnhandledError" 2025-12-12T16:26:57.176864562+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:57Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"636e71bb-7ad5-4c46-b509-fd0074d40fa7","error":"Deployment.apps \"obo-prometheus-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:57.346398215+00:00 stderr F time="2025-12-12T16:26:57Z" level=info msg="requirements were not met" csv=cluster-observability-operator.v1.3.0 id=jAmzT namespace=openshift-operators phase=Pending 2025-12-12T16:26:57.363320593+00:00 stderr F time="2025-12-12T16:26:57Z" level=info msg="couldn't ensure RBAC in target namespaces" csv=cluster-observability-operator.v1.3.0 error="no owned roles found" id=giQNz namespace=openshift-operators phase=Pending 2025-12-12T16:26:57.517418006+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:57Z","logger":"controllers.operator","msg":"Could not update Operator status","request":{"name":"cluster-observability-operator.openshift-operators"},"error":"Operation cannot be fulfilled on operators.operators.coreos.com \"cluster-observability-operator.openshift-operators\": the object has been modified; please apply your changes to the latest version and try again","stacktrace":"github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators.(*OperatorReconciler).Reconcile\n\t/build/vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators/operator_controller.go:157\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:57.984262179+00:00 stderr F time="2025-12-12T16:26:57Z" level=info msg="requirements were not met" csv=cluster-observability-operator.v1.3.0 id=IQzpD namespace=openshift-operators phase=Pending 2025-12-12T16:26:58.032598840+00:00 stderr F 
time="2025-12-12T16:26:58Z" level=info msg="couldn't ensure RBAC in target namespaces" csv=cluster-observability-operator.v1.3.0 error="no owned roles found" id=VZJDU namespace=openshift-operators phase=Pending 2025-12-12T16:26:58.032598840+00:00 stderr F E1212 16:26:58.028034 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: no owned roles found" logger="UnhandledError" 2025-12-12T16:26:58.493434099+00:00 stderr F time="2025-12-12T16:26:58Z" level=info msg="requirements were not met" csv=cluster-observability-operator.v1.3.0 id=ZdE4u namespace=openshift-operators phase=Pending 2025-12-12T16:26:58.515245381+00:00 stderr F time="2025-12-12T16:26:58Z" level=info msg="couldn't ensure RBAC in target namespaces" csv=cluster-observability-operator.v1.3.0 error="no owned roles found" id=IfQ3N namespace=openshift-operators phase=Pending 2025-12-12T16:26:58.515245381+00:00 stderr F E1212 16:26:58.512400 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: no owned roles found" logger="UnhandledError" 2025-12-12T16:26:58.960990622+00:00 stderr F time="2025-12-12T16:26:58Z" level=info msg="requirements were not met" csv=cluster-observability-operator.v1.3.0 id=XdYS0 namespace=openshift-operators phase=Pending 2025-12-12T16:26:58.979803249+00:00 stderr F time="2025-12-12T16:26:58Z" level=info msg="couldn't ensure RBAC in target namespaces" csv=cluster-observability-operator.v1.3.0 error="no owned roles found" id=q0bdS namespace=openshift-operators phase=Pending 2025-12-12T16:26:58.979886151+00:00 stderr F E1212 16:26:58.979806 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: no owned roles found" logger="UnhandledError" 2025-12-12T16:26:58.988501519+00:00 stderr F {"level":"error","ts":"2025-12-12T16:26:58Z","logger":"controllers.operator","msg":"Could not update Operator status","request":{"name":"cluster-observability-operator.openshift-operators"},"error":"Operation cannot be fulfilled on operators.operators.coreos.com \"cluster-observability-operator.openshift-operators\": the object has been modified; please apply your changes to the latest version and try again","stacktrace":"github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators.(*OperatorReconciler).Reconcile\n\t/build/vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators/operator_controller.go:157\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:26:59.556648888+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="requirements were not met" csv=cluster-observability-operator.v1.3.0 id=Ng+N4 namespace=openshift-operators phase=Pending 
2025-12-12T16:26:59.592014333+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="couldn't ensure RBAC in target namespaces" csv=cluster-observability-operator.v1.3.0 error="no owned roles found" id=+lcuQ namespace=openshift-operators phase=Pending 2025-12-12T16:26:59.592014333+00:00 stderr F E1212 16:26:59.591651 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator.v1.3.0\" failed: no owned roles found" logger="UnhandledError" 2025-12-12T16:27:00.076562046+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="scheduling ClusterServiceVersion for install" csv=cluster-observability-operator.v1.3.0 id=+d186 namespace=openshift-operators phase=Pending 2025-12-12T16:27:00.084290182+00:00 stderr F I1212 16:27:00.078171 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operators", Name:"cluster-observability-operator.v1.3.0", UID:"facdaaa3-7f6a-4748-a258-3458617687f9", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41208", FieldPath:""}): type: 'Normal' reason: 'AllRequirementsMet' all requirements found, attempting install 2025-12-12T16:27:00.190111080+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="No api or webhook descs to add CA to" 2025-12-12T16:27:00.362948584+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="No api or webhook descs to add CA to" 2025-12-12T16:27:00.372439825+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="No api or webhook descs to add CA to" 2025-12-12T16:27:00.454264735+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"ce7add47-7e2a-488a-937b-a0097d2c7d4f","error":"Deployment.apps \"obo-prometheus-operator-admission-webhook\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:00.500150587+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"83c9aa96-cb1d-4eed-a0c1-330200b0907d","error":"Deployment.apps \"obo-prometheus-operator-admission-webhook\" not 
found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:00.527931860+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"37617ce1-8351-4422-a35f-7944a11b67b5","error":"Deployment.apps \"obo-prometheus-operator-admission-webhook\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:00.552814400+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"730d2560-2390-4e56-b712-1ef49939075a","error":"Deployment.apps \"obo-prometheus-operator-admission-webhook\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:00.599845180+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"52248581-fb45-4396-944f-fd4c0e325136","error":"Deployment.apps \"observability-operator\" not 
found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:00.617884286+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"70bcbe2c-7408-4fab-bd78-aab61a692072","error":"Deployment.apps \"observability-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:00.625469278+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"e85b3985-bc04-4aeb-a305-328e98227c4a","error":"Deployment.apps \"observability-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:00.653126568+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"647237b5-9055-4f04-a278-f563d052e820","error":"Deployment.apps \"observability-operator\" not 
found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:00.721530210+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"c8f4bbf2-bd18-4dca-88af-d3da6c392968","error":"Deployment.apps \"observability-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:00.813230000+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"9c418eb4-5007-482c-a7e1-2daa49d8e72b","error":"Deployment.apps \"perses-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:00.845108877+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"68fcdd41-eda4-4f6f-b746-fef0bc1bceaf","error":"Deployment.apps \"perses-operator\" not 
found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:00.853844738+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"31e59214-3270-4c9e-a782-c668d56009f7","error":"Deployment.apps \"perses-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:00.868389636+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:00Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cluster-observability-operator.v1.3.0","namespace":"openshift-operators"},"namespace":"openshift-operators","name":"cluster-observability-operator.v1.3.0","reconcileID":"feaa1e50-36f7-4694-aa1d-2d0ab89f0c10","error":"Deployment.apps \"perses-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:01.170031151+00:00 stderr F I1212 16:27:01.169911 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operators", Name:"cluster-observability-operator.v1.3.0", UID:"facdaaa3-7f6a-4748-a258-3458617687f9", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41596", FieldPath:""}): type: 'Normal' reason: 'InstallSucceeded' waiting for install components to report healthy 2025-12-12T16:27:01.407010507+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=XaRH6 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:01.407276094+00:00 stderr F I1212 16:27:01.407217 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", 
Namespace:"openshift-operators", Name:"cluster-observability-operator.v1.3.0", UID:"facdaaa3-7f6a-4748-a258-3458617687f9", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41716", FieldPath:""}): type: 'Normal' reason: 'InstallWaiting' installing: waiting for deployment obo-prometheus-operator to become ready: deployment "obo-prometheus-operator" not available: Deployment does not have minimum availability. 2025-12-12T16:27:01.433438056+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:01Z","logger":"controllers.operator","msg":"Could not update Operator status","request":{"name":"cluster-observability-operator.openshift-operators"},"error":"Operation cannot be fulfilled on operators.operators.coreos.com \"cluster-observability-operator.openshift-operators\": the object has been modified; please apply your changes to the latest version and try again","stacktrace":"github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators.(*OperatorReconciler).Reconcile\n\t/build/vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators/operator_controller.go:157\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:01.736627330+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=UlVug namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:01.936514238+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=6Di8o namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:02.243383665+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=u84O+ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:02.486541829+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=fa2KD namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:02.580348683+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=0vc4n namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:02.767169951+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=hYpmT namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:03.065611315+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=jVbYh namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:03.167158215+00:00 stderr F 
time="2025-12-12T16:27:03Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=USTY9 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:03.239706131+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:03Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"elasticsearch-eck-operator-certified.v3.2.0","namespace":"service-telemetry"},"namespace":"service-telemetry","name":"elasticsearch-eck-operator-certified.v3.2.0","reconcileID":"cbbc87de-f464-4232-84b7-670681ff26b6","error":"resource name may not be empty","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:03.256702431+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:03Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"elasticsearch-eck-operator-certified.v3.2.0","namespace":"service-telemetry"},"namespace":"service-telemetry","name":"elasticsearch-eck-operator-certified.v3.2.0","reconcileID":"f0bd5b8c-d49e-4a19-8d14-417c074f5a25","error":"resource name may not be empty","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:03.256971518+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:03Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"elasticsearch-eck-operator-certified.v3.2.0","namespace":"service-telemetry"},"namespace":"service-telemetry","name":"elasticsearch-eck-operator-certified.v3.2.0","reconcileID":"18cc0c4a-6920-4dd5-a50e-576a06a80038","error":"Deployment.apps \"elastic-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:03.257075571+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:03Z","msg":"Reconciler 
error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"elasticsearch-eck-operator-certified.v3.2.0","namespace":"service-telemetry"},"namespace":"service-telemetry","name":"elasticsearch-eck-operator-certified.v3.2.0","reconcileID":"5754a150-ccf8-459a-8868-26998be125fc","error":"Deployment.apps \"elastic-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:03.282988987+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:03Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"elasticsearch-eck-operator-certified.v3.2.0","namespace":"service-telemetry"},"namespace":"service-telemetry","name":"elasticsearch-eck-operator-certified.v3.2.0","reconcileID":"2ab559e5-9b83-431b-a4ea-fc29c3b580dc","error":"Deployment.apps \"elastic-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:03.283381287+00:00 stderr F time="2025-12-12T16:27:03Z" level=warning msg="error adding operatorgroup annotations" csv=elasticsearch-eck-operator-certified.v3.2.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" id=5lFl9 namespace=service-telemetry opgroup=service-telemetry-operator-group phase= 2025-12-12T16:27:03.283392717+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="operatorgroup incorrect" csv=elasticsearch-eck-operator-certified.v3.2.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" id=5lFl9 namespace=service-telemetry phase= 2025-12-12T16:27:03.283416847+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="not in operatorgroup namespace" csv=elasticsearch-eck-operator-certified.v3.2.0 id=0gh0E namespace=service-telemetry phase= 2025-12-12T16:27:03.283491249+00:00 stderr F E1212 16:27:03.283471 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been 
modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:03.363547895+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="operatorgroup incorrect" csv=elasticsearch-eck-operator-certified.v3.2.0 error="" id=dkoeg namespace=service-telemetry phase= 2025-12-12T16:27:03.363547895+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="not in operatorgroup namespace" csv=elasticsearch-eck-operator-certified.v3.2.0 id=o4eZi namespace=service-telemetry phase= 2025-12-12T16:27:03.363641188+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:03Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"elasticsearch-eck-operator-certified.v3.2.0","namespace":"service-telemetry"},"namespace":"service-telemetry","name":"elasticsearch-eck-operator-certified.v3.2.0","reconcileID":"1d2bce54-8d34-4c47-adc1-98c094a18bee","error":"Deployment.apps \"elastic-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:03.430541811+00:00 stderr F time="2025-12-12T16:27:03Z" level=warning msg="error adding operatorgroup annotations" csv=elasticsearch-eck-operator-certified.v3.2.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" id=WxIdR namespace=service-telemetry opgroup=service-telemetry-operator-group phase= 2025-12-12T16:27:03.430541811+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="operatorgroup incorrect" csv=elasticsearch-eck-operator-certified.v3.2.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" id=WxIdR namespace=service-telemetry phase= 2025-12-12T16:27:03.430541811+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="not in operatorgroup namespace" csv=elasticsearch-eck-operator-certified.v3.2.0 id=cOqqe namespace=service-telemetry phase= 2025-12-12T16:27:03.430541811+00:00 stderr F E1212 16:27:03.429734 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:03.446361021+00:00 stderr F time="2025-12-12T16:27:03Z" level=warning msg="error adding operatorgroup annotations" csv=elasticsearch-eck-operator-certified.v3.2.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply 
your changes to the latest version and try again" namespace=service-telemetry operatorGroup=service-telemetry-operator-group 2025-12-12T16:27:03.446361021+00:00 stderr F time="2025-12-12T16:27:03Z" level=warning msg="failed to annotate CSVs in operatorgroup after group change" error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" namespace=service-telemetry operatorGroup=service-telemetry-operator-group 2025-12-12T16:27:03.446361021+00:00 stderr F E1212 16:27:03.445518 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/service-telemetry-operator-group\" failed: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:03.488358224+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=damRz namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:03.488358224+00:00 stderr F E1212 16:27:03.485159 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: could not update operatorgroups olm.providedAPIs annotation: Operation cannot be fulfilled on operatorgroups.operators.coreos.com \"service-telemetry-operator-group\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:03.492227532+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="scheduling ClusterServiceVersion for requirement verification" csv=elasticsearch-eck-operator-certified.v3.2.0 id=P87Jh namespace=service-telemetry phase= 2025-12-12T16:27:03.492227532+00:00 stderr F I1212 16:27:03.490496 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"service-telemetry", Name:"elasticsearch-eck-operator-certified.v3.2.0", UID:"162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41826", FieldPath:""}): type: 'Normal' reason: 'RequirementsUnknown' requirements not yet checked 2025-12-12T16:27:03.536822081+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:03Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"elasticsearch-eck-operator-certified.v3.2.0","namespace":"service-telemetry"},"namespace":"service-telemetry","name":"elasticsearch-eck-operator-certified.v3.2.0","reconcileID":"0a1f63f8-5533-4d3b-b1da-8ffa916ce84e","error":"Deployment.apps \"elastic-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:03.550505837+00:00 stderr F 
time="2025-12-12T16:27:03Z" level=info msg="scheduling ClusterServiceVersion for requirement verification" csv=elasticsearch-eck-operator-certified.v3.2.0 id=cuGXR namespace=service-telemetry phase= 2025-12-12T16:27:03.554209061+00:00 stderr F I1212 16:27:03.551113 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"service-telemetry", Name:"elasticsearch-eck-operator-certified.v3.2.0", UID:"162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41826", FieldPath:""}): type: 'Normal' reason: 'RequirementsUnknown' requirements not yet checked 2025-12-12T16:27:03.572944635+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" csv=elasticsearch-eck-operator-certified.v3.2.0 id=gA1Zl namespace=service-telemetry phase= 2025-12-12T16:27:03.573010527+00:00 stderr F E1212 16:27:03.572992 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:03.574285809+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="scheduling ClusterServiceVersion for requirement verification" csv=elasticsearch-eck-operator-certified.v3.2.0 id=K5HZk namespace=service-telemetry phase= 2025-12-12T16:27:03.575029168+00:00 stderr F I1212 16:27:03.574706 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"service-telemetry", Name:"elasticsearch-eck-operator-certified.v3.2.0", UID:"162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41826", FieldPath:""}): type: 'Normal' reason: 'RequirementsUnknown' requirements not yet checked 2025-12-12T16:27:03.591243708+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" csv=elasticsearch-eck-operator-certified.v3.2.0 id=9rBKr namespace=service-telemetry phase= 2025-12-12T16:27:03.591243708+00:00 stderr F E1212 16:27:03.588290 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:03.595016094+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="scheduling ClusterServiceVersion for requirement verification" csv=elasticsearch-eck-operator-certified.v3.2.0 id=IXsAP namespace=service-telemetry phase= 2025-12-12T16:27:03.595016094+00:00 stderr F I1212 16:27:03.592462 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", 
Namespace:"service-telemetry", Name:"elasticsearch-eck-operator-certified.v3.2.0", UID:"162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41826", FieldPath:""}): type: 'Normal' reason: 'RequirementsUnknown' requirements not yet checked 2025-12-12T16:27:03.643667435+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" csv=elasticsearch-eck-operator-certified.v3.2.0 id=2j20l namespace=service-telemetry phase= 2025-12-12T16:27:03.643750827+00:00 stderr F E1212 16:27:03.643689 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:03.813659957+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="requirements were not met" csv=elasticsearch-eck-operator-certified.v3.2.0 id=jska8 namespace=service-telemetry phase=Pending 2025-12-12T16:27:03.849460093+00:00 stderr F I1212 16:27:03.844877 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"service-telemetry", Name:"elasticsearch-eck-operator-certified.v3.2.0", UID:"162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41845", FieldPath:""}): type: 'Normal' reason: 'RequirementsNotMet' one or more requirements couldn't be found 2025-12-12T16:27:03.849460093+00:00 stderr F E1212 16:27:03.845454 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: requirements were not met" logger="UnhandledError" 2025-12-12T16:27:03.864966236+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:03Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"elasticsearch-eck-operator-certified.v3.2.0","namespace":"service-telemetry"},"namespace":"service-telemetry","name":"elasticsearch-eck-operator-certified.v3.2.0","reconcileID":"23254754-cfb7-402e-8b45-1d7e0e542f3c","error":"Deployment.apps \"elastic-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:03.902756202+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=34nrK namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:04.030499685+00:00 stderr F time="2025-12-12T16:27:04Z" level=info msg="requirements 
were not met" csv=elasticsearch-eck-operator-certified.v3.2.0 id=aNObY namespace=service-telemetry phase=Pending 2025-12-12T16:27:04.031807578+00:00 stderr F I1212 16:27:04.031758 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"service-telemetry", Name:"elasticsearch-eck-operator-certified.v3.2.0", UID:"162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41845", FieldPath:""}): type: 'Normal' reason: 'RequirementsNotMet' one or more requirements couldn't be found 2025-12-12T16:27:04.068741323+00:00 stderr F E1212 16:27:04.068651 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: error transitioning ClusterServiceVersion: requirements were not met and error updating CSV status: error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:04.102445986+00:00 stderr F time="2025-12-12T16:27:04Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=MR+ml namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:04.212624135+00:00 stderr F time="2025-12-12T16:27:04Z" level=info msg="requirements were not met" csv=elasticsearch-eck-operator-certified.v3.2.0 id=g1PSQ namespace=service-telemetry phase=Pending 2025-12-12T16:27:04.212775669+00:00 stderr F E1212 16:27:04.212760 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: requirements were not met" logger="UnhandledError" 2025-12-12T16:27:04.307395833+00:00 stderr F time="2025-12-12T16:27:04Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=fp/XL namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:04.361543944+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:04Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"elasticsearch-eck-operator-certified.v3.2.0","namespace":"service-telemetry"},"namespace":"service-telemetry","name":"elasticsearch-eck-operator-certified.v3.2.0","reconcileID":"ff820c0a-322d-4c10-8a73-2c9139a6d956","error":"Deployment.apps \"elastic-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:04.388026374+00:00 stderr F time="2025-12-12T16:27:04Z" level=info msg="requirements were not met" csv=elasticsearch-eck-operator-certified.v3.2.0 id=3uL/T namespace=service-telemetry phase=Pending 2025-12-12T16:27:04.388026374+00:00 stderr F E1212 16:27:04.387580 1 queueinformer_operator.go:312] "Unhandled Error" err="sync 
\"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: requirements were not met" logger="UnhandledError" 2025-12-12T16:27:04.505422755+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:04Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"elasticsearch-eck-operator-certified.v3.2.0","namespace":"service-telemetry"},"namespace":"service-telemetry","name":"elasticsearch-eck-operator-certified.v3.2.0","reconcileID":"9205e3f9-b242-444b-9839-4b6c88c5b44f","error":"Deployment.apps \"elastic-operator\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:04.565328831+00:00 stderr F time="2025-12-12T16:27:04Z" level=info msg="requirements were not met" csv=elasticsearch-eck-operator-certified.v3.2.0 id=TPXR7 namespace=service-telemetry phase=Pending 2025-12-12T16:27:04.565328831+00:00 stderr F E1212 16:27:04.557391 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: requirements were not met" logger="UnhandledError" 2025-12-12T16:27:04.669233071+00:00 stderr F time="2025-12-12T16:27:04Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=iCjk2 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:04.776622809+00:00 stderr F time="2025-12-12T16:27:04Z" level=info msg="requirements were not met" csv=elasticsearch-eck-operator-certified.v3.2.0 id=L/fB7 namespace=service-telemetry phase=Pending 2025-12-12T16:27:04.902169746+00:00 stderr F time="2025-12-12T16:27:04Z" level=info msg="scheduling ClusterServiceVersion for install" csv=elasticsearch-eck-operator-certified.v3.2.0 id=shFIR namespace=service-telemetry phase=Pending 2025-12-12T16:27:04.902343061+00:00 stderr F I1212 16:27:04.902284 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"service-telemetry", Name:"elasticsearch-eck-operator-certified.v3.2.0", UID:"162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41891", FieldPath:""}): type: 'Normal' reason: 'AllRequirementsMet' all requirements found, attempting install 2025-12-12T16:27:05.004341581+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=lBnTz namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:05.212233183+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=8FbQ4 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:05.504384256+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=DjDZN namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:05.755583374+00:00 stderr 
F I1212 16:27:05.754440 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"service-telemetry", Name:"elasticsearch-eck-operator-certified.v3.2.0", UID:"162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41949", FieldPath:""}): type: 'Normal' reason: 'InstallSucceeded' waiting for install components to report healthy 2025-12-12T16:27:05.842669708+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Yq+Kn namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:05.852520277+00:00 stderr F time="2025-12-12T16:27:05Z" level=warning msg="reusing existing cert elastic-operator-service-cert" 2025-12-12T16:27:06.080831586+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Vsain namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:06.365882170+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=MhaE7 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:06.619406977+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=4dmF6 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:06.888280582+00:00 stderr F I1212 16:27:06.887802 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"service-telemetry", Name:"elasticsearch-eck-operator-certified.v3.2.0", UID:"162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"41949", FieldPath:""}): type: 'Normal' reason: 'InstallSucceeded' waiting for install components to report healthy 2025-12-12T16:27:06.902277486+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" csv=elasticsearch-eck-operator-certified.v3.2.0 id=u8mp2 namespace=service-telemetry phase=InstallReady 2025-12-12T16:27:06.902277486+00:00 stderr F E1212 16:27:06.900970 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:06.988051977+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=W9ynf namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:07.185507864+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=LBfZi namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:07.483274920+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=rX5f3 namespace=openshift-operators 
phase=Installing strategy=deployment 2025-12-12T16:27:07.555262642+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=GWoQu namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:07.555262642+00:00 stderr F I1212 16:27:07.533918 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"service-telemetry", Name:"elasticsearch-eck-operator-certified.v3.2.0", UID:"162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"42019", FieldPath:""}): type: 'Normal' reason: 'InstallWaiting' installing: waiting for deployment elastic-operator to become ready: deployment "elastic-operator" not available: Deployment does not have minimum availability. 2025-12-12T16:27:07.830514319+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=rHHsp namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:08.066107661+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=1Ijbz namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:08.111113580+00:00 stderr F I1212 16:27:08.110629 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"service-telemetry", Name:"elasticsearch-eck-operator-certified.v3.2.0", UID:"162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"42019", FieldPath:""}): type: 'Normal' reason: 'InstallWaiting' installing: waiting for deployment elastic-operator to become ready: deployment "elastic-operator" not available: Deployment does not have minimum availability. 
2025-12-12T16:27:08.111113580+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=uGJfx namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:08.124382816+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" csv=elasticsearch-eck-operator-certified.v3.2.0 id=sOexh namespace=service-telemetry phase=Installing 2025-12-12T16:27:08.124460048+00:00 stderr F E1212 16:27:08.124424 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified.v3.2.0\" failed: error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"elasticsearch-eck-operator-certified.v3.2.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:08.162520171+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=f0UQq namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:08.270481184+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=5LE6N namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:08.557481926+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=4pLmN namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:08.568650569+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=fga0g namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:08.749662670+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=CSSx4 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:08.775033712+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=XFTnS namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:08.862512976+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=/qotK namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:09.053816268+00:00 stderr F time="2025-12-12T16:27:09Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=5erF5 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:09.453212986+00:00 stderr F time="2025-12-12T16:27:09Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=SrVLb namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:09.494995394+00:00 stderr F time="2025-12-12T16:27:09Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=YpOVo namespace=service-telemetry phase=Installing 
strategy=deployment 2025-12-12T16:27:09.544251040+00:00 stderr F time="2025-12-12T16:27:09Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=52Yws namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:09.866062315+00:00 stderr F time="2025-12-12T16:27:09Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=3WqY+ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:09.968377574+00:00 stderr F time="2025-12-12T16:27:09Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=hRbmh namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:09.979471525+00:00 stderr F time="2025-12-12T16:27:09Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=FHHW4 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:10.065359679+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=0x/rZ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:10.230242652+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=UVlOt namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:10.240839240+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=+Cabf namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:10.645670506+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Yv5FM namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:10.931393537+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=St//5 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:11.059309354+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=w0bfn namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:11.148838800+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=aksdT namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:11.235536435+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=YfOU6 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:11.300537140+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=H5GyF namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:11.338209833+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=h6Jmh namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:11.358467976+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=uJmvh namespace=openshift-operators 
phase=Installing strategy=deployment 2025-12-12T16:27:11.470145342+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=tHfWr namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:11.514905305+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=NpnaO namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:11.586321783+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=+g9U4 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:11.697342882+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Lkctu namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:11.720601381+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=j0Jjv namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:11.881733579+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=LFUwd namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:11.988433469+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=DrIea namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:12.108171800+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=rjr++ namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:12.123499108+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=GroaU namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:12.213786932+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=qFlfV namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:12.269885902+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=kYJnm namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:12.319668132+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=I4+or namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:12.455272134+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ab+uH namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:12.519056118+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=rXHgu namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:12.520675799+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=HvzXa 
namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:12.707930138+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=RUxtf namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:12.731225198+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=PyENn namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:12.914952168+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=7w1S7 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:13.015743128+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=bT8Tj namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:13.016299752+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=87oSw namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:13.083800691+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=dsNj9 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:13.173331387+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=slm7y namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:13.181507744+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=ju2So namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:13.305437360+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=qskra namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:13.504988991+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=iH24Q namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:13.504988991+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=xm053 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:13.682818841+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=+5+Vy namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:13.771735992+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=2E+b7 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:13.803381683+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=MgKjf namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:13.845224932+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 
id=m9laT namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:13.911201811+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ZWWJP namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:13.914299080+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=bN6iO namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:13.981012078+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Z6h02 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:14.288076620+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=2pBFD namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:14.292554993+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=xLMw9 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:14.331994721+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=PiUcC namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:14.393006555+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=9PAMk namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:14.413090884+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=L204f namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:14.482307355+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=7kDEK namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:14.500601578+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=BL/db namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:14.531552422+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=65PF5 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:14.601784749+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=wr2/B namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:14.659967582+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=2LPBX namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:14.864040107+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=tPmjW namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:14.919563292+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" 
csv=cluster-observability-operator.v1.3.0 id=dTIGY namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:14.953658305+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=E2Dmq namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:14.978716269+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=J7Y4l namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:15.130890350+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=qwGmw namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:15.143659303+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=vkYoO namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:15.185134373+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Lo6eP namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:15.233899077+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ozdKZ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:15.240855903+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=Vbh/1 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:15.281976714+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=OBj30 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:15.332298708+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=rOzvK namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:15.335142060+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=VYWs+ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:15.386754976+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=wcKSL namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:15.498465143+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=cN4iw namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:15.502141486+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=C9IGE namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:15.545351760+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=67RND namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:15.680682205+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy 
successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=ix61i namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:15.684039020+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=/13aE namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:15.804255541+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=8/yXZ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:15.895364077+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=r1ptN namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:15.904435827+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ePExv namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.051358965+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=uPcwH namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.058657090+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=r0Ig3 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:16.157726427+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=dSIAt namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.221852960+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=BigdJ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.226102378+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=UrqnS namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:16.278595096+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=4RQSh namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.326034627+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=YhDeM namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.326447217+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=dcF7z namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:16.388904838+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=LTCdZ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.435007585+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=nv6lL namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:16.444924226+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install 
strategy successful" csv=cluster-observability-operator.v1.3.0 id=ZZpEl namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.493120236+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=idS+q namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.541747166+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=BzDjA namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:16.548578269+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=jax8R namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.616101508+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=rrduK namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.654587352+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=MIVsT namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:16.669799257+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=JoZzy namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.724909762+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=bBIzE namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.740217779+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=2TIwn namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:16.767280034+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=qni9K namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.812239832+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=OVzPz namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.813417592+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=H4ptN namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:16.863894780+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Spxre namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.911940366+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=Lrale namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:16.924324639+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=btW/H namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.967101532+00:00 stderr F time="2025-12-12T16:27:16Z" level=info 
msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Y1hwN namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:16.983929887+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=PCvOU namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:17.033820480+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=yAM4w namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:17.094139727+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=yDSDU namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:17.105280439+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=vnTiG namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:17.148250566+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=PSVEL namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:17.212417580+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:17Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cert-manager-operator.v1.18.0","namespace":"cert-manager-operator"},"namespace":"cert-manager-operator","name":"cert-manager-operator.v1.18.0","reconcileID":"bbd58dcd-57bd-41ea-9d44-c346b21eb917","error":"resource name may not be empty","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:17.223517961+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:17Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cert-manager-operator.v1.18.0","namespace":"cert-manager-operator"},"namespace":"cert-manager-operator","name":"cert-manager-operator.v1.18.0","reconcileID":"15adf0da-6cb8-4c84-bb09-e25ddd9f0cf9","error":"resource name may not be empty","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:17.227278946+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:17Z","msg":"Reconciler 
error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cert-manager-operator.v1.18.0","namespace":"cert-manager-operator"},"namespace":"cert-manager-operator","name":"cert-manager-operator.v1.18.0","reconcileID":"66f22844-b652-406b-af20-adb56a76d4b7","error":"Deployment.apps \"cert-manager-operator-controller-manager\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:17.235705980+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:17Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cert-manager-operator.v1.18.0","namespace":"cert-manager-operator"},"namespace":"cert-manager-operator","name":"cert-manager-operator.v1.18.0","reconcileID":"40b40490-eac3-48ea-a40b-055293e46a69","error":"Deployment.apps \"cert-manager-operator-controller-manager\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:17.247465087+00:00 stderr F time="2025-12-12T16:27:17Z" level=warning msg="error adding operatorgroup annotations" csv=cert-manager-operator.v1.18.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cert-manager-operator.v1.18.0\": the object has been modified; please apply your changes to the latest version and try again" id=Ccqjo namespace=cert-manager-operator opgroup=cert-manager-operator phase= 2025-12-12T16:27:17.247465087+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="operatorgroup incorrect" csv=cert-manager-operator.v1.18.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cert-manager-operator.v1.18.0\": the object has been modified; please apply your changes to the latest version and try again" id=Ccqjo namespace=cert-manager-operator phase= 2025-12-12T16:27:17.247465087+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="not in operatorgroup namespace" csv=cert-manager-operator.v1.18.0 id=h7URB namespace=cert-manager-operator phase= 2025-12-12T16:27:17.247465087+00:00 stderr F E1212 16:27:17.246666 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"cert-manager-operator/cert-manager-operator.v1.18.0\" failed: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cert-manager-operator.v1.18.0\": the object has been modified; please apply your changes to the latest version and try again" 
logger="UnhandledError" 2025-12-12T16:27:17.278997845+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:17Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cert-manager-operator.v1.18.0","namespace":"cert-manager-operator"},"namespace":"cert-manager-operator","name":"cert-manager-operator.v1.18.0","reconcileID":"d29d3685-a0a3-4a48-88a7-9a661d1add44","error":"Deployment.apps \"cert-manager-operator-controller-manager\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:17.358659861+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:17Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cert-manager-operator.v1.18.0","namespace":"cert-manager-operator"},"namespace":"cert-manager-operator","name":"cert-manager-operator.v1.18.0","reconcileID":"614e6996-99e9-414a-8c02-8a3f6f23ebae","error":"Deployment.apps \"cert-manager-operator-controller-manager\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:17.439297562+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=E/QiG namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:17.457628226+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=BqYuA namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:17.477901119+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="operatorgroup incorrect" csv=cert-manager-operator.v1.18.0 error="" id=BlXHb namespace=cert-manager-operator phase= 2025-12-12T16:27:17.477901119+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="not in operatorgroup namespace" csv=cert-manager-operator.v1.18.0 id=DrOre namespace=cert-manager-operator phase= 2025-12-12T16:27:17.489412461+00:00 stderr F time="2025-12-12T16:27:17Z" level=warning msg="error adding operatorgroup annotations" csv=cert-manager-operator.v1.18.0 error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cert-manager-operator.v1.18.0\": the object has been modified; please apply your changes to the latest version and try again" namespace=cert-manager-operator operatorGroup=cert-manager-operator 
2025-12-12T16:27:17.489412461+00:00 stderr F time="2025-12-12T16:27:17Z" level=warning msg="failed to annotate CSVs in operatorgroup after group change" error="Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cert-manager-operator.v1.18.0\": the object has been modified; please apply your changes to the latest version and try again" namespace=cert-manager-operator operatorGroup=cert-manager-operator 2025-12-12T16:27:17.489412461+00:00 stderr F E1212 16:27:17.488028 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"cert-manager-operator/cert-manager-operator\" failed: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cert-manager-operator.v1.18.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:17.519324518+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:17Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cert-manager-operator.v1.18.0","namespace":"cert-manager-operator"},"namespace":"cert-manager-operator","name":"cert-manager-operator.v1.18.0","reconcileID":"235ff397-6a92-402b-9236-f9a8a73343f2","error":"Deployment.apps \"cert-manager-operator-controller-manager\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:17.640923595+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=NJriv namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:17.670801431+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="scheduling ClusterServiceVersion for requirement verification" csv=cert-manager-operator.v1.18.0 id=IxDAF namespace=cert-manager-operator phase= 2025-12-12T16:27:17.671199662+00:00 stderr F I1212 16:27:17.671132 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"cert-manager-operator", Name:"cert-manager-operator.v1.18.0", UID:"14349ba5-a098-44c0-8a63-4f51701ecde7", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"42314", FieldPath:""}): type: 'Normal' reason: 'RequirementsUnknown' requirements not yet checked 2025-12-12T16:27:17.744129747+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=96WS6 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:17.760498092+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=7rtZd namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:17.830598866+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="requirements were not met" csv=cert-manager-operator.v1.18.0 id=aDMI0 namespace=cert-manager-operator phase=Pending 2025-12-12T16:27:17.831387996+00:00 stderr F I1212 
16:27:17.831337 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"cert-manager-operator", Name:"cert-manager-operator.v1.18.0", UID:"14349ba5-a098-44c0-8a63-4f51701ecde7", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"42321", FieldPath:""}): type: 'Normal' reason: 'RequirementsNotMet' one or more requirements couldn't be found 2025-12-12T16:27:17.840770463+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:17Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cert-manager-operator.v1.18.0","namespace":"cert-manager-operator"},"namespace":"cert-manager-operator","name":"cert-manager-operator.v1.18.0","reconcileID":"b23442d2-49a4-4fe6-83c4-f9dcc4a09e53","error":"Deployment.apps \"cert-manager-operator-controller-manager\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:17.847646837+00:00 stderr F E1212 16:27:17.847597 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"cert-manager-operator/cert-manager-operator.v1.18.0\" failed: requirements were not met" logger="UnhandledError" 2025-12-12T16:27:17.940754844+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ORb+m namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:18.000445064+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="requirements were not met" csv=cert-manager-operator.v1.18.0 id=7snEU namespace=cert-manager-operator phase=Pending 2025-12-12T16:27:18.000519276+00:00 stderr F E1212 16:27:18.000478 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"cert-manager-operator/cert-manager-operator.v1.18.0\" failed: requirements were not met" logger="UnhandledError" 2025-12-12T16:27:18.231434520+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Jbs92 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:18.259029889+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=2a47Q namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:18.418700130+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="requirements were not met" csv=cert-manager-operator.v1.18.0 id=R2qKW namespace=cert-manager-operator phase=Pending 2025-12-12T16:27:18.418779602+00:00 stderr F E1212 16:27:18.418714 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"cert-manager-operator/cert-manager-operator.v1.18.0\" failed: requirements were not met" logger="UnhandledError" 2025-12-12T16:27:18.481633233+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:18Z","msg":"Reconciler 
error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cert-manager-operator.v1.18.0","namespace":"cert-manager-operator"},"namespace":"cert-manager-operator","name":"cert-manager-operator.v1.18.0","reconcileID":"e097c6c0-d625-4c87-b646-1d6ccd04bfdc","error":"Deployment.apps \"cert-manager-operator-controller-manager\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:18.538132063+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=qs5oq namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:18.630597263+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=l6lEZ namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:18.654885878+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=KO01U namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:18.661549706+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="requirements were not met" csv=cert-manager-operator.v1.18.0 id=GW2Vp namespace=cert-manager-operator phase=Pending 2025-12-12T16:27:18.661588947+00:00 stderr F E1212 16:27:18.661565 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"cert-manager-operator/cert-manager-operator.v1.18.0\" failed: requirements were not met" logger="UnhandledError" 2025-12-12T16:27:18.712244419+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=VMU/E namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:18.746451735+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=x0kxR namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:18.772433893+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=bnHR4 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:18.824440479+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="requirements were not met" csv=cert-manager-operator.v1.18.0 id=4XXKd namespace=cert-manager-operator phase=Pending 2025-12-12T16:27:18.824440479+00:00 stderr F E1212 16:27:18.823666 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"cert-manager-operator/cert-manager-operator.v1.18.0\" failed: requirements were not met" logger="UnhandledError" 2025-12-12T16:27:18.930356269+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=qZxqu namespace=openshift-operators phase=Installing strategy=deployment 
2025-12-12T16:27:18.977505273+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=FGPy+ namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:19.002669450+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=e9POE namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:19.039101012+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="requirements were not met" csv=cert-manager-operator.v1.18.0 id=jwBp7 namespace=cert-manager-operator phase=Pending 2025-12-12T16:27:19.039264776+00:00 stderr F E1212 16:27:19.039117 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"cert-manager-operator/cert-manager-operator.v1.18.0\" failed: requirements were not met" logger="UnhandledError" 2025-12-12T16:27:19.061169320+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=4qvt+ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:19.101693086+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=25p3j namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:19.112981991+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=zs79G namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:19.146097290+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Cm8CX namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:19.199877491+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=DG+60 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:19.201099162+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=6u5GK namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:19.271562935+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=63kD0 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:19.289439297+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=X0e2u namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:19.292432303+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="requirements were not met" csv=cert-manager-operator.v1.18.0 id=AphHO namespace=cert-manager-operator phase=Pending 2025-12-12T16:27:19.292534586+00:00 stderr F E1212 16:27:19.292493 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"cert-manager-operator/cert-manager-operator.v1.18.0\" failed: requirements were not met" logger="UnhandledError" 2025-12-12T16:27:19.336384384+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=WBe2O namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:19.371172055+00:00 stderr F 
time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=PZYnj namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:19.375151166+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=KcjX6 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:19.440401927+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=QD4JY namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:19.459527491+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=O/slB namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:19.465715138+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="requirements were not met" csv=cert-manager-operator.v1.18.0 id=nadst namespace=cert-manager-operator phase=Pending 2025-12-12T16:27:19.465768059+00:00 stderr F E1212 16:27:19.465740 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"cert-manager-operator/cert-manager-operator.v1.18.0\" failed: requirements were not met" logger="UnhandledError" 2025-12-12T16:27:19.498455576+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=vQNsA namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:19.531733149+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=OS4hz namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:19.538886130+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=MU0lY namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:21.980560435+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:21Z","msg":"Reconciler error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cert-manager-operator.v1.18.0","namespace":"cert-manager-operator"},"namespace":"cert-manager-operator","name":"cert-manager-operator.v1.18.0","reconcileID":"41802b5d-3ed8-46d5-8c01-cc4e533a16e0","error":"Deployment.apps \"cert-manager-operator-controller-manager\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:22.076287908+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:22Z","msg":"Reconciler 
error","controller":"operatorcondition","controllerGroup":"operators.coreos.com","controllerKind":"OperatorCondition","OperatorCondition":{"name":"cert-manager-operator.v1.18.0","namespace":"cert-manager-operator"},"namespace":"cert-manager-operator","name":"cert-manager-operator.v1.18.0","reconcileID":"1e723f52-8b84-449b-a9e1-2a8e8fb5d808","error":"Deployment.apps \"cert-manager-operator-controller-manager\" not found","stacktrace":"sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:353\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:22.185660686+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:22Z","logger":"controllers.operator","msg":"Could not update Operator status","request":{"name":"openshift-cert-manager-operator.cert-manager-operator"},"error":"Operation cannot be fulfilled on operators.operators.coreos.com \"openshift-cert-manager-operator.cert-manager-operator\": the object has been modified; please apply your changes to the latest version and try again","stacktrace":"github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators.(*OperatorReconciler).Reconcile\n\t/build/vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators/operator_controller.go:157\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:22.402214597+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=MIqd7 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:22.472233629+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=OV8XO namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:22.495195520+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=jdOuk namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:22.548100069+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="scheduling ClusterServiceVersion for install" csv=cert-manager-operator.v1.18.0 id=bGy6e namespace=cert-manager-operator phase=Pending 2025-12-12T16:27:22.549176866+00:00 stderr F I1212 16:27:22.548583 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"cert-manager-operator", 
Name:"cert-manager-operator.v1.18.0", UID:"14349ba5-a098-44c0-8a63-4f51701ecde7", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"42380", FieldPath:""}): type: 'Normal' reason: 'AllRequirementsMet' all requirements found, attempting install 2025-12-12T16:27:22.667488641+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=loNC7 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:22.693508129+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="No api or webhook descs to add CA to" 2025-12-12T16:27:22.695957551+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=w7ARf namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:22.717745773+00:00 stderr F I1212 16:27:22.717670 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"cert-manager-operator", Name:"cert-manager-operator.v1.18.0", UID:"14349ba5-a098-44c0-8a63-4f51701ecde7", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"42409", FieldPath:""}): type: 'Normal' reason: 'InstallSucceeded' waiting for install components to report healthy 2025-12-12T16:27:22.841770571+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:22Z","logger":"controllers.operator","msg":"Could not update Operator status","request":{"name":"openshift-cert-manager-operator.cert-manager-operator"},"error":"Operation cannot be fulfilled on operators.operators.coreos.com \"openshift-cert-manager-operator.cert-manager-operator\": the object has been modified; please apply your changes to the latest version and try again","stacktrace":"github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators.(*OperatorReconciler).Reconcile\n\t/build/vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators/operator_controller.go:157\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:22.870790536+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=3QfZC namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:22.909787842+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:22Z","logger":"controllers.operator","msg":"Could not update Operator status","request":{"name":"openshift-cert-manager-operator.cert-manager-operator"},"error":"Operation cannot be fulfilled on operators.operators.coreos.com \"openshift-cert-manager-operator.cert-manager-operator\": the object has been modified; please apply your changes to the latest version and try 
again","stacktrace":"github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators.(*OperatorReconciler).Reconcile\n\t/build/vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators/operator_controller.go:157\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:22.974013097+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=k+WSB namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:22.974436398+00:00 stderr F I1212 16:27:22.974395 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"cert-manager-operator", Name:"cert-manager-operator.v1.18.0", UID:"14349ba5-a098-44c0-8a63-4f51701ecde7", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"42415", FieldPath:""}): type: 'Normal' reason: 'InstallWaiting' installing: waiting for deployment cert-manager-operator-controller-manager to become ready: deployment "cert-manager-operator-controller-manager" not available: Deployment does not have minimum availability. 
2025-12-12T16:27:23.113253941+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=RJ3Al namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:23.195697918+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=mE+7K namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.205085996+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=5uXQM namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:23.214822802+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=ooixY namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.215363376+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ewRqR namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:23.260824916+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=yJ5/o namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.278902564+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=kvTCy namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:23.296983581+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=oSgG0 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:23.308014101+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=Cskeh namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.330162251+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=IGQoj namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:23.368961983+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=wpJkx namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.399288841+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=SUAQj namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:23.399535957+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=mFQxU namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:23.412821233+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=pm9hC namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.517253726+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=9rH60 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.537501149+00:00 stderr F 
time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=E+1LE namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:23.615727448+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=QoIO6 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:23.617816931+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=jWoVS namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.649595196+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=LD22u namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:23.717295589+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=dMB4m namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.735716865+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=0iJ3Z namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:23.753000983+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=Wonc+ namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:23.762653557+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=h72m5 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.774546378+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=6ge6J namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.793286702+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Mg+cx namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:23.823383734+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=GyA4d namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.832313310+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=ryrGe namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:23.838391634+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=a/vD7 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:23.847797952+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=rdnQg namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.882245914+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=GE9Rk namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.898762832+00:00 stderr F time="2025-12-12T16:27:23Z" level=info 
msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=5aDqp namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:23.926164615+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=3fHJ8 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:23.926373691+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=75M2u namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:23.942563260+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=PGV1f namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:23.979385162+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=qfm+Y namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.000514037+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=nK18d namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.010294915+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=0dhaG namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:24.018414090+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=gFpXQ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.042515020+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=nRjX9 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.062747382+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=cZZml namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.101860062+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=+atGS namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.112767288+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=WxS1n namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:24.120687248+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=alP6U namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.122830633+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=jdRiz namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.157050709+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=OGmhJ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.178046080+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" 
csv=cluster-observability-operator.v1.3.0 id=5xNmD namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.206633404+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=W1Mgb namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:24.209557138+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=t38rb namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.222819213+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=AmJLO namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.265131054+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=t2kw6 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.281970400+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ZxdZl namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.287708525+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=XcwGK namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:24.301934366+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=4INah namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.317080089+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=v/eSz namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.330119909+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=tYU0H namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.369061734+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=vtiaF namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.371944267+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=IyQd/ namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:24.380903674+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=um+zT namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.388055655+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=yMRi8 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.478012822+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=hYZNg namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.668930294+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 
id=HmIEK namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.675369547+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=sfo1h namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:24.694152772+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=NL19Y namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.714832975+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=HDb8B namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.731377934+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=egZuo namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.773311965+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=kQhR7 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.782418566+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=aIzWK namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:24.790993673+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=XueEb namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.791606059+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=tMfUG namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.831076927+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=z85TP namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.854989983+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Hib/Q namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.871248244+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=tElcS namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:24.888991033+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=H5xsL namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.913401301+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=93pXI namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:24.949996577+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=bq2k5 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:24.969836529+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=jVQ78 namespace=openshift-operators 
phase=Installing strategy=deployment 2025-12-12T16:27:24.975818961+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=d9M3B namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:24.986772008+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=fEuWV namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.006244341+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=OYiWJ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.024416921+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=46AHo namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.058280108+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=7EeJT namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.078203072+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=E+a38 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:25.082908181+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=OlIKx namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.087091487+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=I6YLN namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.120476152+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=xZ8lW namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.135407860+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=BSLMw namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.159739446+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=8pnWB namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:25.171765860+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=gP2rp namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.186031491+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=09hwd namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.221927989+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=qIRI/ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.230630690+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=lI4/z namespace=service-telemetry phase=Installing strategy=deployment 
2025-12-12T16:27:25.247296861+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Kximw namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.249570999+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=NJ0bH namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.284999466+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=UTsBA namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.303821862+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=R11fX namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.324698590+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=NEk4s namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:25.334287283+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=T7tlN namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.349661652+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=VDfvO namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.463419671+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=D+Lyj namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.487497161+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=P7FWF namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.527669257+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=tsVWO namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:25.544227857+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=PoaEr namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.573393865+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=0qyYG namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.626009366+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=2COsN namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.644457973+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=IOnBL namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:25.652240820+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=PLvYK namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.657555115+00:00 
stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=PFU3n namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.693141385+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=mtKkC namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.719649526+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=SGtXq namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.786239542+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=ZaeCF namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:25.818622551+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=GaGgT namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.854835918+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=BgOtb namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.908435364+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=2jeci namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.940278940+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=/43bd namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:25.956237344+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=0VWyp namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:25.970354091+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=NeCrt namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:25.990686406+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=suTZI namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.015127104+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Mku23 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.085940937+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=6lCl1 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.098761861+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=rUal5 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:26.110426976+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ulqrz namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.110621211+00:00 stderr F time="2025-12-12T16:27:26Z" 
level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=JQ9XD namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.147891515+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=WR3Wz namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.169265895+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ioSFl namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.209682198+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=/JWH0 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.221229041+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=LApXh namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:26.232730312+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=HlUJI namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.234085746+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=jn60P namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.272214321+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=ZXyeF namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.294467494+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Agd87 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.340846758+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=vPMZj namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.343040824+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=ne6m3 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:26.355392586+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=9pu+/ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.362427434+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=0OHUh namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.398161179+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=n3qi+ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.418325499+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=cvy8m namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.435841402+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" 
csv=elasticsearch-eck-operator-certified.v3.2.0 id=J7NM/ namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:26.446871321+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=fqBTG namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.458394402+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=S8efK namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.480051860+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=LpQYS namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.528159758+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=Ay60d namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.553055228+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=NWY8k namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:26.559374828+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=FgaCJ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.565053541+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=O9ua7 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.599867032+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=P6pPU namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.620212097+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=GedXY namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.659264146+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=pPFSv namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.660977279+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=iC98g namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:26.674247195+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=+SOqF namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.682401901+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=BH0EH namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.722076685+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=KaRMw namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.746512074+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Mr19G 
namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.771013944+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=/ZTTy namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:26.782560456+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=8iZNE namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.800731576+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=VHY2S namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.837912807+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=BWbTm namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.859233767+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=m6K9A namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:26.862277604+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=6WE7t namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.871084177+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=KxMT5 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.901552318+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=wiJOm namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.921553234+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=JJVYi namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:26.967369494+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=Va+hU namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:26.967563578+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=gLjLK namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:26.985984925+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=elG9z namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.017309317+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=JK6Wb namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.034226126+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=BUn3R namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.041674294+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=+gqIb namespace=service-telemetry 
phase=Installing strategy=deployment 2025-12-12T16:27:27.053358290+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=pkgvO namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.073928630+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=6rXNM namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.093585058+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=csUys namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.137013877+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=CE3TX namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.146834236+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=WPUh5 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:27.155397792+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=7TdtE namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.156854049+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=BYYbx namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.191500746+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=YGFSp namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.207923382+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=bgZck namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.239379518+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=ectWI namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:27.239519731+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=2VIC2 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.256550422+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=iVnXq namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.301854179+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=KxUcR namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.319056604+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=/3lkD namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.327753644+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=m4xFw namespace=service-telemetry phase=Installing strategy=deployment 
2025-12-12T16:27:27.337955373+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=EA9LS namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.356608405+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=sougb namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.373898632+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=gyc3V namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.413889354+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=Y/Dzm namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.425131219+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=Riz2S namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:27.519248531+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=ylmmI namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.526618797+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=zpcZv namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.706745226+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=H7r0l namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.722644809+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=w6ySG namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.751454808+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=jp8dy namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:27.768280914+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=1hYB0 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.787848849+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=wOBDv namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.818005042+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=JLR9I namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.864880569+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=3tOJ4 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.876117323+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=gtOX0 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:27.884453724+00:00 stderr F 
time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=fx/GL namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.886404883+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=S+ZST namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.927149544+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=4NOMa namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.947196072+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Nockj namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:27.980241278+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=nLCF5 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:27.990838026+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=u2LpP namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:27.996620013+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=lFMYO namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.033118426+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=zKjzc namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.052070886+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=13VTg namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.074490143+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=4XGD/ namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:28.085959784+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=mkqsH namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.102324598+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=VxF47 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.144728721+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=7gEgF namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.168077582+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=mEzkn namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.172513534+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=fJVh3 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:28.182123227+00:00 stderr F time="2025-12-12T16:27:28Z" level=info 
msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=pRUsA namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.204087573+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=hGV8Y namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.220484618+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=1xV1Y namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.305574672+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=g89/s namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.309304526+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=ttXAP namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:28.318126070+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=ZHgrM namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.323934127+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=JSC9n namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.363740954+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=ugOm/ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.387821053+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=aWkGF namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.423498496+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=CiXng namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:28.431224822+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=HKTYZ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.451732461+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=pK5xQ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.487334832+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=LtG2h namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.505009379+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=7hNZn namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.519530307+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=0MAUj namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:28.532617418+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" 
csv=cert-manager-operator.v1.18.0 id=CPXBx namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.560151385+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=E6+J7 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.593975181+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=GT0Ij namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.615039134+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=hN6FA namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.622930334+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=I9epr namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:28.632912777+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=tbLTQ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.654073582+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=CyGR/ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.675047173+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=BjQMq namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.710843289+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=jPQ8a namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.711715151+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=VcB8K namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:28.730385183+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=egwO3 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.743379032+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=xuiiJ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.784413001+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=oxCr1 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.810833169+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=cAnV6 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.826348142+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=j0yu6 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:28.838521820+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=hEnBY 
namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.855617393+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=1bWGA namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.895444151+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=+kEEY namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.925863961+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=rTKvq namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:28.926862096+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=kqi9f namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:28.934645963+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=7dg+/ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.958077736+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=Ke0IP namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:28.974894452+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=kUJrI namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.010824441+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=+0VSE namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.014333340+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=R3uJD namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:29.022556118+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=GYE/t namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.027238906+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=SaDOW namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.063695639+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=05jPs namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.088522557+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=k2L52 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.101917886+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=brJKc namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:29.110406481+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=5um/d namespace=cert-manager-operator 
phase=Installing strategy=deployment 2025-12-12T16:27:29.120636490+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=mTTS5 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.140301418+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=5Sbj5 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.174222516+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=LNsez namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.182785723+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=o81SM namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:29.192828967+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=LtWC8 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.193559036+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=PHIuy namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.238423961+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=ZJOxQ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.257304909+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=94azX namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.268150654+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=Vnp2F namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:29.277276965+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=m/ZQ5 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.288803056+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=TNJb6 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.309511340+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=eCIRB namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.341967632+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=/WTgd namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.359678780+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=ihUD+ namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:29.360058470+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=zwyBp namespace=openshift-operators phase=Installing strategy=deployment 
2025-12-12T16:27:29.368649007+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=eoosN namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.472684750+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=9gh7d namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.472684750+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ekLk9 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.472684750+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=G+wLq namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:29.472684750+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=RAwiS namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.472684750+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=hIQG6 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.502870284+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=7hyxx namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.533707595+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=fK25b namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.550219452+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=2aR5Z namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:29.633517681+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=4PUle namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.745498955+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=na8y5 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.782855730+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=25ax0 namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:29.790222697+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=/ZIZ4 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.827542801+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=5HUzD namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.878670585+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=EzM7t namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:29.976365128+00:00 stderr F 
time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=mslFW namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:29.982767700+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=qk2S+ namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:29.996935568+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=k8WKM namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:30.016222526+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=OAn2B namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:30.041452954+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=fsbsx namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:30.171757912+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=s0k4B namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:30.473473138+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=hF4vS namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:30.482552848+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=L5qof namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:30.504409191+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=mVtxc namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:30.524656103+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=afIEb namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:30.567916398+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=L1WHl namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:30.617913744+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=FFlub namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:30.948295585+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=hc6yJ namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:30.968272451+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=0xPFo namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:30.972225981+00:00 stderr F I1212 16:27:30.969066 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operators", Name:"cluster-observability-operator.v1.3.0", UID:"facdaaa3-7f6a-4748-a258-3458617687f9", APIVersion:"operators.coreos.com/v1alpha1", 
ResourceVersion:"41728", FieldPath:""}): type: 'Normal' reason: 'InstallWaiting' installing: waiting for deployment observability-operator to become ready: deployment "observability-operator" not available: Deployment does not have minimum availability. 2025-12-12T16:27:31.068665201+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=438qJ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:31.247372674+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=zv5FG namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:31.580901986+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=s/ytJ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:31.670548744+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=e/QWn namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:31.856856450+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=t1b+2 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:31.856985683+00:00 stderr F I1212 16:27:31.856942 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operators", Name:"cluster-observability-operator.v1.3.0", UID:"facdaaa3-7f6a-4748-a258-3458617687f9", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"42553", FieldPath:""}): type: 'Normal' reason: 'InstallWaiting' installing: waiting for deployment perses-operator to become ready: deployment "perses-operator" not available: Deployment does not have minimum availability. 
2025-12-12T16:27:31.963033247+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=a5/6h namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:32.118013559+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:32Z","logger":"controllers.operator","msg":"Could not update Operator status","request":{"name":"cluster-observability-operator.openshift-operators"},"error":"Operation cannot be fulfilled on operators.operators.coreos.com \"cluster-observability-operator.openshift-operators\": the object has been modified; please apply your changes to the latest version and try again","stacktrace":"github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators.(*OperatorReconciler).Reconcile\n\t/build/vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators/operator_controller.go:157\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:27:32.145258729+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="install strategy successful" csv=elasticsearch-eck-operator-certified.v3.2.0 id=EiSVo namespace=service-telemetry phase=Installing strategy=deployment 2025-12-12T16:27:32.145538046+00:00 stderr F I1212 16:27:32.145450 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"service-telemetry", Name:"elasticsearch-eck-operator-certified.v3.2.0", UID:"162fd4cf-95b5-4bf9-b0ed-a4135c0a23a8", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"42095", FieldPath:""}): type: 'Normal' reason: 'InstallSucceeded' install strategy completed with no errors 2025-12-12T16:27:32.251034806+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=VL4nC namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:32.263035930+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ZNH3v namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:32.385988691+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=VkxFa namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:32.589332568+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=o1+aI namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:32.685334618+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=o1UA4 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:32.801412325+00:00 stderr F 
time="2025-12-12T16:27:32Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=XL1IS namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:32.896248926+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=qi45A namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:33.292258778+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=knAFP namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:33.294507945+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=g/AKq namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:33.388578426+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=a1xLz namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:33.491246434+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=MNxeZ namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:33.588881265+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=vf/G3 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:33.592137788+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=IXcj+ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:33.688695140+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=Tj0md namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:33.796762395+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=BnhoE namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:33.822223640+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=hfORO namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:33.899221269+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=RPIaw namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:34.001663061+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=zy/Jc namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:34.098080101+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=HZkoq namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:34.200775221+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=NAqjw namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:34.232253167+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" 
csv=cluster-observability-operator.v1.3.0 id=EMjYv namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:34.300261398+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=60JbM namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:34.400276670+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=RPRXO namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:34.430143106+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Fm/D6 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:34.500813314+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=C4KQa namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:34.603936714+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=Q6PCp namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:34.705533905+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=T/YYD namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:34.733233696+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ZHUSi namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:34.802905100+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=KZL3Z namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:34.903973758+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=RVdw6 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:35.008060402+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=j5SrY namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:35.035647530+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=EY2hs namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:35.104107843+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=Sg6vX namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:35.204126314+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=eQegK namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:35.235114478+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=dHGaE namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:35.304423473+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=jU8NP namespace=cert-manager-operator 
phase=Installing strategy=deployment 2025-12-12T16:27:35.407367808+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=F8f98 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:35.435537721+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=1g37P namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:35.435757496+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=y55jn namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:35.564260969+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=bcNXn namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:35.666934647+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=wlT48 namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:35.741980377+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=2vkcb namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:35.761298416+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=FouRn namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:35.863665536+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=lFwve namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:35.864629241+00:00 stderr F I1212 16:27:35.864582 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"cert-manager-operator", Name:"cert-manager-operator.v1.18.0", UID:"14349ba5-a098-44c0-8a63-4f51701ecde7", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"42430", FieldPath:""}): type: 'Normal' reason: 'InstallSucceeded' install strategy completed with no errors 2025-12-12T16:27:35.962299133+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="install strategy successful" csv=cert-manager-operator.v1.18.0 id=sv+NL namespace=cert-manager-operator phase=Installing strategy=deployment 2025-12-12T16:27:35.962598160+00:00 stderr F I1212 16:27:35.962559 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"cert-manager-operator", Name:"cert-manager-operator.v1.18.0", UID:"14349ba5-a098-44c0-8a63-4f51701ecde7", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"42430", FieldPath:""}): type: 'Normal' reason: 'InstallSucceeded' install strategy completed with no errors 2025-12-12T16:27:35.978716858+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cert-manager-operator.v1.18.0\": the object has been modified; please apply your changes to the latest version and try again" csv=cert-manager-operator.v1.18.0 id=TlKgV namespace=cert-manager-operator phase=Installing 2025-12-12T16:27:35.978761089+00:00 stderr F E1212 16:27:35.978722 1 queueinformer_operator.go:312] "Unhandled Error" err="sync 
\"cert-manager-operator/cert-manager-operator.v1.18.0\" failed: error updating ClusterServiceVersion status: Operation cannot be fulfilled on clusterserviceversions.operators.coreos.com \"cert-manager-operator.v1.18.0\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:36.038106701+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=dufKZ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:36.396113022+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Mur1k namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:36.789747724+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=VU8ne namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:36.835432291+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=yGCYY namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:36.892328831+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=XRIUu namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:36.941471644+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=5/K3B namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:36.980598665+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=j2Jp5 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:37.035133255+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ueCeQ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:37.078874942+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=u7gBZ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:37.178282678+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=6w4RK namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:37.222600848+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=1FalL namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:37.277547009+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=pASDS namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:37.434636745+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=+KGa/ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:37.491611837+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="install strategy successful" 
csv=cluster-observability-operator.v1.3.0 id=XKK2h namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:37.586377695+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=nVlt2 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:37.677362878+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=vMEny namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:37.789047304+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=jBSAN namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:37.977900724+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Um133 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.095065349+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=+j2fl namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.143280910+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=MwVih namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.186894883+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=XkH4O namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.231284187+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=+xUnQ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.288700350+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=10XMB namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.376007560+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=DeKA2 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.432898410+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=3Jjcb namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.487047520+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=nOJuN namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.542963255+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=bkbDK namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.602627805+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=aC2m3 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.653298258+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" 
csv=cluster-observability-operator.v1.3.0 id=bDc2f namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.700230955+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=lP43G namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.760341987+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=GI2s0 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.843371898+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Dk51E namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:38.993275052+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=DTccM namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.115274550+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ZDqr1 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.168680051+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=RZz85 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.223167410+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=G9zzL namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.263291436+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=OyX8w namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.316473632+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=61oNy namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.366984070+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=k18Nu namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.426259480+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=nafoD namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.518859824+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=hBYOs namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.568799928+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=+wIJX namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.613490089+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=8Cbb/ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.681917331+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" 
csv=cluster-observability-operator.v1.3.0 id=DUnn6 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.829345332+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=K+1La namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.885992156+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=5uLJK namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:39.986139190+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=l2S5g namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.032683448+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=MI/bV namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.081251677+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=bXT3M namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.235928212+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=JNCPy namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.352642676+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=0rOjX namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.455058568+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=hadbL namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.567681048+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=XDs6x namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.617733985+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=9cKmi namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.680684678+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=iir/r namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.733387052+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=/WWBl namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.781760215+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=olB4i namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.832103260+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=S8YWG namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.888302052+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" 
csv=cluster-observability-operator.v1.3.0 id=OSD7X namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.944698899+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Ap4xG namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:40.999798794+00:00 stderr F time="2025-12-12T16:27:40Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=wEJiL namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:41.048043295+00:00 stderr F time="2025-12-12T16:27:41Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=tl38O namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:41.105423517+00:00 stderr F time="2025-12-12T16:27:41Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=PXzOh namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:41.209054710+00:00 stderr F time="2025-12-12T16:27:41Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=y5Xfl namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:41.402929867+00:00 stderr F time="2025-12-12T16:27:41Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Eg1AC namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:41.445361850+00:00 stderr F time="2025-12-12T16:27:41Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Misj2 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:41.508550360+00:00 stderr F time="2025-12-12T16:27:41Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=1B9yA namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:41.611491045+00:00 stderr F time="2025-12-12T16:27:41Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=kmiAJ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:41.802454398+00:00 stderr F time="2025-12-12T16:27:41Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=o6Flv namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:41.846620936+00:00 stderr F time="2025-12-12T16:27:41Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=jZhT1 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:41.950310920+00:00 stderr F time="2025-12-12T16:27:41Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=pmCCh namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.082299050+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=KEa4b namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.173734345+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=NUJdm namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.214011254+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" 
csv=cluster-observability-operator.v1.3.0 id=+vubR namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.256224153+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=sauZC namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.311149403+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=x+V4n namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.351760731+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=MTeGt namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.393261141+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=+9488 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.443252647+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ZlUFV namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.557692464+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=DF7sf namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.611234159+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=am1++ namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.661713507+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=xcPMV namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.711706232+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=Cyh8S namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.753480839+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=rO6PD namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.810349849+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=I5Z3e namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:42.855261806+00:00 stderr F time="2025-12-12T16:27:42Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=BePL7 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:43.008610477+00:00 stderr F time="2025-12-12T16:27:43Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=SlR+9 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:43.079147322+00:00 stderr F time="2025-12-12T16:27:43Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=3Al6O namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:43.130242425+00:00 stderr F time="2025-12-12T16:27:43Z" level=info msg="install strategy successful" 
csv=cluster-observability-operator.v1.3.0 id=n9GFR namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:43.177135162+00:00 stderr F time="2025-12-12T16:27:43Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=NJmeP namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:43.240901866+00:00 stderr F time="2025-12-12T16:27:43Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=asiv5 namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:43.296389970+00:00 stderr F time="2025-12-12T16:27:43Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=HiCst namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:43.345950835+00:00 stderr F time="2025-12-12T16:27:43Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=ngrHS namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:43.398464534+00:00 stderr F time="2025-12-12T16:27:43Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=YCPlc namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:43.461858638+00:00 stderr F time="2025-12-12T16:27:43Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=QFhDW namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:43.638764865+00:00 stderr F time="2025-12-12T16:27:43Z" level=info msg="install strategy successful" csv=cluster-observability-operator.v1.3.0 id=KGouw namespace=openshift-operators phase=Installing strategy=deployment 2025-12-12T16:27:43.639011731+00:00 stderr F I1212 16:27:43.638947 1 event.go:377] Event(v1.ObjectReference{Kind:"ClusterServiceVersion", Namespace:"openshift-operators", Name:"cluster-observability-operator.v1.3.0", UID:"facdaaa3-7f6a-4748-a258-3458617687f9", APIVersion:"operators.coreos.com/v1alpha1", ResourceVersion:"42621", FieldPath:""}): type: 'Normal' reason: 'InstallSucceeded' install strategy completed with no errors 2025-12-12T16:27:44.012777131+00:00 stderr F {"level":"error","ts":"2025-12-12T16:27:44Z","logger":"controllers.operator","msg":"Could not update Operator status","request":{"name":"cluster-observability-operator.openshift-operators"},"error":"Operation cannot be fulfilled on operators.operators.coreos.com \"cluster-observability-operator.openshift-operators\": the object has been modified; please apply your changes to the latest version and try 
again","stacktrace":"github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators.(*OperatorReconciler).Reconcile\n\t/build/vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators/operator_controller.go:157\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} 2025-12-12T16:29:23.922896936+00:00 stderr F {"level":"error","ts":"2025-12-12T16:29:23Z","logger":"controllers.operator","msg":"Could not update Operator status","request":{"name":"elasticsearch-eck-operator-certified.service-telemetry"},"error":"Operation cannot be fulfilled on operators.operators.coreos.com \"elasticsearch-eck-operator-certified.service-telemetry\": the object has been modified; please apply your changes to the latest version and try again","stacktrace":"github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators.(*OperatorReconciler).Reconcile\n\t/build/vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/controller/operators/operator_controller.go:157\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/build/vendor/sigs.k8s.io/controller-runtime/pkg/internal/controller/controller.go:202"} ././@LongLink0000644000000000000000000000026100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-86648f486b-wbj29_18744739-d26e-4056-a036-656151fcc824/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_ob0000755000175000017500000000000015117043043033144 5ustar zuulzuul././@LongLink0000644000000000000000000000030500000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-86648f486b-wbj29_18744739-d26e-4056-a036-656151fcc824/prometheus-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_ob0000755000175000017500000000000015117043062033145 5ustar zuulzuul././@LongLink0000644000000000000000000000031200000000000011577 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-86648f486b-wbj29_18744739-d26e-4056-a036-656151fcc824/prometheus-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_ob0000644000175000017500000004535215117043043033157 0ustar zuulzuul2025-12-12T16:27:30.613171584+00:00 stdout F ts=2025-12-12T16:27:30.613002209Z level=info caller=/workspace/cmd/operator/main.go:219 msg="Starting Prometheus Operator" version="(version=, branch=, revision=unknown)" build_context="(go=go1.24.6 (Red Hat 1.24.6-1.el9_6), platform=linux/amd64, user=, date=, tags=unknown)" feature_gates="PrometheusAgentDaemonSet=false,PrometheusShardRetentionPolicy=false,PrometheusTopologySharding=false,StatusForConfigurationResources=false" 2025-12-12T16:27:30.613171584+00:00 stdout F ts=2025-12-12T16:27:30.613102922Z level=info caller=/workspace/cmd/operator/main.go:220 msg="Operator's configuration" watch_referenced_objects_in_all_namespaces=false controller_id="" enable_config_reloader_probes=false 2025-12-12T16:27:30.698250687+00:00 stdout F ts=2025-12-12T16:27:30.697736934Z level=info caller=/workspace/internal/goruntime/cpu.go:27 msg="Updating GOMAXPROCS=1: using minimum allowed GOMAXPROCS" 2025-12-12T16:27:30.698320218+00:00 stdout F ts=2025-12-12T16:27:30.698269317Z level=info caller=/workspace/cmd/operator/main.go:234 msg="Namespaces filtering configuration " config="{allow_list=\"\",deny_list=\"\",prometheus_allow_list=\"\",alertmanager_allow_list=\"\",alertmanagerconfig_allow_list=\"\",thanosruler_allow_list=\"\"}" 2025-12-12T16:27:30.709535682+00:00 stdout F ts=2025-12-12T16:27:30.709276696Z level=info caller=/workspace/cmd/operator/main.go:275 msg="connection established" kubernetes_version=1.33.5 2025-12-12T16:27:30.804459445+00:00 stdout F ts=2025-12-12T16:27:30.804050364Z level=info caller=/workspace/cmd/operator/main.go:360 msg="Kubernetes API capabilities" endpointslices=true 2025-12-12T16:27:31.098479806+00:00 stdout F ts=2025-12-12T16:27:31.098331172Z level=info caller=/workspace/pkg/server/server.go:293 msg="starting insecure server" address=[::]:8080 2025-12-12T16:27:31.098634490+00:00 stdout F ts=2025-12-12T16:27:31.098589259Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheus 2025-12-12T16:27:31.098681381+00:00 stdout F ts=2025-12-12T16:27:31.09864875Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheusagent 2025-12-12T16:27:31.098709302+00:00 stdout F ts=2025-12-12T16:27:31.098687061Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=alertmanager 2025-12-12T16:27:31.098760943+00:00 stdout F ts=2025-12-12T16:27:31.098732502Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=thanos 2025-12-12T16:27:31.697166518+00:00 stdout F ts=2025-12-12T16:27:31.696664715Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheusagent 2025-12-12T16:27:31.697166518+00:00 stdout F ts=2025-12-12T16:27:31.696750598Z level=info 
caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheusagent 2025-12-12T16:27:31.697166518+00:00 stdout F ts=2025-12-12T16:27:31.696759228Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheusagent 2025-12-12T16:27:31.697166518+00:00 stdout F ts=2025-12-12T16:27:31.696770058Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheusagent 2025-12-12T16:27:31.697166518+00:00 stdout F ts=2025-12-12T16:27:31.696777228Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheusagent 2025-12-12T16:27:31.697166518+00:00 stdout F ts=2025-12-12T16:27:31.696786638Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheusagent 2025-12-12T16:27:31.697166518+00:00 stdout F ts=2025-12-12T16:27:31.696792689Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheusagent 2025-12-12T16:27:31.697166518+00:00 stdout F ts=2025-12-12T16:27:31.696800939Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheusagent 2025-12-12T16:27:31.697166518+00:00 stdout F ts=2025-12-12T16:27:31.696808029Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheusagent 2025-12-12T16:27:31.697166518+00:00 stdout F ts=2025-12-12T16:27:31.696817349Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheusagent 2025-12-12T16:27:31.697166518+00:00 stdout F ts=2025-12-12T16:27:31.69684417Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=alertmanager 2025-12-12T16:27:31.697166518+00:00 stdout F ts=2025-12-12T16:27:31.696877881Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=alertmanager 2025-12-12T16:27:31.707396087+00:00 stdout F ts=2025-12-12T16:27:31.707304035Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheus 2025-12-12T16:27:31.707396087+00:00 stdout F ts=2025-12-12T16:27:31.707368766Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheus 2025-12-12T16:27:31.707396087+00:00 stdout F ts=2025-12-12T16:27:31.707376367Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheus 2025-12-12T16:27:31.707396087+00:00 stdout F ts=2025-12-12T16:27:31.707386687Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches 
to sync" controller=prometheus 2025-12-12T16:27:31.707437878+00:00 stdout F ts=2025-12-12T16:27:31.707392457Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheus 2025-12-12T16:27:31.707437878+00:00 stdout F ts=2025-12-12T16:27:31.707402637Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheus 2025-12-12T16:27:31.707437878+00:00 stdout F ts=2025-12-12T16:27:31.707408317Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheus 2025-12-12T16:27:31.707437878+00:00 stdout F ts=2025-12-12T16:27:31.707417418Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheus 2025-12-12T16:27:31.707437878+00:00 stdout F ts=2025-12-12T16:27:31.707423058Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheus 2025-12-12T16:27:31.707437878+00:00 stdout F ts=2025-12-12T16:27:31.707432228Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheus 2025-12-12T16:27:31.707448708+00:00 stdout F ts=2025-12-12T16:27:31.707437698Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheus 2025-12-12T16:27:31.707466889+00:00 stdout F ts=2025-12-12T16:27:31.707448358Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheus 2025-12-12T16:27:31.903342366+00:00 stdout F ts=2025-12-12T16:27:31.900099274Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=thanos 2025-12-12T16:27:31.903342366+00:00 stdout F ts=2025-12-12T16:27:31.900245718Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=thanos 2025-12-12T16:27:31.903342366+00:00 stdout F ts=2025-12-12T16:27:31.900278519Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=thanos 2025-12-12T16:27:31.903342366+00:00 stdout F ts=2025-12-12T16:27:31.900293649Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=thanos 2025-12-12T16:27:31.903342366+00:00 stdout F ts=2025-12-12T16:27:31.900301409Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=thanos 2025-12-12T16:27:31.903342366+00:00 stdout F ts=2025-12-12T16:27:31.90031922Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=thanos 2025-12-12T16:27:31.996591476+00:00 stdout F ts=2025-12-12T16:27:31.996434922Z level=info 
caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=alertmanager 2025-12-12T16:27:31.996591476+00:00 stdout F ts=2025-12-12T16:27:31.996535255Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=alertmanager 2025-12-12T16:27:32.099756037+00:00 stdout F ts=2025-12-12T16:27:32.099196283Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=thanos 2025-12-12T16:27:32.099756037+00:00 stdout F ts=2025-12-12T16:27:32.099702126Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=thanos 2025-12-12T16:27:32.703479377+00:00 stdout F ts=2025-12-12T16:27:32.703260171Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheusagent 2025-12-12T16:27:32.703479377+00:00 stdout F ts=2025-12-12T16:27:32.703354354Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheusagent 2025-12-12T16:27:32.923145476+00:00 stdout F ts=2025-12-12T16:27:32.921848213Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheus 2025-12-12T16:27:32.923145476+00:00 stdout F ts=2025-12-12T16:27:32.922188232Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheus 2025-12-12T16:27:33.017798832+00:00 stdout F ts=2025-12-12T16:27:33.017475014Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=thanos 2025-12-12T16:27:33.017798832+00:00 stdout F ts=2025-12-12T16:27:33.017542435Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=thanos 2025-12-12T16:27:33.017798832+00:00 stdout F ts=2025-12-12T16:27:33.017550846Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=thanos 2025-12-12T16:27:33.017798832+00:00 stdout F ts=2025-12-12T16:27:33.017560726Z level=info caller=/workspace/pkg/thanos/operator.go:317 msg="successfully synced all caches" component=thanos-controller 2025-12-12T16:27:33.099364506+00:00 stdout F ts=2025-12-12T16:27:33.098844933Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=alertmanager 2025-12-12T16:27:33.099364506+00:00 stdout F ts=2025-12-12T16:27:33.098938695Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=alertmanager 2025-12-12T16:27:33.196741031+00:00 stdout F ts=2025-12-12T16:27:33.196264819Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheus 2025-12-12T16:27:33.196741031+00:00 stdout F 
ts=2025-12-12T16:27:33.196402592Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheus 2025-12-12T16:27:33.196741031+00:00 stdout F ts=2025-12-12T16:27:33.196413072Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheus 2025-12-12T16:27:33.196741031+00:00 stdout F ts=2025-12-12T16:27:33.196427773Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheus 2025-12-12T16:27:33.196741031+00:00 stdout F ts=2025-12-12T16:27:33.196435403Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheus 2025-12-12T16:27:33.196741031+00:00 stdout F ts=2025-12-12T16:27:33.196455733Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheus 2025-12-12T16:27:33.196741031+00:00 stdout F ts=2025-12-12T16:27:33.196462774Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheus 2025-12-12T16:27:33.196741031+00:00 stdout F ts=2025-12-12T16:27:33.196472204Z level=info caller=/workspace/pkg/prometheus/server/operator.go:446 msg="successfully synced all caches" component=prometheus-controller 2025-12-12T16:27:33.204905717+00:00 stdout F ts=2025-12-12T16:27:33.204813495Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheusagent 2025-12-12T16:27:33.204905717+00:00 stdout F ts=2025-12-12T16:27:33.204877027Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheusagent 2025-12-12T16:27:33.204905717+00:00 stdout F ts=2025-12-12T16:27:33.204884507Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheusagent 2025-12-12T16:27:33.204905717+00:00 stdout F ts=2025-12-12T16:27:33.204892457Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheusagent 2025-12-12T16:27:33.204905717+00:00 stdout F ts=2025-12-12T16:27:33.204898277Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheusagent 2025-12-12T16:27:33.204962729+00:00 stdout F ts=2025-12-12T16:27:33.204928668Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=prometheusagent 2025-12-12T16:27:33.204962729+00:00 stdout F ts=2025-12-12T16:27:33.204934678Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=prometheusagent 2025-12-12T16:27:33.204962729+00:00 stdout F ts=2025-12-12T16:27:33.204942988Z level=info caller=/workspace/pkg/prometheus/agent/operator.go:490 msg="successfully synced all caches" 
component=prometheusagent-controller 2025-12-12T16:27:33.310248423+00:00 stdout F ts=2025-12-12T16:27:33.307636907Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=alertmanager 2025-12-12T16:27:33.310248423+00:00 stdout F ts=2025-12-12T16:27:33.30775858Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=alertmanager 2025-12-12T16:27:33.310248423+00:00 stdout F ts=2025-12-12T16:27:33.307768551Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=alertmanager 2025-12-12T16:27:33.310248423+00:00 stdout F ts=2025-12-12T16:27:33.307782991Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=alertmanager 2025-12-12T16:27:33.310248423+00:00 stdout F ts=2025-12-12T16:27:33.307790671Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=alertmanager 2025-12-12T16:27:33.310248423+00:00 stdout F ts=2025-12-12T16:27:33.307802641Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:349 msg="Waiting for caches to sync" controller=alertmanager 2025-12-12T16:27:33.310248423+00:00 stdout F ts=2025-12-12T16:27:33.307809752Z level=info caller=/cachi2/output/deps/gomod/pkg/mod/k8s.io/client-go@v0.34.1/tools/cache/shared_informer.go:356 msg="Caches are synced" controller=alertmanager 2025-12-12T16:27:33.310248423+00:00 stdout F ts=2025-12-12T16:27:33.307822882Z level=info caller=/workspace/pkg/alertmanager/operator.go:369 msg="successfully synced all caches" component=alertmanager-controller ././@LongLink0000644000000000000000000000026000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-547dbd544d-4vhrb_9a0e237f-ebef-42b0-ad96-926e15307914/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043043033075 5ustar zuulzuul././@LongLink0000644000000000000000000000030500000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-547dbd544d-4vhrb_9a0e237f-ebef-42b0-ad96-926e15307914/marketplace-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043062033076 5ustar zuulzuul././@LongLink0000644000000000000000000000031200000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-547dbd544d-4vhrb_9a0e237f-ebef-42b0-ad96-926e15307914/marketplace-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000644000175000017500000003420315117043043033101 0ustar zuulzuul2025-12-12T16:20:37.375551458+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="Go Version: go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 2025-12-12T16:20:37.375551458+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="Go OS/Arch: linux/amd64" 2025-12-12T16:20:37.375551458+00:00 
stderr F time="2025-12-12T16:20:37Z" level=info msg="[metrics] Registering marketplace metrics" 2025-12-12T16:20:37.375551458+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[metrics] Serving marketplace metrics" 2025-12-12T16:20:37.376437201+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="TLS keys set, using https for metrics" 2025-12-12T16:20:37.458245207+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="Config API is available" 2025-12-12T16:20:37.458245207+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="setting up scheme" 2025-12-12T16:20:37.511031052+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="setting up health checks" 2025-12-12T16:20:37.512840789+00:00 stderr F I1212 16:20:37.512746 1 leaderelection.go:257] attempting to acquire leader lease openshift-marketplace/marketplace-operator-lock... 2025-12-12T16:20:37.526434086+00:00 stderr F I1212 16:20:37.526325 1 leaderelection.go:271] successfully acquired lease openshift-marketplace/marketplace-operator-lock 2025-12-12T16:20:37.526599790+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="became leader: marketplace-operator-547dbd544d-4vhrb" 2025-12-12T16:20:37.526599790+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="registering components" 2025-12-12T16:20:37.526599790+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="setting up the marketplace clusteroperator status reporter" 2025-12-12T16:20:37.539789466+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="setting up controllers" 2025-12-12T16:20:37.540051983+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="starting the marketplace clusteroperator status reporter" 2025-12-12T16:20:37.540051983+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="starting manager" 2025-12-12T16:20:37.540278509+00:00 stderr F {"level":"info","ts":"2025-12-12T16:20:37Z","msg":"starting server","name":"pprof","addr":"[::]:6060"} 2025-12-12T16:20:37.543492903+00:00 stderr F {"level":"info","ts":"2025-12-12T16:20:37Z","msg":"Starting EventSource","controller":"catalogsource-controller","controllerGroup":"operators.coreos.com","controllerKind":"CatalogSource","source":"kind source: *v1alpha1.CatalogSource"} 2025-12-12T16:20:37.547219501+00:00 stderr F {"level":"info","ts":"2025-12-12T16:20:37Z","msg":"Starting EventSource","controller":"operatorhub-controller","controllerGroup":"config.openshift.io","controllerKind":"OperatorHub","source":"kind source: *v1.OperatorHub"} 2025-12-12T16:20:37.547918399+00:00 stderr F {"level":"info","ts":"2025-12-12T16:20:37Z","msg":"Starting EventSource","controller":"configmap-controller","controllerGroup":"","controllerKind":"ConfigMap","source":"kind source: *v1.ConfigMap"} 2025-12-12T16:20:37.652068641+00:00 stderr F {"level":"info","ts":"2025-12-12T16:20:37Z","msg":"Starting Controller","controller":"catalogsource-controller","controllerGroup":"operators.coreos.com","controllerKind":"CatalogSource"} 2025-12-12T16:20:37.652068641+00:00 stderr F {"level":"info","ts":"2025-12-12T16:20:37Z","msg":"Starting workers","controller":"catalogsource-controller","controllerGroup":"operators.coreos.com","controllerKind":"CatalogSource","worker count":1} 2025-12-12T16:20:37.652068641+00:00 stderr F {"level":"info","ts":"2025-12-12T16:20:37Z","msg":"Starting Controller","controller":"configmap-controller","controllerGroup":"","controllerKind":"ConfigMap"} 2025-12-12T16:20:37.652120803+00:00 stderr F {"level":"info","ts":"2025-12-12T16:20:37Z","msg":"Starting 
workers","controller":"configmap-controller","controllerGroup":"","controllerKind":"ConfigMap","worker count":1} 2025-12-12T16:20:37.652324248+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="Reconciling ConfigMap openshift-marketplace/marketplace-trusted-ca" 2025-12-12T16:20:37.654003222+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[ca] Certificate Authorization ConfigMap openshift-marketplace/marketplace-trusted-ca is in sync with disk." name=marketplace-trusted-ca type=ConfigMap 2025-12-12T16:20:37.655713377+00:00 stderr F {"level":"info","ts":"2025-12-12T16:20:37Z","msg":"Starting Controller","controller":"operatorhub-controller","controllerGroup":"config.openshift.io","controllerKind":"OperatorHub"} 2025-12-12T16:20:37.655760178+00:00 stderr F {"level":"info","ts":"2025-12-12T16:20:37Z","msg":"Starting workers","controller":"operatorhub-controller","controllerGroup":"config.openshift.io","controllerKind":"OperatorHub","worker count":1} 2025-12-12T16:20:37.661072227+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="Reconciling OperatorHub cluster" 2025-12-12T16:20:37.661257662+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[defaults] CatalogSource redhat-marketplace is annotated and its spec is the same as the default spec" 2025-12-12T16:20:37.661337554+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:37.661398086+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:37.661439337+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:37.667818194+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="Reconciling OperatorHub cluster" 2025-12-12T16:20:37.667943598+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:37.668034100+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:37.668103752+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[defaults] CatalogSource redhat-marketplace is annotated and its spec is the same as the default spec" 2025-12-12T16:20:37.668196954+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:37.675115516+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="Reconciling OperatorHub cluster" 2025-12-12T16:20:37.675166307+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:37.675279930+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:37.675298611+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:37.675360312+00:00 stderr F time="2025-12-12T16:20:37Z" level=info 
msg="[defaults] CatalogSource redhat-marketplace is annotated and its spec is the same as the default spec" 2025-12-12T16:20:38.835605384+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:39.832607666+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:41.239852768+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:42.240822762+00:00 stderr F time="2025-12-12T16:20:42Z" level=info msg="[defaults] CatalogSource redhat-marketplace is annotated and its spec is the same as the default spec" 2025-12-12T16:20:44.186593180+00:00 stderr F time="2025-12-12T16:20:44Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:44.793605932+00:00 stderr F time="2025-12-12T16:20:44Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:52.185949890+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:52.812003941+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:54.188445915+00:00 stderr F time="2025-12-12T16:20:54Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:54.793526486+00:00 stderr F time="2025-12-12T16:20:54Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:20:56.185887778+00:00 stderr F time="2025-12-12T16:20:56Z" level=info msg="[defaults] CatalogSource redhat-marketplace is annotated and its spec is the same as the default spec" 2025-12-12T16:20:56.785229547+00:00 stderr F time="2025-12-12T16:20:56Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:38.855545123+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="Reconciling OperatorHub cluster" 2025-12-12T16:26:38.855975534+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:38.856123487+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:38.856232240+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:38.870358137+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] Deleting CatalogSource redhat-marketplace" 2025-12-12T16:26:38.882659888+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="Reconciling OperatorHub cluster" 2025-12-12T16:26:38.882802831+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the 
default spec" 2025-12-12T16:26:38.885673824+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:38.900323824+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:38.908825459+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="Reconciling OperatorHub cluster" 2025-12-12T16:26:38.908825459+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:38.908825459+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:38.908928291+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:38.927048469+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="Reconciling OperatorHub cluster" 2025-12-12T16:26:38.927122671+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:38.927170332+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:38.927395238+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:53.625809920+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:26:58.205700553+00:00 stderr F time="2025-12-12T16:26:58Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:27:25.368434037+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:35:29.930378449+00:00 stderr F time="2025-12-12T16:35:29Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:35:29.930929993+00:00 stderr F time="2025-12-12T16:35:29Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:35:31.923347404+00:00 stderr F time="2025-12-12T16:35:31Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:37:13.699635694+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="[defaults] CatalogSource certified-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:37:22.548229969+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="[defaults] CatalogSource redhat-operators is annotated and its spec is the same as the default spec" 2025-12-12T16:38:09.920925712+00:00 stderr F time="2025-12-12T16:38:09Z" level=info msg="[defaults] CatalogSource community-operators is annotated and its spec is the same as the default spec" 
././@LongLink0000644000000000000000000000030300000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g_c6b5aa8b-142f-4f74-a328-f0937a20672f/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_ob0000755000175000017500000000000015117043043033144 5ustar zuulzuul././@LongLink0000644000000000000000000000035100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g_c6b5aa8b-142f-4f74-a328-f0937a20672f/prometheus-operator-admission-webhook/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_ob0000755000175000017500000000000015117043062033145 5ustar zuulzuul././@LongLink0000644000000000000000000000035600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g_c6b5aa8b-142f-4f74-a328-f0937a20672f/prometheus-operator-admission-webhook/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_ob0000644000175000017500000000204515117043043033147 0ustar zuulzuul2025-12-12T16:27:30.199453553+00:00 stdout F ts=2025-12-12T16:27:30.199014392Z level=info caller=/workspace/internal/goruntime/cpu.go:27 msg="Updating GOMAXPROCS=1: using minimum allowed GOMAXPROCS" 2025-12-12T16:27:30.200938500+00:00 stdout F ts=2025-12-12T16:27:30.200893919Z level=warn caller=/workspace/pkg/server/server.go:158 msg="server TLS client verification disabled" client_ca_file=/etc/tls/private/tls-ca.crt err="stat /etc/tls/private/tls-ca.crt: no such file or directory" 2025-12-12T16:27:30.201593637+00:00 stdout F ts=2025-12-12T16:27:30.201535685Z level=info caller=/workspace/pkg/server/server.go:295 msg="starting secure server" address=[::]:8443 http2=false 2025-12-12T16:27:30.201743011+00:00 stderr F I1212 16:27:30.201623 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:27:30.208232935+00:00 stderr F I1212 16:27:30.205083 1 dynamic_serving_content.go:135] "Starting controller" name="servingCert::/tmp/k8s-webhook-server/serving-certs/tls.crt::/tmp/k8s-webhook-server/serving-certs/tls.key" ././@LongLink0000644000000000000000000000027100000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-opera0000755000175000017500000000000015117043043033017 5ustar zuulzuul././@LongLink0000644000000000000000000000030700000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-api/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-opera0000755000175000017500000000000015117043062033020 5ustar zuulzuul././@LongLink0000644000000000000000000000031400000000000011601 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-api/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-opera0000644000175000017500000000000015117043043033007 0ustar zuulzuul././@LongLink0000644000000000000000000000032300000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-config-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-opera0000755000175000017500000000000015117043062033020 5ustar zuulzuul././@LongLink0000644000000000000000000000033000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-config-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-opera0000644000175000017500000010613215117043043033024 0ustar zuulzuul2025-12-12T16:16:49.593224966+00:00 stderr F I1212 16:16:49.591777 1 cmd.go:253] Using service-serving-cert provided certificates 2025-12-12T16:16:49.593224966+00:00 stderr F I1212 16:16:49.592624 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 2025-12-12T16:16:49.596529517+00:00 stderr F I1212 16:16:49.593842 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:49.703220791+00:00 stderr F I1212 16:16:49.699585 1 builder.go:304] config-operator version 4.20.0-202510211040.p2.g657754e.assembly.stream.el9-657754e-657754e2beaed6295ff28bb0f7813cb1c9ce35b2 2025-12-12T16:16:50.938664764+00:00 stderr F I1212 16:16:50.937520 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:50.938737186+00:00 stderr F W1212 16:16:50.938722 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:50.938776487+00:00 stderr F W1212 16:16:50.938755 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:50.938806588+00:00 stderr F W1212 16:16:50.938796 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:50.938831158+00:00 stderr F W1212 16:16:50.938822 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:50.938855029+00:00 stderr F W1212 16:16:50.938845 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:50.938880089+00:00 stderr F W1212 16:16:50.938870 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 
2025-12-12T16:16:50.945613864+00:00 stderr F I1212 16:16:50.944738 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:50.946346022+00:00 stderr F I1212 16:16:50.946283 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:50.946364792+00:00 stderr F I1212 16:16:50.946339 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:50.946476495+00:00 stderr F I1212 16:16:50.946447 1 leaderelection.go:257] attempting to acquire leader lease openshift-config-operator/config-operator-lock... 2025-12-12T16:16:50.948416662+00:00 stderr F I1212 16:16:50.946576 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:50.948416662+00:00 stderr F I1212 16:16:50.946594 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:50.948416662+00:00 stderr F I1212 16:16:50.946293 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:50.948416662+00:00 stderr F I1212 16:16:50.946672 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:50.948416662+00:00 stderr F I1212 16:16:50.947204 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:16:50.948416662+00:00 stderr F I1212 16:16:50.947255 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:50.948416662+00:00 stderr F I1212 16:16:50.947371 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:50.964124396+00:00 stderr F I1212 16:16:50.963932 1 leaderelection.go:271] successfully acquired lease openshift-config-operator/config-operator-lock 2025-12-12T16:16:50.968944603+00:00 stderr F I1212 16:16:50.964422 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-config-operator", Name:"config-operator-lock", UID:"b62d52a3-898a-4ffd-8665-bd76fec9edf7", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"37613", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' openshift-config-operator-5777786469-49zmj_3648f835-3f63-4660-b99f-68cd30b71864 became leader 2025-12-12T16:16:51.047937542+00:00 stderr F I1212 16:16:51.047777 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:51.047937542+00:00 stderr F I1212 16:16:51.047858 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:51.048027774+00:00 stderr F I1212 16:16:51.047945 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:51.227162208+00:00 stderr F I1212 16:16:51.227004 1 base_controller.go:76] Waiting for caches to sync for KubeCloudConfigController 2025-12-12T16:16:51.227162208+00:00 stderr F I1212 16:16:51.227012 1 base_controller.go:76] Waiting for caches to sync for ConfigOperatorController 
2025-12-12T16:16:51.227162208+00:00 stderr F I1212 16:16:51.227136 1 base_controller.go:82] Caches are synced for ConfigOperatorController 2025-12-12T16:16:51.227222699+00:00 stderr F I1212 16:16:51.227153 1 base_controller.go:119] Starting #1 worker of ConfigOperatorController controller ... 2025-12-12T16:16:51.227321781+00:00 stderr F I1212 16:16:51.227289 1 base_controller.go:76] Waiting for caches to sync for FeatureUpgradeableController 2025-12-12T16:16:51.227392823+00:00 stderr F I1212 16:16:51.227372 1 base_controller.go:76] Waiting for caches to sync for MigrationPlatformStatusController 2025-12-12T16:16:51.227392823+00:00 stderr F I1212 16:16:51.227375 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer 2025-12-12T16:16:51.227402673+00:00 stderr F I1212 16:16:51.227396 1 base_controller.go:76] Waiting for caches to sync for LatencySensitiveRemovalController 2025-12-12T16:16:51.227426044+00:00 stderr F I1212 16:16:51.227408 1 base_controller.go:82] Caches are synced for LoggingSyncer 2025-12-12T16:16:51.227426044+00:00 stderr F I1212 16:16:51.227413 1 base_controller.go:82] Caches are synced for LatencySensitiveRemovalController 2025-12-12T16:16:51.227435624+00:00 stderr F I1212 16:16:51.227427 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ... 2025-12-12T16:16:51.227435624+00:00 stderr F I1212 16:16:51.227427 1 base_controller.go:119] Starting #1 worker of LatencySensitiveRemovalController controller ... 2025-12-12T16:16:51.228433919+00:00 stderr F I1212 16:16:51.227478 1 base_controller.go:76] Waiting for caches to sync for FeatureGateController 2025-12-12T16:16:51.228433919+00:00 stderr F I1212 16:16:51.227593 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-config-operator", Name:"openshift-config-operator", UID:"dc451fc9-e781-493f-8e7d-55e9072cc784", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'FastControllerResync' Controller "ConfigOperatorController" resync interval is set to 10s which might lead to client request throttling 2025-12-12T16:16:51.228433919+00:00 stderr F I1212 16:16:51.227795 1 base_controller.go:76] Waiting for caches to sync for AWSPlatformServiceLocationController 2025-12-12T16:16:51.229173847+00:00 stderr F I1212 16:16:51.229095 1 base_controller.go:76] Waiting for caches to sync for StaleConditionController-RemoveStaleConditions 2025-12-12T16:16:51.229213098+00:00 stderr F I1212 16:16:51.229168 1 base_controller.go:82] Caches are synced for StaleConditionController-RemoveStaleConditions 2025-12-12T16:16:51.229213098+00:00 stderr F I1212 16:16:51.229202 1 base_controller.go:119] Starting #1 worker of StaleConditionController-RemoveStaleConditions controller ... 
2025-12-12T16:16:51.230221692+00:00 stderr F I1212 16:16:51.229947 1 base_controller.go:76] Waiting for caches to sync for StatusSyncer_config-operator 2025-12-12T16:16:51.231779180+00:00 stderr F I1212 16:16:51.231727 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.237164462+00:00 stderr F I1212 16:16:51.232889 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.248550290+00:00 stderr F I1212 16:16:51.247716 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.248550290+00:00 stderr F I1212 16:16:51.247812 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.251135843+00:00 stderr F I1212 16:16:51.250886 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.264494839+00:00 stderr F I1212 16:16:51.264157 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.330456940+00:00 stderr F I1212 16:16:51.330346 1 base_controller.go:82] Caches are synced for StatusSyncer_config-operator 2025-12-12T16:16:51.330456940+00:00 stderr F I1212 16:16:51.330387 1 base_controller.go:119] Starting #1 worker of StatusSyncer_config-operator controller ... 2025-12-12T16:16:51.330898450+00:00 stderr F I1212 16:16:51.330836 1 base_controller.go:82] Caches are synced for AWSPlatformServiceLocationController 2025-12-12T16:16:51.330898450+00:00 stderr F I1212 16:16:51.330881 1 base_controller.go:119] Starting #1 worker of AWSPlatformServiceLocationController controller ... 2025-12-12T16:16:51.330959892+00:00 stderr F I1212 16:16:51.330924 1 base_controller.go:82] Caches are synced for MigrationPlatformStatusController 2025-12-12T16:16:51.330959892+00:00 stderr F I1212 16:16:51.330936 1 base_controller.go:119] Starting #1 worker of MigrationPlatformStatusController controller ... 2025-12-12T16:16:51.331736691+00:00 stderr F I1212 16:16:51.331658 1 base_controller.go:82] Caches are synced for FeatureUpgradeableController 2025-12-12T16:16:51.331736691+00:00 stderr F I1212 16:16:51.331724 1 base_controller.go:119] Starting #1 worker of FeatureUpgradeableController controller ... 2025-12-12T16:16:51.332540510+00:00 stderr F I1212 16:16:51.332213 1 base_controller.go:82] Caches are synced for FeatureGateController 2025-12-12T16:16:51.332540510+00:00 stderr F I1212 16:16:51.332229 1 base_controller.go:119] Starting #1 worker of FeatureGateController controller ... 2025-12-12T16:16:51.333013922+00:00 stderr F I1212 16:16:51.332933 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:51.431848265+00:00 stderr F I1212 16:16:51.431749 1 base_controller.go:82] Caches are synced for KubeCloudConfigController 2025-12-12T16:16:51.431848265+00:00 stderr F I1212 16:16:51.431788 1 base_controller.go:119] Starting #1 worker of KubeCloudConfigController controller ... 
2025-12-12T16:16:55.911072392+00:00 stderr F I1212 16:16:55.910439 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.910393995 +0000 UTC))" 2025-12-12T16:16:55.911072392+00:00 stderr F I1212 16:16:55.911049 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.91102607 +0000 UTC))" 2025-12-12T16:16:55.911166354+00:00 stderr F I1212 16:16:55.911067 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.911058161 +0000 UTC))" 2025-12-12T16:16:55.911166354+00:00 stderr F I1212 16:16:55.911081 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.911072152 +0000 UTC))" 2025-12-12T16:16:55.911166354+00:00 stderr F I1212 16:16:55.911107 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.911085902 +0000 UTC))" 2025-12-12T16:16:55.911166354+00:00 stderr F I1212 16:16:55.911128 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.911115333 +0000 UTC))" 2025-12-12T16:16:55.911166354+00:00 stderr F I1212 16:16:55.911145 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.911134833 +0000 UTC))" 2025-12-12T16:16:55.911166354+00:00 stderr F I1212 16:16:55.911157 1 tlsconfig.go:181] "Loaded client CA" index=7 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.911150053 +0000 UTC))" 2025-12-12T16:16:55.912441135+00:00 stderr F I1212 16:16:55.911169 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.911161854 +0000 UTC))" 2025-12-12T16:16:55.912441135+00:00 stderr F I1212 16:16:55.911211 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.911197815 +0000 UTC))" 2025-12-12T16:16:55.912441135+00:00 stderr F I1212 16:16:55.911453 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-config-operator.svc\" [serving] validServingFor=[metrics.openshift-config-operator.svc,metrics.openshift-config-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:16:55.91143955 +0000 UTC))" 2025-12-12T16:16:55.912441135+00:00 stderr F I1212 16:16:55.911624 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556210\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556210\" (2025-12-12 15:16:49 +0000 UTC to 2028-12-12 15:16:49 +0000 UTC (now=2025-12-12 16:16:55.911605665 +0000 UTC))" 2025-12-12T16:17:46.325396384+00:00 stderr F I1212 16:17:46.323855 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.323799595 +0000 UTC))" 2025-12-12T16:17:46.333226068+00:00 stderr F I1212 16:17:46.331693 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.324254446 +0000 UTC))" 2025-12-12T16:17:46.333226068+00:00 stderr F I1212 16:17:46.331808 1 tlsconfig.go:181] "Loaded client CA" index=2 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.331768322 +0000 UTC))" 2025-12-12T16:17:46.333226068+00:00 stderr F I1212 16:17:46.331827 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.331817643 +0000 UTC))" 2025-12-12T16:17:46.333226068+00:00 stderr F I1212 16:17:46.331843 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.331832083 +0000 UTC))" 2025-12-12T16:17:46.333226068+00:00 stderr F I1212 16:17:46.331888 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.331851164 +0000 UTC))" 2025-12-12T16:17:46.333226068+00:00 stderr F I1212 16:17:46.331917 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.331895305 +0000 UTC))" 2025-12-12T16:17:46.333226068+00:00 stderr F I1212 16:17:46.331945 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.331930246 +0000 UTC))" 2025-12-12T16:17:46.333226068+00:00 stderr F I1212 16:17:46.331965 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.331952406 +0000 UTC))" 2025-12-12T16:17:46.333226068+00:00 stderr F I1212 16:17:46.332019 1 tlsconfig.go:181] "Loaded client CA" index=9 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.332006158 +0000 UTC))" 2025-12-12T16:17:46.333226068+00:00 stderr F I1212 16:17:46.332054 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.332027928 +0000 UTC))" 2025-12-12T16:17:46.333226068+00:00 stderr F I1212 16:17:46.332390 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-config-operator.svc\" [serving] validServingFor=[metrics.openshift-config-operator.svc,metrics.openshift-config-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:17:46.332373027 +0000 UTC))" 2025-12-12T16:17:46.333226068+00:00 stderr F I1212 16:17:46.332565 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556210\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556210\" (2025-12-12 15:16:49 +0000 UTC to 2028-12-12 15:16:49 +0000 UTC (now=2025-12-12 16:17:46.332550561 +0000 UTC))" 2025-12-12T16:18:50.997137355+00:00 stderr F E1212 16:18:50.996663 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-config-operator/leases/config-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:50.998684133+00:00 stderr F E1212 16:18:50.998637 1 leaderelection.go:436] error retrieving resource lock openshift-config-operator/config-operator-lock: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-config-operator/leases/config-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.435991874+00:00 stderr F W1212 16:18:51.435921 1 base_controller.go:242] Updating status of "KubeCloudConfigController" failed: unable to ApplyStatus for operator using fieldManager "KubeCloudConfigController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/configs/cluster/status?fieldManager=KubeCloudConfigController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.435991874+00:00 stderr F E1212 16:18:51.435967 1 base_controller.go:279] "Unhandled Error" err="KubeCloudConfigController reconciliation failed: Delete \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/kube-cloud-config\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.444381081+00:00 stderr F W1212 16:18:51.444327 1 base_controller.go:242] Updating status of "KubeCloudConfigController" failed: unable to ApplyStatus for operator using fieldManager "KubeCloudConfigController-reportDegraded": 
Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/configs/cluster/status?fieldManager=KubeCloudConfigController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.444381081+00:00 stderr F E1212 16:18:51.444366 1 base_controller.go:279] "Unhandled Error" err="KubeCloudConfigController reconciliation failed: Delete \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/kube-cloud-config\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.457455795+00:00 stderr F W1212 16:18:51.457393 1 base_controller.go:242] Updating status of "KubeCloudConfigController" failed: unable to ApplyStatus for operator using fieldManager "KubeCloudConfigController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/configs/cluster/status?fieldManager=KubeCloudConfigController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.457511836+00:00 stderr F E1212 16:18:51.457452 1 base_controller.go:279] "Unhandled Error" err="KubeCloudConfigController reconciliation failed: Delete \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/kube-cloud-config\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.481322625+00:00 stderr F W1212 16:18:51.481223 1 base_controller.go:242] Updating status of "KubeCloudConfigController" failed: unable to ApplyStatus for operator using fieldManager "KubeCloudConfigController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/configs/cluster/status?fieldManager=KubeCloudConfigController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.481322625+00:00 stderr F E1212 16:18:51.481263 1 base_controller.go:279] "Unhandled Error" err="KubeCloudConfigController reconciliation failed: Delete \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/kube-cloud-config\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.525109977+00:00 stderr F W1212 16:18:51.525031 1 base_controller.go:242] Updating status of "KubeCloudConfigController" failed: unable to ApplyStatus for operator using fieldManager "KubeCloudConfigController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/configs/cluster/status?fieldManager=KubeCloudConfigController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.525109977+00:00 stderr F E1212 16:18:51.525075 1 base_controller.go:279] "Unhandled Error" err="KubeCloudConfigController reconciliation failed: Delete \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/kube-cloud-config\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.609274528+00:00 stderr F W1212 16:18:51.609199 1 base_controller.go:242] Updating status of "KubeCloudConfigController" failed: unable to ApplyStatus for operator using fieldManager "KubeCloudConfigController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/configs/cluster/status?fieldManager=KubeCloudConfigController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.609274528+00:00 stderr F E1212 16:18:51.609244 1 base_controller.go:279] "Unhandled Error" err="KubeCloudConfigController reconciliation failed: Delete 
\"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/kube-cloud-config\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.774266797+00:00 stderr F W1212 16:18:51.774149 1 base_controller.go:242] Updating status of "KubeCloudConfigController" failed: unable to ApplyStatus for operator using fieldManager "KubeCloudConfigController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/configs/cluster/status?fieldManager=KubeCloudConfigController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.774266797+00:00 stderr F E1212 16:18:51.774214 1 base_controller.go:279] "Unhandled Error" err="KubeCloudConfigController reconciliation failed: Delete \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/kube-cloud-config\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.098355340+00:00 stderr F W1212 16:18:52.098263 1 base_controller.go:242] Updating status of "KubeCloudConfigController" failed: unable to ApplyStatus for operator using fieldManager "KubeCloudConfigController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/configs/cluster/status?fieldManager=KubeCloudConfigController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:52.098355340+00:00 stderr F E1212 16:18:52.098314 1 base_controller.go:279] "Unhandled Error" err="KubeCloudConfigController reconciliation failed: Delete \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/kube-cloud-config\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.744532885+00:00 stderr F W1212 16:18:52.743640 1 base_controller.go:242] Updating status of "KubeCloudConfigController" failed: unable to ApplyStatus for operator using fieldManager "KubeCloudConfigController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/configs/cluster/status?fieldManager=KubeCloudConfigController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:52.744532885+00:00 stderr F E1212 16:18:52.744512 1 base_controller.go:279] "Unhandled Error" err="KubeCloudConfigController reconciliation failed: Delete \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/kube-cloud-config\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.029938184+00:00 stderr F W1212 16:18:54.029845 1 base_controller.go:242] Updating status of "KubeCloudConfigController" failed: unable to ApplyStatus for operator using fieldManager "KubeCloudConfigController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/configs/cluster/status?fieldManager=KubeCloudConfigController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:54.029938184+00:00 stderr F E1212 16:18:54.029887 1 base_controller.go:279] "Unhandled Error" err="KubeCloudConfigController reconciliation failed: Delete \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/kube-cloud-config\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:56.595141402+00:00 stderr F W1212 16:18:56.594761 1 base_controller.go:242] Updating status of "KubeCloudConfigController" failed: unable to ApplyStatus for operator using fieldManager "KubeCloudConfigController-reportDegraded": Patch 
"https://10.217.4.1:443/apis/operator.openshift.io/v1/configs/cluster/status?fieldManager=KubeCloudConfigController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:56.595141402+00:00 stderr F E1212 16:18:56.595126 1 base_controller.go:279] "Unhandled Error" err="KubeCloudConfigController reconciliation failed: Delete \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/kube-cloud-config\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:19:52.165317155+00:00 stderr F I1212 16:19:52.164751 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:59.189231990+00:00 stderr F I1212 16:19:59.188648 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:59.864533796+00:00 stderr F I1212 16:19:59.863984 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:00.564786318+00:00 stderr F I1212 16:20:00.564459 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:03.757898670+00:00 stderr F I1212 16:20:03.757311 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:10.575843615+00:00 stderr F I1212 16:20:10.575778 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:15.722858586+00:00 stderr F I1212 16:20:15.721587 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=configs" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:18.101616001+00:00 stderr F I1212 16:20:18.101428 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:18.932606206+00:00 stderr F I1212 16:20:18.932486 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:23.286961355+00:00 stderr F I1212 16:20:23.286297 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:36.534540128+00:00 stderr F I1212 16:20:36.533910 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" ././@LongLink0000644000000000000000000000025600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-control0000755000175000017500000000000015117043043033052 5ustar zuulzuul././@LongLink0000644000000000000000000000031000000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/cluster-policy-controller/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-control0000755000175000017500000000000015117043063033054 5ustar 
zuulzuul././@LongLink0000644000000000000000000000031500000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/cluster-policy-controller/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-control0000644000175000017500000102713115117043043033061 0ustar zuulzuul2025-12-12T16:15:01.866015102+00:00 stderr F + timeout 3m /bin/bash -exuo pipefail -c 'while [ -n "$(ss -Htanop \( sport = 10357 \))" ]; do sleep 1; done' 2025-12-12T16:15:01.870483359+00:00 stderr F ++ ss -Htanop '(' sport = 10357 ')' 2025-12-12T16:15:01.879325472+00:00 stderr F + '[' -n '' ']' 2025-12-12T16:15:01.880493477+00:00 stderr F + exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2 2025-12-12T16:15:02.043040073+00:00 stderr F I1212 16:15:02.042911 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 2025-12-12T16:15:02.044140867+00:00 stderr F I1212 16:15:02.044094 1 observer_polling.go:159] Starting file observer 2025-12-12T16:15:02.049838901+00:00 stderr F I1212 16:15:02.049788 1 builder.go:304] cluster-policy-controller version 4.20.0-202510211040.p2.g47c7831.assembly.stream.el9-47c7831-47c783103216aa5e1242632127a5d8f98b8b7455 2025-12-12T16:15:02.051610370+00:00 stderr F I1212 16:15:02.051570 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" 2025-12-12T16:15:20.466963249+00:00 stderr F I1212 16:15:20.466899 1 requestheader_controller.go:255] Loaded a new request header values for RequestHeaderAuthRequestController 2025-12-12T16:15:20.473637309+00:00 stderr F I1212 16:15:20.473343 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400 2025-12-12T16:15:20.473637309+00:00 stderr F I1212 16:15:20.473628 1 maxinflight.go:145] "Initialized mutatingChan" len=200 2025-12-12T16:15:20.473676190+00:00 stderr F I1212 16:15:20.473658 1 maxinflight.go:116] "Set denominator for readonly requests" limit=400 2025-12-12T16:15:20.473676190+00:00 stderr F I1212 16:15:20.473668 1 maxinflight.go:120] "Set denominator for mutating requests" limit=200 2025-12-12T16:15:20.480660699+00:00 stderr F I1212 16:15:20.480592 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:15:20.480660699+00:00 stderr F I1212 16:15:20.480610 1 genericapiserver.go:546] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:15:20.481903799+00:00 stderr F W1212 16:15:20.481431 1 builder.go:364] unable to get control plane topology, using HA cluster values for leader election: infrastructures.config.openshift.io "cluster" is forbidden: User "system:kube-controller-manager" cannot get resource "infrastructures" in API group "config.openshift.io" at the cluster scope 2025-12-12T16:15:20.481966370+00:00 stderr F I1212 16:15:20.481913 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-controller-manager", 
Name:"kube-controller-manager-crc", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ControlPlaneTopology' unable to get control plane topology, using HA cluster values for leader election: infrastructures.config.openshift.io "cluster" is forbidden: User "system:kube-controller-manager" cannot get resource "infrastructures" in API group "config.openshift.io" at the cluster scope 2025-12-12T16:15:20.482026291+00:00 stderr F I1212 16:15:20.481996 1 leaderelection.go:257] attempting to acquire leader lease openshift-kube-controller-manager/cluster-policy-controller-lock... 2025-12-12T16:15:20.483583719+00:00 stderr F I1212 16:15:20.483457 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:15:20.483583719+00:00 stderr F I1212 16:15:20.483494 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:15:20.483583719+00:00 stderr F I1212 16:15:20.483521 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:15:20.483840145+00:00 stderr F I1212 16:15:20.483757 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" 2025-12-12T16:15:20.486228963+00:00 stderr F I1212 16:15:20.484316 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"kube-controller-manager.openshift-kube-controller-manager.svc\" [serving] validServingFor=[kube-controller-manager.openshift-kube-controller-manager.svc,kube-controller-manager.openshift-kube-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:15:20.484278506 +0000 UTC))" 2025-12-12T16:15:20.486228963+00:00 stderr F I1212 16:15:20.484511 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556102\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556102\" (2025-12-12 15:15:02 +0000 UTC to 2028-12-12 15:15:02 +0000 UTC (now=2025-12-12 16:15:20.484490091 +0000 UTC))" 2025-12-12T16:15:20.486228963+00:00 stderr F I1212 16:15:20.484575 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:15:20.486228963+00:00 stderr F I1212 16:15:20.484582 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:15:20.486228963+00:00 stderr F I1212 16:15:20.484527 1 secure_serving.go:211] Serving securely on 127.0.0.1:10357 2025-12-12T16:15:20.486228963+00:00 stderr F I1212 16:15:20.484678 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:15:20.486228963+00:00 stderr F I1212 16:15:20.484711 1 genericapiserver.go:696] [graceful-termination] waiting for shutdown to be initiated 2025-12-12T16:15:20.486228963+00:00 stderr F I1212 16:15:20.484747 1 tlsconfig.go:243] "Starting 
DynamicServingCertificateController" 2025-12-12T16:15:20.487020542+00:00 stderr F I1212 16:15:20.486974 1 leaderelection.go:271] successfully acquired lease openshift-kube-controller-manager/cluster-policy-controller-lock 2025-12-12T16:15:20.487066053+00:00 stderr F I1212 16:15:20.487041 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.487090444+00:00 stderr F I1212 16:15:20.487067 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.487157035+00:00 stderr F I1212 16:15:20.487127 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.487210326+00:00 stderr F I1212 16:15:20.487158 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-kube-controller-manager", Name:"cluster-policy-controller-lock", UID:"3c400717-8ed9-4790-8b9c-dd760ce907d6", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"36226", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' crc_b1ef9ca6-fdbb-4d01-b5b9-32bb9608557f became leader 2025-12-12T16:15:20.493385135+00:00 stderr F I1212 16:15:20.493345 1 policy_controller.go:78] Starting "openshift.io/resourcequota" 2025-12-12T16:15:20.530694404+00:00 stderr F I1212 16:15:20.530651 1 policy_controller.go:88] Started "openshift.io/resourcequota" 2025-12-12T16:15:20.530782756+00:00 stderr F I1212 16:15:20.530768 1 policy_controller.go:78] Starting "openshift.io/cluster-quota-reconciliation" 2025-12-12T16:15:20.530890539+00:00 stderr F I1212 16:15:20.530690 1 resource_quota_controller.go:300] "Starting resource quota controller" 2025-12-12T16:15:20.530902259+00:00 stderr F I1212 16:15:20.530890 1 shared_informer.go:350] "Waiting for caches to sync" controller="resource quota" 2025-12-12T16:15:20.530928280+00:00 stderr F I1212 16:15:20.530911 1 resource_quota_monitor.go:308] "QuotaMonitor running" 2025-12-12T16:15:20.544645910+00:00 stderr F E1212 16:15:20.544610 1 reconciliation_controller.go:121] "Unhandled Error" err="initial discovery check failure, continuing and counting on future sync update: unable to retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, oauth.openshift.io/v1: stale GroupVersion discovery: oauth.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1, user.openshift.io/v1: stale GroupVersion discovery: user.openshift.io/v1" 2025-12-12T16:15:20.547036348+00:00 stderr F I1212 16:15:20.545858 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="ippools.whereabouts.cni.cncf.io" 2025-12-12T16:15:20.547036348+00:00 stderr F I1212 16:15:20.545988 1 
resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="machines.machine.openshift.io" 2025-12-12T16:15:20.547036348+00:00 stderr F I1212 16:15:20.546003 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="alertrelabelconfigs.monitoring.openshift.io" 2025-12-12T16:15:20.547036348+00:00 stderr F I1212 16:15:20.546014 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="serviceaccounts" 2025-12-12T16:15:20.547036348+00:00 stderr F I1212 16:15:20.546036 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="replicasets.apps" 2025-12-12T16:15:20.547036348+00:00 stderr F I1212 16:15:20.546045 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="statefulsets.apps" 2025-12-12T16:15:20.547402517+00:00 stderr F I1212 16:15:20.547345 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="jobs.batch" 2025-12-12T16:15:20.547536310+00:00 stderr F I1212 16:15:20.547518 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="grpcroutes.gateway.networking.k8s.io" 2025-12-12T16:15:20.547678753+00:00 stderr F I1212 16:15:20.547659 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="egressfirewalls.k8s.ovn.org" 2025-12-12T16:15:20.547896188+00:00 stderr F I1212 16:15:20.547870 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="nodeslicepools.whereabouts.cni.cncf.io" 2025-12-12T16:15:20.547975690+00:00 stderr F I1212 16:15:20.547938 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="endpoints" 2025-12-12T16:15:20.548047952+00:00 stderr F I1212 16:15:20.548012 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="deployments.apps" 2025-12-12T16:15:20.548073773+00:00 stderr F I1212 16:15:20.548055 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="dnsrecords.ingress.operator.openshift.io" 2025-12-12T16:15:20.548148965+00:00 stderr F I1212 16:15:20.548130 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="network-attachment-definitions.k8s.cni.cncf.io" 2025-12-12T16:15:20.548246407+00:00 stderr F I1212 16:15:20.548165 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="machinesets.machine.openshift.io" 2025-12-12T16:15:20.548690568+00:00 stderr F I1212 16:15:20.548643 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="podmonitors.monitoring.coreos.com" 2025-12-12T16:15:20.549918177+00:00 stderr F I1212 16:15:20.549888 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="prometheusrules.monitoring.coreos.com" 2025-12-12T16:15:20.549973458+00:00 stderr F I1212 16:15:20.549957 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="httproutes.gateway.networking.k8s.io" 2025-12-12T16:15:20.550017730+00:00 stderr F I1212 16:15:20.549997 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="referencegrants.gateway.networking.k8s.io" 2025-12-12T16:15:20.550105262+00:00 stderr F I1212 16:15:20.550061 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="ingresscontrollers.operator.openshift.io" 2025-12-12T16:15:20.550143433+00:00 stderr F I1212 
16:15:20.550121 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="horizontalpodautoscalers.autoscaling" 2025-12-12T16:15:20.550868430+00:00 stderr F I1212 16:15:20.550834 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="ingresses.networking.k8s.io" 2025-12-12T16:15:20.550886090+00:00 stderr F I1212 16:15:20.550866 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="metal3remediationtemplates.infrastructure.cluster.x-k8s.io" 2025-12-12T16:15:20.550915841+00:00 stderr F I1212 16:15:20.550886 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="egressrouters.network.operator.openshift.io" 2025-12-12T16:15:20.550924251+00:00 stderr F I1212 16:15:20.550916 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="clusterserviceversions.operators.coreos.com" 2025-12-12T16:15:20.550934162+00:00 stderr F I1212 16:15:20.550929 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="podtemplates" 2025-12-12T16:15:20.550959572+00:00 stderr F I1212 16:15:20.550941 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="networkpolicies.networking.k8s.io" 2025-12-12T16:15:20.550968792+00:00 stderr F I1212 16:15:20.550959 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="ipaddressclaims.ipam.cluster.x-k8s.io" 2025-12-12T16:15:20.550978853+00:00 stderr F I1212 16:15:20.550973 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="egressqoses.k8s.ovn.org" 2025-12-12T16:15:20.551013424+00:00 stderr F I1212 16:15:20.550986 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="probes.monitoring.coreos.com" 2025-12-12T16:15:20.551013424+00:00 stderr F I1212 16:15:20.551007 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="thanosrulers.monitoring.coreos.com" 2025-12-12T16:15:20.551038274+00:00 stderr F I1212 16:15:20.551021 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="alertmanagers.monitoring.coreos.com" 2025-12-12T16:15:20.551047964+00:00 stderr F I1212 16:15:20.551040 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="operatorconditions.operators.coreos.com" 2025-12-12T16:15:20.551066335+00:00 stderr F I1212 16:15:20.551060 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="leases.coordination.k8s.io" 2025-12-12T16:15:20.551594958+00:00 stderr F I1212 16:15:20.551572 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="roles.rbac.authorization.k8s.io" 2025-12-12T16:15:20.551613088+00:00 stderr F I1212 16:15:20.551592 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="rolebindings.rbac.authorization.k8s.io" 2025-12-12T16:15:20.551667909+00:00 stderr F I1212 16:15:20.551644 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="operatorpkis.network.operator.openshift.io" 2025-12-12T16:15:20.551822753+00:00 stderr F I1212 16:15:20.551806 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="csistoragecapacities.storage.k8s.io" 2025-12-12T16:15:20.551834503+00:00 stderr F I1212 16:15:20.551826 1 resource_quota_monitor.go:227] "QuotaMonitor created object count 
evaluator" resource="limitranges" 2025-12-12T16:15:20.551855224+00:00 stderr F I1212 16:15:20.551848 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="alertmanagerconfigs.monitoring.coreos.com" 2025-12-12T16:15:20.551865394+00:00 stderr F I1212 16:15:20.551860 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="operatorgroups.operators.coreos.com" 2025-12-12T16:15:20.551889155+00:00 stderr F I1212 16:15:20.551873 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="installplans.operators.coreos.com" 2025-12-12T16:15:20.551898185+00:00 stderr F I1212 16:15:20.551890 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="catalogsources.operators.coreos.com" 2025-12-12T16:15:20.551920615+00:00 stderr F I1212 16:15:20.551905 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="overlappingrangeipreservations.whereabouts.cni.cncf.io" 2025-12-12T16:15:20.551929686+00:00 stderr F I1212 16:15:20.551919 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="machineautoscalers.autoscaling.openshift.io" 2025-12-12T16:15:20.551937506+00:00 stderr F I1212 16:15:20.551931 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="metal3remediations.infrastructure.cluster.x-k8s.io" 2025-12-12T16:15:20.551965086+00:00 stderr F I1212 16:15:20.551949 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="ipaddresses.ipam.cluster.x-k8s.io" 2025-12-12T16:15:20.551974687+00:00 stderr F I1212 16:15:20.551964 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="ipamclaims.k8s.cni.cncf.io" 2025-12-12T16:15:20.551982517+00:00 stderr F I1212 16:15:20.551976 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="userdefinednetworks.k8s.ovn.org" 2025-12-12T16:15:20.551992957+00:00 stderr F I1212 16:15:20.551988 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="controlplanemachinesets.machine.openshift.io" 2025-12-12T16:15:20.552030868+00:00 stderr F I1212 16:15:20.552000 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="subscriptions.operators.coreos.com" 2025-12-12T16:15:20.552030868+00:00 stderr F I1212 16:15:20.552028 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="imagepolicies.config.openshift.io" 2025-12-12T16:15:20.552054289+00:00 stderr F I1212 16:15:20.552041 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="podnetworkconnectivitychecks.controlplane.operator.openshift.io" 2025-12-12T16:15:20.552074959+00:00 stderr F I1212 16:15:20.552058 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="gateways.gateway.networking.k8s.io" 2025-12-12T16:15:20.552074959+00:00 stderr F I1212 16:15:20.552070 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="egressservices.k8s.ovn.org" 2025-12-12T16:15:20.552099260+00:00 stderr F I1212 16:15:20.552082 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="machinehealthchecks.machine.openshift.io" 2025-12-12T16:15:20.552099260+00:00 stderr F I1212 16:15:20.552096 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" 
resource="prometheuses.monitoring.coreos.com" 2025-12-12T16:15:20.552127320+00:00 stderr F I1212 16:15:20.552111 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="servicemonitors.monitoring.coreos.com" 2025-12-12T16:15:20.552136371+00:00 stderr F I1212 16:15:20.552131 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="daemonsets.apps" 2025-12-12T16:15:20.552169101+00:00 stderr F I1212 16:15:20.552144 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="cronjobs.batch" 2025-12-12T16:15:20.552169101+00:00 stderr F I1212 16:15:20.552159 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="poddisruptionbudgets.policy" 2025-12-12T16:15:20.552195512+00:00 stderr F I1212 16:15:20.552171 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="projecthelmchartrepositories.helm.openshift.io" 2025-12-12T16:15:20.552208332+00:00 stderr F I1212 16:15:20.552201 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="controllerrevisions.apps" 2025-12-12T16:15:20.552233433+00:00 stderr F I1212 16:15:20.552217 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="endpointslices.discovery.k8s.io" 2025-12-12T16:15:20.552242223+00:00 stderr F I1212 16:15:20.552237 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="alertingrules.monitoring.openshift.io" 2025-12-12T16:15:20.552270244+00:00 stderr F I1212 16:15:20.552254 1 policy_controller.go:88] Started "openshift.io/cluster-quota-reconciliation" 2025-12-12T16:15:20.552270244+00:00 stderr F I1212 16:15:20.552263 1 policy_controller.go:78] Starting "openshift.io/cluster-csr-approver" 2025-12-12T16:15:20.552448698+00:00 stderr F I1212 16:15:20.552412 1 reconciliation_controller.go:140] Starting the cluster quota reconciliation controller 2025-12-12T16:15:20.552463138+00:00 stderr F I1212 16:15:20.552441 1 clusterquotamapping.go:127] Starting ClusterQuotaMappingController controller 2025-12-12T16:15:20.552542520+00:00 stderr F I1212 16:15:20.552512 1 resource_quota_monitor.go:308] "QuotaMonitor running" 2025-12-12T16:15:20.557842698+00:00 stderr F I1212 16:15:20.556557 1 policy_controller.go:88] Started "openshift.io/cluster-csr-approver" 2025-12-12T16:15:20.557842698+00:00 stderr F I1212 16:15:20.556692 1 policy_controller.go:78] Starting "openshift.io/podsecurity-admission-label-syncer" 2025-12-12T16:15:20.557842698+00:00 stderr F I1212 16:15:20.556916 1 base_controller.go:76] Waiting for caches to sync for WebhookAuthenticatorCertApprover_csr-approver-controller 2025-12-12T16:15:20.558666088+00:00 stderr F I1212 16:15:20.558563 1 reconciliation_controller.go:171] error occurred GetQuotableResources err=unable to retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, oauth.openshift.io/v1: stale GroupVersion discovery: oauth.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: 
quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1, user.openshift.io/v1: stale GroupVersion discovery: user.openshift.io/v1 2025-12-12T16:15:20.558895123+00:00 stderr F E1212 16:15:20.558677 1 reconciliation_controller.go:172] "Unhandled Error" err="unable to retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, oauth.openshift.io/v1: stale GroupVersion discovery: oauth.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1, user.openshift.io/v1: stale GroupVersion discovery: user.openshift.io/v1" 2025-12-12T16:15:20.559618331+00:00 stderr F I1212 16:15:20.559583 1 policy_controller.go:88] Started "openshift.io/podsecurity-admission-label-syncer" 2025-12-12T16:15:20.559618331+00:00 stderr F I1212 16:15:20.559599 1 policy_controller.go:78] Starting "openshift.io/privileged-namespaces-psa-label-syncer" 2025-12-12T16:15:20.559741004+00:00 stderr F I1212 16:15:20.559702 1 base_controller.go:76] Waiting for caches to sync for pod-security-admission-label-synchronization-controller 2025-12-12T16:15:20.559826006+00:00 stderr F I1212 16:15:20.559801 1 reconciliation_controller.go:207] syncing resource quota controller with updated resources from discovery: added: [/v1, Resource=configmaps /v1, Resource=endpoints /v1, Resource=events /v1, Resource=limitranges /v1, Resource=persistentvolumeclaims /v1, Resource=pods /v1, Resource=podtemplates /v1, Resource=replicationcontrollers /v1, Resource=resourcequotas /v1, Resource=secrets /v1, Resource=serviceaccounts /v1, Resource=services apps/v1, Resource=controllerrevisions apps/v1, Resource=daemonsets apps/v1, Resource=deployments apps/v1, Resource=replicasets apps/v1, Resource=statefulsets autoscaling.openshift.io/v1beta1, Resource=machineautoscalers autoscaling/v2, Resource=horizontalpodautoscalers batch/v1, Resource=cronjobs batch/v1, Resource=jobs config.openshift.io/v1, Resource=imagepolicies controlplane.operator.openshift.io/v1alpha1, Resource=podnetworkconnectivitychecks coordination.k8s.io/v1, Resource=leases discovery.k8s.io/v1, Resource=endpointslices events.k8s.io/v1, Resource=events gateway.networking.k8s.io/v1, Resource=gateways gateway.networking.k8s.io/v1, Resource=grpcroutes gateway.networking.k8s.io/v1, Resource=httproutes gateway.networking.k8s.io/v1beta1, Resource=referencegrants helm.openshift.io/v1beta1, Resource=projecthelmchartrepositories infrastructure.cluster.x-k8s.io/v1beta1, Resource=metal3remediations infrastructure.cluster.x-k8s.io/v1beta1, Resource=metal3remediationtemplates ingress.operator.openshift.io/v1, Resource=dnsrecords ipam.cluster.x-k8s.io/v1beta1, 
Resource=ipaddressclaims ipam.cluster.x-k8s.io/v1beta1, Resource=ipaddresses k8s.cni.cncf.io/v1, Resource=network-attachment-definitions k8s.cni.cncf.io/v1alpha1, Resource=ipamclaims k8s.ovn.org/v1, Resource=egressfirewalls k8s.ovn.org/v1, Resource=egressqoses k8s.ovn.org/v1, Resource=egressservices k8s.ovn.org/v1, Resource=userdefinednetworks machine.openshift.io/v1, Resource=controlplanemachinesets machine.openshift.io/v1beta1, Resource=machinehealthchecks machine.openshift.io/v1beta1, Resource=machines machine.openshift.io/v1beta1, Resource=machinesets monitoring.coreos.com/v1, Resource=alertmanagers monitoring.coreos.com/v1, Resource=podmonitors monitoring.coreos.com/v1, Resource=probes monitoring.coreos.com/v1, Resource=prometheuses monitoring.coreos.com/v1, Resource=prometheusrules monitoring.coreos.com/v1, Resource=servicemonitors monitoring.coreos.com/v1, Resource=thanosrulers monitoring.coreos.com/v1beta1, Resource=alertmanagerconfigs monitoring.openshift.io/v1, Resource=alertingrules monitoring.openshift.io/v1, Resource=alertrelabelconfigs network.operator.openshift.io/v1, Resource=egressrouters network.operator.openshift.io/v1, Resource=operatorpkis networking.k8s.io/v1, Resource=ingresses networking.k8s.io/v1, Resource=networkpolicies operator.openshift.io/v1, Resource=ingresscontrollers operators.coreos.com/v1, Resource=operatorgroups operators.coreos.com/v1alpha1, Resource=catalogsources operators.coreos.com/v1alpha1, Resource=clusterserviceversions operators.coreos.com/v1alpha1, Resource=installplans operators.coreos.com/v1alpha1, Resource=subscriptions operators.coreos.com/v2, Resource=operatorconditions policy/v1, Resource=poddisruptionbudgets rbac.authorization.k8s.io/v1, Resource=rolebindings rbac.authorization.k8s.io/v1, Resource=roles storage.k8s.io/v1, Resource=csistoragecapacities whereabouts.cni.cncf.io/v1alpha1, Resource=ippools whereabouts.cni.cncf.io/v1alpha1, Resource=nodeslicepools whereabouts.cni.cncf.io/v1alpha1, Resource=overlappingrangeipreservations], removed: [] 2025-12-12T16:15:20.561375283+00:00 stderr F I1212 16:15:20.561357 1 policy_controller.go:88] Started "openshift.io/privileged-namespaces-psa-label-syncer" 2025-12-12T16:15:20.561408804+00:00 stderr F I1212 16:15:20.561399 1 policy_controller.go:78] Starting "openshift.io/namespace-security-allocation" 2025-12-12T16:15:20.561475476+00:00 stderr F I1212 16:15:20.561462 1 privileged_namespaces_controller.go:75] "Starting" controller="privileged-namespaces-psa-label-syncer" 2025-12-12T16:15:20.561502176+00:00 stderr F I1212 16:15:20.561493 1 shared_informer.go:350] "Waiting for caches to sync" controller="privileged-namespaces-psa-label-syncer" 2025-12-12T16:15:20.588222040+00:00 stderr F I1212 16:15:20.586845 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:15:20.588222040+00:00 stderr F I1212 16:15:20.587060 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:15:20.588222040+00:00 stderr F I1212 16:15:20.587512 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:15:20.587483992 +0000 UTC))" 2025-12-12T16:15:20.588222040+00:00 stderr F I1212 
16:15:20.587532 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:15:20.587521953 +0000 UTC))" 2025-12-12T16:15:20.588222040+00:00 stderr F I1212 16:15:20.587545 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:15:20.587537213 +0000 UTC))" 2025-12-12T16:15:20.588222040+00:00 stderr F I1212 16:15:20.587557 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:15:20.587550164 +0000 UTC))" 2025-12-12T16:15:20.588222040+00:00 stderr F I1212 16:15:20.587571 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:15:20.587562604 +0000 UTC))" 2025-12-12T16:15:20.588222040+00:00 stderr F I1212 16:15:20.587585 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:15:20.587576494 +0000 UTC))" 2025-12-12T16:15:20.588222040+00:00 stderr F I1212 16:15:20.587598 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:15:20.587589585 +0000 UTC))" 2025-12-12T16:15:20.588222040+00:00 stderr F I1212 16:15:20.587826 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"kube-controller-manager.openshift-kube-controller-manager.svc\" [serving] validServingFor=[kube-controller-manager.openshift-kube-controller-manager.svc,kube-controller-manager.openshift-kube-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:15:20.58781171 +0000 UTC))" 
2025-12-12T16:15:20.588222040+00:00 stderr F I1212 16:15:20.587962 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556102\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556102\" (2025-12-12 15:15:02 +0000 UTC to 2028-12-12 15:15:02 +0000 UTC (now=2025-12-12 16:15:20.587947773 +0000 UTC))" 2025-12-12T16:15:20.588557898+00:00 stderr F I1212 16:15:20.588319 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:15:20.588639240+00:00 stderr F I1212 16:15:20.588614 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:15:20.588587049 +0000 UTC))" 2025-12-12T16:15:20.588655780+00:00 stderr F I1212 16:15:20.588646 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:15:20.58863073 +0000 UTC))" 2025-12-12T16:15:20.588694491+00:00 stderr F I1212 16:15:20.588663 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:15:20.58865277 +0000 UTC))" 2025-12-12T16:15:20.588694491+00:00 stderr F I1212 16:15:20.588685 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:15:20.588674281 +0000 UTC))" 2025-12-12T16:15:20.588792334+00:00 stderr F I1212 16:15:20.588704 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:15:20.588692711 +0000 UTC))" 2025-12-12T16:15:20.588792334+00:00 stderr F I1212 16:15:20.588728 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:15:20.588716412 +0000 UTC))" 2025-12-12T16:15:20.588792334+00:00 stderr F I1212 
16:15:20.588764 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:15:20.588750543 +0000 UTC))" 2025-12-12T16:15:20.588792334+00:00 stderr F I1212 16:15:20.588781 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:15:20.588769493 +0000 UTC))" 2025-12-12T16:15:20.589028429+00:00 stderr F I1212 16:15:20.588999 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"kube-controller-manager.openshift-kube-controller-manager.svc\" [serving] validServingFor=[kube-controller-manager.openshift-kube-controller-manager.svc,kube-controller-manager.openshift-kube-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:15:20.588979238 +0000 UTC))" 2025-12-12T16:15:20.589632434+00:00 stderr F I1212 16:15:20.589223 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556102\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556102\" (2025-12-12 15:15:02 +0000 UTC to 2028-12-12 15:15:02 +0000 UTC (now=2025-12-12 16:15:20.589205924 +0000 UTC))" 2025-12-12T16:15:20.896851395+00:00 stderr F I1212 16:15:20.896767 1 policy_controller.go:88] Started "openshift.io/namespace-security-allocation" 2025-12-12T16:15:20.896851395+00:00 stderr F I1212 16:15:20.896798 1 policy_controller.go:91] Started Origin Controllers 2025-12-12T16:15:20.897194343+00:00 stderr F I1212 16:15:20.897137 1 base_controller.go:76] Waiting for caches to sync for namespace-security-allocation-controller 2025-12-12T16:15:20.911102669+00:00 stderr F I1212 16:15:20.910880 1 reflector.go:430] "Caches populated" type="*v2.HorizontalPodAutoscaler" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.913275901+00:00 stderr F I1212 16:15:20.912661 1 reflector.go:430] "Caches populated" type="*v1.SecurityContextConstraints" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.913275901+00:00 stderr F I1212 16:15:20.913048 1 reflector.go:430] "Caches populated" type="*v1.ControllerRevision" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.915801872+00:00 stderr F I1212 16:15:20.913879 1 reflector.go:430] "Caches populated" type="*v1.StatefulSet" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.915801872+00:00 stderr F I1212 16:15:20.914539 1 reflector.go:430] "Caches populated" type="*v1.Job" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.915801872+00:00 
stderr F I1212 16:15:20.915283 1 reflector.go:430] "Caches populated" type="*v1.NetworkPolicy" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.915801872+00:00 stderr F I1212 16:15:20.915639 1 reflector.go:430] "Caches populated" type="*v1.CronJob" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.917288498+00:00 stderr F I1212 16:15:20.917260 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.917365379+00:00 stderr F I1212 16:15:20.917334 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.917517083+00:00 stderr F I1212 16:15:20.917499 1 reflector.go:430] "Caches populated" type="*v1.CSIStorageCapacity" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.917578065+00:00 stderr F I1212 16:15:20.917540 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.917710858+00:00 stderr F E1212 16:15:20.917597 1 reflector.go:200] "Failed to watch" err="failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.ImageStream" 2025-12-12T16:15:20.927241417+00:00 stderr F I1212 16:15:20.923745 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.927241417+00:00 stderr F I1212 16:15:20.924974 1 reflector.go:430] "Caches populated" type="*v1.EndpointSlice" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.927241417+00:00 stderr F I1212 16:15:20.925727 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.927241417+00:00 stderr F I1212 16:15:20.925957 1 reflector.go:430] "Caches populated" type="*v1.Lease" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.932349050+00:00 stderr F I1212 16:15:20.929824 1 reflector.go:430] "Caches populated" type="*v1.DaemonSet" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.932349050+00:00 stderr F E1212 16:15:20.931031 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-image-registry" should be enqueued: namespace "openshift-image-registry" not found 2025-12-12T16:15:20.966866962+00:00 stderr F E1212 16:15:20.966749 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-service-ca" should be enqueued: namespace "openshift-service-ca" not found 2025-12-12T16:15:20.970689184+00:00 stderr F I1212 16:15:20.970618 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.989816755+00:00 stderr F I1212 16:15:20.989732 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.990010550+00:00 stderr F I1212 16:15:20.989989 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 
2025-12-12T16:15:20.990275846+00:00 stderr F I1212 16:15:20.990254 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.990564463+00:00 stderr F I1212 16:15:20.990536 1 reflector.go:430] "Caches populated" type="*v1.PodDisruptionBudget" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.991970087+00:00 stderr F I1212 16:15:20.990635 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.991970087+00:00 stderr F I1212 16:15:20.990824 1 reflector.go:430] "Caches populated" type="*v1.Ingress" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.991970087+00:00 stderr F I1212 16:15:20.990939 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.991970087+00:00 stderr F I1212 16:15:20.990981 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.992846398+00:00 stderr F I1212 16:15:20.992811 1 reflector.go:430] "Caches populated" type="*v1.CertificateSigningRequest" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.995214475+00:00 stderr F I1212 16:15:20.993123 1 reflector.go:430] "Caches populated" type="*v1.ClusterResourceQuota" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.995668256+00:00 stderr F I1212 16:15:20.995634 1 reflector.go:430] "Caches populated" type="*v1.ClusterRoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.995932842+00:00 stderr F I1212 16:15:20.995893 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "basic-user" not found 2025-12-12T16:15:20.995932842+00:00 stderr F I1212 16:15:20.995914 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.996548327+00:00 stderr F I1212 16:15:20.996510 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.996548327+00:00 stderr F I1212 16:15:20.996539 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-autoscaler" not found 2025-12-12T16:15:20.996576468+00:00 stderr F I1212 16:15:20.996550 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-autoscaler-operator" not found 2025-12-12T16:15:20.996576468+00:00 stderr F I1212 16:15:20.996558 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-monitoring-operator" not found 2025-12-12T16:15:20.996576468+00:00 stderr F I1212 16:15:20.996564 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: 
clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.996585928+00:00 stderr F I1212 16:15:20.996578 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-reader" not found 2025-12-12T16:15:20.996592938+00:00 stderr F I1212 16:15:20.996585 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-samples-operator" not found 2025-12-12T16:15:20.996599888+00:00 stderr F I1212 16:15:20.996593 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-samples-operator-imageconfig-reader" not found 2025-12-12T16:15:20.996606908+00:00 stderr F I1212 16:15:20.996599 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-samples-operator-proxy-reader" not found 2025-12-12T16:15:20.996613659+00:00 stderr F I1212 16:15:20.996605 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-status" not found 2025-12-12T16:15:20.996622249+00:00 stderr F I1212 16:15:20.996616 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.996774752+00:00 stderr F I1212 16:15:20.996753 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "console" not found 2025-12-12T16:15:20.996774752+00:00 stderr F I1212 16:15:20.996768 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:auth-delegator" not found 2025-12-12T16:15:20.996784403+00:00 stderr F I1212 16:15:20.996775 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "console-extensions-reader" not found 2025-12-12T16:15:20.996784403+00:00 stderr F I1212 16:15:20.996780 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "console-operator" not found 2025-12-12T16:15:20.996791473+00:00 stderr F I1212 16:15:20.996786 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:auth-delegator" not found 2025-12-12T16:15:20.996807323+00:00 stderr F I1212 16:15:20.996791 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "control-plane-machine-set-operator" not found 2025-12-12T16:15:20.996807323+00:00 stderr F I1212 16:15:20.996797 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "crc-hostpath-external-health-monitor-controller-runner" not found 2025-12-12T16:15:20.996807323+00:00 stderr F I1212 16:15:20.996802 1 sccrolecache.go:466] failed to retrieve a 
role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "crc-hostpath-external-provisioner-runner" not found 2025-12-12T16:15:20.996818013+00:00 stderr F I1212 16:15:20.996807 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "crc-hostpath-external-provisioner-runner" not found 2025-12-12T16:15:20.996818013+00:00 stderr F I1212 16:15:20.996811 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "csi-snapshot-controller-operator-clusterrole" not found 2025-12-12T16:15:20.996825494+00:00 stderr F I1212 16:15:20.996821 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.996832314+00:00 stderr F I1212 16:15:20.996825 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-image-registry-operator" not found 2025-12-12T16:15:20.996838984+00:00 stderr F I1212 16:15:20.996831 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "dns-monitoring" not found 2025-12-12T16:15:20.996845754+00:00 stderr F I1212 16:15:20.996836 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "helm-chartrepos-viewer" not found 2025-12-12T16:15:20.996852674+00:00 stderr F I1212 16:15:20.996845 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "kube-apiserver" not found 2025-12-12T16:15:20.996852674+00:00 stderr F I1212 16:15:20.996849 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.996859714+00:00 stderr F I1212 16:15:20.996854 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "machine-api-controllers" not found 2025-12-12T16:15:20.996866495+00:00 stderr F I1212 16:15:20.996858 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "machine-api-controllers-metal3-remediation" not found 2025-12-12T16:15:20.996873325+00:00 stderr F I1212 16:15:20.996865 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "machine-api-operator" not found 2025-12-12T16:15:20.996873325+00:00 stderr F I1212 16:15:20.996869 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "machine-api-operator-ext-remediation" not found 2025-12-12T16:15:20.996880585+00:00 stderr F I1212 16:15:20.996874 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "machine-config-controller" not 
found 2025-12-12T16:15:20.996890845+00:00 stderr F I1212 16:15:20.996878 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "machine-config-daemon" not found 2025-12-12T16:15:20.996890845+00:00 stderr F I1212 16:15:20.996884 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "machine-config-server" not found 2025-12-12T16:15:20.996897885+00:00 stderr F I1212 16:15:20.996892 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "machine-os-builder" not found 2025-12-12T16:15:20.996904746+00:00 stderr F I1212 16:15:20.996897 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:scc:anyuid" not found 2025-12-12T16:15:20.996904746+00:00 stderr F I1212 16:15:20.996901 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "marketplace-operator" not found 2025-12-12T16:15:20.996912176+00:00 stderr F I1212 16:15:20.996905 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "metrics-daemon-role" not found 2025-12-12T16:15:20.996912176+00:00 stderr F I1212 16:15:20.996909 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "multus-admission-controller-webhook" not found 2025-12-12T16:15:20.996919676+00:00 stderr F I1212 16:15:20.996914 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "multus-ancillary-tools" not found 2025-12-12T16:15:20.996928746+00:00 stderr F I1212 16:15:20.996919 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "multus-ancillary-tools" not found 2025-12-12T16:15:20.996928746+00:00 stderr F I1212 16:15:20.996923 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "multus" not found 2025-12-12T16:15:20.996943486+00:00 stderr F I1212 16:15:20.996927 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "multus-ancillary-tools" not found 2025-12-12T16:15:20.996943486+00:00 stderr F I1212 16:15:20.996932 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "whereabouts-cni" not found 2025-12-12T16:15:20.996943486+00:00 stderr F I1212 16:15:20.996935 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "network-diagnostics" not found 2025-12-12T16:15:20.996943486+00:00 stderr F I1212 16:15:20.996940 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io 
"network-node-identity" not found 2025-12-12T16:15:20.996953127+00:00 stderr F I1212 16:15:20.996944 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:operator-lifecycle-manager" not found 2025-12-12T16:15:20.996953127+00:00 stderr F I1212 16:15:20.996949 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "openshift-dns" not found 2025-12-12T16:15:20.996967267+00:00 stderr F I1212 16:15:20.996953 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "openshift-dns-operator" not found 2025-12-12T16:15:20.996967267+00:00 stderr F I1212 16:15:20.996957 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:image-pruner" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.996981 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "openshift-ingress-operator" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.996989 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "openshift-ingress-router" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.996993 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "openshift-iptables-alerter" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.996998 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "openshift-ovn-kubernetes-control-plane-limited" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997002 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "openshift-ovn-kubernetes-node-limited" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997007 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "openshift-ovn-kubernetes-kube-rbac-proxy" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997011 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:auth-delegator" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997016 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "prometheus-k8s-scheduler-resources" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997019 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "registry-monitoring" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997024 1 sccrolecache.go:466] 
failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:registry" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997027 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "router-monitoring" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997032 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "self-access-reviewer" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997035 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "self-provisioner" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997039 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997043 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:node-bootstrapper" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997049 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:certificates.k8s.io:certificatesigningrequests:selfnodeclient" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997053 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:basic-user" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997061 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:build-strategy-docker" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997065 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:build-strategy-jenkinspipeline" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997072 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:build-strategy-source" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997076 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:attachdetach-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997080 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:certificate-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997085 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io 
"system:controller:clusterrole-aggregation-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997091 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:cronjob-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997095 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:daemon-set-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997100 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:deployment-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997103 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:disruption-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997108 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:endpoint-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997112 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:endpointslice-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997117 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:endpointslicemirroring-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997121 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:ephemeral-volume-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997125 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:expand-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997129 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:generic-garbage-collector" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997134 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:horizontal-pod-autoscaler" not found 2025-12-12T16:15:20.997362057+00:00 stderr F I1212 16:15:20.997138 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:job-controller" not found 2025-12-12T16:15:20.997362057+00:00 stderr P I1212 16:15:20.997142 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: 
clusterrole.rbac. 2025-12-12T16:15:20.997403898+00:00 stderr F authorization.k8s.io "system:controller:legacy-service-account-token-cleaner" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997147 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:namespace-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997156 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:node-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997160 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:persistent-volume-binder" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997190 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:pod-garbage-collector" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997196 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:pv-protection-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997201 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:pvc-protection-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997205 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:replicaset-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997210 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:replication-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997214 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:resourcequota-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997219 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:root-ca-cert-publisher" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997222 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:route-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997231 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:selinux-warning-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997235 1 sccrolecache.go:466] failed to 
retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:service-account-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997240 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:service-ca-cert-publisher" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997245 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:service-cidrs-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997249 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:service-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997254 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:statefulset-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997259 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:ttl-after-finished-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997263 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:ttl-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997268 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:controller:validatingadmissionpolicy-status-controller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997272 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:deployer" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997278 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:discovery" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997282 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:image-builder" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997286 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:image-puller" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997290 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:kube-controller-manager" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997295 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't 
retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:kube-dns" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997298 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:kube-scheduler" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997303 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:master" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997307 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:monitoring" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997313 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:node" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997317 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:node-admin" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997322 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:node-admin" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997327 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:node-bootstrapper" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997331 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:node-proxier" not found 2025-12-12T16:15:20.997403898+00:00 stderr F I1212 16:15:20.997335 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:node-proxier" not found 2025-12-12T16:15:20.997403898+00:00 stderr P I1212 16:15:20.997340 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clust 2025-12-12T16:15:20.997427948+00:00 stderr F errole.rbac.authorization.k8s.io "system:oauth-token-deleter" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997345 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:build-config-change-controller" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997350 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:build-controller" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997354 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:cluster-csr-approver-controller" not found 
2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997359 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:cluster-quota-reconciliation-controller" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997364 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:default-rolebindings-controller" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997371 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:deployer-controller" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997375 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:deploymentconfig-controller" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997380 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:horizontal-pod-autoscaler" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997384 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:image-import-controller" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997389 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:image-trigger-controller" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997394 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:auth-delegator" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997399 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:check-endpoints-crd-reader" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997403 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:check-endpoints-node-reader" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997409 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:machine-approver" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997414 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:namespace-security-allocation-controller" not found 2025-12-12T16:15:20.997427948+00:00 stderr F I1212 16:15:20.997419 1 sccrolecache.go:466] failed to retrieve 
a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:origin-namespace-controller" not found 2025-12-12T16:15:20.997442148+00:00 stderr F I1212 16:15:20.997424 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:podsecurity-admission-label-syncer-controller" not found 2025-12-12T16:15:20.997442148+00:00 stderr F I1212 16:15:20.997432 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:privileged-namespaces-psa-label-syncer" not found 2025-12-12T16:15:20.997442148+00:00 stderr F I1212 16:15:20.997437 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:pv-recycler-controller" not found 2025-12-12T16:15:20.997450329+00:00 stderr F I1212 16:15:20.997443 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:resourcequota-controller" not found 2025-12-12T16:15:20.997450329+00:00 stderr F I1212 16:15:20.997447 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:service-ca" not found 2025-12-12T16:15:20.997457639+00:00 stderr F I1212 16:15:20.997453 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:service-ingress-ip-controller" not found 2025-12-12T16:15:20.997464789+00:00 stderr F I1212 16:15:20.997457 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:serviceaccount-controller" not found 2025-12-12T16:15:20.997471629+00:00 stderr F I1212 16:15:20.997462 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:serviceaccount-pull-secrets-controller" not found 2025-12-12T16:15:20.997471629+00:00 stderr F I1212 16:15:20.997468 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:template-instance-controller" not found 2025-12-12T16:15:20.997478979+00:00 stderr F I1212 16:15:20.997473 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "admin" not found 2025-12-12T16:15:20.997487360+00:00 stderr F I1212 16:15:20.997478 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:template-instance-finalizer-controller" not found 2025-12-12T16:15:20.997487360+00:00 stderr F I1212 16:15:20.997482 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: 
clusterrole.rbac.authorization.k8s.io "admin" not found 2025-12-12T16:15:20.997499090+00:00 stderr F I1212 16:15:20.997488 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:template-service-broker" not found 2025-12-12T16:15:20.997499090+00:00 stderr F I1212 16:15:20.997496 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:controller:unidling-controller" not found 2025-12-12T16:15:20.997507130+00:00 stderr F I1212 16:15:20.997502 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:discovery" not found 2025-12-12T16:15:20.997514000+00:00 stderr F I1212 16:15:20.997509 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997520740+00:00 stderr F I1212 16:15:20.997515 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997527491+00:00 stderr F I1212 16:15:20.997521 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997534571+00:00 stderr F I1212 16:15:20.997527 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:openshift-controller-manager" not found 2025-12-12T16:15:20.997542531+00:00 stderr F I1212 16:15:20.997535 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:openshift-controller-manager:image-trigger-controller" not found 2025-12-12T16:15:20.997550161+00:00 stderr F I1212 16:15:20.997542 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:openshift-controller-manager:ingress-to-route-controller" not found 2025-12-12T16:15:20.997558051+00:00 stderr F I1212 16:15:20.997551 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:openshift-controller-manager:update-buildconfig-status" not found 2025-12-12T16:15:20.997565831+00:00 stderr F I1212 16:15:20.997557 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:openshift-route-controller-manager" not found 2025-12-12T16:15:20.997574072+00:00 stderr F I1212 16:15:20.997565 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997574072+00:00 stderr F I1212 16:15:20.997571 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: 
clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997584772+00:00 stderr F I1212 16:15:20.997578 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:operator:etcd-backup-role" not found 2025-12-12T16:15:20.997592472+00:00 stderr F I1212 16:15:20.997584 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997603872+00:00 stderr F I1212 16:15:20.997591 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997603872+00:00 stderr F I1212 16:15:20.997598 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997612643+00:00 stderr F I1212 16:15:20.997604 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997612643+00:00 stderr F I1212 16:15:20.997609 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997621313+00:00 stderr F I1212 16:15:20.997615 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997629653+00:00 stderr F I1212 16:15:20.997621 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:kube-scheduler" not found 2025-12-12T16:15:20.997637443+00:00 stderr F I1212 16:15:20.997627 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997645233+00:00 stderr F I1212 16:15:20.997640 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997652544+00:00 stderr F I1212 16:15:20.997646 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997659284+00:00 stderr F I1212 16:15:20.997652 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997666074+00:00 stderr F I1212 16:15:20.997658 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997666074+00:00 stderr F I1212 16:15:20.997662 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io 
"cluster-admin" not found 2025-12-12T16:15:20.997672954+00:00 stderr F I1212 16:15:20.997668 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997680904+00:00 stderr F I1212 16:15:20.997672 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997680904+00:00 stderr F I1212 16:15:20.997677 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "cluster-admin" not found 2025-12-12T16:15:20.997688974+00:00 stderr F I1212 16:15:20.997681 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:public-info-viewer" not found 2025-12-12T16:15:20.997688974+00:00 stderr F I1212 16:15:20.997686 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:scc:restricted-v2" not found 2025-12-12T16:15:20.997701125+00:00 stderr F I1212 16:15:20.997690 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:tokenreview-openshift-controller-manager" not found 2025-12-12T16:15:20.997701125+00:00 stderr F I1212 16:15:20.997696 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:tokenreview-openshift-route-controller-manager" not found 2025-12-12T16:15:20.997709905+00:00 stderr F I1212 16:15:20.997700 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:openshift:useroauthaccesstoken-manager" not found 2025-12-12T16:15:20.997718135+00:00 stderr F I1212 16:15:20.997708 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:public-info-viewer" not found 2025-12-12T16:15:20.997718135+00:00 stderr F I1212 16:15:20.997714 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:scope-impersonation" not found 2025-12-12T16:15:20.997725735+00:00 stderr F I1212 16:15:20.997719 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:sdn-reader" not found 2025-12-12T16:15:20.997725735+00:00 stderr F I1212 16:15:20.997723 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:service-account-issuer-discovery" not found 2025-12-12T16:15:20.997734416+00:00 stderr F I1212 16:15:20.997729 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:volume-scheduler" not found 2025-12-12T16:15:20.997741196+00:00 stderr F I1212 16:15:20.997733 1 
sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:webhook" not found 2025-12-12T16:15:20.999394126+00:00 stderr F I1212 16:15:20.999363 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.999663052+00:00 stderr F I1212 16:15:20.999645 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "machine-config-controller-events" not found 2025-12-12T16:15:21.000209945+00:00 stderr F I1212 16:15:20.999979 1 reflector.go:430] "Caches populated" type="*v1.ClusterRole" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:21.000651806+00:00 stderr F I1212 16:15:21.000620 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:deployer" not found 2025-12-12T16:15:21.000700147+00:00 stderr F I1212 16:15:21.000686 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:image-builder" not found 2025-12-12T16:15:21.000764089+00:00 stderr F I1212 16:15:21.000739 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "system:image-puller" not found 2025-12-12T16:15:21.000803400+00:00 stderr F I1212 16:15:21.000791 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve role from role ref: role.rbac.authorization.k8s.io "external-health-monitor-controller-cfg" not found 2025-12-12T16:15:21.004722394+00:00 stderr F I1212 16:15:21.002762 1 reflector.go:430] "Caches populated" type="*v1.ReplicaSet" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:21.005373910+00:00 stderr F E1212 16:15:21.005327 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-image-registry" should be enqueued: namespace "openshift-image-registry" not found 2025-12-12T16:15:21.008091125+00:00 stderr F E1212 16:15:21.008034 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-node-identity" should be enqueued: namespace "openshift-network-node-identity" not found 2025-12-12T16:15:21.008291590+00:00 stderr F E1212 16:15:21.008254 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-service-ca" should be enqueued: namespace "openshift-service-ca" not found 2025-12-12T16:15:21.057270150+00:00 stderr F I1212 16:15:21.057170 1 base_controller.go:82] Caches are synced for WebhookAuthenticatorCertApprover_csr-approver-controller 2025-12-12T16:15:21.057270150+00:00 stderr F I1212 16:15:21.057230 1 base_controller.go:119] Starting #1 worker of WebhookAuthenticatorCertApprover_csr-approver-controller controller ... 
2025-12-12T16:15:21.095809348+00:00 stderr F I1212 16:15:21.095618 1 reflector.go:430] "Caches populated" type="*v1.ResourceQuota" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:21.105116083+00:00 stderr F I1212 16:15:21.105056 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:21.296037372+00:00 stderr F I1212 16:15:21.295978 1 reflector.go:430] "Caches populated" type="*v1.ReplicationController" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:21.305160742+00:00 stderr F I1212 16:15:21.305013 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:21.499685028+00:00 stderr F I1212 16:15:21.499617 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499755 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "default" should be enqueued: namespace "default" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499828 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "default" should be enqueued: namespace "default" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499842 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "default" should be enqueued: namespace "default" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499878 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "hostpath-provisioner" should be enqueued: namespace "hostpath-provisioner" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499902 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "hostpath-provisioner" should be enqueued: namespace "hostpath-provisioner" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499912 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "hostpath-provisioner" should be enqueued: namespace "hostpath-provisioner" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499918 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "hostpath-provisioner" should be enqueued: namespace "hostpath-provisioner" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499924 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "hostpath-provisioner" should be enqueued: namespace "hostpath-provisioner" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499942 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-node-lease" should be enqueued: namespace "kube-node-lease" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499949 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-node-lease" should be enqueued: namespace "kube-node-lease" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499963 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-node-lease" should be enqueued: namespace "kube-node-lease" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499973 
1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-public" should be enqueued: namespace "kube-public" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499979 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-public" should be enqueued: namespace "kube-public" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499984 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-public" should be enqueued: namespace "kube-public" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499990 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.499995 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.500000 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.500005 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500036497+00:00 stderr F E1212 16:15:21.500027 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500077728+00:00 stderr F E1212 16:15:21.500032 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500077728+00:00 stderr F E1212 16:15:21.500039 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500077728+00:00 stderr F E1212 16:15:21.500044 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500077728+00:00 stderr F E1212 16:15:21.500050 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500077728+00:00 stderr F E1212 16:15:21.500055 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500077728+00:00 stderr F E1212 16:15:21.500070 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500093458+00:00 stderr F E1212 16:15:21.500075 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500093458+00:00 stderr F E1212 16:15:21.500089 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500133509+00:00 stderr F E1212 16:15:21.500111 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace 
"kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500133509+00:00 stderr F E1212 16:15:21.500123 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500199251+00:00 stderr F E1212 16:15:21.500166 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500277403+00:00 stderr F E1212 16:15:21.500264 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500305683+00:00 stderr F E1212 16:15:21.500296 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500330524+00:00 stderr F E1212 16:15:21.500321 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500365825+00:00 stderr F E1212 16:15:21.500355 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500393635+00:00 stderr F E1212 16:15:21.500382 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500434396+00:00 stderr F E1212 16:15:21.500422 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500476067+00:00 stderr F E1212 16:15:21.500463 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500507278+00:00 stderr F E1212 16:15:21.500495 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500549339+00:00 stderr F E1212 16:15:21.500537 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500592160+00:00 stderr F E1212 16:15:21.500580 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500619791+00:00 stderr F E1212 16:15:21.500610 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500659192+00:00 stderr F E1212 16:15:21.500648 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500711453+00:00 stderr F E1212 16:15:21.500699 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500740974+00:00 stderr F E1212 16:15:21.500730 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 
2025-12-12T16:15:21.500770164+00:00 stderr F E1212 16:15:21.500760 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500809455+00:00 stderr F E1212 16:15:21.500797 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500840076+00:00 stderr F E1212 16:15:21.500829 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500879757+00:00 stderr F E1212 16:15:21.500867 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500913278+00:00 stderr F E1212 16:15:21.500901 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500943389+00:00 stderr F E1212 16:15:21.500932 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "kube-system" should be enqueued: namespace "kube-system" not found 2025-12-12T16:15:21.500974549+00:00 stderr F E1212 16:15:21.500961 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-apiserver-operator" should be enqueued: namespace "openshift-apiserver-operator" not found 2025-12-12T16:15:21.501031791+00:00 stderr F E1212 16:15:21.501004 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-apiserver-operator" should be enqueued: namespace "openshift-apiserver-operator" not found 2025-12-12T16:15:21.501068592+00:00 stderr F E1212 16:15:21.501049 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-apiserver-operator" should be enqueued: namespace "openshift-apiserver-operator" not found 2025-12-12T16:15:21.501068592+00:00 stderr F E1212 16:15:21.501061 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-apiserver-operator" should be enqueued: namespace "openshift-apiserver-operator" not found 2025-12-12T16:15:21.501068592+00:00 stderr F E1212 16:15:21.501065 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-apiserver" should be enqueued: namespace "openshift-apiserver" not found 2025-12-12T16:15:21.501080042+00:00 stderr F E1212 16:15:21.501070 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-apiserver" should be enqueued: namespace "openshift-apiserver" not found 2025-12-12T16:15:21.501080042+00:00 stderr F E1212 16:15:21.501075 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-apiserver" should be enqueued: namespace "openshift-apiserver" not found 2025-12-12T16:15:21.501095822+00:00 stderr F E1212 16:15:21.501080 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-apiserver" should be enqueued: namespace "openshift-apiserver" not found 2025-12-12T16:15:21.501106192+00:00 stderr F E1212 16:15:21.501099 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-authentication-operator" should be enqueued: namespace "openshift-authentication-operator" not found 2025-12-12T16:15:21.501119023+00:00 stderr F E1212 
16:15:21.501105 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-authentication-operator" should be enqueued: namespace "openshift-authentication-operator" not found 2025-12-12T16:15:21.501119023+00:00 stderr F E1212 16:15:21.501111 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-authentication-operator" should be enqueued: namespace "openshift-authentication-operator" not found 2025-12-12T16:15:21.501127873+00:00 stderr F E1212 16:15:21.501118 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-authentication-operator" should be enqueued: namespace "openshift-authentication-operator" not found 2025-12-12T16:15:21.501127873+00:00 stderr F E1212 16:15:21.501122 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-authentication" should be enqueued: namespace "openshift-authentication" not found 2025-12-12T16:15:21.501136623+00:00 stderr F E1212 16:15:21.501126 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-authentication" should be enqueued: namespace "openshift-authentication" not found 2025-12-12T16:15:21.501136623+00:00 stderr F E1212 16:15:21.501132 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-authentication" should be enqueued: namespace "openshift-authentication" not found 2025-12-12T16:15:21.501145413+00:00 stderr F E1212 16:15:21.501137 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-authentication" should be enqueued: namespace "openshift-authentication" not found 2025-12-12T16:15:21.501333668+00:00 stderr F E1212 16:15:21.501200 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cloud-network-config-controller" should be enqueued: namespace "openshift-cloud-network-config-controller" not found 2025-12-12T16:15:21.501333668+00:00 stderr F E1212 16:15:21.501210 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cloud-network-config-controller" should be enqueued: namespace "openshift-cloud-network-config-controller" not found 2025-12-12T16:15:21.501333668+00:00 stderr F E1212 16:15:21.501217 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cloud-network-config-controller" should be enqueued: namespace "openshift-cloud-network-config-controller" not found 2025-12-12T16:15:21.501355308+00:00 stderr F E1212 16:15:21.501221 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cloud-platform-infra" should be enqueued: namespace "openshift-cloud-platform-infra" not found 2025-12-12T16:15:21.501355308+00:00 stderr F E1212 16:15:21.501227 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cloud-platform-infra" should be enqueued: namespace "openshift-cloud-platform-infra" not found 2025-12-12T16:15:21.501432760+00:00 stderr F E1212 16:15:21.501416 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cloud-platform-infra" should be enqueued: namespace "openshift-cloud-platform-infra" not found 2025-12-12T16:15:21.501495052+00:00 stderr F E1212 16:15:21.501459 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-machine-approver" should be enqueued: namespace "openshift-cluster-machine-approver" 
not found 2025-12-12T16:15:21.501495052+00:00 stderr F E1212 16:15:21.501470 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-machine-approver" should be enqueued: namespace "openshift-cluster-machine-approver" not found 2025-12-12T16:15:21.501495052+00:00 stderr F E1212 16:15:21.501475 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-machine-approver" should be enqueued: namespace "openshift-cluster-machine-approver" not found 2025-12-12T16:15:21.501495052+00:00 stderr F E1212 16:15:21.501479 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-machine-approver" should be enqueued: namespace "openshift-cluster-machine-approver" not found 2025-12-12T16:15:21.501495052+00:00 stderr F E1212 16:15:21.501484 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-samples-operator" should be enqueued: namespace "openshift-cluster-samples-operator" not found 2025-12-12T16:15:21.501514602+00:00 stderr F E1212 16:15:21.501494 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-samples-operator" should be enqueued: namespace "openshift-cluster-samples-operator" not found 2025-12-12T16:15:21.501514602+00:00 stderr F E1212 16:15:21.501511 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-samples-operator" should be enqueued: namespace "openshift-cluster-samples-operator" not found 2025-12-12T16:15:21.501523503+00:00 stderr F E1212 16:15:21.501516 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-samples-operator" should be enqueued: namespace "openshift-cluster-samples-operator" not found 2025-12-12T16:15:21.501531943+00:00 stderr F E1212 16:15:21.501522 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-storage-operator" should be enqueued: namespace "openshift-cluster-storage-operator" not found 2025-12-12T16:15:21.501573744+00:00 stderr F E1212 16:15:21.501548 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-storage-operator" should be enqueued: namespace "openshift-cluster-storage-operator" not found 2025-12-12T16:15:21.501573744+00:00 stderr F E1212 16:15:21.501563 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-storage-operator" should be enqueued: namespace "openshift-cluster-storage-operator" not found 2025-12-12T16:15:21.501692087+00:00 stderr F E1212 16:15:21.501625 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-version" should be enqueued: namespace "openshift-cluster-version" not found 2025-12-12T16:15:21.502006644+00:00 stderr F E1212 16:15:21.501991 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-version" should be enqueued: namespace "openshift-cluster-version" not found 2025-12-12T16:15:21.502049295+00:00 stderr F E1212 16:15:21.502039 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-cluster-version" should be enqueued: namespace "openshift-cluster-version" not found 2025-12-12T16:15:21.502191679+00:00 stderr F E1212 16:15:21.502140 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-config-managed" 
should be enqueued: namespace "openshift-config-managed" not found 2025-12-12T16:15:21.502211739+00:00 stderr F E1212 16:15:21.502204 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-config-managed" should be enqueued: namespace "openshift-config-managed" not found 2025-12-12T16:15:21.502220339+00:00 stderr F E1212 16:15:21.502214 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-config-managed" should be enqueued: namespace "openshift-config-managed" not found 2025-12-12T16:15:21.502228359+00:00 stderr F E1212 16:15:21.502222 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-config-operator" should be enqueued: namespace "openshift-config-operator" not found 2025-12-12T16:15:21.502242420+00:00 stderr F E1212 16:15:21.502228 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-config-operator" should be enqueued: namespace "openshift-config-operator" not found 2025-12-12T16:15:21.502242420+00:00 stderr F E1212 16:15:21.502235 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-config-operator" should be enqueued: namespace "openshift-config-operator" not found 2025-12-12T16:15:21.502251140+00:00 stderr F E1212 16:15:21.502240 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-config-operator" should be enqueued: namespace "openshift-config-operator" not found 2025-12-12T16:15:21.502259450+00:00 stderr F E1212 16:15:21.502249 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-config" should be enqueued: namespace "openshift-config" not found 2025-12-12T16:15:21.502259450+00:00 stderr F E1212 16:15:21.502255 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-config" should be enqueued: namespace "openshift-config" not found 2025-12-12T16:15:21.502267920+00:00 stderr F E1212 16:15:21.502262 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-config" should be enqueued: namespace "openshift-config" not found 2025-12-12T16:15:21.502275951+00:00 stderr F E1212 16:15:21.502267 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-console-operator" should be enqueued: namespace "openshift-console-operator" not found 2025-12-12T16:15:21.502284091+00:00 stderr F E1212 16:15:21.502275 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-console-operator" should be enqueued: namespace "openshift-console-operator" not found 2025-12-12T16:15:21.502292451+00:00 stderr F E1212 16:15:21.502281 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-console-operator" should be enqueued: namespace "openshift-console-operator" not found 2025-12-12T16:15:21.502300801+00:00 stderr F E1212 16:15:21.502289 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-console-operator" should be enqueued: namespace "openshift-console-operator" not found 2025-12-12T16:15:21.502300801+00:00 stderr F E1212 16:15:21.502296 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-console-user-settings" should be enqueued: namespace "openshift-console-user-settings" not found 2025-12-12T16:15:21.502309311+00:00 stderr F E1212 16:15:21.502303 1 
podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-console-user-settings" should be enqueued: namespace "openshift-console-user-settings" not found 2025-12-12T16:15:21.502317382+00:00 stderr F E1212 16:15:21.502310 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-console-user-settings" should be enqueued: namespace "openshift-console-user-settings" not found 2025-12-12T16:15:21.502325432+00:00 stderr F E1212 16:15:21.502317 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-console" should be enqueued: namespace "openshift-console" not found 2025-12-12T16:15:21.502333732+00:00 stderr F E1212 16:15:21.502323 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-console" should be enqueued: namespace "openshift-console" not found 2025-12-12T16:15:21.502333732+00:00 stderr F E1212 16:15:21.502330 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-console" should be enqueued: namespace "openshift-console" not found 2025-12-12T16:15:21.502342142+00:00 stderr F E1212 16:15:21.502335 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-console" should be enqueued: namespace "openshift-console" not found 2025-12-12T16:15:21.502356753+00:00 stderr F E1212 16:15:21.502342 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-controller-manager-operator" should be enqueued: namespace "openshift-controller-manager-operator" not found 2025-12-12T16:15:21.502356753+00:00 stderr F E1212 16:15:21.502348 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-controller-manager-operator" should be enqueued: namespace "openshift-controller-manager-operator" not found 2025-12-12T16:15:21.502365723+00:00 stderr F E1212 16:15:21.502355 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-controller-manager-operator" should be enqueued: namespace "openshift-controller-manager-operator" not found 2025-12-12T16:15:21.502365723+00:00 stderr F E1212 16:15:21.502361 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-controller-manager-operator" should be enqueued: namespace "openshift-controller-manager-operator" not found 2025-12-12T16:15:21.502374313+00:00 stderr F E1212 16:15:21.502368 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-controller-manager" should be enqueued: namespace "openshift-controller-manager" not found 2025-12-12T16:15:21.502382383+00:00 stderr F E1212 16:15:21.502374 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-controller-manager" should be enqueued: namespace "openshift-controller-manager" not found 2025-12-12T16:15:21.502391493+00:00 stderr F E1212 16:15:21.502381 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-controller-manager" should be enqueued: namespace "openshift-controller-manager" not found 2025-12-12T16:15:21.502391493+00:00 stderr F E1212 16:15:21.502387 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-controller-manager" should be enqueued: namespace "openshift-controller-manager" not found 2025-12-12T16:15:21.502402064+00:00 stderr F E1212 16:15:21.502394 1 podsecurity_label_sync_controller.go:427] 
failed to determine whether namespace "openshift-dns-operator" should be enqueued: namespace "openshift-dns-operator" not found 2025-12-12T16:15:21.502410284+00:00 stderr F E1212 16:15:21.502400 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-dns-operator" should be enqueued: namespace "openshift-dns-operator" not found 2025-12-12T16:15:21.502410284+00:00 stderr F E1212 16:15:21.502407 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-dns-operator" should be enqueued: namespace "openshift-dns-operator" not found 2025-12-12T16:15:21.502418594+00:00 stderr F E1212 16:15:21.502412 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-dns-operator" should be enqueued: namespace "openshift-dns-operator" not found 2025-12-12T16:15:21.502426764+00:00 stderr F E1212 16:15:21.502419 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-dns" should be enqueued: namespace "openshift-dns" not found 2025-12-12T16:15:21.502435184+00:00 stderr F E1212 16:15:21.502425 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-dns" should be enqueued: namespace "openshift-dns" not found 2025-12-12T16:15:21.502435184+00:00 stderr F E1212 16:15:21.502431 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-dns" should be enqueued: namespace "openshift-dns" not found 2025-12-12T16:15:21.502443805+00:00 stderr F E1212 16:15:21.502436 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-dns" should be enqueued: namespace "openshift-dns" not found 2025-12-12T16:15:21.502455305+00:00 stderr F E1212 16:15:21.502442 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-dns" should be enqueued: namespace "openshift-dns" not found 2025-12-12T16:15:21.502455305+00:00 stderr F E1212 16:15:21.502448 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-etcd-operator" should be enqueued: namespace "openshift-etcd-operator" not found 2025-12-12T16:15:21.502463815+00:00 stderr F E1212 16:15:21.502454 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-etcd-operator" should be enqueued: namespace "openshift-etcd-operator" not found 2025-12-12T16:15:21.502463815+00:00 stderr F E1212 16:15:21.502460 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-etcd-operator" should be enqueued: namespace "openshift-etcd-operator" not found 2025-12-12T16:15:21.502472505+00:00 stderr F E1212 16:15:21.502466 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-etcd-operator" should be enqueued: namespace "openshift-etcd-operator" not found 2025-12-12T16:15:21.502480536+00:00 stderr F E1212 16:15:21.502473 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-etcd" should be enqueued: namespace "openshift-etcd" not found 2025-12-12T16:15:21.502488666+00:00 stderr F E1212 16:15:21.502479 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-etcd" should be enqueued: namespace "openshift-etcd" not found 2025-12-12T16:15:21.502488666+00:00 stderr F E1212 16:15:21.502485 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-etcd" should be enqueued: 
namespace "openshift-etcd" not found 2025-12-12T16:15:21.502497066+00:00 stderr F E1212 16:15:21.502491 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-etcd" should be enqueued: namespace "openshift-etcd" not found 2025-12-12T16:15:21.502504966+00:00 stderr F E1212 16:15:21.502496 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-etcd" should be enqueued: namespace "openshift-etcd" not found 2025-12-12T16:15:21.502513166+00:00 stderr F E1212 16:15:21.502503 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-etcd" should be enqueued: namespace "openshift-etcd" not found 2025-12-12T16:15:21.502513166+00:00 stderr F E1212 16:15:21.502509 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-host-network" should be enqueued: namespace "openshift-host-network" not found 2025-12-12T16:15:21.502523637+00:00 stderr F E1212 16:15:21.502517 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-host-network" should be enqueued: namespace "openshift-host-network" not found 2025-12-12T16:15:21.502531837+00:00 stderr F E1212 16:15:21.502522 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-host-network" should be enqueued: namespace "openshift-host-network" not found 2025-12-12T16:15:21.502540327+00:00 stderr F E1212 16:15:21.502529 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-image-registry" should be enqueued: namespace "openshift-image-registry" not found 2025-12-12T16:15:21.502540327+00:00 stderr F E1212 16:15:21.502535 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-image-registry" should be enqueued: namespace "openshift-image-registry" not found 2025-12-12T16:15:21.502548967+00:00 stderr F E1212 16:15:21.502542 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-image-registry" should be enqueued: namespace "openshift-image-registry" not found 2025-12-12T16:15:21.502561158+00:00 stderr F E1212 16:15:21.502548 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-image-registry" should be enqueued: namespace "openshift-image-registry" not found 2025-12-12T16:15:21.502561158+00:00 stderr F E1212 16:15:21.502554 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-image-registry" should be enqueued: namespace "openshift-image-registry" not found 2025-12-12T16:15:21.502570088+00:00 stderr F E1212 16:15:21.502559 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-image-registry" should be enqueued: namespace "openshift-image-registry" not found 2025-12-12T16:15:21.502578488+00:00 stderr F E1212 16:15:21.502567 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-image-registry" should be enqueued: namespace "openshift-image-registry" not found 2025-12-12T16:15:21.502578488+00:00 stderr F E1212 16:15:21.502573 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502586838+00:00 stderr F E1212 16:15:21.502580 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace 
"openshift-infra" not found 2025-12-12T16:15:21.502594798+00:00 stderr F E1212 16:15:21.502585 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502603259+00:00 stderr F E1212 16:15:21.502593 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502603259+00:00 stderr F E1212 16:15:21.502598 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502611649+00:00 stderr F E1212 16:15:21.502605 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502711381+00:00 stderr F E1212 16:15:21.502610 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502711381+00:00 stderr F E1212 16:15:21.502617 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502711381+00:00 stderr F E1212 16:15:21.502622 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502711381+00:00 stderr F E1212 16:15:21.502629 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502711381+00:00 stderr F E1212 16:15:21.502633 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502711381+00:00 stderr F E1212 16:15:21.502639 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502711381+00:00 stderr F E1212 16:15:21.502645 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502711381+00:00 stderr F E1212 16:15:21.502651 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502711381+00:00 stderr F E1212 16:15:21.502656 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502711381+00:00 stderr F E1212 16:15:21.502662 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502711381+00:00 stderr F E1212 16:15:21.502667 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502728052+00:00 stderr F E1212 16:15:21.502707 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace 
"openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502728052+00:00 stderr F E1212 16:15:21.502715 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502728052+00:00 stderr F E1212 16:15:21.502721 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502738252+00:00 stderr F E1212 16:15:21.502727 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502738252+00:00 stderr F E1212 16:15:21.502734 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502746672+00:00 stderr F E1212 16:15:21.502740 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502754952+00:00 stderr F E1212 16:15:21.502747 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502763312+00:00 stderr F E1212 16:15:21.502752 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-infra" should be enqueued: namespace "openshift-infra" not found 2025-12-12T16:15:21.502813054+00:00 stderr F E1212 16:15:21.502798 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ingress-canary" should be enqueued: namespace "openshift-ingress-canary" not found 2025-12-12T16:15:21.502854335+00:00 stderr F E1212 16:15:21.502842 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ingress-canary" should be enqueued: namespace "openshift-ingress-canary" not found 2025-12-12T16:15:21.502882755+00:00 stderr F E1212 16:15:21.502872 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ingress-canary" should be enqueued: namespace "openshift-ingress-canary" not found 2025-12-12T16:15:21.502921856+00:00 stderr F E1212 16:15:21.502910 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ingress-operator" should be enqueued: namespace "openshift-ingress-operator" not found 2025-12-12T16:15:21.502949307+00:00 stderr F E1212 16:15:21.502939 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ingress-operator" should be enqueued: namespace "openshift-ingress-operator" not found 2025-12-12T16:15:21.502998998+00:00 stderr F E1212 16:15:21.502986 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ingress-operator" should be enqueued: namespace "openshift-ingress-operator" not found 2025-12-12T16:15:21.503093750+00:00 stderr F E1212 16:15:21.503082 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ingress-operator" should be enqueued: namespace "openshift-ingress-operator" not found 2025-12-12T16:15:21.503167602+00:00 stderr F E1212 16:15:21.503157 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ingress" should be 
enqueued: namespace "openshift-ingress" not found 2025-12-12T16:15:21.503332676+00:00 stderr F E1212 16:15:21.503319 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ingress" should be enqueued: namespace "openshift-ingress" not found 2025-12-12T16:15:21.503365297+00:00 stderr F E1212 16:15:21.503355 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ingress" should be enqueued: namespace "openshift-ingress" not found 2025-12-12T16:15:21.503457609+00:00 stderr F E1212 16:15:21.503445 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ingress" should be enqueued: namespace "openshift-ingress" not found 2025-12-12T16:15:21.503500510+00:00 stderr F E1212 16:15:21.503491 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kni-infra" should be enqueued: namespace "openshift-kni-infra" not found 2025-12-12T16:15:21.503522551+00:00 stderr F E1212 16:15:21.503514 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kni-infra" should be enqueued: namespace "openshift-kni-infra" not found 2025-12-12T16:15:21.503544451+00:00 stderr F E1212 16:15:21.503536 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kni-infra" should be enqueued: namespace "openshift-kni-infra" not found 2025-12-12T16:15:21.503667544+00:00 stderr F E1212 16:15:21.503577 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-apiserver-operator" should be enqueued: namespace "openshift-kube-apiserver-operator" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503835 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-apiserver-operator" should be enqueued: namespace "openshift-kube-apiserver-operator" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503847 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-apiserver-operator" should be enqueued: namespace "openshift-kube-apiserver-operator" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503851 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-apiserver-operator" should be enqueued: namespace "openshift-kube-apiserver-operator" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503856 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-apiserver" should be enqueued: namespace "openshift-kube-apiserver" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503862 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-apiserver" should be enqueued: namespace "openshift-kube-apiserver" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503888 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-apiserver" should be enqueued: namespace "openshift-kube-apiserver" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503894 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-apiserver" should be enqueued: namespace "openshift-kube-apiserver" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503908 1 
podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-apiserver" should be enqueued: namespace "openshift-kube-apiserver" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503929 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-controller-manager-operator" should be enqueued: namespace "openshift-kube-controller-manager-operator" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503936 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-controller-manager-operator" should be enqueued: namespace "openshift-kube-controller-manager-operator" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503943 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-controller-manager-operator" should be enqueued: namespace "openshift-kube-controller-manager-operator" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503956 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-controller-manager-operator" should be enqueued: namespace "openshift-kube-controller-manager-operator" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503962 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-controller-manager" should be enqueued: namespace "openshift-kube-controller-manager" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.503992 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-controller-manager" should be enqueued: namespace "openshift-kube-controller-manager" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.504000 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-controller-manager" should be enqueued: namespace "openshift-kube-controller-manager" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.504005 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-controller-manager" should be enqueued: namespace "openshift-kube-controller-manager" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.504039 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-controller-manager" should be enqueued: namespace "openshift-kube-controller-manager" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.504046 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-controller-manager" should be enqueued: namespace "openshift-kube-controller-manager" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.504053 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-scheduler-operator" should be enqueued: namespace "openshift-kube-scheduler-operator" not found 2025-12-12T16:15:21.504081474+00:00 stderr F E1212 16:15:21.504070 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-scheduler-operator" should be enqueued: namespace "openshift-kube-scheduler-operator" not found 2025-12-12T16:15:21.504135275+00:00 stderr F E1212 16:15:21.504108 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace 
"openshift-kube-scheduler-operator" should be enqueued: namespace "openshift-kube-scheduler-operator" not found 2025-12-12T16:15:21.504135275+00:00 stderr F E1212 16:15:21.504125 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-scheduler-operator" should be enqueued: namespace "openshift-kube-scheduler-operator" not found 2025-12-12T16:15:21.504146556+00:00 stderr F E1212 16:15:21.504133 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-scheduler" should be enqueued: namespace "openshift-kube-scheduler" not found 2025-12-12T16:15:21.504146556+00:00 stderr F E1212 16:15:21.504140 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-scheduler" should be enqueued: namespace "openshift-kube-scheduler" not found 2025-12-12T16:15:21.504769551+00:00 stderr F E1212 16:15:21.504751 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-scheduler" should be enqueued: namespace "openshift-kube-scheduler" not found 2025-12-12T16:15:21.504820842+00:00 stderr F E1212 16:15:21.504810 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-scheduler" should be enqueued: namespace "openshift-kube-scheduler" not found 2025-12-12T16:15:21.504843852+00:00 stderr F E1212 16:15:21.504835 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-scheduler" should be enqueued: namespace "openshift-kube-scheduler" not found 2025-12-12T16:15:21.504875933+00:00 stderr F E1212 16:15:21.504867 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-scheduler" should be enqueued: namespace "openshift-kube-scheduler" not found 2025-12-12T16:15:21.504913794+00:00 stderr F E1212 16:15:21.504904 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-storage-version-migrator-operator" should be enqueued: namespace "openshift-kube-storage-version-migrator-operator" not found 2025-12-12T16:15:21.504937035+00:00 stderr F E1212 16:15:21.504928 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-storage-version-migrator-operator" should be enqueued: namespace "openshift-kube-storage-version-migrator-operator" not found 2025-12-12T16:15:21.504960745+00:00 stderr F E1212 16:15:21.504951 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-storage-version-migrator-operator" should be enqueued: namespace "openshift-kube-storage-version-migrator-operator" not found 2025-12-12T16:15:21.504982276+00:00 stderr F E1212 16:15:21.504974 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-storage-version-migrator-operator" should be enqueued: namespace "openshift-kube-storage-version-migrator-operator" not found 2025-12-12T16:15:21.505012677+00:00 stderr F E1212 16:15:21.505004 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-storage-version-migrator" should be enqueued: namespace "openshift-kube-storage-version-migrator" not found 2025-12-12T16:15:21.505050597+00:00 stderr F E1212 16:15:21.505041 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-storage-version-migrator" should be enqueued: namespace "openshift-kube-storage-version-migrator" not found 
2025-12-12T16:15:21.505081218+00:00 stderr F E1212 16:15:21.505072 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-storage-version-migrator" should be enqueued: namespace "openshift-kube-storage-version-migrator" not found 2025-12-12T16:15:21.505103249+00:00 stderr F E1212 16:15:21.505095 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-kube-storage-version-migrator" should be enqueued: namespace "openshift-kube-storage-version-migrator" not found 2025-12-12T16:15:21.505125769+00:00 stderr F E1212 16:15:21.505117 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-api" should be enqueued: namespace "openshift-machine-api" not found 2025-12-12T16:15:21.505147610+00:00 stderr F E1212 16:15:21.505139 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-api" should be enqueued: namespace "openshift-machine-api" not found 2025-12-12T16:15:21.505213811+00:00 stderr F E1212 16:15:21.505167 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-api" should be enqueued: namespace "openshift-machine-api" not found 2025-12-12T16:15:21.505300413+00:00 stderr F E1212 16:15:21.505287 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-api" should be enqueued: namespace "openshift-machine-api" not found 2025-12-12T16:15:21.505340744+00:00 stderr F E1212 16:15:21.505330 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-api" should be enqueued: namespace "openshift-machine-api" not found 2025-12-12T16:15:21.505408066+00:00 stderr F E1212 16:15:21.505380 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-api" should be enqueued: namespace "openshift-machine-api" not found 2025-12-12T16:15:21.505408066+00:00 stderr F E1212 16:15:21.505397 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-api" should be enqueued: namespace "openshift-machine-api" not found 2025-12-12T16:15:21.505421166+00:00 stderr F E1212 16:15:21.505405 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-api" should be enqueued: namespace "openshift-machine-api" not found 2025-12-12T16:15:21.505421166+00:00 stderr F E1212 16:15:21.505411 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-api" should be enqueued: namespace "openshift-machine-api" not found 2025-12-12T16:15:21.505421166+00:00 stderr F E1212 16:15:21.505417 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-config-operator" should be enqueued: namespace "openshift-machine-config-operator" not found 2025-12-12T16:15:21.505429457+00:00 stderr F E1212 16:15:21.505422 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-config-operator" should be enqueued: namespace "openshift-machine-config-operator" not found 2025-12-12T16:15:21.505466027+00:00 stderr F E1212 16:15:21.505452 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-config-operator" should be enqueued: namespace "openshift-machine-config-operator" not found 2025-12-12T16:15:21.505506878+00:00 stderr F E1212 16:15:21.505487 1 
podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-config-operator" should be enqueued: namespace "openshift-machine-config-operator" not found 2025-12-12T16:15:21.505549000+00:00 stderr F E1212 16:15:21.505537 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-config-operator" should be enqueued: namespace "openshift-machine-config-operator" not found 2025-12-12T16:15:21.505579020+00:00 stderr F E1212 16:15:21.505567 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-config-operator" should be enqueued: namespace "openshift-machine-config-operator" not found 2025-12-12T16:15:21.505626631+00:00 stderr F E1212 16:15:21.505605 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-config-operator" should be enqueued: namespace "openshift-machine-config-operator" not found 2025-12-12T16:15:21.505626631+00:00 stderr F E1212 16:15:21.505621 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-config-operator" should be enqueued: namespace "openshift-machine-config-operator" not found 2025-12-12T16:15:21.505635412+00:00 stderr F E1212 16:15:21.505629 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-config-operator" should be enqueued: namespace "openshift-machine-config-operator" not found 2025-12-12T16:15:21.505650022+00:00 stderr F E1212 16:15:21.505635 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-machine-config-operator" should be enqueued: namespace "openshift-machine-config-operator" not found 2025-12-12T16:15:21.505650022+00:00 stderr F E1212 16:15:21.505642 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-marketplace" should be enqueued: namespace "openshift-marketplace" not found 2025-12-12T16:15:21.505659102+00:00 stderr F E1212 16:15:21.505647 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-marketplace" should be enqueued: namespace "openshift-marketplace" not found 2025-12-12T16:15:21.505659102+00:00 stderr F E1212 16:15:21.505655 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-marketplace" should be enqueued: namespace "openshift-marketplace" not found 2025-12-12T16:15:21.505667952+00:00 stderr F E1212 16:15:21.505661 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-marketplace" should be enqueued: namespace "openshift-marketplace" not found 2025-12-12T16:15:21.505676213+00:00 stderr F E1212 16:15:21.505668 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-marketplace" should be enqueued: namespace "openshift-marketplace" not found 2025-12-12T16:15:21.505684423+00:00 stderr F E1212 16:15:21.505676 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-marketplace" should be enqueued: namespace "openshift-marketplace" not found 2025-12-12T16:15:21.505692753+00:00 stderr F E1212 16:15:21.505682 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-marketplace" should be enqueued: namespace "openshift-marketplace" not found 2025-12-12T16:15:21.505692753+00:00 stderr F E1212 16:15:21.505689 1 podsecurity_label_sync_controller.go:427] failed to determine 
whether namespace "openshift-marketplace" should be enqueued: namespace "openshift-marketplace" not found 2025-12-12T16:15:21.505703473+00:00 stderr F E1212 16:15:21.505697 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-monitoring" should be enqueued: namespace "openshift-monitoring" not found 2025-12-12T16:15:21.505739794+00:00 stderr F E1212 16:15:21.505726 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-monitoring" should be enqueued: namespace "openshift-monitoring" not found 2025-12-12T16:15:21.505779535+00:00 stderr F E1212 16:15:21.505768 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-monitoring" should be enqueued: namespace "openshift-monitoring" not found 2025-12-12T16:15:21.505807636+00:00 stderr F E1212 16:15:21.505797 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-monitoring" should be enqueued: namespace "openshift-monitoring" not found 2025-12-12T16:15:21.505905558+00:00 stderr F E1212 16:15:21.505846 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-multus" should be enqueued: namespace "openshift-multus" not found 2025-12-12T16:15:21.505905558+00:00 stderr F E1212 16:15:21.505859 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-multus" should be enqueued: namespace "openshift-multus" not found 2025-12-12T16:15:21.505905558+00:00 stderr F E1212 16:15:21.505865 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-multus" should be enqueued: namespace "openshift-multus" not found 2025-12-12T16:15:21.505905558+00:00 stderr F E1212 16:15:21.505870 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-multus" should be enqueued: namespace "openshift-multus" not found 2025-12-12T16:15:21.505905558+00:00 stderr F E1212 16:15:21.505882 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-multus" should be enqueued: namespace "openshift-multus" not found 2025-12-12T16:15:21.505905558+00:00 stderr F E1212 16:15:21.505895 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-multus" should be enqueued: namespace "openshift-multus" not found 2025-12-12T16:15:21.505905558+00:00 stderr F E1212 16:15:21.505900 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-multus" should be enqueued: namespace "openshift-multus" not found 2025-12-12T16:15:21.505923949+00:00 stderr F E1212 16:15:21.505912 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-console" should be enqueued: namespace "openshift-network-console" not found 2025-12-12T16:15:21.505932049+00:00 stderr F E1212 16:15:21.505924 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-console" should be enqueued: namespace "openshift-network-console" not found 2025-12-12T16:15:21.505932049+00:00 stderr F E1212 16:15:21.505929 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-console" should be enqueued: namespace "openshift-network-console" not found 2025-12-12T16:15:21.505954349+00:00 stderr F E1212 16:15:21.505940 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-diagnostics" 
should be enqueued: namespace "openshift-network-diagnostics" not found 2025-12-12T16:15:21.505954349+00:00 stderr F E1212 16:15:21.505948 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-diagnostics" should be enqueued: namespace "openshift-network-diagnostics" not found 2025-12-12T16:15:21.506010251+00:00 stderr F E1212 16:15:21.505996 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-diagnostics" should be enqueued: namespace "openshift-network-diagnostics" not found 2025-12-12T16:15:21.506047632+00:00 stderr F E1212 16:15:21.506036 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-diagnostics" should be enqueued: namespace "openshift-network-diagnostics" not found 2025-12-12T16:15:21.506077032+00:00 stderr F E1212 16:15:21.506067 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-node-identity" should be enqueued: namespace "openshift-network-node-identity" not found 2025-12-12T16:15:21.506163734+00:00 stderr F E1212 16:15:21.506151 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-node-identity" should be enqueued: namespace "openshift-network-node-identity" not found 2025-12-12T16:15:21.506332198+00:00 stderr F E1212 16:15:21.506318 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-node-identity" should be enqueued: namespace "openshift-network-node-identity" not found 2025-12-12T16:15:21.506392350+00:00 stderr F E1212 16:15:21.506380 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-node-identity" should be enqueued: namespace "openshift-network-node-identity" not found 2025-12-12T16:15:21.506441601+00:00 stderr F E1212 16:15:21.506430 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-operator" should be enqueued: namespace "openshift-network-operator" not found 2025-12-12T16:15:21.506481552+00:00 stderr F E1212 16:15:21.506470 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-operator" should be enqueued: namespace "openshift-network-operator" not found 2025-12-12T16:15:21.506515633+00:00 stderr F E1212 16:15:21.506505 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-operator" should be enqueued: namespace "openshift-network-operator" not found 2025-12-12T16:15:21.506542564+00:00 stderr F E1212 16:15:21.506533 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-operator" should be enqueued: namespace "openshift-network-operator" not found 2025-12-12T16:15:21.506568554+00:00 stderr F E1212 16:15:21.506558 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-network-operator" should be enqueued: namespace "openshift-network-operator" not found 2025-12-12T16:15:21.506609435+00:00 stderr F E1212 16:15:21.506598 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-node" should be enqueued: namespace "openshift-node" not found 2025-12-12T16:15:21.506634946+00:00 stderr F E1212 16:15:21.506625 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-node" should be enqueued: namespace "openshift-node" not found 
2025-12-12T16:15:21.506673697+00:00 stderr F E1212 16:15:21.506662 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-node" should be enqueued: namespace "openshift-node" not found 2025-12-12T16:15:21.506716948+00:00 stderr F E1212 16:15:21.506706 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-nutanix-infra" should be enqueued: namespace "openshift-nutanix-infra" not found 2025-12-12T16:15:21.506744048+00:00 stderr F E1212 16:15:21.506733 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-nutanix-infra" should be enqueued: namespace "openshift-nutanix-infra" not found 2025-12-12T16:15:21.506772299+00:00 stderr F E1212 16:15:21.506762 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-nutanix-infra" should be enqueued: namespace "openshift-nutanix-infra" not found 2025-12-12T16:15:21.506821300+00:00 stderr F E1212 16:15:21.506810 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-oauth-apiserver" should be enqueued: namespace "openshift-oauth-apiserver" not found 2025-12-12T16:15:21.506860561+00:00 stderr F E1212 16:15:21.506850 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-oauth-apiserver" should be enqueued: namespace "openshift-oauth-apiserver" not found 2025-12-12T16:15:21.506887182+00:00 stderr F E1212 16:15:21.506877 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-oauth-apiserver" should be enqueued: namespace "openshift-oauth-apiserver" not found 2025-12-12T16:15:21.506932343+00:00 stderr F E1212 16:15:21.506920 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-oauth-apiserver" should be enqueued: namespace "openshift-oauth-apiserver" not found 2025-12-12T16:15:21.506959774+00:00 stderr F E1212 16:15:21.506950 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-openstack-infra" should be enqueued: namespace "openshift-openstack-infra" not found 2025-12-12T16:15:21.506987254+00:00 stderr F E1212 16:15:21.506977 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-openstack-infra" should be enqueued: namespace "openshift-openstack-infra" not found 2025-12-12T16:15:21.507034545+00:00 stderr F E1212 16:15:21.507023 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-openstack-infra" should be enqueued: namespace "openshift-openstack-infra" not found 2025-12-12T16:15:21.507429805+00:00 stderr F I1212 16:15:21.507337 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:21.507648240+00:00 stderr F E1212 16:15:21.507440 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-operator-lifecycle-manager" should be enqueued: namespace "openshift-operator-lifecycle-manager" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508281 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-operator-lifecycle-manager" should be enqueued: namespace "openshift-operator-lifecycle-manager" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508304 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace 
"openshift-operator-lifecycle-manager" should be enqueued: namespace "openshift-operator-lifecycle-manager" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508333 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-operator-lifecycle-manager" should be enqueued: namespace "openshift-operator-lifecycle-manager" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508346 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-operator-lifecycle-manager" should be enqueued: namespace "openshift-operator-lifecycle-manager" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508357 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-operators" should be enqueued: namespace "openshift-operators" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508361 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-operators" should be enqueued: namespace "openshift-operators" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508366 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-operators" should be enqueued: namespace "openshift-operators" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508370 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ovirt-infra" should be enqueued: namespace "openshift-ovirt-infra" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508378 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ovirt-infra" should be enqueued: namespace "openshift-ovirt-infra" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508383 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ovirt-infra" should be enqueued: namespace "openshift-ovirt-infra" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508388 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ovn-kubernetes" should be enqueued: namespace "openshift-ovn-kubernetes" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508393 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ovn-kubernetes" should be enqueued: namespace "openshift-ovn-kubernetes" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508397 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ovn-kubernetes" should be enqueued: namespace "openshift-ovn-kubernetes" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508401 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ovn-kubernetes" should be enqueued: namespace "openshift-ovn-kubernetes" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508406 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-ovn-kubernetes" should be enqueued: namespace "openshift-ovn-kubernetes" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508409 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-route-controller-manager" should be enqueued: namespace "openshift-route-controller-manager" not found 
2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508414 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-route-controller-manager" should be enqueued: namespace "openshift-route-controller-manager" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508418 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-route-controller-manager" should be enqueued: namespace "openshift-route-controller-manager" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508423 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-route-controller-manager" should be enqueued: namespace "openshift-route-controller-manager" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508434 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-service-ca-operator" should be enqueued: namespace "openshift-service-ca-operator" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508442 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-service-ca-operator" should be enqueued: namespace "openshift-service-ca-operator" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508447 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-service-ca-operator" should be enqueued: namespace "openshift-service-ca-operator" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508452 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-service-ca-operator" should be enqueued: namespace "openshift-service-ca-operator" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508456 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-service-ca" should be enqueued: namespace "openshift-service-ca" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508460 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-service-ca" should be enqueued: namespace "openshift-service-ca" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508464 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-service-ca" should be enqueued: namespace "openshift-service-ca" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508474 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-service-ca" should be enqueued: namespace "openshift-service-ca" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508478 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-user-workload-monitoring" should be enqueued: namespace "openshift-user-workload-monitoring" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508484 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-user-workload-monitoring" should be enqueued: namespace "openshift-user-workload-monitoring" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508488 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-user-workload-monitoring" should be enqueued: namespace "openshift-user-workload-monitoring" not found 
2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508492 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-vsphere-infra" should be enqueued: namespace "openshift-vsphere-infra" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508497 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-vsphere-infra" should be enqueued: namespace "openshift-vsphere-infra" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508501 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift-vsphere-infra" should be enqueued: namespace "openshift-vsphere-infra" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508511 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift" should be enqueued: namespace "openshift" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508516 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift" should be enqueued: namespace "openshift" not found 2025-12-12T16:15:21.508620324+00:00 stderr F E1212 16:15:21.508524 1 podsecurity_label_sync_controller.go:427] failed to determine whether namespace "openshift" should be enqueued: namespace "openshift" not found 2025-12-12T16:15:21.696685564+00:00 stderr F I1212 16:15:21.696620 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:21.704986994+00:00 stderr F I1212 16:15:21.704946 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:21.735644553+00:00 stderr F E1212 16:15:21.735590 1 reflector.go:200] "Failed to watch" err="failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.ImageStream" 2025-12-12T16:15:21.895965585+00:00 stderr F I1212 16:15:21.895870 1 reflector.go:430] "Caches populated" type="*v1.PodTemplate" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:21.905896154+00:00 stderr F I1212 16:15:21.905813 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:22.096132267+00:00 stderr F I1212 16:15:22.095998 1 request.go:752] "Waited before sending request" delay="1.196020913s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://api-int.crc.testing:6443/api/v1/pods?limit=500&resourceVersion=0" 2025-12-12T16:15:22.104929539+00:00 stderr F I1212 16:15:22.104851 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:22.106696342+00:00 stderr F I1212 16:15:22.106650 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:22.307923580+00:00 stderr F I1212 16:15:22.307852 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:22.423031913+00:00 stderr F I1212 16:15:22.422966 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" 
reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:22.498285206+00:00 stderr F I1212 16:15:22.497648 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:22.505412868+00:00 stderr F I1212 16:15:22.505335 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:22.560606837+00:00 stderr F I1212 16:15:22.560471 1 base_controller.go:82] Caches are synced for pod-security-admission-label-synchronization-controller 2025-12-12T16:15:22.560606837+00:00 stderr F I1212 16:15:22.560565 1 base_controller.go:119] Starting #1 worker of pod-security-admission-label-synchronization-controller controller ... 2025-12-12T16:15:22.561717564+00:00 stderr F I1212 16:15:22.561672 1 shared_informer.go:357] "Caches are synced" controller="privileged-namespaces-psa-label-syncer" 2025-12-12T16:15:22.598141031+00:00 stderr F I1212 16:15:22.598033 1 base_controller.go:82] Caches are synced for namespace-security-allocation-controller 2025-12-12T16:15:22.598141031+00:00 stderr F I1212 16:15:22.598092 1 base_controller.go:119] Starting #1 worker of namespace-security-allocation-controller controller ... 2025-12-12T16:15:22.598215443+00:00 stderr F I1212 16:15:22.598204 1 namespace_scc_allocation_controller.go:111] Repairing SCC UID Allocations 2025-12-12T16:15:22.698823277+00:00 stderr F I1212 16:15:22.698733 1 reflector.go:430] "Caches populated" type="*v1.LimitRange" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:22.708890900+00:00 stderr F I1212 16:15:22.708838 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:22.896194542+00:00 stderr F I1212 16:15:22.896112 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:15:22.896512190+00:00 stderr F I1212 16:15:22.896441 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:22.897645297+00:00 stderr F I1212 16:15:22.897603 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:15:22.906106751+00:00 stderr F I1212 16:15:22.905999 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:23.095107384+00:00 stderr F I1212 16:15:23.095031 1 reflector.go:430] "Caches populated" type="*v1.PersistentVolumeClaim" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:23.104015029+00:00 stderr F I1212 16:15:23.103957 1 request.go:752] "Waited before sending request" delay="2.194856347s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://api-int.crc.testing:6443/apis/helm.openshift.io/v1beta1/projecthelmchartrepositories?limit=500&resourceVersion=0" 2025-12-12T16:15:23.105262439+00:00 stderr F I1212 16:15:23.105221 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:23.309732895+00:00 stderr F I1212 16:15:23.309675 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 
2025-12-12T16:15:23.368402578+00:00 stderr F I1212 16:15:23.368293 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:23.513089433+00:00 stderr F I1212 16:15:23.509362 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:23.705766504+00:00 stderr F I1212 16:15:23.705665 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:23.711706118+00:00 stderr F I1212 16:15:23.711682 1 namespace_scc_allocation_controller.go:116] Repair complete 2025-12-12T16:15:23.726136775+00:00 stderr F E1212 16:15:23.726052 1 reflector.go:200] "Failed to watch" err="failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.ImageStream" 2025-12-12T16:15:23.905592219+00:00 stderr F I1212 16:15:23.905508 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:24.105279229+00:00 stderr F I1212 16:15:24.104870 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:24.304771005+00:00 stderr F I1212 16:15:24.304584 1 request.go:752] "Waited before sending request" delay="3.395221114s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://api-int.crc.testing:6443/apis/monitoring.coreos.com/v1beta1/alertmanagerconfigs?limit=500&resourceVersion=0" 2025-12-12T16:15:24.306202350+00:00 stderr F I1212 16:15:24.306144 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:24.506887925+00:00 stderr F I1212 16:15:24.506828 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:24.705836177+00:00 stderr F I1212 16:15:24.705737 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:24.906603394+00:00 stderr F I1212 16:15:24.906498 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:25.106328716+00:00 stderr F I1212 16:15:25.106231 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:25.306041047+00:00 stderr F I1212 16:15:25.305945 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:25.504423256+00:00 stderr F I1212 16:15:25.504243 1 request.go:752] "Waited before sending request" delay="4.594681501s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://api-int.crc.testing:6443/apis/machine.openshift.io/v1/controlplanemachinesets?limit=500&resourceVersion=0" 2025-12-12T16:15:25.507949751+00:00 stderr F I1212 16:15:25.507854 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 
2025-12-12T16:15:25.709658441+00:00 stderr F I1212 16:15:25.709460 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:25.906118574+00:00 stderr F I1212 16:15:25.906027 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:26.105769534+00:00 stderr F I1212 16:15:26.105697 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:26.306671554+00:00 stderr F I1212 16:15:26.306546 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:26.504461829+00:00 stderr F I1212 16:15:26.504379 1 request.go:752] "Waited before sending request" delay="5.594810256s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://api-int.crc.testing:6443/apis/monitoring.coreos.com/v1/prometheuses?limit=500&resourceVersion=0" 2025-12-12T16:15:26.506133959+00:00 stderr F I1212 16:15:26.506087 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:26.705773859+00:00 stderr F I1212 16:15:26.705696 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:26.905558412+00:00 stderr F I1212 16:15:26.905469 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:27.091169903+00:00 stderr F E1212 16:15:27.091089 1 reflector.go:200] "Failed to watch" err="failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.ImageStream" 2025-12-12T16:15:27.106379089+00:00 stderr F I1212 16:15:27.106296 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:27.306309656+00:00 stderr F I1212 16:15:27.306175 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:27.504698915+00:00 stderr F I1212 16:15:27.504551 1 request.go:752] "Waited before sending request" delay="6.594049427s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://api-int.crc.testing:6443/apis/machine.openshift.io/v1beta1/machinesets?limit=500&resourceVersion=0" 2025-12-12T16:15:27.506462458+00:00 stderr F I1212 16:15:27.506374 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:27.705861351+00:00 stderr F I1212 16:15:27.705765 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:27.906344422+00:00 stderr F I1212 16:15:27.906235 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:27.911479845+00:00 stderr F I1212 16:15:27.911414 1 reconciliation_controller.go:224] synced cluster resource quota 
controller 2025-12-12T16:15:27.953006166+00:00 stderr F I1212 16:15:27.952887 1 reconciliation_controller.go:149] Caches are synced 2025-12-12T16:15:36.577453077+00:00 stderr F E1212 16:15:36.577368 1 reflector.go:200] "Failed to watch" err="failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.ImageStream" 2025-12-12T16:15:55.111330735+00:00 stderr F E1212 16:15:55.110667 1 reflector.go:200] "Failed to watch" err="failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.ImageStream" 2025-12-12T16:15:57.921211545+00:00 stderr F I1212 16:15:57.921014 1 reconciliation_controller.go:171] error occurred GetQuotableResources err=unable to retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, oauth.openshift.io/v1: stale GroupVersion discovery: oauth.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1, user.openshift.io/v1: stale GroupVersion discovery: user.openshift.io/v1 2025-12-12T16:15:57.921211545+00:00 stderr F E1212 16:15:57.921161 1 reconciliation_controller.go:172] "Unhandled Error" err="unable to retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, oauth.openshift.io/v1: stale GroupVersion discovery: oauth.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1, user.openshift.io/v1: stale GroupVersion discovery: user.openshift.io/v1" 2025-12-12T16:16:25.614081834+00:00 stderr F E1212 16:16:25.613991 1 reflector.go:200] "Failed to watch" err="failed to list *v1.ImageStream: the server is currently unable to handle the request (get imagestreams.image.openshift.io)" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.ImageStream" 2025-12-12T16:16:27.929838852+00:00 stderr F I1212 16:16:27.929719 1 reconciliation_controller.go:171] error occurred GetQuotableResources err=unable to 
retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, oauth.openshift.io/v1: stale GroupVersion discovery: oauth.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1, user.openshift.io/v1: stale GroupVersion discovery: user.openshift.io/v1 2025-12-12T16:16:27.929838852+00:00 stderr F E1212 16:16:27.929807 1 reconciliation_controller.go:172] "Unhandled Error" err="unable to retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, oauth.openshift.io/v1: stale GroupVersion discovery: oauth.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1, user.openshift.io/v1: stale GroupVersion discovery: user.openshift.io/v1" 2025-12-12T16:16:55.909034752+00:00 stderr F I1212 16:16:55.908959 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.908909689 +0000 UTC))" 2025-12-12T16:16:55.909034752+00:00 stderr F I1212 16:16:55.909000 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.908990001 +0000 UTC))" 2025-12-12T16:16:55.909034752+00:00 stderr F I1212 16:16:55.909014 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.909007091 +0000 UTC))" 
2025-12-12T16:16:55.909103873+00:00 stderr F I1212 16:16:55.909035 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.909022451 +0000 UTC))" 2025-12-12T16:16:55.909103873+00:00 stderr F I1212 16:16:55.909048 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.909039542 +0000 UTC))" 2025-12-12T16:16:55.909103873+00:00 stderr F I1212 16:16:55.909063 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.909052102 +0000 UTC))" 2025-12-12T16:16:55.909103873+00:00 stderr F I1212 16:16:55.909078 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.909069673 +0000 UTC))" 2025-12-12T16:16:55.909103873+00:00 stderr F I1212 16:16:55.909093 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.909083373 +0000 UTC))" 2025-12-12T16:16:55.909118824+00:00 stderr F I1212 16:16:55.909108 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.909097653 +0000 UTC))" 2025-12-12T16:16:55.909133804+00:00 stderr F I1212 16:16:55.909126 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.909118564 +0000 UTC))" 
2025-12-12T16:16:55.909666907+00:00 stderr F I1212 16:16:55.909361 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"kube-controller-manager.openshift-kube-controller-manager.svc\" [serving] validServingFor=[kube-controller-manager.openshift-kube-controller-manager.svc,kube-controller-manager.openshift-kube-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:16:55.909347419 +0000 UTC))" 2025-12-12T16:16:55.909666907+00:00 stderr F I1212 16:16:55.909508 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556102\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556102\" (2025-12-12 15:15:02 +0000 UTC to 2028-12-12 15:15:02 +0000 UTC (now=2025-12-12 16:16:55.909495163 +0000 UTC))" 2025-12-12T16:16:57.940281563+00:00 stderr F I1212 16:16:57.939823 1 reconciliation_controller.go:207] syncing resource quota controller with updated resources from discovery: added: [apps.openshift.io/v1, Resource=deploymentconfigs authorization.openshift.io/v1, Resource=rolebindingrestrictions build.openshift.io/v1, Resource=buildconfigs build.openshift.io/v1, Resource=builds image.openshift.io/v1, Resource=imagestreams route.openshift.io/v1, Resource=routes template.openshift.io/v1, Resource=templateinstances template.openshift.io/v1, Resource=templates], removed: [] 2025-12-12T16:16:57.940281563+00:00 stderr F I1212 16:16:57.940030 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="buildconfigs.build.openshift.io" 2025-12-12T16:16:57.940281563+00:00 stderr F I1212 16:16:57.940071 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="builds.build.openshift.io" 2025-12-12T16:16:57.940281563+00:00 stderr F I1212 16:16:57.940133 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="templateinstances.template.openshift.io" 2025-12-12T16:16:57.940281563+00:00 stderr F I1212 16:16:57.940153 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="templates.template.openshift.io" 2025-12-12T16:16:57.940665802+00:00 stderr F I1212 16:16:57.940636 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="routes.route.openshift.io" 2025-12-12T16:16:57.940700953+00:00 stderr F I1212 16:16:57.940677 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="deploymentconfigs.apps.openshift.io" 2025-12-12T16:16:57.940735344+00:00 stderr F I1212 16:16:57.940709 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="rolebindingrestrictions.authorization.openshift.io" 2025-12-12T16:16:57.949217921+00:00 stderr F I1212 16:16:57.949140 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:57.952665845+00:00 stderr F I1212 16:16:57.951831 1 warnings.go:110] "Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+" 2025-12-12T16:16:57.952665845+00:00 stderr F I1212 16:16:57.951984 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" 
reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:57.952665845+00:00 stderr F I1212 16:16:57.952507 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:57.953234229+00:00 stderr F I1212 16:16:57.953187 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:57.953922976+00:00 stderr F I1212 16:16:57.953880 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:57.955532005+00:00 stderr F I1212 16:16:57.955490 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:57.960591638+00:00 stderr F I1212 16:16:57.958613 1 warnings.go:110] "Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+" 2025-12-12T16:16:57.960591638+00:00 stderr F I1212 16:16:57.958762 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:17:08.441366009+00:00 stderr F I1212 16:17:08.441283 1 reflector.go:430] "Caches populated" type="*v1.ImageStream" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:17:08.541789120+00:00 stderr F I1212 16:17:08.541717 1 reconciliation_controller.go:224] synced cluster resource quota controller 2025-12-12T16:17:20.588931398+00:00 stderr F I1212 16:17:20.588833 1 resource_quota_controller.go:476] "syncing resource quota controller with updated resources from discovery" diff="added: [image.openshift.io/v1, Resource=imagestreams], removed: []" 2025-12-12T16:17:20.588990899+00:00 stderr F I1212 16:17:20.588948 1 shared_informer.go:683] "Warning: resync period is smaller than resync check period and the informer has already started. 
Changing it to the resync check period" resyncPeriod="9m0.711949688s" resyncCheckPeriod="10m0s" 2025-12-12T16:17:20.589111913+00:00 stderr F I1212 16:17:20.589051 1 shared_informer.go:350] "Waiting for caches to sync" controller="resource quota" 2025-12-12T16:17:20.589111913+00:00 stderr F I1212 16:17:20.589063 1 shared_informer.go:357] "Caches are synced" controller="resource quota" 2025-12-12T16:17:20.589111913+00:00 stderr F I1212 16:17:20.589072 1 resource_quota_controller.go:502] "synced quota controller" 2025-12-12T16:17:20.632140027+00:00 stderr F I1212 16:17:20.632045 1 shared_informer.go:357] "Caches are synced" controller="resource quota" 2025-12-12T16:17:46.317452148+00:00 stderr F I1212 16:17:46.317052 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.317011777 +0000 UTC))" 2025-12-12T16:17:46.317452148+00:00 stderr F I1212 16:17:46.317116 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.317078229 +0000 UTC))" 2025-12-12T16:17:46.317452148+00:00 stderr F I1212 16:17:46.317300 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.31712217 +0000 UTC))" 2025-12-12T16:17:46.317452148+00:00 stderr F I1212 16:17:46.317329 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.317315775 +0000 UTC))" 2025-12-12T16:17:46.317452148+00:00 stderr F I1212 16:17:46.317343 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.317333785 +0000 UTC))" 2025-12-12T16:17:46.317452148+00:00 stderr F I1212 16:17:46.317364 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.317349175 +0000 UTC))" 2025-12-12T16:17:46.317452148+00:00 stderr F 
I1212 16:17:46.317379 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.317368506 +0000 UTC))" 2025-12-12T16:17:46.317452148+00:00 stderr F I1212 16:17:46.317398 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.317384366 +0000 UTC))" 2025-12-12T16:17:46.317452148+00:00 stderr F I1212 16:17:46.317414 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.317405287 +0000 UTC))" 2025-12-12T16:17:46.319624732+00:00 stderr F I1212 16:17:46.317435 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.317424517 +0000 UTC))" 2025-12-12T16:17:46.319624732+00:00 stderr F I1212 16:17:46.317630 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.317612772 +0000 UTC))" 2025-12-12T16:17:46.319624732+00:00 stderr F I1212 16:17:46.317845 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"kube-controller-manager.openshift-kube-controller-manager.svc\" [serving] validServingFor=[kube-controller-manager.openshift-kube-controller-manager.svc,kube-controller-manager.openshift-kube-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:17:46.317827937 +0000 UTC))" 2025-12-12T16:17:46.334507800+00:00 stderr F I1212 16:17:46.334445 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556102\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556102\" (2025-12-12 15:15:02 +0000 UTC to 2028-12-12 15:15:02 +0000 UTC (now=2025-12-12 16:17:46.333953186 
+0000 UTC))" 2025-12-12T16:18:20.599228335+00:00 stderr F E1212 16:18:20.599151 1 resource_quota_controller.go:446] "Unhandled Error" err="failed to discover resources: Get \"https://api-int.crc.testing:6443/api\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:22.525316943+00:00 stderr F E1212 16:18:22.525170 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock?timeout=1m47s": dial tcp 38.102.83.180:6443: connect: connection refused, falling back to slow path 2025-12-12T16:18:22.525904138+00:00 stderr F E1212 16:18:22.525849 1 leaderelection.go:436] error retrieving resource lock openshift-kube-controller-manager/cluster-policy-controller-lock: Get "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock?timeout=1m47s": dial tcp 38.102.83.180:6443: connect: connection refused 2025-12-12T16:18:30.411803948+00:00 stderr F I1212 16:18:30.410115 1 cert_rotation.go:92] "Certificate rotation detected, shutting down client connections to start using new credentials" logger="tls-transport-cache" 2025-12-12T16:18:33.362032685+00:00 stderr F E1212 16:18:33.361947 1 reflector.go:200] "Failed to watch" err="an error on the server (\"Internal Server Error: \\\"/apis/template.openshift.io/v1/templates?allowWatchBookmarks=true&resourceVersion=38941&timeout=9m12s&timeoutSeconds=552&watch=true\\\": Post \\\"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\\\": dial tcp 10.217.4.1:443: connect: connection refused\") has prevented the request from succeeding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.PartialObjectMetadata" 2025-12-12T16:18:33.859778201+00:00 stderr F I1212 16:18:33.859547 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:34.631196783+00:00 stderr F E1212 16:18:34.631119 1 reflector.go:200] "Failed to watch" err="an error on the server (\"Internal Server Error: \\\"/apis/template.openshift.io/v1/templateinstances?allowWatchBookmarks=true&resourceVersion=38941&timeout=6m57s&timeoutSeconds=417&watch=true\\\": Post \\\"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\\\": dial tcp 10.217.4.1:443: connect: connection refused\") has prevented the request from succeeding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.PartialObjectMetadata" 2025-12-12T16:18:34.681045805+00:00 stderr F E1212 16:18:34.680626 1 reflector.go:200] "Failed to watch" err="an error on the server (\"Internal Server Error: \\\"/apis/apps.openshift.io/v1/deploymentconfigs?allowWatchBookmarks=true&resourceVersion=38945&timeout=5m24s&timeoutSeconds=324&watch=true\\\": Post \\\"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\\\": dial tcp 10.217.4.1:443: connect: connection refused\") has prevented the request from succeeding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.PartialObjectMetadata" 2025-12-12T16:18:35.582078251+00:00 stderr F E1212 16:18:35.581981 1 reflector.go:200] "Failed to watch" err="an error on the server (\"Internal Server Error: \\\"/apis/build.openshift.io/v1/buildconfigs?allowWatchBookmarks=true&resourceVersion=38944&timeout=8m33s&timeoutSeconds=513&watch=true\\\": 
Post \\\"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\\\": dial tcp 10.217.4.1:443: connect: connection refused\") has prevented the request from succeeding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.PartialObjectMetadata" 2025-12-12T16:18:35.881021552+00:00 stderr F E1212 16:18:35.880919 1 reflector.go:200] "Failed to watch" err="an error on the server (\"Internal Server Error: \\\"/apis/build.openshift.io/v1/builds?allowWatchBookmarks=true&resourceVersion=38946&timeout=9m56s&timeoutSeconds=596&watch=true\\\": Post \\\"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\\\": dial tcp 10.217.4.1:443: connect: connection refused\") has prevented the request from succeeding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.PartialObjectMetadata" 2025-12-12T16:18:37.724416155+00:00 stderr F I1212 16:18:37.724338 1 request.go:752] "Waited before sending request" delay="1.160298365s" reason="retries: 1, retry-after: 1s - retry-reason: due to server-side throttling, FlowSchema UID: \"c063f6fa-737b-4f3e-8cf4-c8405b7272ed\"" verb="GET" URL="https://api-int.crc.testing:6443/apis/ingress.operator.openshift.io/v1/dnsrecords?allowWatchBookmarks=true&resourceVersion=38489&timeout=7m10s&timeoutSeconds=430&watch=true" 2025-12-12T16:18:38.532727649+00:00 stderr F E1212 16:18:38.532634 1 reflector.go:200] "Failed to watch" err="an error on the server (\"Internal Server Error: \\\"/apis/route.openshift.io/v1/routes?allowWatchBookmarks=true&resourceVersion=38942&timeout=9m9s&timeoutSeconds=549&watch=true\\\": Post \\\"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\\\": dial tcp 10.217.4.1:443: connect: connection refused\") has prevented the request from succeeding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.PartialObjectMetadata" 2025-12-12T16:18:38.724772086+00:00 stderr F I1212 16:18:38.724646 1 request.go:752] "Waited before sending request" delay="1.896784324s" reason="retries: 1, retry-after: 1s - retry-reason: due to server-side throttling, FlowSchema UID: \"c063f6fa-737b-4f3e-8cf4-c8405b7272ed\"" verb="GET" URL="https://api-int.crc.testing:6443/apis/ipam.cluster.x-k8s.io/v1beta1/ipaddressclaims?allowWatchBookmarks=true&resourceVersion=38489&timeout=7m31s&timeoutSeconds=451&watch=true" 2025-12-12T16:18:39.725756174+00:00 stderr F I1212 16:18:39.724978 1 request.go:752] "Waited before sending request" delay="2.090687627s" reason="retries: 1, retry-after: 1s - retry-reason: due to server-side throttling, FlowSchema UID: \"c063f6fa-737b-4f3e-8cf4-c8405b7272ed\"" verb="GET" URL="https://api-int.crc.testing:6443/apis/monitoring.coreos.com/v1/podmonitors?allowWatchBookmarks=true&resourceVersion=38471&timeout=6m4s&timeoutSeconds=364&watch=true" 2025-12-12T16:18:46.823366898+00:00 stderr F I1212 16:18:46.822807 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:47.068814166+00:00 stderr F I1212 16:18:47.068731 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:47.373698854+00:00 stderr F I1212 16:18:47.373602 1 reflector.go:430] "Caches populated" type="*v1.SecurityContextConstraints" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:47.394191261+00:00 stderr F I1212 16:18:47.394106 1 
reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:47.969919993+00:00 stderr F I1212 16:18:47.969844 1 reflector.go:430] "Caches populated" type="*v1.StatefulSet" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:48.179854794+00:00 stderr F I1212 16:18:48.179745 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:48.208570574+00:00 stderr F I1212 16:18:48.208507 1 reflector.go:430] "Caches populated" type="*v1.Lease" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:48.563840377+00:00 stderr F I1212 16:18:48.563762 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:48.897216069+00:00 stderr F I1212 16:18:48.897153 1 reflector.go:430] "Caches populated" type="*v1.EndpointSlice" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:49.230873828+00:00 stderr F I1212 16:18:49.230798 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:49.231047362+00:00 stderr F I1212 16:18:49.231004 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve role from role ref: role.rbac.authorization.k8s.io "external-health-monitor-controller-cfg" not found 2025-12-12T16:18:49.231047362+00:00 stderr F I1212 16:18:49.231034 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve role from role ref: role.rbac.authorization.k8s.io "external-health-monitor-controller-cfg" not found 2025-12-12T16:18:49.278355902+00:00 stderr F I1212 16:18:49.278273 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:49.533749306+00:00 stderr F I1212 16:18:49.533681 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:49.736250802+00:00 stderr F I1212 16:18:49.736173 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:50.128354846+00:00 stderr F I1212 16:18:50.128297 1 reflector.go:430] "Caches populated" type="*v1.ClusterResourceQuota" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:50.142253600+00:00 stderr F I1212 16:18:50.142174 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:50.687931461+00:00 stderr F I1212 16:18:50.687869 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:50.801426657+00:00 stderr F I1212 16:18:50.801355 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:51.144070087+00:00 stderr F I1212 16:18:51.144014 1 reflector.go:430] "Caches populated" type="*v1.CertificateSigningRequest" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:51.269433986+00:00 stderr F I1212 16:18:51.269338 1 
reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:51.302211037+00:00 stderr F I1212 16:18:51.302091 1 reflector.go:430] "Caches populated" type="*v1.DaemonSet" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:51.620711531+00:00 stderr F I1212 16:18:51.620654 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:52.016681831+00:00 stderr F I1212 16:18:52.016625 1 reflector.go:430] "Caches populated" type="*v1.LimitRange" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:52.185333730+00:00 stderr F I1212 16:18:52.185277 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:52.276130745+00:00 stderr F E1212 16:18:52.276056 1 reflector.go:200] "Failed to watch" err="an error on the server (\"Internal Server Error: \\\"/apis/image.openshift.io/v1/imagestreams?allowWatchBookmarks=true&resourceVersion=39065&timeout=5m43s&timeoutSeconds=343&watch=true\\\": Post \\\"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\\\": dial tcp 10.217.4.1:443: connect: connection refused\") has prevented the request from succeeding (get imagestreams.image.openshift.io)" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.ImageStream" 2025-12-12T16:18:52.423951159+00:00 stderr F I1212 16:18:52.423874 1 reflector.go:430] "Caches populated" type="*v1.PodTemplate" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:52.629737817+00:00 stderr F E1212 16:18:52.629645 1 reflector.go:200] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: an error on the server (\"Internal Server Error: \\\"/apis/build.openshift.io/v1/builds?resourceVersion=38946\\\": Post \\\"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\\\": dial tcp 10.217.4.1:443: connect: connection refused\") has prevented the request from succeeding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.PartialObjectMetadata" 2025-12-12T16:18:52.709777296+00:00 stderr F I1212 16:18:52.709697 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:52.801641847+00:00 stderr F I1212 16:18:52.801559 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:52.839915153+00:00 stderr F I1212 16:18:52.839830 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:52.997442728+00:00 stderr F I1212 16:18:52.997362 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:53.227589338+00:00 stderr F I1212 16:18:53.227501 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:53.679150832+00:00 stderr F I1212 16:18:53.679050 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:53.733336151+00:00 stderr F 
I1212 16:18:53.733191 1 reflector.go:430] "Caches populated" type="*v1.CSIStorageCapacity" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:53.790395772+00:00 stderr F I1212 16:18:53.790317 1 reflector.go:430] "Caches populated" type="*v1.Job" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:53.949363162+00:00 stderr F I1212 16:18:53.949294 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:53.979599729+00:00 stderr F I1212 16:18:53.979516 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:54.269666081+00:00 stderr F I1212 16:18:54.269617 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:54.428417976+00:00 stderr F I1212 16:18:54.427823 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:54.440482804+00:00 stderr F I1212 16:18:54.440429 1 reflector.go:430] "Caches populated" type="*v1.PodDisruptionBudget" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:54.547650643+00:00 stderr F I1212 16:18:54.547596 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:54.723987692+00:00 stderr F I1212 16:18:54.723927 1 reflector.go:430] "Caches populated" type="*v1.ClusterRole" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:55.034098849+00:00 stderr F I1212 16:18:55.034024 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:55.049942970+00:00 stderr F I1212 16:18:55.049897 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:55.313593948+00:00 stderr F I1212 16:18:55.313556 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:55.447414207+00:00 stderr F I1212 16:18:55.447363 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:55.508558358+00:00 stderr F I1212 16:18:55.508497 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:55.666390451+00:00 stderr F I1212 16:18:55.666319 1 reflector.go:430] "Caches populated" type="*v1.PersistentVolumeClaim" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:56.340453895+00:00 stderr F I1212 16:18:56.340341 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:56.608509832+00:00 stderr F I1212 16:18:56.608454 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:56.686100701+00:00 stderr F E1212 16:18:56.686025 1 reflector.go:200] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: an 
error on the server (\"Internal Server Error: \\\"/apis/build.openshift.io/v1/buildconfigs?resourceVersion=38944\\\": Post \\\"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\\\": dial tcp 10.217.4.1:443: connect: connection refused\") has prevented the request from succeeding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.PartialObjectMetadata" 2025-12-12T16:18:56.829235329+00:00 stderr F I1212 16:18:56.828922 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:56.832472889+00:00 stderr F I1212 16:18:56.832439 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:56.905282320+00:00 stderr F I1212 16:18:56.905209 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:56.925221673+00:00 stderr F I1212 16:18:56.925113 1 reflector.go:430] "Caches populated" type="*v1.ReplicaSet" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:56.962455093+00:00 stderr F I1212 16:18:56.962346 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:57.394443793+00:00 stderr F I1212 16:18:57.394301 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:57.395482869+00:00 stderr F I1212 16:18:57.395437 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:57.649027907+00:00 stderr F I1212 16:18:57.648935 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:57.745236126+00:00 stderr F I1212 16:18:57.745127 1 reflector.go:430] "Caches populated" type="*v1.Ingress" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:57.792116385+00:00 stderr F I1212 16:18:57.791641 1 warnings.go:110] "Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+" 2025-12-12T16:18:57.792116385+00:00 stderr F I1212 16:18:57.791788 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:57.797407895+00:00 stderr F I1212 16:18:57.797320 1 warnings.go:110] "Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+" 2025-12-12T16:18:57.986892980+00:00 stderr F I1212 16:18:57.986650 1 reflector.go:430] "Caches populated" type="*v2.HorizontalPodAutoscaler" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:57.986892980+00:00 stderr F I1212 16:18:57.986860 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.112220999+00:00 stderr F I1212 16:18:58.112133 1 reflector.go:430] "Caches populated" type="*v1.ReplicationController" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.143268876+00:00 stderr F I1212 16:18:58.143170 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" 
reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.216057995+00:00 stderr F I1212 16:18:58.215964 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.302645835+00:00 stderr F I1212 16:18:58.302561 1 reflector.go:430] "Caches populated" type="*v1.ResourceQuota" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.418617323+00:00 stderr F I1212 16:18:58.418524 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.737829544+00:00 stderr F I1212 16:18:58.736929 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:58.737829544+00:00 stderr F I1212 16:18:58.737445 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.745439763+00:00 stderr F I1212 16:18:58.740444 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:58.784277383+00:00 stderr F I1212 16:18:58.782925 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.841695232+00:00 stderr F I1212 16:18:58.841616 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.848065400+00:00 stderr F I1212 16:18:58.848029 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.930800175+00:00 stderr F I1212 16:18:58.930749 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.976488925+00:00 stderr F I1212 16:18:58.976415 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.986934073+00:00 stderr F I1212 16:18:58.986872 1 reflector.go:430] "Caches populated" type="*v1.CronJob" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:59.505415311+00:00 stderr F I1212 16:18:59.505331 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:59.555817557+00:00 stderr F I1212 16:18:59.555649 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:59.556888544+00:00 stderr F I1212 16:18:59.556055 1 reflector.go:430] "Caches populated" type="*v1.ControllerRevision" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:59.823241919+00:00 stderr F I1212 16:18:59.821558 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:59.940311413+00:00 stderr F I1212 16:18:59.940231 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:00.341276346+00:00 stderr F I1212 16:19:00.341168 1 reflector.go:430] "Caches populated" type="*v1.ClusterRoleBinding" 
reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:00.341810879+00:00 stderr F I1212 16:19:00.341762 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "crc-hostpath-external-health-monitor-controller-runner" not found 2025-12-12T16:19:00.341810879+00:00 stderr F I1212 16:19:00.341783 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "crc-hostpath-external-health-monitor-controller-runner" not found 2025-12-12T16:19:00.499969260+00:00 stderr F I1212 16:19:00.499877 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:00.598031674+00:00 stderr F I1212 16:19:00.597958 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:01.145539290+00:00 stderr F I1212 16:19:01.145460 1 reflector.go:430] "Caches populated" type="*v1.NetworkPolicy" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:01.183240712+00:00 stderr F I1212 16:19:01.182390 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:01.809580666+00:00 stderr F I1212 16:19:01.809491 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:02.090926902+00:00 stderr F I1212 16:19:02.090873 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:02.654290120+00:00 stderr F I1212 16:19:02.654247 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:03.222011755+00:00 stderr F I1212 16:19:03.221962 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:03.899434573+00:00 stderr F I1212 16:19:03.899365 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:25.456277475+00:00 stderr F I1212 16:19:25.456194 1 reflector.go:430] "Caches populated" type="*v1.ImageStream" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:32.196025459+00:00 stderr F I1212 16:19:32.195943 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:35.709055094+00:00 stderr F I1212 16:19:35.708960 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:24:16.810523476+00:00 stderr F I1212 16:24:16.808824 1 warnings.go:110] "Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+" 2025-12-12T16:25:04.296274503+00:00 stderr F I1212 16:25:04.296160 1 podsecurity_label_sync_controller.go:304] no service accounts were found in the "openstack" NS 2025-12-12T16:25:04.298499011+00:00 stderr F I1212 16:25:04.296812 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", 
Namespace:"openshift-kube-controller-manager", Name:"kube-controller-manager-crc", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CreatedSCCRanges' created SCC ranges for openstack namespace 2025-12-12T16:25:05.007522330+00:00 stderr F I1212 16:25:05.007060 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-controller-manager", Name:"kube-controller-manager-crc", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CreatedSCCRanges' created SCC ranges for openstack-operators namespace 2025-12-12T16:26:15.059977347+00:00 stderr F I1212 16:26:15.059545 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-controller-manager", Name:"kube-controller-manager-crc", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CreatedSCCRanges' created SCC ranges for service-telemetry namespace 2025-12-12T16:26:42.046797315+00:00 stderr F I1212 16:26:42.045284 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-controller-manager", Name:"kube-controller-manager-crc", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CreatedSCCRanges' created SCC ranges for cert-manager-operator namespace 2025-12-12T16:26:57.017614919+00:00 stderr F I1212 16:26:57.017474 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve role from role ref: role.rbac.authorization.k8s.io "observability-operator" not found 2025-12-12T16:27:08.773224336+00:00 stderr F I1212 16:27:08.773128 1 reconciliation_controller.go:207] syncing resource quota controller with updated resources from discovery: added: [agent.k8s.elastic.co/v1alpha1, Resource=agents apm.k8s.elastic.co/v1, Resource=apmservers autoscaling.k8s.elastic.co/v1alpha1, Resource=elasticsearchautoscalers beat.k8s.elastic.co/v1beta1, Resource=beats elasticsearch.k8s.elastic.co/v1, Resource=elasticsearches enterprisesearch.k8s.elastic.co/v1, Resource=enterprisesearches kibana.k8s.elastic.co/v1, Resource=kibanas logstash.k8s.elastic.co/v1alpha1, Resource=logstashes maps.k8s.elastic.co/v1alpha1, Resource=elasticmapsservers monitoring.rhobs/v1, Resource=alertmanagers monitoring.rhobs/v1, Resource=podmonitors monitoring.rhobs/v1, Resource=probes monitoring.rhobs/v1, Resource=prometheuses monitoring.rhobs/v1, Resource=prometheusrules monitoring.rhobs/v1, Resource=servicemonitors monitoring.rhobs/v1, Resource=thanosrulers monitoring.rhobs/v1alpha1, Resource=alertmanagerconfigs monitoring.rhobs/v1alpha1, Resource=monitoringstacks monitoring.rhobs/v1alpha1, Resource=prometheusagents monitoring.rhobs/v1alpha1, Resource=scrapeconfigs monitoring.rhobs/v1alpha1, Resource=thanosqueriers observability.openshift.io/v1alpha1, Resource=observabilityinstallers perses.dev/v1alpha1, Resource=perses perses.dev/v1alpha1, Resource=persesdashboards perses.dev/v1alpha1, Resource=persesdatasources stackconfigpolicy.k8s.elastic.co/v1alpha1, Resource=stackconfigpolicies], removed: [] 2025-12-12T16:27:08.773435072+00:00 stderr F I1212 16:27:08.773398 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="apmservers.apm.k8s.elastic.co" 2025-12-12T16:27:08.773446672+00:00 stderr F I1212 16:27:08.773439 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="beats.beat.k8s.elastic.co" 2025-12-12T16:27:08.773492053+00:00 stderr F I1212 16:27:08.773464 1 resource_quota_monitor.go:227] "QuotaMonitor created 
object count evaluator" resource="prometheusrules.monitoring.rhobs" 2025-12-12T16:27:08.773523114+00:00 stderr F I1212 16:27:08.773503 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="monitoringstacks.monitoring.rhobs" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773546 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="elasticsearchautoscalers.autoscaling.k8s.elastic.co" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773572 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="alertmanagers.monitoring.rhobs" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773592 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="stackconfigpolicies.stackconfigpolicy.k8s.elastic.co" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773617 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="persesdashboards.perses.dev" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773638 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="agents.agent.k8s.elastic.co" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773659 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="elasticmapsservers.maps.k8s.elastic.co" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773687 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="enterprisesearches.enterprisesearch.k8s.elastic.co" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773708 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="podmonitors.monitoring.rhobs" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773728 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="perses.perses.dev" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773752 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="thanosrulers.monitoring.rhobs" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773800 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="prometheuses.monitoring.rhobs" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773821 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="servicemonitors.monitoring.rhobs" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773839 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="alertmanagerconfigs.monitoring.rhobs" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773857 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="thanosqueriers.monitoring.rhobs" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773877 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="observabilityinstallers.observability.openshift.io" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773899 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="prometheusagents.monitoring.rhobs" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773929 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="scrapeconfigs.monitoring.rhobs" 
2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773954 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="elasticsearches.elasticsearch.k8s.elastic.co" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773974 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="kibanas.kibana.k8s.elastic.co" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.773992 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="probes.monitoring.rhobs" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.774011 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="logstashes.logstash.k8s.elastic.co" 2025-12-12T16:27:08.774933600+00:00 stderr F I1212 16:27:08.774030 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="persesdatasources.perses.dev" 2025-12-12T16:27:08.779917236+00:00 stderr F I1212 16:27:08.779883 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:08.780131891+00:00 stderr F I1212 16:27:08.780100 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:08.780294825+00:00 stderr F I1212 16:27:08.780276 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:08.783223110+00:00 stderr F I1212 16:27:08.783014 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:08.783365443+00:00 stderr F I1212 16:27:08.783339 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:08.783581869+00:00 stderr F I1212 16:27:08.783552 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:08.783581869+00:00 stderr F I1212 16:27:08.783562 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:08.783736332+00:00 stderr F I1212 16:27:08.783711 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:08.783745803+00:00 stderr F I1212 16:27:08.783723 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:08.783895717+00:00 stderr F I1212 16:27:08.783858 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:08.976675546+00:00 stderr F I1212 16:27:08.976615 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:09.182269189+00:00 stderr F I1212 16:27:09.182052 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:09.380608409+00:00 stderr F I1212 16:27:09.380560 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" 
reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:09.583242337+00:00 stderr F I1212 16:27:09.580335 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:09.780414247+00:00 stderr F I1212 16:27:09.775926 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:09.974850738+00:00 stderr F I1212 16:27:09.974761 1 request.go:752] "Waited before sending request" delay="1.19920973s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://api-int.crc.testing:6443/apis/monitoring.rhobs/v1alpha1/thanosqueriers?limit=500&resourceVersion=0" 2025-12-12T16:27:09.976485860+00:00 stderr F I1212 16:27:09.976454 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:10.180599125+00:00 stderr F I1212 16:27:10.180512 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:10.379243033+00:00 stderr F I1212 16:27:10.376449 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:10.577299965+00:00 stderr F I1212 16:27:10.577234 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:10.780818826+00:00 stderr F I1212 16:27:10.780592 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:10.980283904+00:00 stderr F I1212 16:27:10.978455 1 request.go:752] "Waited before sending request" delay="2.202690288s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://api-int.crc.testing:6443/apis/kibana.k8s.elastic.co/v1/kibanas?limit=500&resourceVersion=0" 2025-12-12T16:27:10.981510315+00:00 stderr F I1212 16:27:10.980982 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:11.179679391+00:00 stderr F I1212 16:27:11.179633 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:11.376979874+00:00 stderr F I1212 16:27:11.376909 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:11.582391013+00:00 stderr F I1212 16:27:11.581633 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:11.780512607+00:00 stderr F I1212 16:27:11.780445 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:11.976873037+00:00 stderr F I1212 16:27:11.976654 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:12.076162580+00:00 stderr F I1212 16:27:12.074620 1 reconciliation_controller.go:224] synced cluster resource quota controller 2025-12-12T16:27:14.742699776+00:00 stderr F 
I1212 16:27:14.742591 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:27:36.313427449+00:00 stderr F I1212 16:27:36.313336 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-controller-manager", Name:"kube-controller-manager-crc", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CreatedSCCRanges' created SCC ranges for cert-manager namespace 2025-12-12T16:27:42.090999001+00:00 stderr F I1212 16:27:42.090135 1 reconciliation_controller.go:207] syncing resource quota controller with updated resources from discovery: added: [acme.cert-manager.io/v1, Resource=challenges acme.cert-manager.io/v1, Resource=orders cert-manager.io/v1, Resource=certificaterequests cert-manager.io/v1, Resource=certificates cert-manager.io/v1, Resource=issuers operator.openshift.io/v1alpha1, Resource=istiocsrs], removed: [] 2025-12-12T16:27:42.097231908+00:00 stderr F I1212 16:27:42.093817 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="orders.acme.cert-manager.io" 2025-12-12T16:27:42.097231908+00:00 stderr F I1212 16:27:42.093917 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="istiocsrs.operator.openshift.io" 2025-12-12T16:27:42.097231908+00:00 stderr F I1212 16:27:42.093963 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="certificaterequests.cert-manager.io" 2025-12-12T16:27:42.097231908+00:00 stderr F I1212 16:27:42.093986 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="certificates.cert-manager.io" 2025-12-12T16:27:42.097231908+00:00 stderr F I1212 16:27:42.094060 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="issuers.cert-manager.io" 2025-12-12T16:27:42.097231908+00:00 stderr F I1212 16:27:42.094111 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" resource="challenges.acme.cert-manager.io" 2025-12-12T16:27:42.097710921+00:00 stderr F I1212 16:27:42.097506 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:42.099320201+00:00 stderr F I1212 16:27:42.098939 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:42.099320201+00:00 stderr F I1212 16:27:42.098945 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:42.099320201+00:00 stderr F I1212 16:27:42.099021 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:42.099320201+00:00 stderr F I1212 16:27:42.099089 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:42.099390663+00:00 stderr F I1212 16:27:42.099332 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:27:42.195051384+00:00 stderr F I1212 16:27:42.194950 1 reconciliation_controller.go:224] synced cluster resource quota controller 2025-12-12T16:28:49.237223596+00:00 stderr F I1212 16:28:49.235812 1 sccrolecache.go:466] failed to retrieve a role 
for a rolebinding ref: couldn't retrieve role from role ref: role.rbac.authorization.k8s.io "external-health-monitor-controller-cfg" not found 2025-12-12T16:28:49.237223596+00:00 stderr F I1212 16:28:49.236711 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve role from role ref: role.rbac.authorization.k8s.io "external-health-monitor-controller-cfg" not found 2025-12-12T16:28:49.237223596+00:00 stderr F I1212 16:28:49.236900 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve role from role ref: role.rbac.authorization.k8s.io "observability-operator" not found 2025-12-12T16:28:49.237223596+00:00 stderr F I1212 16:28:49.236908 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve role from role ref: role.rbac.authorization.k8s.io "observability-operator" not found 2025-12-12T16:29:00.349363147+00:00 stderr F I1212 16:29:00.349153 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "crc-hostpath-external-health-monitor-controller-runner" not found 2025-12-12T16:29:00.349363147+00:00 stderr F I1212 16:29:00.349214 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "crc-hostpath-external-health-monitor-controller-runner" not found 2025-12-12T16:33:31.829248203+00:00 stderr F I1212 16:33:31.829143 1 warnings.go:110] "Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+" 2025-12-12T16:36:28.745931681+00:00 stderr F I1212 16:36:28.745842 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:38:49.239133050+00:00 stderr F I1212 16:38:49.239015 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve role from role ref: role.rbac.authorization.k8s.io "external-health-monitor-controller-cfg" not found 2025-12-12T16:38:49.239133050+00:00 stderr F I1212 16:38:49.239071 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve role from role ref: role.rbac.authorization.k8s.io "external-health-monitor-controller-cfg" not found 2025-12-12T16:38:49.239579052+00:00 stderr F I1212 16:38:49.239507 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve role from role ref: role.rbac.authorization.k8s.io "observability-operator" not found 2025-12-12T16:38:49.239579052+00:00 stderr F I1212 16:38:49.239550 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve role from role ref: role.rbac.authorization.k8s.io "observability-operator" not found 2025-12-12T16:39:00.346566749+00:00 stderr F I1212 16:39:00.346470 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "crc-hostpath-external-health-monitor-controller-runner" not found 2025-12-12T16:39:00.346566749+00:00 stderr F I1212 16:39:00.346510 1 sccrolecache.go:466] failed to retrieve a role for a rolebinding ref: couldn't retrieve clusterrole from role ref: clusterrole.rbac.authorization.k8s.io "crc-hostpath-external-health-monitor-controller-runner" not found 2025-12-12T16:40:38.213172895+00:00 stderr F I1212 16:40:38.213079 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", 
Namespace:"openshift-kube-controller-manager", Name:"kube-controller-manager-crc", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CreatedSCCRanges' created SCC ranges for openshift-must-gather-2sjxj namespace 2025-12-12T16:41:10.837805650+00:00 stderr F I1212 16:41:10.836276 1 warnings.go:110] "Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+" ././@LongLink0000644000000000000000000000033200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-recovery-controller/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-control0000755000175000017500000000000015117043063033054 5ustar zuulzuul././@LongLink0000644000000000000000000000033700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-recovery-controller/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-control0000644000175000017500000004061015117043043033055 0ustar zuulzuul2025-12-12T16:15:02.523858576+00:00 stderr F + timeout 3m /bin/bash -exuo pipefail -c 'while [ -n "$(ss -Htanop \( sport = 9443 \))" ]; do sleep 1; done' 2025-12-12T16:15:02.529480278+00:00 stderr F ++ ss -Htanop '(' sport = 9443 ')' 2025-12-12T16:15:02.541443328+00:00 stderr F + '[' -n '' ']' 2025-12-12T16:15:02.542478941+00:00 stderr F + exec cluster-kube-controller-manager-operator cert-recovery-controller --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/kube-controller-cert-syncer-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager --listen=0.0.0.0:9443 -v=2 2025-12-12T16:15:02.605133714+00:00 stderr F W1212 16:15:02.605002 1 cmd.go:257] Using insecure, self-signed certificates 2025-12-12T16:15:02.605540743+00:00 stderr F I1212 16:15:02.605524 1 crypto.go:594] Generating new CA for cert-recovery-controller-signer@1765556102 cert, and key in /tmp/serving-cert-2636761332/serving-signer.crt, /tmp/serving-cert-2636761332/serving-signer.key 2025-12-12T16:15:02.605569964+00:00 stderr F Validity period of the certificate for "cert-recovery-controller-signer@1765556102" is unset, resetting to 43800h0m0s! 2025-12-12T16:15:04.509372990+00:00 stderr F I1212 16:15:04.509301 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 
2025-12-12T16:15:04.509978020+00:00 stderr F I1212 16:15:04.509942 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:15:04.509978020+00:00 stderr F I1212 16:15:04.509960 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:15:04.509978020+00:00 stderr F I1212 16:15:04.509965 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:15:04.509978020+00:00 stderr F I1212 16:15:04.509970 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:15:04.509978020+00:00 stderr F I1212 16:15:04.509975 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:15:04.510018951+00:00 stderr F I1212 16:15:04.509961 1 observer_polling.go:159] Starting file observer 2025-12-12T16:15:14.512258884+00:00 stderr F W1212 16:15:14.511581 1 builder.go:272] unable to get owner reference (falling back to namespace): Get "https://localhost:6443/api/v1/namespaces/openshift-kube-controller-manager/pods": net/http: TLS handshake timeout 2025-12-12T16:15:14.512258884+00:00 stderr F I1212 16:15:14.511711 1 builder.go:304] cert-recovery-controller version v0.0.0-unknown-afdae35-afdae35 2025-12-12T16:15:19.877300553+00:00 stderr F I1212 16:15:19.877239 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:15:19.877745734+00:00 stderr F I1212 16:15:19.877592 1 leaderelection.go:257] attempting to acquire leader lease openshift-kube-controller-manager/cert-recovery-controller-lock... 2025-12-12T16:15:19.885100291+00:00 stderr F I1212 16:15:19.885029 1 leaderelection.go:271] successfully acquired lease openshift-kube-controller-manager/cert-recovery-controller-lock 2025-12-12T16:15:19.885353917+00:00 stderr F I1212 16:15:19.885295 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-kube-controller-manager", Name:"cert-recovery-controller-lock", UID:"15489cf0-fcad-4593-bd7c-1abce0633823", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"35979", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' crc_a389582d-2357-4046-8a4a-85813e7f6ff1 became leader 2025-12-12T16:15:19.895260186+00:00 stderr F I1212 16:15:19.891878 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:15:19.897360746+00:00 stderr F I1212 16:15:19.897312 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.898320549+00:00 stderr F I1212 16:15:19.898260 1 cmd.go:122] FeatureGates initialized: knownFeatureGates=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts 
AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:15:19.898766790+00:00 stderr F I1212 16:15:19.898735 1 csrcontroller.go:103] Starting CSR controller 2025-12-12T16:15:19.898766790+00:00 stderr F I1212 16:15:19.898754 1 shared_informer.go:350] "Waiting for caches to sync" controller="CSRController" 2025-12-12T16:15:19.898811671+00:00 stderr F I1212 16:15:19.898783 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"openshift-kube-controller-manager", Name:"openshift-kube-controller-manager", UID:"", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", 
"Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:15:19.901106386+00:00 stderr F I1212 16:15:19.899253 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.920968485+00:00 stderr F I1212 16:15:19.920890 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:15:19.924036949+00:00 stderr F I1212 16:15:19.923938 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.929138852+00:00 stderr F I1212 16:15:19.929088 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.929415688+00:00 stderr F I1212 16:15:19.929372 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.929466230+00:00 stderr F I1212 16:15:19.929369 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.930086125+00:00 stderr F I1212 16:15:19.930067 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.932385990+00:00 stderr F I1212 16:15:19.932349 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=kubecontrollermanagers" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.943601600+00:00 stderr F I1212 16:15:19.941498 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.943601600+00:00 stderr F I1212 16:15:19.942808 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.968894500+00:00 stderr F I1212 16:15:19.968822 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.998971894+00:00 stderr F I1212 16:15:19.998873 1 shared_informer.go:357] "Caches are synced" controller="CSRController" 
2025-12-12T16:15:19.998971894+00:00 stderr F I1212 16:15:19.998944 1 base_controller.go:76] Waiting for caches to sync for kube-controller-manager 2025-12-12T16:15:19.998971894+00:00 stderr F I1212 16:15:19.998960 1 base_controller.go:82] Caches are synced for kube-controller-manager 2025-12-12T16:15:19.999048136+00:00 stderr F I1212 16:15:19.998965 1 base_controller.go:119] Starting #1 worker of kube-controller-manager controller ... 2025-12-12T16:15:20.021738653+00:00 stderr F I1212 16:15:20.021591 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:15:20.021738653+00:00 stderr F I1212 16:15:20.021662 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:16:58.937904699+00:00 stderr F I1212 16:16:58.937798 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"openshift-kube-controller-manager", Name:"openshift-kube-controller-manager", UID:"", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretUpdated' Updated Secret/kube-controller-manager-client-cert-key -n openshift-kube-controller-manager because it changed 2025-12-12T16:18:18.961366433+00:00 stderr F E1212 16:18:18.961283 1 csrcontroller.go:147] "Unhandled Error" err="key failed with : Get \"https://localhost:6443/api/v1/namespaces/openshift-kube-controller-manager-operator/configmaps/csr-signer-ca\": dial tcp [::1]:6443: connect: connection refused" 2025-12-12T16:18:19.902774497+00:00 stderr F E1212 16:18:19.902679 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://localhost:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cert-recovery-controller-lock?timeout=4m0s": dial tcp [::1]:6443: connect: connection refused, falling back to slow path 2025-12-12T16:18:19.903478174+00:00 stderr F E1212 16:18:19.903429 1 leaderelection.go:436] error retrieving resource lock openshift-kube-controller-manager/cert-recovery-controller-lock: Get "https://localhost:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cert-recovery-controller-lock?timeout=4m0s": dial tcp [::1]:6443: connect: connection refused 2025-12-12T16:18:46.494562009+00:00 stderr F I1212 16:18:46.494489 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:47.777761633+00:00 stderr F I1212 16:18:47.777708 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:48.806080416+00:00 stderr F I1212 16:18:48.805909 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:51.958997004+00:00 stderr F I1212 16:18:51.958918 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:52.243848817+00:00 stderr F I1212 16:18:52.243767 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:52.449591333+00:00 stderr F I1212 16:18:52.449523 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:54.368857143+00:00 stderr F I1212 16:18:54.368806 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 
2025-12-12T16:18:55.048256929+00:00 stderr F I1212 16:18:55.048200 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:58.048579745+00:00 stderr F I1212 16:18:58.048526 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=kubecontrollermanagers" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:59.218042917+00:00 stderr F I1212 16:18:59.217995 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:00.036594034+00:00 stderr F I1212 16:19:00.036524 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285"
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager/1.log:
2025-12-12T16:18:27.683611900+00:00 stderr F + timeout 3m /bin/bash -exuo pipefail -c 'while [ -n "$(ss -Htanop \( sport = 10257 \))" ]; do sleep 1; done' 2025-12-12T16:18:27.688382858+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:27.699660207+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,15sec,0)' ']' 2025-12-12T16:18:27.699660207+00:00 stderr F + sleep 1 2025-12-12T16:18:28.702806477+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:28.713306347+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,14sec,0)' ']' 2025-12-12T16:18:28.713306347+00:00 stderr F + sleep 1 2025-12-12T16:18:29.720088957+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:29.729417857+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,13sec,0)' ']' 2025-12-12T16:18:29.729417857+00:00 stderr F + sleep 1 2025-12-12T16:18:30.732563298+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:30.748226795+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,12sec,0)' ']' 2025-12-12T16:18:30.748226795+00:00 stderr F + sleep 1 2025-12-12T16:18:31.750216317+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:31.760205174+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,11sec,0)' ']' 2025-12-12T16:18:31.760205174+00:00 stderr F + sleep 1 2025-12-12T16:18:32.763271533+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:32.774982642+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194
timer:(timewait,10sec,0)' ']' 2025-12-12T16:18:32.774982642+00:00 stderr F + sleep 1 2025-12-12T16:18:33.778927202+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:33.791397700+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,9.678ms,0)' ']' 2025-12-12T16:18:33.791397700+00:00 stderr F + sleep 1 2025-12-12T16:18:34.794983142+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:34.805630455+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,8.663ms,0)' ']' 2025-12-12T16:18:34.805630455+00:00 stderr F + sleep 1 2025-12-12T16:18:35.810011577+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:35.821534591+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,7.648ms,0)' ']' 2025-12-12T16:18:35.821534591+00:00 stderr F + sleep 1 2025-12-12T16:18:36.825311817+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:36.849887894+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,6.620ms,0)' ']' 2025-12-12T16:18:36.849887894+00:00 stderr F + sleep 1 2025-12-12T16:18:37.853609819+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:37.863460113+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,5.605ms,0)' ']' 2025-12-12T16:18:37.863534214+00:00 stderr F + sleep 1 2025-12-12T16:18:38.866550352+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:38.878418855+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,4.591ms,0)' ']' 2025-12-12T16:18:38.878418855+00:00 stderr F + sleep 1 2025-12-12T16:18:39.882097539+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:39.897383017+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,3.573ms,0)' ']' 2025-12-12T16:18:39.897473839+00:00 stderr F + sleep 1 2025-12-12T16:18:40.901968973+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:40.925757171+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,2.544ms,0)' ']' 2025-12-12T16:18:40.925960106+00:00 stderr F + sleep 1 2025-12-12T16:18:41.930491801+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:41.940598481+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,1.528ms,0)' ']' 2025-12-12T16:18:41.940598481+00:00 stderr F + sleep 1 2025-12-12T16:18:42.943508326+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:42.953787340+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,515ms,0)' ']' 2025-12-12T16:18:42.953862962+00:00 stderr F + sleep 1 2025-12-12T16:18:43.957143966+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:43.967764688+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 timer:(timewait,,0)' ']' 2025-12-12T16:18:43.967841600+00:00 stderr F + sleep 1 2025-12-12T16:18:44.971258268+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:44.980830404+00:00 stderr F + '[' -n 'TIME-WAIT 0 0 [::ffff:192.168.126.11]:10257 [::ffff:192.168.126.11]:46194 
timer:(timewait,,0)' ']' 2025-12-12T16:18:44.980904366+00:00 stderr F + sleep 1 2025-12-12T16:18:45.984076848+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:18:45.994500656+00:00 stderr F + '[' -n '' ']' 2025-12-12T16:18:45.995652415+00:00 stderr F + '[' -f /etc/kubernetes/static-pod-certs/configmaps/trusted-ca-bundle/ca-bundle.crt ']' 2025-12-12T16:18:45.995667085+00:00 stderr F + echo 'Copying system trust bundle' 2025-12-12T16:18:45.995676075+00:00 stdout F Copying system trust bundle 2025-12-12T16:18:45.995683665+00:00 stderr F + cp -f /etc/kubernetes/static-pod-certs/configmaps/trusted-ca-bundle/ca-bundle.crt /etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem 2025-12-12T16:18:45.999886919+00:00 stderr F + '[' -f /etc/kubernetes/static-pod-resources/configmaps/cloud-config/ca-bundle.pem ']' 2025-12-12T16:18:46.000290959+00:00 stderr P + exec hyperkube kube-controller-manager --openshift-config=/etc/kubernetes/static-pod-resources/configmaps/config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --authentication-kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --authorization-kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --client-ca-file=/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt --requestheader-client-ca-file=/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt -v=2 --tls-cert-file=/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt --tls-private-key-file=/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key --allocate-node-cidrs=false --cert-dir=/var/run/kubernetes --cloud-provider=external --cluster-cidr=10.217.0.0/22 --cluster-name=crc-rzkkk --cluster-signing-cert-file=/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt --cluster-signing-duration=720h --cluster-signing-key-file=/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key '--controllers=*' --controllers=-bootstrapsigner --controllers=-tokencleaner --controllers=-ttl --controllers=selinux-warning-controller --enable-dynamic-provisioning=true --feature-gates=AWSClusterHostedDNS=false --feature-gates=AWSClusterHostedDNSInstall=false --feature-gates=AWSDedicatedHosts=false --feature-gates=AWSServiceLBNetworkSecurityGroup=false --feature-gates=AdditionalRoutingCapabilities=true --feature-gates=AdminNetworkPolicy=true --feature-gates=AlibabaPlatform=true --feature-gates=AutomatedEtcdBackup=false --feature-gates=AzureClusterHostedDNSInstall=false --feature-gates=AzureDedicatedHosts=false --feature-gates=AzureMultiDisk=false --feature-gates=AzureWorkloadIdentity=true --feature-gates=BootImageSkewEnforcement=false --feature-gates=BootcNodeManagement=false --feature-gates=BuildCSIVolumes=true --feature-gates=CPMSMachineNamePrefix=true --feature-gates=ClusterAPIInstall=false --feature-gates=ClusterAPIInstallIBMCloud=false --feature-gates=ClusterMonitoringConfig=false --feature-gates=ClusterVersionOperatorConfiguration=false --feature-gates=ConsolePluginContentSecurityPolicy=true --feature-gates=DNSNameResolver=false --feature-gates=DualReplica=false --feature-gates=DyanmicServiceEndpointIBMCloud=false --feature-gates=DynamicResourceAllocation=false --feature-gates=EtcdBackendQuota=false --feature-gates=EventedPLEG=false --feature-gates=Example2=false --feature-gates=Example=false --feature-gates=ExternalOIDC=false 
--feature-gates=ExternalOIDCWithUIDAndExtraClaimMappings=false --feature-gates=ExternalSnapshotMetadata=false --feature-gates=GCPClusterHostedDNS=false --feature-gates=GCPClusterHostedDNSInstall=false --feature-gates=GCPCustomAPIEndpoints=false --feature-gates=GCPCustomAPIEndpointsInstall=false --feature-gates=GatewayAPI=true --feature-gates=GatewayAPIController=true --feature-gates=HighlyAvailableArbiter=true --feature-gates=ImageModeStatusReporting=false --feature-gates=ImageStreamImportMode=false --feature-gates=ImageVolume=true --feature-gates=IngressControllerDynamicConfigurationManager=false --feature-gates=IngressControllerLBSubnetsAWS=true --feature-gates=InsightsConfig=false --feature-gates=InsightsConfigAPI=false --feature-gates=InsightsOnDemandDataGather=false --feature-gates=IrreconcilableMachineConfig=false --feature-gates=KMSEncryptionProvider=false --feature-gates=KMSv1=true --feature-gates=MachineAPIMigration=false --feature-gates=MachineAPIOperatorDisableMachineHealthCheckController=false --feature-gates=MachineConfigNodes=true --feature-gates=ManagedBootImages=true --feature-gates=ManagedBootImagesAWS=true --feature-gates=ManagedBootImagesAzure=false --feature-gates=ManagedBootImagesvSphere=false --feature-gates=MaxUnavailableStatefulSet=false --feature-gates=MetricsCollectionProfiles=true --feature-gates=MinimumKubeletVersion=false --feature-gates=MixedCPUsAllocation=false --feature-gates=MultiArchInstallAzure=fals 2025-12-12T16:18:46.000343581+00:00 stderr F e --feature-gates=MultiDiskSetup=false --feature-gates=MutatingAdmissionPolicy=false --feature-gates=NetworkDiagnosticsConfig=true --feature-gates=NetworkLiveMigration=true --feature-gates=NetworkSegmentation=true --feature-gates=NewOLM=true --feature-gates=NewOLMCatalogdAPIV1Metas=false --feature-gates=NewOLMOwnSingleNamespace=false --feature-gates=NewOLMPreflightPermissionChecks=false --feature-gates=NewOLMWebhookProviderOpenshiftServiceCA=false --feature-gates=NoRegistryClusterOperations=false --feature-gates=NodeSwap=false --feature-gates=NutanixMultiSubnets=false --feature-gates=OVNObservability=false --feature-gates=OpenShiftPodSecurityAdmission=false --feature-gates=PinnedImages=true --feature-gates=PreconfiguredUDNAddresses=false --feature-gates=ProcMountType=true --feature-gates=RouteAdvertisements=true --feature-gates=RouteExternalCertificate=true --feature-gates=SELinuxMount=false --feature-gates=ServiceAccountTokenNodeBinding=true --feature-gates=SetEIPForNLBIngressController=true --feature-gates=ShortCertRotation=false --feature-gates=SignatureStores=false --feature-gates=SigstoreImageVerification=true --feature-gates=SigstoreImageVerificationPKI=false --feature-gates=StoragePerformantSecurityPolicy=true --feature-gates=TranslateStreamCloseWebsocketRequests=false --feature-gates=UpgradeStatus=true --feature-gates=UserNamespacesPodSecurityStandards=true --feature-gates=UserNamespacesSupport=true --feature-gates=VSphereConfigurableMaxAllowedBlockVolumesPerNode=false --feature-gates=VSphereHostVMGroupZonal=false --feature-gates=VSphereMixedNodeEnv=false --feature-gates=VSphereMultiDisk=true --feature-gates=VSphereMultiNetworks=true --feature-gates=VolumeAttributesClass=false --feature-gates=VolumeGroupSnapshot=false --flex-volume-plugin-dir=/etc/kubernetes/kubelet-plugins/volume/exec --kube-api-burst=300 --kube-api-qps=150 --leader-elect-renew-deadline=12s --leader-elect-resource-lock=leases --leader-elect-retry-period=3s --leader-elect=true 
--pv-recycler-pod-template-filepath-hostpath=/etc/kubernetes/static-pod-resources/configmaps/recycler-config/recycler-pod.yaml --pv-recycler-pod-template-filepath-nfs=/etc/kubernetes/static-pod-resources/configmaps/recycler-config/recycler-pod.yaml --root-ca-file=/etc/kubernetes/static-pod-resources/configmaps/serviceaccount-ca/ca-bundle.crt --secure-port=10257 --service-account-private-key-file=/etc/kubernetes/static-pod-resources/secrets/service-account-private-key/service-account.key --service-cluster-ip-range=10.217.4.0/23 --use-service-account-credentials=true --tls-cipher-suites=TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 --tls-min-version=VersionTLS12 2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069450 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAzure 2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069537 1 feature_gate.go:328] unrecognized feature gate: DualReplica 2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069541 1 feature_gate.go:328] unrecognized feature gate: KMSEncryptionProvider 2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069544 1 feature_gate.go:328] unrecognized feature gate: ImageModeStatusReporting 2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069548 1 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpointsInstall 2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069551 1 feature_gate.go:328] unrecognized feature gate: Example2 2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069555 1 feature_gate.go:328] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController 2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069558 1 feature_gate.go:328] unrecognized feature gate: RouteAdvertisements 2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069564 1 feature_gate.go:351] Setting GA feature gate ServiceAccountTokenNodeBinding=true. It will be removed in a future release. 
2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069568 1 feature_gate.go:328] unrecognized feature gate: InsightsConfig 2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069572 1 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstallIBMCloud 2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069574 1 feature_gate.go:328] unrecognized feature gate: HighlyAvailableArbiter 2025-12-12T16:18:46.069590433+00:00 stderr F W1212 16:18:46.069577 1 feature_gate.go:328] unrecognized feature gate: MixedCPUsAllocation 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069588 1 feature_gate.go:328] unrecognized feature gate: VSphereMixedNodeEnv 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069592 1 feature_gate.go:328] unrecognized feature gate: OpenShiftPodSecurityAdmission 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069595 1 feature_gate.go:328] unrecognized feature gate: AzureMultiDisk 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069599 1 feature_gate.go:328] unrecognized feature gate: BuildCSIVolumes 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069602 1 feature_gate.go:328] unrecognized feature gate: AutomatedEtcdBackup 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069605 1 feature_gate.go:328] unrecognized feature gate: PreconfiguredUDNAddresses 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069608 1 feature_gate.go:328] unrecognized feature gate: InsightsOnDemandDataGather 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069611 1 feature_gate.go:328] unrecognized feature gate: MetricsCollectionProfiles 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069614 1 feature_gate.go:328] unrecognized feature gate: GatewayAPI 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069618 1 feature_gate.go:328] unrecognized feature gate: EtcdBackendQuota 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069621 1 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpoints 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069624 1 feature_gate.go:328] unrecognized feature gate: VSphereConfigurableMaxAllowedBlockVolumesPerNode 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069627 1 feature_gate.go:328] unrecognized feature gate: VSphereMultiNetworks 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069630 1 feature_gate.go:328] unrecognized feature gate: NetworkLiveMigration 2025-12-12T16:18:46.069639034+00:00 stderr F W1212 16:18:46.069633 1 feature_gate.go:328] unrecognized feature gate: VSphereHostVMGroupZonal 2025-12-12T16:18:46.069664384+00:00 stderr F W1212 16:18:46.069636 1 feature_gate.go:328] unrecognized feature gate: AzureDedicatedHosts 2025-12-12T16:18:46.069664384+00:00 stderr F W1212 16:18:46.069641 1 feature_gate.go:328] unrecognized feature gate: NutanixMultiSubnets 2025-12-12T16:18:46.069664384+00:00 stderr F W1212 16:18:46.069644 1 feature_gate.go:328] unrecognized feature gate: AdminNetworkPolicy 2025-12-12T16:18:46.069664384+00:00 stderr F W1212 16:18:46.069647 1 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNS 2025-12-12T16:18:46.069664384+00:00 stderr F W1212 16:18:46.069650 1 feature_gate.go:328] unrecognized feature gate: AWSServiceLBNetworkSecurityGroup 2025-12-12T16:18:46.069664384+00:00 stderr F W1212 16:18:46.069653 1 feature_gate.go:328] unrecognized feature gate: SignatureStores 2025-12-12T16:18:46.069664384+00:00 stderr F W1212 16:18:46.069656 1 
feature_gate.go:328] unrecognized feature gate: ClusterVersionOperatorConfiguration 2025-12-12T16:18:46.069664384+00:00 stderr F W1212 16:18:46.069659 1 feature_gate.go:328] unrecognized feature gate: AWSDedicatedHosts 2025-12-12T16:18:46.069674115+00:00 stderr F W1212 16:18:46.069662 1 feature_gate.go:328] unrecognized feature gate: PinnedImages 2025-12-12T16:18:46.069674115+00:00 stderr F W1212 16:18:46.069666 1 feature_gate.go:328] unrecognized feature gate: ExternalOIDCWithUIDAndExtraClaimMappings 2025-12-12T16:18:46.069674115+00:00 stderr F W1212 16:18:46.069669 1 feature_gate.go:328] unrecognized feature gate: IrreconcilableMachineConfig 2025-12-12T16:18:46.069682625+00:00 stderr F W1212 16:18:46.069672 1 feature_gate.go:328] unrecognized feature gate: Example 2025-12-12T16:18:46.069682625+00:00 stderr F W1212 16:18:46.069676 1 feature_gate.go:328] unrecognized feature gate: ClusterMonitoringConfig 2025-12-12T16:18:46.069682625+00:00 stderr F W1212 16:18:46.069679 1 feature_gate.go:328] unrecognized feature gate: NetworkDiagnosticsConfig 2025-12-12T16:18:46.069690465+00:00 stderr F W1212 16:18:46.069682 1 feature_gate.go:328] unrecognized feature gate: OVNObservability 2025-12-12T16:18:46.069690465+00:00 stderr F W1212 16:18:46.069687 1 feature_gate.go:328] unrecognized feature gate: DNSNameResolver 2025-12-12T16:18:46.069697935+00:00 stderr F W1212 16:18:46.069691 1 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNSInstall 2025-12-12T16:18:46.069697935+00:00 stderr F W1212 16:18:46.069695 1 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNS 2025-12-12T16:18:46.069705335+00:00 stderr F W1212 16:18:46.069699 1 feature_gate.go:328] unrecognized feature gate: ShortCertRotation 2025-12-12T16:18:46.069705335+00:00 stderr F W1212 16:18:46.069702 1 feature_gate.go:328] unrecognized feature gate: NewOLMOwnSingleNamespace 2025-12-12T16:18:46.069717086+00:00 stderr F W1212 16:18:46.069705 1 feature_gate.go:328] unrecognized feature gate: AlibabaPlatform 2025-12-12T16:18:46.069717086+00:00 stderr F W1212 16:18:46.069709 1 feature_gate.go:328] unrecognized feature gate: VolumeGroupSnapshot 2025-12-12T16:18:46.069717086+00:00 stderr F W1212 16:18:46.069712 1 feature_gate.go:328] unrecognized feature gate: NewOLMCatalogdAPIV1Metas 2025-12-12T16:18:46.069725566+00:00 stderr F W1212 16:18:46.069715 1 feature_gate.go:328] unrecognized feature gate: IngressControllerDynamicConfigurationManager 2025-12-12T16:18:46.069725566+00:00 stderr F W1212 16:18:46.069719 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImages 2025-12-12T16:18:46.069725566+00:00 stderr F W1212 16:18:46.069722 1 feature_gate.go:328] unrecognized feature gate: NoRegistryClusterOperations 2025-12-12T16:18:46.069733566+00:00 stderr F W1212 16:18:46.069726 1 feature_gate.go:328] unrecognized feature gate: CPMSMachineNamePrefix 2025-12-12T16:18:46.069733566+00:00 stderr F W1212 16:18:46.069730 1 feature_gate.go:328] unrecognized feature gate: ExternalOIDC 2025-12-12T16:18:46.069740956+00:00 stderr F W1212 16:18:46.069734 1 feature_gate.go:328] unrecognized feature gate: NewOLMWebhookProviderOpenshiftServiceCA 2025-12-12T16:18:46.069740956+00:00 stderr F W1212 16:18:46.069737 1 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNSInstall 2025-12-12T16:18:46.069748316+00:00 stderr F W1212 16:18:46.069741 1 feature_gate.go:328] unrecognized feature gate: ConsolePluginContentSecurityPolicy 2025-12-12T16:18:46.069748316+00:00 stderr F W1212 16:18:46.069744 1 feature_gate.go:328] 
unrecognized feature gate: SigstoreImageVerification 2025-12-12T16:18:46.069755927+00:00 stderr F W1212 16:18:46.069747 1 feature_gate.go:328] unrecognized feature gate: NetworkSegmentation 2025-12-12T16:18:46.069755927+00:00 stderr F W1212 16:18:46.069751 1 feature_gate.go:328] unrecognized feature gate: GatewayAPIController 2025-12-12T16:18:46.069763487+00:00 stderr F W1212 16:18:46.069754 1 feature_gate.go:328] unrecognized feature gate: NewOLM 2025-12-12T16:18:46.069763487+00:00 stderr F W1212 16:18:46.069757 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAWS 2025-12-12T16:18:46.069763487+00:00 stderr F W1212 16:18:46.069760 1 feature_gate.go:328] unrecognized feature gate: SetEIPForNLBIngressController 2025-12-12T16:18:46.069771667+00:00 stderr F W1212 16:18:46.069766 1 feature_gate.go:328] unrecognized feature gate: BootImageSkewEnforcement 2025-12-12T16:18:46.069778917+00:00 stderr F W1212 16:18:46.069769 1 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstall 2025-12-12T16:18:46.069778917+00:00 stderr F W1212 16:18:46.069773 1 feature_gate.go:328] unrecognized feature gate: IngressControllerLBSubnetsAWS 2025-12-12T16:18:46.069786487+00:00 stderr F W1212 16:18:46.069776 1 feature_gate.go:328] unrecognized feature gate: MachineAPIMigration 2025-12-12T16:18:46.069786487+00:00 stderr F W1212 16:18:46.069781 1 feature_gate.go:328] unrecognized feature gate: ImageStreamImportMode 2025-12-12T16:18:46.069786487+00:00 stderr F W1212 16:18:46.069784 1 feature_gate.go:328] unrecognized feature gate: NewOLMPreflightPermissionChecks 2025-12-12T16:18:46.069794178+00:00 stderr F W1212 16:18:46.069787 1 feature_gate.go:328] unrecognized feature gate: BootcNodeManagement 2025-12-12T16:18:46.069794178+00:00 stderr F W1212 16:18:46.069791 1 feature_gate.go:328] unrecognized feature gate: MultiArchInstallAzure 2025-12-12T16:18:46.069801318+00:00 stderr F W1212 16:18:46.069794 1 feature_gate.go:328] unrecognized feature gate: VSphereMultiDisk 2025-12-12T16:18:46.069801318+00:00 stderr F W1212 16:18:46.069797 1 feature_gate.go:328] unrecognized feature gate: ExternalSnapshotMetadata 2025-12-12T16:18:46.069809478+00:00 stderr F W1212 16:18:46.069802 1 feature_gate.go:349] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
2025-12-12T16:18:46.069809478+00:00 stderr F W1212 16:18:46.069806 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesvSphere 2025-12-12T16:18:46.069820828+00:00 stderr F W1212 16:18:46.069810 1 feature_gate.go:328] unrecognized feature gate: AzureWorkloadIdentity 2025-12-12T16:18:46.069820828+00:00 stderr F W1212 16:18:46.069813 1 feature_gate.go:328] unrecognized feature gate: InsightsConfigAPI 2025-12-12T16:18:46.069820828+00:00 stderr F W1212 16:18:46.069816 1 feature_gate.go:328] unrecognized feature gate: UpgradeStatus 2025-12-12T16:18:46.069829468+00:00 stderr F W1212 16:18:46.069819 1 feature_gate.go:328] unrecognized feature gate: DyanmicServiceEndpointIBMCloud 2025-12-12T16:18:46.069829468+00:00 stderr F W1212 16:18:46.069823 1 feature_gate.go:328] unrecognized feature gate: AdditionalRoutingCapabilities 2025-12-12T16:18:46.069829468+00:00 stderr F W1212 16:18:46.069826 1 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerificationPKI 2025-12-12T16:18:46.069838639+00:00 stderr F W1212 16:18:46.069829 1 feature_gate.go:328] unrecognized feature gate: MachineConfigNodes 2025-12-12T16:18:46.069838639+00:00 stderr F W1212 16:18:46.069832 1 feature_gate.go:328] unrecognized feature gate: AzureClusterHostedDNSInstall 2025-12-12T16:18:46.069838639+00:00 stderr F W1212 16:18:46.069836 1 feature_gate.go:328] unrecognized feature gate: MultiDiskSetup 2025-12-12T16:18:46.069969022+00:00 stderr F I1212 16:18:46.069934 1 flags.go:64] FLAG: --allocate-node-cidrs="false" 2025-12-12T16:18:46.069969022+00:00 stderr F I1212 16:18:46.069950 1 flags.go:64] FLAG: --allow-metric-labels="[]" 2025-12-12T16:18:46.069969022+00:00 stderr F I1212 16:18:46.069956 1 flags.go:64] FLAG: --allow-metric-labels-manifest="" 2025-12-12T16:18:46.069969022+00:00 stderr F I1212 16:18:46.069961 1 flags.go:64] FLAG: --allow-untagged-cloud="false" 2025-12-12T16:18:46.069979542+00:00 stderr F I1212 16:18:46.069966 1 flags.go:64] FLAG: --attach-detach-reconcile-sync-period="1m0s" 2025-12-12T16:18:46.069979542+00:00 stderr F I1212 16:18:46.069971 1 flags.go:64] FLAG: --authentication-kubeconfig="/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig" 2025-12-12T16:18:46.069979542+00:00 stderr F I1212 16:18:46.069976 1 flags.go:64] FLAG: --authentication-skip-lookup="false" 2025-12-12T16:18:46.069987272+00:00 stderr F I1212 16:18:46.069980 1 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="10s" 2025-12-12T16:18:46.069987272+00:00 stderr F I1212 16:18:46.069984 1 flags.go:64] FLAG: --authentication-tolerate-lookup-failure="false" 2025-12-12T16:18:46.070015733+00:00 stderr F I1212 16:18:46.069987 1 flags.go:64] FLAG: --authorization-always-allow-paths="[/healthz,/readyz,/livez]" 2025-12-12T16:18:46.070015733+00:00 stderr F I1212 16:18:46.070000 1 flags.go:64] FLAG: --authorization-kubeconfig="/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig" 2025-12-12T16:18:46.070015733+00:00 stderr F I1212 16:18:46.070005 1 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="10s" 2025-12-12T16:18:46.070015733+00:00 stderr F I1212 16:18:46.070009 1 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="10s" 2025-12-12T16:18:46.070023963+00:00 stderr F I1212 16:18:46.070013 1 flags.go:64] FLAG: --bind-address="0.0.0.0" 2025-12-12T16:18:46.070023963+00:00 stderr F I1212 16:18:46.070019 1 flags.go:64] FLAG: --cert-dir="/var/run/kubernetes" 2025-12-12T16:18:46.070031533+00:00 stderr F I1212 16:18:46.070023 1 
flags.go:64] FLAG: --cidr-allocator-type="RangeAllocator" 2025-12-12T16:18:46.070031533+00:00 stderr F I1212 16:18:46.070027 1 flags.go:64] FLAG: --client-ca-file="/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:18:46.070039024+00:00 stderr F I1212 16:18:46.070032 1 flags.go:64] FLAG: --cloud-config="" 2025-12-12T16:18:46.070051104+00:00 stderr F I1212 16:18:46.070036 1 flags.go:64] FLAG: --cloud-provider="external" 2025-12-12T16:18:46.070051104+00:00 stderr F I1212 16:18:46.070041 1 flags.go:64] FLAG: --cluster-cidr="10.217.0.0/22" 2025-12-12T16:18:46.070051104+00:00 stderr F I1212 16:18:46.070044 1 flags.go:64] FLAG: --cluster-name="crc-rzkkk" 2025-12-12T16:18:46.070058844+00:00 stderr F I1212 16:18:46.070048 1 flags.go:64] FLAG: --cluster-signing-cert-file="/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt" 2025-12-12T16:18:46.070058844+00:00 stderr F I1212 16:18:46.070053 1 flags.go:64] FLAG: --cluster-signing-duration="720h0m0s" 2025-12-12T16:18:46.070066124+00:00 stderr F I1212 16:18:46.070057 1 flags.go:64] FLAG: --cluster-signing-key-file="/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:18:46.070066124+00:00 stderr F I1212 16:18:46.070061 1 flags.go:64] FLAG: --cluster-signing-kube-apiserver-client-cert-file="" 2025-12-12T16:18:46.070073235+00:00 stderr F I1212 16:18:46.070064 1 flags.go:64] FLAG: --cluster-signing-kube-apiserver-client-key-file="" 2025-12-12T16:18:46.070073235+00:00 stderr F I1212 16:18:46.070068 1 flags.go:64] FLAG: --cluster-signing-kubelet-client-cert-file="" 2025-12-12T16:18:46.070080335+00:00 stderr F I1212 16:18:46.070071 1 flags.go:64] FLAG: --cluster-signing-kubelet-client-key-file="" 2025-12-12T16:18:46.070080335+00:00 stderr F I1212 16:18:46.070075 1 flags.go:64] FLAG: --cluster-signing-kubelet-serving-cert-file="" 2025-12-12T16:18:46.070087465+00:00 stderr F I1212 16:18:46.070078 1 flags.go:64] FLAG: --cluster-signing-kubelet-serving-key-file="" 2025-12-12T16:18:46.070087465+00:00 stderr F I1212 16:18:46.070082 1 flags.go:64] FLAG: --cluster-signing-legacy-unknown-cert-file="" 2025-12-12T16:18:46.070094705+00:00 stderr F I1212 16:18:46.070085 1 flags.go:64] FLAG: --cluster-signing-legacy-unknown-key-file="" 2025-12-12T16:18:46.070094705+00:00 stderr F I1212 16:18:46.070089 1 flags.go:64] FLAG: --concurrent-cron-job-syncs="5" 2025-12-12T16:18:46.070102025+00:00 stderr F I1212 16:18:46.070094 1 flags.go:64] FLAG: --concurrent-daemonset-syncs="2" 2025-12-12T16:18:46.070108915+00:00 stderr F I1212 16:18:46.070099 1 flags.go:64] FLAG: --concurrent-deployment-syncs="5" 2025-12-12T16:18:46.070108915+00:00 stderr F I1212 16:18:46.070104 1 flags.go:64] FLAG: --concurrent-endpoint-syncs="5" 2025-12-12T16:18:46.070116006+00:00 stderr F I1212 16:18:46.070107 1 flags.go:64] FLAG: --concurrent-ephemeralvolume-syncs="5" 2025-12-12T16:18:46.070116006+00:00 stderr F I1212 16:18:46.070111 1 flags.go:64] FLAG: --concurrent-gc-syncs="20" 2025-12-12T16:18:46.070123066+00:00 stderr F I1212 16:18:46.070114 1 flags.go:64] FLAG: --concurrent-horizontal-pod-autoscaler-syncs="5" 2025-12-12T16:18:46.070123066+00:00 stderr F I1212 16:18:46.070118 1 flags.go:64] FLAG: --concurrent-job-syncs="5" 2025-12-12T16:18:46.070130146+00:00 stderr F I1212 16:18:46.070121 1 flags.go:64] FLAG: --concurrent-namespace-syncs="10" 2025-12-12T16:18:46.070130146+00:00 stderr F I1212 16:18:46.070125 1 flags.go:64] FLAG: --concurrent-rc-syncs="5" 2025-12-12T16:18:46.070137256+00:00 stderr F I1212 16:18:46.070128 1 flags.go:64] FLAG: 
--concurrent-replicaset-syncs="5" 2025-12-12T16:18:46.070137256+00:00 stderr F I1212 16:18:46.070132 1 flags.go:64] FLAG: --concurrent-resource-quota-syncs="5" 2025-12-12T16:18:46.070144326+00:00 stderr F I1212 16:18:46.070135 1 flags.go:64] FLAG: --concurrent-service-endpoint-syncs="5" 2025-12-12T16:18:46.070144326+00:00 stderr F I1212 16:18:46.070139 1 flags.go:64] FLAG: --concurrent-service-syncs="1" 2025-12-12T16:18:46.070151396+00:00 stderr F I1212 16:18:46.070142 1 flags.go:64] FLAG: --concurrent-serviceaccount-token-syncs="5" 2025-12-12T16:18:46.070151396+00:00 stderr F I1212 16:18:46.070146 1 flags.go:64] FLAG: --concurrent-statefulset-syncs="5" 2025-12-12T16:18:46.070162027+00:00 stderr F I1212 16:18:46.070149 1 flags.go:64] FLAG: --concurrent-ttl-after-finished-syncs="5" 2025-12-12T16:18:46.070162027+00:00 stderr F I1212 16:18:46.070153 1 flags.go:64] FLAG: --concurrent-validating-admission-policy-status-syncs="5" 2025-12-12T16:18:46.070162027+00:00 stderr F I1212 16:18:46.070157 1 flags.go:64] FLAG: --configure-cloud-routes="true" 2025-12-12T16:18:46.070169637+00:00 stderr F I1212 16:18:46.070160 1 flags.go:64] FLAG: --contention-profiling="false" 2025-12-12T16:18:46.070169637+00:00 stderr F I1212 16:18:46.070164 1 flags.go:64] FLAG: --controller-start-interval="0s" 2025-12-12T16:18:46.070189457+00:00 stderr F I1212 16:18:46.070167 1 flags.go:64] FLAG: --controllers="[*,-bootstrapsigner,-tokencleaner,-ttl,selinux-warning-controller]" 2025-12-12T16:18:46.070189457+00:00 stderr F I1212 16:18:46.070174 1 flags.go:64] FLAG: --disable-attach-detach-reconcile-sync="false" 2025-12-12T16:18:46.070202438+00:00 stderr F I1212 16:18:46.070196 1 flags.go:64] FLAG: --disable-force-detach-on-timeout="false" 2025-12-12T16:18:46.070202438+00:00 stderr F I1212 16:18:46.070199 1 flags.go:64] FLAG: --disable-http2-serving="false" 2025-12-12T16:18:46.070209808+00:00 stderr F I1212 16:18:46.070202 1 flags.go:64] FLAG: --disabled-metrics="[]" 2025-12-12T16:18:46.070216758+00:00 stderr F I1212 16:18:46.070207 1 flags.go:64] FLAG: --emulated-version="[]" 2025-12-12T16:18:46.070216758+00:00 stderr F I1212 16:18:46.070211 1 flags.go:64] FLAG: --enable-dynamic-provisioning="true" 2025-12-12T16:18:46.070216758+00:00 stderr F I1212 16:18:46.070214 1 flags.go:64] FLAG: --enable-garbage-collector="true" 2025-12-12T16:18:46.070224738+00:00 stderr F I1212 16:18:46.070217 1 flags.go:64] FLAG: --enable-hostpath-provisioner="false" 2025-12-12T16:18:46.070224738+00:00 stderr F I1212 16:18:46.070220 1 flags.go:64] FLAG: --enable-leader-migration="false" 2025-12-12T16:18:46.070232038+00:00 stderr F I1212 16:18:46.070222 1 flags.go:64] FLAG: --endpoint-updates-batch-period="0s" 2025-12-12T16:18:46.070232038+00:00 stderr F I1212 16:18:46.070226 1 flags.go:64] FLAG: --endpointslice-updates-batch-period="0s" 2025-12-12T16:18:46.070232038+00:00 stderr F I1212 16:18:46.070228 1 flags.go:64] FLAG: --external-cloud-volume-plugin="" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070231 1 flags.go:64] FLAG: 
--feature-gates=":AWSClusterHostedDNS=false,:AWSClusterHostedDNSInstall=false,:AWSDedicatedHosts=false,:AWSServiceLBNetworkSecurityGroup=false,:AdditionalRoutingCapabilities=true,:AdminNetworkPolicy=true,:AlibabaPlatform=true,:AutomatedEtcdBackup=false,:AzureClusterHostedDNSInstall=false,:AzureDedicatedHosts=false,:AzureMultiDisk=false,:AzureWorkloadIdentity=true,:BootImageSkewEnforcement=false,:BootcNodeManagement=false,:BuildCSIVolumes=true,:CPMSMachineNamePrefix=true,:ClusterAPIInstall=false,:ClusterAPIInstallIBMCloud=false,:ClusterMonitoringConfig=false,:ClusterVersionOperatorConfiguration=false,:ConsolePluginContentSecurityPolicy=true,:DNSNameResolver=false,:DualReplica=false,:DyanmicServiceEndpointIBMCloud=false,:DynamicResourceAllocation=false,:EtcdBackendQuota=false,:EventedPLEG=false,:Example2=false,:Example=false,:ExternalOIDC=false,:ExternalOIDCWithUIDAndExtraClaimMappings=false,:ExternalSnapshotMetadata=false,:GCPClusterHostedDNS=false,:GCPClusterHostedDNSInstall=false,:GCPCustomAPIEndpoints=false,:GCPCustomAPIEndpointsInstall=false,:GatewayAPI=true,:GatewayAPIController=true,:HighlyAvailableArbiter=true,:ImageModeStatusReporting=false,:ImageStreamImportMode=false,:ImageVolume=true,:IngressControllerDynamicConfigurationManager=false,:IngressControllerLBSubnetsAWS=true,:InsightsConfig=false,:InsightsConfigAPI=false,:InsightsOnDemandDataGather=false,:IrreconcilableMachineConfig=false,:KMSEncryptionProvider=false,:KMSv1=true,:MachineAPIMigration=false,:MachineAPIOperatorDisableMachineHealthCheckController=false,:MachineConfigNodes=true,:ManagedBootImages=true,:ManagedBootImagesAWS=true,:ManagedBootImagesAzure=false,:ManagedBootImagesvSphere=false,:MaxUnavailableStatefulSet=false,:MetricsCollectionProfiles=true,:MinimumKubeletVersion=false,:MixedCPUsAllocation=false,:MultiArchInstallAzure=false,:MultiDiskSetup=false,:MutatingAdmissionPolicy=false,:NetworkDiagnosticsConfig=true,:NetworkLiveMigration=true,:NetworkSegmentation=true,:NewOLM=true,:NewOLMCatalogdAPIV1Metas=false,:NewOLMOwnSingleNamespace=false,:NewOLMPreflightPermissionChecks=false,:NewOLMWebhookProviderOpenshiftServiceCA=false,:NoRegistryClusterOperations=false,:NodeSwap=false,:NutanixMultiSubnets=false,:OVNObservability=false,:OpenShiftPodSecurityAdmission=false,:PinnedImages=true,:PreconfiguredUDNAddresses=false,:ProcMountType=true,:RouteAdvertisements=true,:RouteExternalCertificate=true,:SELinuxMount=false,:ServiceAccountTokenNodeBinding=true,:SetEIPForNLBIngressController=true,:ShortCertRotation=false,:SignatureStores=false,:SigstoreImageVerification=true,:SigstoreImageVerificationPKI=false,:StoragePerformantSecurityPolicy=true,:TranslateStreamCloseWebsocketRequests=false,:UpgradeStatus=true,:UserNamespacesPodSecurityStandards=true,:UserNamespacesSupport=true,:VSphereConfigurableMaxAllowedBlockVolumesPerNode=false,:VSphereHostVMGroupZonal=false,:VSphereMixedNodeEnv=false,:VSphereMultiDisk=true,:VSphereMultiNetworks=true,:VolumeAttributesClass=false,:VolumeGroupSnapshot=false" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070290 1 flags.go:64] FLAG: --flex-volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070294 1 flags.go:64] FLAG: --help="false" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070297 1 flags.go:64] FLAG: --horizontal-pod-autoscaler-cpu-initialization-period="5m0s" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070300 1 flags.go:64] FLAG: 
--horizontal-pod-autoscaler-downscale-stabilization="5m0s" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070303 1 flags.go:64] FLAG: --horizontal-pod-autoscaler-initial-readiness-delay="30s" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070305 1 flags.go:64] FLAG: --horizontal-pod-autoscaler-sync-period="15s" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070308 1 flags.go:64] FLAG: --horizontal-pod-autoscaler-tolerance="0.1" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070313 1 flags.go:64] FLAG: --http2-max-streams-per-connection="0" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070316 1 flags.go:64] FLAG: --kube-api-burst="300" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070319 1 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070322 1 flags.go:64] FLAG: --kube-api-qps="150" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070326 1 flags.go:64] FLAG: --kubeconfig="/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig" 2025-12-12T16:18:46.070336771+00:00 stderr F I1212 16:18:46.070331 1 flags.go:64] FLAG: --large-cluster-size-threshold="50" 2025-12-12T16:18:46.070354831+00:00 stderr F I1212 16:18:46.070333 1 flags.go:64] FLAG: --leader-elect="true" 2025-12-12T16:18:46.070354831+00:00 stderr F I1212 16:18:46.070338 1 flags.go:64] FLAG: --leader-elect-lease-duration="15s" 2025-12-12T16:18:46.070354831+00:00 stderr F I1212 16:18:46.070341 1 flags.go:64] FLAG: --leader-elect-renew-deadline="12s" 2025-12-12T16:18:46.070354831+00:00 stderr F I1212 16:18:46.070343 1 flags.go:64] FLAG: --leader-elect-resource-lock="leases" 2025-12-12T16:18:46.070354831+00:00 stderr F I1212 16:18:46.070346 1 flags.go:64] FLAG: --leader-elect-resource-name="kube-controller-manager" 2025-12-12T16:18:46.070354831+00:00 stderr F I1212 16:18:46.070349 1 flags.go:64] FLAG: --leader-elect-resource-namespace="kube-system" 2025-12-12T16:18:46.070354831+00:00 stderr F I1212 16:18:46.070352 1 flags.go:64] FLAG: --leader-elect-retry-period="3s" 2025-12-12T16:18:46.070366822+00:00 stderr F I1212 16:18:46.070354 1 flags.go:64] FLAG: --leader-migration-config="" 2025-12-12T16:18:46.070366822+00:00 stderr F I1212 16:18:46.070357 1 flags.go:64] FLAG: --legacy-service-account-token-clean-up-period="8760h0m0s" 2025-12-12T16:18:46.070366822+00:00 stderr F I1212 16:18:46.070360 1 flags.go:64] FLAG: --log-flush-frequency="5s" 2025-12-12T16:18:46.070374532+00:00 stderr F I1212 16:18:46.070363 1 flags.go:64] FLAG: --log-json-info-buffer-size="0" 2025-12-12T16:18:46.070374532+00:00 stderr F I1212 16:18:46.070368 1 flags.go:64] FLAG: --log-json-split-stream="false" 2025-12-12T16:18:46.070374532+00:00 stderr F I1212 16:18:46.070371 1 flags.go:64] FLAG: --log-text-info-buffer-size="0" 2025-12-12T16:18:46.070382162+00:00 stderr F I1212 16:18:46.070373 1 flags.go:64] FLAG: --log-text-split-stream="false" 2025-12-12T16:18:46.070382162+00:00 stderr F I1212 16:18:46.070376 1 flags.go:64] FLAG: --logging-format="text" 2025-12-12T16:18:46.070382162+00:00 stderr F I1212 16:18:46.070379 1 flags.go:64] FLAG: --master="" 2025-12-12T16:18:46.070390032+00:00 stderr F I1212 16:18:46.070382 1 flags.go:64] FLAG: --max-endpoints-per-slice="100" 2025-12-12T16:18:46.070390032+00:00 stderr F I1212 16:18:46.070385 1 flags.go:64] FLAG: --min-resync-period="12h0m0s" 2025-12-12T16:18:46.070397823+00:00 stderr F I1212 16:18:46.070387 1 
flags.go:64] FLAG: --mirroring-concurrent-service-endpoint-syncs="5" 2025-12-12T16:18:46.070397823+00:00 stderr F I1212 16:18:46.070391 1 flags.go:64] FLAG: --mirroring-endpointslice-updates-batch-period="0s" 2025-12-12T16:18:46.070397823+00:00 stderr F I1212 16:18:46.070394 1 flags.go:64] FLAG: --mirroring-max-endpoints-per-subset="1000" 2025-12-12T16:18:46.070405883+00:00 stderr F I1212 16:18:46.070398 1 flags.go:64] FLAG: --namespace-sync-period="5m0s" 2025-12-12T16:18:46.070405883+00:00 stderr F I1212 16:18:46.070402 1 flags.go:64] FLAG: --node-cidr-mask-size="0" 2025-12-12T16:18:46.070413313+00:00 stderr F I1212 16:18:46.070406 1 flags.go:64] FLAG: --node-cidr-mask-size-ipv4="0" 2025-12-12T16:18:46.070413313+00:00 stderr F I1212 16:18:46.070410 1 flags.go:64] FLAG: --node-cidr-mask-size-ipv6="0" 2025-12-12T16:18:46.070420753+00:00 stderr F I1212 16:18:46.070414 1 flags.go:64] FLAG: --node-eviction-rate="0.1" 2025-12-12T16:18:46.070427913+00:00 stderr F I1212 16:18:46.070418 1 flags.go:64] FLAG: --node-monitor-grace-period="50s" 2025-12-12T16:18:46.070427913+00:00 stderr F I1212 16:18:46.070422 1 flags.go:64] FLAG: --node-monitor-period="5s" 2025-12-12T16:18:46.070434953+00:00 stderr F I1212 16:18:46.070426 1 flags.go:64] FLAG: --node-startup-grace-period="1m0s" 2025-12-12T16:18:46.070434953+00:00 stderr F I1212 16:18:46.070430 1 flags.go:64] FLAG: --node-sync-period="0s" 2025-12-12T16:18:46.070442184+00:00 stderr F I1212 16:18:46.070433 1 flags.go:64] FLAG: --openshift-config="/etc/kubernetes/static-pod-resources/configmaps/config/config.yaml" 2025-12-12T16:18:46.070442184+00:00 stderr F I1212 16:18:46.070438 1 flags.go:64] FLAG: --permit-address-sharing="false" 2025-12-12T16:18:46.070450434+00:00 stderr F I1212 16:18:46.070441 1 flags.go:64] FLAG: --permit-port-sharing="false" 2025-12-12T16:18:46.070450434+00:00 stderr F I1212 16:18:46.070444 1 flags.go:64] FLAG: --profiling="true" 2025-12-12T16:18:46.070450434+00:00 stderr F I1212 16:18:46.070447 1 flags.go:64] FLAG: --pv-recycler-increment-timeout-nfs="30" 2025-12-12T16:18:46.070458214+00:00 stderr F I1212 16:18:46.070451 1 flags.go:64] FLAG: --pv-recycler-minimum-timeout-hostpath="60" 2025-12-12T16:18:46.070458214+00:00 stderr F I1212 16:18:46.070454 1 flags.go:64] FLAG: --pv-recycler-minimum-timeout-nfs="300" 2025-12-12T16:18:46.070465394+00:00 stderr F I1212 16:18:46.070457 1 flags.go:64] FLAG: --pv-recycler-pod-template-filepath-hostpath="/etc/kubernetes/static-pod-resources/configmaps/recycler-config/recycler-pod.yaml" 2025-12-12T16:18:46.070465394+00:00 stderr F I1212 16:18:46.070461 1 flags.go:64] FLAG: --pv-recycler-pod-template-filepath-nfs="/etc/kubernetes/static-pod-resources/configmaps/recycler-config/recycler-pod.yaml" 2025-12-12T16:18:46.070476984+00:00 stderr F I1212 16:18:46.070464 1 flags.go:64] FLAG: --pv-recycler-timeout-increment-hostpath="30" 2025-12-12T16:18:46.070476984+00:00 stderr F I1212 16:18:46.070467 1 flags.go:64] FLAG: --pvclaimbinder-sync-period="15s" 2025-12-12T16:18:46.070476984+00:00 stderr F I1212 16:18:46.070470 1 flags.go:64] FLAG: --requestheader-allowed-names="[]" 2025-12-12T16:18:46.070484665+00:00 stderr F I1212 16:18:46.070474 1 flags.go:64] FLAG: --requestheader-client-ca-file="/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" 2025-12-12T16:18:46.070484665+00:00 stderr F I1212 16:18:46.070478 1 flags.go:64] FLAG: --requestheader-extra-headers-prefix="[x-remote-extra-]" 2025-12-12T16:18:46.070491915+00:00 stderr F I1212 16:18:46.070483 1 flags.go:64] FLAG: 
--requestheader-group-headers="[x-remote-group]" 2025-12-12T16:18:46.070491915+00:00 stderr F I1212 16:18:46.070487 1 flags.go:64] FLAG: --requestheader-uid-headers="[]" 2025-12-12T16:18:46.070499085+00:00 stderr F I1212 16:18:46.070491 1 flags.go:64] FLAG: --requestheader-username-headers="[x-remote-user]" 2025-12-12T16:18:46.070499085+00:00 stderr F I1212 16:18:46.070495 1 flags.go:64] FLAG: --resource-quota-sync-period="5m0s" 2025-12-12T16:18:46.070506265+00:00 stderr F I1212 16:18:46.070499 1 flags.go:64] FLAG: --root-ca-file="/etc/kubernetes/static-pod-resources/configmaps/serviceaccount-ca/ca-bundle.crt" 2025-12-12T16:18:46.070513275+00:00 stderr F I1212 16:18:46.070504 1 flags.go:64] FLAG: --route-reconciliation-period="10s" 2025-12-12T16:18:46.070513275+00:00 stderr F I1212 16:18:46.070508 1 flags.go:64] FLAG: --secondary-node-eviction-rate="0.01" 2025-12-12T16:18:46.070520486+00:00 stderr F I1212 16:18:46.070512 1 flags.go:64] FLAG: --secure-port="10257" 2025-12-12T16:18:46.070520486+00:00 stderr F I1212 16:18:46.070516 1 flags.go:64] FLAG: --service-account-private-key-file="/etc/kubernetes/static-pod-resources/secrets/service-account-private-key/service-account.key" 2025-12-12T16:18:46.070527676+00:00 stderr F I1212 16:18:46.070522 1 flags.go:64] FLAG: --service-cluster-ip-range="10.217.4.0/23" 2025-12-12T16:18:46.070534636+00:00 stderr F I1212 16:18:46.070526 1 flags.go:64] FLAG: --show-hidden-metrics-for-version="" 2025-12-12T16:18:46.070534636+00:00 stderr F I1212 16:18:46.070530 1 flags.go:64] FLAG: --terminated-pod-gc-threshold="12500" 2025-12-12T16:18:46.070541876+00:00 stderr F I1212 16:18:46.070533 1 flags.go:64] FLAG: --tls-cert-file="/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt" 2025-12-12T16:18:46.070550546+00:00 stderr F I1212 16:18:46.070538 1 flags.go:64] FLAG: --tls-cipher-suites="[TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256]" 2025-12-12T16:18:46.070557556+00:00 stderr F I1212 16:18:46.070549 1 flags.go:64] FLAG: --tls-min-version="VersionTLS12" 2025-12-12T16:18:46.070564467+00:00 stderr F I1212 16:18:46.070553 1 flags.go:64] FLAG: --tls-private-key-file="/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" 2025-12-12T16:18:46.070571697+00:00 stderr F I1212 16:18:46.070562 1 flags.go:64] FLAG: --tls-sni-cert-key="[]" 2025-12-12T16:18:46.070571697+00:00 stderr F I1212 16:18:46.070566 1 flags.go:64] FLAG: --unhealthy-zone-threshold="0.55" 2025-12-12T16:18:46.070578907+00:00 stderr F I1212 16:18:46.070569 1 flags.go:64] FLAG: --unsupported-kube-api-over-localhost="false" 2025-12-12T16:18:46.070578907+00:00 stderr F I1212 16:18:46.070573 1 flags.go:64] FLAG: --use-service-account-credentials="true" 2025-12-12T16:18:46.070593777+00:00 stderr F I1212 16:18:46.070576 1 flags.go:64] FLAG: --v="2" 2025-12-12T16:18:46.070593777+00:00 stderr F I1212 16:18:46.070580 1 flags.go:64] FLAG: --version="false" 2025-12-12T16:18:46.070593777+00:00 stderr F I1212 16:18:46.070584 1 flags.go:64] FLAG: --vmodule="" 2025-12-12T16:18:46.070766042+00:00 stderr F W1212 16:18:46.070734 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAzure 2025-12-12T16:18:46.070766042+00:00 stderr F W1212 16:18:46.070747 1 feature_gate.go:328] unrecognized feature gate: DualReplica 
2025-12-12T16:18:46.070766042+00:00 stderr F W1212 16:18:46.070751 1 feature_gate.go:328] unrecognized feature gate: KMSEncryptionProvider 2025-12-12T16:18:46.070766042+00:00 stderr F W1212 16:18:46.070755 1 feature_gate.go:328] unrecognized feature gate: ImageModeStatusReporting 2025-12-12T16:18:46.070766042+00:00 stderr F W1212 16:18:46.070758 1 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpointsInstall 2025-12-12T16:18:46.070766042+00:00 stderr F W1212 16:18:46.070761 1 feature_gate.go:328] unrecognized feature gate: Example2 2025-12-12T16:18:46.070776642+00:00 stderr F W1212 16:18:46.070765 1 feature_gate.go:328] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController 2025-12-12T16:18:46.070776642+00:00 stderr F W1212 16:18:46.070769 1 feature_gate.go:328] unrecognized feature gate: RouteAdvertisements 2025-12-12T16:18:46.070776642+00:00 stderr F W1212 16:18:46.070773 1 feature_gate.go:351] Setting GA feature gate ServiceAccountTokenNodeBinding=true. It will be removed in a future release. 2025-12-12T16:18:46.070784972+00:00 stderr F W1212 16:18:46.070778 1 feature_gate.go:328] unrecognized feature gate: InsightsConfig 2025-12-12T16:18:46.070784972+00:00 stderr F W1212 16:18:46.070782 1 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstallIBMCloud 2025-12-12T16:18:46.070792652+00:00 stderr F W1212 16:18:46.070786 1 feature_gate.go:328] unrecognized feature gate: HighlyAvailableArbiter 2025-12-12T16:18:46.070792652+00:00 stderr F W1212 16:18:46.070790 1 feature_gate.go:328] unrecognized feature gate: MixedCPUsAllocation 2025-12-12T16:18:46.070799982+00:00 stderr F W1212 16:18:46.070793 1 feature_gate.go:328] unrecognized feature gate: VSphereMixedNodeEnv 2025-12-12T16:18:46.070799982+00:00 stderr F W1212 16:18:46.070796 1 feature_gate.go:328] unrecognized feature gate: OpenShiftPodSecurityAdmission 2025-12-12T16:18:46.070807393+00:00 stderr F W1212 16:18:46.070798 1 feature_gate.go:328] unrecognized feature gate: AzureMultiDisk 2025-12-12T16:18:46.070807393+00:00 stderr F W1212 16:18:46.070801 1 feature_gate.go:328] unrecognized feature gate: BuildCSIVolumes 2025-12-12T16:18:46.070807393+00:00 stderr F W1212 16:18:46.070804 1 feature_gate.go:328] unrecognized feature gate: AutomatedEtcdBackup 2025-12-12T16:18:46.070815033+00:00 stderr F W1212 16:18:46.070806 1 feature_gate.go:328] unrecognized feature gate: PreconfiguredUDNAddresses 2025-12-12T16:18:46.070815033+00:00 stderr F W1212 16:18:46.070809 1 feature_gate.go:328] unrecognized feature gate: InsightsOnDemandDataGather 2025-12-12T16:18:46.070815033+00:00 stderr F W1212 16:18:46.070812 1 feature_gate.go:328] unrecognized feature gate: MetricsCollectionProfiles 2025-12-12T16:18:46.070822623+00:00 stderr F W1212 16:18:46.070814 1 feature_gate.go:328] unrecognized feature gate: GatewayAPI 2025-12-12T16:18:46.070822623+00:00 stderr F W1212 16:18:46.070817 1 feature_gate.go:328] unrecognized feature gate: EtcdBackendQuota 2025-12-12T16:18:46.070822623+00:00 stderr F W1212 16:18:46.070820 1 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpoints 2025-12-12T16:18:46.070830223+00:00 stderr F W1212 16:18:46.070822 1 feature_gate.go:328] unrecognized feature gate: VSphereConfigurableMaxAllowedBlockVolumesPerNode 2025-12-12T16:18:46.070830223+00:00 stderr F W1212 16:18:46.070825 1 feature_gate.go:328] unrecognized feature gate: VSphereMultiNetworks 2025-12-12T16:18:46.070830223+00:00 stderr F W1212 16:18:46.070828 1 feature_gate.go:328] unrecognized feature gate: 
NetworkLiveMigration 2025-12-12T16:18:46.070843394+00:00 stderr F W1212 16:18:46.070831 1 feature_gate.go:328] unrecognized feature gate: VSphereHostVMGroupZonal 2025-12-12T16:18:46.070843394+00:00 stderr F W1212 16:18:46.070833 1 feature_gate.go:328] unrecognized feature gate: AzureDedicatedHosts 2025-12-12T16:18:46.070843394+00:00 stderr F W1212 16:18:46.070836 1 feature_gate.go:328] unrecognized feature gate: NutanixMultiSubnets 2025-12-12T16:18:46.070843394+00:00 stderr F W1212 16:18:46.070838 1 feature_gate.go:328] unrecognized feature gate: AdminNetworkPolicy 2025-12-12T16:18:46.070843394+00:00 stderr F W1212 16:18:46.070841 1 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNS 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070843 1 feature_gate.go:328] unrecognized feature gate: AWSServiceLBNetworkSecurityGroup 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070846 1 feature_gate.go:328] unrecognized feature gate: SignatureStores 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070849 1 feature_gate.go:328] unrecognized feature gate: ClusterVersionOperatorConfiguration 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070851 1 feature_gate.go:328] unrecognized feature gate: AWSDedicatedHosts 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070854 1 feature_gate.go:328] unrecognized feature gate: PinnedImages 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070856 1 feature_gate.go:328] unrecognized feature gate: ExternalOIDCWithUIDAndExtraClaimMappings 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070859 1 feature_gate.go:328] unrecognized feature gate: IrreconcilableMachineConfig 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070861 1 feature_gate.go:328] unrecognized feature gate: Example 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070864 1 feature_gate.go:328] unrecognized feature gate: ClusterMonitoringConfig 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070866 1 feature_gate.go:328] unrecognized feature gate: NetworkDiagnosticsConfig 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070869 1 feature_gate.go:328] unrecognized feature gate: OVNObservability 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070872 1 feature_gate.go:328] unrecognized feature gate: DNSNameResolver 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070875 1 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNSInstall 2025-12-12T16:18:46.070881124+00:00 stderr F W1212 16:18:46.070877 1 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNS 2025-12-12T16:18:46.070892335+00:00 stderr F W1212 16:18:46.070880 1 feature_gate.go:328] unrecognized feature gate: ShortCertRotation 2025-12-12T16:18:46.070899355+00:00 stderr F W1212 16:18:46.070891 1 feature_gate.go:328] unrecognized feature gate: NewOLMOwnSingleNamespace 2025-12-12T16:18:46.070899355+00:00 stderr F W1212 16:18:46.070894 1 feature_gate.go:328] unrecognized feature gate: AlibabaPlatform 2025-12-12T16:18:46.070899355+00:00 stderr F W1212 16:18:46.070896 1 feature_gate.go:328] unrecognized feature gate: VolumeGroupSnapshot 2025-12-12T16:18:46.070907165+00:00 stderr F W1212 16:18:46.070899 1 feature_gate.go:328] unrecognized feature gate: NewOLMCatalogdAPIV1Metas 2025-12-12T16:18:46.070907165+00:00 stderr F W1212 16:18:46.070901 1 feature_gate.go:328] unrecognized feature gate: IngressControllerDynamicConfigurationManager 
2025-12-12T16:18:46.070914385+00:00 stderr F W1212 16:18:46.070905 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImages 2025-12-12T16:18:46.070914385+00:00 stderr F W1212 16:18:46.070909 1 feature_gate.go:328] unrecognized feature gate: NoRegistryClusterOperations 2025-12-12T16:18:46.070914385+00:00 stderr F W1212 16:18:46.070911 1 feature_gate.go:328] unrecognized feature gate: CPMSMachineNamePrefix 2025-12-12T16:18:46.070926086+00:00 stderr F W1212 16:18:46.070914 1 feature_gate.go:328] unrecognized feature gate: ExternalOIDC 2025-12-12T16:18:46.070926086+00:00 stderr F W1212 16:18:46.070917 1 feature_gate.go:328] unrecognized feature gate: NewOLMWebhookProviderOpenshiftServiceCA 2025-12-12T16:18:46.070926086+00:00 stderr F W1212 16:18:46.070919 1 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNSInstall 2025-12-12T16:18:46.070926086+00:00 stderr F W1212 16:18:46.070922 1 feature_gate.go:328] unrecognized feature gate: ConsolePluginContentSecurityPolicy 2025-12-12T16:18:46.070934656+00:00 stderr F W1212 16:18:46.070925 1 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerification 2025-12-12T16:18:46.070934656+00:00 stderr F W1212 16:18:46.070927 1 feature_gate.go:328] unrecognized feature gate: NetworkSegmentation 2025-12-12T16:18:46.070934656+00:00 stderr F W1212 16:18:46.070930 1 feature_gate.go:328] unrecognized feature gate: GatewayAPIController 2025-12-12T16:18:46.070934656+00:00 stderr F W1212 16:18:46.070932 1 feature_gate.go:328] unrecognized feature gate: NewOLM 2025-12-12T16:18:46.070943046+00:00 stderr F W1212 16:18:46.070935 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAWS 2025-12-12T16:18:46.070943046+00:00 stderr F W1212 16:18:46.070938 1 feature_gate.go:328] unrecognized feature gate: SetEIPForNLBIngressController 2025-12-12T16:18:46.070943046+00:00 stderr F W1212 16:18:46.070940 1 feature_gate.go:328] unrecognized feature gate: BootImageSkewEnforcement 2025-12-12T16:18:46.070952036+00:00 stderr F W1212 16:18:46.070943 1 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstall 2025-12-12T16:18:46.070952036+00:00 stderr F W1212 16:18:46.070946 1 feature_gate.go:328] unrecognized feature gate: IngressControllerLBSubnetsAWS 2025-12-12T16:18:46.070952036+00:00 stderr F W1212 16:18:46.070948 1 feature_gate.go:328] unrecognized feature gate: MachineAPIMigration 2025-12-12T16:18:46.070960796+00:00 stderr F W1212 16:18:46.070951 1 feature_gate.go:328] unrecognized feature gate: ImageStreamImportMode 2025-12-12T16:18:46.070960796+00:00 stderr F W1212 16:18:46.070954 1 feature_gate.go:328] unrecognized feature gate: NewOLMPreflightPermissionChecks 2025-12-12T16:18:46.070960796+00:00 stderr F W1212 16:18:46.070958 1 feature_gate.go:328] unrecognized feature gate: BootcNodeManagement 2025-12-12T16:18:46.070969377+00:00 stderr F W1212 16:18:46.070961 1 feature_gate.go:328] unrecognized feature gate: MultiArchInstallAzure 2025-12-12T16:18:46.070969377+00:00 stderr F W1212 16:18:46.070965 1 feature_gate.go:328] unrecognized feature gate: VSphereMultiDisk 2025-12-12T16:18:46.070977887+00:00 stderr F W1212 16:18:46.070968 1 feature_gate.go:328] unrecognized feature gate: ExternalSnapshotMetadata 2025-12-12T16:18:46.070977887+00:00 stderr F W1212 16:18:46.070973 1 feature_gate.go:349] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
2025-12-12T16:18:46.070985097+00:00 stderr F W1212 16:18:46.070978 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesvSphere 2025-12-12T16:18:46.070985097+00:00 stderr F W1212 16:18:46.070981 1 feature_gate.go:328] unrecognized feature gate: AzureWorkloadIdentity 2025-12-12T16:18:46.070992307+00:00 stderr F W1212 16:18:46.070985 1 feature_gate.go:328] unrecognized feature gate: InsightsConfigAPI 2025-12-12T16:18:46.070992307+00:00 stderr F W1212 16:18:46.070988 1 feature_gate.go:328] unrecognized feature gate: UpgradeStatus 2025-12-12T16:18:46.070999497+00:00 stderr F W1212 16:18:46.070992 1 feature_gate.go:328] unrecognized feature gate: DyanmicServiceEndpointIBMCloud 2025-12-12T16:18:46.070999497+00:00 stderr F W1212 16:18:46.070995 1 feature_gate.go:328] unrecognized feature gate: AdditionalRoutingCapabilities 2025-12-12T16:18:46.071010078+00:00 stderr F W1212 16:18:46.070998 1 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerificationPKI 2025-12-12T16:18:46.071010078+00:00 stderr F W1212 16:18:46.071001 1 feature_gate.go:328] unrecognized feature gate: MachineConfigNodes 2025-12-12T16:18:46.071010078+00:00 stderr F W1212 16:18:46.071003 1 feature_gate.go:328] unrecognized feature gate: AzureClusterHostedDNSInstall 2025-12-12T16:18:46.071010078+00:00 stderr F W1212 16:18:46.071006 1 feature_gate.go:328] unrecognized feature gate: MultiDiskSetup 2025-12-12T16:18:46.073562381+00:00 stderr F I1212 16:18:46.073171 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" 2025-12-12T16:18:46.271937825+00:00 stderr F I1212 16:18:46.271863 1 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:18:46.272008287+00:00 stderr F I1212 16:18:46.271973 1 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" 2025-12-12T16:18:46.278430636+00:00 stderr F I1212 16:18:46.278372 1 controllermanager.go:203] "Starting" version="v1.33.5" 2025-12-12T16:18:46.278430636+00:00 stderr F I1212 16:18:46.278401 1 controllermanager.go:205] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" 2025-12-12T16:18:46.280061706+00:00 stderr F I1212 16:18:46.280014 1 dynamic_cafile_content.go:161] "Starting controller" name="request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" 2025-12-12T16:18:46.280080546+00:00 stderr F I1212 16:18:46.280055 1 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:18:46.280442755+00:00 stderr F I1212 16:18:46.280361 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" 2025-12-12T16:18:46.280530428+00:00 stderr F I1212 16:18:46.280483 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 
2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:18:46.280439645 +0000 UTC))" 2025-12-12T16:18:46.280557118+00:00 stderr F I1212 16:18:46.280531 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:18:46.280520807 +0000 UTC))" 2025-12-12T16:18:46.280579279+00:00 stderr F I1212 16:18:46.280558 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:18:46.280549948 +0000 UTC))" 2025-12-12T16:18:46.280602509+00:00 stderr F I1212 16:18:46.280580 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:18:46.280573249 +0000 UTC))" 2025-12-12T16:18:46.280614360+00:00 stderr F I1212 16:18:46.280598 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:18:46.280591839 +0000 UTC))" 2025-12-12T16:18:46.280642820+00:00 stderr F I1212 16:18:46.280621 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:18:46.28061528 +0000 UTC))" 2025-12-12T16:18:46.280663411+00:00 stderr F I1212 16:18:46.280642 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:18:46.28063618 +0000 UTC))" 2025-12-12T16:18:46.280673931+00:00 stderr F I1212 16:18:46.280662 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC 
to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:18:46.280657281 +0000 UTC))" 2025-12-12T16:18:46.280699402+00:00 stderr F I1212 16:18:46.280678 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:18:46.280673181 +0000 UTC))" 2025-12-12T16:18:46.280725352+00:00 stderr F I1212 16:18:46.280704 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:18:46.280698052 +0000 UTC))" 2025-12-12T16:18:46.280973339+00:00 stderr F I1212 16:18:46.280929 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"kube-controller-manager.openshift-kube-controller-manager.svc\" [serving] validServingFor=[kube-controller-manager.openshift-kube-controller-manager.svc,kube-controller-manager.openshift-kube-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:18:46.280917737 +0000 UTC))" 2025-12-12T16:18:46.281159613+00:00 stderr F I1212 16:18:46.281120 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556326\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556326\" (2025-12-12 15:18:46 +0000 UTC to 2028-12-12 15:18:46 +0000 UTC (now=2025-12-12 16:18:46.281111412 +0000 UTC))" 2025-12-12T16:18:46.281171763+00:00 stderr F I1212 16:18:46.281161 1 secure_serving.go:211] Serving securely on [::]:10257 2025-12-12T16:18:46.281651535+00:00 stderr F I1212 16:18:46.281621 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:18:46.281768978+00:00 stderr F I1212 16:18:46.281738 1 leaderelection.go:257] attempting to acquire leader lease kube-system/kube-controller-manager... 
2025-12-12T16:19:05.233615588+00:00 stderr F I1212 16:19:05.233552 1 leaderelection.go:271] successfully acquired lease kube-system/kube-controller-manager 2025-12-12T16:19:05.233735571+00:00 stderr F I1212 16:19:05.233650 1 event.go:389] "Event occurred" object="kube-system/kube-controller-manager" fieldPath="" kind="Lease" apiVersion="coordination.k8s.io/v1" type="Normal" reason="LeaderElection" message="crc_31ba8ef3-0ce5-4226-a7f4-94d32420e3d4 became leader" 2025-12-12T16:19:05.236964231+00:00 stderr F I1212 16:19:05.236933 1 controllermanager.go:796] "Starting controller" controller="serviceaccount-token-controller" 2025-12-12T16:19:05.238319694+00:00 stderr F I1212 16:19:05.238302 1 controllermanager.go:827] "Started controller" controller="serviceaccount-token-controller" 2025-12-12T16:19:05.238367876+00:00 stderr F I1212 16:19:05.238353 1 controllermanager.go:796] "Starting controller" controller="node-ipam-controller" 2025-12-12T16:19:05.238397986+00:00 stderr F I1212 16:19:05.238385 1 controllermanager.go:805] "Warning: skipping controller" controller="node-ipam-controller" 2025-12-12T16:19:05.238426517+00:00 stderr F I1212 16:19:05.238412 1 controllermanager.go:785] "Skipping a cloud provider controller" controller="service-lb-controller" 2025-12-12T16:19:05.238457208+00:00 stderr F I1212 16:19:05.238444 1 controllermanager.go:796] "Starting controller" controller="deployment-controller" 2025-12-12T16:19:05.238577171+00:00 stderr F I1212 16:19:05.238317 1 shared_informer.go:350] "Waiting for caches to sync" controller="tokens" 2025-12-12T16:19:05.242120368+00:00 stderr F I1212 16:19:05.242076 1 controllermanager.go:827] "Started controller" controller="deployment-controller" 2025-12-12T16:19:05.242120368+00:00 stderr F I1212 16:19:05.242102 1 controllermanager.go:796] "Starting controller" controller="horizontal-pod-autoscaler-controller" 2025-12-12T16:19:05.242223601+00:00 stderr F I1212 16:19:05.242200 1 deployment_controller.go:173] "Starting controller" logger="deployment-controller" controller="deployment" 2025-12-12T16:19:05.242223601+00:00 stderr F I1212 16:19:05.242215 1 shared_informer.go:350] "Waiting for caches to sync" controller="deployment" 2025-12-12T16:19:05.249287916+00:00 stderr F I1212 16:19:05.249230 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.260073682+00:00 stderr F I1212 16:19:05.260011 1 controllermanager.go:827] "Started controller" controller="horizontal-pod-autoscaler-controller" 2025-12-12T16:19:05.260073682+00:00 stderr F I1212 16:19:05.260045 1 controllermanager.go:796] "Starting controller" controller="node-lifecycle-controller" 2025-12-12T16:19:05.260120283+00:00 stderr F I1212 16:19:05.260081 1 horizontal.go:204] "Starting HPA controller" logger="horizontal-pod-autoscaler-controller" 2025-12-12T16:19:05.260120283+00:00 stderr F I1212 16:19:05.260102 1 shared_informer.go:350] "Waiting for caches to sync" controller="HPA" 2025-12-12T16:19:05.263652601+00:00 stderr F I1212 16:19:05.263615 1 node_lifecycle_controller.go:419] "Controller will reconcile labels" logger="node-lifecycle-controller" 2025-12-12T16:19:05.263729603+00:00 stderr F I1212 16:19:05.263707 1 controllermanager.go:827] "Started controller" controller="node-lifecycle-controller" 2025-12-12T16:19:05.263729603+00:00 stderr F I1212 16:19:05.263720 1 controllermanager.go:796] "Starting controller" controller="persistentvolume-binder-controller" 2025-12-12T16:19:05.263773464+00:00 stderr F I1212 
16:19:05.263749 1 node_lifecycle_controller.go:453] "Sending events to api server" logger="node-lifecycle-controller" 2025-12-12T16:19:05.263786334+00:00 stderr F I1212 16:19:05.263779 1 node_lifecycle_controller.go:464] "Starting node controller" logger="node-lifecycle-controller" 2025-12-12T16:19:05.263793904+00:00 stderr F I1212 16:19:05.263786 1 shared_informer.go:350] "Waiting for caches to sync" controller="taint" 2025-12-12T16:19:05.268767677+00:00 stderr F I1212 16:19:05.268738 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/host-path" 2025-12-12T16:19:05.268767677+00:00 stderr F I1212 16:19:05.268761 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/nfs" 2025-12-12T16:19:05.268786368+00:00 stderr F I1212 16:19:05.268778 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" 2025-12-12T16:19:05.268860500+00:00 stderr F I1212 16:19:05.268838 1 controllermanager.go:827] "Started controller" controller="persistentvolume-binder-controller" 2025-12-12T16:19:05.268860500+00:00 stderr F I1212 16:19:05.268854 1 controllermanager.go:796] "Starting controller" controller="persistentvolume-attach-detach-controller" 2025-12-12T16:19:05.268991443+00:00 stderr F I1212 16:19:05.268955 1 pv_controller_base.go:308] "Starting persistent volume controller" logger="persistentvolume-binder-controller" 2025-12-12T16:19:05.268991443+00:00 stderr F I1212 16:19:05.268973 1 shared_informer.go:350] "Waiting for caches to sync" controller="persistent volume" 2025-12-12T16:19:05.273364991+00:00 stderr F W1212 16:19:05.273324 1 probe.go:272] Flexvolume plugin directory at /etc/kubernetes/kubelet-plugins/volume/exec does not exist. Recreating. 2025-12-12T16:19:05.275516204+00:00 stderr F I1212 16:19:05.275469 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/fc" 2025-12-12T16:19:05.275534405+00:00 stderr F I1212 16:19:05.275513 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" 2025-12-12T16:19:05.275541645+00:00 stderr F I1212 16:19:05.275535 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/csi" 2025-12-12T16:19:05.275825472+00:00 stderr F I1212 16:19:05.275801 1 controllermanager.go:827] "Started controller" controller="persistentvolume-attach-detach-controller" 2025-12-12T16:19:05.275870143+00:00 stderr F I1212 16:19:05.275821 1 controllermanager.go:779] "Controller is disabled by a feature gate" controller="volumeattributesclass-protection-controller" requiredFeatureGates=["VolumeAttributesClass"] 2025-12-12T16:19:05.275870143+00:00 stderr F I1212 16:19:05.275862 1 controllermanager.go:779] "Controller is disabled by a feature gate" controller="resourceclaim-controller" requiredFeatureGates=["DynamicResourceAllocation"] 2025-12-12T16:19:05.275878803+00:00 stderr F I1212 16:19:05.275869 1 controllermanager.go:779] "Controller is disabled by a feature gate" controller="device-taint-eviction-controller" requiredFeatureGates=["DynamicResourceAllocation","DRADeviceTaints"] 2025-12-12T16:19:05.275885843+00:00 stderr F I1212 16:19:05.275880 1 controllermanager.go:796] "Starting controller" controller="job-controller" 2025-12-12T16:19:05.275949765+00:00 stderr F I1212 16:19:05.275913 1 attach_detach_controller.go:338] "Starting attach detach controller" logger="persistentvolume-attach-detach-controller" 2025-12-12T16:19:05.275949765+00:00 stderr F I1212 16:19:05.275940 1 shared_informer.go:350] "Waiting for caches to sync" controller="attach detach" 2025-12-12T16:19:05.279767049+00:00 stderr F 
I1212 16:19:05.279737 1 controllermanager.go:827] "Started controller" controller="job-controller" 2025-12-12T16:19:05.279767049+00:00 stderr F I1212 16:19:05.279754 1 controllermanager.go:796] "Starting controller" controller="pod-garbage-collector-controller" 2025-12-12T16:19:05.279943504+00:00 stderr F I1212 16:19:05.279892 1 job_controller.go:243] "Starting job controller" logger="job-controller" 2025-12-12T16:19:05.279943504+00:00 stderr F I1212 16:19:05.279913 1 shared_informer.go:350] "Waiting for caches to sync" controller="job" 2025-12-12T16:19:05.282923447+00:00 stderr F I1212 16:19:05.282896 1 controllermanager.go:827] "Started controller" controller="pod-garbage-collector-controller" 2025-12-12T16:19:05.282923447+00:00 stderr F I1212 16:19:05.282909 1 controllermanager.go:796] "Starting controller" controller="resourcequota-controller" 2025-12-12T16:19:05.283001609+00:00 stderr F I1212 16:19:05.282969 1 gc_controller.go:99] "Starting GC controller" logger="pod-garbage-collector-controller" 2025-12-12T16:19:05.283001609+00:00 stderr F I1212 16:19:05.282988 1 shared_informer.go:350] "Waiting for caches to sync" controller="GC" 2025-12-12T16:19:05.306591862+00:00 stderr F I1212 16:19:05.306513 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.317977404+00:00 stderr F I1212 16:19:05.317929 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="routes.route.openshift.io" 2025-12-12T16:19:05.318009075+00:00 stderr F I1212 16:19:05.317983 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="egressservices.k8s.ovn.org" 2025-12-12T16:19:05.318016585+00:00 stderr F I1212 16:19:05.318007 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="probes.monitoring.coreos.com" 2025-12-12T16:19:05.318089807+00:00 stderr F I1212 16:19:05.318030 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="gateways.gateway.networking.k8s.io" 2025-12-12T16:19:05.318089807+00:00 stderr F I1212 16:19:05.318056 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="prometheusrules.monitoring.coreos.com" 2025-12-12T16:19:05.318089807+00:00 stderr F I1212 16:19:05.318079 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="servicemonitors.monitoring.coreos.com" 2025-12-12T16:19:05.318128998+00:00 stderr F I1212 16:19:05.318109 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="userdefinednetworks.k8s.ovn.org" 2025-12-12T16:19:05.318155998+00:00 stderr F I1212 16:19:05.318137 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="alertmanagerconfigs.monitoring.coreos.com" 2025-12-12T16:19:05.318164849+00:00 stderr F I1212 16:19:05.318157 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="jobs.batch" 2025-12-12T16:19:05.318191009+00:00 stderr F I1212 16:19:05.318170 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="deployments.apps" 
2025-12-12T16:19:05.318232190+00:00 stderr F I1212 16:19:05.318212 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="controlplanemachinesets.machine.openshift.io" 2025-12-12T16:19:05.318252431+00:00 stderr F I1212 16:19:05.318243 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="alertrelabelconfigs.monitoring.openshift.io" 2025-12-12T16:19:05.318301712+00:00 stderr F I1212 16:19:05.318282 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="csistoragecapacities.storage.k8s.io" 2025-12-12T16:19:05.318321062+00:00 stderr F I1212 16:19:05.318307 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="builds.build.openshift.io" 2025-12-12T16:19:05.318352793+00:00 stderr F I1212 16:19:05.318335 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="templates.template.openshift.io" 2025-12-12T16:19:05.318377454+00:00 stderr F I1212 16:19:05.318360 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="dnsrecords.ingress.operator.openshift.io" 2025-12-12T16:19:05.318405054+00:00 stderr F I1212 16:19:05.318387 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="ipaddressclaims.ipam.cluster.x-k8s.io" 2025-12-12T16:19:05.318431015+00:00 stderr F I1212 16:19:05.318413 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="network-attachment-definitions.k8s.cni.cncf.io" 2025-12-12T16:19:05.318464526+00:00 stderr F I1212 16:19:05.318446 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="machineautoscalers.autoscaling.openshift.io" 2025-12-12T16:19:05.318488726+00:00 stderr F I1212 16:19:05.318471 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="httproutes.gateway.networking.k8s.io" 2025-12-12T16:19:05.318516327+00:00 stderr F I1212 16:19:05.318499 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="egressfirewalls.k8s.ovn.org" 2025-12-12T16:19:05.318543798+00:00 stderr F I1212 16:19:05.318526 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="egressqoses.k8s.ovn.org" 2025-12-12T16:19:05.318563408+00:00 stderr F I1212 16:19:05.318550 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="machinehealthchecks.machine.openshift.io" 2025-12-12T16:19:05.318592459+00:00 stderr F I1212 16:19:05.318573 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="prometheuses.monitoring.coreos.com" 2025-12-12T16:19:05.318610820+00:00 stderr F I1212 16:19:05.318598 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="ingresscontrollers.operator.openshift.io" 2025-12-12T16:19:05.318645240+00:00 stderr F I1212 16:19:05.318624 1 resource_quota_monitor.go:227] "QuotaMonitor created object 
count evaluator" logger="resourcequota-controller" resource="operatorgroups.operators.coreos.com" 2025-12-12T16:19:05.318665941+00:00 stderr F I1212 16:19:05.318650 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="nodeslicepools.whereabouts.cni.cncf.io" 2025-12-12T16:19:05.318673031+00:00 stderr F I1212 16:19:05.318665 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="cronjobs.batch" 2025-12-12T16:19:05.318704252+00:00 stderr F I1212 16:19:05.318687 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="podnetworkconnectivitychecks.controlplane.operator.openshift.io" 2025-12-12T16:19:05.318728832+00:00 stderr F I1212 16:19:05.318711 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="ipamclaims.k8s.cni.cncf.io" 2025-12-12T16:19:05.318753393+00:00 stderr F I1212 16:19:05.318736 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="machines.machine.openshift.io" 2025-12-12T16:19:05.318777944+00:00 stderr F I1212 16:19:05.318760 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="operatorpkis.network.operator.openshift.io" 2025-12-12T16:19:05.318804494+00:00 stderr F I1212 16:19:05.318787 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="clusterserviceversions.operators.coreos.com" 2025-12-12T16:19:05.318845135+00:00 stderr F I1212 16:19:05.318827 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="networkpolicies.networking.k8s.io" 2025-12-12T16:19:05.318870536+00:00 stderr F I1212 16:19:05.318853 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="imagepolicies.config.openshift.io" 2025-12-12T16:19:05.318908027+00:00 stderr F I1212 16:19:05.318890 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="referencegrants.gateway.networking.k8s.io" 2025-12-12T16:19:05.318936278+00:00 stderr F I1212 16:19:05.318919 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="alertmanagers.monitoring.coreos.com" 2025-12-12T16:19:05.318961138+00:00 stderr F I1212 16:19:05.318944 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="thanosrulers.monitoring.coreos.com" 2025-12-12T16:19:05.318969138+00:00 stderr F I1212 16:19:05.318959 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="leases.coordination.k8s.io" 2025-12-12T16:19:05.319006569+00:00 stderr F I1212 16:19:05.318985 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="deploymentconfigs.apps.openshift.io" 2025-12-12T16:19:05.319026920+00:00 stderr F I1212 16:19:05.319011 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="imagestreams.image.openshift.io" 2025-12-12T16:19:05.319056461+00:00 stderr F I1212 
16:19:05.319038 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="templateinstances.template.openshift.io" 2025-12-12T16:19:05.319081691+00:00 stderr F I1212 16:19:05.319063 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="metal3remediationtemplates.infrastructure.cluster.x-k8s.io" 2025-12-12T16:19:05.319106062+00:00 stderr F I1212 16:19:05.319088 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="podmonitors.monitoring.coreos.com" 2025-12-12T16:19:05.319130782+00:00 stderr F I1212 16:19:05.319114 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="ippools.whereabouts.cni.cncf.io" 2025-12-12T16:19:05.319137913+00:00 stderr F I1212 16:19:05.319131 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="podtemplates" 2025-12-12T16:19:05.319150373+00:00 stderr F I1212 16:19:05.319143 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="statefulsets.apps" 2025-12-12T16:19:05.319210384+00:00 stderr F I1212 16:19:05.319172 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="roles.rbac.authorization.k8s.io" 2025-12-12T16:19:05.319234655+00:00 stderr F I1212 16:19:05.319220 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="overlappingrangeipreservations.whereabouts.cni.cncf.io" 2025-12-12T16:19:05.319253905+00:00 stderr F I1212 16:19:05.319240 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="limitranges" 2025-12-12T16:19:05.319285936+00:00 stderr F I1212 16:19:05.319268 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="controllerrevisions.apps" 2025-12-12T16:19:05.319305567+00:00 stderr F I1212 16:19:05.319290 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="replicasets.apps" 2025-12-12T16:19:05.319330657+00:00 stderr F I1212 16:19:05.319308 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="endpointslices.discovery.k8s.io" 2025-12-12T16:19:05.319337797+00:00 stderr F I1212 16:19:05.319330 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="buildconfigs.build.openshift.io" 2025-12-12T16:19:05.319369378+00:00 stderr F I1212 16:19:05.319352 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="ipaddresses.ipam.cluster.x-k8s.io" 2025-12-12T16:19:05.319397239+00:00 stderr F I1212 16:19:05.319380 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="installplans.operators.coreos.com" 2025-12-12T16:19:05.319450050+00:00 stderr F I1212 16:19:05.319432 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="subscriptions.operators.coreos.com" 2025-12-12T16:19:05.319474781+00:00 stderr F 
I1212 16:19:05.319453 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="rolebindings.rbac.authorization.k8s.io" 2025-12-12T16:19:05.319493971+00:00 stderr F I1212 16:19:05.319478 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="grpcroutes.gateway.networking.k8s.io" 2025-12-12T16:19:05.319501142+00:00 stderr F I1212 16:19:05.319494 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="horizontalpodautoscalers.autoscaling" 2025-12-12T16:19:05.319520952+00:00 stderr F I1212 16:19:05.319508 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="ingresses.networking.k8s.io" 2025-12-12T16:19:05.319545303+00:00 stderr F I1212 16:19:05.319528 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="poddisruptionbudgets.policy" 2025-12-12T16:19:05.319571723+00:00 stderr F I1212 16:19:05.319554 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="catalogsources.operators.coreos.com" 2025-12-12T16:19:05.319580353+00:00 stderr F I1212 16:19:05.319572 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="endpoints" 2025-12-12T16:19:05.319612634+00:00 stderr F I1212 16:19:05.319595 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="projecthelmchartrepositories.helm.openshift.io" 2025-12-12T16:19:05.319637265+00:00 stderr F I1212 16:19:05.319620 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="machinesets.machine.openshift.io" 2025-12-12T16:19:05.319659175+00:00 stderr F I1212 16:19:05.319644 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="alertingrules.monitoring.openshift.io" 2025-12-12T16:19:05.319696126+00:00 stderr F I1212 16:19:05.319676 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="egressrouters.network.operator.openshift.io" 2025-12-12T16:19:05.319718757+00:00 stderr F I1212 16:19:05.319701 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="operatorconditions.operators.coreos.com" 2025-12-12T16:19:05.320136827+00:00 stderr F I1212 16:19:05.320097 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="daemonsets.apps" 2025-12-12T16:19:05.320160958+00:00 stderr F I1212 16:19:05.320145 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="rolebindingrestrictions.authorization.openshift.io" 2025-12-12T16:19:05.320260010+00:00 stderr F I1212 16:19:05.320230 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="metal3remediations.infrastructure.cluster.x-k8s.io" 2025-12-12T16:19:05.320855345+00:00 stderr F I1212 16:19:05.320816 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" 
resource="serviceaccounts" 2025-12-12T16:19:05.320855345+00:00 stderr F I1212 16:19:05.320849 1 controllermanager.go:827] "Started controller" controller="resourcequota-controller" 2025-12-12T16:19:05.320893196+00:00 stderr F I1212 16:19:05.320866 1 controllermanager.go:796] "Starting controller" controller="certificatesigningrequest-approving-controller" 2025-12-12T16:19:05.320921667+00:00 stderr F I1212 16:19:05.320888 1 resource_quota_controller.go:300] "Starting resource quota controller" logger="resourcequota-controller" 2025-12-12T16:19:05.320930467+00:00 stderr F I1212 16:19:05.320923 1 shared_informer.go:350] "Waiting for caches to sync" controller="resource quota" 2025-12-12T16:19:05.320973498+00:00 stderr F I1212 16:19:05.320948 1 resource_quota_monitor.go:308] "QuotaMonitor running" logger="resourcequota-controller" 2025-12-12T16:19:05.324481715+00:00 stderr F I1212 16:19:05.324422 1 controllermanager.go:827] "Started controller" controller="certificatesigningrequest-approving-controller" 2025-12-12T16:19:05.324481715+00:00 stderr F I1212 16:19:05.324451 1 controllermanager.go:796] "Starting controller" controller="certificatesigningrequest-cleaner-controller" 2025-12-12T16:19:05.324567487+00:00 stderr F I1212 16:19:05.324527 1 certificate_controller.go:120] "Starting certificate controller" logger="certificatesigningrequest-approving-controller" name="csrapproving" 2025-12-12T16:19:05.324567487+00:00 stderr F I1212 16:19:05.324559 1 shared_informer.go:350] "Waiting for caches to sync" controller="certificate-csrapproving" 2025-12-12T16:19:05.326976346+00:00 stderr F I1212 16:19:05.326908 1 controllermanager.go:827] "Started controller" controller="certificatesigningrequest-cleaner-controller" 2025-12-12T16:19:05.326976346+00:00 stderr F I1212 16:19:05.326939 1 controllermanager.go:796] "Starting controller" controller="clusterrole-aggregation-controller" 2025-12-12T16:19:05.327066979+00:00 stderr F I1212 16:19:05.327034 1 cleaner.go:83] "Starting CSR cleaner controller" logger="certificatesigningrequest-cleaner-controller" 2025-12-12T16:19:05.328205617+00:00 stderr F I1212 16:19:05.328108 1 resource_quota_controller.go:476] "syncing resource quota controller with updated resources from discovery" logger="resourcequota-controller" diff="added: [/v1, Resource=configmaps /v1, Resource=endpoints /v1, Resource=events /v1, Resource=limitranges /v1, Resource=persistentvolumeclaims /v1, Resource=pods /v1, Resource=podtemplates /v1, Resource=replicationcontrollers /v1, Resource=resourcequotas /v1, Resource=secrets /v1, Resource=serviceaccounts /v1, Resource=services apps.openshift.io/v1, Resource=deploymentconfigs apps/v1, Resource=controllerrevisions apps/v1, Resource=daemonsets apps/v1, Resource=deployments apps/v1, Resource=replicasets apps/v1, Resource=statefulsets authorization.openshift.io/v1, Resource=rolebindingrestrictions autoscaling.openshift.io/v1beta1, Resource=machineautoscalers autoscaling/v2, Resource=horizontalpodautoscalers batch/v1, Resource=cronjobs batch/v1, Resource=jobs build.openshift.io/v1, Resource=buildconfigs build.openshift.io/v1, Resource=builds config.openshift.io/v1, Resource=imagepolicies controlplane.operator.openshift.io/v1alpha1, Resource=podnetworkconnectivitychecks coordination.k8s.io/v1, Resource=leases discovery.k8s.io/v1, Resource=endpointslices events.k8s.io/v1, Resource=events gateway.networking.k8s.io/v1, Resource=gateways gateway.networking.k8s.io/v1, Resource=grpcroutes gateway.networking.k8s.io/v1, Resource=httproutes 
gateway.networking.k8s.io/v1beta1, Resource=referencegrants helm.openshift.io/v1beta1, Resource=projecthelmchartrepositories image.openshift.io/v1, Resource=imagestreams infrastructure.cluster.x-k8s.io/v1beta1, Resource=metal3remediations infrastructure.cluster.x-k8s.io/v1beta1, Resource=metal3remediationtemplates ingress.operator.openshift.io/v1, Resource=dnsrecords ipam.cluster.x-k8s.io/v1beta1, Resource=ipaddressclaims ipam.cluster.x-k8s.io/v1beta1, Resource=ipaddresses k8s.cni.cncf.io/v1, Resource=network-attachment-definitions k8s.cni.cncf.io/v1alpha1, Resource=ipamclaims k8s.ovn.org/v1, Resource=egressfirewalls k8s.ovn.org/v1, Resource=egressqoses k8s.ovn.org/v1, Resource=egressservices k8s.ovn.org/v1, Resource=userdefinednetworks machine.openshift.io/v1, Resource=controlplanemachinesets machine.openshift.io/v1beta1, Resource=machinehealthchecks machine.openshift.io/v1beta1, Resource=machines machine.openshift.io/v1beta1, Resource=machinesets monitoring.coreos.com/v1, Resource=alertmanagers monitoring.coreos.com/v1, Resource=podmonitors monitoring.coreos.com/v1, Resource=probes monitoring.coreos.com/v1, Resource=prometheuses monitoring.coreos.com/v1, Resource=prometheusrules monitoring.coreos.com/v1, Resource=servicemonitors monitoring.coreos.com/v1, Resource=thanosrulers monitoring.coreos.com/v1beta1, Resource=alertmanagerconfigs monitoring.openshift.io/v1, Resource=alertingrules monitoring.openshift.io/v1, Resource=alertrelabelconfigs network.operator.openshift.io/v1, Resource=egressrouters network.operator.openshift.io/v1, Resource=operatorpkis networking.k8s.io/v1, Resource=ingresses networking.k8s.io/v1, Resource=networkpolicies operator.openshift.io/v1, Resource=ingresscontrollers operators.coreos.com/v1, Resource=operatorgroups operators.coreos.com/v1alpha1, Resource=catalogsources operators.coreos.com/v1alpha1, Resource=clusterserviceversions operators.coreos.com/v1alpha1, Resource=installplans operators.coreos.com/v1alpha1, Resource=subscriptions operators.coreos.com/v2, Resource=operatorconditions policy/v1, Resource=poddisruptionbudgets rbac.authorization.k8s.io/v1, Resource=rolebindings rbac.authorization.k8s.io/v1, Resource=roles route.openshift.io/v1, Resource=routes storage.k8s.io/v1, Resource=csistoragecapacities template.openshift.io/v1, Resource=templateinstances template.openshift.io/v1, Resource=templates whereabouts.cni.cncf.io/v1alpha1, Resource=ippools whereabouts.cni.cncf.io/v1alpha1, Resource=nodeslicepools whereabouts.cni.cncf.io/v1alpha1, Resource=overlappingrangeipreservations], removed: []" 2025-12-12T16:19:05.329398226+00:00 stderr F I1212 16:19:05.329344 1 controllermanager.go:827] "Started controller" controller="clusterrole-aggregation-controller" 2025-12-12T16:19:05.329398226+00:00 stderr F I1212 16:19:05.329366 1 controllermanager.go:779] "Controller is disabled by a feature gate" controller="kube-apiserver-serving-clustertrustbundle-publisher-controller" requiredFeatureGates=["ClusterTrustBundle"] 2025-12-12T16:19:05.329398226+00:00 stderr F I1212 16:19:05.329382 1 clusterroleaggregation_controller.go:194] "Starting ClusterRoleAggregator controller" logger="clusterrole-aggregation-controller" 2025-12-12T16:19:05.329425047+00:00 stderr F I1212 16:19:05.329396 1 shared_informer.go:350] "Waiting for caches to sync" controller="ClusterRoleAggregator" 2025-12-12T16:19:05.329425047+00:00 stderr F I1212 16:19:05.329385 1 controllermanager.go:796] "Starting controller" controller="legacy-serviceaccount-token-cleaner-controller" 
2025-12-12T16:19:05.331442127+00:00 stderr F I1212 16:19:05.331397 1 controllermanager.go:827] "Started controller" controller="legacy-serviceaccount-token-cleaner-controller" 2025-12-12T16:19:05.331442127+00:00 stderr F I1212 16:19:05.331414 1 controllermanager.go:796] "Starting controller" controller="cronjob-controller" 2025-12-12T16:19:05.331532929+00:00 stderr F I1212 16:19:05.331502 1 legacy_serviceaccount_token_cleaner.go:103] "Starting legacy service account token cleaner controller" logger="legacy-serviceaccount-token-cleaner-controller" 2025-12-12T16:19:05.331532929+00:00 stderr F I1212 16:19:05.331520 1 shared_informer.go:350] "Waiting for caches to sync" controller="legacy-service-account-token-cleaner" 2025-12-12T16:19:05.333502078+00:00 stderr F I1212 16:19:05.333457 1 controllermanager.go:827] "Started controller" controller="cronjob-controller" 2025-12-12T16:19:05.333502078+00:00 stderr F I1212 16:19:05.333473 1 controllermanager.go:790] "Warning: controller is disabled" controller="bootstrap-signer-controller" 2025-12-12T16:19:05.333502078+00:00 stderr F I1212 16:19:05.333479 1 controllermanager.go:796] "Starting controller" controller="persistentvolumeclaim-protection-controller" 2025-12-12T16:19:05.333653291+00:00 stderr F I1212 16:19:05.333621 1 cronjob_controllerv2.go:145] "Starting cronjob controller v2" logger="cronjob-controller" 2025-12-12T16:19:05.333653291+00:00 stderr F I1212 16:19:05.333634 1 shared_informer.go:350] "Waiting for caches to sync" controller="cronjob" 2025-12-12T16:19:05.335044546+00:00 stderr F I1212 16:19:05.334949 1 controllermanager.go:827] "Started controller" controller="persistentvolumeclaim-protection-controller" 2025-12-12T16:19:05.335044546+00:00 stderr F I1212 16:19:05.334966 1 controllermanager.go:796] "Starting controller" controller="ttl-after-finished-controller" 2025-12-12T16:19:05.335065156+00:00 stderr F I1212 16:19:05.335054 1 pvc_protection_controller.go:168] "Starting PVC protection controller" logger="persistentvolumeclaim-protection-controller" 2025-12-12T16:19:05.335073616+00:00 stderr F I1212 16:19:05.335064 1 shared_informer.go:350] "Waiting for caches to sync" controller="PVC protection" 2025-12-12T16:19:05.336794169+00:00 stderr F I1212 16:19:05.336679 1 controllermanager.go:827] "Started controller" controller="ttl-after-finished-controller" 2025-12-12T16:19:05.336794169+00:00 stderr F I1212 16:19:05.336697 1 controllermanager.go:796] "Starting controller" controller="validatingadmissionpolicy-status-controller" 2025-12-12T16:19:05.336837820+00:00 stderr F I1212 16:19:05.336816 1 ttlafterfinished_controller.go:112] "Starting TTL after finished controller" logger="ttl-after-finished-controller" 2025-12-12T16:19:05.336837820+00:00 stderr F I1212 16:19:05.336828 1 shared_informer.go:350] "Waiting for caches to sync" controller="TTL after finished" 2025-12-12T16:19:05.340578373+00:00 stderr F I1212 16:19:05.340534 1 shared_informer.go:357] "Caches are synced" controller="tokens" 2025-12-12T16:19:05.354021275+00:00 stderr F I1212 16:19:05.353964 1 controllermanager.go:827] "Started controller" controller="validatingadmissionpolicy-status-controller" 2025-12-12T16:19:05.354021275+00:00 stderr F I1212 16:19:05.353988 1 controllermanager.go:796] "Starting controller" controller="endpointslice-controller" 2025-12-12T16:19:05.354021275+00:00 stderr F I1212 16:19:05.354000 1 shared_informer.go:350] "Waiting for caches to sync" controller="validatingadmissionpolicy-status" 2025-12-12T16:19:05.355915292+00:00 stderr F I1212 
16:19:05.355895 1 controllermanager.go:827] "Started controller" controller="endpointslice-controller" 2025-12-12T16:19:05.355957943+00:00 stderr F I1212 16:19:05.355946 1 controllermanager.go:796] "Starting controller" controller="namespace-controller" 2025-12-12T16:19:05.356171628+00:00 stderr F I1212 16:19:05.356143 1 endpointslice_controller.go:281] "Starting endpoint slice controller" logger="endpointslice-controller" 2025-12-12T16:19:05.356171628+00:00 stderr F I1212 16:19:05.356162 1 shared_informer.go:350] "Waiting for caches to sync" controller="endpoint_slice" 2025-12-12T16:19:05.390230989+00:00 stderr F I1212 16:19:05.388681 1 controllermanager.go:827] "Started controller" controller="namespace-controller" 2025-12-12T16:19:05.390230989+00:00 stderr F I1212 16:19:05.388698 1 controllermanager.go:785] "Skipping a cloud provider controller" controller="node-route-controller" 2025-12-12T16:19:05.390230989+00:00 stderr F I1212 16:19:05.388703 1 controllermanager.go:785] "Skipping a cloud provider controller" controller="cloud-node-lifecycle-controller" 2025-12-12T16:19:05.390230989+00:00 stderr F I1212 16:19:05.388709 1 controllermanager.go:796] "Starting controller" controller="persistentvolume-expander-controller" 2025-12-12T16:19:05.390230989+00:00 stderr F I1212 16:19:05.389458 1 namespace_controller.go:202] "Starting namespace controller" logger="namespace-controller" 2025-12-12T16:19:05.390230989+00:00 stderr F I1212 16:19:05.389470 1 shared_informer.go:350] "Waiting for caches to sync" controller="namespace" 2025-12-12T16:19:05.394992037+00:00 stderr F I1212 16:19:05.394934 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" 2025-12-12T16:19:05.395166671+00:00 stderr F I1212 16:19:05.395150 1 controllermanager.go:827] "Started controller" controller="persistentvolume-expander-controller" 2025-12-12T16:19:05.395243823+00:00 stderr F I1212 16:19:05.395229 1 controllermanager.go:796] "Starting controller" controller="service-ca-certificate-publisher-controller" 2025-12-12T16:19:05.395420537+00:00 stderr F I1212 16:19:05.395234 1 expand_controller.go:329] "Starting expand controller" logger="persistentvolume-expander-controller" 2025-12-12T16:19:05.395457998+00:00 stderr F I1212 16:19:05.395445 1 shared_informer.go:350] "Waiting for caches to sync" controller="expand" 2025-12-12T16:19:05.399474328+00:00 stderr F I1212 16:19:05.399421 1 controllermanager.go:827] "Started controller" controller="service-ca-certificate-publisher-controller" 2025-12-12T16:19:05.399474328+00:00 stderr F I1212 16:19:05.399445 1 controllermanager.go:796] "Starting controller" controller="replicationcontroller-controller" 2025-12-12T16:19:05.399581930+00:00 stderr F I1212 16:19:05.399564 1 publisher.go:80] Starting service CA certificate configmap publisher 2025-12-12T16:19:05.399619381+00:00 stderr F I1212 16:19:05.399606 1 shared_informer.go:350] "Waiting for caches to sync" controller="crt configmap" 2025-12-12T16:19:05.401949909+00:00 stderr F I1212 16:19:05.401900 1 controllermanager.go:827] "Started controller" controller="replicationcontroller-controller" 2025-12-12T16:19:05.401949909+00:00 stderr F I1212 16:19:05.401922 1 controllermanager.go:796] "Starting controller" controller="persistentvolume-protection-controller" 2025-12-12T16:19:05.401984170+00:00 stderr F I1212 16:19:05.401971 1 replica_set.go:219] "Starting controller" logger="replicationcontroller-controller" name="replicationcontroller" 2025-12-12T16:19:05.402486532+00:00 stderr F I1212 16:19:05.401984 1 
shared_informer.go:350] "Waiting for caches to sync" controller="ReplicationController" 2025-12-12T16:19:05.407766243+00:00 stderr F I1212 16:19:05.407727 1 controllermanager.go:827] "Started controller" controller="persistentvolume-protection-controller" 2025-12-12T16:19:05.407766243+00:00 stderr F I1212 16:19:05.407752 1 controllermanager.go:796] "Starting controller" controller="service-cidr-controller" 2025-12-12T16:19:05.407766243+00:00 stderr F I1212 16:19:05.407755 1 pv_protection_controller.go:81] "Starting PV protection controller" logger="persistentvolume-protection-controller" 2025-12-12T16:19:05.407787883+00:00 stderr F I1212 16:19:05.407768 1 shared_informer.go:350] "Waiting for caches to sync" controller="PV protection" 2025-12-12T16:19:05.412257564+00:00 stderr F I1212 16:19:05.411494 1 controllermanager.go:827] "Started controller" controller="service-cidr-controller" 2025-12-12T16:19:05.412257564+00:00 stderr F I1212 16:19:05.411526 1 controllermanager.go:796] "Starting controller" controller="replicaset-controller" 2025-12-12T16:19:05.412257564+00:00 stderr F I1212 16:19:05.411610 1 servicecidrs_controller.go:136] "Starting" logger="service-cidr-controller" controller="service-cidr-controller" 2025-12-12T16:19:05.412257564+00:00 stderr F I1212 16:19:05.411626 1 shared_informer.go:350] "Waiting for caches to sync" controller="service-cidr-controller" 2025-12-12T16:19:05.413562746+00:00 stderr F I1212 16:19:05.413530 1 controllermanager.go:827] "Started controller" controller="replicaset-controller" 2025-12-12T16:19:05.413562746+00:00 stderr F I1212 16:19:05.413547 1 controllermanager.go:796] "Starting controller" controller="serviceaccount-controller" 2025-12-12T16:19:05.413725150+00:00 stderr F I1212 16:19:05.413687 1 replica_set.go:219] "Starting controller" logger="replicaset-controller" name="replicaset" 2025-12-12T16:19:05.413737540+00:00 stderr F I1212 16:19:05.413721 1 shared_informer.go:350] "Waiting for caches to sync" controller="ReplicaSet" 2025-12-12T16:19:05.415906984+00:00 stderr F I1212 16:19:05.415879 1 controllermanager.go:827] "Started controller" controller="serviceaccount-controller" 2025-12-12T16:19:05.415906984+00:00 stderr F I1212 16:19:05.415894 1 controllermanager.go:796] "Starting controller" controller="disruption-controller" 2025-12-12T16:19:05.415963975+00:00 stderr F I1212 16:19:05.415945 1 serviceaccounts_controller.go:114] "Starting service account controller" logger="serviceaccount-controller" 2025-12-12T16:19:05.415963975+00:00 stderr F I1212 16:19:05.415960 1 shared_informer.go:350] "Waiting for caches to sync" controller="service account" 2025-12-12T16:19:05.421348908+00:00 stderr F I1212 16:19:05.420659 1 controllermanager.go:827] "Started controller" controller="disruption-controller" 2025-12-12T16:19:05.421348908+00:00 stderr F I1212 16:19:05.420679 1 controllermanager.go:796] "Starting controller" controller="statefulset-controller" 2025-12-12T16:19:05.421348908+00:00 stderr F I1212 16:19:05.420893 1 disruption.go:455] "Sending events to api server." 
logger="disruption-controller" 2025-12-12T16:19:05.421348908+00:00 stderr F I1212 16:19:05.420928 1 disruption.go:466] "Starting disruption controller" logger="disruption-controller" 2025-12-12T16:19:05.421348908+00:00 stderr F I1212 16:19:05.420934 1 shared_informer.go:350] "Waiting for caches to sync" controller="disruption" 2025-12-12T16:19:05.423509042+00:00 stderr F I1212 16:19:05.423469 1 controllermanager.go:827] "Started controller" controller="statefulset-controller" 2025-12-12T16:19:05.423509042+00:00 stderr F I1212 16:19:05.423488 1 controllermanager.go:790] "Warning: controller is disabled" controller="ttl-controller" 2025-12-12T16:19:05.423509042+00:00 stderr F I1212 16:19:05.423493 1 controllermanager.go:790] "Warning: controller is disabled" controller="token-cleaner-controller" 2025-12-12T16:19:05.423509042+00:00 stderr F I1212 16:19:05.423499 1 controllermanager.go:796] "Starting controller" controller="root-ca-certificate-publisher-controller" 2025-12-12T16:19:05.423778239+00:00 stderr F I1212 16:19:05.423749 1 stateful_set.go:166] "Starting stateful set controller" logger="statefulset-controller" 2025-12-12T16:19:05.423778239+00:00 stderr F I1212 16:19:05.423763 1 shared_informer.go:350] "Waiting for caches to sync" controller="stateful set" 2025-12-12T16:19:05.425686996+00:00 stderr F I1212 16:19:05.425632 1 controllermanager.go:827] "Started controller" controller="root-ca-certificate-publisher-controller" 2025-12-12T16:19:05.425686996+00:00 stderr F I1212 16:19:05.425647 1 controllermanager.go:796] "Starting controller" controller="daemonset-controller" 2025-12-12T16:19:05.425819009+00:00 stderr F I1212 16:19:05.425789 1 publisher.go:107] "Starting root CA cert publisher controller" logger="root-ca-certificate-publisher-controller" 2025-12-12T16:19:05.425858510+00:00 stderr F I1212 16:19:05.425846 1 shared_informer.go:350] "Waiting for caches to sync" controller="crt configmap" 2025-12-12T16:19:05.427504831+00:00 stderr F I1212 16:19:05.427456 1 controllermanager.go:827] "Started controller" controller="daemonset-controller" 2025-12-12T16:19:05.427504831+00:00 stderr F I1212 16:19:05.427475 1 controllermanager.go:796] "Starting controller" controller="certificatesigningrequest-signing-controller" 2025-12-12T16:19:05.427632484+00:00 stderr F I1212 16:19:05.427595 1 daemon_controller.go:316] "Starting daemon sets controller" logger="daemonset-controller" 2025-12-12T16:19:05.427632484+00:00 stderr F I1212 16:19:05.427609 1 shared_informer.go:350] "Waiting for caches to sync" controller="daemon sets" 2025-12-12T16:19:05.430634018+00:00 stderr F I1212 16:19:05.430586 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:19:05.430951146+00:00 stderr F I1212 16:19:05.430912 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:19:05.431240733+00:00 stderr F I1212 16:19:05.431205 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:19:05.431498529+00:00 stderr F I1212 16:19:05.431471 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" 
name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:19:05.431516500+00:00 stderr F I1212 16:19:05.431500 1 certificate_controller.go:120] "Starting certificate controller" logger="certificatesigningrequest-signing-controller" name="csrsigning-kubelet-client" 2025-12-12T16:19:05.431525560+00:00 stderr F I1212 16:19:05.431517 1 shared_informer.go:350] "Waiting for caches to sync" controller="certificate-csrsigning-kubelet-client" 2025-12-12T16:19:05.431554721+00:00 stderr F I1212 16:19:05.431473 1 certificate_controller.go:120] "Starting certificate controller" logger="certificatesigningrequest-signing-controller" name="csrsigning-kubelet-serving" 2025-12-12T16:19:05.431596192+00:00 stderr F I1212 16:19:05.431584 1 shared_informer.go:350] "Waiting for caches to sync" controller="certificate-csrsigning-kubelet-serving" 2025-12-12T16:19:05.431636473+00:00 stderr F I1212 16:19:05.431565 1 certificate_controller.go:120] "Starting certificate controller" logger="certificatesigningrequest-signing-controller" name="csrsigning-kube-apiserver-client" 2025-12-12T16:19:05.431636473+00:00 stderr F I1212 16:19:05.431604 1 dynamic_serving_content.go:135] "Starting controller" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:19:05.431657283+00:00 stderr F I1212 16:19:05.431636 1 shared_informer.go:350] "Waiting for caches to sync" controller="certificate-csrsigning-kube-apiserver-client" 2025-12-12T16:19:05.431699234+00:00 stderr F I1212 16:19:05.431539 1 dynamic_serving_content.go:135] "Starting controller" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:19:05.431748446+00:00 stderr F I1212 16:19:05.431691 1 dynamic_serving_content.go:135] "Starting controller" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:19:05.431819117+00:00 stderr F I1212 16:19:05.431790 1 certificate_controller.go:120] "Starting certificate controller" logger="certificatesigningrequest-signing-controller" name="csrsigning-legacy-unknown" 2025-12-12T16:19:05.431819117+00:00 stderr F I1212 16:19:05.431808 1 shared_informer.go:350] "Waiting for caches to sync" controller="certificate-csrsigning-legacy-unknown" 2025-12-12T16:19:05.431850278+00:00 stderr F I1212 16:19:05.431826 1 dynamic_serving_content.go:135] "Starting controller" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:19:05.432121255+00:00 stderr F I1212 16:19:05.431675 1 controllermanager.go:827] "Started controller" controller="certificatesigningrequest-signing-controller" 2025-12-12T16:19:05.432121255+00:00 stderr F I1212 16:19:05.432097 1 controllermanager.go:796] "Starting controller" controller="ephemeral-volume-controller" 2025-12-12T16:19:05.434768400+00:00 stderr F I1212 16:19:05.434721 1 controllermanager.go:827] "Started controller" controller="ephemeral-volume-controller" 2025-12-12T16:19:05.434798951+00:00 stderr F I1212 16:19:05.434761 1 controllermanager.go:779] "Controller is disabled by a feature gate" controller="storageversion-garbage-collector-controller" requiredFeatureGates=["APIServerIdentity","StorageVersionAPI"] 
2025-12-12T16:19:05.434798951+00:00 stderr F I1212 16:19:05.434792 1 controllermanager.go:796] "Starting controller" controller="taint-eviction-controller" 2025-12-12T16:19:05.434886673+00:00 stderr F I1212 16:19:05.434840 1 controller.go:173] "Starting ephemeral volume controller" logger="ephemeral-volume-controller" 2025-12-12T16:19:05.434896853+00:00 stderr F I1212 16:19:05.434887 1 shared_informer.go:350] "Waiting for caches to sync" controller="ephemeral" 2025-12-12T16:19:05.437749624+00:00 stderr F I1212 16:19:05.437689 1 controllermanager.go:827] "Started controller" controller="taint-eviction-controller" 2025-12-12T16:19:05.437749624+00:00 stderr F I1212 16:19:05.437707 1 controllermanager.go:796] "Starting controller" controller="storage-version-migrator-controller" 2025-12-12T16:19:05.437749624+00:00 stderr F I1212 16:19:05.437715 1 controllermanager.go:805] "Warning: skipping controller" controller="storage-version-migrator-controller" 2025-12-12T16:19:05.437749624+00:00 stderr F I1212 16:19:05.437722 1 controllermanager.go:796] "Starting controller" controller="selinux-warning-controller" 2025-12-12T16:19:05.437838046+00:00 stderr F I1212 16:19:05.437805 1 taint_eviction.go:282] "Starting" logger="taint-eviction-controller" controller="taint-eviction-controller" 2025-12-12T16:19:05.437869687+00:00 stderr F I1212 16:19:05.437849 1 taint_eviction.go:288] "Sending events to api server" logger="taint-eviction-controller" 2025-12-12T16:19:05.437886567+00:00 stderr F I1212 16:19:05.437877 1 shared_informer.go:350] "Waiting for caches to sync" controller="taint-eviction-controller" 2025-12-12T16:19:05.441802564+00:00 stderr F I1212 16:19:05.441750 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/host-path" 2025-12-12T16:19:05.441802564+00:00 stderr F I1212 16:19:05.441772 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/nfs" 2025-12-12T16:19:05.441802564+00:00 stderr F I1212 16:19:05.441779 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/fc" 2025-12-12T16:19:05.441802564+00:00 stderr F I1212 16:19:05.441788 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" 2025-12-12T16:19:05.441824775+00:00 stderr F I1212 16:19:05.441814 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/csi" 2025-12-12T16:19:05.441880416+00:00 stderr F I1212 16:19:05.441831 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" 2025-12-12T16:19:05.442050620+00:00 stderr F I1212 16:19:05.441985 1 controllermanager.go:827] "Started controller" controller="selinux-warning-controller" 2025-12-12T16:19:05.442050620+00:00 stderr F I1212 16:19:05.442002 1 controllermanager.go:796] "Starting controller" controller="garbage-collector-controller" 2025-12-12T16:19:05.443134557+00:00 stderr F I1212 16:19:05.442234 1 selinux_warning_controller.go:348] "Starting SELinux warning controller" logger="selinux-warning-controller" 2025-12-12T16:19:05.443134557+00:00 stderr F I1212 16:19:05.442273 1 shared_informer.go:350] "Waiting for caches to sync" controller="selinux_warning" 2025-12-12T16:19:05.449428323+00:00 stderr F I1212 16:19:05.449364 1 controllermanager.go:827] "Started controller" controller="garbage-collector-controller" 2025-12-12T16:19:05.449428323+00:00 stderr F I1212 16:19:05.449378 1 garbagecollector.go:144] "Starting controller" logger="garbage-collector-controller" controller="garbagecollector" 2025-12-12T16:19:05.449428323+00:00 stderr F I1212 16:19:05.449408 1 shared_informer.go:350] "Waiting 
for caches to sync" controller="garbage collector" 2025-12-12T16:19:05.449455743+00:00 stderr F I1212 16:19:05.449392 1 controllermanager.go:796] "Starting controller" controller="endpoints-controller" 2025-12-12T16:19:05.449455743+00:00 stderr F I1212 16:19:05.449440 1 graph_builder.go:351] "Running" logger="garbage-collector-controller" component="GraphBuilder" 2025-12-12T16:19:05.451544745+00:00 stderr F I1212 16:19:05.451482 1 controllermanager.go:827] "Started controller" controller="endpoints-controller" 2025-12-12T16:19:05.451544745+00:00 stderr F I1212 16:19:05.451510 1 controllermanager.go:796] "Starting controller" controller="endpointslice-mirroring-controller" 2025-12-12T16:19:05.451667128+00:00 stderr F I1212 16:19:05.451618 1 endpoints_controller.go:187] "Starting endpoint controller" logger="endpoints-controller" 2025-12-12T16:19:05.451667128+00:00 stderr F I1212 16:19:05.451642 1 shared_informer.go:350] "Waiting for caches to sync" controller="endpoint" 2025-12-12T16:19:05.454861957+00:00 stderr F I1212 16:19:05.454818 1 controllermanager.go:827] "Started controller" controller="endpointslice-mirroring-controller" 2025-12-12T16:19:05.454985160+00:00 stderr F I1212 16:19:05.454951 1 endpointslicemirroring_controller.go:227] "Starting EndpointSliceMirroring controller" logger="endpointslice-mirroring-controller" 2025-12-12T16:19:05.454985160+00:00 stderr F I1212 16:19:05.454969 1 shared_informer.go:350] "Waiting for caches to sync" controller="endpoint_slice_mirroring" 2025-12-12T16:19:05.466731000+00:00 stderr F I1212 16:19:05.466657 1 shared_informer.go:350] "Waiting for caches to sync" controller="resource quota" 2025-12-12T16:19:05.467884559+00:00 stderr F I1212 16:19:05.467807 1 reflector.go:430] "Caches populated" type="*v1.LimitRange" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.467884559+00:00 stderr F I1212 16:19:05.467830 1 reflector.go:430] "Caches populated" type="*v1.PodTemplate" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.468005392+00:00 stderr F I1212 16:19:05.467960 1 reflector.go:430] "Caches populated" type="*v1.PersistentVolumeClaim" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.468310439+00:00 stderr F I1212 16:19:05.468214 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.468874283+00:00 stderr F I1212 16:19:05.468807 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.469052578+00:00 stderr F I1212 16:19:05.469008 1 reflector.go:430] "Caches populated" type="*v1.Ingress" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.469134530+00:00 stderr F I1212 16:19:05.469027 1 reflector.go:430] "Caches populated" type="*v1.CSIDriver" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.469161930+00:00 stderr F I1212 16:19:05.469005 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.469350845+00:00 stderr F I1212 16:19:05.469166 1 reflector.go:430] "Caches populated" type="*v1.CSINode" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.469418307+00:00 stderr F I1212 16:19:05.469322 1 reflector.go:430] "Caches populated" type="*v1.VolumeAttachment" 
reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.469418307+00:00 stderr F I1212 16:19:05.469391 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.469568141+00:00 stderr F I1212 16:19:05.469530 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.469581221+00:00 stderr F I1212 16:19:05.469390 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.469610162+00:00 stderr F I1212 16:19:05.469585 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.469714324+00:00 stderr F I1212 16:19:05.469671 1 reflector.go:430] "Caches populated" type="*v1.RoleBindingRestriction" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/authorization/informers/externalversions/factory.go:125" 2025-12-12T16:19:05.469784686+00:00 stderr F I1212 16:19:05.469541 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.469860098+00:00 stderr F I1212 16:19:05.469829 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.469901509+00:00 stderr F I1212 16:19:05.469881 1 reflector.go:430] "Caches populated" type="*v1.DeploymentConfig" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/apps/informers/externalversions/factory.go:125" 2025-12-12T16:19:05.469979381+00:00 stderr F I1212 16:19:05.469955 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.470581406+00:00 stderr F I1212 16:19:05.470556 1 reflector.go:430] "Caches populated" type="*v1.PodDisruptionBudget" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.471843437+00:00 stderr F I1212 16:19:05.471739 1 reflector.go:430] "Caches populated" type="*v1.TemplateInstance" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/template/informers/externalversions/factory.go:125" 2025-12-12T16:19:05.474629816+00:00 stderr F I1212 16:19:05.471917 1 reflector.go:430] "Caches populated" type="*v1.Build" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/build/informers/externalversions/factory.go:125" 2025-12-12T16:19:05.474629816+00:00 stderr F I1212 16:19:05.472309 1 reflector.go:430] "Caches populated" type="*v1.CSIStorageCapacity" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.474629816+00:00 stderr F I1212 16:19:05.473221 1 reflector.go:430] "Caches populated" type="*v1.Route" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/route/informers/externalversions/factory.go:125" 2025-12-12T16:19:05.474629816+00:00 stderr F I1212 16:19:05.473742 1 reflector.go:430] "Caches populated" type="*v1.NetworkPolicy" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.474629816+00:00 stderr F I1212 16:19:05.473763 1 reflector.go:430] "Caches populated" type="*v1.Job" 
reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.474629816+00:00 stderr F I1212 16:19:05.473961 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-machine-config-operator/machine-config-nodes-crd-cleanup-29367829" delay="0s" 2025-12-12T16:19:05.474629816+00:00 stderr F I1212 16:19:05.474003 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29369355" delay="0s" 2025-12-12T16:19:05.474629816+00:00 stderr F I1212 16:19:05.474012 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29369370" delay="0s" 2025-12-12T16:19:05.474629816+00:00 stderr F I1212 16:19:05.474240 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="0s" 2025-12-12T16:19:05.474629816+00:00 stderr F I1212 16:19:05.474311 1 reflector.go:430] "Caches populated" type="*v1.CronJob" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.474629816+00:00 stderr F I1212 16:19:05.474482 1 reflector.go:430] "Caches populated" type="*v1.IPAddress" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.474747859+00:00 stderr F I1212 16:19:05.474716 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.475512758+00:00 stderr F I1212 16:19:05.475488 1 reflector.go:430] "Caches populated" type="*v1.StorageClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.475693002+00:00 stderr F I1212 16:19:05.475661 1 reflector.go:430] "Caches populated" type="*v1.Lease" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.476291597+00:00 stderr F I1212 16:19:05.476247 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.477736593+00:00 stderr F I1212 16:19:05.477690 1 actual_state_of_world.go:541] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"crc\" does not exist" 2025-12-12T16:19:05.478210384+00:00 stderr F I1212 16:19:05.478163 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.479000704+00:00 stderr F I1212 16:19:05.478954 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.479334172+00:00 stderr F I1212 16:19:05.479311 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.480697086+00:00 stderr F I1212 16:19:05.480641 1 reflector.go:430] "Caches populated" type="*v1.EndpointSlice" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.483952636+00:00 stderr F I1212 16:19:05.483907 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.484110380+00:00 stderr F I1212 16:19:05.484089 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.484324575+00:00 stderr 
F I1212 16:19:05.484303 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.484485489+00:00 stderr F I1212 16:19:05.484465 1 reflector.go:430] "Caches populated" type="*v1.ServiceCIDR" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.484971801+00:00 stderr P I1212 16:19:05.484815 1 garbagecollector.go:203] "syncing garbage collector with updated resources from discovery" logger="garbage-collector-controller" diff="added: [/v1, Resource=configmaps /v1, Resource=endpoints /v1, Resource=events /v1, Resource=limitranges /v1, Resource=namespaces /v1, Resource=nodes /v1, Resource=persistentvolumeclaims /v1, Resource=persistentvolumes /v1, Resource=pods /v1, Resource=podtemplates /v1, Resource=replicationcontrollers /v1, Resource=resourcequotas /v1, Resource=secrets /v1, Resource=serviceaccounts /v1, Resource=services admissionregistration.k8s.io/v1, Resource=mutatingwebhookconfigurations admissionregistration.k8s.io/v1, Resource=validatingadmissionpolicies admissionregistration.k8s.io/v1, Resource=validatingadmissionpolicybindings admissionregistration.k8s.io/v1, Resource=validatingwebhookconfigurations apiextensions.k8s.io/v1, Resource=customresourcedefinitions apiregistration.k8s.io/v1, Resource=apiservices apiserver.openshift.io/v1, Resource=apirequestcounts apps.openshift.io/v1, Resource=deploymentconfigs apps/v1, Resource=controllerrevisions apps/v1, Resource=daemonsets apps/v1, Resource=deployments apps/v1, Resource=replicasets apps/v1, Resource=statefulsets authorization.openshift.io/v1, Resource=rolebindingrestrictions autoscaling.openshift.io/v1, Resource=clusterautoscalers autoscaling.openshift.io/v1beta1, Resource=machineautoscalers autoscaling/v2, Resource=horizontalpodautoscalers batch/v1, Resource=cronjobs batch/v1, Resource=jobs build.openshift.io/v1, Resource=buildconfigs build.openshift.io/v1, Resource=builds certificates.k8s.io/v1, Resource=certificatesigningrequests config.openshift.io/v1, Resource=apiservers config.openshift.io/v1, Resource=authentications config.openshift.io/v1, Resource=builds config.openshift.io/v1, Resource=clusterimagepolicies config.openshift.io/v1, Resource=clusteroperators config.openshift.io/v1, Resource=clusterversions config.openshift.io/v1, Resource=consoles config.openshift.io/v1, Resource=dnses config.openshift.io/v1, Resource=featuregates config.openshift.io/v1, Resource=imagecontentpolicies config.openshift.io/v1, Resource=imagedigestmirrorsets config.openshift.io/v1, Resource=imagepolicies config.openshift.io/v1, Resource=images config.openshift.io/v1, Resource=imagetagmirrorsets config.openshift.io/v1, Resource=infrastructures config.openshift.io/v1, Resource=ingresses config.openshift.io/v1, Resource=networks config.openshift.io/v1, Resource=nodes config.openshift.io/v1, Resource=oauths config.openshift.io/v1, Resource=operatorhubs config.openshift.io/v1, Resource=projects config.openshift.io/v1, Resource=proxies config.openshift.io/v1, Resource=schedulers console.openshift.io/v1, Resource=consoleclidownloads console.openshift.io/v1, Resource=consoleexternalloglinks console.openshift.io/v1, Resource=consolelinks console.openshift.io/v1, Resource=consolenotifications console.openshift.io/v1, Resource=consoleplugins console.openshift.io/v1, Resource=consolequickstarts console.openshift.io/v1, Resource=consolesamples console.openshift.io/v1, Resource=consoleyamlsamples 
controlplane.operator.openshift.io/v1alpha1, Resource=podnetworkconnectivitychecks coordination.k8s.io/v1, Resource=leases discovery.k8s.io/v1, Resource=endpointslices events.k8s.io/v1, Resource=events flowcontrol.apiserver.k8s.io/v1, Resource=flowschemas flowcontrol.apiserver.k8s.io/v1, Resource=prioritylevelconfigurations gateway.networking.k8s.io/v1, Resource=gatewayclasses gateway.networking.k8s.io/v1, Resource=gateways gateway.networking.k8s.io/v1, Resource=grpcroutes gateway.networking.k8s.io/v1, Resource=httproutes gateway.networking.k8s.io/v1beta1, Resource=referencegrants helm.openshift.io/v1beta1, Resource=helmchartrepositories helm.openshift.io/v1beta1, Resource=projecthelmchartrepositories image.openshift.io/v1, Resource=images image.openshift.io/v1, Resource=imagestreams imageregistry.operator.openshift.io/v1, Resource=configs imageregistry.operator.openshift.io/v1, Resource=imagepruners infrastructure.cluster.x-k8s.io/v1beta1, Resource=metal3remediations infrastructure.cluster.x-k8s.io/v1beta1, Resource=metal3remediationtemplates ingress.operator.openshift.io/v1, Resource=dnsrecords ipam.cluster.x-k8s.io/v1beta1, Resource=ipaddressclaims ipam.cluster.x-k8s.io/v1beta1, Resource=ipaddresses k8s.cni.cncf.io/v1, Resource=network-attachment-definitions k8s.cni.cncf.io/v1alpha1, Resource=ipamclaims k8s.ovn.org/v1, Resource=adminpolicybasedexternalroutes k8s.ovn.org/v1, Resource=clusteruserdefinednetworks k8s.ovn.org/v1, Resource=egressfirewalls k8s.ovn.org/v1, Resource=egressips k8s.ovn.org/v1, Resource=egressqoses k8s.ovn.org/v1, Resource=egressservices k8s.ovn.org/v1, Resource=userdefinednetworks machine.openshift.io/v1, Resource=controlplanemachinesets machine.openshift.io/v1beta1, Resource=machinehealthchecks machine.openshift.io/v1beta1, Resource=machines machine.openshift.io/v1beta1, Resource=machinesets machineconfiguration.openshift.io/v1, Resource=containerruntimeconfigs machineconfiguration.openshift.io/v1, Resource=controllerconfigs machineconfiguration.openshift.io/v1, Resource=kubeletconfigs machineconfiguration.openshift.io/v1, Resource=machineconfignodes machineconfiguration.openshift.io/v1, Resource=machineconfigpools machineconfiguration.openshift.io/v1, Resource=machineconfigs machineconfiguration.openshift.io/v1, Resource=machineosbuilds machineconfiguration.openshift.io/v1, Resource=machineosconfigs machineconfiguration.openshift.io/v1, Resource=pinnedimagesets migration.k8s.io/v1alpha1, Resource=storagestates migration.k8s.io/v1alpha1, Resource=storageversionmigrations monitoring.coreos.com/v1, Resource=alertmanagers monitoring.coreos.com/v1, Resource=podmonitors monitoring.coreos.com/v1, Resource=probes monitoring.coreos.com/v1, Resource=prometheuses monitoring.coreos.com/v1, Resource=prometheusrules monitoring.coreos.com/v1, Resource=servicemonitors monitoring.coreos.com/v1, Resource=thanosrulers monitoring.coreos.com/v1beta1, Resource=alertmanagerconfigs monitoring.openshift.io/v1, Resource=alertingrules monitoring.openshift.io/v1, Resource=alertrelabelconfigs network.operator.openshift.io/v1, Resource=egressrouters network.operator.openshift.io/v1, Resource=operatorpkis networking.k8s.io/v1, Resource=ingressclasses networking.k8s.io/v1, Resource=ingresses networking.k8s.io/v1, Resource=ipaddresses networking.k8s.io/v1, Resource=networkpolicies networking.k8s.io/v1, Resource=servicecidrs node.k8s.io/v1, Resource=runtimeclasses oauth.openshift.io/v1, Resource=oauthaccesstokens oauth.openshift.io/v1, Resource=oauthauthorizetokens oauth.openshift.io/v1, 
Resource=oauthclientauthorizations oauth.openshift.io/v1, Resource=oauthclients oauth.openshift.io/v1, Resource=useroauthaccesstokens operator.openshift.io/v1, Resource=authentications operator.openshift.io/v1, Resource=clustercsidrivers operator.openshift.io/v1, Resource=configs operator.openshift.io/v1, Resource=consoles operator.openshift.io/v1, Resource=csisnapshotcontrollers operator.openshift.io/v1, Resource=dnses operator.openshift.io/v1, Resource=etcds operator.openshift.io/v1, Resource=ingresscontrollers operator.openshift.io/v1, Resource=kubeapiservers operator.openshift.io/v1, Resource=kubecontrollermanagers operator.openshift.io/v1, Resource=kubeschedulers operator.openshift.io/v1, Resource=kubestorageversionmigrators operator.openshift.io/v1, Resource=machineconfigurations operator.openshift.io/v1, Resource=networks operator.openshift.io/v1, Resource=openshiftapiservers operator.openshift.io/v1, Resource=openshiftcontrollermanagers operator.openshift.io/v1, Resource=servicecas operator.openshift.io/v1, Resource=storages operator.openshift.io/v1alpha1, Resource=imagecontentsourcepolicies operators.coreos.com/v1, Resource=olmconfigs operators.coreos.com/v1, Resource=operatorgroups operators.coreos.com/v1, Resource=operators operators.coreos.com/v1alpha1, Resource=catalogsources operators.coreos.com/v1alpha1, Resource=clusterserviceversions operators.coreos.com/v1alpha1, Resource=installplans operators.coreos.com/v1alpha1, Resource=subscriptions operators.coreos.com/v2, Resource=operato 2025-12-12T16:19:05.484998952+00:00 stderr F rconditions policy.networking.k8s.io/v1alpha1, Resource=adminnetworkpolicies policy.networking.k8s.io/v1alpha1, Resource=baselineadminnetworkpolicies policy/v1, Resource=poddisruptionbudgets project.openshift.io/v1, Resource=projects quota.openshift.io/v1, Resource=clusterresourcequotas rbac.authorization.k8s.io/v1, Resource=clusterrolebindings rbac.authorization.k8s.io/v1, Resource=clusterroles rbac.authorization.k8s.io/v1, Resource=rolebindings rbac.authorization.k8s.io/v1, Resource=roles route.openshift.io/v1, Resource=routes samples.operator.openshift.io/v1, Resource=configs scheduling.k8s.io/v1, Resource=priorityclasses security.internal.openshift.io/v1, Resource=rangeallocations security.openshift.io/v1, Resource=rangeallocations security.openshift.io/v1, Resource=securitycontextconstraints storage.k8s.io/v1, Resource=csidrivers storage.k8s.io/v1, Resource=csinodes storage.k8s.io/v1, Resource=csistoragecapacities storage.k8s.io/v1, Resource=storageclasses storage.k8s.io/v1, Resource=volumeattachments template.openshift.io/v1, Resource=brokertemplateinstances template.openshift.io/v1, Resource=templateinstances template.openshift.io/v1, Resource=templates user.openshift.io/v1, Resource=groups user.openshift.io/v1, Resource=identities user.openshift.io/v1, Resource=users whereabouts.cni.cncf.io/v1alpha1, Resource=ippools whereabouts.cni.cncf.io/v1alpha1, Resource=nodeslicepools whereabouts.cni.cncf.io/v1alpha1, Resource=overlappingrangeipreservations], removed: []" 2025-12-12T16:19:05.485084134+00:00 stderr F I1212 16:19:05.485058 1 reflector.go:430] "Caches populated" type="*v1.ResourceQuota" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.485211777+00:00 stderr F I1212 16:19:05.485148 1 reflector.go:430] "Caches populated" type="*v1.StatefulSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.485247978+00:00 stderr F I1212 16:19:05.485219 1 reflector.go:430] "Caches populated" 
type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.485307880+00:00 stderr F I1212 16:19:05.485270 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.485354541+00:00 stderr F I1212 16:19:05.485334 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.485399722+00:00 stderr F I1212 16:19:05.485376 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.485429713+00:00 stderr F I1212 16:19:05.485408 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.485489944+00:00 stderr F I1212 16:19:05.485464 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.485554526+00:00 stderr F I1212 16:19:05.485361 1 reflector.go:430] "Caches populated" type="*v2.HorizontalPodAutoscaler" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.485709700+00:00 stderr F I1212 16:19:05.485672 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.485787592+00:00 stderr F I1212 16:19:05.485409 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.485888604+00:00 stderr F I1212 16:19:05.485867 1 reflector.go:430] "Caches populated" type="*v1.PersistentVolume" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.485945575+00:00 stderr F I1212 16:19:05.485922 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.485945575+00:00 stderr F I1212 16:19:05.485931 1 reflector.go:430] "Caches populated" type="*v1.ReplicationController" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.486061628+00:00 stderr F I1212 16:19:05.486046 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.486328325+00:00 stderr F I1212 16:19:05.486292 1 reflector.go:430] "Caches populated" type="*v1.ValidatingAdmissionPolicy" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.486442558+00:00 stderr F I1212 16:19:05.485869 1 reflector.go:430] "Caches populated" type="*v1.BuildConfig" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/build/informers/externalversions/factory.go:125" 2025-12-12T16:19:05.489701448+00:00 stderr F I1212 16:19:05.489632 1 shared_informer.go:357] "Caches are synced" controller="namespace" 2025-12-12T16:19:05.495148613+00:00 stderr F I1212 16:19:05.495066 1 reflector.go:430] "Caches populated" type="*v1.Template" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/template/informers/externalversions/factory.go:125" 2025-12-12T16:19:05.495613915+00:00 stderr F I1212 16:19:05.495580 1 shared_informer.go:357] "Caches are synced" controller="expand" 2025-12-12T16:19:05.498238459+00:00 
stderr F I1212 16:19:05.498164 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.499631704+00:00 stderr F I1212 16:19:05.499513 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.502146336+00:00 stderr F I1212 16:19:05.501734 1 reflector.go:430] "Caches populated" type="*v1.ClusterRole" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.502146336+00:00 stderr F I1212 16:19:05.501866 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.503843808+00:00 stderr F I1212 16:19:05.503754 1 reflector.go:430] "Caches populated" type="*v1.ControllerRevision" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.504093494+00:00 stderr F I1212 16:19:05.504036 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.509296853+00:00 stderr F I1212 16:19:05.509252 1 shared_informer.go:357] "Caches are synced" controller="PV protection" 2025-12-12T16:19:05.517710371+00:00 stderr F I1212 16:19:05.517626 1 shared_informer.go:357] "Caches are synced" controller="service account" 2025-12-12T16:19:05.518537321+00:00 stderr F I1212 16:19:05.518509 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.518848629+00:00 stderr F I1212 16:19:05.518822 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.519117106+00:00 stderr F I1212 16:19:05.519090 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.519425133+00:00 stderr F I1212 16:19:05.519391 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.519592757+00:00 stderr F I1212 16:19:05.519564 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.519950936+00:00 stderr F I1212 16:19:05.519925 1 shared_informer.go:357] "Caches are synced" controller="service-cidr-controller" 2025-12-12T16:19:05.520598072+00:00 stderr F I1212 16:19:05.520564 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.520982092+00:00 stderr F I1212 16:19:05.520938 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.520982092+00:00 stderr F I1212 16:19:05.520967 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.521187717+00:00 stderr F I1212 16:19:05.521108 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.521301880+00:00 stderr F I1212 16:19:05.521277 1 
reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.521352611+00:00 stderr F I1212 16:19:05.520970 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.525316699+00:00 stderr F I1212 16:19:05.525272 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.525989365+00:00 stderr F I1212 16:19:05.525939 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.526418176+00:00 stderr F I1212 16:19:05.526232 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.526728864+00:00 stderr F I1212 16:19:05.526685 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.527271807+00:00 stderr F I1212 16:19:05.527170 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.527529924+00:00 stderr F I1212 16:19:05.527435 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.527529924+00:00 stderr F I1212 16:19:05.527509 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.527735079+00:00 stderr F I1212 16:19:05.527692 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.528490357+00:00 stderr F I1212 16:19:05.528435 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.528729573+00:00 stderr F I1212 16:19:05.528700 1 reflector.go:430] "Caches populated" type="*v1.CertificateSigningRequest" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.533068481+00:00 stderr F I1212 16:19:05.533000 1 reflector.go:430] "Caches populated" type="*v1.ImageStream" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/image/informers/externalversions/factory.go:125" 2025-12-12T16:19:05.533068481+00:00 stderr F I1212 16:19:05.533046 1 shared_informer.go:357] "Caches are synced" controller="ClusterRoleAggregator" 2025-12-12T16:19:05.533372028+00:00 stderr F I1212 16:19:05.533342 1 reflector.go:430] "Caches populated" type="*v1.DaemonSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.534415404+00:00 stderr F I1212 16:19:05.534381 1 shared_informer.go:357] "Caches are synced" controller="certificate-csrsigning-kubelet-serving" 2025-12-12T16:19:05.534565108+00:00 stderr F I1212 16:19:05.534544 1 shared_informer.go:357] "Caches are synced" controller="cronjob" 2025-12-12T16:19:05.537639573+00:00 stderr F I1212 16:19:05.537545 1 shared_informer.go:357] "Caches are synced" controller="TTL after finished" 2025-12-12T16:19:05.539750156+00:00 stderr F I1212 16:19:05.539712 1 reflector.go:430] "Caches populated" type="*v1.Pod" 
reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.541290924+00:00 stderr F I1212 16:19:05.541243 1 reflector.go:430] "Caches populated" type="*v1.ReplicaSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.554596293+00:00 stderr F I1212 16:19:05.554529 1 shared_informer.go:357] "Caches are synced" controller="validatingadmissionpolicy-status" 2025-12-12T16:19:05.556323125+00:00 stderr F I1212 16:19:05.556280 1 shared_informer.go:357] "Caches are synced" controller="endpoint_slice_mirroring" 2025-12-12T16:19:05.556385357+00:00 stderr F I1212 16:19:05.556307 1 endpointslicemirroring_controller.go:234] "Starting worker threads" logger="endpointslice-mirroring-controller" total=5 2025-12-12T16:19:05.562453307+00:00 stderr F I1212 16:19:05.562370 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:19:05.563974395+00:00 stderr F I1212 16:19:05.563943 1 shared_informer.go:357] "Caches are synced" controller="taint" 2025-12-12T16:19:05.564071357+00:00 stderr F I1212 16:19:05.564043 1 node_lifecycle_controller.go:675] "Controller observed a new Node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:19:05.564129808+00:00 stderr F I1212 16:19:05.564102 1 controller_utils.go:173] "Recording event message for node" logger="node-lifecycle-controller" event="Registered Node crc in Controller" node="crc" 2025-12-12T16:19:05.564392075+00:00 stderr F I1212 16:19:05.564361 1 node_lifecycle_controller.go:1221] "Initializing eviction metric for zone" logger="node-lifecycle-controller" zone="" 2025-12-12T16:19:05.564487967+00:00 stderr F I1212 16:19:05.564447 1 node_lifecycle_controller.go:873] "Missing timestamp for Node. 
Assuming now as a timestamp" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:19:05.564557619+00:00 stderr F I1212 16:19:05.564508 1 node_lifecycle_controller.go:1067] "Controller detected that zone is now in new state" logger="node-lifecycle-controller" zone="" newState="Normal" 2025-12-12T16:19:05.567053091+00:00 stderr F I1212 16:19:05.566998 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[v1/Node, namespace: openshift-network-diagnostics, name: crc, uid: 23216ff3-032e-49af-af7e-1d23d5907b59]" observed="[v1/Node, namespace: , name: crc, uid: 23216ff3-032e-49af-af7e-1d23d5907b59]" 2025-12-12T16:19:05.569051990+00:00 stderr F I1212 16:19:05.569011 1 shared_informer.go:357] "Caches are synced" controller="persistent volume" 2025-12-12T16:19:05.571513921+00:00 stderr F I1212 16:19:05.571468 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.576031673+00:00 stderr F I1212 16:19:05.575980 1 shared_informer.go:357] "Caches are synced" controller="attach detach" 2025-12-12T16:19:05.579129659+00:00 stderr F I1212 16:19:05.579084 1 shared_informer.go:350] "Waiting for caches to sync" controller="garbage collector" 2025-12-12T16:19:05.579957760+00:00 stderr F I1212 16:19:05.579921 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.580039102+00:00 stderr F I1212 16:19:05.580010 1 shared_informer.go:357] "Caches are synced" controller="job" 2025-12-12T16:19:05.580529744+00:00 stderr F I1212 16:19:05.580482 1 reflector.go:430] "Caches populated" type="*v1.MutatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.581032266+00:00 stderr F I1212 16:19:05.580993 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.581125649+00:00 stderr F I1212 16:19:05.581078 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[config.openshift.io/v1/ClusterVersion, namespace: openshift-machine-config-operator, name: version, uid: 81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503]" observed="[config.openshift.io/v1/ClusterVersion, namespace: , name: version, uid: 81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503]" 2025-12-12T16:19:05.581201521+00:00 stderr F I1212 16:19:05.581153 1 reflector.go:430] "Caches populated" type="*v1.IngressClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.581247432+00:00 stderr F I1212 16:19:05.581217 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.581328444+00:00 stderr F I1212 16:19:05.581299 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.581356784+00:00 stderr F I1212 16:19:05.581334 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.581519588+00:00 stderr F I1212 16:19:05.581494 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.581665902+00:00 stderr F 
I1212 16:19:05.581637 1 reflector.go:430] "Caches populated" type="*v1.PriorityClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.581741154+00:00 stderr F I1212 16:19:05.580998 1 reflector.go:430] "Caches populated" type="*v1.ValidatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.581838226+00:00 stderr F I1212 16:19:05.581808 1 reflector.go:430] "Caches populated" type="*v1.ValidatingAdmissionPolicyBinding" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.581945279+00:00 stderr F I1212 16:19:05.581918 1 reflector.go:430] "Caches populated" type="*v1.ClusterResourceQuota" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/quota/informers/externalversions/factory.go:125" 2025-12-12T16:19:05.582107453+00:00 stderr F I1212 16:19:05.582018 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.582318768+00:00 stderr F I1212 16:19:05.582285 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.583063297+00:00 stderr F I1212 16:19:05.583031 1 shared_informer.go:357] "Caches are synced" controller="GC" 2025-12-12T16:19:05.583539608+00:00 stderr F I1212 16:19:05.583507 1 reflector.go:430] "Caches populated" type="*v1.FlowSchema" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.584443701+00:00 stderr F I1212 16:19:05.584391 1 reflector.go:430] "Caches populated" type="*v1.RuntimeClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.585448245+00:00 stderr F I1212 16:19:05.585419 1 reflector.go:430] "Caches populated" type="*v1.ClusterRoleBinding" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.589084095+00:00 stderr F I1212 16:19:05.589030 1 reflector.go:430] "Caches populated" type="*v1.PriorityLevelConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:05.589652979+00:00 stderr F I1212 16:19:05.589615 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.589755082+00:00 stderr F I1212 16:19:05.589730 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.589861115+00:00 stderr F I1212 16:19:05.589832 1 reflector.go:430] "Caches populated" type="*v1.BrokerTemplateInstance" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/template/informers/externalversions/factory.go:125" 2025-12-12T16:19:05.592272744+00:00 stderr F I1212 16:19:05.592244 1 reflector.go:430] "Caches populated" type="*v1.SecurityContextConstraints" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/security/informers/externalversions/factory.go:125" 2025-12-12T16:19:05.592549091+00:00 stderr F I1212 16:19:05.592528 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.593286259+00:00 stderr F I1212 16:19:05.593246 1 reflector.go:430] "Caches populated" type="*v1.UserOAuthAccessToken" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/oauth/informers/externalversions/factory.go:125" 
2025-12-12T16:19:05.593659149+00:00 stderr F I1212 16:19:05.593623 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.593867934+00:00 stderr F I1212 16:19:05.593836 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.593942926+00:00 stderr F I1212 16:19:05.593925 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.594039858+00:00 stderr F I1212 16:19:05.594007 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.594456708+00:00 stderr F I1212 16:19:05.594422 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.594494499+00:00 stderr F I1212 16:19:05.594430 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.594774536+00:00 stderr F I1212 16:19:05.594722 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[operator.openshift.io/v1/Network, namespace: openshift-host-network, name: cluster, uid: d56acc66-d25c-4e5c-aa52-5418dd270c94]" observed="[operator.openshift.io/v1/Network, namespace: , name: cluster, uid: d56acc66-d25c-4e5c-aa52-5418dd270c94]" 2025-12-12T16:19:05.594806217+00:00 stderr F I1212 16:19:05.594745 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.595024812+00:00 stderr F I1212 16:19:05.595005 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.595431962+00:00 stderr F I1212 16:19:05.595410 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.595705529+00:00 stderr F I1212 16:19:05.595685 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.595749800+00:00 stderr F I1212 16:19:05.595719 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.596104209+00:00 stderr F I1212 16:19:05.596078 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.596333805+00:00 stderr F I1212 16:19:05.596296 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.596433337+00:00 stderr F I1212 16:19:05.596414 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.596508149+00:00 stderr F I1212 16:19:05.596469 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" 
reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.596583481+00:00 stderr F I1212 16:19:05.596553 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.596769025+00:00 stderr F I1212 16:19:05.596707 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[machineconfiguration.openshift.io/v1/MachineConfigPool, namespace: openshift-machine-api, name: master, uid: 3b9df6d6-bacd-4862-b99f-10ec7fcf29ac]" observed="[machineconfiguration.openshift.io/v1/MachineConfigPool, namespace: , name: master, uid: 3b9df6d6-bacd-4862-b99f-10ec7fcf29ac]" 2025-12-12T16:19:05.596769025+00:00 stderr F I1212 16:19:05.596744 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[machineconfiguration.openshift.io/v1/MachineConfigPool, namespace: openshift-machine-api, name: worker, uid: 633fcfae-03e0-4a3a-8d5c-de9a658e82f6]" observed="[machineconfiguration.openshift.io/v1/MachineConfigPool, namespace: , name: worker, uid: 633fcfae-03e0-4a3a-8d5c-de9a658e82f6]" 2025-12-12T16:19:05.596806766+00:00 stderr F I1212 16:19:05.596093 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.596852157+00:00 stderr F I1212 16:19:05.596825 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.597073133+00:00 stderr F I1212 16:19:05.597043 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.598703983+00:00 stderr F I1212 16:19:05.598662 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.598919579+00:00 stderr F I1212 16:19:05.598873 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.598919579+00:00 stderr F I1212 16:19:05.598894 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.599097153+00:00 stderr F I1212 16:19:05.599068 1 reflector.go:430] "Caches populated" type="*v1.RangeAllocation" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/security/informers/externalversions/factory.go:125" 2025-12-12T16:19:05.599271667+00:00 stderr F I1212 16:19:05.599244 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.599391430+00:00 stderr F I1212 16:19:05.599368 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.599500763+00:00 stderr F I1212 16:19:05.599249 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.599586715+00:00 stderr F I1212 16:19:05.599559 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" 
reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.599629116+00:00 stderr F I1212 16:19:05.599605 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.599671057+00:00 stderr F I1212 16:19:05.599649 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.599728289+00:00 stderr F I1212 16:19:05.599704 1 shared_informer.go:357] "Caches are synced" controller="crt configmap" 2025-12-12T16:19:05.599779830+00:00 stderr F I1212 16:19:05.599757 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.599857342+00:00 stderr F I1212 16:19:05.599833 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.599920453+00:00 stderr F I1212 16:19:05.599899 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.600022136+00:00 stderr F I1212 16:19:05.599989 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.600150529+00:00 stderr F I1212 16:19:05.600120 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.600450806+00:00 stderr F I1212 16:19:05.600405 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.600450806+00:00 stderr F I1212 16:19:05.600436 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.601245996+00:00 stderr F I1212 16:19:05.601171 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.601435421+00:00 stderr F I1212 16:19:05.601400 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.601603415+00:00 stderr F I1212 16:19:05.601577 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.601781449+00:00 stderr F I1212 16:19:05.601753 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.602448376+00:00 stderr F I1212 16:19:05.602416 1 shared_informer.go:357] "Caches are synced" controller="ReplicationController" 2025-12-12T16:19:05.602605690+00:00 stderr F I1212 16:19:05.602578 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.606439554+00:00 stderr F I1212 16:19:05.606390 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.614400821+00:00 stderr F 
I1212 16:19:05.614344 1 shared_informer.go:357] "Caches are synced" controller="ReplicaSet" 2025-12-12T16:19:05.621843275+00:00 stderr F I1212 16:19:05.621786 1 shared_informer.go:357] "Caches are synced" controller="disruption" 2025-12-12T16:19:05.621843275+00:00 stderr F I1212 16:19:05.621817 1 shared_informer.go:357] "Caches are synced" controller="resource quota" 2025-12-12T16:19:05.627172067+00:00 stderr F I1212 16:19:05.624189 1 shared_informer.go:357] "Caches are synced" controller="stateful set" 2025-12-12T16:19:05.627393562+00:00 stderr F I1212 16:19:05.627357 1 shared_informer.go:357] "Caches are synced" controller="certificate-csrapproving" 2025-12-12T16:19:05.627469784+00:00 stderr F I1212 16:19:05.627370 1 shared_informer.go:357] "Caches are synced" controller="crt configmap" 2025-12-12T16:19:05.628902370+00:00 stderr F I1212 16:19:05.627740 1 shared_informer.go:357] "Caches are synced" controller="daemon sets" 2025-12-12T16:19:05.628902370+00:00 stderr F I1212 16:19:05.627757 1 shared_informer.go:350] "Waiting for caches to sync" controller="daemon sets" 2025-12-12T16:19:05.628902370+00:00 stderr F I1212 16:19:05.627762 1 shared_informer.go:357] "Caches are synced" controller="daemon sets" 2025-12-12T16:19:05.636677102+00:00 stderr F I1212 16:19:05.636588 1 shared_informer.go:357] "Caches are synced" controller="certificate-csrsigning-kube-apiserver-client" 2025-12-12T16:19:05.636780765+00:00 stderr F I1212 16:19:05.636727 1 shared_informer.go:357] "Caches are synced" controller="certificate-csrsigning-kubelet-client" 2025-12-12T16:19:05.636830946+00:00 stderr F I1212 16:19:05.636804 1 shared_informer.go:357] "Caches are synced" controller="PVC protection" 2025-12-12T16:19:05.636846476+00:00 stderr F I1212 16:19:05.636837 1 shared_informer.go:357] "Caches are synced" controller="ephemeral" 2025-12-12T16:19:05.636992010+00:00 stderr F I1212 16:19:05.636964 1 shared_informer.go:357] "Caches are synced" controller="certificate-csrsigning-legacy-unknown" 2025-12-12T16:19:05.637146474+00:00 stderr F I1212 16:19:05.637120 1 shared_informer.go:357] "Caches are synced" controller="legacy-service-account-token-cleaner" 2025-12-12T16:19:05.638047806+00:00 stderr F I1212 16:19:05.638012 1 shared_informer.go:357] "Caches are synced" controller="taint-eviction-controller" 2025-12-12T16:19:05.638337213+00:00 stderr F I1212 16:19:05.638304 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.642850295+00:00 stderr F I1212 16:19:05.642772 1 shared_informer.go:357] "Caches are synced" controller="selinux_warning" 2025-12-12T16:19:05.644808463+00:00 stderr F I1212 16:19:05.644771 1 shared_informer.go:357] "Caches are synced" controller="deployment" 2025-12-12T16:19:05.651446567+00:00 stderr F I1212 16:19:05.651397 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.651740884+00:00 stderr F I1212 16:19:05.651718 1 shared_informer.go:357] "Caches are synced" controller="endpoint" 2025-12-12T16:19:05.657144118+00:00 stderr F I1212 16:19:05.657094 1 shared_informer.go:357] "Caches are synced" controller="endpoint_slice" 2025-12-12T16:19:05.657144118+00:00 stderr F I1212 16:19:05.657121 1 endpointslice_controller.go:288] "Starting service queue worker threads" logger="endpointslice-controller" total=5 2025-12-12T16:19:05.657172239+00:00 stderr F I1212 16:19:05.657146 1 
endpointslice_controller.go:292] "Starting topology queue worker threads" logger="endpointslice-controller" total=1 2025-12-12T16:19:05.657340543+00:00 stderr F I1212 16:19:05.657241 1 topologycache.go:253] "Insufficient node info for topology hints" logger="endpointslice-controller" totalZones=0 totalCPU="0" sufficientNodeInfo=true 2025-12-12T16:19:05.660365538+00:00 stderr F I1212 16:19:05.660309 1 shared_informer.go:357] "Caches are synced" controller="HPA" 2025-12-12T16:19:05.663967457+00:00 stderr F I1212 16:19:05.663928 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.667483844+00:00 stderr F I1212 16:19:05.667413 1 shared_informer.go:357] "Caches are synced" controller="resource quota" 2025-12-12T16:19:05.667483844+00:00 stderr F I1212 16:19:05.667450 1 resource_quota_controller.go:502] "synced quota controller" logger="resourcequota-controller" 2025-12-12T16:19:05.673613015+00:00 stderr F I1212 16:19:05.673547 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.673859341+00:00 stderr F I1212 16:19:05.673786 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[operator.openshift.io/v1/DNS, namespace: openshift-dns, name: default, uid: 0f9755ef-acf2-4bc6-a6fc-f491e28e635f]" observed="[operator.openshift.io/v1/DNS, namespace: , name: default, uid: 0f9755ef-acf2-4bc6-a6fc-f491e28e635f]" 2025-12-12T16:19:05.682921325+00:00 stderr F I1212 16:19:05.682856 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.693374824+00:00 stderr F I1212 16:19:05.693307 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.708745324+00:00 stderr F I1212 16:19:05.708664 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.717334336+00:00 stderr F I1212 16:19:05.717292 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.742911708+00:00 stderr F I1212 16:19:05.742861 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.761764214+00:00 stderr F I1212 16:19:05.761705 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.783151633+00:00 stderr F I1212 16:19:05.783086 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.784371573+00:00 stderr F I1212 16:19:05.784352 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.793842108+00:00 stderr F I1212 16:19:05.793774 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.801868406+00:00 stderr F I1212 
16:19:05.801823 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.827663344+00:00 stderr F I1212 16:19:05.827621 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.827962231+00:00 stderr F I1212 16:19:05.827931 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[operator.openshift.io/v1/Console, namespace: openshift-console, name: cluster, uid: 72c9b389-7361-48f0-8bf6-56fe26546245]" observed="[operator.openshift.io/v1/Console, namespace: , name: cluster, uid: 72c9b389-7361-48f0-8bf6-56fe26546245]" 2025-12-12T16:19:05.845325040+00:00 stderr F I1212 16:19:05.845249 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.872675107+00:00 stderr F I1212 16:19:05.872595 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.883256858+00:00 stderr F I1212 16:19:05.883204 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.890624750+00:00 stderr F I1212 16:19:05.890590 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.900879744+00:00 stderr F I1212 16:19:05.900838 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.909865286+00:00 stderr F I1212 16:19:05.909840 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.920787136+00:00 stderr F I1212 16:19:05.920714 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.928467276+00:00 stderr F I1212 16:19:05.928421 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.939200161+00:00 stderr F I1212 16:19:05.939134 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:19:05.950305296+00:00 stderr F I1212 16:19:05.950264 1 shared_informer.go:357] "Caches are synced" controller="garbage collector" 2025-12-12T16:19:05.950363787+00:00 stderr F I1212 16:19:05.950351 1 garbagecollector.go:154] "Garbage collector: all resource monitors have synced" logger="garbage-collector-controller" 2025-12-12T16:19:05.950385638+00:00 stderr F I1212 16:19:05.950376 1 garbagecollector.go:157] "Proceeding to collect garbage" logger="garbage-collector-controller" 2025-12-12T16:19:05.950509121+00:00 stderr F I1212 16:19:05.950472 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-ovn-kubernetes, name: ovnkube-node, uid: 68675fbc-dce1-4e0f-93b3-f0fa287b1edd]" virtual=false 2025-12-12T16:19:05.950586753+00:00 stderr F I1212 16:19:05.950547 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-machine-config-operator, name: machine-config-server, uid: d1ca07c1-cb4d-45e8-b23b-37a1f3a3f651]" virtual=false 2025-12-12T16:19:05.950637834+00:00 stderr F I1212 16:19:05.950605 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-dns, name: dns-default, uid: d5a0398f-8bff-46a5-9d7c-f2f4e26b3879]" virtual=false 2025-12-12T16:19:05.950645754+00:00 stderr F I1212 16:19:05.950622 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-ingress-canary, name: ingress-canary, uid: 77896bcd-d1f7-46a2-984f-9205a544fb94]" virtual=false 2025-12-12T16:19:05.950727856+00:00 stderr F I1212 16:19:05.950566 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-network-operator, name: iptables-alerter, uid: 04364458-be97-459c-9e4e-c10e4ed7a89c]" virtual=false 2025-12-12T16:19:05.950816118+00:00 stderr F I1212 16:19:05.950788 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-machine-config-operator, name: machine-config-daemon, uid: 2fe756fc-41fb-44a6-ab78-d1fdba7d2669]" virtual=false 2025-12-12T16:19:05.950858849+00:00 stderr F I1212 16:19:05.950814 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-multus, name: multus, uid: c6bd5b69-2014-4db0-b123-1bdb423140f1]" virtual=false 2025-12-12T16:19:05.950925691+00:00 stderr F I1212 16:19:05.950533 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: hostpath-provisioner, name: csi-hostpathplugin, uid: 22984672-0d0d-46da-9df6-54a721b7b6bf]" virtual=false 2025-12-12T16:19:05.950971082+00:00 stderr F I1212 16:19:05.950943 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-dns, name: node-resolver, uid: 7a3a6da6-fe06-4125-a5c7-e5f524871af3]" virtual=false 2025-12-12T16:19:05.950994833+00:00 stderr F I1212 16:19:05.950960 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-image-registry, name: node-ca, uid: 191786f0-2ab8-4a50-a3fb-f9953399f287]" virtual=false 2025-12-12T16:19:05.951055744+00:00 stderr F I1212 16:19:05.951028 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-network-diagnostics, name: network-check-target, uid: 9914de7f-6a1d-49fb-87e3-f0d03b5893ec]" virtual=false 2025-12-12T16:19:05.951114676+00:00 stderr F I1212 16:19:05.951089 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-network-node-identity, name: network-node-identity, uid: af047b3a-df8e-4f5f-bbf5-258cdd2c977b]" virtual=false 2025-12-12T16:19:05.951141706+00:00 stderr F I1212 16:19:05.950910 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-multus, name: network-metrics-daemon, uid: 9623fe22-300e-4e9e-9241-5a539b90f3a7]" virtual=false 2025-12-12T16:19:05.951220308+00:00 stderr F I1212 16:19:05.950457 1 garbagecollector.go:501] 
"Processing item" logger="garbage-collector-controller" item="[batch/v1/CronJob, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: a64f7156-efdf-4f2f-bdb6-f498fe674093]" virtual=false 2025-12-12T16:19:05.951298410+00:00 stderr F I1212 16:19:05.950471 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-multus, name: multus-additional-cni-plugins, uid: db614dd0-5d3f-4079-bdf6-87e2d4631507]" virtual=false 2025-12-12T16:19:05.951355172+00:00 stderr F I1212 16:19:05.951329 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: 6029f3c3-1e22-47e0-b96d-b40f71acacb2]" virtual=false 2025-12-12T16:19:05.951420133+00:00 stderr F I1212 16:19:05.951396 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-operator, uid: a990d1c8-6964-4853-af7b-2ce1eec0c42d]" virtual=false 2025-12-12T16:19:05.951509396+00:00 stderr F I1212 16:19:05.951311 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-storage-version-migrator-operator, name: metrics, uid: e5bc231d-9114-40a5-a422-584788726a16]" virtual=false 2025-12-12T16:19:05.951644919+00:00 stderr F I1212 16:19:05.951295 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-controller-manager-operator, name: metrics, uid: 756dc7f9-c733-4561-89e6-0982cec51bd4]" virtual=false 2025-12-12T16:19:05.951644919+00:00 stderr F I1212 16:19:05.951272 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-operator-lifecycle-manager, name: olm-operator-metrics, uid: 5f438b20-bbb9-4020-9829-dc86fe8ca8bd]" virtual=false 2025-12-12T16:19:05.955640458+00:00 stderr F I1212 16:19:05.955571 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-image-registry, name: node-ca, uid: 191786f0-2ab8-4a50-a3fb-f9953399f287]" 2025-12-12T16:19:05.955640458+00:00 stderr F I1212 16:19:05.955613 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: redhat-operators, uid: ca744265-3ae3-4482-8c3d-b10e28fe1042]" virtual=false 2025-12-12T16:19:05.955673809+00:00 stderr F I1212 16:19:05.955653 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-machine-config-operator, name: machine-config-server, uid: d1ca07c1-cb4d-45e8-b23b-37a1f3a3f651]" 2025-12-12T16:19:05.955700189+00:00 stderr F I1212 16:19:05.955676 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-diagnostics, name: network-check-source, uid: 5ad92a4d-4ca9-422b-8abe-d013f8c3121c]" virtual=false 2025-12-12T16:19:05.958256192+00:00 stderr F I1212 16:19:05.957464 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[batch/v1/CronJob, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, 
uid: a64f7156-efdf-4f2f-bdb6-f498fe674093]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.958404496+00:00 stderr F I1212 16:19:05.958374 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ingress-operator, name: metrics, uid: 0f5c5226-4139-4bd9-a1f1-4819358f4b44]" virtual=false 2025-12-12T16:19:05.958683003+00:00 stderr F I1212 16:19:05.958065 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-ingress-canary, name: ingress-canary, uid: 77896bcd-d1f7-46a2-984f-9205a544fb94]" 2025-12-12T16:19:05.958745514+00:00 stderr F I1212 16:19:05.958728 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-multus, name: multus-admission-controller, uid: 1c31f0d5-3fc0-4cca-8214-9358f0570149]" virtual=false 2025-12-12T16:19:05.958922919+00:00 stderr F I1212 16:19:05.958867 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-machine-config-operator, name: machine-config-daemon, uid: 2fe756fc-41fb-44a6-ab78-d1fdba7d2669]" 2025-12-12T16:19:05.958936599+00:00 stderr F I1212 16:19:05.958916 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-console, name: networking-console-plugin, uid: 99d5d1b5-3679-4a1e-a5a0-7df1601fc793]" virtual=false 2025-12-12T16:19:05.959194226+00:00 stderr F I1212 16:19:05.959131 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: hostpath-provisioner, name: csi-hostpathplugin, uid: 22984672-0d0d-46da-9df6-54a721b7b6bf]" 2025-12-12T16:19:05.959194226+00:00 stderr F I1212 16:19:05.959162 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-control-plane, uid: 502d0892-a804-4fae-ad3f-cb342f49e7aa]" virtual=false 2025-12-12T16:19:05.960671642+00:00 stderr F I1212 16:19:05.960615 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: redhat-operators, uid: ca744265-3ae3-4482-8c3d-b10e28fe1042]" 2025-12-12T16:19:05.960705323+00:00 stderr F I1212 16:19:05.960668 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-operator-machine-webhook, uid: a39c524d-dab9-414c-a0c5-6c6ece7558fe]" virtual=false 2025-12-12T16:19:05.960739154+00:00 stderr F I1212 16:19:05.960685 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-ovn-kubernetes, name: ovnkube-node, uid: 68675fbc-dce1-4e0f-93b3-f0fa287b1edd]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.960765944+00:00 stderr F I1212 16:19:05.960734 1 garbagecollector.go:501] 
"Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: community-operators, uid: 88a656bd-c52a-4813-892e-7e3363ba9ac0]" virtual=false 2025-12-12T16:19:05.961396700+00:00 stderr F I1212 16:19:05.961103 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-dns, name: node-resolver, uid: 7a3a6da6-fe06-4125-a5c7-e5f524871af3]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"DNS","name":"default","uid":"0f9755ef-acf2-4bc6-a6fc-f491e28e635f","controller":true}] 2025-12-12T16:19:05.961396700+00:00 stderr F I1212 16:19:05.961147 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-monitoring, name: cluster-monitoring-operator, uid: 5ecbdbba-0aac-4ca1-9bd5-63c3dd666779]" virtual=false 2025-12-12T16:19:05.961396700+00:00 stderr F I1212 16:19:05.961218 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-network-operator, name: iptables-alerter, uid: 04364458-be97-459c-9e4e-c10e4ed7a89c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.961396700+00:00 stderr F I1212 16:19:05.961276 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 1d6e0c49-5022-4407-b7e2-606925e10c95]" virtual=false 2025-12-12T16:19:05.961396700+00:00 stderr F I1212 16:19:05.961299 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-operator-lifecycle-manager, name: olm-operator-metrics, uid: 5f438b20-bbb9-4020-9829-dc86fe8ca8bd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.961396700+00:00 stderr F I1212 16:19:05.961323 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-console-operator, name: metrics, uid: 899e5eff-60ef-429f-9533-9c263e2d0ddb]" virtual=false 2025-12-12T16:19:05.961430311+00:00 stderr F I1212 16:19:05.961397 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-network-diagnostics, name: network-check-target, uid: 9914de7f-6a1d-49fb-87e3-f0d03b5893ec]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.961438971+00:00 stderr F I1212 16:19:05.961424 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-diagnostics, name: network-check-target, uid: 2870f12f-1c16-412d-8d85-3f66a56def0d]" virtual=false 2025-12-12T16:19:05.961607575+00:00 stderr F I1212 16:19:05.961529 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" 
item="[v1/Service, namespace: openshift-machine-api, name: machine-api-operator, uid: a990d1c8-6964-4853-af7b-2ce1eec0c42d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.961607575+00:00 stderr F I1212 16:19:05.961558 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-service-ca-operator, name: metrics, uid: fda33e4c-938e-4672-b83c-6f709efbd0d6]" virtual=false 2025-12-12T16:19:05.962620300+00:00 stderr F I1212 16:19:05.961526 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-storage-version-migrator-operator, name: metrics, uid: e5bc231d-9114-40a5-a422-584788726a16]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.962620300+00:00 stderr F I1212 16:19:05.961718 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-dns-operator, name: metrics, uid: 7038e591-b4c2-4e44-9589-0decde72039e]" virtual=false 2025-12-12T16:19:05.962620300+00:00 stderr F I1212 16:19:05.962497 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-dns, name: dns-default, uid: d5a0398f-8bff-46a5-9d7c-f2f4e26b3879]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"DNS","name":"default","uid":"0f9755ef-acf2-4bc6-a6fc-f491e28e635f","controller":true}] 2025-12-12T16:19:05.962620300+00:00 stderr F I1212 16:19:05.962539 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-operator-lifecycle-manager, name: catalog-operator-metrics, uid: c608aa3d-4067-43ca-a0d6-9d04be3b853c]" virtual=false 2025-12-12T16:19:05.963209485+00:00 stderr F I1212 16:19:05.963127 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: 6029f3c3-1e22-47e0-b96d-b40f71acacb2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.963209485+00:00 stderr F I1212 16:19:05.963153 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-operator-lifecycle-manager, name: package-server-manager-metrics, uid: 199e5ec3-0fe1-4929-8be5-eeb222b837d4]" virtual=false 2025-12-12T16:19:05.963461191+00:00 stderr F I1212 16:19:05.963397 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-controller-manager-operator, name: metrics, uid: 756dc7f9-c733-4561-89e6-0982cec51bd4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.963461191+00:00 stderr F I1212 16:19:05.963429 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: 
openshift-image-registry, name: image-registry-operator, uid: 21252561-db9f-4519-becf-9b4daef38a73]" virtual=false 2025-12-12T16:19:05.963577424+00:00 stderr F I1212 16:19:05.963528 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-multus, name: network-metrics-daemon, uid: 9623fe22-300e-4e9e-9241-5a539b90f3a7]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.963590144+00:00 stderr F I1212 16:19:05.963577 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-controllers, uid: 3304c944-7c53-445a-9f17-9e3e5f75226c]" virtual=false 2025-12-12T16:19:05.963771759+00:00 stderr F I1212 16:19:05.963637 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-multus, name: multus, uid: c6bd5b69-2014-4db0-b123-1bdb423140f1]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.963771759+00:00 stderr F I1212 16:19:05.963674 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-operator-webhook, uid: 3d8d8642-1cd0-4ece-918a-8ae8e150b269]" virtual=false 2025-12-12T16:19:05.963794409+00:00 stderr F I1212 16:19:05.963776 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-multus, name: multus-additional-cni-plugins, uid: db614dd0-5d3f-4079-bdf6-87e2d4631507]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.963882931+00:00 stderr F I1212 16:19:05.963798 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: certified-operators, uid: 04c2c69e-a9e9-447b-aff2-c2db7de0ee83]" virtual=false 2025-12-12T16:19:05.963882931+00:00 stderr F I1212 16:19:05.963850 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-network-node-identity, name: network-node-identity, uid: af047b3a-df8e-4f5f-bbf5-258cdd2c977b]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.963899702+00:00 stderr F I1212 16:19:05.963874 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-multus, name: network-metrics-service, uid: 221650ca-5ea9-450a-b2e3-01b15c386136]" virtual=false 2025-12-12T16:19:05.964951568+00:00 stderr F I1212 16:19:05.964215 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, 
namespace: openshift-marketplace, name: community-operators, uid: 88a656bd-c52a-4813-892e-7e3363ba9ac0]" 2025-12-12T16:19:05.964951568+00:00 stderr F I1212 16:19:05.964235 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-controller-manager-operator, name: metrics, uid: 4a7bd676-8b0b-4244-9e34-29c3edb8bb40]" virtual=false 2025-12-12T16:19:05.966415144+00:00 stderr F I1212 16:19:05.966341 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-control-plane, uid: 502d0892-a804-4fae-ad3f-cb342f49e7aa]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.966415144+00:00 stderr F I1212 16:19:05.966372 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-etcd-operator, name: metrics, uid: 518336b3-7008-41e5-9cec-2c1c85f2ff09]" virtual=false 2025-12-12T16:19:05.966522087+00:00 stderr F I1212 16:19:05.966477 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-diagnostics, name: network-check-source, uid: 5ad92a4d-4ca9-422b-8abe-d013f8c3121c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.966536167+00:00 stderr F I1212 16:19:05.966515 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-scheduler-operator, name: metrics, uid: 9c082bd0-69fc-4adf-bbca-de0862ba049d]" virtual=false 2025-12-12T16:19:05.966735612+00:00 stderr F I1212 16:19:05.966655 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-operator-machine-webhook, uid: a39c524d-dab9-414c-a0c5-6c6ece7558fe]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.966735612+00:00 stderr F I1212 16:19:05.966678 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-console, name: networking-console-plugin, uid: 99d5d1b5-3679-4a1e-a5a0-7df1601fc793]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.966735612+00:00 stderr F I1212 16:19:05.966696 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-node, uid: c758def5-7362-4985-b540-393f26fc97a6]" virtual=false 2025-12-12T16:19:05.966735612+00:00 stderr F I1212 16:19:05.966701 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-apiserver, name: check-endpoints, uid: d0180f4d-9eb3-45e2-8586-df212e67c7f6]" virtual=false 
2025-12-12T16:19:05.967310356+00:00 stderr F I1212 16:19:05.967269 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: certified-operators, uid: 04c2c69e-a9e9-447b-aff2-c2db7de0ee83]" 2025-12-12T16:19:05.967393808+00:00 stderr F I1212 16:19:05.967356 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-marketplace, name: marketplace-operator-metrics, uid: 94337474-19e9-47ef-a63f-a5db85f82770]" virtual=false 2025-12-12T16:19:05.968401383+00:00 stderr F I1212 16:19:05.968365 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ingress-operator, name: metrics, uid: 0f5c5226-4139-4bd9-a1f1-4819358f4b44]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.968449414+00:00 stderr F I1212 16:19:05.968431 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-apiserver-operator, name: metrics, uid: 67098b90-387a-4f7b-8a68-e484e6889ec7]" virtual=false 2025-12-12T16:19:05.970404393+00:00 stderr F I1212 16:19:05.970352 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-service-ca-operator, name: metrics, uid: fda33e4c-938e-4672-b83c-6f709efbd0d6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.970423553+00:00 stderr F I1212 16:19:05.970404 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-cluster-version, name: cluster-version-operator, uid: 3a493813-5327-4484-bc96-a108bced6093]" virtual=false 2025-12-12T16:19:05.970617658+00:00 stderr F I1212 16:19:05.970578 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-multus, name: multus-admission-controller, uid: 1c31f0d5-3fc0-4cca-8214-9358f0570149]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.970676409+00:00 stderr F I1212 16:19:05.970647 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-config-operator, name: metrics, uid: d0b70160-b97a-47c1-8814-0419134941de]" virtual=false 2025-12-12T16:19:05.970810923+00:00 stderr F I1212 16:19:05.970772 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 1d6e0c49-5022-4407-b7e2-606925e10c95]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.970831683+00:00 stderr F I1212 16:19:05.970800 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: redhat-marketplace, uid: 647ee808-5841-49cb-96be-0e8080859241]" virtual=false 2025-12-12T16:19:05.971139661+00:00 stderr F I1212 16:19:05.971114 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-controller-manager-operator, name: metrics, uid: 4a7bd676-8b0b-4244-9e34-29c3edb8bb40]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.971206263+00:00 stderr F I1212 16:19:05.971166 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ingress-canary, name: ingress-canary, uid: 0888ad56-e63e-40f9-9ab5-bdfe12ee18ef]" virtual=false 2025-12-12T16:19:05.971299935+00:00 stderr F I1212 16:19:05.971260 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-multus, name: network-metrics-service, uid: 221650ca-5ea9-450a-b2e3-01b15c386136]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.971310015+00:00 stderr F I1212 16:19:05.971298 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-ingress, name: router-default, uid: 6445a1ec-8ec2-4ec8-b191-9cc7fa235148]" virtual=false 2025-12-12T16:19:05.971404317+00:00 stderr F I1212 16:19:05.971384 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-operator-webhook, uid: 3d8d8642-1cd0-4ece-918a-8ae8e150b269]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.971441408+00:00 stderr F I1212 16:19:05.971427 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-authentication-operator, name: metrics, uid: 9d158722-c9d8-4574-9c8d-76aff39b1405]" virtual=false 2025-12-12T16:19:05.971623823+00:00 stderr F I1212 16:19:05.971603 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-image-registry, name: image-registry-operator, uid: 21252561-db9f-4519-becf-9b4daef38a73]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.971659034+00:00 stderr F I1212 16:19:05.971644 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-apiserver-operator, name: metrics, uid: c89a6cf8-b1f5-433c-a98e-6433ab2d8604]" virtual=false 2025-12-12T16:19:05.974158735+00:00 stderr F I1212 16:19:05.973903 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-diagnostics, name: 
network-check-target, uid: 2870f12f-1c16-412d-8d85-3f66a56def0d]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.974158735+00:00 stderr F I1212 16:19:05.973945 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-operator, name: metrics, uid: 62887ae0-bdbb-4eca-867b-a51f8a3fa46b]" virtual=false 2025-12-12T16:19:05.974302939+00:00 stderr F I1212 16:19:05.974248 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-dns-operator, name: metrics, uid: 7038e591-b4c2-4e44-9589-0decde72039e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.974302939+00:00 stderr F I1212 16:19:05.974282 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-cluster-samples-operator, name: metrics, uid: c016741c-0788-45bd-a328-6a4aa719a9ee]" virtual=false 2025-12-12T16:19:05.974461883+00:00 stderr F I1212 16:19:05.974391 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-etcd-operator, name: metrics, uid: 518336b3-7008-41e5-9cec-2c1c85f2ff09]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.974514424+00:00 stderr F I1212 16:19:05.974478 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: a79ef72a-9de8-4bd8-ab71-7e0b71724a57]" virtual=false 2025-12-12T16:19:05.974561045+00:00 stderr F I1212 16:19:05.974525 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-monitoring, name: cluster-monitoring-operator, uid: 5ecbdbba-0aac-4ca1-9bd5-63c3dd666779]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.974570946+00:00 stderr F I1212 16:19:05.974559 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[storage.k8s.io/v1/CSINode, namespace: , name: crc, uid: c8ff9da2-6151-45a0-92ea-435bc51180ea]" virtual=false 2025-12-12T16:19:05.974746210+00:00 stderr F I1212 16:19:05.974436 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-node, uid: c758def5-7362-4985-b540-393f26fc97a6]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.974812502+00:00 stderr F I1212 16:19:05.974785 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[route.openshift.io/v1/Route, namespace: openshift-ingress-canary, name: canary, uid: c77f72a8-1cd5-4d3c-9619-806f41a86efa]" 
virtual=false 2025-12-12T16:19:05.974887284+00:00 stderr F I1212 16:19:05.974847 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-operator-lifecycle-manager, name: package-server-manager-metrics, uid: 199e5ec3-0fe1-4929-8be5-eeb222b837d4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.974900264+00:00 stderr F I1212 16:19:05.974882 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[policy/v1/PodDisruptionBudget, namespace: openshift-operator-lifecycle-manager, name: packageserver-pdb, uid: e1546b15-314b-4240-8e11-d5e915c472c7]" virtual=false 2025-12-12T16:19:05.974976326+00:00 stderr F I1212 16:19:05.974919 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-marketplace, name: marketplace-operator-metrics, uid: 94337474-19e9-47ef-a63f-a5db85f82770]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.974987836+00:00 stderr F I1212 16:19:05.974960 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-operator-lifecycle-manager, name: catalog-operator-metrics, uid: c608aa3d-4067-43ca-a0d6-9d04be3b853c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.975023937+00:00 stderr F I1212 16:19:05.974982 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator, uid: 5effb0d2-94d8-48b7-8c69-e538f7848429]" virtual=false 2025-12-12T16:19:05.975034247+00:00 stderr F I1212 16:19:05.974989 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[machine.openshift.io/v1beta1/MachineHealthCheck, namespace: openshift-machine-api, name: machine-api-termination-handler, uid: ac1c5de0-6d0d-41cd-814f-3cd5299bedbb]" virtual=false 2025-12-12T16:19:05.975064458+00:00 stderr F I1212 16:19:05.974746 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-console-operator, name: metrics, uid: 899e5eff-60ef-429f-9533-9c263e2d0ddb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.975075178+00:00 stderr F I1212 16:19:05.975059 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-etcd-operator, name: etcd-operator, uid: 7bcc9069-5a71-4f51-8970-90dddeee56b2]" virtual=false 2025-12-12T16:19:05.975104049+00:00 stderr F I1212 16:19:05.975065 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-scheduler-operator, name: metrics, uid: 9c082bd0-69fc-4adf-bbca-de0862ba049d]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.975138450+00:00 stderr F I1212 16:19:05.975107 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-multus, name: multus-admission-controller, uid: add425b8-cb71-4a29-b746-fade1ff57eee]" virtual=false 2025-12-12T16:19:05.975148170+00:00 stderr F I1212 16:19:05.975127 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-apiserver, name: check-endpoints, uid: d0180f4d-9eb3-45e2-8586-df212e67c7f6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.975200891+00:00 stderr F I1212 16:19:05.975140 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-controllers, uid: 3304c944-7c53-445a-9f17-9e3e5f75226c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.975220412+00:00 stderr F I1212 16:19:05.975153 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-network-diagnostics, name: network-check-source, uid: e3b48335-28bd-49bf-9cf0-82069658b68a]" virtual=false 2025-12-12T16:19:05.975220412+00:00 stderr F I1212 16:19:05.975196 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-network-operator, name: network-operator, uid: 2c897060-d3cf-4d7f-8d38-ef464b7a697a]" virtual=false 2025-12-12T16:19:05.975810916+00:00 stderr F I1212 16:19:05.975757 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: redhat-marketplace, uid: 647ee808-5841-49cb-96be-0e8080859241]" 2025-12-12T16:19:05.975842307+00:00 stderr F I1212 16:19:05.975792 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 614226dc-6dfc-4b23-a9e9-54341ad46bc9]" virtual=false 2025-12-12T16:19:05.978774860+00:00 stderr F I1212 16:19:05.978349 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-ingress, name: router-default, uid: 6445a1ec-8ec2-4ec8-b191-9cc7fa235148]" 2025-12-12T16:19:05.978774860+00:00 stderr F I1212 16:19:05.978400 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-config-operator, name: openshift-config-operator, uid: dc451fc9-e781-493f-8e7d-55e9072cc784]" virtual=false 2025-12-12T16:19:05.978774860+00:00 stderr F I1212 16:19:05.978500 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-apiserver-operator, name: metrics, uid: 
67098b90-387a-4f7b-8a68-e484e6889ec7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.978774860+00:00 stderr F I1212 16:19:05.978533 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: a4c18a44-787c-4851-97ac-f3da87e8d0e3]" virtual=false 2025-12-12T16:19:05.978885772+00:00 stderr F I1212 16:19:05.978850 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-config-operator, name: metrics, uid: d0b70160-b97a-47c1-8814-0419134941de]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.978895313+00:00 stderr F I1212 16:19:05.978884 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-marketplace, name: marketplace-operator, uid: d268648d-aa1b-439b-844b-8e7f98ea08a3]" virtual=false 2025-12-12T16:19:05.981716092+00:00 stderr F I1212 16:19:05.981668 1 shared_informer.go:357] "Caches are synced" controller="garbage collector" 2025-12-12T16:19:05.981716092+00:00 stderr F I1212 16:19:05.981699 1 garbagecollector.go:235] "synced garbage collector" logger="garbage-collector-controller" 2025-12-12T16:19:05.981931108+00:00 stderr F I1212 16:19:05.981876 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-cluster-version, name: cluster-version-operator, uid: 3a493813-5327-4484-bc96-a108bced6093]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.981958588+00:00 stderr F I1212 16:19:05.981914 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-authentication-operator, name: metrics, uid: 9d158722-c9d8-4574-9c8d-76aff39b1405]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.981958588+00:00 stderr F I1212 16:19:05.981935 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 3a8705c5-b62b-40a4-8e43-30f0569fa490]" virtual=false 2025-12-12T16:19:05.981992159+00:00 stderr F I1212 16:19:05.981942 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-storage-version-migrator-operator, name: kube-storage-version-migrator-operator, uid: af746821-921a-4842-94da-28c08769612a]" virtual=false 2025-12-12T16:19:05.983908637+00:00 stderr F I1212 16:19:05.983857 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-network-diagnostics, name: network-check-source, uid: e3b48335-28bd-49bf-9cf0-82069658b68a]" 
owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.983927677+00:00 stderr F I1212 16:19:05.983897 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: olm-operator, uid: e6e8c1a2-3934-417b-9f46-0df6a0dbf8d9]" virtual=false 2025-12-12T16:19:05.984105311+00:00 stderr F I1212 16:19:05.984075 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-apiserver-operator, name: metrics, uid: c89a6cf8-b1f5-433c-a98e-6433ab2d8604]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.984157463+00:00 stderr F I1212 16:19:05.984103 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator, uid: c3ff943a-b570-4a98-8388-1f8a3280a85a]" virtual=false 2025-12-12T16:19:05.984367208+00:00 stderr F I1212 16:19:05.984209 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-multus, name: multus-admission-controller, uid: add425b8-cb71-4a29-b746-fade1ff57eee]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.984367208+00:00 stderr F I1212 16:19:05.984248 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-network-console, name: networking-console-plugin, uid: e8047e30-a40e-4ced-ae42-eea4288c975a]" virtual=false 2025-12-12T16:19:05.984811369+00:00 stderr F I1212 16:19:05.984753 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[policy/v1/PodDisruptionBudget, namespace: openshift-operator-lifecycle-manager, name: packageserver-pdb, uid: e1546b15-314b-4240-8e11-d5e915c472c7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.984874610+00:00 stderr F I1212 16:19:05.984855 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: 83079835-b3de-4de8-ad7d-f332ab909932]" virtual=false 2025-12-12T16:19:05.985014464+00:00 stderr F I1212 16:19:05.984967 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ingress-canary, name: ingress-canary, uid: 0888ad56-e63e-40f9-9ab5-bdfe12ee18ef]" owner=[{"apiVersion":"apps/v1","kind":"daemonset","name":"ingress-canary","uid":"77896bcd-d1f7-46a2-984f-9205a544fb94","controller":true}] 2025-12-12T16:19:05.985014464+00:00 stderr F I1212 16:19:05.985003 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: 
openshift-console-operator, name: console-operator, uid: 4982b9f1-eaf4-44fa-a84a-bf9954aedcb1]" virtual=false 2025-12-12T16:19:05.987900785+00:00 stderr F I1212 16:19:05.987817 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[machine.openshift.io/v1beta1/MachineHealthCheck, namespace: openshift-machine-api, name: machine-api-termination-handler, uid: ac1c5de0-6d0d-41cd-814f-3cd5299bedbb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.987900785+00:00 stderr F I1212 16:19:05.987857 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-ingress-operator, name: ingress-operator, uid: dcd260b6-d741-4056-94e9-f063ec7db58c]" virtual=false 2025-12-12T16:19:05.988126531+00:00 stderr F I1212 16:19:05.988090 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[route.openshift.io/v1/Route, namespace: openshift-ingress-canary, name: canary, uid: c77f72a8-1cd5-4d3c-9619-806f41a86efa]" owner=[{"apiVersion":"apps/v1","kind":"daemonset","name":"ingress-canary","uid":"77896bcd-d1f7-46a2-984f-9205a544fb94","controller":true}] 2025-12-12T16:19:05.988137491+00:00 stderr F I1212 16:19:05.988123 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-machine-api, name: machine-api-operator, uid: 6e3281a2-74ca-4530-b743-ae9a62edcc78]" virtual=false 2025-12-12T16:19:05.988171842+00:00 stderr F I1212 16:19:05.988121 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-etcd-operator, name: etcd-operator, uid: 7bcc9069-5a71-4f51-8970-90dddeee56b2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.988236754+00:00 stderr F I1212 16:19:05.988206 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: catalog-operator, uid: bc11b984-7cfa-489a-9f9a-5f2c0648078f]" virtual=false 2025-12-12T16:19:05.988440659+00:00 stderr F I1212 16:19:05.988402 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-cluster-samples-operator, name: metrics, uid: c016741c-0788-45bd-a328-6a4aa719a9ee]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.988458639+00:00 stderr F I1212 16:19:05.988438 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-ovn-kubernetes, name: ovnkube-control-plane, uid: 8bfd4bef-4292-4ca1-b90f-38cca09fb8f8]" virtual=false 2025-12-12T16:19:05.988695115+00:00 stderr F I1212 16:19:05.988654 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-image-registry, name: cluster-image-registry-operator, 
uid: a4c18a44-787c-4851-97ac-f3da87e8d0e3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.988706465+00:00 stderr F I1212 16:19:05.988690 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-service-ca-operator, name: service-ca-operator, uid: 1703c560-9cd5-4273-a6b7-22510bce9318]" virtual=false 2025-12-12T16:19:05.988735126+00:00 stderr F I1212 16:19:05.988669 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-config-operator, name: openshift-config-operator, uid: dc451fc9-e781-493f-8e7d-55e9072cc784]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.988777557+00:00 stderr F I1212 16:19:05.988759 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator, uid: f8199ef4-1467-44ed-9019-69c1f1737f70]" virtual=false 2025-12-12T16:19:05.988854759+00:00 stderr F I1212 16:19:05.988765 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-operator, name: metrics, uid: 62887ae0-bdbb-4eca-867b-a51f8a3fa46b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.988887640+00:00 stderr F I1212 16:19:05.988860 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[storage.k8s.io/v1/CSINode, namespace: , name: crc, uid: c8ff9da2-6151-45a0-92ea-435bc51180ea]" owner=[{"apiVersion":"v1","kind":"Node","name":"crc","uid":"23216ff3-032e-49af-af7e-1d23d5907b59"}] 2025-12-12T16:19:05.988897180+00:00 stderr F I1212 16:19:05.988887 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-dns-operator, name: dns-operator, uid: 75c9caa6-d284-4a97-95d2-2a04b51f093f]" virtual=false 2025-12-12T16:19:05.988926551+00:00 stderr F I1212 16:19:05.988909 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-cluster-version, name: cluster-version-operator, uid: d5123c8d-63b9-4bc1-a443-acddb48f1d78]" virtual=false 2025-12-12T16:19:05.989092355+00:00 stderr F I1212 16:19:05.989024 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 614226dc-6dfc-4b23-a9e9-54341ad46bc9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.989133096+00:00 stderr F I1212 16:19:05.989085 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 
09857aec-2c93-4f0d-9e38-a820bd5b8362]" virtual=false 2025-12-12T16:19:05.989366131+00:00 stderr F I1212 16:19:05.989339 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: a79ef72a-9de8-4bd8-ab71-7e0b71724a57]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.989412063+00:00 stderr F I1212 16:19:05.989396 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: package-server-manager, uid: 8043f85f-0f9a-4179-b841-9d68d3642aae]" virtual=false 2025-12-12T16:19:05.989458534+00:00 stderr F I1212 16:19:05.989425 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator, uid: 5effb0d2-94d8-48b7-8c69-e538f7848429]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.989466554+00:00 stderr F I1212 16:19:05.989453 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: 7b943ba9-3321-444f-9be4-e7b351a28efa]" virtual=false 2025-12-12T16:19:05.989703810+00:00 stderr F I1212 16:19:05.989646 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-marketplace, name: marketplace-operator, uid: d268648d-aa1b-439b-844b-8e7f98ea08a3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.989732271+00:00 stderr F I1212 16:19:05.989707 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-authentication-operator, name: authentication-operator, uid: 391a5d9a-ccb4-4c96-a945-870a508a19d6]" virtual=false 2025-12-12T16:19:05.990063639+00:00 stderr F I1212 16:19:05.990027 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-network-operator, name: network-operator, uid: 2c897060-d3cf-4d7f-8d38-ef464b7a697a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.990074449+00:00 stderr F I1212 16:19:05.990056 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-network-diagnostics, name: network-check-source-5bb8f5cd97, uid: 1df99967-ec1a-4576-a622-ef7910592096]" virtual=false 2025-12-12T16:19:05.991112155+00:00 stderr F I1212 16:19:05.991079 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-cluster-samples-operator, name: 
cluster-samples-operator, uid: 83079835-b3de-4de8-ad7d-f332ab909932]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.991167306+00:00 stderr F I1212 16:19:05.991152 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-cluster-version, name: cluster-version-operator-7c9b9cfd6, uid: a0b73ecb-9dbc-4f7e-8e8a-9f4d7513b246]" virtual=false 2025-12-12T16:19:05.993275998+00:00 stderr F I1212 16:19:05.993200 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-network-console, name: networking-console-plugin, uid: e8047e30-a40e-4ced-ae42-eea4288c975a]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.993298869+00:00 stderr F I1212 16:19:05.993267 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-console, name: downloads-747b44746d, uid: 79874096-0d38-4bc8-b244-d36a9841c09d]" virtual=false 2025-12-12T16:19:05.993410321+00:00 stderr F I1212 16:19:05.993345 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator, uid: c3ff943a-b570-4a98-8388-1f8a3280a85a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.993410321+00:00 stderr F I1212 16:19:05.993383 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-etcd-operator, name: etcd-operator-69b85846b6, uid: ee8bcd40-8099-4fe1-9b60-51e64a4cde02]" virtual=false 2025-12-12T16:19:05.993421902+00:00 stderr F I1212 16:19:05.993214 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-console-operator, name: console-operator, uid: 4982b9f1-eaf4-44fa-a84a-bf9954aedcb1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.993458373+00:00 stderr F I1212 16:19:05.993429 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-11, uid: c121b5bf-996c-44ca-8254-1a98965ff795]" virtual=false 2025-12-12T16:19:05.993755590+00:00 stderr F I1212 16:19:05.993722 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-ingress-operator, name: ingress-operator, uid: dcd260b6-d741-4056-94e9-f063ec7db58c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.993835842+00:00 stderr F I1212 16:19:05.993794 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-operator-lifecycle-manager, name: catalog-operator-75ff9f647d, uid: cc550be6-9973-4940-81f8-2fbf0f313a3b]" virtual=false 2025-12-12T16:19:05.993977145+00:00 stderr F I1212 16:19:05.993954 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: olm-operator, uid: e6e8c1a2-3934-417b-9f46-0df6a0dbf8d9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.994019567+00:00 stderr F I1212 16:19:05.994003 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-console-operator, name: console-operator-67c89758df, uid: 9c191a3a-6ee1-46c7-8ce9-b0742e2f39ef]" virtual=false 2025-12-12T16:19:05.995249657+00:00 stderr F I1212 16:19:05.995199 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: catalog-operator, uid: bc11b984-7cfa-489a-9f9a-5f2c0648078f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.995271648+00:00 stderr F I1212 16:19:05.995241 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-machine-config-operator, name: machine-config-operator-67c9d58cbb, uid: ee69d9ef-da48-4f01-92cb-f7812f8b1668]" virtual=false 2025-12-12T16:19:05.995325749+00:00 stderr F I1212 16:19:05.995265 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-storage-version-migrator-operator, name: kube-storage-version-migrator-operator, uid: af746821-921a-4842-94da-28c08769612a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.995359740+00:00 stderr F I1212 16:19:05.995326 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-multus, name: multus-admission-controller-69db94689b, uid: 4d48d09d-7d9c-4332-b959-eb7de70b3fc1]" virtual=false 2025-12-12T16:19:05.995834621+00:00 stderr F I1212 16:19:05.995805 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 3a8705c5-b62b-40a4-8e43-30f0569fa490]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.995876632+00:00 stderr F I1212 16:19:05.995861 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-dns-operator, name: dns-operator-799b87ffcd, uid: 34b14622-c7c1-4083-862f-b01f4d0b89fb]" virtual=false 2025-12-12T16:19:05.995962555+00:00 stderr F I1212 16:19:05.995892 1 garbagecollector.go:567] 
"item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-ovn-kubernetes, name: ovnkube-control-plane, uid: 8bfd4bef-4292-4ca1-b90f-38cca09fb8f8]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.996010696+00:00 stderr F I1212 16:19:05.995975 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-controller-manager, name: controller-manager-7fffb5779, uid: c1bb4e6a-7815-489c-b7e2-493abd4b3cfc]" virtual=false 2025-12-12T16:19:05.996248082+00:00 stderr F I1212 16:19:05.996161 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-machine-api, name: machine-api-operator, uid: 6e3281a2-74ca-4530-b743-ae9a62edcc78]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.996248082+00:00 stderr F I1212 16:19:05.996206 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator-69d5f845f8, uid: 7d6f8705-6a84-4fe2-b00d-a715d0bef5ac]" virtual=false 2025-12-12T16:19:05.997851431+00:00 stderr F I1212 16:19:05.997790 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-cluster-version, name: cluster-version-operator-7c9b9cfd6, uid: a0b73ecb-9dbc-4f7e-8e8a-9f4d7513b246]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"cluster-version-operator","uid":"d5123c8d-63b9-4bc1-a443-acddb48f1d78","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.997869452+00:00 stderr F I1212 16:19:05.997838 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-service-ca, name: service-ca-74545575db, uid: 032d2299-12ca-4888-8267-6bf5e7408ffb]" virtual=false 2025-12-12T16:19:05.998206540+00:00 stderr F I1212 16:19:05.998134 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-11, uid: c121b5bf-996c-44ca-8254-1a98965ff795]" 2025-12-12T16:19:05.998206540+00:00 stderr F I1212 16:19:05.998168 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-12, uid: 11d4d04a-208b-4be3-a6a0-e2fba01f9a7f]" virtual=false 2025-12-12T16:19:05.998381664+00:00 stderr F I1212 16:19:05.998354 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: 7b943ba9-3321-444f-9be4-e7b351a28efa]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.998424405+00:00 stderr F I1212 16:19:05.998407 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-machine-config-operator, name: machine-config-controller-f9cdd68f7, uid: 0c0e2173-8b9c-4d1b-8f39-d7f0de20c165]" virtual=false 2025-12-12T16:19:05.998595420+00:00 stderr F I1212 16:19:05.998552 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-dns-operator, name: dns-operator, uid: 75c9caa6-d284-4a97-95d2-2a04b51f093f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.998625230+00:00 stderr F I1212 16:19:05.998594 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-route-controller-manager, name: route-controller-manager-67bd47cff9, uid: 5c748875-8459-42f0-a8c4-b1d94f4599c2]" virtual=false 2025-12-12T16:19:05.998654261+00:00 stderr F I1212 16:19:05.998610 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator, uid: f8199ef4-1467-44ed-9019-69c1f1737f70]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:05.998718413+00:00 stderr F I1212 16:19:05.998701 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-ingress, name: router-default-68cf44c8b8, uid: f7f15996-7847-48ee-ae2e-c7569e03a11d]" virtual=false 2025-12-12T16:19:05.999058361+00:00 stderr F I1212 16:19:05.999029 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-network-diagnostics, name: network-check-source-5bb8f5cd97, uid: 1df99967-ec1a-4576-a622-ef7910592096]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"network-check-source","uid":"e3b48335-28bd-49bf-9cf0-82069658b68a","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:05.999109642+00:00 stderr F I1212 16:19:05.999087 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-operator-lifecycle-manager, name: packageserver-7d4fc7d867, uid: b114bb7d-eae3-49ec-9f30-1aaaa852df8a]" virtual=false 2025-12-12T16:19:06.000038625+00:00 stderr F I1212 16:19:05.999971 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-authentication-operator, name: authentication-operator, uid: 391a5d9a-ccb4-4c96-a945-870a508a19d6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.000059076+00:00 stderr F I1212 16:19:06.000020 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-service-ca-operator, name: service-ca-operator-5b9c976747, uid: 1120b639-31d4-4218-8480-450040ee91c8]" virtual=false 2025-12-12T16:19:06.000359343+00:00 stderr F I1212 
16:19:06.000312 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 09857aec-2c93-4f0d-9e38-a820bd5b8362]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.000374624+00:00 stderr F I1212 16:19:06.000350 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-image-registry, name: cluster-image-registry-operator-86c45576b9, uid: ae9ef406-4015-4436-bbed-259aa8d939f1]" virtual=false 2025-12-12T16:19:06.000487546+00:00 stderr F I1212 16:19:06.000438 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-service-ca-operator, name: service-ca-operator, uid: 1703c560-9cd5-4273-a6b7-22510bce9318]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.000495817+00:00 stderr F I1212 16:19:06.000479 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-ingress-operator, name: ingress-operator-6b9cb4dbcf, uid: d469250a-60be-48f7-abbd-a47280b65246]" virtual=false 2025-12-12T16:19:06.000739083+00:00 stderr F I1212 16:19:06.000690 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: package-server-manager, uid: 8043f85f-0f9a-4179-b841-9d68d3642aae]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.000739083+00:00 stderr F I1212 16:19:06.000726 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-authentication, name: oauth-openshift-6567f5ffdb, uid: 906ecfe3-d641-46a2-b1c8-6cfd0db88430]" virtual=false 2025-12-12T16:19:06.001085331+00:00 stderr F I1212 16:19:06.000984 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-cluster-version, name: cluster-version-operator, uid: d5123c8d-63b9-4bc1-a443-acddb48f1d78]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.001085331+00:00 stderr F I1212 16:19:06.001031 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-6, uid: 7fbbc739-803e-4e42-8e9d-30d2b33db770]" virtual=false 2025-12-12T16:19:06.001614934+00:00 stderr F I1212 16:19:06.001283 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-12, uid: 11d4d04a-208b-4be3-a6a0-e2fba01f9a7f]" 2025-12-12T16:19:06.001614934+00:00 stderr F I1212 16:19:06.001317 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-operator-lifecycle-manager, name: olm-operator-5cdf44d969, uid: 72578216-96a2-4ed2-a3b0-bffb59c17f89]" virtual=false 2025-12-12T16:19:06.001614934+00:00 stderr F I1212 16:19:06.001360 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-console, name: downloads-747b44746d, uid: 79874096-0d38-4bc8-b244-d36a9841c09d]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"downloads","uid":"61eec0c1-c955-4ca2-b98d-b0e62696a08c","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.001614934+00:00 stderr F I1212 16:19:06.001377 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-config-operator, name: openshift-config-operator-5777786469, uid: 06dd195a-30c6-4f66-a070-af1607bbeb7f]" virtual=false 2025-12-12T16:19:06.001614934+00:00 stderr F I1212 16:19:06.001297 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-operator-lifecycle-manager, name: catalog-operator-75ff9f647d, uid: cc550be6-9973-4940-81f8-2fbf0f313a3b]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"catalog-operator","uid":"bc11b984-7cfa-489a-9f9a-5f2c0648078f","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.001614934+00:00 stderr F I1212 16:19:06.001529 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator-575994946d, uid: 0a5e6eef-b7d1-48de-8316-f008277a7a09]" virtual=false 2025-12-12T16:19:06.005063070+00:00 stderr F I1212 16:19:06.004396 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-dns-operator, name: dns-operator-799b87ffcd, uid: 34b14622-c7c1-4083-862f-b01f4d0b89fb]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"dns-operator","uid":"75c9caa6-d284-4a97-95d2-2a04b51f093f","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.005063070+00:00 stderr F I1212 16:19:06.004466 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-machine-api, name: control-plane-machine-set-operator-75ffdb6fcd, uid: 59ab0d35-c5f2-46c1-afca-9833377e6d9b]" virtual=false 2025-12-12T16:19:06.005063070+00:00 stderr F I1212 16:19:06.004613 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-machine-config-operator, name: machine-config-operator-67c9d58cbb, uid: ee69d9ef-da48-4f01-92cb-f7812f8b1668]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"machine-config-operator","uid":"7036b823-caf2-4fe7-9364-95791b080487","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.005063070+00:00 stderr F I1212 16:19:06.004632 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-marketplace, name: marketplace-operator-547dbd544d, uid: c73360bd-2731-4391-a2fc-16ca0308e662]" virtual=false 
2025-12-12T16:19:06.005063070+00:00 stderr F I1212 16:19:06.004711 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-6, uid: 7fbbc739-803e-4e42-8e9d-30d2b33db770]" 2025-12-12T16:19:06.005063070+00:00 stderr F I1212 16:19:06.004741 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-network-operator, name: network-operator-7bdcf4f5bd, uid: 3ae821d5-a7b7-467b-837d-641fc04a72a9]" virtual=false 2025-12-12T16:19:06.005063070+00:00 stderr F I1212 16:19:06.004889 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator-69d5f845f8, uid: 7d6f8705-6a84-4fe2-b00d-a715d0bef5ac]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"kube-controller-manager-operator","uid":"09857aec-2c93-4f0d-9e38-a820bd5b8362","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.005063070+00:00 stderr F I1212 16:19:06.004929 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-service-ca, name: service-ca-74545575db, uid: 032d2299-12ca-4888-8267-6bf5e7408ffb]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"service-ca","uid":"c2be5831-b4ac-4748-a369-a82d422367a4","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.005063070+00:00 stderr F I1212 16:19:06.004936 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-kube-storage-version-migrator-operator, name: kube-storage-version-migrator-operator-565b79b866, uid: cb055748-116b-4578-bea3-474dc49e8de8]" virtual=false 2025-12-12T16:19:06.005063070+00:00 stderr F I1212 16:19:06.004956 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-oauth-apiserver, name: apiserver-8596bd845d, uid: 53219eb2-30a8-4ac5-ae8d-be8240dd833b]" virtual=false 2025-12-12T16:19:06.005159612+00:00 stderr F I1212 16:19:06.005094 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-console-operator, name: console-operator-67c89758df, uid: 9c191a3a-6ee1-46c7-8ce9-b0742e2f39ef]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"console-operator","uid":"4982b9f1-eaf4-44fa-a84a-bf9954aedcb1","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.005170652+00:00 stderr F I1212 16:19:06.005154 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator-54f497555d, uid: b0612b3c-d060-46cd-ab62-3050979bcd07]" virtual=false 2025-12-12T16:19:06.005268905+00:00 stderr F I1212 16:19:06.005093 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-multus, name: multus-admission-controller-69db94689b, uid: 4d48d09d-7d9c-4332-b959-eb7de70b3fc1]" 
owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"multus-admission-controller","uid":"add425b8-cb71-4a29-b746-fade1ff57eee","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.005284065+00:00 stderr F I1212 16:19:06.005261 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-machine-api, name: machine-api-operator-755bb95488, uid: 1431fb3d-6bbc-422f-a28d-111402aaa458]" virtual=false 2025-12-12T16:19:06.005360177+00:00 stderr F I1212 16:19:06.005313 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-etcd-operator, name: etcd-operator-69b85846b6, uid: ee8bcd40-8099-4fe1-9b60-51e64a4cde02]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"etcd-operator","uid":"7bcc9069-5a71-4f51-8970-90dddeee56b2","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.005409248+00:00 stderr F I1212 16:19:06.005359 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-route-controller-manager, name: route-controller-manager-67bd47cff9, uid: 5c748875-8459-42f0-a8c4-b1d94f4599c2]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"route-controller-manager","uid":"d871271b-4355-4114-942d-3289298ff327","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.005435429+00:00 stderr F I1212 16:19:06.005389 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator-686468bdd5, uid: 5d8d8bc2-0b0a-4849-a202-916d4376e750]" virtual=false 2025-12-12T16:19:06.005543341+00:00 stderr F I1212 16:19:06.005510 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-controller-manager, name: controller-manager-7fffb5779, uid: c1bb4e6a-7815-489c-b7e2-493abd4b3cfc]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"controller-manager","uid":"2935ab56-0ed7-4afe-8c71-c57de10607f1","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.005555402+00:00 stderr F I1212 16:19:06.005542 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-machine-approver, uid: bf242ec0-9a9d-43a2-a357-f6d487eabb56]" virtual=false 2025-12-12T16:19:06.005591843+00:00 stderr F I1212 16:19:06.005422 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-console, uid: ce6b46bc-daaa-4079-95d7-fcbba6af00f1]" virtual=false 2025-12-12T16:19:06.008646008+00:00 stderr F I1212 16:19:06.008577 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-machine-config-operator, name: machine-config-controller-f9cdd68f7, uid: 0c0e2173-8b9c-4d1b-8f39-d7f0de20c165]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"machine-config-controller","uid":"12093a0c-63e4-4953-9f6e-fac6da714800","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.008665939+00:00 stderr F I1212 16:19:06.008640 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-apiserver, name: apiserver-9ddfb9f55, uid: 12415fbf-77d0-45fb-a32f-a0c55e208a42]" virtual=false 2025-12-12T16:19:06.008957266+00:00 stderr F I1212 16:19:06.008907 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-operator-lifecycle-manager, name: packageserver-7d4fc7d867, uid: b114bb7d-eae3-49ec-9f30-1aaaa852df8a]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"packageserver","uid":"fd54a58c-a2c9-4287-ac4c-f1cc73f92a6e","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.008957266+00:00 stderr F I1212 16:19:06.008933 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-authentication-operator, name: authentication-operator-7f5c659b84, uid: c010cc79-d053-47dc-8b2b-a40ac47ecea7]" virtual=false 2025-12-12T16:19:06.009458148+00:00 stderr F I1212 16:19:06.009415 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-image-registry, name: cluster-image-registry-operator-86c45576b9, uid: ae9ef406-4015-4436-bbed-259aa8d939f1]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"cluster-image-registry-operator","uid":"a4c18a44-787c-4851-97ac-f3da87e8d0e3","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.009458148+00:00 stderr F I1212 16:19:06.009443 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-marketplace, uid: 9165b720-653d-498f-9378-91ee6a28934f]" virtual=false 2025-12-12T16:19:06.010806512+00:00 stderr F I1212 16:19:06.010745 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-ingress, name: router-default-68cf44c8b8, uid: f7f15996-7847-48ee-ae2e-c7569e03a11d]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"router-default","uid":"6445a1ec-8ec2-4ec8-b191-9cc7fa235148","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.010806512+00:00 stderr F I1212 16:19:06.010776 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-ovn-kubernetes, name: ovnkube-control-plane-57b78d8988, uid: 160d5be8-29e0-4ae9-8c67-33d0f4a45cb0]" virtual=false 2025-12-12T16:19:06.011070538+00:00 stderr F I1212 16:19:06.011039 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-operator-lifecycle-manager, name: olm-operator-5cdf44d969, uid: 72578216-96a2-4ed2-a3b0-bffb59c17f89]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"olm-operator","uid":"e6e8c1a2-3934-417b-9f46-0df6a0dbf8d9","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.011120789+00:00 stderr F I1212 16:19:06.011102 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-cluster-machine-approver, name: machine-approver-54c688565, uid: d1056c1c-6d3c-4b3f-803d-432e142b399c]" virtual=false 2025-12-12T16:19:06.011210852+00:00 stderr F 
I1212 16:19:06.011133 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-authentication, name: oauth-openshift-6567f5ffdb, uid: 906ecfe3-d641-46a2-b1c8-6cfd0db88430]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"oauth-openshift","uid":"d3695806-c64f-4466-8682-9f2395f1448f","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.011244232+00:00 stderr F I1212 16:19:06.011217 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-machine-api, name: control-plane-machine-set-operator-75ffdb6fcd, uid: 59ab0d35-c5f2-46c1-afca-9833377e6d9b]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"control-plane-machine-set-operator","uid":"7b943ba9-3321-444f-9be4-e7b351a28efa","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.011252633+00:00 stderr F I1212 16:19:06.011242 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-network-console, name: networking-console-plugin-5ff7774fd9, uid: 665f1706-d7e5-4a5a-9e31-d19dda2d76da]" virtual=false 2025-12-12T16:19:06.011259933+00:00 stderr F I1212 16:19:06.011238 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator-6b564684c8, uid: 55d70f85-f93a-4aae-bdf7-374163442ca6]" virtual=false 2025-12-12T16:19:06.011425087+00:00 stderr F I1212 16:19:06.011362 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator-575994946d, uid: 0a5e6eef-b7d1-48de-8316-f008277a7a09]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"kube-apiserver-operator","uid":"3a8705c5-b62b-40a4-8e43-30f0569fa490","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.011425087+00:00 stderr F I1212 16:19:06.011410 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[batch/v1/Job, namespace: openshift-operator-lifecycle-manager, name: collect-profiles-29425935, uid: 1f40f73c-36cb-4e4d-9644-eae7112c82fc]" virtual=false 2025-12-12T16:19:06.011548840+00:00 stderr F I1212 16:19:06.011517 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-service-ca-operator, name: service-ca-operator-5b9c976747, uid: 1120b639-31d4-4218-8480-450040ee91c8]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"service-ca-operator","uid":"1703c560-9cd5-4273-a6b7-22510bce9318","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.011598521+00:00 stderr F I1212 16:19:06.011578 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-operator-lifecycle-manager, name: package-server-manager-77f986bd66, uid: 1e4fdc1e-7708-4e3b-afa4-3511f5d48b08]" virtual=false 2025-12-12T16:19:06.012104714+00:00 stderr F I1212 16:19:06.012051 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, 
namespace: openshift-machine-api, name: machine-api-operator-755bb95488, uid: 1431fb3d-6bbc-422f-a28d-111402aaa458]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"machine-api-operator","uid":"6e3281a2-74ca-4530-b743-ae9a62edcc78","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.012104714+00:00 stderr F I1212 16:19:06.012085 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-image-registry, name: image-registry-66587d64c8, uid: 6b0b0246-7f8b-49b0-94c7-d4080c9ac715]" virtual=false 2025-12-12T16:19:06.012331369+00:00 stderr F I1212 16:19:06.012272 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-config-operator, name: openshift-config-operator-5777786469, uid: 06dd195a-30c6-4f66-a070-af1607bbeb7f]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"openshift-config-operator","uid":"dc451fc9-e781-493f-8e7d-55e9072cc784","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.012331369+00:00 stderr F I1212 16:19:06.012311 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-console, name: console-64d44f6ddf, uid: b7c7033b-0c10-49bf-9976-54b6f03cba2f]" virtual=false 2025-12-12T16:19:06.012355190+00:00 stderr F I1212 16:19:06.012333 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-ingress-operator, name: ingress-operator-6b9cb4dbcf, uid: d469250a-60be-48f7-abbd-a47280b65246]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"ingress-operator","uid":"dcd260b6-d741-4056-94e9-f063ec7db58c","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.012390421+00:00 stderr F I1212 16:19:06.012360 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator-846cbfc458, uid: 8c7664f6-9e05-4255-99ac-e0fcf1f2eb2d]" virtual=false 2025-12-12T16:19:06.014099393+00:00 stderr F I1212 16:19:06.014064 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-marketplace, name: marketplace-operator-547dbd544d, uid: c73360bd-2731-4391-a2fc-16ca0308e662]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"marketplace-operator","uid":"d268648d-aa1b-439b-844b-8e7f98ea08a3","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.014120363+00:00 stderr F I1212 16:19:06.014094 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-kube-storage-version-migrator, name: migrator-866fcbc849, uid: e72956a1-e4ff-4590-b4ff-88916cd8ca58]" virtual=false 2025-12-12T16:19:06.014323699+00:00 stderr F I1212 16:19:06.014270 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-kube-storage-version-migrator-operator, name: kube-storage-version-migrator-operator-565b79b866, uid: cb055748-116b-4578-bea3-474dc49e8de8]" 
owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"kube-storage-version-migrator-operator","uid":"af746821-921a-4842-94da-28c08769612a","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.014323699+00:00 stderr F I1212 16:19:06.014292 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-service-ca-operator, uid: 6c679b53-4beb-47e3-bcb3-3c8a99b44665]" virtual=false 2025-12-12T16:19:06.015170079+00:00 stderr F I1212 16:19:06.014584 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator-686468bdd5, uid: 5d8d8bc2-0b0a-4849-a202-916d4376e750]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"openshift-controller-manager-operator","uid":"5effb0d2-94d8-48b7-8c69-e538f7848429","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.015170079+00:00 stderr F I1212 16:19:06.014612 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-vsphere-infra, uid: 2ecc0567-f8a0-41d2-89f1-0b731ad62fbb]" virtual=false 2025-12-12T16:19:06.016576634+00:00 stderr F I1212 16:19:06.016537 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-console, uid: ce6b46bc-daaa-4079-95d7-fcbba6af00f1]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.016576634+00:00 stderr F I1212 16:19:06.016566 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-config-managed, uid: addccfaa-40f8-4cbc-86ad-29a8e9b5ef78]" virtual=false 2025-12-12T16:19:06.016624915+00:00 stderr F I1212 16:19:06.016596 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator-54f497555d, uid: b0612b3c-d060-46cd-ab62-3050979bcd07]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"openshift-kube-scheduler-operator","uid":"c3ff943a-b570-4a98-8388-1f8a3280a85a","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.016633336+00:00 stderr F I1212 16:19:06.016620 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-console-user-settings, uid: 04f36abf-3d91-487f-ba90-02965aa17b74]" virtual=false 2025-12-12T16:19:06.016800290+00:00 stderr F I1212 16:19:06.016769 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-network-operator, name: network-operator-7bdcf4f5bd, uid: 3ae821d5-a7b7-467b-837d-641fc04a72a9]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"network-operator","uid":"2c897060-d3cf-4d7f-8d38-ef464b7a697a","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.016800290+00:00 stderr F I1212 16:19:06.016793 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[v1/Namespace, namespace: , name: openshift-host-network, uid: 4b542650-5442-4772-92b3-4649992d4842]" virtual=false 2025-12-12T16:19:06.016910912+00:00 stderr F I1212 16:19:06.016867 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-operator-lifecycle-manager, name: package-server-manager-77f986bd66, uid: 1e4fdc1e-7708-4e3b-afa4-3511f5d48b08]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"package-server-manager","uid":"8043f85f-0f9a-4179-b841-9d68d3642aae","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.016936613+00:00 stderr F I1212 16:19:06.016911 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cloud-platform-infra, uid: 1d3ed5f4-a054-412f-81c5-af793a208a1f]" virtual=false 2025-12-12T16:19:06.016983754+00:00 stderr F I1212 16:19:06.016938 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-authentication-operator, name: authentication-operator-7f5c659b84, uid: c010cc79-d053-47dc-8b2b-a40ac47ecea7]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"authentication-operator","uid":"391a5d9a-ccb4-4c96-a945-870a508a19d6","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.017014605+00:00 stderr F I1212 16:19:06.016989 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-controller-manager-operator, uid: 8bc063ab-9aa2-42ac-a00e-f4a74b01a855]" virtual=false 2025-12-12T16:19:06.017345723+00:00 stderr F I1212 16:19:06.017295 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-machine-approver, uid: bf242ec0-9a9d-43a2-a357-f6d487eabb56]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.017402785+00:00 stderr F I1212 16:19:06.017387 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-apiserver-operator, uid: cd464d22-ebb5-4c4d-8bbb-1d343035fd2f]" virtual=false 2025-12-12T16:19:06.017474826+00:00 stderr F I1212 16:19:06.017427 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-marketplace, uid: 9165b720-653d-498f-9378-91ee6a28934f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.017505667+00:00 stderr F I1212 16:19:06.017481 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-machine-config-operator, uid: b4fb1cd2-d1ab-4a46-a6a5-d18dc4abe3ff]" virtual=false 2025-12-12T16:19:06.017719482+00:00 stderr F I1212 16:19:06.017697 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-ovn-kubernetes, name: ovnkube-control-plane-57b78d8988, uid: 
160d5be8-29e0-4ae9-8c67-33d0f4a45cb0]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"ovnkube-control-plane","uid":"8bfd4bef-4292-4ca1-b90f-38cca09fb8f8","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.017757353+00:00 stderr F I1212 16:19:06.017744 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-user-workload-monitoring, uid: 576d3059-0feb-4d0f-8353-b02539f1c62f]" virtual=false 2025-12-12T16:19:06.018620185+00:00 stderr F I1212 16:19:06.018588 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-oauth-apiserver, name: apiserver-8596bd845d, uid: 53219eb2-30a8-4ac5-ae8d-be8240dd833b]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"apiserver","uid":"9752e4b7-10ac-4a96-9fb5-b71be5f959c7","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.018675506+00:00 stderr F I1212 16:19:06.018658 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-config, uid: ef6ba319-bb5c-4df3-99ee-e0d4da656faf]" virtual=false 2025-12-12T16:19:06.020083201+00:00 stderr F I1212 16:19:06.020049 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator-6b564684c8, uid: 55d70f85-f93a-4aae-bdf7-374163442ca6]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"cluster-samples-operator","uid":"83079835-b3de-4de8-ad7d-f332ab909932","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.020142042+00:00 stderr F I1212 16:19:06.020121 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-image-registry, uid: 46fdad02-53ed-489c-bc37-661151dcc55f]" virtual=false 2025-12-12T16:19:06.020287696+00:00 stderr F I1212 16:19:06.020245 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-network-console, name: networking-console-plugin-5ff7774fd9, uid: 665f1706-d7e5-4a5a-9e31-d19dda2d76da]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"networking-console-plugin","uid":"e8047e30-a40e-4ced-ae42-eea4288c975a","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.020325227+00:00 stderr F I1212 16:19:06.020284 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-operator-lifecycle-manager, uid: fae25a02-86eb-4906-aa64-3c8f5894eb51]" virtual=false 2025-12-12T16:19:06.020562853+00:00 stderr F I1212 16:19:06.020516 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-apiserver, name: apiserver-9ddfb9f55, uid: 12415fbf-77d0-45fb-a32f-a0c55e208a42]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"apiserver","uid":"f913dfec-e49a-4051-9533-8f553abc8845","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.020562853+00:00 stderr F I1212 16:19:06.020550 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , 
name: openshift-cluster-storage-operator, uid: 197d9352-caa7-4358-80dd-a56a8cceb99f]" virtual=false 2025-12-12T16:19:06.021235209+00:00 stderr F I1212 16:19:06.020724 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[batch/v1/Job, namespace: openshift-operator-lifecycle-manager, name: collect-profiles-29425935, uid: 1f40f73c-36cb-4e4d-9644-eae7112c82fc]" owner=[{"apiVersion":"batch/v1","kind":"CronJob","name":"collect-profiles","uid":"a64f7156-efdf-4f2f-bdb6-f498fe674093","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.021235209+00:00 stderr F I1212 16:19:06.020753 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-version, uid: 00c20f60-51f9-4756-b585-93ed93b6029b]" virtual=false 2025-12-12T16:19:06.021235209+00:00 stderr F I1212 16:19:06.020809 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-kube-storage-version-migrator, name: migrator-866fcbc849, uid: e72956a1-e4ff-4590-b4ff-88916cd8ca58]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"migrator","uid":"e04da894-1c98-4971-8b8f-a1f4a381dbaf","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.021235209+00:00 stderr F I1212 16:19:06.020866 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-apiserver-operator, uid: 7b7e1a01-4e5a-4c5f-87de-07cde1d85301]" virtual=false 2025-12-12T16:19:06.021235209+00:00 stderr F I1212 16:19:06.021075 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-cluster-machine-approver, name: machine-approver-54c688565, uid: d1056c1c-6d3c-4b3f-803d-432e142b399c]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"machine-approver","uid":"614226dc-6dfc-4b23-a9e9-54341ad46bc9","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.021235209+00:00 stderr F I1212 16:19:06.021160 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-operator, name: console-operator, uid: 49c566ed-810f-4e34-89c0-e69f4ba1f5fe]" virtual=false 2025-12-12T16:19:06.021544237+00:00 stderr F I1212 16:19:06.021511 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-console, name: console-64d44f6ddf, uid: b7c7033b-0c10-49bf-9976-54b6f03cba2f]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"console","uid":"3263a8c6-5259-42d6-a8da-588894b3887d","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.021600088+00:00 stderr F I1212 16:19:06.021576 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-diagnostics, name: network-diagnostics, uid: 35a6df22-e2af-4660-b4c6-16bf8a43042f]" virtual=false 2025-12-12T16:19:06.023513026+00:00 stderr F I1212 16:19:06.023099 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-apiserver-operator, name: 
openshift-apiserver-operator-846cbfc458, uid: 8c7664f6-9e05-4255-99ac-e0fcf1f2eb2d]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"openshift-apiserver-operator","uid":"f8199ef4-1467-44ed-9019-69c1f1737f70","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.023513026+00:00 stderr F I1212 16:19:06.023148 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cloud-network-config-controller, uid: af3d201f-bde4-4a45-ae31-631a3c175cdc]" virtual=false 2025-12-12T16:19:06.023513026+00:00 stderr F I1212 16:19:06.023349 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-service-ca-operator, uid: 6c679b53-4beb-47e3-bcb3-3c8a99b44665]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.023513026+00:00 stderr F I1212 16:19:06.023373 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-operators, uid: bef0fff6-6eed-4771-85c8-e0dd5bdde9e0]" virtual=false 2025-12-12T16:19:06.023701970+00:00 stderr F I1212 16:19:06.023660 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-vsphere-infra, uid: 2ecc0567-f8a0-41d2-89f1-0b731ad62fbb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.023701970+00:00 stderr F I1212 16:19:06.023689 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: bd3ae2d6-78bd-48d1-be9f-a77e56ba96c8]" virtual=false 2025-12-12T16:19:06.024331556+00:00 stderr F I1212 16:19:06.024270 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-image-registry, name: image-registry-66587d64c8, uid: 6b0b0246-7f8b-49b0-94c7-d4080c9ac715]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"image-registry","uid":"d3f5db75-c64f-496e-937b-26ce08f3d633","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.024331556+00:00 stderr F I1212 16:19:06.024306 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-monitoring, name: cluster-monitoring-operator, uid: 55006a20-e0ea-4a38-a3bc-8ae4f1472858]" virtual=false 2025-12-12T16:19:06.025170367+00:00 stderr F I1212 16:19:06.025094 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cloud-platform-infra, uid: 1d3ed5f4-a054-412f-81c5-af793a208a1f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.025170367+00:00 stderr F I1212 16:19:06.025144 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver-operator, name: 
openshift-apiserver-operator, uid: dab4d42f-1426-4619-af00-f3b882989b05]" virtual=false 2025-12-12T16:19:06.025736761+00:00 stderr F I1212 16:19:06.025362 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-host-network, uid: 4b542650-5442-4772-92b3-4649992d4842]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.025736761+00:00 stderr F I1212 16:19:06.025389 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: multus-ancillary-tools, uid: c21b8763-d204-40e9-8b00-8f8a8767dd88]" virtual=false 2025-12-12T16:19:06.025736761+00:00 stderr F I1212 16:19:06.025502 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-apiserver-operator, uid: cd464d22-ebb5-4c4d-8bbb-1d343035fd2f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.025736761+00:00 stderr F I1212 16:19:06.025517 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: marketplace-operator, uid: c100705c-9ecd-449a-969b-a207f023c1b0]" virtual=false 2025-12-12T16:19:06.025736761+00:00 stderr F I1212 16:19:06.025712 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-config-managed, uid: addccfaa-40f8-4cbc-86ad-29a8e9b5ef78]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.025763971+00:00 stderr F I1212 16:19:06.025731 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: multus-ac, uid: 8730bc0a-fa8f-4fcf-9d74-e54b3e4f4363]" virtual=false 2025-12-12T16:19:06.027579996+00:00 stderr F I1212 16:19:06.027536 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-machine-config-operator, uid: b4fb1cd2-d1ab-4a46-a6a5-d18dc4abe3ff]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.027599587+00:00 stderr F I1212 16:19:06.027577 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca-operator, name: service-ca-operator, uid: c63e31c2-61a6-4920-b002-afb13dcebab4]" virtual=false 2025-12-12T16:19:06.027729520+00:00 stderr F I1212 16:19:06.027683 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-user-workload-monitoring, uid: 576d3059-0feb-4d0f-8353-b02539f1c62f]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.027790421+00:00 stderr F I1212 16:19:06.027763 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: 8c7ce863-bde8-4efe-8b60-31d289bbf1f9]" virtual=false 2025-12-12T16:19:06.027981326+00:00 stderr F I1212 16:19:06.027960 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-controller-manager-operator, uid: 8bc063ab-9aa2-42ac-a00e-f4a74b01a855]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.028026687+00:00 stderr F I1212 16:19:06.028011 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-node-identity, uid: 58202f40-7743-4217-8ef0-132a1f911124]" virtual=false 2025-12-12T16:19:06.028831697+00:00 stderr F I1212 16:19:06.028773 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-console-user-settings, uid: 04f36abf-3d91-487f-ba90-02965aa17b74]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.028831697+00:00 stderr F I1212 16:19:06.028816 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-openstack-infra, uid: 9b523a2a-29c2-47a4-8d2f-638bdb038f6b]" virtual=false 2025-12-12T16:19:06.029044182+00:00 stderr F I1212 16:19:06.028999 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-storage-operator, uid: 197d9352-caa7-4358-80dd-a56a8cceb99f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.029052653+00:00 stderr F I1212 16:19:06.029034 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: machine-api-controllers, uid: 9c65bbd1-8044-4ed7-b28a-8adb920c184f]" virtual=false 2025-12-12T16:19:06.029480823+00:00 stderr F I1212 16:19:06.029278 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-operator, name: console-operator, uid: 49c566ed-810f-4e34-89c0-e69f4ba1f5fe]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.029480823+00:00 stderr F I1212 16:19:06.029316 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-operator, name: openshift-config-operator, uid: bde8337f-c80d-4cd7-8e1c-8853e023fdb8]" virtual=false 2025-12-12T16:19:06.029898734+00:00 
stderr F I1212 16:19:06.029535 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-diagnostics, name: network-diagnostics, uid: 35a6df22-e2af-4660-b4c6-16bf8a43042f]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.029898734+00:00 stderr F I1212 16:19:06.029570 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: 1281df22-53c2-4b26-aa63-b3d41b4761b8]" virtual=false 2025-12-12T16:19:06.029898734+00:00 stderr F I1212 16:19:06.029862 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-apiserver-operator, uid: 7b7e1a01-4e5a-4c5f-87de-07cde1d85301]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.029898734+00:00 stderr F I1212 16:19:06.029885 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-authentication-operator, uid: 6912c967-af53-4ba4-8d3d-98a1cb2e9bdc]" virtual=false 2025-12-12T16:19:06.030019837+00:00 stderr F I1212 16:19:06.029987 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-config, uid: ef6ba319-bb5c-4df3-99ee-e0d4da656faf]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.030062658+00:00 stderr F I1212 16:19:06.030045 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: network-tools, uid: dc81d8c3-d5c7-455a-90ee-f6abb15e272d]" virtual=false 2025-12-12T16:19:06.030116239+00:00 stderr F I1212 16:19:06.030080 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-operator-lifecycle-manager, uid: fae25a02-86eb-4906-aa64-3c8f5894eb51]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.030124999+00:00 stderr F I1212 16:19:06.030113 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-console-operator, uid: 41451894-a97b-448d-aef0-4bc5d55372c1]" virtual=false 2025-12-12T16:19:06.032569520+00:00 stderr F I1212 16:19:06.032511 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cloud-network-config-controller, uid: af3d201f-bde4-4a45-ae31-631a3c175cdc]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.032597640+00:00 stderr F I1212 
16:19:06.032567 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-nutanix-infra, uid: 8f2c7d6d-b7e9-4b3b-b558-7c4d41319c79]" virtual=false 2025-12-12T16:19:06.032646442+00:00 stderr F I1212 16:19:06.032516 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-version, uid: 00c20f60-51f9-4756-b585-93ed93b6029b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.032655702+00:00 stderr F I1212 16:19:06.032641 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator, uid: d89f5418-8a04-45bf-a138-83af4c524742]" virtual=false 2025-12-12T16:19:06.032772875+00:00 stderr F I1212 16:19:06.032745 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-monitoring, name: cluster-monitoring-operator, uid: 55006a20-e0ea-4a38-a3bc-8ae4f1472858]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.032832376+00:00 stderr F I1212 16:19:06.032810 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-config-operator, uid: 1dc5d509-40a4-4bd8-a98c-cfe8b82b4f57]" virtual=false 2025-12-12T16:19:06.032943749+00:00 stderr F I1212 16:19:06.032901 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-operators, uid: bef0fff6-6eed-4771-85c8-e0dd5bdde9e0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.032956439+00:00 stderr F I1212 16:19:06.032936 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-storage-version-migrator-operator, uid: 7c712a8d-4b1a-4eef-a1f6-050630ee028f]" virtual=false 2025-12-12T16:19:06.033089323+00:00 stderr F I1212 16:19:06.033065 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-image-registry, uid: 46fdad02-53ed-489c-bc37-661151dcc55f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.033163644+00:00 stderr F I1212 16:19:06.033114 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-multus, uid: b98014e1-4b49-40b7-a8a0-5258b064c959]" virtual=false 2025-12-12T16:19:06.034829865+00:00 stderr F I1212 16:19:06.034788 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: 
bd3ae2d6-78bd-48d1-be9f-a77e56ba96c8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.034849966+00:00 stderr F I1212 16:19:06.034798 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: multus-ac, uid: 8730bc0a-fa8f-4fcf-9d74-e54b3e4f4363]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.034867106+00:00 stderr F I1212 16:19:06.034837 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-operator, uid: faea3de1-356e-4f94-8518-0aca76aba835]" virtual=false 2025-12-12T16:19:06.034867106+00:00 stderr F I1212 16:19:06.034849 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: installer-artifacts, uid: 0e18a7c9-8c75-4c32-8d92-66303c9d209f]" virtual=false 2025-12-12T16:19:06.036401284+00:00 stderr F I1212 16:19:06.036366 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca-operator, name: service-ca-operator, uid: c63e31c2-61a6-4920-b002-afb13dcebab4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.036449496+00:00 stderr F I1212 16:19:06.036435 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-ovn-kubernetes, uid: 14f4405e-2ee4-486f-b504-e6eb6327dd6e]" virtual=false 2025-12-12T16:19:06.040659780+00:00 stderr F I1212 16:19:06.040581 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-node-identity, uid: 58202f40-7743-4217-8ef0-132a1f911124]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.040659780+00:00 stderr F I1212 16:19:06.040644 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd-operator, name: etcd-operator, uid: 0ce3e4fb-39b1-4367-9741-b9e539e4cdc1]" virtual=false 2025-12-12T16:19:06.043070619+00:00 stderr F I1212 16:19:06.043031 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: multus-ancillary-tools, uid: c21b8763-d204-40e9-8b00-8f8a8767dd88]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.043070619+00:00 stderr F I1212 16:19:06.043060 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-node, uid: 
a1d81936-4e38-446d-819b-61c0b05df947]" virtual=false 2025-12-12T16:19:06.050426581+00:00 stderr F I1212 16:19:06.050325 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: 8c7ce863-bde8-4efe-8b60-31d289bbf1f9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.050426581+00:00 stderr F I1212 16:19:06.050404 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: 0918fba0-c5ef-42c6-ab99-2fb4dcb34871]" virtual=false 2025-12-12T16:19:06.058405018+00:00 stderr F I1212 16:19:06.058317 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: marketplace-operator, uid: c100705c-9ecd-449a-969b-a207f023c1b0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.058513621+00:00 stderr F I1212 16:19:06.058490 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-machine-approver, name: machine-approver-sa, uid: 9d2e2fd5-f689-4ff8-bed1-c7547bf698d4]" virtual=false 2025-12-12T16:19:06.062243063+00:00 stderr F I1212 16:19:06.062196 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: machine-api-controllers, uid: 9c65bbd1-8044-4ed7-b28a-8adb920c184f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.062317005+00:00 stderr F I1212 16:19:06.062299 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns-operator, name: dns-operator, uid: ededd462-781c-45db-afa8-736b78e4df88]" virtual=false 2025-12-12T16:19:06.064152080+00:00 stderr F I1212 16:19:06.064120 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator, uid: dab4d42f-1426-4619-af00-f3b882989b05]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.064240573+00:00 stderr F I1212 16:19:06.064223 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: node-ca, uid: 1476d897-3740-45c6-b3a2-3403be584014]" virtual=false 2025-12-12T16:19:06.067426241+00:00 stderr F I1212 16:19:06.067328 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: 1281df22-53c2-4b26-aa63-b3d41b4761b8]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.067449442+00:00 stderr F I1212 16:19:06.067420 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: iptables-alerter, uid: 67f6de68-003a-46d5-b769-d3ebce26fdf7]" virtual=false 2025-12-12T16:19:06.070160919+00:00 stderr F I1212 16:19:06.070092 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-openstack-infra, uid: 9b523a2a-29c2-47a4-8d2f-638bdb038f6b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.070250651+00:00 stderr F I1212 16:19:06.070231 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: cli, uid: 8c11cc8a-acca-44f2-abbb-b2abc36fc5e1]" virtual=false 2025-12-12T16:19:06.073582704+00:00 stderr F I1212 16:19:06.073541 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-authentication-operator, uid: 6912c967-af53-4ba4-8d3d-98a1cb2e9bdc]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.073657445+00:00 stderr F I1212 16:19:06.073634 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-node-identity, name: network-node-identity, uid: 020e6dbc-bbf9-4bb0-9224-ebe047b05265]" virtual=false 2025-12-12T16:19:06.080289059+00:00 stderr F I1212 16:19:06.080117 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-operator, name: openshift-config-operator, uid: bde8337f-c80d-4cd7-8e1c-8853e023fdb8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.080289059+00:00 stderr F I1212 16:19:06.080204 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-ingress-operator, uid: dafbc0ce-18be-4af5-8fd3-13010bc7349e]" virtual=false 2025-12-12T16:19:06.087721803+00:00 stderr F I1212 16:19:06.087592 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-console-operator, uid: 41451894-a97b-448d-aef0-4bc5d55372c1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.087721803+00:00 stderr F I1212 16:19:06.087678 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kni-infra, uid: 4fc76540-3eed-4c8e-b1c4-9a8744b7c4fe]" virtual=false 2025-12-12T16:19:06.104479067+00:00 stderr F I1212 16:19:06.104365 1 
garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: network-tools, uid: dc81d8c3-d5c7-455a-90ee-f6abb15e272d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.104586620+00:00 stderr F I1212 16:19:06.104559 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-controller-manager-operator, uid: 6498c809-5d1f-41f3-9a77-71207d8b4490]" virtual=false 2025-12-12T16:19:06.116100255+00:00 stderr F I1212 16:19:06.116019 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-nutanix-infra, uid: 8f2c7d6d-b7e9-4b3b-b558-7c4d41319c79]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.116221438+00:00 stderr F I1212 16:19:06.116194 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-scheduler-operator, uid: e597c7c3-88eb-47f8-b1a0-825e57afdf8a]" virtual=false 2025-12-12T16:19:06.122418801+00:00 stderr F I1212 16:19:06.121759 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator, uid: d89f5418-8a04-45bf-a138-83af4c524742]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.122418801+00:00 stderr F I1212 16:19:06.121840 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: installer, uid: 3e674476-cd07-42b1-9fe1-083d4990ac3a]" virtual=false 2025-12-12T16:19:06.146517817+00:00 stderr F I1212 16:19:06.146395 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-config-operator, uid: 1dc5d509-40a4-4bd8-a98c-cfe8b82b4f57]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.146567438+00:00 stderr F I1212 16:19:06.146481 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-ovirt-infra, uid: 96a755c5-7f75-4b72-995e-f9dddfb24440]" virtual=false 2025-12-12T16:19:06.151114160+00:00 stderr F I1212 16:19:06.151049 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-storage-version-migrator-operator, uid: 7c712a8d-4b1a-4eef-a1f6-050630ee028f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.151145431+00:00 stderr F I1212 16:19:06.151110 1 garbagecollector.go:501] 
"Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-etcd-operator, uid: e1772d22-6f09-435f-b8d7-78fdc6a9bc4a]" virtual=false 2025-12-12T16:19:06.157473618+00:00 stderr F I1212 16:19:06.157398 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-multus, uid: b98014e1-4b49-40b7-a8a0-5258b064c959]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.157498288+00:00 stderr F I1212 16:19:06.157476 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-monitoring, uid: 07c07c05-6e0f-4083-ae8f-f743734c6b19]" virtual=false 2025-12-12T16:19:06.160115943+00:00 stderr F I1212 16:19:06.160034 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-operator, uid: faea3de1-356e-4f94-8518-0aca76aba835]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.160115943+00:00 stderr F I1212 16:19:06.160070 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-samples-operator, uid: c06aeb09-cb69-4bbf-8dde-7145ef6be96d]" virtual=false 2025-12-12T16:19:06.165589618+00:00 stderr F I1212 16:19:06.165382 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-ovn-kubernetes, uid: 14f4405e-2ee4-486f-b504-e6eb6327dd6e]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.165589618+00:00 stderr F I1212 16:19:06.165422 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-console, uid: 6c0a940d-a53d-4fc4-ad13-cda3ca9fe0f4]" virtual=false 2025-12-12T16:19:06.168149112+00:00 stderr F I1212 16:19:06.168104 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: installer-artifacts, uid: 0e18a7c9-8c75-4c32-8d92-66303c9d209f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.168166942+00:00 stderr F I1212 16:19:06.168143 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-diagnostics, uid: 3991475a-57bf-40e9-9954-a84192d5f5e1]" virtual=false 2025-12-12T16:19:06.173679938+00:00 stderr F I1212 16:19:06.173610 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd-operator, name: etcd-operator, uid: 0ce3e4fb-39b1-4367-9741-b9e539e4cdc1]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.173679938+00:00 stderr F I1212 16:19:06.173641 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-dns-operator, uid: 2828084a-f369-4ab4-8c97-5bac5fa50528]" virtual=false 2025-12-12T16:19:06.179220945+00:00 stderr F I1212 16:19:06.177811 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-node, uid: a1d81936-4e38-446d-819b-61c0b05df947]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.179220945+00:00 stderr F I1212 16:19:06.177860 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-machine-api, uid: 79bcd8e8-bf75-45da-8612-c49cfd9d13df]" virtual=false 2025-12-12T16:19:06.185506511+00:00 stderr F I1212 16:19:06.185446 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: 0918fba0-c5ef-42c6-ab99-2fb4dcb34871]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.185506511+00:00 stderr F I1212 16:19:06.185485 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 749fdd6c-f7a2-4743-bbd6-4c00d16e8776]" virtual=false 2025-12-12T16:19:06.190856203+00:00 stderr F I1212 16:19:06.190607 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-machine-approver, name: machine-approver-sa, uid: 9d2e2fd5-f689-4ff8-bed1-c7547bf698d4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.190856203+00:00 stderr F I1212 16:19:06.190640 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-control-plane, uid: e7d12ea0-5793-4ac6-b96e-798934641d22]" virtual=false 2025-12-12T16:19:06.193545449+00:00 stderr F I1212 16:19:06.193508 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns-operator, name: dns-operator, uid: ededd462-781c-45db-afa8-736b78e4df88]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.193564620+00:00 stderr F I1212 16:19:06.193540 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: cli-artifacts, uid: fdd6be6b-f6bc-47c7-bda0-1de138bd0a81]" 
virtual=false 2025-12-12T16:19:06.197609110+00:00 stderr F I1212 16:19:06.197553 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: node-ca, uid: 1476d897-3740-45c6-b3a2-3403be584014]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.197609110+00:00 stderr F I1212 16:19:06.197581 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: machine-api-operator, uid: 27ab1b40-7038-45bc-a0c3-f51f5fd8e027]" virtual=false 2025-12-12T16:19:06.200432180+00:00 stderr F I1212 16:19:06.200376 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: iptables-alerter, uid: 67f6de68-003a-46d5-b769-d3ebce26fdf7]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.200432180+00:00 stderr F I1212 16:19:06.200412 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 78bb60a8-531d-48f1-b653-8764a30ad047]" virtual=false 2025-12-12T16:19:06.205637938+00:00 stderr F I1212 16:19:06.205582 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-node-identity, name: network-node-identity, uid: 020e6dbc-bbf9-4bb0-9224-ebe047b05265]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.205637938+00:00 stderr F I1212 16:19:06.205623 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console, name: console, uid: 324553c0-d8c0-43b0-bbae-07283a98bcf1]" virtual=false 2025-12-12T16:19:06.208062008+00:00 stderr F I1212 16:19:06.208013 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: cli, uid: 8c11cc8a-acca-44f2-abbb-b2abc36fc5e1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.208079479+00:00 stderr F I1212 16:19:06.208051 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: metrics-daemon-sa, uid: abce43d0-2a82-4a99-9c68-e5d3d0e59581]" virtual=false 2025-12-12T16:19:06.213796140+00:00 stderr F I1212 16:19:06.213711 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-ingress-operator, uid: dafbc0ce-18be-4af5-8fd3-13010bc7349e]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.213816521+00:00 stderr F I1212 16:19:06.213789 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: cluster-network-operator, uid: 4474bfa7-4a4c-4eef-9e88-1d3ba71df974]" virtual=false 2025-12-12T16:19:06.222547436+00:00 stderr F I1212 16:19:06.222449 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kni-infra, uid: 4fc76540-3eed-4c8e-b1c4-9a8744b7c4fe]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.222547436+00:00 stderr F I1212 16:19:06.222506 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: multus, uid: 8ed5854e-19c6-4934-895c-1ff820fbc84c]" virtual=false 2025-12-12T16:19:06.237468815+00:00 stderr F I1212 16:19:06.237360 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-controller-manager-operator, uid: 6498c809-5d1f-41f3-9a77-71207d8b4490]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.237468815+00:00 stderr F I1212 16:19:06.237442 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator, uid: 736e29a2-1ce3-47d8-ac5c-ddba4e238245]" virtual=false 2025-12-12T16:19:06.247790551+00:00 stderr F I1212 16:19:06.247718 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-scheduler-operator, uid: e597c7c3-88eb-47f8-b1a0-825e57afdf8a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.247790551+00:00 stderr F I1212 16:19:06.247760 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator-operator, name: kube-storage-version-migrator-operator, uid: 5d04164f-45ca-4c60-8a08-c4459300ecda]" virtual=false 2025-12-12T16:19:06.258471335+00:00 stderr F I1212 16:19:06.258409 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: installer, uid: 3e674476-cd07-42b1-9fe1-083d4990ac3a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.258471335+00:00 stderr F I1212 16:19:06.258450 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication-operator, name: authentication-operator, uid: 
063bc733-6edc-4f47-a43c-73cbfb5c3c8d]" virtual=false 2025-12-12T16:19:06.279047653+00:00 stderr F I1212 16:19:06.278914 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-ovirt-infra, uid: 96a755c5-7f75-4b72-995e-f9dddfb24440]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.279047653+00:00 stderr F I1212 16:19:06.279010 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: olm-operator-serviceaccount, uid: 0d23d03f-8d1c-40a1-b029-bb73930758f7]" virtual=false 2025-12-12T16:19:06.285477462+00:00 stderr F I1212 16:19:06.285404 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-etcd-operator, uid: e1772d22-6f09-435f-b8d7-78fdc6a9bc4a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.285499753+00:00 stderr F I1212 16:19:06.285465 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 180b3a1a-50c2-445c-8650-162b0f3a1d99]" virtual=false 2025-12-12T16:19:06.292030924+00:00 stderr F I1212 16:19:06.291928 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-monitoring, uid: 07c07c05-6e0f-4083-ae8f-f743734c6b19]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.292100016+00:00 stderr F I1212 16:19:06.292004 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-operator, name: ingress-operator, uid: 75f07586-843e-4cd5-a497-25f3abe799ec]" virtual=false 2025-12-12T16:19:06.294117126+00:00 stderr F I1212 16:19:06.294060 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-samples-operator, uid: c06aeb09-cb69-4bbf-8dde-7145ef6be96d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.294117126+00:00 stderr F I1212 16:19:06.294094 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: machine-api-termination-handler, uid: 1567c8f7-61db-45bd-a1b6-c1f4b610e91b]" virtual=false 2025-12-12T16:19:06.298001202+00:00 stderr F I1212 16:19:06.297830 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-console, uid: 6c0a940d-a53d-4fc4-ad13-cda3ca9fe0f4]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.298001202+00:00 stderr F I1212 16:19:06.297959 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: cluster-autoscaler, uid: 56382e05-4f3e-45e2-9065-468d4f668091]" virtual=false 2025-12-12T16:19:06.302907033+00:00 stderr F I1212 16:19:06.302642 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-diagnostics, uid: 3991475a-57bf-40e9-9954-a84192d5f5e1]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.302907033+00:00 stderr F I1212 16:19:06.302678 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1/OperatorGroup, namespace: openshift-monitoring, name: openshift-cluster-monitoring, uid: 4ce70de4-7730-472f-aa41-55b320f6a48b]" virtual=false 2025-12-12T16:19:06.307818945+00:00 stderr F I1212 16:19:06.307718 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-dns-operator, uid: 2828084a-f369-4ab4-8c97-5bac5fa50528]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.307868446+00:00 stderr F I1212 16:19:06.307797 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1/OperatorGroup, namespace: openshift-operator-lifecycle-manager, name: olm-operators, uid: 1332ecfd-3d6a-4222-b9b5-6e6e389f06df]" virtual=false 2025-12-12T16:19:06.310080390+00:00 stderr F I1212 16:19:06.310020 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-machine-api, uid: 79bcd8e8-bf75-45da-8612-c49cfd9d13df]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.310080390+00:00 stderr F I1212 16:19:06.310049 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1/OperatorGroup, namespace: openshift-operators, name: global-operators, uid: 83417554-904c-4254-8944-b91da7453b27]" virtual=false 2025-12-12T16:19:06.318363155+00:00 stderr F I1212 16:19:06.318031 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 749fdd6c-f7a2-4743-bbd6-4c00d16e8776]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.318363155+00:00 stderr F I1212 16:19:06.318072 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: oauth-proxy, uid: 
5b02e592-b496-421b-aa42-a67b5520f0dd]" virtual=false 2025-12-12T16:19:06.321912913+00:00 stderr F I1212 16:19:06.321580 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-control-plane, uid: e7d12ea0-5793-4ac6-b96e-798934641d22]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.321912913+00:00 stderr F I1212 16:19:06.321637 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: tools, uid: a2e57b51-274c-4040-8139-62eb4ada14e2]" virtual=false 2025-12-12T16:19:06.330569147+00:00 stderr F I1212 16:19:06.330471 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: machine-api-operator, uid: 27ab1b40-7038-45bc-a0c3-f51f5fd8e027]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.330569147+00:00 stderr F I1212 16:19:06.330526 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: must-gather, uid: efe3cb11-0030-44ec-b454-a1c46849474f]" virtual=false 2025-12-12T16:19:06.331506060+00:00 stderr F I1212 16:19:06.331446 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: cli-artifacts, uid: fdd6be6b-f6bc-47c7-bda0-1de138bd0a81]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.331572612+00:00 stderr F I1212 16:19:06.331551 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: driver-toolkit, uid: ab3fdf24-4b72-43fe-8063-bf671901e9c6]" virtual=false 2025-12-12T16:19:06.334990636+00:00 stderr F I1212 16:19:06.334419 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 78bb60a8-531d-48f1-b653-8764a30ad047]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.334990636+00:00 stderr F I1212 16:19:06.334479 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: tests, uid: 0e4e0fb5-de56-48a0-8d2f-34844c794213]" virtual=false 2025-12-12T16:19:06.338391340+00:00 stderr F I1212 16:19:06.337225 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console, name: console, uid: 324553c0-d8c0-43b0-bbae-07283a98bcf1]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.338391340+00:00 stderr F I1212 16:19:06.337305 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-operator-lifecycle-manager, name: olm-alert-rules, uid: d90e2096-395c-40fd-9ade-393efa2e6c5b]" virtual=false 2025-12-12T16:19:06.339696933+00:00 stderr F I1212 16:19:06.339647 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: metrics-daemon-sa, uid: abce43d0-2a82-4a99-9c68-e5d3d0e59581]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.339696933+00:00 stderr F I1212 16:19:06.339676 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-cluster-version, name: cluster-version-operator, uid: f57fe110-2989-4987-a61c-24caa6fc9bb2]" virtual=false 2025-12-12T16:19:06.356276473+00:00 stderr F I1212 16:19:06.353542 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: cluster-network-operator, uid: 4474bfa7-4a4c-4eef-9e88-1d3ba71df974]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.356276473+00:00 stderr F I1212 16:19:06.353612 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-dns-operator, name: dns, uid: eb61aaa9-4e25-4e91-a620-88091d39e58f]" virtual=false 2025-12-12T16:19:06.361229535+00:00 stderr F I1212 16:19:06.360663 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: multus, uid: 8ed5854e-19c6-4934-895c-1ff820fbc84c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.361229535+00:00 stderr F I1212 16:19:06.360730 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-etcd-operator, name: etcd-prometheus-rules, uid: 97274c51-fc95-4eb6-ad00-7a3b4f31f2ca]" virtual=false 2025-12-12T16:19:06.384047549+00:00 stderr F I1212 16:19:06.383812 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator, uid: 736e29a2-1ce3-47d8-ac5c-ddba4e238245]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.384047549+00:00 stderr F I1212 16:19:06.383866 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 0b5fb497-c46f-4ec8-b3ac-f01c6ed66367]" virtual=false 2025-12-12T16:19:06.389247628+00:00 stderr F I1212 16:19:06.385982 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator-operator, name: kube-storage-version-migrator-operator, uid: 5d04164f-45ca-4c60-8a08-c4459300ecda]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.389247628+00:00 stderr F I1212 16:19:06.386013 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-scheduler-operator, name: kube-scheduler-operator, uid: b334ae77-e6a0-41f9-b470-35a7cb6618a5]" virtual=false 2025-12-12T16:19:06.399689086+00:00 stderr F I1212 16:19:06.396796 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication-operator, name: authentication-operator, uid: 063bc733-6edc-4f47-a43c-73cbfb5c3c8d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.399689086+00:00 stderr F I1212 16:19:06.396861 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-network-operator, name: openshift-network-operator-ipsec-rules, uid: 4b9d0264-6145-4803-a9c5-e7715dde16c7]" virtual=false 2025-12-12T16:19:06.421841613+00:00 stderr F I1212 16:19:06.421542 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 180b3a1a-50c2-445c-8650-162b0f3a1d99]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.421841613+00:00 stderr F I1212 16:19:06.421595 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-cluster-machine-approver, name: machineapprover-rules, uid: 03750884-15c0-4b90-b038-07d324e83865]" virtual=false 2025-12-12T16:19:06.429654257+00:00 stderr F I1212 16:19:06.429558 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: olm-operator-serviceaccount, uid: 0d23d03f-8d1c-40a1-b029-bb73930758f7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.429654257+00:00 stderr F I1212 16:19:06.429616 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 
2877fbf1-ff5a-4aa9-b775-2605d4bccd96]" virtual=false 2025-12-12T16:19:06.434988199+00:00 stderr F I1212 16:19:06.434885 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-operator, name: ingress-operator, uid: 75f07586-843e-4cd5-a497-25f3abe799ec]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.434988199+00:00 stderr F I1212 16:19:06.434942 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-ovn-kubernetes, name: master-rules, uid: a8f235d8-6055-4051-84b4-f75387ba159c]" virtual=false 2025-12-12T16:19:06.435577213+00:00 stderr F I1212 16:19:06.435124 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: machine-api-termination-handler, uid: 1567c8f7-61db-45bd-a1b6-c1f4b610e91b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.435577213+00:00 stderr F I1212 16:19:06.435147 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-console-operator, name: cluster-monitoring-prometheus-rules, uid: 91547e96-31b3-460d-80d1-83b02bd7d873]" virtual=false 2025-12-12T16:19:06.436219469+00:00 stderr F I1212 16:19:06.436152 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[operators.coreos.com/v1/OperatorGroup, namespace: openshift-monitoring, name: openshift-cluster-monitoring, uid: 4ce70de4-7730-472f-aa41-55b320f6a48b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.436236229+00:00 stderr F I1212 16:19:06.436208 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-image-registry, name: image-registry-rules, uid: a1a802b1-bbfe-4655-8366-9f94d997c9ee]" virtual=false 2025-12-12T16:19:06.436378113+00:00 stderr F I1212 16:19:06.436335 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: cluster-autoscaler, uid: 56382e05-4f3e-45e2-9065-468d4f668091]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.436378113+00:00 stderr F I1212 16:19:06.436359 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-machine-api, name: machine-api-operator-prometheus-rules, uid: ea38d425-d751-4336-a88d-4f52a8920d6e]" virtual=false 2025-12-12T16:19:06.437036809+00:00 stderr F I1212 16:19:06.436993 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" 
item="[operators.coreos.com/v1/OperatorGroup, namespace: openshift-operator-lifecycle-manager, name: olm-operators, uid: 1332ecfd-3d6a-4222-b9b5-6e6e389f06df]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.437036809+00:00 stderr F I1212 16:19:06.437020 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-ovn-kubernetes, name: networking-rules, uid: 31962b12-2774-4f24-9d5c-f55ad1ee66ac]" virtual=false 2025-12-12T16:19:06.442499234+00:00 stderr F I1212 16:19:06.442331 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-7fffb5779" need=0 deleting=1 2025-12-12T16:19:06.442518415+00:00 stderr F I1212 16:19:06.442488 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-7fffb5779" relatedReplicaSets=["openshift-controller-manager/controller-manager-7b9f779b68","openshift-controller-manager/controller-manager-5f76cf6594","openshift-controller-manager/controller-manager-6445bd5bb7","openshift-controller-manager/controller-manager-65b6cccf98","openshift-controller-manager/controller-manager-6ff9c7475c","openshift-controller-manager/controller-manager-74bfd85b68","openshift-controller-manager/controller-manager-58897fffb5","openshift-controller-manager/controller-manager-69f958c846","openshift-controller-manager/controller-manager-7fffb5779","openshift-controller-manager/controller-manager-86d99bb5b9","openshift-controller-manager/controller-manager-c7d4b49f6","openshift-controller-manager/controller-manager-c84474957"] 2025-12-12T16:19:06.442532785+00:00 stderr F I1212 16:19:06.442464 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[operators.coreos.com/v1/OperatorGroup, namespace: openshift-operators, name: global-operators, uid: 83417554-904c-4254-8944-b91da7453b27]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.442574556+00:00 stderr F I1212 16:19:06.442547 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-cluster-samples-operator, name: samples-operator-alerts, uid: b6a0a11d-7bbc-4d70-aa23-0e557edf8696]" virtual=false 2025-12-12T16:19:06.443364576+00:00 stderr F I1212 16:19:06.442639 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="controller-manager-7fffb5779" pod="openshift-controller-manager/controller-manager-7fffb5779-6br5z" 2025-12-12T16:19:06.447614571+00:00 stderr F I1212 16:19:06.447554 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-67bd47cff9" need=0 deleting=1 2025-12-12T16:19:06.447703503+00:00 stderr F I1212 16:19:06.447681 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-67bd47cff9" 
relatedReplicaSets=["openshift-route-controller-manager/route-controller-manager-66bd94d94f","openshift-route-controller-manager/route-controller-manager-7d86df95df","openshift-route-controller-manager/route-controller-manager-c47fcf799","openshift-route-controller-manager/route-controller-manager-8fdcdbb66","openshift-route-controller-manager/route-controller-manager-67bd47cff9","openshift-route-controller-manager/route-controller-manager-6b47f77689","openshift-route-controller-manager/route-controller-manager-6b749d96f6","openshift-route-controller-manager/route-controller-manager-6d7f4ff85d","openshift-route-controller-manager/route-controller-manager-76558c69dc","openshift-route-controller-manager/route-controller-manager-776cdc94d6","openshift-route-controller-manager/route-controller-manager-f4599bd79","openshift-route-controller-manager/route-controller-manager-5fccdd79b9"] 2025-12-12T16:19:06.447941519+00:00 stderr F I1212 16:19:06.447806 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="route-controller-manager-67bd47cff9" pod="openshift-route-controller-manager/route-controller-manager-67bd47cff9-br6nz" 2025-12-12T16:19:06.451597889+00:00 stderr F I1212 16:19:06.451487 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: oauth-proxy, uid: 5b02e592-b496-421b-aa42-a67b5520f0dd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.451597889+00:00 stderr F I1212 16:19:06.451561 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-marketplace, name: marketplace-alert-rules, uid: dc8ef252-29f9-421b-9166-5d8a6fb2cb84]" virtual=false 2025-12-12T16:19:06.452029450+00:00 stderr F I1212 16:19:06.451966 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-controller-manager/controller-manager" err="Operation cannot be fulfilled on deployments.apps \"controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:06.457644929+00:00 stderr F I1212 16:19:06.457154 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:06.458023058+00:00 stderr F I1212 16:19:06.457145 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: tools, uid: a2e57b51-274c-4040-8139-62eb4ada14e2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.458068159+00:00 stderr F I1212 16:19:06.458024 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-multus, name: prometheus-k8s-rules, uid: f66d3c2d-8031-4960-b90a-2518392083d6]" virtual=false 
2025-12-12T16:19:06.463729719+00:00 stderr F I1212 16:19:06.463255 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: must-gather, uid: efe3cb11-0030-44ec-b454-a1c46849474f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.463729719+00:00 stderr F I1212 16:19:06.463303 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-apiserver, name: kube-apiserver-performance-recording-rules, uid: f911c2bf-6b61-41f5-9e5a-f111e13fea13]" virtual=false 2025-12-12T16:19:06.469833200+00:00 stderr F I1212 16:19:06.469766 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-7b9f779b68" need=1 creating=1 2025-12-12T16:19:06.472237579+00:00 stderr F I1212 16:19:06.472152 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-operator-lifecycle-manager, name: olm-alert-rules, uid: d90e2096-395c-40fd-9ade-393efa2e6c5b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.472278400+00:00 stderr F I1212 16:19:06.472231 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-image-registry, name: imagestreams-rules, uid: 7f8f7459-95b2-46d1-a5f5-c98861a56f22]" virtual=false 2025-12-12T16:19:06.472382373+00:00 stderr F I1212 16:19:06.472319 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-8fdcdbb66" need=1 creating=1 2025-12-12T16:19:06.472510406+00:00 stderr F I1212 16:19:06.472453 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: driver-toolkit, uid: ab3fdf24-4b72-43fe-8063-bf671901e9c6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.472530937+00:00 stderr F I1212 16:19:06.472511 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-ingress-operator, name: ingress-operator, uid: 25ced67d-4442-487b-9828-7b52d14815c0]" virtual=false 2025-12-12T16:19:06.477947901+00:00 stderr F I1212 16:19:06.477874 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-cluster-version, name: cluster-version-operator, uid: f57fe110-2989-4987-a61c-24caa6fc9bb2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.478026183+00:00 stderr F I1212 16:19:06.478002 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-image-registry, name: image-registry-operator-alerts, uid: c76d6124-d19c-4231-b946-23f2c04f09c7]" virtual=false 2025-12-12T16:19:06.478542345+00:00 stderr F I1212 16:19:06.478473 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[image.openshift.io/v1/ImageStream, namespace: openshift, name: tests, uid: 0e4e0fb5-de56-48a0-8d2f-34844c794213]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.478559046+00:00 stderr F I1212 16:19:06.478530 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-dns-operator, name: dns-operator, uid: 79c48787-e4ef-45fe-9cb2-8707e9cd7d61]" virtual=false 2025-12-12T16:19:06.478866313+00:00 stderr F I1212 16:19:06.478839 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-dns-operator, name: dns, uid: eb61aaa9-4e25-4e91-a620-88091d39e58f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.478905784+00:00 stderr F I1212 16:19:06.478890 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ingress-operator, name: ingress-operator, uid: 9ef68a54-5a41-4cec-8a8e-0de1dd9c9610]" virtual=false 2025-12-12T16:19:06.486836690+00:00 stderr F I1212 16:19:06.486739 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-etcd-operator, name: etcd-prometheus-rules, uid: 97274c51-fc95-4eb6-ad00-7a3b4f31f2ca]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.486900272+00:00 stderr F I1212 16:19:06.486870 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-scheduler-operator, name: prometheus-k8s, uid: 8906de79-0e02-4f0a-b8fe-92706ec66a89]" virtual=false 2025-12-12T16:19:06.491731651+00:00 stderr F I1212 16:19:06.491673 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:06.511560662+00:00 stderr F I1212 16:19:06.511472 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 0b5fb497-c46f-4ec8-b3ac-f01c6ed66367]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.511560662+00:00 stderr F I1212 16:19:06.511540 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-samples-operator, name: prometheus-k8s, uid: 39466c9d-67a7-445a-b328-b0ff5f22d5e2]" virtual=false 2025-12-12T16:19:06.514842323+00:00 stderr F I1212 16:19:06.514760 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-scheduler-operator, name: kube-scheduler-operator, uid: b334ae77-e6a0-41f9-b470-35a7cb6618a5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.514842323+00:00 stderr F I1212 16:19:06.514819 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console-operator, name: console-operator, uid: ca4d2ee6-7668-4b43-9cca-59d0d06f542a]" virtual=false 2025-12-12T16:19:06.527155687+00:00 stderr F I1212 16:19:06.527052 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-network-operator, name: openshift-network-operator-ipsec-rules, uid: 4b9d0264-6145-4803-a9c5-e7715dde16c7]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.527155687+00:00 stderr F I1212 16:19:06.527132 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-diagnostics, name: network-diagnostics, uid: edc23d92-6948-48f7-b843-ef543c0aec83]" virtual=false 2025-12-12T16:19:06.548474634+00:00 stderr F I1212 16:19:06.548387 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-cluster-machine-approver, name: machineapprover-rules, uid: 03750884-15c0-4b90-b038-07d324e83865]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.548511965+00:00 stderr F I1212 16:19:06.548460 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-controller-manager-operator, name: prometheus-k8s, uid: 4d74cb46-6482-4707-bff6-6547ff546015]" virtual=false 2025-12-12T16:19:06.555478607+00:00 stderr F I1212 16:19:06.555378 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 2877fbf1-ff5a-4aa9-b775-2605d4bccd96]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:19:06.555510048+00:00 stderr F I1212 16:19:06.555471 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ingress-operator, name: prometheus-k8s, uid: d6fad1f4-41ae-4595-bdb0-0d02b479944b]" virtual=false 2025-12-12T16:19:06.557304793+00:00 stderr F I1212 16:19:06.557242 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-ovn-kubernetes, name: master-rules, uid: a8f235d8-6055-4051-84b4-f75387ba159c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.557324123+00:00 stderr F I1212 16:19:06.557308 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: cluster-autoscaler, uid: 900244c5-73a7-4efd-89a9-4482faed30bf]" virtual=false 2025-12-12T16:19:06.560123102+00:00 stderr F I1212 16:19:06.560029 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-console-operator, name: cluster-monitoring-prometheus-rules, uid: 91547e96-31b3-460d-80d1-83b02bd7d873]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.560123102+00:00 stderr F I1212 16:19:06.560070 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-operator-lifecycle-manager, name: operator-lifecycle-manager-metrics, uid: 74c4b64c-8513-4ace-b791-ecc4897b9a04]" virtual=false 2025-12-12T16:19:06.564592163+00:00 stderr F I1212 16:19:06.564134 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-image-registry, name: image-registry-rules, uid: a1a802b1-bbfe-4655-8366-9f94d997c9ee]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.564592163+00:00 stderr F I1212 16:19:06.564559 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-vsphere-infra, name: host-networking-services, uid: 13dc3db2-b779-4912-a196-ee77feacea00]" virtual=false 2025-12-12T16:19:06.567088514+00:00 stderr F I1212 16:19:06.567035 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-machine-api, name: machine-api-operator-prometheus-rules, uid: ea38d425-d751-4336-a88d-4f52a8920d6e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.567088514+00:00 stderr F I1212 16:19:06.567070 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: 
openshift-kube-controller-manager, name: prometheus-k8s, uid: 7dff5aea-b0b2-4e11-8d0a-aaee6bc5c894]" virtual=false 2025-12-12T16:19:06.570438927+00:00 stderr F I1212 16:19:06.570394 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-ovn-kubernetes, name: networking-rules, uid: 31962b12-2774-4f24-9d5c-f55ad1ee66ac]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.570438927+00:00 stderr F I1212 16:19:06.570426 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console-user-settings, name: console-user-settings-admin, uid: 8a7d0ef2-903e-4505-8a80-ea97c68b4a10]" virtual=false 2025-12-12T16:19:06.573089463+00:00 stderr F I1212 16:19:06.573035 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-cluster-samples-operator, name: samples-operator-alerts, uid: b6a0a11d-7bbc-4d70-aa23-0e557edf8696]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.573162225+00:00 stderr F I1212 16:19:06.573132 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-image-registry, name: node-ca, uid: 7fd7f4d9-78fa-4f5a-9f7c-d1f6264ee09a]" virtual=false 2025-12-12T16:19:06.585307345+00:00 stderr F I1212 16:19:06.585207 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-marketplace, name: marketplace-alert-rules, uid: dc8ef252-29f9-421b-9166-5d8a6fb2cb84]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.585307345+00:00 stderr F I1212 16:19:06.585280 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-scheduler, name: prometheus-k8s, uid: 8fcc503b-4599-415b-b68a-1f79d1f6a02d]" virtual=false 2025-12-12T16:19:06.588963285+00:00 stderr F I1212 16:19:06.588618 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-multus, name: prometheus-k8s-rules, uid: f66d3c2d-8031-4960-b90a-2518392083d6]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.588963285+00:00 stderr F I1212 16:19:06.588684 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: machine-api-controllers, uid: 5160c796-e594-4e5c-a8fe-1ea7434a3ac9]" virtual=false 2025-12-12T16:19:06.592934163+00:00 stderr F I1212 16:19:06.592851 1 garbagecollector.go:567] "item has at least one 
existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-apiserver, name: kube-apiserver-performance-recording-rules, uid: f911c2bf-6b61-41f5-9e5a-f111e13fea13]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.592934163+00:00 stderr F I1212 16:19:06.592914 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-marketplace, name: marketplace-operator, uid: dcd874ef-723a-40d7-9425-f4383187a07d]" virtual=false 2025-12-12T16:19:06.601749161+00:00 stderr F I1212 16:19:06.601668 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-image-registry, name: imagestreams-rules, uid: 7f8f7459-95b2-46d1-a5f5-c98861a56f22]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.601749161+00:00 stderr F I1212 16:19:06.601736 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-nutanix-infra, name: host-networking-services, uid: 20a538b5-51d1-4f13-90b5-d59b3dc493bb]" virtual=false 2025-12-12T16:19:06.605211447+00:00 stderr F I1212 16:19:06.605135 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-ingress-operator, name: ingress-operator, uid: 25ced67d-4442-487b-9828-7b52d14815c0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.605271748+00:00 stderr F I1212 16:19:06.605220 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-controller-manager-operator, name: prometheus-k8s, uid: 902067e6-576b-4c67-b503-46ba31250666]" virtual=false 2025-12-12T16:19:06.605407682+00:00 stderr F I1212 16:19:06.605379 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-image-registry, name: image-registry-operator-alerts, uid: c76d6124-d19c-4231-b946-23f2c04f09c7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.605416762+00:00 stderr F I1212 16:19:06.605403 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-diagnostics, name: prometheus-k8s, uid: 6b65bfe8-9060-45f8-b70c-21d30115c6f6]" virtual=false 2025-12-12T16:19:06.612985309+00:00 stderr F I1212 16:19:06.612913 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-dns-operator, name: dns-operator, uid: 79c48787-e4ef-45fe-9cb2-8707e9cd7d61]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.613007950+00:00 stderr F I1212 16:19:06.612970 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 2c65389f-32ed-4b66-b0bf-f697ede62460]" virtual=false 2025-12-12T16:19:06.613968133+00:00 stderr F I1212 16:19:06.613360 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ingress-operator, name: ingress-operator, uid: 9ef68a54-5a41-4cec-8a8e-0de1dd9c9610]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.613980174+00:00 stderr F I1212 16:19:06.613964 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-route-controller-manager, name: prometheus-k8s, uid: b42e1181-7f9e-4c64-b0f6-f8615e0c9572]" virtual=false 2025-12-12T16:19:06.617694686+00:00 stderr F I1212 16:19:06.617558 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-scheduler-operator, name: prometheus-k8s, uid: 8906de79-0e02-4f0a-b8fe-92706ec66a89]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.617765507+00:00 stderr F I1212 16:19:06.617711 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-service-ca-operator, name: prometheus-k8s, uid: d5f0400c-c866-4461-91ca-1674c874bd8b]" virtual=false 2025-12-12T16:19:06.645566595+00:00 stderr F I1212 16:19:06.644990 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-samples-operator, name: prometheus-k8s, uid: 39466c9d-67a7-445a-b328-b0ff5f22d5e2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.645566595+00:00 stderr F I1212 16:19:06.645080 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-version, name: prometheus-k8s, uid: ffda4cc2-e551-454c-8bf1-be5336168d3f]" virtual=false 2025-12-12T16:19:06.647869202+00:00 stderr F I1212 16:19:06.647786 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console-operator, name: console-operator, uid: ca4d2ee6-7668-4b43-9cca-59d0d06f542a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.647913333+00:00 stderr F I1212 16:19:06.647872 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: openshift-network-public-role, uid: f85f6638-bb46-4041-bf1e-8d05c6621e59]" virtual=false 2025-12-12T16:19:06.660438642+00:00 stderr F I1212 16:19:06.660349 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-diagnostics, name: network-diagnostics, uid: edc23d92-6948-48f7-b843-ef543c0aec83]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.660464493+00:00 stderr F I1212 16:19:06.660439 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-apiserver, name: prometheus-k8s, uid: eae1ab86-3aac-4e4d-844b-b4aab669be67]" virtual=false 2025-12-12T16:19:06.680716294+00:00 stderr F I1212 16:19:06.680625 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-controller-manager-operator, name: prometheus-k8s, uid: 4d74cb46-6482-4707-bff6-6547ff546015]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.680738444+00:00 stderr F I1212 16:19:06.680703 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: 77e7db24-b3cb-49de-8ddd-4832c5da528e]" virtual=false 2025-12-12T16:19:06.687641275+00:00 stderr F I1212 16:19:06.687570 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ingress-operator, name: prometheus-k8s, uid: d6fad1f4-41ae-4595-bdb0-0d02b479944b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.687668286+00:00 stderr F I1212 16:19:06.687632 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-node-identity, name: network-node-identity-leases, uid: b346ce36-850b-4b91-84fd-9009ca037189]" virtual=false 2025-12-12T16:19:06.689919311+00:00 stderr F I1212 16:19:06.689852 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: cluster-autoscaler, uid: 900244c5-73a7-4efd-89a9-4482faed30bf]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.689935902+00:00 stderr F I1212 16:19:06.689915 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-node-limited, uid: 2e9cfa70-01d3-48a5-af7e-ac6d0f70489f]" virtual=false 
2025-12-12T16:19:06.693541601+00:00 stderr F I1212 16:19:06.693450 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-operator-lifecycle-manager, name: operator-lifecycle-manager-metrics, uid: 74c4b64c-8513-4ace-b791-ecc4897b9a04]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.693541601+00:00 stderr F I1212 16:19:06.693526 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ovn-kubernetes, name: prometheus-k8s, uid: 7a8de694-4980-44f0-938f-b8112d953aa4]" virtual=false 2025-12-12T16:19:06.697210361+00:00 stderr F I1212 16:19:06.697125 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-vsphere-infra, name: host-networking-services, uid: 13dc3db2-b779-4912-a196-ee77feacea00]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.697210361+00:00 stderr F I1212 16:19:06.697164 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-apiserver, name: prometheus-k8s, uid: 6dfe5fa1-4be3-47c2-aa7e-e055b2344b88]" virtual=false 2025-12-12T16:19:06.700929863+00:00 stderr F I1212 16:19:06.700878 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-controller-manager, name: prometheus-k8s, uid: 7dff5aea-b0b2-4e11-8d0a-aaee6bc5c894]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.700929863+00:00 stderr F I1212 16:19:06.700914 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: console-operator, uid: 1ef3c1b1-685a-4417-9b53-23d61c410f1e]" virtual=false 2025-12-12T16:19:06.704804789+00:00 stderr F I1212 16:19:06.704737 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console-user-settings, name: console-user-settings-admin, uid: 8a7d0ef2-903e-4505-8a80-ea97c68b4a10]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.704879441+00:00 stderr F I1212 16:19:06.704797 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console, name: prometheus-k8s, uid: 56ff8849-354a-4ede-88f2-4436b0a3bde5]" virtual=false 2025-12-12T16:19:06.707381863+00:00 stderr F I1212 16:19:06.707334 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-image-registry, name: 
node-ca, uid: 7fd7f4d9-78fa-4f5a-9f7c-d1f6264ee09a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.707381863+00:00 stderr F I1212 16:19:06.707366 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-apiserver-operator, name: prometheus-k8s, uid: f52bc923-0764-4ed9-8390-8f23448cb6a5]" virtual=false 2025-12-12T16:19:06.721670796+00:00 stderr F I1212 16:19:06.721572 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-scheduler, name: prometheus-k8s, uid: 8fcc503b-4599-415b-b68a-1f79d1f6a02d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.721670796+00:00 stderr F I1212 16:19:06.721629 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: coreos-pull-secret-reader, uid: 60a0a208-9961-463e-986f-3c7302769df7]" virtual=false 2025-12-12T16:19:06.723248495+00:00 stderr F I1212 16:19:06.723137 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: machine-api-controllers, uid: 5160c796-e594-4e5c-a8fe-1ea7434a3ac9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.723248495+00:00 stderr F I1212 16:19:06.723163 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-authentication-operator, name: prometheus-k8s, uid: a3099dea-12c5-441e-b16c-2f2c07408c1a]" virtual=false 2025-12-12T16:19:06.726521176+00:00 stderr F I1212 16:19:06.725591 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-marketplace, name: marketplace-operator, uid: dcd874ef-723a-40d7-9425-f4383187a07d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.726521176+00:00 stderr F I1212 16:19:06.725617 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: cc0f2db8-631b-40d1-928b-b96bb1c102ea]" virtual=false 2025-12-12T16:19:06.734381930+00:00 stderr F I1212 16:19:06.734283 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-nutanix-infra, name: host-networking-services, uid: 20a538b5-51d1-4f13-90b5-d59b3dc493bb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.734381930+00:00 stderr F I1212 16:19:06.734355 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-monitoring, name: console-operator, uid: e74750bf-8b67-4097-800a-1f62f1d728e2]" virtual=false 2025-12-12T16:19:06.735328464+00:00 stderr F I1212 16:19:06.735249 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-controller-manager-operator, name: prometheus-k8s, uid: 902067e6-576b-4c67-b503-46ba31250666]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.735328464+00:00 stderr F I1212 16:19:06.735304 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-openstack-infra, name: host-networking-services, uid: ba93ff46-031f-415f-9480-767006d718e7]" virtual=false 2025-12-12T16:19:06.740780709+00:00 stderr F I1212 16:19:06.740713 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-diagnostics, name: prometheus-k8s, uid: 6b65bfe8-9060-45f8-b70c-21d30115c6f6]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.740780709+00:00 stderr F I1212 16:19:06.740758 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift, name: copied-csv-viewer, uid: 1487dd1c-4a3b-4683-a18c-da8fd82bf4e5]" virtual=false 2025-12-12T16:19:06.741272271+00:00 stderr F I1212 16:19:06.741227 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 2c65389f-32ed-4b66-b0bf-f697ede62460]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.741272271+00:00 stderr F I1212 16:19:06.741257 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-storage-operator, name: csi-snapshot-controller-operator-role, uid: b75388c6-a2ac-4323-97aa-74085d52c30a]" virtual=false 2025-12-12T16:19:06.744405438+00:00 stderr F I1212 16:19:06.744331 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-route-controller-manager, name: prometheus-k8s, uid: b42e1181-7f9e-4c64-b0f6-f8615e0c9572]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.744432089+00:00 stderr F I1212 16:19:06.744400 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: machine-api-controllers, uid: 8913b400-f591-4bff-8f47-c4026984f25f]" virtual=false 
2025-12-12T16:19:06.750291114+00:00 stderr F I1212 16:19:06.750159 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-service-ca-operator, name: prometheus-k8s, uid: d5f0400c-c866-4461-91ca-1674c874bd8b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.750291114+00:00 stderr F I1212 16:19:06.750250 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console-operator, name: prometheus-k8s, uid: 253a0556-00ed-477b-b11e-727c668659f4]" virtual=false 2025-12-12T16:19:06.780234114+00:00 stderr F I1212 16:19:06.780053 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-version, name: prometheus-k8s, uid: ffda4cc2-e551-454c-8bf1-be5336168d3f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.780234114+00:00 stderr F I1212 16:19:06.780121 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-oauth-apiserver, name: prometheus-k8s, uid: 2e1f82c6-2d8a-417d-af7a-bc440909ea57]" virtual=false 2025-12-12T16:19:06.781269910+00:00 stderr F I1212 16:19:06.780479 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: openshift-network-public-role, uid: f85f6638-bb46-4041-bf1e-8d05c6621e59]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.781269910+00:00 stderr F I1212 16:19:06.780505 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-control-plane-limited, uid: f119fd7b-c9f9-41a3-9f6b-6b849a2877ff]" virtual=false 2025-12-12T16:19:06.798689160+00:00 stderr F I1212 16:19:06.798578 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-apiserver, name: prometheus-k8s, uid: eae1ab86-3aac-4e4d-844b-b4aab669be67]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.798689160+00:00 stderr F I1212 16:19:06.798644 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: ingress-operator, uid: 32745b86-26b1-4411-83c8-6769afe0ef84]" virtual=false 2025-12-12T16:19:06.814145652+00:00 stderr F I1212 16:19:06.814011 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: 
openshift-cluster-samples-operator, name: cluster-samples-operator, uid: 77e7db24-b3cb-49de-8ddd-4832c5da528e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.814145652+00:00 stderr F I1212 16:19:06.814106 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-controller-manager, name: prometheus-k8s, uid: ee712995-32ca-4cd3-addd-56c3dc98ac1b]" virtual=false 2025-12-12T16:19:06.820737475+00:00 stderr F I1212 16:19:06.820651 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-node-identity, name: network-node-identity-leases, uid: b346ce36-850b-4b91-84fd-9009ca037189]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.820758596+00:00 stderr F I1212 16:19:06.820727 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-dns-operator, name: prometheus-k8s, uid: c584e4f7-dc9a-4554-8568-6104eac0033f]" virtual=false 2025-12-12T16:19:06.823413422+00:00 stderr F I1212 16:19:06.823320 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-node-limited, uid: 2e9cfa70-01d3-48a5-af7e-ac6d0f70489f]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.823431522+00:00 stderr F I1212 16:19:06.823403 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-image-registry, name: prometheus-k8s, uid: eac9d6d9-611e-4c5c-8436-393910521b09]" virtual=false 2025-12-12T16:19:06.827345899+00:00 stderr F I1212 16:19:06.827163 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-apiserver, name: prometheus-k8s, uid: 6dfe5fa1-4be3-47c2-aa7e-e055b2344b88]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.827345899+00:00 stderr F I1212 16:19:06.827250 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-marketplace, name: openshift-marketplace-metrics, uid: 39ffab32-69c7-4cde-83a4-37704d2add4d]" virtual=false 2025-12-12T16:19:06.831206704+00:00 stderr F I1212 16:19:06.831105 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ovn-kubernetes, name: prometheus-k8s, uid: 7a8de694-4980-44f0-938f-b8112d953aa4]" 
owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.831206704+00:00 stderr F I1212 16:19:06.831144 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v2/OperatorCondition, namespace: openshift-operator-lifecycle-manager, name: packageserver, uid: d15615f2-2624-4c4a-9ddd-e19c1181e778]" virtual=false 2025-12-12T16:19:06.833645664+00:00 stderr F I1212 16:19:06.833515 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: console-operator, uid: 1ef3c1b1-685a-4417-9b53-23d61c410f1e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.833645664+00:00 stderr F I1212 16:19:06.833625 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-operator, name: prometheus-k8s, uid: ac0827c7-174d-4310-a132-b4ec6df8afc8]" virtual=false 2025-12-12T16:19:06.836811793+00:00 stderr F I1212 16:19:06.836752 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console, name: prometheus-k8s, uid: 56ff8849-354a-4ede-88f2-4436b0a3bde5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.836811793+00:00 stderr F I1212 16:19:06.836790 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 6ba7c917-27fc-4114-85d9-07825a840abc]" virtual=false 2025-12-12T16:19:06.841009957+00:00 stderr F I1212 16:19:06.840925 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-apiserver-operator, name: prometheus-k8s, uid: f52bc923-0764-4ed9-8390-8f23448cb6a5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.841009957+00:00 stderr F I1212 16:19:06.840978 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-authentication, name: prometheus-k8s, uid: 2a54be00-974b-4c50-8d1c-f7162000d609]" virtual=false 2025-12-12T16:19:06.850774218+00:00 stderr F I1212 16:19:06.850688 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: coreos-pull-secret-reader, uid: 60a0a208-9961-463e-986f-3c7302769df7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.850774218+00:00 stderr F I1212 16:19:06.850727 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-etcd-operator, name: prometheus-k8s, uid: d0fac19c-3e45-4cfd-a8ca-f121bc469295]" virtual=false 2025-12-12T16:19:06.853809453+00:00 stderr F I1212 16:19:06.853717 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-authentication-operator, name: prometheus-k8s, uid: a3099dea-12c5-441e-b16c-2f2c07408c1a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.853809453+00:00 stderr F I1212 16:19:06.853772 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: 8517d382-106b-43dd-b4d4-88aaaf262062]" virtual=false 2025-12-12T16:19:06.857217167+00:00 stderr F I1212 16:19:06.857095 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: cc0f2db8-631b-40d1-928b-b96bb1c102ea]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.857217167+00:00 stderr F I1212 16:19:06.857156 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: console-configmap-reader, uid: 5f8e3d3f-6c13-40f0-a1c4-0c2b0cee9aba]" virtual=false 2025-12-12T16:19:06.863376050+00:00 stderr F I1212 16:19:06.862954 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-monitoring, name: console-operator, uid: e74750bf-8b67-4097-800a-1f62f1d728e2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.863376050+00:00 stderr F I1212 16:19:06.863008 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: prometheus-k8s-machine-api-operator, uid: e8dd2e37-36fc-48cd-909e-a3b3e7472070]" virtual=false 2025-12-12T16:19:06.867890821+00:00 stderr F I1212 16:19:06.867837 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-openstack-infra, name: host-networking-services, uid: ba93ff46-031f-415f-9480-767006d718e7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.867909182+00:00 stderr F I1212 16:19:06.867880 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: console-public, uid: a4c72545-652f-48d3-b484-7d5f7a310e7e]" virtual=false 2025-12-12T16:19:06.871342066+00:00 stderr F I1212 16:19:06.871294 1 
garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift, name: copied-csv-viewer, uid: 1487dd1c-4a3b-4683-a18c-da8fd82bf4e5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.871361147+00:00 stderr F I1212 16:19:06.871331 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: 8a2a7e9d-fa82-48c8-842c-214567ad94ec]" virtual=false 2025-12-12T16:19:06.874112285+00:00 stderr F I1212 16:19:06.874068 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-storage-operator, name: csi-snapshot-controller-operator-role, uid: b75388c6-a2ac-4323-97aa-74085d52c30a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.874130405+00:00 stderr F I1212 16:19:06.874103 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kni-infra, name: host-networking-services, uid: 9814d6e2-5d57-4f6a-a185-4f4b991702ec]" virtual=false 2025-12-12T16:19:06.878874683+00:00 stderr F I1212 16:19:06.877172 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: machine-api-controllers, uid: 8913b400-f591-4bff-8f47-c4026984f25f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.878874683+00:00 stderr F I1212 16:19:06.877257 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: machine-api-operator, uid: b550c9f6-9756-4297-9337-0ffa9ce691e0]" virtual=false 2025-12-12T16:19:06.885979398+00:00 stderr F I1212 16:19:06.885894 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console-operator, name: prometheus-k8s, uid: 253a0556-00ed-477b-b11e-727c668659f4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.886007409+00:00 stderr F I1212 16:19:06.885980 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console, name: console-operator, uid: 770c2fcf-bc84-4b08-9f10-7f8e1853cee5]" virtual=false 2025-12-12T16:19:06.911763056+00:00 stderr F I1212 16:19:06.911457 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-oauth-apiserver, name: prometheus-k8s, uid: 2e1f82c6-2d8a-417d-af7a-bc440909ea57]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.911763056+00:00 stderr F I1212 16:19:06.911524 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-multus, name: whereabouts-cni, uid: 3e189f77-ec81-4804-a69e-3406cea72d88]" virtual=false 2025-12-12T16:19:06.913305694+00:00 stderr F I1212 16:19:06.912956 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-control-plane-limited, uid: f119fd7b-c9f9-41a3-9f6b-6b849a2877ff]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:06.913305694+00:00 stderr F I1212 16:19:06.912984 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-operator, name: prometheus-k8s, uid: 5dbba0e9-dd0c-4e5b-8f0d-878be7cd91a7]" virtual=false 2025-12-12T16:19:06.930599441+00:00 stderr F I1212 16:19:06.930413 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: ingress-operator, uid: 32745b86-26b1-4411-83c8-6769afe0ef84]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.930599441+00:00 stderr F I1212 16:19:06.930475 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-machine-approver, name: prometheus-k8s, uid: a79b075f-5ddc-4304-a7df-de0caa322fa5]" virtual=false 2025-12-12T16:19:06.947894459+00:00 stderr F I1212 16:19:06.947784 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-controller-manager, name: prometheus-k8s, uid: ee712995-32ca-4cd3-addd-56c3dc98ac1b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.947894459+00:00 stderr F I1212 16:19:06.947856 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-apiserver-operator, name: prometheus-k8s, uid: 6ff38600-b4d6-452f-82fc-0d24fdec9101]" virtual=false 2025-12-12T16:19:06.953834456+00:00 stderr F I1212 16:19:06.953777 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-dns-operator, name: prometheus-k8s, uid: c584e4f7-dc9a-4554-8568-6104eac0033f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.953859367+00:00 stderr F I1212 16:19:06.953826 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: machine-approver, uid: b908a159-2cd6-4878-ab3a-f9388352a4d6]" virtual=false 2025-12-12T16:19:06.958288826+00:00 stderr F I1212 16:19:06.958233 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-image-registry, name: prometheus-k8s, uid: eac9d6d9-611e-4c5c-8436-393910521b09]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.958316717+00:00 stderr F I1212 16:19:06.958268 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: prometheus-k8s-cluster-autoscaler-operator, uid: 4136e08c-fe31-425a-b7e9-3fafec0549bc]" virtual=false 2025-12-12T16:19:06.960917111+00:00 stderr F I1212 16:19:06.960860 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-marketplace, name: openshift-marketplace-metrics, uid: 39ffab32-69c7-4cde-83a4-37704d2add4d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.960937122+00:00 stderr F I1212 16:19:06.960904 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-monitoring, name: cluster-monitoring-operator-alert-customization, uid: 7a0fca50-57b4-41d8-922f-f52b7051910d]" virtual=false 2025-12-12T16:19:06.965800382+00:00 stderr F I1212 16:19:06.964984 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[operators.coreos.com/v2/OperatorCondition, namespace: openshift-operator-lifecycle-manager, name: packageserver, uid: d15615f2-2624-4c4a-9ddd-e19c1181e778]" owner=[{"apiVersion":"operators.coreos.com/v1alpha1","kind":"ClusterServiceVersion","name":"packageserver","uid":"09b3d4b2-fc47-4ee0-a331-67a39502cf21","controller":true,"blockOwnerDeletion":false}] 2025-12-12T16:19:06.965800382+00:00 stderr F I1212 16:19:06.965053 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-multus, name: prometheus-k8s, uid: ef46d29c-9cc9-4ab9-bcf0-8c1a79052bc9]" virtual=false 2025-12-12T16:19:06.967991516+00:00 stderr F I1212 16:19:06.967936 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-operator, name: prometheus-k8s, uid: ac0827c7-174d-4310-a132-b4ec6df8afc8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.968020307+00:00 stderr F I1212 16:19:06.967984 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: machine-api-controllers, uid: 909fe707-d07c-45ed-ac20-11669b612d43]" virtual=false 
2025-12-12T16:19:06.970900068+00:00 stderr F I1212 16:19:06.970848 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 6ba7c917-27fc-4114-85d9-07825a840abc]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.970900068+00:00 stderr F I1212 16:19:06.970884 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: console-operator, uid: 094d60d7-e5b5-4603-ad96-cde7975bd83f]" virtual=false 2025-12-12T16:19:06.981321916+00:00 stderr F I1212 16:19:06.981212 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-authentication, name: prometheus-k8s, uid: 2a54be00-974b-4c50-8d1c-f7162000d609]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.981321916+00:00 stderr F I1212 16:19:06.981277 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator-config, uid: 0460c09a-5deb-4529-9321-8eb18055c720]" virtual=false 2025-12-12T16:19:06.983988901+00:00 stderr F I1212 16:19:06.983918 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-etcd-operator, name: prometheus-k8s, uid: d0fac19c-3e45-4cfd-a8ca-f121bc469295]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.983988901+00:00 stderr F I1212 16:19:06.983969 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-7, uid: b08accdd-34d1-43ec-b9f0-be52165b199c]" virtual=false 2025-12-12T16:19:06.987899568+00:00 stderr F I1212 16:19:06.987825 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: 8517d382-106b-43dd-b4d4-88aaaf262062]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.987928799+00:00 stderr F I1212 16:19:06.987885 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-4, uid: 916d9208-ead5-4b5c-997f-82c9f885ea3e]" virtual=false 2025-12-12T16:19:06.992018440+00:00 stderr F I1212 16:19:06.991933 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: console-configmap-reader, uid: 
5f8e3d3f-6c13-40f0-a1c4-0c2b0cee9aba]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.992018440+00:00 stderr F I1212 16:19:06.991997 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-apiserver-operator, name: trusted-ca-bundle, uid: e4870e2c-b1a3-40ee-870d-6be3d8416fa3]" virtual=false 2025-12-12T16:19:06.994667485+00:00 stderr F I1212 16:19:06.994609 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: prometheus-k8s-machine-api-operator, uid: e8dd2e37-36fc-48cd-909e-a3b3e7472070]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:06.994667485+00:00 stderr F I1212 16:19:06.994654 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-apiserver, name: revision-status-1, uid: 98275840-6123-4655-aacc-f5208af82455]" virtual=false 2025-12-12T16:19:07.001422212+00:00 stderr F I1212 16:19:07.001357 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: console-public, uid: a4c72545-652f-48d3-b484-7d5f7a310e7e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.001444023+00:00 stderr F I1212 16:19:07.001411 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-3, uid: ae4fdaf7-2628-4d35-8d24-e03e4228a048]" virtual=false 2025-12-12T16:19:07.004644252+00:00 stderr F I1212 16:19:07.004573 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: 8a2a7e9d-fa82-48c8-842c-214567ad94ec]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.004644252+00:00 stderr F I1212 16:19:07.004624 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-9, uid: eb1cf43c-e507-46f9-97cb-9059b8b2bdd3]" virtual=false 2025-12-12T16:19:07.008672332+00:00 stderr F I1212 16:19:07.008603 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kni-infra, name: host-networking-services, uid: 9814d6e2-5d57-4f6a-a185-4f4b991702ec]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.008701562+00:00 stderr F I1212 16:19:07.008656 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: 
openshift-kube-controller-manager, name: revision-status-2, uid: 8901d31a-3867-4570-b0e5-ae6a39cff91d]" virtual=false 2025-12-12T16:19:07.011072561+00:00 stderr F I1212 16:19:07.011023 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: machine-api-operator, uid: b550c9f6-9756-4297-9337-0ffa9ce691e0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.011072561+00:00 stderr F I1212 16:19:07.011055 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-8, uid: 389efd57-822c-4936-b5ef-48f1f85dba11]" virtual=false 2025-12-12T16:19:07.017997442+00:00 stderr F I1212 16:19:07.017939 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console, name: console-operator, uid: 770c2fcf-bc84-4b08-9f10-7f8e1853cee5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.017997442+00:00 stderr F I1212 16:19:07.017972 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-3, uid: f02d59da-bbe4-4f8b-a03b-6d3ee409478c]" virtual=false 2025-12-12T16:19:07.041451702+00:00 stderr F I1212 16:19:07.041333 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-multus, name: whereabouts-cni, uid: 3e189f77-ec81-4804-a69e-3406cea72d88]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.041451702+00:00 stderr F I1212 16:19:07.041422 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-2, uid: f5063641-afc0-44e4-b98e-2062cca37149]" virtual=false 2025-12-12T16:19:07.100621395+00:00 stderr F I1212 16:19:07.100507 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-8, uid: 389efd57-822c-4936-b5ef-48f1f85dba11]" 2025-12-12T16:19:07.100621395+00:00 stderr F I1212 16:19:07.100558 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-1, uid: 4768d56f-c284-4d80-a18a-7f2fc97d36f1]" virtual=false 2025-12-12T16:19:07.100730718+00:00 stderr F I1212 16:19:07.100688 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-apiserver-operator, name: prometheus-k8s, uid: 6ff38600-b4d6-452f-82fc-0d24fdec9101]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.100740858+00:00 stderr F I1212 16:19:07.100723 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-6, uid: 9ceb9b72-2527-47c6-ae4e-f1d92f0aee7d]" virtual=false 2025-12-12T16:19:07.100828210+00:00 stderr F I1212 16:19:07.100786 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-3, uid: f02d59da-bbe4-4f8b-a03b-6d3ee409478c]" 2025-12-12T16:19:07.100828210+00:00 stderr F I1212 16:19:07.100812 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-7, uid: 36f6c219-47c9-453e-8adc-81a163318ca3]" virtual=false 2025-12-12T16:19:07.101307542+00:00 stderr F I1212 16:19:07.101097 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-9, uid: eb1cf43c-e507-46f9-97cb-9059b8b2bdd3]" 2025-12-12T16:19:07.101307542+00:00 stderr F I1212 16:19:07.101283 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-2, uid: 8901d31a-3867-4570-b0e5-ae6a39cff91d]" 2025-12-12T16:19:07.101322242+00:00 stderr F I1212 16:19:07.101304 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[network.operator.openshift.io/v1/OperatorPKI, namespace: openshift-ovn-kubernetes, name: ovn, uid: f3ff9d5c-ea26-43b0-91ee-b403a4b4d4f6]" virtual=false 2025-12-12T16:19:07.101322242+00:00 stderr F I1212 16:19:07.101302 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[network.operator.openshift.io/v1/OperatorPKI, namespace: openshift-network-node-identity, name: network-node-identity, uid: 19863168-4684-4a75-87e9-a586be776b3a]" virtual=false 2025-12-12T16:19:07.101417875+00:00 stderr F I1212 16:19:07.101168 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-7, uid: b08accdd-34d1-43ec-b9f0-be52165b199c]" 2025-12-12T16:19:07.101417875+00:00 stderr F I1212 16:19:07.101397 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-machine-api, name: allow-ingress-cluster, uid: bfdb4887-ce6d-45fa-ab0e-d0f8e92c6c08]" virtual=false 2025-12-12T16:19:07.101507957+00:00 stderr F I1212 16:19:07.101224 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: machine-approver, uid: b908a159-2cd6-4878-ab3a-f9388352a4d6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.101507957+00:00 stderr F I1212 16:19:07.101488 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-machine-api, name: allow-ingress-metrics, uid: 6a07247b-872b-40d1-baeb-45b0e1ec1d09]" virtual=false 2025-12-12T16:19:07.101567258+00:00 stderr F I1212 16:19:07.101533 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-monitoring, name: cluster-monitoring-operator-alert-customization, uid: 7a0fca50-57b4-41d8-922f-f52b7051910d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.101595249+00:00 stderr F I1212 16:19:07.101567 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: package-server-manager, uid: 6e667886-db08-4af0-9937-668c3a1a44aa]" virtual=false 2025-12-12T16:19:07.101595249+00:00 stderr F I1212 16:19:07.101583 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-apiserver, name: revision-status-1, uid: 98275840-6123-4655-aacc-f5208af82455]" 2025-12-12T16:19:07.101623590+00:00 stderr F I1212 16:19:07.101599 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operators, name: default-allow-all, uid: 232e5193-54e7-43f0-ad09-44fb764c3765]" virtual=false 2025-12-12T16:19:07.101857925+00:00 stderr F I1212 16:19:07.101815 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-3, uid: ae4fdaf7-2628-4d35-8d24-e03e4228a048]" 2025-12-12T16:19:07.101857925+00:00 stderr F I1212 16:19:07.101844 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-machine-api, name: allow-all-egress, uid: 69a87104-2112-439f-b270-5e735b3ccc9f]" virtual=false 2025-12-12T16:19:07.102053890+00:00 stderr F I1212 16:19:07.101990 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-4, uid: 916d9208-ead5-4b5c-997f-82c9f885ea3e]" 2025-12-12T16:19:07.102085851+00:00 stderr F I1212 16:19:07.102055 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-machine-api, name: allow-ingress-metal, uid: 02a4dba1-9dec-4992-a11c-4f4c03f7e69c]" virtual=false 2025-12-12T16:19:07.102475461+00:00 stderr F I1212 16:19:07.102422 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-multus, name: prometheus-k8s, uid: ef46d29c-9cc9-4ab9-bcf0-8c1a79052bc9]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.102475461+00:00 stderr F I1212 16:19:07.102461 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-machine-api, name: default-deny, uid: 6b29ed9b-3722-4226-8cf0-5f567d0b479b]" virtual=false 2025-12-12T16:19:07.102651275+00:00 stderr F I1212 16:19:07.102564 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-machine-approver, name: prometheus-k8s, uid: a79b075f-5ddc-4304-a7df-de0caa322fa5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.102651275+00:00 stderr F I1212 16:19:07.102634 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-operator, name: prometheus-k8s, uid: 5dbba0e9-dd0c-4e5b-8f0d-878be7cd91a7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.102685936+00:00 stderr F I1212 16:19:07.102659 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 60dc7d80-6fce-4c91-9304-81fc23842033]" virtual=false 2025-12-12T16:19:07.102744667+00:00 stderr F I1212 16:19:07.102655 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-authentication-operator, name: authentication-operator-config, uid: 29e06f16-a1a0-4841-8ef4-6f319b3136e7]" virtual=false 2025-12-12T16:19:07.107382142+00:00 stderr F I1212 16:19:07.106095 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-api, name: prometheus-k8s-cluster-autoscaler-operator, uid: 4136e08c-fe31-425a-b7e9-3fafec0549bc]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.107382142+00:00 stderr F I1212 16:19:07.106132 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[network.operator.openshift.io/v1/OperatorPKI, namespace: openshift-ovn-kubernetes, name: signer, uid: 491c5375-b7a4-4e86-8e7b-e538b36d6095]" virtual=false 2025-12-12T16:19:07.107382142+00:00 stderr F I1212 16:19:07.106410 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: machine-api-controllers, uid: 909fe707-d07c-45ed-ac20-11669b612d43]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.107382142+00:00 stderr F I1212 16:19:07.106478 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: default-deny-all-traffic, uid: 5d7de421-b53a-4577-ad68-ea7393dc755c]" virtual=false 
2025-12-12T16:19:07.107382142+00:00 stderr F I1212 16:19:07.106608 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: console-operator, uid: 094d60d7-e5b5-4603-ad96-cde7975bd83f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.107382142+00:00 stderr F I1212 16:19:07.106629 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicy, namespace: , name: openshift-ingress-operator-gatewayapi-crd-admission, uid: bf5fe191-baad-427b-99ce-a2fea2958a87]" virtual=false 2025-12-12T16:19:07.108830178+00:00 stderr F I1212 16:19:07.108752 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-2, uid: f5063641-afc0-44e4-b98e-2062cca37149]" 2025-12-12T16:19:07.108849188+00:00 stderr F I1212 16:19:07.108818 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicy, namespace: , name: openshift-storage-policy-validation, uid: 6a1e4dfe-42b3-45a0-8dce-5d973a54d3b1]" virtual=false 2025-12-12T16:19:07.111455053+00:00 stderr F I1212 16:19:07.111386 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-1, uid: 4768d56f-c284-4d80-a18a-7f2fc97d36f1]" 2025-12-12T16:19:07.111476063+00:00 stderr F I1212 16:19:07.111441 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicy, namespace: , name: user-defined-networks-namespace-label, uid: 0f7de771-84f1-494d-ab22-4f5de9678262]" virtual=false 2025-12-12T16:19:07.121777008+00:00 stderr F I1212 16:19:07.121668 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-6, uid: 9ceb9b72-2527-47c6-ae4e-f1d92f0aee7d]" 2025-12-12T16:19:07.121777008+00:00 stderr F I1212 16:19:07.121739 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: default-deny-all, uid: 26158c82-b93b-4b40-ad84-d46310003f34]" virtual=false 2025-12-12T16:19:07.121900891+00:00 stderr F I1212 16:19:07.121873 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-7, uid: 36f6c219-47c9-453e-8adc-81a163318ca3]" 2025-12-12T16:19:07.121909191+00:00 stderr F I1212 16:19:07.121892 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: marketplace-operator, uid: 9a95e898-e71f-46f4-ab23-671d4fbd8588]" virtual=false 2025-12-12T16:19:07.122979578+00:00 stderr F I1212 16:19:07.122785 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" 
logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator-config, uid: 0460c09a-5deb-4529-9321-8eb18055c720]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.122979578+00:00 stderr F I1212 16:19:07.122849 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-authentication-operator, name: service-ca-bundle, uid: 03ab4823-2e17-4cbb-91e3-53adebde54a8]" virtual=false 2025-12-12T16:19:07.156413624+00:00 stderr F I1212 16:19:07.153099 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-apiserver-operator, name: trusted-ca-bundle, uid: e4870e2c-b1a3-40ee-870d-6be3d8416fa3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:19:07.156413624+00:00 stderr F I1212 16:19:07.153161 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-authentication-operator, name: trusted-ca-bundle, uid: 3793e7a8-8895-4cac-aa6f-506f52f527ac]" virtual=false 2025-12-12T16:19:07.190231680+00:00 stderr F I1212 16:19:07.190111 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[network.operator.openshift.io/v1/OperatorPKI, namespace: openshift-network-node-identity, name: network-node-identity, uid: 19863168-4684-4a75-87e9-a586be776b3a]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.190231680+00:00 stderr F I1212 16:19:07.190213 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: unpack-bundles, uid: d387f0e1-78f2-4f21-8b20-eb44e79f2d4e]" virtual=false 2025-12-12T16:19:07.193846750+00:00 stderr F I1212 16:19:07.193780 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[network.operator.openshift.io/v1/OperatorPKI, namespace: openshift-ovn-kubernetes, name: ovn, uid: f3ff9d5c-ea26-43b0-91ee-b403a4b4d4f6]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.193866290+00:00 stderr F I1212 16:19:07.193851 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: catalog-operator, uid: 9af2c58b-44d2-43f2-bfa6-abf8a256a724]" virtual=false 2025-12-12T16:19:07.196742571+00:00 stderr F I1212 16:19:07.196668 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-machine-api, name: allow-ingress-cluster, uid: bfdb4887-ce6d-45fa-ab0e-d0f8e92c6c08]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.196763602+00:00 stderr F I1212 16:19:07.196744 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: olm-operator, uid: 85980716-45a2-4c0f-be28-4217783526a0]" virtual=false 2025-12-12T16:19:07.201397116+00:00 stderr F I1212 16:19:07.201345 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-machine-api, name: allow-ingress-metrics, uid: 6a07247b-872b-40d1-baeb-45b0e1ec1d09]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.201415017+00:00 stderr F I1212 16:19:07.201392 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: packageserver, uid: 91cc0f79-c51b-424a-a293-e2750a385ac6]" virtual=false 2025-12-12T16:19:07.204465842+00:00 stderr F I1212 16:19:07.204388 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: package-server-manager, uid: 6e667886-db08-4af0-9937-668c3a1a44aa]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.204485183+00:00 stderr F I1212 16:19:07.204467 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-machine-api, name: allow-ingress-kubeapi, uid: 61749708-bb52-4e70-92b1-807c81c26e6a]" virtual=false 2025-12-12T16:19:07.207654591+00:00 stderr F I1212 16:19:07.207589 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operators, name: default-allow-all, uid: 232e5193-54e7-43f0-ad09-44fb764c3765]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.207654591+00:00 stderr F I1212 16:19:07.207640 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-4, uid: 3c0c45f7-a4ba-4891-8b19-1900e7bfb64c]" virtual=false 2025-12-12T16:19:07.210837200+00:00 stderr F I1212 16:19:07.210798 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-machine-api, name: allow-all-egress, uid: 69a87104-2112-439f-b270-5e735b3ccc9f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.210861570+00:00 stderr F I1212 16:19:07.210828 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[v1/ConfigMap, namespace: openshift-cluster-machine-approver, name: kube-rbac-proxy, uid: 78f8b8c9-a63a-46ff-8363-9dc3399ca474]" virtual=false 2025-12-12T16:19:07.213318401+00:00 stderr F I1212 16:19:07.213255 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-machine-api, name: allow-ingress-metal, uid: 02a4dba1-9dec-4992-a11c-4f4c03f7e69c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.213318401+00:00 stderr F I1212 16:19:07.213303 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Secret, namespace: openshift-etcd-operator, name: etcd-client, uid: 3f64a35c-cf8e-418c-a4a0-bf68cea2beb1]" virtual=false 2025-12-12T16:19:07.230567808+00:00 stderr F I1212 16:19:07.229245 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[network.operator.openshift.io/v1/OperatorPKI, namespace: openshift-ovn-kubernetes, name: signer, uid: 491c5375-b7a4-4e86-8e7b-e538b36d6095]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.230567808+00:00 stderr F I1212 16:19:07.229301 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-5, uid: c91d53ac-6902-4aea-bc79-72ae21f68713]" virtual=false 2025-12-12T16:19:07.230567808+00:00 stderr F I1212 16:19:07.229494 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 60dc7d80-6fce-4c91-9304-81fc23842033]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.230567808+00:00 stderr F I1212 16:19:07.229517 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: admin-gates, uid: 3680cec0-5c0f-4291-87b8-30dabaa1c6bf]" virtual=false 2025-12-12T16:19:07.230610799+00:00 stderr F I1212 16:19:07.230594 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-authentication-operator, name: authentication-operator-config, uid: 29e06f16-a1a0-4841-8ef4-6f319b3136e7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.230632579+00:00 stderr F I1212 16:19:07.230615 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-cluster-total, uid: d92d6a2e-2713-4f23-af8b-f867e1f08d0f]" virtual=false 2025-12-12T16:19:07.231262125+00:00 stderr F I1212 16:19:07.231227 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" 
item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-machine-api, name: default-deny, uid: 6b29ed9b-3722-4226-8cf0-5f567d0b479b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.231276135+00:00 stderr F I1212 16:19:07.231261 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-cluster, uid: e5c9ba21-d583-4acb-b833-94f1198d5ab5]" virtual=false 2025-12-12T16:19:07.233948131+00:00 stderr F I1212 16:19:07.233904 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: default-deny-all-traffic, uid: 5d7de421-b53a-4577-ad68-ea7393dc755c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.233948131+00:00 stderr F I1212 16:19:07.233929 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-namespace, uid: 8d2edc3f-4c5c-4ec5-9176-d9cc9534b507]" virtual=false 2025-12-12T16:19:07.239002546+00:00 stderr F I1212 16:19:07.238948 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicy, namespace: , name: openshift-ingress-operator-gatewayapi-crd-admission, uid: bf5fe191-baad-427b-99ce-a2fea2958a87]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.239020517+00:00 stderr F I1212 16:19:07.238997 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-node, uid: d81965cd-f883-4273-8d60-72d4cb125594]" virtual=false 2025-12-12T16:19:07.242293817+00:00 stderr F I1212 16:19:07.242245 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicy, namespace: , name: openshift-storage-policy-validation, uid: 6a1e4dfe-42b3-45a0-8dce-5d973a54d3b1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.242317738+00:00 stderr F I1212 16:19:07.242285 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-pod, uid: a6a05fcd-e149-4da2-8605-ec8dcde98967]" virtual=false 2025-12-12T16:19:07.244253346+00:00 stderr F I1212 16:19:07.244203 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicy, namespace: , name: user-defined-networks-namespace-label, uid: 0f7de771-84f1-494d-ab22-4f5de9678262]" 
owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.244253346+00:00 stderr F I1212 16:19:07.244239 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-workload, uid: cb9cdd2c-716a-473e-b72e-73bf990cf716]" virtual=false 2025-12-12T16:19:07.247253070+00:00 stderr F I1212 16:19:07.247208 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: default-deny-all, uid: 26158c82-b93b-4b40-ad84-d46310003f34]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.247272261+00:00 stderr F I1212 16:19:07.247239 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Secret, namespace: openshift-operator-lifecycle-manager, name: pprof-cert, uid: 78c31177-72ae-4588-82df-59ba321a257b]" virtual=false 2025-12-12T16:19:07.250124421+00:00 stderr F I1212 16:19:07.250051 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: marketplace-operator, uid: 9a95e898-e71f-46f4-ab23-671d4fbd8588]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.250124421+00:00 stderr F I1212 16:19:07.250103 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-workloads-namespace, uid: b8105616-68c4-435a-9245-55e9c46771ae]" virtual=false 2025-12-12T16:19:07.255233937+00:00 stderr F I1212 16:19:07.254789 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-authentication-operator, name: service-ca-bundle, uid: 03ab4823-2e17-4cbb-91e3-53adebde54a8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.255233937+00:00 stderr F I1212 16:19:07.254844 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-namespace-by-pod, uid: 45990539-39a3-4ba8-a3c5-7f8a8d43720f]" virtual=false 2025-12-12T16:19:07.273446208+00:00 stderr F I1212 16:19:07.273324 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-4, uid: 3c0c45f7-a4ba-4891-8b19-1900e7bfb64c]" 2025-12-12T16:19:07.273446208+00:00 stderr F I1212 16:19:07.273392 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-node-cluster-rsrc-use, uid: fa3cd139-cca8-48f7-8dfa-a5b39e2abb93]" virtual=false 2025-12-12T16:19:07.283986508+00:00 stderr F I1212 16:19:07.283867 1 
garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-authentication-operator, name: trusted-ca-bundle, uid: 3793e7a8-8895-4cac-aa6f-506f52f527ac]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:19:07.283986508+00:00 stderr F I1212 16:19:07.283943 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-node-rsrc-use, uid: 569e3e4e-b085-47ef-9c8c-21ec4ca092f8]" virtual=false 2025-12-12T16:19:07.286026199+00:00 stderr F I1212 16:19:07.285959 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-5, uid: c91d53ac-6902-4aea-bc79-72ae21f68713]" 2025-12-12T16:19:07.286044079+00:00 stderr F I1212 16:19:07.286017 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-pod-total, uid: 453139e4-0319-4726-82b1-4f496e81434b]" virtual=false 2025-12-12T16:19:07.326825657+00:00 stderr F I1212 16:19:07.326699 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: unpack-bundles, uid: d387f0e1-78f2-4f21-8b20-eb44e79f2d4e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.326825657+00:00 stderr F I1212 16:19:07.326764 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-prometheus, uid: 51e84290-d8c9-4d3b-9936-601863537791]" virtual=false 2025-12-12T16:19:07.329321539+00:00 stderr F I1212 16:19:07.329164 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: catalog-operator, uid: 9af2c58b-44d2-43f2-bfa6-abf8a256a724]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.329321539+00:00 stderr F I1212 16:19:07.329259 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-10, uid: abc26192-80da-4229-9267-91d2e7c7eb5f]" virtual=false 2025-12-12T16:19:07.331463152+00:00 stderr F I1212 16:19:07.331379 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: olm-operator, uid: 85980716-45a2-4c0f-be28-4217783526a0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.331463152+00:00 stderr F I1212 16:19:07.331424 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-5, uid: b4c3ff81-9e93-4c5b-b530-e6a169c05a01]" virtual=false 2025-12-12T16:19:07.338036385+00:00 stderr F I1212 16:19:07.337901 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: packageserver, uid: 91cc0f79-c51b-424a-a293-e2750a385ac6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.338036385+00:00 stderr F I1212 16:19:07.337977 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: etcd-dashboard, uid: 25df2d95-6900-4ccf-ae26-d8063c384f29]" virtual=false 2025-12-12T16:19:07.339823639+00:00 stderr F I1212 16:19:07.339744 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-machine-api, name: allow-ingress-kubeapi, uid: 61749708-bb52-4e70-92b1-807c81c26e6a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.339823639+00:00 stderr F I1212 16:19:07.339800 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-multus, name: monitor-network, uid: 9f83a4ef-cd08-40b7-a5a3-f1f39610e4bf]" virtual=false 2025-12-12T16:19:07.344104245+00:00 stderr F I1212 16:19:07.344018 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-cluster-machine-approver, name: kube-rbac-proxy, uid: 78f8b8c9-a63a-46ff-8363-9dc3399ca474]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.344137885+00:00 stderr F I1212 16:19:07.344091 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: grafana-dashboard-apiserver-performance, uid: 4b2354d3-44f4-4ce2-aee8-a2fc3853a98b]" virtual=false 2025-12-12T16:19:07.348165905+00:00 stderr F I1212 16:19:07.348096 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Secret, namespace: openshift-etcd-operator, name: etcd-client, uid: 3f64a35c-cf8e-418c-a4a0-bf68cea2beb1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:19:07.348205926+00:00 stderr F I1212 16:19:07.348153 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-controller-manager, name: prometheus-k8s, uid: 2adafd1c-c1b3-462c-9c6f-384653b668a8]" virtual=false 2025-12-12T16:19:07.358330526+00:00 stderr F I1212 16:19:07.358232 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: 
openshift-config-managed, name: admin-gates, uid: 3680cec0-5c0f-4291-87b8-30dabaa1c6bf]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.358330526+00:00 stderr F I1212 16:19:07.358296 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-etcd-operator, name: prometheus-k8s, uid: 5bdcbcd3-fe52-498d-ae10-72a5939afa87]" virtual=false 2025-12-12T16:19:07.361307210+00:00 stderr F I1212 16:19:07.361253 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-cluster-total, uid: d92d6a2e-2713-4f23-af8b-f867e1f08d0f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.361307210+00:00 stderr F I1212 16:19:07.361284 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: machine-api-controllers, uid: 344d915b-b955-4603-a766-f7a1f3fbba7e]" virtual=false 2025-12-12T16:19:07.364199821+00:00 stderr F I1212 16:19:07.364103 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-cluster, uid: e5c9ba21-d583-4acb-b833-94f1198d5ab5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.364217712+00:00 stderr F I1212 16:19:07.364197 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-apiserver-operator, name: prometheus-k8s, uid: 6be26842-4041-4228-90d5-6e63e6300edd]" virtual=false 2025-12-12T16:19:07.368376725+00:00 stderr F I1212 16:19:07.368292 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-namespace, uid: 8d2edc3f-4c5c-4ec5-9176-d9cc9534b507]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.368376725+00:00 stderr F I1212 16:19:07.368362 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console, name: prometheus-k8s, uid: 6da3f11b-9f14-4506-a933-bb3207d8d635]" virtual=false 2025-12-12T16:19:07.371647145+00:00 stderr F I1212 16:19:07.371553 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-node, uid: d81965cd-f883-4273-8d60-72d4cb125594]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.371666476+00:00 stderr F I1212 16:19:07.371644 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: console-public, uid: 6a7460b0-4e35-480e-a71b-377d30ca36bd]" virtual=false 2025-12-12T16:19:07.374439604+00:00 stderr F I1212 16:19:07.374378 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-workload, uid: cb9cdd2c-716a-473e-b72e-73bf990cf716]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.374439604+00:00 stderr F I1212 16:19:07.374421 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-authentication, name: prometheus-k8s, uid: 53fed5e4-a2fc-47bb-8401-e9530ad5c771]" virtual=false 2025-12-12T16:19:07.377075860+00:00 stderr F I1212 16:19:07.377011 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Secret, namespace: openshift-operator-lifecycle-manager, name: pprof-cert, uid: 78c31177-72ae-4588-82df-59ba321a257b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:19:07.377075860+00:00 stderr F I1212 16:19:07.377058 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-user-workload-monitoring, name: cluster-monitoring-operator, uid: b9104e38-7421-465a-bf98-b17ca0561bc6]" virtual=false 2025-12-12T16:19:07.382553345+00:00 stderr F I1212 16:19:07.382456 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-pod, uid: a6a05fcd-e149-4da2-8605-ec8dcde98967]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.382553345+00:00 stderr F I1212 16:19:07.382526 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: console-operator, uid: d129014f-8b06-4143-b4fa-3a0f098c6559]" virtual=false 2025-12-12T16:19:07.384533684+00:00 stderr F I1212 16:19:07.384450 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-workloads-namespace, uid: b8105616-68c4-435a-9245-55e9c46771ae]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.384562415+00:00 stderr F I1212 16:19:07.384531 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-operator, name: prometheus-k8s, uid: 398479e3-c16d-452b-8f5e-f3c97e9918d2]" virtual=false 2025-12-12T16:19:07.389249611+00:00 stderr F I1212 16:19:07.389010 1 
garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-namespace-by-pod, uid: 45990539-39a3-4ba8-a3c5-7f8a8d43720f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.389314072+00:00 stderr F I1212 16:19:07.389208 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-node-identity, name: system:openshift:scc:hostnetwork-v2, uid: 7e7e52cd-a43c-47dd-b267-7ac721bc6113]" virtual=false 2025-12-12T16:19:07.394304506+00:00 stderr F I1212 16:19:07.394204 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-10, uid: abc26192-80da-4229-9267-91d2e7c7eb5f]" 2025-12-12T16:19:07.394352667+00:00 stderr F I1212 16:19:07.394284 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: console, uid: 6292b407-20e8-47a5-be99-77a5e1f7a896]" virtual=false 2025-12-12T16:19:07.396657754+00:00 stderr F I1212 16:19:07.396588 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-5, uid: b4c3ff81-9e93-4c5b-b530-e6a169c05a01]" 2025-12-12T16:19:07.396700925+00:00 stderr F I1212 16:19:07.396659 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift, name: copied-csv-viewers, uid: 7426cf47-f08c-4e75-99c5-9b5462dc97c2]" virtual=false 2025-12-12T16:19:07.407745948+00:00 stderr F I1212 16:19:07.407621 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-node-cluster-rsrc-use, uid: fa3cd139-cca8-48f7-8dfa-a5b39e2abb93]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.407745948+00:00 stderr F I1212 16:19:07.407699 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ingress-operator, name: ingress-operator, uid: 58ce10a7-6f25-4811-bff2-bc4a3cc330ee]" virtual=false 2025-12-12T16:19:07.419011216+00:00 stderr F I1212 16:19:07.418902 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-node-rsrc-use, uid: 569e3e4e-b085-47ef-9c8c-21ec4ca092f8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.419011216+00:00 stderr F I1212 16:19:07.418969 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-image-registry, name: cluster-image-registry-operator, 
uid: 28e1dbac-2835-4e00-bbcd-c559366575bb]" virtual=false 2025-12-12T16:19:07.421347474+00:00 stderr F I1212 16:19:07.421290 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-pod-total, uid: 453139e4-0319-4726-82b1-4f496e81434b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.421347474+00:00 stderr F I1212 16:19:07.421329 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-nutanix-infra, name: host-networking-system-node, uid: 24dfe867-53c3-48b6-bbd7-5225bd207aed]" virtual=false 2025-12-12T16:19:07.457801915+00:00 stderr F I1212 16:19:07.457693 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-prometheus, uid: 51e84290-d8c9-4d3b-9936-601863537791]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.457801915+00:00 stderr F I1212 16:19:07.457755 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 68a586c5-c3d1-4cfe-9783-ca5e2027b642]" virtual=false 2025-12-12T16:19:07.467945156+00:00 stderr F I1212 16:19:07.467815 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: etcd-dashboard, uid: 25df2d95-6900-4ccf-ae26-d8063c384f29]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.467945156+00:00 stderr F I1212 16:19:07.467876 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-machine-approver, name: prometheus-k8s, uid: e276478b-d4fd-47f7-887b-1c90c397d03e]" virtual=false 2025-12-12T16:19:07.472030807+00:00 stderr F I1212 16:19:07.471885 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-multus, name: monitor-network, uid: 9f83a4ef-cd08-40b7-a5a3-f1f39610e4bf]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.472030807+00:00 stderr F I1212 16:19:07.471966 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-version, name: prometheus-k8s, uid: 8406bc8f-05b0-490b-b240-1ec3ce9cbd3f]" virtual=false 2025-12-12T16:19:07.479517592+00:00 stderr F I1212 16:19:07.479449 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: 
openshift-config-managed, name: grafana-dashboard-apiserver-performance, uid: 4b2354d3-44f4-4ce2-aee8-a2fc3853a98b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.479537443+00:00 stderr F I1212 16:19:07.479509 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: d68d9d3f-786d-4acb-9f43-c353ffe1644e]" virtual=false 2025-12-12T16:19:07.483222254+00:00 stderr F I1212 16:19:07.483115 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-controller-manager, name: prometheus-k8s, uid: 2adafd1c-c1b3-462c-9c6f-384653b668a8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.483222254+00:00 stderr F I1212 16:19:07.483159 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-samples-operator, name: prometheus-k8s, uid: b09df175-99ba-4d3c-bace-ce35d4e83009]" virtual=false 2025-12-12T16:19:07.491237652+00:00 stderr F I1212 16:19:07.491131 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-etcd-operator, name: prometheus-k8s, uid: 5bdcbcd3-fe52-498d-ae10-72a5939afa87]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.491237652+00:00 stderr F I1212 16:19:07.491208 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ovn-kubernetes, name: prometheus-k8s, uid: 68c8d2f6-d2e7-4ba0-8c6d-3a210481e700]" virtual=false 2025-12-12T16:19:07.495108678+00:00 stderr F I1212 16:19:07.495056 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: machine-api-controllers, uid: 344d915b-b955-4603-a766-f7a1f3fbba7e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.495108678+00:00 stderr F I1212 16:19:07.495093 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-scheduler, name: prometheus-k8s, uid: 6bd53a77-3f4a-4220-bb5c-858e7a9b0cd9]" virtual=false 2025-12-12T16:19:07.497707422+00:00 stderr F I1212 16:19:07.497659 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-apiserver-operator, name: prometheus-k8s, uid: 6be26842-4041-4228-90d5-6e63e6300edd]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.497707422+00:00 stderr F I1212 16:19:07.497690 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-diagnostics, name: prometheus-k8s, uid: 6695f592-c1ca-4af1-8eaf-0e0b0d8959fc]" virtual=false 2025-12-12T16:19:07.501089586+00:00 stderr F I1212 16:19:07.500997 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console, name: prometheus-k8s, uid: 6da3f11b-9f14-4506-a933-bb3207d8d635]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.501089586+00:00 stderr F I1212 16:19:07.501063 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-route-controller-manager, name: prometheus-k8s, uid: d3fbd83b-5d77-4307-a4ba-c9c842b63b86]" virtual=false 2025-12-12T16:19:07.503867924+00:00 stderr F I1212 16:19:07.503791 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: console-public, uid: 6a7460b0-4e35-480e-a71b-377d30ca36bd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.503867924+00:00 stderr F I1212 16:19:07.503824 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: ingress-operator, uid: a361b360-6755-4cb9-b68b-2998a0713612]" virtual=false 2025-12-12T16:19:07.509474343+00:00 stderr F I1212 16:19:07.509362 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-authentication, name: prometheus-k8s, uid: 53fed5e4-a2fc-47bb-8401-e9530ad5c771]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.509474343+00:00 stderr F I1212 16:19:07.509427 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: cluster-autoscaler, uid: a90481e3-8aed-4ca6-b853-79e50236fafe]" virtual=false 2025-12-12T16:19:07.512081217+00:00 stderr F I1212 16:19:07.511379 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-user-workload-monitoring, name: cluster-monitoring-operator, uid: b9104e38-7421-465a-bf98-b17ca0561bc6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.512081217+00:00 stderr F I1212 16:19:07.511496 1 garbagecollector.go:501] "Processing 
item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-multus, name: multus-whereabouts, uid: 306d9ccf-da01-46ab-b78e-02458b50939f]" virtual=false 2025-12-12T16:19:07.514076057+00:00 stderr F I1212 16:19:07.514022 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: console-operator, uid: d129014f-8b06-4143-b4fa-3a0f098c6559]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.514076057+00:00 stderr F I1212 16:19:07.514058 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: console-configmap-reader, uid: e3dfcc46-dd9b-4c39-b4af-1331760e7f87]" virtual=false 2025-12-12T16:19:07.516662921+00:00 stderr F I1212 16:19:07.516491 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-operator, name: prometheus-k8s, uid: 398479e3-c16d-452b-8f5e-f3c97e9918d2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.516662921+00:00 stderr F I1212 16:19:07.516527 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-image-registry, name: node-ca, uid: 5f291cdd-c99d-4124-aa53-a0a0ab505e3f]" virtual=false 2025-12-12T16:19:07.519588843+00:00 stderr F I1212 16:19:07.519506 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-node-identity, name: system:openshift:scc:hostnetwork-v2, uid: 7e7e52cd-a43c-47dd-b267-7ac721bc6113]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.519588843+00:00 stderr F I1212 16:19:07.519536 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-monitoring, name: console-operator, uid: babad3be-1b02-48ca-8bc3-64551155108c]" virtual=false 2025-12-12T16:19:07.526729469+00:00 stderr F I1212 16:19:07.526598 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: console, uid: 6292b407-20e8-47a5-be99-77a5e1f7a896]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.526729469+00:00 stderr F I1212 16:19:07.526641 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-apiserver-operator, name: prometheus-k8s, uid: 376b7f3a-eb8d-4641-a54e-19994092fb5d]" virtual=false 2025-12-12T16:19:07.530150864+00:00 stderr F I1212 
16:19:07.530054 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift, name: copied-csv-viewers, uid: 7426cf47-f08c-4e75-99c5-9b5462dc97c2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.530150864+00:00 stderr F I1212 16:19:07.530131 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-control-plane-limited, uid: 80be51ee-dda4-483c-b1c5-948301f3c52e]" virtual=false 2025-12-12T16:19:07.540256054+00:00 stderr F I1212 16:19:07.540167 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ingress-operator, name: ingress-operator, uid: 58ce10a7-6f25-4811-bff2-bc4a3cc330ee]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.540256054+00:00 stderr F I1212 16:19:07.540230 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-scheduler-operator, name: prometheus-k8s, uid: 2ad12ea6-7b88-4bb1-ad15-8c6ea12eaa71]" virtual=false 2025-12-12T16:19:07.551578174+00:00 stderr F I1212 16:19:07.551457 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: 28e1dbac-2835-4e00-bbcd-c559366575bb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.551578174+00:00 stderr F I1212 16:19:07.551512 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-node-identity, name: network-node-identity-leases, uid: 6546e654-a56c-40cd-b5e5-b0af53dc1922]" virtual=false 2025-12-12T16:19:07.554994378+00:00 stderr F I1212 16:19:07.554912 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-nutanix-infra, name: host-networking-system-node, uid: 24dfe867-53c3-48b6-bbd7-5225bd207aed]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.555016719+00:00 stderr F I1212 16:19:07.554983 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-nodes-identity-limited, uid: e866be9e-7de1-4a23-9ef1-15d71a5333a5]" virtual=false 2025-12-12T16:19:07.590872635+00:00 stderr F I1212 16:19:07.590770 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" 
item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 68a586c5-c3d1-4cfe-9783-ca5e2027b642]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.590872635+00:00 stderr F I1212 16:19:07.590835 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-controller-manager-operator, name: prometheus-k8s, uid: 902b45ff-a672-4079-bc39-9c3db9fbed24]" virtual=false 2025-12-12T16:19:07.600513774+00:00 stderr F I1212 16:19:07.600439 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-machine-approver, name: prometheus-k8s, uid: e276478b-d4fd-47f7-887b-1c90c397d03e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.600513774+00:00 stderr F I1212 16:19:07.600494 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-marketplace, name: marketplace-operator, uid: 8f0da541-5375-4494-a502-94aa8540020e]" virtual=false 2025-12-12T16:19:07.603847716+00:00 stderr F I1212 16:19:07.603802 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-version, name: prometheus-k8s, uid: 8406bc8f-05b0-490b-b240-1ec3ce9cbd3f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.603847716+00:00 stderr F I1212 16:19:07.603829 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift, name: cluster-samples-operator-openshift-edit, uid: 995e7439-f4f4-4fa5-82e0-6afcc588fd52]" virtual=false 2025-12-12T16:19:07.610051919+00:00 stderr F I1212 16:19:07.609999 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: d68d9d3f-786d-4acb-9f43-c353ffe1644e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.610051919+00:00 stderr F I1212 16:19:07.610030 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: cluster-samples-operator-openshift-config-secret-reader, uid: 8a0fff18-901e-44e5-a8ee-842ba68cfac0]" virtual=false 2025-12-12T16:19:07.613629908+00:00 stderr F I1212 16:19:07.613582 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-samples-operator, name: prometheus-k8s, uid: b09df175-99ba-4d3c-bace-ce35d4e83009]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.613629908+00:00 stderr F I1212 16:19:07.613607 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: machine-api-controllers, uid: 25963d68-1e2f-4b0e-a08f-0ce996f7b51f]" virtual=false 2025-12-12T16:19:07.624043995+00:00 stderr F I1212 16:19:07.623980 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ovn-kubernetes, name: prometheus-k8s, uid: 68c8d2f6-d2e7-4ba0-8c6d-3a210481e700]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.624043995+00:00 stderr F I1212 16:19:07.624021 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: openshift-network-public-role-binding, uid: 3f544260-b7f2-4307-aa10-76c309175c0a]" virtual=false 2025-12-12T16:19:07.626598499+00:00 stderr F I1212 16:19:07.626528 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-scheduler, name: prometheus-k8s, uid: 6bd53a77-3f4a-4220-bb5c-858e7a9b0cd9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.626598499+00:00 stderr F I1212 16:19:07.626558 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: network-diagnostics, uid: b6e21cd4-4c3d-401f-92cc-7c008ff856b3]" virtual=false 2025-12-12T16:19:07.629540661+00:00 stderr F I1212 16:19:07.629489 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-diagnostics, name: prometheus-k8s, uid: 6695f592-c1ca-4af1-8eaf-0e0b0d8959fc]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.629540661+00:00 stderr F I1212 16:19:07.629516 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-image-registry, name: prometheus-k8s, uid: 531f27d1-3010-40b8-a5d8-aaca67f7d382]" virtual=false 2025-12-12T16:19:07.635318824+00:00 stderr F I1212 16:19:07.635260 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-route-controller-manager, name: prometheus-k8s, uid: d3fbd83b-5d77-4307-a4ba-c9c842b63b86]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.635318824+00:00 stderr F I1212 
16:19:07.635288 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console-operator, name: prometheus-k8s, uid: 5702339a-1e79-4ada-a460-4e2eff82de4d]" virtual=false 2025-12-12T16:19:07.637264112+00:00 stderr F I1212 16:19:07.637197 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: ingress-operator, uid: a361b360-6755-4cb9-b68b-2998a0713612]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.637264112+00:00 stderr F I1212 16:19:07.637221 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: 0c867af6-79d5-465d-b2ab-1be4f069171f]" virtual=false 2025-12-12T16:19:07.642002289+00:00 stderr F I1212 16:19:07.641881 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: cluster-autoscaler, uid: a90481e3-8aed-4ca6-b853-79e50236fafe]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.642002289+00:00 stderr F I1212 16:19:07.641924 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console, name: console-operator, uid: 1d0d1e48-a9e1-4e4a-8a48-181bf893de48]" virtual=false 2025-12-12T16:19:07.646499081+00:00 stderr F I1212 16:19:07.646436 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-multus, name: multus-whereabouts, uid: 306d9ccf-da01-46ab-b78e-02458b50939f]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.646518501+00:00 stderr F I1212 16:19:07.646489 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-dns-operator, name: dns-operator, uid: 27eea001-0ad1-463e-b0b1-338bbbd06608]" virtual=false 2025-12-12T16:19:07.651241528+00:00 stderr F I1212 16:19:07.651140 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: console-configmap-reader, uid: e3dfcc46-dd9b-4c39-b4af-1331760e7f87]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.651241528+00:00 stderr F I1212 16:19:07.651222 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-dns-operator, name: prometheus-k8s, uid: 2ee6fe08-e91c-466a-ba8b-85813c903312]" 
virtual=false 2025-12-12T16:19:07.654033357+00:00 stderr F I1212 16:19:07.653565 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-image-registry, name: node-ca, uid: 5f291cdd-c99d-4124-aa53-a0a0ab505e3f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.654033357+00:00 stderr F I1212 16:19:07.653605 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: prometheus-k8s-cluster-autoscaler-operator, uid: 195e6c38-8d7f-4de3-b20e-404ee68478e3]" virtual=false 2025-12-12T16:19:07.661063861+00:00 stderr F I1212 16:19:07.657670 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-monitoring, name: console-operator, uid: babad3be-1b02-48ca-8bc3-64551155108c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.661063861+00:00 stderr F I1212 16:19:07.657718 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: console-operator, uid: ccb991ad-450a-4068-adbb-b37825b8e6da]" virtual=false 2025-12-12T16:19:07.661063861+00:00 stderr F I1212 16:19:07.660295 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-apiserver-operator, name: prometheus-k8s, uid: 376b7f3a-eb8d-4641-a54e-19994092fb5d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.661063861+00:00 stderr F I1212 16:19:07.660357 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-controller-manager, name: prometheus-k8s, uid: a9588721-392a-4c35-83c5-11f82c4c1cad]" virtual=false 2025-12-12T16:19:07.664318421+00:00 stderr F I1212 16:19:07.664242 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-control-plane-limited, uid: 80be51ee-dda4-483c-b1c5-948301f3c52e]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.664318421+00:00 stderr F I1212 16:19:07.664302 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console-user-settings, name: console-user-settings-admin, uid: eb68d863-03ac-4a8d-8ebf-53c9f9785f85]" virtual=false 2025-12-12T16:19:07.674459222+00:00 stderr F I1212 16:19:07.674383 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" 
logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-scheduler-operator, name: prometheus-k8s, uid: 2ad12ea6-7b88-4bb1-ad15-8c6ea12eaa71]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.674486402+00:00 stderr F I1212 16:19:07.674459 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ingress-operator, name: prometheus-k8s, uid: 7627ebcc-7f0f-44e2-a90a-25d7f7f4fe20]" virtual=false 2025-12-12T16:19:07.687592866+00:00 stderr F I1212 16:19:07.687502 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-node-identity, name: network-node-identity-leases, uid: 6546e654-a56c-40cd-b5e5-b0af53dc1922]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.687592866+00:00 stderr F I1212 16:19:07.687568 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-service-ca-operator, name: prometheus-k8s, uid: 51617a9b-76c0-4d9f-8068-7c7a521e3991]" virtual=false 2025-12-12T16:19:07.690692163+00:00 stderr F I1212 16:19:07.690607 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-nodes-identity-limited, uid: e866be9e-7de1-4a23-9ef1-15d71a5333a5]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.690692163+00:00 stderr F I1212 16:19:07.690668 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-marketplace, name: openshift-marketplace-metrics, uid: e0e93b2f-fa87-4069-8467-6366728d0a62]" virtual=false 2025-12-12T16:19:07.722297455+00:00 stderr F I1212 16:19:07.722147 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-controller-manager-operator, name: prometheus-k8s, uid: 902b45ff-a672-4079-bc39-9c3db9fbed24]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.722297455+00:00 stderr F I1212 16:19:07.722215 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-monitoring, name: cluster-monitoring-operator, uid: 7f4699d7-5595-4e90-9a52-ed4769b0c986]" virtual=false 2025-12-12T16:19:07.734354653+00:00 stderr F I1212 16:19:07.733581 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-marketplace, name: 
marketplace-operator, uid: 8f0da541-5375-4494-a502-94aa8540020e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.734354653+00:00 stderr F I1212 16:19:07.733656 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: csi-snapshot-controller-operator-authentication-reader, uid: 7fd23cab-d9ed-41e1-b2d7-92b6985e2dd1]" virtual=false 2025-12-12T16:19:07.739164721+00:00 stderr F I1212 16:19:07.739064 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift, name: cluster-samples-operator-openshift-edit, uid: 995e7439-f4f4-4fa5-82e0-6afcc588fd52]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.739164721+00:00 stderr F I1212 16:19:07.739119 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-operator-lifecycle-manager, name: operator-lifecycle-manager-metrics, uid: 1d42a0b6-dd31-46cb-947c-4809e3fc9a44]" virtual=false 2025-12-12T16:19:07.744824811+00:00 stderr F I1212 16:19:07.744756 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: cluster-samples-operator-openshift-config-secret-reader, uid: 8a0fff18-901e-44e5-a8ee-842ba68cfac0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.744824811+00:00 stderr F I1212 16:19:07.744809 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: d9ba455d-d0ef-4279-b91b-2bb734b7a230]" virtual=false 2025-12-12T16:19:07.747654631+00:00 stderr F I1212 16:19:07.747600 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: machine-api-controllers, uid: 25963d68-1e2f-4b0e-a08f-0ce996f7b51f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.747654631+00:00 stderr F I1212 16:19:07.747634 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-apiserver, name: prometheus-k8s, uid: 0b6bc908-facb-4580-82f5-bff11fb34309]" virtual=false 2025-12-12T16:19:07.753822444+00:00 stderr F I1212 16:19:07.753736 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: openshift-network-public-role-binding, uid: 3f544260-b7f2-4307-aa10-76c309175c0a]" 
owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.753822444+00:00 stderr F I1212 16:19:07.753785 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: prometheus-k8s-machine-api-operator, uid: bb572898-23e1-47ec-a10d-a96fe1ecbe9d]" virtual=false 2025-12-12T16:19:07.760169641+00:00 stderr F I1212 16:19:07.759629 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: network-diagnostics, uid: b6e21cd4-4c3d-401f-92cc-7c008ff856b3]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.760169641+00:00 stderr F I1212 16:19:07.760124 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-authentication-operator, name: prometheus-k8s, uid: fef91a3e-8916-4ea4-9554-535d6fd3b728]" virtual=false 2025-12-12T16:19:07.763329779+00:00 stderr F I1212 16:19:07.763273 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-image-registry, name: prometheus-k8s, uid: 531f27d1-3010-40b8-a5d8-aaca67f7d382]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.763329779+00:00 stderr F I1212 16:19:07.763315 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-controller-manager-operator, name: prometheus-k8s, uid: 04f722e8-f3c8-48f9-8726-d3ed22a8f6db]" virtual=false 2025-12-12T16:19:07.766741033+00:00 stderr F I1212 16:19:07.766693 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console-operator, name: prometheus-k8s, uid: 5702339a-1e79-4ada-a460-4e2eff82de4d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.766763944+00:00 stderr F I1212 16:19:07.766733 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: machine-api-controllers, uid: 3541bdca-28da-4529-aa34-dc375f9b4fac]" virtual=false 2025-12-12T16:19:07.771269315+00:00 stderr F I1212 16:19:07.770920 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: 0c867af6-79d5-465d-b2ab-1be4f069171f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:19:07.771269315+00:00 stderr F I1212 16:19:07.770973 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-vsphere-infra, name: host-networking-system-node, uid: 3070c3a2-b2ab-4c6e-9a4b-bdcd112336c5]" virtual=false 2025-12-12T16:19:07.773596493+00:00 stderr F I1212 16:19:07.773515 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console, name: console-operator, uid: 1d0d1e48-a9e1-4e4a-8a48-181bf893de48]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.773616183+00:00 stderr F I1212 16:19:07.773589 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console-operator, name: console-operator, uid: 6ee80bf7-3aef-44c3-8ae6-babe6e24ccf2]" virtual=false 2025-12-12T16:19:07.777119020+00:00 stderr F I1212 16:19:07.777071 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-dns-operator, name: dns-operator, uid: 27eea001-0ad1-463e-b0b1-338bbbd06608]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.777119020+00:00 stderr F I1212 16:19:07.777105 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: console-operator, uid: dedf5f86-0501-4a1d-b312-0cf452e2800c]" virtual=false 2025-12-12T16:19:07.781605881+00:00 stderr F I1212 16:19:07.781520 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-dns-operator, name: prometheus-k8s, uid: 2ee6fe08-e91c-466a-ba8b-85813c903312]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.781642812+00:00 stderr F I1212 16:19:07.781591 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-diagnostics, name: network-diagnostics, uid: 00a918ea-d345-4c69-8447-3b22d23e9f7b]" virtual=false 2025-12-12T16:19:07.783767314+00:00 stderr F I1212 16:19:07.783675 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: prometheus-k8s-cluster-autoscaler-operator, uid: 195e6c38-8d7f-4de3-b20e-404ee68478e3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.783789875+00:00 stderr F I1212 16:19:07.783752 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-multus, name: prometheus-k8s, uid: 
5fec5a95-01ea-4349-8aeb-22c31f6322b6]" virtual=false 2025-12-12T16:19:07.790687075+00:00 stderr F I1212 16:19:07.790573 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: console-operator, uid: ccb991ad-450a-4068-adbb-b37825b8e6da]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.790687075+00:00 stderr F I1212 16:19:07.790632 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-oauth-apiserver, name: prometheus-k8s, uid: 5aa0e1a1-4826-4259-98cd-75c369bad7a7]" virtual=false 2025-12-12T16:19:07.793622038+00:00 stderr F I1212 16:19:07.793559 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-controller-manager, name: prometheus-k8s, uid: a9588721-392a-4c35-83c5-11f82c4c1cad]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.793622038+00:00 stderr F I1212 16:19:07.793589 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kni-infra, name: host-networking-system-node, uid: 67f45f81-1023-4da0-9870-d9545d0217d4]" virtual=false 2025-12-12T16:19:07.798518599+00:00 stderr F I1212 16:19:07.798444 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console-user-settings, name: console-user-settings-admin, uid: eb68d863-03ac-4a8d-8ebf-53c9f9785f85]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.798518599+00:00 stderr F I1212 16:19:07.798476 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-monitoring, name: cluster-monitoring-operator-alert-customization, uid: fba13e8c-652c-4d18-9044-6893349b700d]" virtual=false 2025-12-12T16:19:07.808997438+00:00 stderr F I1212 16:19:07.808451 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ingress-operator, name: prometheus-k8s, uid: 7627ebcc-7f0f-44e2-a90a-25d7f7f4fe20]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.808997438+00:00 stderr F I1212 16:19:07.808507 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-openstack-infra, name: host-networking-system-node, uid: 46d26db4-f098-4515-a946-0219c7756c23]" virtual=false 2025-12-12T16:19:07.818729499+00:00 stderr F I1212 16:19:07.817826 1 garbagecollector.go:567] "item has at least one existing owner, will not 
garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-service-ca-operator, name: prometheus-k8s, uid: 51617a9b-76c0-4d9f-8068-7c7a521e3991]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.818729499+00:00 stderr F I1212 16:19:07.817895 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: machine-api-operator, uid: 148fb231-346b-4ba3-a947-f077fd1f2673]" virtual=false 2025-12-12T16:19:07.826742307+00:00 stderr F I1212 16:19:07.825631 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-marketplace, name: openshift-marketplace-metrics, uid: e0e93b2f-fa87-4069-8467-6366728d0a62]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.826742307+00:00 stderr F I1212 16:19:07.825690 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-operator, name: prometheus-k8s, uid: 62b3747f-27ac-4810-bbdd-3307f382f740]" virtual=false 2025-12-12T16:19:07.860305476+00:00 stderr F I1212 16:19:07.860224 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-monitoring, name: cluster-monitoring-operator, uid: 7f4699d7-5595-4e90-9a52-ed4769b0c986]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.860305476+00:00 stderr F I1212 16:19:07.860287 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-apiserver, name: prometheus-k8s, uid: 0d74114b-fee4-4b23-b205-94074f133ad6]" virtual=false 2025-12-12T16:19:07.866966861+00:00 stderr F I1212 16:19:07.866895 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: csi-snapshot-controller-operator-authentication-reader, uid: 7fd23cab-d9ed-41e1-b2d7-92b6985e2dd1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.866988542+00:00 stderr F I1212 16:19:07.866953 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: e384bcab-90cf-46b3-b3fc-3faa6575f457]" virtual=false 2025-12-12T16:19:07.871062512+00:00 stderr F I1212 16:19:07.870997 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-operator-lifecycle-manager, name: operator-lifecycle-manager-metrics, uid: 
1d42a0b6-dd31-46cb-947c-4809e3fc9a44]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.871084523+00:00 stderr F I1212 16:19:07.871050 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-storage-operator, name: csi-snapshot-controller-operator-role, uid: 675d5d01-bbab-41f4-b22a-cb301a5bfd68]" virtual=false 2025-12-12T16:19:07.876635810+00:00 stderr F I1212 16:19:07.876580 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: d9ba455d-d0ef-4279-b91b-2bb734b7a230]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.876652441+00:00 stderr F I1212 16:19:07.876627 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: machine-approver, uid: 3ffb919c-31ad-497c-b082-f862e1b96a30]" virtual=false 2025-12-12T16:19:07.882419513+00:00 stderr F I1212 16:19:07.882358 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-apiserver, name: prometheus-k8s, uid: 0b6bc908-facb-4580-82f5-bff11fb34309]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.882441474+00:00 stderr F I1212 16:19:07.882412 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd, name: revision-status-2, uid: c41b0f0a-724d-401f-b8bd-abaae84f2c11]" virtual=false 2025-12-12T16:19:07.887408366+00:00 stderr F I1212 16:19:07.887345 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: prometheus-k8s-machine-api-operator, uid: bb572898-23e1-47ec-a10d-a96fe1ecbe9d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.887437357+00:00 stderr F I1212 16:19:07.887406 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd, name: revision-status-1, uid: f0431d0b-c8bb-453e-adcc-46f809a3bc04]" virtual=false 2025-12-12T16:19:07.894546013+00:00 stderr F I1212 16:19:07.894466 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-authentication-operator, name: prometheus-k8s, uid: fef91a3e-8916-4ea4-9554-535d6fd3b728]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.894546013+00:00 stderr F I1212 16:19:07.894524 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-oauth-apiserver, name: openshift-oauth-apiserver, uid: 668f8445-4e0b-4306-a0c6-9208bf76efb5]" virtual=false 2025-12-12T16:19:07.897140277+00:00 stderr F I1212 16:19:07.897040 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: machine-api-controllers, uid: 3541bdca-28da-4529-aa34-dc375f9b4fac]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.897140277+00:00 stderr F I1212 16:19:07.897085 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-operator-lifecycle-manager, name: catalog-operator, uid: 308b0b19-f7aa-40a2-b0b9-10a5d2b356f8]" virtual=false 2025-12-12T16:19:07.902080059+00:00 stderr F I1212 16:19:07.901716 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-controller-manager-operator, name: prometheus-k8s, uid: 04f722e8-f3c8-48f9-8726-d3ed22a8f6db]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.902080059+00:00 stderr F I1212 16:19:07.901778 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 767bca57-71f8-40a0-8831-50d92f59808c]" virtual=false 2025-12-12T16:19:07.904633552+00:00 stderr F I1212 16:19:07.904371 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-vsphere-infra, name: host-networking-system-node, uid: 3070c3a2-b2ab-4c6e-9a4b-bdcd112336c5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.904633552+00:00 stderr F I1212 16:19:07.904404 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator, uid: 92c9f946-b2a0-4dc5-975c-25ebc3bb9c4e]" virtual=false 2025-12-12T16:19:07.906429247+00:00 stderr F I1212 16:19:07.906368 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console-operator, name: console-operator, uid: 6ee80bf7-3aef-44c3-8ae6-babe6e24ccf2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.906446227+00:00 stderr F I1212 16:19:07.906410 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-config-operator, 
name: config-operator, uid: dd012f1c-40f4-428c-a842-46f94cbe2c6c]" virtual=false 2025-12-12T16:19:07.912023085+00:00 stderr F I1212 16:19:07.911953 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-diagnostics, name: network-diagnostics, uid: 00a918ea-d345-4c69-8447-3b22d23e9f7b]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.912023085+00:00 stderr F I1212 16:19:07.911995 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-image-registry, name: image-registry, uid: fc13ca00-4d30-4cf5-ba4e-0aeb5356211f]" virtual=false 2025-12-12T16:19:07.913626745+00:00 stderr F I1212 16:19:07.913585 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: console-operator, uid: dedf5f86-0501-4a1d-b312-0cf452e2800c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.913626745+00:00 stderr F I1212 16:19:07.913613 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-operator-lifecycle-manager, name: olm-operator, uid: cfa008c8-2c0c-470c-bc32-f95c0e394dd5]" virtual=false 2025-12-12T16:19:07.917140562+00:00 stderr F I1212 16:19:07.917081 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-multus, name: prometheus-k8s, uid: 5fec5a95-01ea-4349-8aeb-22c31f6322b6]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:07.917158522+00:00 stderr F I1212 16:19:07.917140 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-ovn-kubernetes, name: monitor-ovn-control-plane-metrics, uid: e8d49b63-d110-45e5-a2ec-20435c71bd60]" virtual=false 2025-12-12T16:19:07.920824143+00:00 stderr F I1212 16:19:07.920779 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kni-infra, name: host-networking-system-node, uid: 67f45f81-1023-4da0-9870-d9545d0217d4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.920824143+00:00 stderr F I1212 16:19:07.920803 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-route-controller-manager, name: openshift-route-controller-manager, uid: 6c85dcfe-44cb-4596-b7d4-b6e79a18159e]" virtual=false 2025-12-12T16:19:07.923194601+00:00 stderr F I1212 16:19:07.923133 1 garbagecollector.go:567] "item has at 
least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-oauth-apiserver, name: prometheus-k8s, uid: 5aa0e1a1-4826-4259-98cd-75c369bad7a7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.923194601+00:00 stderr F I1212 16:19:07.923161 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-apiserver, name: openshift-apiserver, uid: 38fd95a7-b03c-4438-abb8-83e272fd6912]" virtual=false 2025-12-12T16:19:07.930733438+00:00 stderr F I1212 16:19:07.930641 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-monitoring, name: cluster-monitoring-operator-alert-customization, uid: fba13e8c-652c-4d18-9044-6893349b700d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.930733438+00:00 stderr F I1212 16:19:07.930710 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-apiserver, name: openshift-apiserver-operator-check-endpoints, uid: bb13a4c2-4a26-4c4c-be0a-c92c6553ee6e]" virtual=false 2025-12-12T16:19:07.940393856+00:00 stderr F I1212 16:19:07.940312 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-openstack-infra, name: host-networking-system-node, uid: 46d26db4-f098-4515-a946-0219c7756c23]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.940419627+00:00 stderr F I1212 16:19:07.940378 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-authentication, name: oauth-openshift, uid: f570f814-28f0-43c6-a672-c5ff8b60e0a0]" virtual=false 2025-12-12T16:19:07.946614340+00:00 stderr F I1212 16:19:07.946570 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd, name: revision-status-2, uid: c41b0f0a-724d-401f-b8bd-abaae84f2c11]" 2025-12-12T16:19:07.946632071+00:00 stderr F I1212 16:19:07.946606 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-etcd-operator, name: etcd-operator, uid: aef2648c-8377-4d11-a4a1-fb24f5095a8d]" virtual=false 2025-12-12T16:19:07.951393468+00:00 stderr F I1212 16:19:07.951330 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-api, name: machine-api-operator, uid: 148fb231-346b-4ba3-a947-f077fd1f2673]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:19:07.951393468+00:00 stderr F I1212 16:19:07.951377 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 005ad9fa-457c-4a70-9bb0-2a624385cac9]" virtual=false 2025-12-12T16:19:07.953301366+00:00 stderr F I1212 16:19:07.953254 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd, name: revision-status-1, uid: f0431d0b-c8bb-453e-adcc-46f809a3bc04]" 2025-12-12T16:19:07.953301366+00:00 stderr F I1212 16:19:07.953284 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-operator-lifecycle-manager, name: package-server-manager-metrics, uid: 927e38ed-2fe0-4faa-8d67-18f898398255]" virtual=false 2025-12-12T16:19:07.958582086+00:00 stderr F I1212 16:19:07.958501 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-operator, name: prometheus-k8s, uid: 62b3747f-27ac-4810-bbdd-3307f382f740]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.958582086+00:00 stderr F I1212 16:19:07.958547 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-ovn-kubernetes, name: monitor-ovn-node, uid: 992d1159-1bbf-4ff7-adf1-580c362b690a]" virtual=false 2025-12-12T16:19:07.993538090+00:00 stderr F I1212 16:19:07.993445 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-apiserver, name: prometheus-k8s, uid: 0d74114b-fee4-4b23-b205-94074f133ad6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:07.993538090+00:00 stderr F I1212 16:19:07.993507 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-console, name: console, uid: ce6eb94a-67e7-4594-8332-d294f5f0ed28]" virtual=false 2025-12-12T16:19:08.001540608+00:00 stderr F I1212 16:19:08.001498 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: e384bcab-90cf-46b3-b3fc-3faa6575f457]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.001540608+00:00 stderr F I1212 16:19:08.001528 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-scheduler-operator, name: kube-scheduler-operator, uid: 1e85867d-2566-494d-83b0-620b526f122f]" virtual=false 2025-12-12T16:19:08.004662245+00:00 stderr F I1212 16:19:08.004619 1 garbagecollector.go:567] "item has at least 
one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-storage-operator, name: csi-snapshot-controller-operator-role, uid: 675d5d01-bbab-41f4-b22a-cb301a5bfd68]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.004662245+00:00 stderr F I1212 16:19:08.004645 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-apiserver, name: kube-apiserver, uid: 9de9deed-9721-49d9-9ffc-fe6fde17ec88]" virtual=false 2025-12-12T16:19:08.009956146+00:00 stderr F I1212 16:19:08.009814 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: machine-approver, uid: 3ffb919c-31ad-497c-b082-f862e1b96a30]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.009956146+00:00 stderr F I1212 16:19:08.009861 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-cluster-machine-approver, name: cluster-machine-approver, uid: 5a288427-478d-4f77-8ab7-5a6a841b42ec]" virtual=false 2025-12-12T16:19:08.027341446+00:00 stderr F I1212 16:19:08.027217 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-oauth-apiserver, name: openshift-oauth-apiserver, uid: 668f8445-4e0b-4306-a0c6-9208bf76efb5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.027341446+00:00 stderr F I1212 16:19:08.027283 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-cluster-version, name: cluster-version-operator, uid: 3cf7eed4-951b-4ed5-b5fe-6175a10b9554]" virtual=false 2025-12-12T16:19:08.029565591+00:00 stderr F I1212 16:19:08.029491 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-operator-lifecycle-manager, name: catalog-operator, uid: 308b0b19-f7aa-40a2-b0b9-10a5d2b356f8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.029565591+00:00 stderr F I1212 16:19:08.029520 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator, uid: b6b1ba29-2359-42ad-a1f3-c61c995c528f]" virtual=false 2025-12-12T16:19:08.038254046+00:00 stderr F I1212 16:19:08.036030 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: 
openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 767bca57-71f8-40a0-8831-50d92f59808c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.038254046+00:00 stderr F I1212 16:19:08.036090 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-controller-manager, name: openshift-controller-manager, uid: 3ead3b3c-6cb1-4941-8e45-fcfb01cfa6b6]" virtual=false 2025-12-12T16:19:08.038254046+00:00 stderr F I1212 16:19:08.036156 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator, uid: 92c9f946-b2a0-4dc5-975c-25ebc3bb9c4e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.038254046+00:00 stderr F I1212 16:19:08.036211 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-ingress-operator, name: ingress-operator, uid: e02df054-9826-42fd-bd6b-a68c996ebec0]" virtual=false 2025-12-12T16:19:08.040263626+00:00 stderr F I1212 16:19:08.039841 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-config-operator, name: config-operator, uid: dd012f1c-40f4-428c-a842-46f94cbe2c6c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.040263626+00:00 stderr F I1212 16:19:08.039883 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-machine-api, name: machine-api-controllers, uid: 8da4502b-6d27-4eb6-af73-39a8176f83fe]" virtual=false 2025-12-12T16:19:08.043682910+00:00 stderr F I1212 16:19:08.043629 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-image-registry, name: image-registry, uid: fc13ca00-4d30-4cf5-ba4e-0aeb5356211f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.043682910+00:00 stderr F I1212 16:19:08.043669 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-machine-api, name: machine-api-operator, uid: 81b76741-2b33-44b0-94f3-2547eb6fc915]" virtual=false 2025-12-12T16:19:08.047637258+00:00 stderr F I1212 16:19:08.047557 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-operator-lifecycle-manager, name: olm-operator, uid: cfa008c8-2c0c-470c-bc32-f95c0e394dd5]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.047677819+00:00 stderr F I1212 16:19:08.047625 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-authentication-operator, name: authentication-operator, uid: 01f7855a-a823-4705-8db6-27c45980a6cb]" virtual=false 2025-12-12T16:19:08.050896028+00:00 stderr F I1212 16:19:08.050822 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-ovn-kubernetes, name: monitor-ovn-control-plane-metrics, uid: e8d49b63-d110-45e5-a2ec-20435c71bd60]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.050896028+00:00 stderr F I1212 16:19:08.050856 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-image-registry, name: image-registry-operator, uid: 7693198b-e80b-4c67-a087-641d91ca8741]" virtual=false 2025-12-12T16:19:08.053501483+00:00 stderr F I1212 16:19:08.053439 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-route-controller-manager, name: openshift-route-controller-manager, uid: 6c85dcfe-44cb-4596-b7d4-b6e79a18159e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.053501483+00:00 stderr F I1212 16:19:08.053470 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-marketplace, name: marketplace-operator, uid: dca395b3-cf6a-4fc7-92bc-15c290009884]" virtual=false 2025-12-12T16:19:08.057768468+00:00 stderr F I1212 16:19:08.057670 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-apiserver, name: openshift-apiserver, uid: 38fd95a7-b03c-4438-abb8-83e272fd6912]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.057768468+00:00 stderr F I1212 16:19:08.057708 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-multus, name: monitor-multus-admission-controller, uid: 01fa16c6-f969-42c1-b69a-43de0deff522]" virtual=false 2025-12-12T16:19:08.064919795+00:00 stderr F I1212 16:19:08.064847 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-apiserver, name: openshift-apiserver-operator-check-endpoints, uid: bb13a4c2-4a26-4c4c-be0a-c92c6553ee6e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:19:08.064919795+00:00 stderr F I1212 16:19:08.064896 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-network-diagnostics, name: network-check-source, uid: a77ddccd-3bdf-40da-b44d-39f9ccca28bb]" virtual=false 2025-12-12T16:19:08.074504672+00:00 stderr F I1212 16:19:08.074412 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-authentication, name: oauth-openshift, uid: f570f814-28f0-43c6-a672-c5ff8b60e0a0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.074504672+00:00 stderr F I1212 16:19:08.074477 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-network-operator, name: network-operator, uid: 738c1721-593f-4f60-a567-4597ff37ea6a]" virtual=false 2025-12-12T16:19:08.081937276+00:00 stderr F I1212 16:19:08.081853 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-etcd-operator, name: etcd-operator, uid: aef2648c-8377-4d11-a4a1-fb24f5095a8d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.081937276+00:00 stderr F I1212 16:19:08.081913 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-service-ca-operator, name: service-ca-operator, uid: 4979bb5a-bf15-43a9-9eee-231d52574ca5]" virtual=false 2025-12-12T16:19:08.086355435+00:00 stderr F I1212 16:19:08.086277 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 005ad9fa-457c-4a70-9bb0-2a624385cac9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.086377226+00:00 stderr F I1212 16:19:08.086355 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-console-operator, name: console-operator, uid: 67aaa41b-07c9-42c2-b24b-e21c702aaf38]" virtual=false 2025-12-12T16:19:08.088973170+00:00 stderr F I1212 16:19:08.087811 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-operator-lifecycle-manager, name: package-server-manager-metrics, uid: 927e38ed-2fe0-4faa-8d67-18f898398255]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.088973170+00:00 stderr F I1212 16:19:08.087842 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: 
openshift-machine-api, name: cluster-autoscaler-operator, uid: 6380b6dc-76d4-4a0c-bd2b-ad07c6b511ca]" virtual=false 2025-12-12T16:19:08.090971669+00:00 stderr F I1212 16:19:08.090919 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-ovn-kubernetes, name: monitor-ovn-node, uid: 992d1159-1bbf-4ff7-adf1-580c362b690a]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.090991180+00:00 stderr F I1212 16:19:08.090973 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: c54dfa10-f53a-4c08-8fe5-f78de034450b]" virtual=false 2025-12-12T16:19:08.127480452+00:00 stderr F I1212 16:19:08.127354 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-console, name: console, uid: ce6eb94a-67e7-4594-8332-d294f5f0ed28]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.127480452+00:00 stderr F I1212 16:19:08.127410 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-dns-operator, name: dns-operator, uid: 50a8933f-a9ac-4a80-b460-c36e9fb81474]" virtual=false 2025-12-12T16:19:08.134746261+00:00 stderr F I1212 16:19:08.134661 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-scheduler-operator, name: kube-scheduler-operator, uid: 1e85867d-2566-494d-83b0-620b526f122f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.134746261+00:00 stderr F I1212 16:19:08.134726 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-controller-manager, name: kube-controller-manager, uid: 43eede0a-d0aa-4d52-9cba-d673fe0fc344]" virtual=false 2025-12-12T16:19:08.139618692+00:00 stderr F I1212 16:19:08.139547 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-apiserver, name: kube-apiserver, uid: 9de9deed-9721-49d9-9ffc-fe6fde17ec88]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.139618692+00:00 stderr F I1212 16:19:08.139581 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-scheduler, name: kube-scheduler, uid: 0ef34820-7d38-4563-bcab-20b7d718ade2]" virtual=false 2025-12-12T16:19:08.145408775+00:00 stderr F I1212 16:19:08.145358 1 garbagecollector.go:567] "item has at least one 
existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-cluster-machine-approver, name: cluster-machine-approver, uid: 5a288427-478d-4f77-8ab7-5a6a841b42ec]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.145408775+00:00 stderr F I1212 16:19:08.145384 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: node-cluster, uid: 22f15010-d1ca-4097-8c09-5f6abd7133cb]" virtual=false 2025-12-12T16:19:08.161768639+00:00 stderr F I1212 16:19:08.161684 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-cluster-version, name: cluster-version-operator, uid: 3cf7eed4-951b-4ed5-b5fe-6175a10b9554]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.161768639+00:00 stderr F I1212 16:19:08.161721 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: openshift-network-features, uid: 38e41668-0d59-4727-95be-557e985b00b4]" virtual=false 2025-12-12T16:19:08.164172049+00:00 stderr F I1212 16:19:08.164129 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator, uid: b6b1ba29-2359-42ad-a1f3-c61c995c528f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.164172049+00:00 stderr F I1212 16:19:08.164158 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: release-verification, uid: 59da9e83-cee9-4730-b30e-ca58e8763a4b]" virtual=false 2025-12-12T16:19:08.166330132+00:00 stderr F I1212 16:19:08.166286 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-controller-manager, name: openshift-controller-manager, uid: 3ead3b3c-6cb1-4941-8e45-fcfb01cfa6b6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.166330132+00:00 stderr F I1212 16:19:08.166311 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config, name: installer-images, uid: 01612b6d-7428-4473-93ad-08016393a900]" virtual=false 2025-12-12T16:19:08.171902030+00:00 stderr F I1212 16:19:08.171812 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-ingress-operator, name: ingress-operator, uid: e02df054-9826-42fd-bd6b-a68c996ebec0]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.171942751+00:00 stderr F I1212 16:19:08.171883 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-console-operator, name: console-operator-config, uid: 5a9c5550-185f-4031-bbc1-1f01e01294a1]" virtual=false 2025-12-12T16:19:08.175614872+00:00 stderr F I1212 16:19:08.175552 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-machine-api, name: machine-api-controllers, uid: 8da4502b-6d27-4eb6-af73-39a8176f83fe]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.175614872+00:00 stderr F I1212 16:19:08.175585 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-console-operator, name: telemetry-config, uid: 6dcd23fc-7253-4882-bb1f-3352e899b423]" virtual=false 2025-12-12T16:19:08.179411176+00:00 stderr F I1212 16:19:08.179360 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-machine-api, name: machine-api-operator, uid: 81b76741-2b33-44b0-94f3-2547eb6fc915]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.179411176+00:00 stderr F I1212 16:19:08.179386 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-console-operator, name: trusted-ca, uid: 78374417-3661-4c04-b1a1-0b4c5a1a6af7]" virtual=false 2025-12-12T16:19:08.180741699+00:00 stderr F I1212 16:19:08.180677 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-authentication-operator, name: authentication-operator, uid: 01f7855a-a823-4705-8db6-27c45980a6cb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.180761669+00:00 stderr F I1212 16:19:08.180736 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-images, uid: 3832d875-802b-4913-a66f-e8ac89956e81]" virtual=false 2025-12-12T16:19:08.184009569+00:00 stderr F I1212 16:19:08.183954 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-image-registry, name: image-registry-operator, uid: 7693198b-e80b-4c67-a087-641d91ca8741]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.184029430+00:00 stderr F I1212 16:19:08.183998 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[v1/ConfigMap, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator-config, uid: c2cc6eb9-b241-447f-8aec-c5ac92d13d67]" virtual=false 2025-12-12T16:19:08.187266030+00:00 stderr F I1212 16:19:08.186715 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-marketplace, name: marketplace-operator, uid: dca395b3-cf6a-4fc7-92bc-15c290009884]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.187266030+00:00 stderr F I1212 16:19:08.186749 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd-operator, name: etcd-ca-bundle, uid: 6b2c1752-2a08-4bc9-b635-9cfdbfe46be7]" virtual=false 2025-12-12T16:19:08.190330976+00:00 stderr F I1212 16:19:08.190279 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-multus, name: monitor-multus-admission-controller, uid: 01fa16c6-f969-42c1-b69a-43de0deff522]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.190330976+00:00 stderr F I1212 16:19:08.190306 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd-operator, name: etcd-operator-config, uid: d3443717-bd0b-4439-948e-c5a15bd7afe3]" virtual=false 2025-12-12T16:19:08.197559174+00:00 stderr F I1212 16:19:08.197471 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-network-diagnostics, name: network-check-source, uid: a77ddccd-3bdf-40da-b44d-39f9ccca28bb]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.197559174+00:00 stderr F I1212 16:19:08.197525 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd-operator, name: etcd-service-ca-bundle, uid: 973b291d-38fb-414d-9a6e-45f5a3945bb0]" virtual=false 2025-12-12T16:19:08.212358350+00:00 stderr F I1212 16:19:08.212285 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-network-operator, name: network-operator, uid: 738c1721-593f-4f60-a567-4597ff37ea6a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.212400161+00:00 stderr F I1212 16:19:08.212349 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-image-registry, name: trusted-ca, uid: 9f4415b2-fbd5-47b3-8169-1efaed0d251e]" virtual=false 2025-12-12T16:19:08.214718379+00:00 stderr F I1212 16:19:08.214662 1 garbagecollector.go:567] "item has at least one existing owner, will not 
garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-service-ca-operator, name: service-ca-operator, uid: 4979bb5a-bf15-43a9-9eee-231d52574ca5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.214741789+00:00 stderr F I1212 16:19:08.214725 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-ingress-operator, name: trusted-ca, uid: 4051881b-c1aa-4bd5-a631-cd6296ac1f64]" virtual=false 2025-12-12T16:19:08.217193100+00:00 stderr F I1212 16:19:08.217139 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-console-operator, name: console-operator, uid: 67aaa41b-07c9-42c2-b24b-e21c702aaf38]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.217220660+00:00 stderr F I1212 16:19:08.217192 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator-config, uid: 7f8e88a6-f42f-43ac-a331-9780bdd83dcd]" virtual=false 2025-12-12T16:19:08.220517612+00:00 stderr F I1212 16:19:08.220446 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: 6380b6dc-76d4-4a0c-bd2b-ad07c6b511ca]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.220517612+00:00 stderr F I1212 16:19:08.220496 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-monitoring-operator, uid: 02e02888-69ca-4c76-a391-38374550c227]" virtual=false 2025-12-12T16:19:08.224521231+00:00 stderr F I1212 16:19:08.224477 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: c54dfa10-f53a-4c08-8fe5-f78de034450b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.224521231+00:00 stderr F I1212 16:19:08.224504 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:anyuid, uid: fb1736f6-bcf6-428e-9b30-467c1e706809]" virtual=false 2025-12-12T16:19:08.262332656+00:00 stderr F I1212 16:19:08.262192 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-dns-operator, name: dns-operator, uid: 50a8933f-a9ac-4a80-b460-c36e9fb81474]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.262332656+00:00 stderr F I1212 16:19:08.262265 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-autoscaler-operator, uid: c95714ce-ec97-4ab5-b450-51c3a885f6d9]" virtual=false 2025-12-12T16:19:08.268190921+00:00 stderr F I1212 16:19:08.268092 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-controller-manager, name: kube-controller-manager, uid: 43eede0a-d0aa-4d52-9cba-d673fe0fc344]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.268190921+00:00 stderr F I1212 16:19:08.268149 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-udn-editor, uid: 6a005bc8-0efa-4343-81c3-19838aa8a393]" virtual=false 2025-12-12T16:19:08.279129891+00:00 stderr F I1212 16:19:08.276374 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-scheduler, name: kube-scheduler, uid: 0ef34820-7d38-4563-bcab-20b7d718ade2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.279129891+00:00 stderr F I1212 16:19:08.276426 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:restricted, uid: 007a6d58-59f5-40e5-8ee9-c81ac33a0ce1]" virtual=false 2025-12-12T16:19:08.283295464+00:00 stderr F I1212 16:19:08.281465 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: node-cluster, uid: 22f15010-d1ca-4097-8c09-5f6abd7133cb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.283295464+00:00 stderr F I1212 16:19:08.281501 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: registry-monitoring, uid: f680858f-9ffc-4198-be65-7a886972bb9c]" virtual=false 2025-12-12T16:19:08.295689980+00:00 stderr F I1212 16:19:08.294525 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: openshift-network-features, uid: 38e41668-0d59-4727-95be-557e985b00b4]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.295689980+00:00 stderr F I1212 16:19:08.294601 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:controller:machine-approver, uid: 823e8161-9f7c-4be7-a752-8ff8412404eb]" virtual=false 2025-12-12T16:19:08.298272274+00:00 stderr F I1212 16:19:08.297226 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: release-verification, uid: 59da9e83-cee9-4730-b30e-ca58e8763a4b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.298272274+00:00 stderr F I1212 16:19:08.297261 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:cluster-config-operator:cluster-reader, uid: 5adbf822-4950-4513-bbf5-d78ebf25fcb4]" virtual=false 2025-12-12T16:19:08.301360021+00:00 stderr F I1212 16:19:08.301300 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config, name: installer-images, uid: 01612b6d-7428-4473-93ad-08016393a900]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.301360021+00:00 stderr F I1212 16:19:08.301336 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-operator, uid: 54d437f7-f4ad-48ff-96fe-ad923eacc808]" virtual=false 2025-12-12T16:19:08.304827096+00:00 stderr F I1212 16:19:08.304772 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-console-operator, name: console-operator-config, uid: 5a9c5550-185f-4031-bbc1-1f01e01294a1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.304899998+00:00 stderr F I1212 16:19:08.304874 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: operatorhub-config-reader, uid: 3a3873d7-8caa-496e-b208-16e11bbb5222]" virtual=false 2025-12-12T16:19:08.308158879+00:00 stderr F I1212 16:19:08.308101 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-console-operator, name: telemetry-config, uid: 6dcd23fc-7253-4882-bb1f-3352e899b423]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.308158879+00:00 stderr F I1212 16:19:08.308144 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:nonroot, uid: a2c10611-bd90-4dd0-ada7-7617b531e742]" virtual=false 2025-12-12T16:19:08.312052105+00:00 stderr F I1212 16:19:08.311970 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: 
openshift-console-operator, name: trusted-ca, uid: 78374417-3661-4c04-b1a1-0b4c5a1a6af7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:19:08.312052105+00:00 stderr F I1212 16:19:08.312006 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: helm-chartrepos-viewer, uid: e7ccf99c-35cd-4925-bccf-00341a7c0226]" virtual=false 2025-12-12T16:19:08.315245304+00:00 stderr F I1212 16:19:08.315042 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-images, uid: 3832d875-802b-4913-a66f-e8ac89956e81]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.315245304+00:00 stderr F I1212 16:19:08.315083 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-autoscaler-operator:cluster-reader, uid: ae390e5b-3179-4936-a6f1-0691a189c71f]" virtual=false 2025-12-12T16:19:08.317518150+00:00 stderr F I1212 16:19:08.317440 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator-config, uid: c2cc6eb9-b241-447f-8aec-c5ac92d13d67]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.317538551+00:00 stderr F I1212 16:19:08.317512 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-monitoring-operator-namespaced, uid: 4e5afe0e-92fd-4bf8-bfe5-922660926418]" virtual=false 2025-12-12T16:19:08.321307474+00:00 stderr F I1212 16:19:08.321191 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd-operator, name: etcd-ca-bundle, uid: 6b2c1752-2a08-4bc9-b635-9cfdbfe46be7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:19:08.321307474+00:00 stderr F I1212 16:19:08.321243 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:controller:operator-lifecycle-manager, uid: 396b0df4-1cea-4e77-93d2-c31f13f44462]" virtual=false 2025-12-12T16:19:08.325036496+00:00 stderr F I1212 16:19:08.324909 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd-operator, name: etcd-operator-config, uid: d3443717-bd0b-4439-948e-c5a15bd7afe3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.325036496+00:00 stderr F I1212 16:19:08.324976 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:restricted-v3, uid: febb0e6f-0c55-4d8e-8517-036b929457ee]" virtual=false 2025-12-12T16:19:08.336923500+00:00 stderr F I1212 16:19:08.334877 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd-operator, name: etcd-service-ca-bundle, uid: 973b291d-38fb-414d-9a6e-45f5a3945bb0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:19:08.336923500+00:00 stderr F I1212 16:19:08.334941 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: console, uid: b95b8768-0164-4998-b6e5-6165e7ba01ef]" virtual=false 2025-12-12T16:19:08.341787140+00:00 stderr F I1212 16:19:08.341702 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-image-registry, name: trusted-ca, uid: 9f4415b2-fbd5-47b3-8169-1efaed0d251e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:19:08.341823181+00:00 stderr F I1212 16:19:08.341777 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-autoscaler, uid: 86f423b5-d7c6-42a4-89ff-2089943b04e6]" virtual=false 2025-12-12T16:19:08.348594728+00:00 stderr F I1212 16:19:08.347813 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-ingress-operator, name: trusted-ca, uid: 4051881b-c1aa-4bd5-a631-cd6296ac1f64]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:19:08.348594728+00:00 stderr F I1212 16:19:08.347881 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:machine-config-operator:cluster-reader, uid: 630c9210-f467-4f1f-a8e8-0f29f67432a2]" virtual=false 2025-12-12T16:19:08.353324865+00:00 stderr F I1212 16:19:08.353246 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator-config, uid: 7f8e88a6-f42f-43ac-a331-9780bdd83dcd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.353324865+00:00 stderr F I1212 16:19:08.353298 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: multus, uid: 3776dc79-8fd8-4cdf-b093-35c526784a28]" virtual=false 2025-12-12T16:19:08.356842072+00:00 stderr F I1212 16:19:08.356794 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
cluster-monitoring-operator, uid: 02e02888-69ca-4c76-a391-38374550c227]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.356902304+00:00 stderr F I1212 16:19:08.356882 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: network-diagnostics, uid: b3eadd94-78e3-4cab-ac8f-94a3b05bb801]" virtual=false 2025-12-12T16:19:08.357222992+00:00 stderr F I1212 16:19:08.357192 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:anyuid, uid: fb1736f6-bcf6-428e-9b30-467c1e706809]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.357250842+00:00 stderr F I1212 16:19:08.357221 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator-config, uid: ae00bb8f-1b2a-469a-aece-0cdda9885a20]" virtual=false 2025-12-12T16:19:08.394108454+00:00 stderr F I1212 16:19:08.394033 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-autoscaler-operator, uid: c95714ce-ec97-4ab5-b450-51c3a885f6d9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.394222376+00:00 stderr F I1212 16:19:08.394199 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: metrics-daemon-role, uid: cd81010d-36e3-4272-9c11-bc2a176cac77]" virtual=false 2025-12-12T16:19:08.401430615+00:00 stderr F I1212 16:19:08.401359 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-udn-editor, uid: 6a005bc8-0efa-4343-81c3-19838aa8a393]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.401459405+00:00 stderr F I1212 16:19:08.401417 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator-config, uid: a0a8541b-3197-4955-b6a0-a48f553f3956]" virtual=false 2025-12-12T16:19:08.404470620+00:00 stderr F I1212 16:19:08.404439 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:restricted, uid: 007a6d58-59f5-40e5-8ee9-c81ac33a0ce1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.404488380+00:00 stderr F I1212 16:19:08.404465 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-cluster-reader, uid: 8a39b760-1b0e-474e-9e1b-f6c769f5ed13]" virtual=false 2025-12-12T16:19:08.410274563+00:00 stderr F I1212 16:19:08.410234 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: registry-monitoring, uid: f680858f-9ffc-4198-be65-7a886972bb9c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.410274563+00:00 stderr F I1212 16:19:08.410263 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: multus-ancillary-tools, uid: f41f1829-7b7c-453d-9fac-5a6645faf319]" virtual=false 2025-12-12T16:19:08.426609147+00:00 stderr F I1212 16:19:08.426470 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:controller:machine-approver, uid: 823e8161-9f7c-4be7-a752-8ff8412404eb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.426609147+00:00 stderr F I1212 16:19:08.426547 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiregistration.k8s.io/v1/APIService, namespace: , name: v1.packages.operators.coreos.com, uid: 1eb523e0-0a92-4ae6-84a4-16192bab1fca]" virtual=false 2025-12-12T16:19:08.431349754+00:00 stderr F I1212 16:19:08.431289 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:cluster-config-operator:cluster-reader, uid: 5adbf822-4950-4513-bbf5-d78ebf25fcb4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.431368405+00:00 stderr F I1212 16:19:08.431345 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: net-attach-def-project, uid: 8ea7b21b-7f89-4964-98a8-62069f159aa7]" virtual=false 2025-12-12T16:19:08.434485112+00:00 stderr F I1212 16:19:08.434436 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-operator, uid: 54d437f7-f4ad-48ff-96fe-ad923eacc808]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.434566614+00:00 stderr F I1212 16:19:08.434535 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-control-plane-limited, uid: 8d0bf8dc-ddf1-4412-a8d3-d2cca7aae8a6]" virtual=false 2025-12-12T16:19:08.436585424+00:00 stderr F I1212 16:19:08.436548 1 
garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: operatorhub-config-reader, uid: 3a3873d7-8caa-496e-b208-16e11bbb5222]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.436603864+00:00 stderr F I1212 16:19:08.436580 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: network-node-identity, uid: d7ddb0cd-f3ce-4f72-aac7-7fd1c95d3152]" virtual=false 2025-12-12T16:19:08.440241334+00:00 stderr F I1212 16:19:08.440142 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-autoscaler-operator:cluster-reader, uid: ae390e5b-3179-4936-a6f1-0691a189c71f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.440241334+00:00 stderr F I1212 16:19:08.440229 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: multus-admission-controller-webhook, uid: 1d70faff-2868-472e-86ee-7d454da7084b]" virtual=false 2025-12-12T16:19:08.444080609+00:00 stderr F I1212 16:19:08.443826 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:nonroot, uid: a2c10611-bd90-4dd0-ada7-7617b531e742]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.444080609+00:00 stderr F I1212 16:19:08.443853 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-iptables-alerter, uid: 50c71944-8927-4a26-8fdf-453e314e3135]" virtual=false 2025-12-12T16:19:08.448427817+00:00 stderr F I1212 16:19:08.448379 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: helm-chartrepos-viewer, uid: e7ccf99c-35cd-4925-bccf-00341a7c0226]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.448494688+00:00 stderr F I1212 16:19:08.448473 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: whereabouts-cni, uid: 81765a55-8ddf-4236-a0d9-3a707f7f82a2]" virtual=false 2025-12-12T16:19:08.450490948+00:00 stderr F I1212 16:19:08.450461 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-monitoring-operator-namespaced, uid: 4e5afe0e-92fd-4bf8-bfe5-922660926418]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.450544369+00:00 stderr F I1212 16:19:08.450524 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-kube-rbac-proxy, uid: 6000bf1a-c327-42b3-89bf-6737effbc9cc]" virtual=false 2025-12-12T16:19:08.455259515+00:00 stderr F I1212 16:19:08.455164 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:controller:operator-lifecycle-manager, uid: 396b0df4-1cea-4e77-93d2-c31f13f44462]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.455343468+00:00 stderr F I1212 16:19:08.455322 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-node-limited, uid: d46dbbf0-e5a5-4718-9cf2-73a3c26e0e10]" virtual=false 2025-12-12T16:19:08.456979748+00:00 stderr F I1212 16:19:08.456892 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:restricted-v3, uid: febb0e6f-0c55-4d8e-8517-036b929457ee]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.457035649+00:00 stderr F I1212 16:19:08.457016 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-udn-viewer, uid: 18062659-d954-4db4-a1b2-1563fcdc226e]" virtual=false 2025-12-12T16:19:08.465002666+00:00 stderr F I1212 16:19:08.464950 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: console, uid: b95b8768-0164-4998-b6e5-6165e7ba01ef]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.465089648+00:00 stderr F I1212 16:19:08.465067 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingWebhookConfiguration, namespace: , name: multus.openshift.io, uid: 6af572ca-45a2-4100-8c12-1c0aea97f3b4]" virtual=false 2025-12-12T16:19:08.474076001+00:00 stderr F I1212 16:19:08.474034 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-autoscaler, uid: 86f423b5-d7c6-42a4-89ff-2089943b04e6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.474138302+00:00 stderr F I1212 16:19:08.474117 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[admissionregistration.k8s.io/v1/ValidatingWebhookConfiguration, namespace: , name: network-node-identity.openshift.io, uid: 5c2fe880-8ccc-45c8-8bc7-7f3bd0bb9e5f]" virtual=false 2025-12-12T16:19:08.482194531+00:00 stderr F I1212 16:19:08.482129 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:machine-config-operator:cluster-reader, uid: 630c9210-f467-4f1f-a8e8-0f29f67432a2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.482282793+00:00 stderr F I1212 16:19:08.482258 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: cni-copy-resources, uid: 29088134-82cd-49c9-891c-3834b37dd6f8]" virtual=false 2025-12-12T16:19:08.484312854+00:00 stderr F I1212 16:19:08.484278 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: multus, uid: 3776dc79-8fd8-4cdf-b093-35c526784a28]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.484362455+00:00 stderr F I1212 16:19:08.484346 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: default-cni-sysctl-allowlist, uid: 5e80d595-c63d-44bf-914d-7f260b4d19dd]" virtual=false 2025-12-12T16:19:08.488068467+00:00 stderr F I1212 16:19:08.487945 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: network-diagnostics, uid: b3eadd94-78e3-4cab-ac8f-94a3b05bb801]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.488203580+00:00 stderr F I1212 16:19:08.488154 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: multus-daemon-config, uid: 46bee54e-7954-481f-b845-7ef5fd73b0e0]" virtual=false 2025-12-12T16:19:08.491742137+00:00 stderr F I1212 16:19:08.491686 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator-config, uid: ae00bb8f-1b2a-469a-aece-0cdda9885a20]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.491842750+00:00 stderr F I1212 16:19:08.491813 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: whereabouts-flatfile-config, uid: 8db09e57-a56b-4b45-94d6-a2796260e9cd]" virtual=false 2025-12-12T16:19:08.492802794+00:00 stderr F I1212 16:19:08.492773 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" 
item="[apiregistration.k8s.io/v1/APIService, namespace: , name: v1.packages.operators.coreos.com, uid: 1eb523e0-0a92-4ae6-84a4-16192bab1fca]" 2025-12-12T16:19:08.492855265+00:00 stderr F I1212 16:19:08.492836 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-console, name: networking-console-plugin, uid: 3aae3443-66e1-496f-98c2-65b5a65de071]" virtual=false 2025-12-12T16:19:08.528393754+00:00 stderr F I1212 16:19:08.528272 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: metrics-daemon-role, uid: cd81010d-36e3-4272-9c11-bc2a176cac77]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.528541627+00:00 stderr F I1212 16:19:08.528514 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-node-identity, name: ovnkube-identity-cm, uid: 69be1ac7-0211-4526-80ec-483251460038]" virtual=false 2025-12-12T16:19:08.533613753+00:00 stderr F I1212 16:19:08.533566 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator-config, uid: a0a8541b-3197-4955-b6a0-a48f553f3956]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:19:08.533699045+00:00 stderr F I1212 16:19:08.533672 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-operator, name: applied-cluster, uid: d45e81f6-f612-465e-aeaa-ac35b5a9786a]" virtual=false 2025-12-12T16:19:08.542275957+00:00 stderr F I1212 16:19:08.542162 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-cluster-reader, uid: 8a39b760-1b0e-474e-9e1b-f6c769f5ed13]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.542313438+00:00 stderr F I1212 16:19:08.542267 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-operator, name: iptables-alerter-script, uid: d6454361-d693-4bd5-970e-8dee8c867368]" virtual=false 2025-12-12T16:19:08.545641830+00:00 stderr F I1212 16:19:08.544672 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: multus-ancillary-tools, uid: f41f1829-7b7c-453d-9fac-5a6645faf319]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.545641830+00:00 stderr F I1212 16:19:08.544723 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: 
openshift-oauth-apiserver, name: revision-status-1, uid: 444fbf90-75af-4a73-9695-57f0d8acfffe]" virtual=false 2025-12-12T16:19:08.563855110+00:00 stderr F I1212 16:19:08.563785 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: net-attach-def-project, uid: 8ea7b21b-7f89-4964-98a8-62069f159aa7]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.563954403+00:00 stderr F I1212 16:19:08.563926 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-ovn-kubernetes, name: ovnkube-config, uid: bb2cfc2c-89dd-4cea-b292-4d5129ae614c]" virtual=false 2025-12-12T16:19:08.575678613+00:00 stderr F I1212 16:19:08.575592 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: multus-admission-controller-webhook, uid: 1d70faff-2868-472e-86ee-7d454da7084b]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.575778025+00:00 stderr F I1212 16:19:08.575747 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-ovn-kubernetes, name: ovnkube-script-lib, uid: 72382e9c-006e-43aa-a927-ef5b09d76e4a]" virtual=false 2025-12-12T16:19:08.575981650+00:00 stderr F I1212 16:19:08.575958 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: network-node-identity, uid: d7ddb0cd-f3ce-4f72-aac7-7fd1c95d3152]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.576148834+00:00 stderr F I1212 16:19:08.576130 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicyBinding, namespace: , name: user-defined-networks-namespace-label-binding, uid: b35820f9-16e9-4c63-bcf1-aa21a6b55045]" virtual=false 2025-12-12T16:19:08.576450582+00:00 stderr F I1212 16:19:08.576374 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-control-plane-limited, uid: 8d0bf8dc-ddf1-4412-a8d3-d2cca7aae8a6]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.576481152+00:00 stderr F I1212 16:19:08.576449 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-ovn-kubernetes, uid: 76336446-d881-4c62-80aa-a8abd361631e]" virtual=false 2025-12-12T16:19:08.579347413+00:00 stderr F I1212 16:19:08.579303 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage 
collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-iptables-alerter, uid: 50c71944-8927-4a26-8fdf-453e314e3135]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.579376424+00:00 stderr F I1212 16:19:08.579341 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: metrics-daemon-sa-rolebinding, uid: 4c34a54f-0d8d-4ff2-be71-472375b8b859]" virtual=false 2025-12-12T16:19:08.580865951+00:00 stderr F I1212 16:19:08.580826 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: whereabouts-cni, uid: 81765a55-8ddf-4236-a0d9-3a707f7f82a2]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.580883171+00:00 stderr F I1212 16:19:08.580863 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-admission-controller-webhook, uid: 545e1457-64e7-48f5-b19a-0ddbbb2165b1]" virtual=false 2025-12-12T16:19:08.583727382+00:00 stderr F I1212 16:19:08.583671 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-kube-rbac-proxy, uid: 6000bf1a-c327-42b3-89bf-6737effbc9cc]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.583786363+00:00 stderr F I1212 16:19:08.583769 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-ancillary-tools, uid: 920cac91-1e97-4b78-9407-de7abd17435c]" virtual=false 2025-12-12T16:19:08.593130484+00:00 stderr F I1212 16:19:08.593069 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-node-limited, uid: d46dbbf0-e5a5-4718-9cf2-73a3c26e0e10]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.593291718+00:00 stderr F I1212 16:19:08.593266 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-cluster-readers, uid: f90bb80b-8164-47fa-a8d7-2f339bb24054]" virtual=false 2025-12-12T16:19:08.594610251+00:00 stderr F I1212 16:19:08.594583 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-udn-viewer, uid: 18062659-d954-4db4-a1b2-1563fcdc226e]" 
owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.594665862+00:00 stderr F I1212 16:19:08.594650 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-group, uid: 55936cb0-8925-4d56-b4ed-afd17c252ccc]" virtual=false 2025-12-12T16:19:08.598056316+00:00 stderr F I1212 16:19:08.597954 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingWebhookConfiguration, namespace: , name: multus.openshift.io, uid: 6af572ca-45a2-4100-8c12-1c0aea97f3b4]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.598056316+00:00 stderr F I1212 16:19:08.597985 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-transient, uid: bdf36000-8acb-4bfb-9410-ceb68275d539]" virtual=false 2025-12-12T16:19:08.624467099+00:00 stderr F I1212 16:19:08.624386 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-oauth-apiserver, name: revision-status-1, uid: 444fbf90-75af-4a73-9695-57f0d8acfffe]" 2025-12-12T16:19:08.624467099+00:00 stderr F I1212 16:19:08.624435 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-whereabouts, uid: 84084398-4ced-4b04-aa3c-896feffb01cb]" virtual=false 2025-12-12T16:19:08.625432553+00:00 stderr F I1212 16:19:08.625321 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: default-cni-sysctl-allowlist, uid: 5e80d595-c63d-44bf-914d-7f260b4d19dd]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.625432553+00:00 stderr F I1212 16:19:08.625380 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: network-diagnostics, uid: 99939742-bf08-407c-8e6b-b481dd120b5e]" virtual=false 2025-12-12T16:19:08.625531805+00:00 stderr F I1212 16:19:08.625497 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: multus-daemon-config, uid: 46bee54e-7954-481f-b845-7ef5fd73b0e0]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.625531805+00:00 stderr F I1212 16:19:08.625518 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: network-node-identity, uid: 68a4cac1-791b-476a-b674-8f1dc0ccb944]" virtual=false 2025-12-12T16:19:08.626154590+00:00 
stderr F I1212 16:19:08.626105 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingWebhookConfiguration, namespace: , name: network-node-identity.openshift.io, uid: 5c2fe880-8ccc-45c8-8bc7-7f3bd0bb9e5f]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.626154590+00:00 stderr F I1212 16:19:08.626132 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-iptables-alerter, uid: 7689b5d9-8b5c-4cf9-b2f1-14e7d8e26f1d]" virtual=false 2025-12-12T16:19:08.626363796+00:00 stderr F I1212 16:19:08.626327 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: whereabouts-flatfile-config, uid: 8db09e57-a56b-4b45-94d6-a2796260e9cd]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.626363796+00:00 stderr F I1212 16:19:08.626349 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ovn-kubernetes-control-plane-limited, uid: 2bbdc40e-0150-42d0-aa51-03a941b0dd1c]" virtual=false 2025-12-12T16:19:08.626770026+00:00 stderr F I1212 16:19:08.626719 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-console, name: networking-console-plugin, uid: 3aae3443-66e1-496f-98c2-65b5a65de071]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.626770026+00:00 stderr F I1212 16:19:08.626743 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ovn-kubernetes-node-identity-limited, uid: c9164259-3318-4e4b-af57-db18e27bc239]" virtual=false 2025-12-12T16:19:08.627088324+00:00 stderr F I1212 16:19:08.627042 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: cni-copy-resources, uid: 29088134-82cd-49c9-891c-3834b37dd6f8]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.627088324+00:00 stderr F I1212 16:19:08.627063 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ovn-kubernetes-node-kube-rbac-proxy, uid: ed2f03f2-1418-497a-b850-35c5a0081d19]" virtual=false 2025-12-12T16:19:08.656683295+00:00 stderr F I1212 16:19:08.656578 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: 
openshift-network-node-identity, name: ovnkube-identity-cm, uid: 69be1ac7-0211-4526-80ec-483251460038]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.656683295+00:00 stderr F I1212 16:19:08.656642 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/Image, namespace: , name: cluster, uid: 1f572312-825c-4dcf-8f7b-48c596dc9647]" virtual=false 2025-12-12T16:19:08.668138608+00:00 stderr F I1212 16:19:08.667689 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-operator, name: applied-cluster, uid: d45e81f6-f612-465e-aeaa-ac35b5a9786a]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.671630235+00:00 stderr F I1212 16:19:08.670671 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-operator, name: iptables-alerter-script, uid: d6454361-d693-4bd5-970e-8dee8c867368]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.699924424+00:00 stderr F I1212 16:19:08.699838 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-ovn-kubernetes, name: ovnkube-config, uid: bb2cfc2c-89dd-4cea-b292-4d5129ae614c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.701819621+00:00 stderr F I1212 16:19:08.701775 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-ovn-kubernetes, name: ovnkube-script-lib, uid: 72382e9c-006e-43aa-a927-ef5b09d76e4a]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.705134693+00:00 stderr F I1212 16:19:08.705094 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicyBinding, namespace: , name: user-defined-networks-namespace-label-binding, uid: b35820f9-16e9-4c63-bcf1-aa21a6b55045]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.707528082+00:00 stderr F I1212 16:19:08.707489 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-ovn-kubernetes, uid: 76336446-d881-4c62-80aa-a8abd361631e]" 
owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.711727006+00:00 stderr F I1212 16:19:08.711668 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: metrics-daemon-sa-rolebinding, uid: 4c34a54f-0d8d-4ff2-be71-472375b8b859]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.715173261+00:00 stderr F I1212 16:19:08.715101 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-admission-controller-webhook, uid: 545e1457-64e7-48f5-b19a-0ddbbb2165b1]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.720687698+00:00 stderr F I1212 16:19:08.720487 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-ancillary-tools, uid: 920cac91-1e97-4b78-9407-de7abd17435c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.721276392+00:00 stderr F I1212 16:19:08.721212 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-cluster-readers, uid: f90bb80b-8164-47fa-a8d7-2f339bb24054]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.730712435+00:00 stderr F I1212 16:19:08.730612 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-group, uid: 55936cb0-8925-4d56-b4ed-afd17c252ccc]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.734645803+00:00 stderr F I1212 16:19:08.733115 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-transient, uid: bdf36000-8acb-4bfb-9410-ceb68275d539]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.734645803+00:00 stderr F I1212 16:19:08.733621 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-whereabouts, uid: 
84084398-4ced-4b04-aa3c-896feffb01cb]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.741380679+00:00 stderr F I1212 16:19:08.741280 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: network-node-identity, uid: 68a4cac1-791b-476a-b674-8f1dc0ccb944]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.743824490+00:00 stderr F I1212 16:19:08.743759 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-iptables-alerter, uid: 7689b5d9-8b5c-4cf9-b2f1-14e7d8e26f1d]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.746089376+00:00 stderr F I1212 16:19:08.746015 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: network-diagnostics, uid: 99939742-bf08-407c-8e6b-b481dd120b5e]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.748080095+00:00 stderr F I1212 16:19:08.748028 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ovn-kubernetes-control-plane-limited, uid: 2bbdc40e-0150-42d0-aa51-03a941b0dd1c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.756959104+00:00 stderr F I1212 16:19:08.756864 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ovn-kubernetes-node-kube-rbac-proxy, uid: ed2f03f2-1418-497a-b850-35c5a0081d19]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.757319273+00:00 stderr F I1212 16:19:08.757265 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ovn-kubernetes-node-identity-limited, uid: c9164259-3318-4e4b-af57-db18e27bc239]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:19:08.757570919+00:00 stderr F I1212 16:19:08.757520 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" 
logger="garbage-collector-controller" item="[config.openshift.io/v1/Image, namespace: , name: cluster, uid: 1f572312-825c-4dcf-8f7b-48c596dc9647]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:19:26.381638282+00:00 stderr F I1212 16:19:26.381518 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-7b9f779b68" need=0 deleting=1 2025-12-12T16:19:26.381638282+00:00 stderr F I1212 16:19:26.381596 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-7b9f779b68" relatedReplicaSets=["openshift-controller-manager/controller-manager-58897fffb5","openshift-controller-manager/controller-manager-69f958c846","openshift-controller-manager/controller-manager-7fffb5779","openshift-controller-manager/controller-manager-86d99bb5b9","openshift-controller-manager/controller-manager-c7d4b49f6","openshift-controller-manager/controller-manager-c84474957","openshift-controller-manager/controller-manager-7b9f779b68","openshift-controller-manager/controller-manager-79d797b698","openshift-controller-manager/controller-manager-5f76cf6594","openshift-controller-manager/controller-manager-6445bd5bb7","openshift-controller-manager/controller-manager-65b6cccf98","openshift-controller-manager/controller-manager-74bfd85b68"] 2025-12-12T16:19:26.381787096+00:00 stderr F I1212 16:19:26.381743 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="controller-manager-7b9f779b68" pod="openshift-controller-manager/controller-manager-7b9f779b68-xk96c" 2025-12-12T16:19:26.391522550+00:00 stderr F I1212 16:19:26.391433 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-controller-manager/controller-manager" err="Operation cannot be fulfilled on deployments.apps \"controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:26.403140852+00:00 stderr F I1212 16:19:26.403046 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-79d797b698" need=1 creating=1 2025-12-12T16:19:26.403487300+00:00 stderr F I1212 16:19:26.403424 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-8fdcdbb66" need=0 deleting=1 2025-12-12T16:19:26.403537492+00:00 stderr F I1212 16:19:26.403491 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-8fdcdbb66" 
relatedReplicaSets=["openshift-route-controller-manager/route-controller-manager-bf6bf5794","openshift-route-controller-manager/route-controller-manager-5fccdd79b9","openshift-route-controller-manager/route-controller-manager-7d86df95df","openshift-route-controller-manager/route-controller-manager-c47fcf799","openshift-route-controller-manager/route-controller-manager-8fdcdbb66","openshift-route-controller-manager/route-controller-manager-67bd47cff9","openshift-route-controller-manager/route-controller-manager-6b47f77689","openshift-route-controller-manager/route-controller-manager-6b749d96f6","openshift-route-controller-manager/route-controller-manager-6d7f4ff85d","openshift-route-controller-manager/route-controller-manager-76558c69dc","openshift-route-controller-manager/route-controller-manager-776cdc94d6","openshift-route-controller-manager/route-controller-manager-f4599bd79"] 2025-12-12T16:19:26.403704986+00:00 stderr F I1212 16:19:26.403675 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="route-controller-manager-8fdcdbb66" pod="openshift-route-controller-manager/route-controller-manager-8fdcdbb66-vvkdl" 2025-12-12T16:19:26.409626555+00:00 stderr F I1212 16:19:26.409548 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:26.421006200+00:00 stderr F I1212 16:19:26.420898 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-bf6bf5794" need=1 creating=1 2025-12-12T16:19:26.426626041+00:00 stderr F I1212 16:19:26.426513 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-controller-manager/controller-manager" err="Operation cannot be fulfilled on deployments.apps \"controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:26.465701763+00:00 stderr F I1212 16:19:26.465591 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:46.367610188+00:00 stderr F I1212 16:19:46.367501 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-controller-manager/controller-manager" err="Operation cannot be fulfilled on replicasets.apps \"controller-manager-7b9f779b68\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:46.383044495+00:00 stderr F I1212 16:19:46.382918 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-79d797b698" need=0 deleting=1 2025-12-12T16:19:46.383044495+00:00 stderr F I1212 16:19:46.382997 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-79d797b698" 
relatedReplicaSets=["openshift-controller-manager/controller-manager-58897fffb5","openshift-controller-manager/controller-manager-69f958c846","openshift-controller-manager/controller-manager-7fffb5779","openshift-controller-manager/controller-manager-86d99bb5b9","openshift-controller-manager/controller-manager-c7d4b49f6","openshift-controller-manager/controller-manager-c84474957","openshift-controller-manager/controller-manager-7b9f779b68","openshift-controller-manager/controller-manager-79d797b698","openshift-controller-manager/controller-manager-5f76cf6594","openshift-controller-manager/controller-manager-6445bd5bb7","openshift-controller-manager/controller-manager-65b6cccf98"] 2025-12-12T16:19:46.383194159+00:00 stderr F I1212 16:19:46.383133 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="controller-manager-79d797b698" pod="openshift-controller-manager/controller-manager-79d797b698-v4v6j" 2025-12-12T16:19:46.389869607+00:00 stderr F I1212 16:19:46.389799 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-controller-manager/controller-manager" err="Operation cannot be fulfilled on deployments.apps \"controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:46.405163981+00:00 stderr F I1212 16:19:46.405093 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-7b9f779b68" need=1 creating=1 2025-12-12T16:20:35.784887465+00:00 stderr F I1212 16:20:35.784530 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-marketplace/marketplace-operator-547dbd544d" need=1 creating=1 2025-12-12T16:20:46.385319901+00:00 stderr F I1212 16:20:46.384428 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on replicasets.apps \"route-controller-manager-8fdcdbb66\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:46.404931556+00:00 stderr F I1212 16:20:46.404818 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-bf6bf5794" need=0 deleting=1 2025-12-12T16:20:46.404931556+00:00 stderr F I1212 16:20:46.404907 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-bf6bf5794" relatedReplicaSets=["openshift-route-controller-manager/route-controller-manager-8fdcdbb66","openshift-route-controller-manager/route-controller-manager-67bd47cff9","openshift-route-controller-manager/route-controller-manager-6b47f77689","openshift-route-controller-manager/route-controller-manager-6b749d96f6","openshift-route-controller-manager/route-controller-manager-6d7f4ff85d","openshift-route-controller-manager/route-controller-manager-776cdc94d6","openshift-route-controller-manager/route-controller-manager-f4599bd79","openshift-route-controller-manager/route-controller-manager-bf6bf5794","openshift-route-controller-manager/route-controller-manager-5fccdd79b9","openshift-route-controller-manager/route-controller-manager-7d86df95df","openshift-route-controller-manager/route-controller-manager-c47fcf799"] 2025-12-12T16:20:46.405110420+00:00 stderr F I1212 
16:20:46.405081 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="route-controller-manager-bf6bf5794" pod="openshift-route-controller-manager/route-controller-manager-bf6bf5794-d5zzt" 2025-12-12T16:20:46.412293149+00:00 stderr F I1212 16:20:46.412137 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:46.423306118+00:00 stderr F I1212 16:20:46.423224 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-8fdcdbb66" need=1 creating=1 2025-12-12T16:20:46.439816181+00:00 stderr F I1212 16:20:46.439740 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:46.458836439+00:00 stderr F I1212 16:20:46.458745 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:25:04.308943035+00:00 stderr F E1212 16:25:04.308844 1 publisher.go:173] "Unhandled Error" err="syncing \"openstack\" failed: configmaps \"kube-root-ca.crt\" already exists" logger="UnhandledError" 2025-12-12T16:25:26.860157871+00:00 stderr F I1212 16:25:26.859116 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-ovn-kubernetes/ovnkube-control-plane-57b78d8988" need=0 deleting=1 2025-12-12T16:25:26.860157871+00:00 stderr F I1212 16:25:26.860015 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-ovn-kubernetes/ovnkube-control-plane-57b78d8988" relatedReplicaSets=["openshift-ovn-kubernetes/ovnkube-control-plane-57b78d8988","openshift-ovn-kubernetes/ovnkube-control-plane-97c9b6c48"] 2025-12-12T16:25:26.860157871+00:00 stderr F I1212 16:25:26.860125 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="ovnkube-control-plane-57b78d8988" pod="openshift-ovn-kubernetes/ovnkube-control-plane-57b78d8988-xtrkr" 2025-12-12T16:25:26.865817943+00:00 stderr F I1212 16:25:26.864967 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-ovn-kubernetes/ovnkube-control-plane" err="Operation cannot be fulfilled on deployments.apps \"ovnkube-control-plane\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:25:26.875969659+00:00 stderr F I1212 16:25:26.875874 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-ovn-kubernetes/ovnkube-control-plane-97c9b6c48" need=1 creating=1 2025-12-12T16:25:26.896082445+00:00 stderr F I1212 16:25:26.893026 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" 
deployment="openshift-ovn-kubernetes/ovnkube-control-plane" err="Operation cannot be fulfilled on deployments.apps \"ovnkube-control-plane\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:25:27.084023434+00:00 stderr F I1212 16:25:27.083928 1 controller_utils.go:618] "Deleting pod" logger="daemonset-controller" controller="ovnkube-node" pod="openshift-ovn-kubernetes/ovnkube-node-wjw4g" 2025-12-12T16:26:38.869979637+00:00 stderr F I1212 16:26:38.869768 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: redhat-marketplace-unpack-bundles, uid: 1bce222b-2fd6-49f3-b48e-0645dc5ba923]" virtual=false 2025-12-12T16:26:38.870120281+00:00 stderr F I1212 16:26:38.870063 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: redhat-marketplace-grpc-server, uid: d8e2a367-fe9f-43f2-8b9f-a3e878703116]" virtual=false 2025-12-12T16:26:38.870120281+00:00 stderr F I1212 16:26:38.870103 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-marketplace, name: redhat-marketplace, uid: 298d54b6-a7d4-42db-8228-ccf2494c46fb]" virtual=false 2025-12-12T16:26:38.870136181+00:00 stderr F I1212 16:26:38.870120 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Pod, namespace: openshift-marketplace, name: redhat-marketplace-jkgqd, uid: 5307a6d2-3f00-4ebd-8c7b-e101e24f4dd4]" virtual=false 2025-12-12T16:26:38.870309266+00:00 stderr F I1212 16:26:38.870199 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: redhat-marketplace, uid: 4349139a-211f-4b82-a867-d0f135c54aa4]" virtual=false 2025-12-12T16:26:38.886514025+00:00 stderr F I1212 16:26:38.886363 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: redhat-marketplace-unpack-bundles, uid: 1bce222b-2fd6-49f3-b48e-0645dc5ba923]" propagationPolicy="Background" 2025-12-12T16:26:38.886514025+00:00 stderr F I1212 16:26:38.886429 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-marketplace, name: redhat-marketplace, uid: 298d54b6-a7d4-42db-8228-ccf2494c46fb]" propagationPolicy="Background" 2025-12-12T16:26:38.886514025+00:00 stderr F I1212 16:26:38.886466 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: redhat-marketplace, uid: 4349139a-211f-4b82-a867-d0f135c54aa4]" propagationPolicy="Background" 2025-12-12T16:26:38.886514025+00:00 stderr F I1212 16:26:38.886484 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/Pod, namespace: openshift-marketplace, name: redhat-marketplace-jkgqd, uid: 5307a6d2-3f00-4ebd-8c7b-e101e24f4dd4]" propagationPolicy="Background" 2025-12-12T16:26:38.886621888+00:00 stderr F I1212 16:26:38.886524 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: redhat-marketplace-grpc-server, uid: d8e2a367-fe9f-43f2-8b9f-a3e878703116]" 
propagationPolicy="Background" 2025-12-12T16:26:38.893446700+00:00 stderr F I1212 16:26:38.893334 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Secret, namespace: openshift-marketplace, name: redhat-marketplace-dockercfg-gg4w7, uid: b6c8f41a-3f43-4cfb-853f-5068aaaac8e3]" virtual=false 2025-12-12T16:26:38.894623170+00:00 stderr F I1212 16:26:38.893902 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[discovery.k8s.io/v1/EndpointSlice, namespace: openshift-marketplace, name: redhat-marketplace-d7cgr, uid: 0e4e2da6-858b-498e-943c-9aabf723c124]" virtual=false 2025-12-12T16:26:38.900711354+00:00 stderr F I1212 16:26:38.900450 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/Secret, namespace: openshift-marketplace, name: redhat-marketplace-dockercfg-gg4w7, uid: b6c8f41a-3f43-4cfb-853f-5068aaaac8e3]" propagationPolicy="Background" 2025-12-12T16:26:38.900734814+00:00 stderr F I1212 16:26:38.900716 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[discovery.k8s.io/v1/EndpointSlice, namespace: openshift-marketplace, name: redhat-marketplace-d7cgr, uid: 0e4e2da6-858b-498e-943c-9aabf723c124]" propagationPolicy="Background" 2025-12-12T16:26:39.908579626+00:00 stderr F I1212 16:26:39.908425 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-image-registry/image-registry-5d9d95bf5b" need=1 creating=1 2025-12-12T16:26:39.938715217+00:00 stderr F I1212 16:26:39.929854 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-image-registry/image-registry" err="Operation cannot be fulfilled on deployments.apps \"image-registry\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:26:39.953241694+00:00 stderr F I1212 16:26:39.953114 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-image-registry/image-registry" err="Operation cannot be fulfilled on deployments.apps \"image-registry\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:26:39.993603013+00:00 stderr F I1212 16:26:39.992440 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-image-registry/image-registry" err="Operation cannot be fulfilled on deployments.apps \"image-registry\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:26:42.798206398+00:00 stderr F I1212 16:26:42.797593 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="0s" 2025-12-12T16:26:42.798206398+00:00 stderr F I1212 16:26:42.797803 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. 
Will sync this job again" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" nextSyncIn="9m59.99999629s" 2025-12-12T16:26:42.826803451+00:00 stderr F I1212 16:26:42.826670 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="1s" 2025-12-12T16:26:42.831484299+00:00 stderr F I1212 16:26:42.830849 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="1s" 2025-12-12T16:26:42.832457263+00:00 stderr F I1212 16:26:42.832414 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="1s" 2025-12-12T16:26:42.844068977+00:00 stderr F I1212 16:26:42.843968 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="1s" 2025-12-12T16:26:42.869819827+00:00 stderr F I1212 16:26:42.869571 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="1s" 2025-12-12T16:26:43.382345596+00:00 stderr F I1212 16:26:43.382068 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="1s" 2025-12-12T16:26:43.827932912+00:00 stderr F I1212 16:26:43.827848 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" nextSyncIn="9m58.172182911s" 2025-12-12T16:26:43.995445124+00:00 stderr F I1212 16:26:43.994722 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="1s" 2025-12-12T16:26:44.996782891+00:00 stderr F I1212 16:26:44.995848 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" nextSyncIn="9m57.004164183s" 2025-12-12T16:26:46.009469525+00:00 stderr F I1212 16:26:46.009337 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="1s" 2025-12-12T16:26:47.010378552+00:00 stderr F I1212 16:26:47.010212 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" nextSyncIn="9m54.989843143s" 2025-12-12T16:26:47.021758690+00:00 stderr F I1212 16:26:47.021591 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="1s" 2025-12-12T16:26:48.022354657+00:00 stderr F I1212 16:26:48.022014 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. 
Will sync this job again" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" nextSyncIn="9m53.978006932s" 2025-12-12T16:26:48.280586711+00:00 stderr F I1212 16:26:48.280381 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="1s" 2025-12-12T16:26:49.192918720+00:00 stderr F I1212 16:26:49.191904 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="0s" 2025-12-12T16:26:49.192918720+00:00 stderr F I1212 16:26:49.192246 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" nextSyncIn="9m59.99999341s" 2025-12-12T16:26:49.202901462+00:00 stderr F I1212 16:26:49.202847 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:26:49.210727800+00:00 stderr F I1212 16:26:49.209809 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:26:49.220386394+00:00 stderr F I1212 16:26:49.219893 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:26:49.225367980+00:00 stderr F I1212 16:26:49.225196 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:26:49.271962897+00:00 stderr F I1212 16:26:49.271863 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:26:49.280729609+00:00 stderr F I1212 16:26:49.280650 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" nextSyncIn="9m52.719366574s" 2025-12-12T16:26:49.286887574+00:00 stderr F I1212 16:26:49.286846 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="1s" 2025-12-12T16:26:49.301397151+00:00 stderr F I1212 16:26:49.301323 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5" delay="1s" 2025-12-12T16:26:49.799464994+00:00 stderr F I1212 16:26:49.799389 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:26:50.043701284+00:00 stderr F I1212 16:26:50.043617 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:26:50.203491071+00:00 stderr F I1212 16:26:50.203403 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. 
Will sync this job again" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" nextSyncIn="9m58.796610822s" 2025-12-12T16:26:51.053058153+00:00 stderr F I1212 16:26:51.050267 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:26:52.051116678+00:00 stderr F I1212 16:26:52.051016 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" nextSyncIn="9m56.949006285s" 2025-12-12T16:26:53.035716632+00:00 stderr F I1212 16:26:53.035614 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="0s" 2025-12-12T16:26:53.035773214+00:00 stderr F I1212 16:26:53.035739 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" nextSyncIn="9m59.99999527s" 2025-12-12T16:26:53.055469511+00:00 stderr F I1212 16:26:53.054668 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:26:53.062875689+00:00 stderr F I1212 16:26:53.062685 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:26:53.076335679+00:00 stderr F I1212 16:26:53.074720 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:26:53.121162271+00:00 stderr F I1212 16:26:53.118578 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:26:53.161310845+00:00 stderr F I1212 16:26:53.158621 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:26:53.950540954+00:00 stderr F I1212 16:26:53.949944 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:26:54.057379773+00:00 stderr F I1212 16:26:54.055698 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" nextSyncIn="9m58.94432359s" 2025-12-12T16:26:54.105724295+00:00 stderr F I1212 16:26:54.104921 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:26:55.108551629+00:00 stderr F I1212 16:26:55.108438 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. 
Will sync this job again" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" nextSyncIn="9m53.891582135s" 2025-12-12T16:26:55.205212611+00:00 stderr F I1212 16:26:55.205114 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:26:55.251284155+00:00 stderr F I1212 16:26:55.251196 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:26:56.205583214+00:00 stderr F I1212 16:26:56.205480 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" nextSyncIn="9m56.794542639s" 2025-12-12T16:26:56.251704459+00:00 stderr F I1212 16:26:56.251578 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" nextSyncIn="9m52.748438704s" 2025-12-12T16:26:56.380591865+00:00 stderr F I1212 16:26:56.379782 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:26:57.380887377+00:00 stderr F I1212 16:26:57.380642 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" nextSyncIn="9m51.61937873s" 2025-12-12T16:26:57.387220127+00:00 stderr F I1212 16:26:57.387128 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:26:57.404258437+00:00 stderr F I1212 16:26:57.402700 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8" delay="1s" 2025-12-12T16:27:00.448244493+00:00 stderr F I1212 16:27:00.445951 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-operators/obo-prometheus-operator-86648f486b" need=1 creating=1 2025-12-12T16:27:00.470064695+00:00 stderr F I1212 16:27:00.469975 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-operators/obo-prometheus-operator" err="Operation cannot be fulfilled on deployments.apps \"obo-prometheus-operator\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:00.535504241+00:00 stderr F I1212 16:27:00.533926 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-operators/obo-prometheus-operator" err="Operation cannot be fulfilled on deployments.apps \"obo-prometheus-operator\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:00.599158942+00:00 stderr F I1212 16:27:00.598798 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-operators/obo-prometheus-operator-admission-webhook-5cd6b88c95" need=2 creating=2 
2025-12-12T16:27:00.622245057+00:00 stderr F I1212 16:27:00.621600 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-operators/obo-prometheus-operator-admission-webhook" err="Operation cannot be fulfilled on deployments.apps \"obo-prometheus-operator-admission-webhook\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:00.646775328+00:00 stderr F I1212 16:27:00.646637 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-operators/obo-prometheus-operator-admission-webhook" err="Operation cannot be fulfilled on deployments.apps \"obo-prometheus-operator-admission-webhook\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:00.821473939+00:00 stderr F I1212 16:27:00.820789 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-operators/observability-operator-78c97476f4" need=1 creating=1 2025-12-12T16:27:00.847909128+00:00 stderr F I1212 16:27:00.847233 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-operators/observability-operator" err="Operation cannot be fulfilled on deployments.apps \"observability-operator\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:00.858363413+00:00 stderr F I1212 16:27:00.858296 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-operators/observability-operator" err="Operation cannot be fulfilled on deployments.apps \"observability-operator\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:01.034440599+00:00 stderr F I1212 16:27:01.034311 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-operators/perses-operator-68bdb49cbf" need=1 creating=1 2025-12-12T16:27:01.060606711+00:00 stderr F I1212 16:27:01.056606 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-operators/perses-operator" err="Operation cannot be fulfilled on deployments.apps \"perses-operator\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:01.069257070+00:00 stderr F I1212 16:27:01.066336 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-operators/perses-operator" err="Operation cannot be fulfilled on deployments.apps \"perses-operator\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:03.055040368+00:00 stderr F I1212 16:27:03.054951 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-image-registry/image-registry-66587d64c8" need=0 deleting=1 2025-12-12T16:27:03.055167731+00:00 stderr F I1212 16:27:03.055149 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-image-registry/image-registry-66587d64c8" relatedReplicaSets=["openshift-image-registry/image-registry-66587d64c8","openshift-image-registry/image-registry-7994588b6","openshift-image-registry/image-registry-79b89679dd","openshift-image-registry/image-registry-868c685c8f","openshift-image-registry/image-registry-5d9d95bf5b"] 
2025-12-12T16:27:03.055309384+00:00 stderr F I1212 16:27:03.055280 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="image-registry-66587d64c8" pod="openshift-image-registry/image-registry-66587d64c8-jqtjf" 2025-12-12T16:27:05.148943601+00:00 stderr F I1212 16:27:05.148856 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="service-telemetry/elastic-operator-6c994c654b" need=1 creating=1 2025-12-12T16:27:05.163479149+00:00 stderr F I1212 16:27:05.163389 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="service-telemetry/elastic-operator" err="Operation cannot be fulfilled on deployments.apps \"elastic-operator\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:05.187382744+00:00 stderr F I1212 16:27:05.186342 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="service-telemetry/elastic-operator" err="Operation cannot be fulfilled on deployments.apps \"elastic-operator\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:05.206255351+00:00 stderr F I1212 16:27:05.206013 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="service-telemetry/elastic-operator" err="Operation cannot be fulfilled on deployments.apps \"elastic-operator\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853200 1 resource_quota_controller.go:476] "syncing resource quota controller with updated resources from discovery" logger="resourcequota-controller" diff="added: [agent.k8s.elastic.co/v1alpha1, Resource=agents apm.k8s.elastic.co/v1, Resource=apmservers autoscaling.k8s.elastic.co/v1alpha1, Resource=elasticsearchautoscalers beat.k8s.elastic.co/v1beta1, Resource=beats elasticsearch.k8s.elastic.co/v1, Resource=elasticsearches enterprisesearch.k8s.elastic.co/v1, Resource=enterprisesearches kibana.k8s.elastic.co/v1, Resource=kibanas logstash.k8s.elastic.co/v1alpha1, Resource=logstashes maps.k8s.elastic.co/v1alpha1, Resource=elasticmapsservers monitoring.rhobs/v1, Resource=alertmanagers monitoring.rhobs/v1, Resource=podmonitors monitoring.rhobs/v1, Resource=probes monitoring.rhobs/v1, Resource=prometheuses monitoring.rhobs/v1, Resource=prometheusrules monitoring.rhobs/v1, Resource=servicemonitors monitoring.rhobs/v1, Resource=thanosrulers monitoring.rhobs/v1alpha1, Resource=alertmanagerconfigs monitoring.rhobs/v1alpha1, Resource=monitoringstacks monitoring.rhobs/v1alpha1, Resource=prometheusagents monitoring.rhobs/v1alpha1, Resource=scrapeconfigs monitoring.rhobs/v1alpha1, Resource=thanosqueriers observability.openshift.io/v1alpha1, Resource=observabilityinstallers perses.dev/v1alpha1, Resource=perses perses.dev/v1alpha1, Resource=persesdashboards perses.dev/v1alpha1, Resource=persesdatasources stackconfigpolicy.k8s.elastic.co/v1alpha1, Resource=stackconfigpolicies], removed: []" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853333 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="elasticmapsservers.maps.k8s.elastic.co" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853357 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" 
resource="thanosqueriers.monitoring.rhobs" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853377 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="elasticsearches.elasticsearch.k8s.elastic.co" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853395 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="probes.monitoring.rhobs" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853413 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="alertmanagerconfigs.monitoring.rhobs" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853436 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="agents.agent.k8s.elastic.co" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853467 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="apmservers.apm.k8s.elastic.co" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853489 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="elasticsearchautoscalers.autoscaling.k8s.elastic.co" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853525 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="servicemonitors.monitoring.rhobs" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853549 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="perses.perses.dev" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853568 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="kibanas.kibana.k8s.elastic.co" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853588 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="alertmanagers.monitoring.rhobs" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853613 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="prometheusagents.monitoring.rhobs" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853632 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="persesdatasources.perses.dev" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853655 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="thanosrulers.monitoring.rhobs" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853701 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="monitoringstacks.monitoring.rhobs" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853723 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="beats.beat.k8s.elastic.co" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853741 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" 
logger="resourcequota-controller" resource="observabilityinstallers.observability.openshift.io" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853759 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="persesdashboards.perses.dev" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853775 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="logstashes.logstash.k8s.elastic.co" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853796 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="podmonitors.monitoring.rhobs" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853812 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="scrapeconfigs.monitoring.rhobs" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853832 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="enterprisesearches.enterprisesearch.k8s.elastic.co" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853850 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="prometheusrules.monitoring.rhobs" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853869 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="stackconfigpolicies.stackconfigpolicy.k8s.elastic.co" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.853886 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="prometheuses.monitoring.rhobs" 2025-12-12T16:27:05.855006910+00:00 stderr F I1212 16:27:05.854483 1 shared_informer.go:350] "Waiting for caches to sync" controller="resource quota" 2025-12-12T16:27:05.863294340+00:00 stderr F I1212 16:27:05.862589 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:05.863294340+00:00 stderr F I1212 16:27:05.862899 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:05.863294340+00:00 stderr F I1212 16:27:05.863069 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:05.863294340+00:00 stderr F I1212 16:27:05.863246 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:05.863483095+00:00 stderr F I1212 16:27:05.863390 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:05.863672850+00:00 stderr F I1212 16:27:05.863559 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:05.863733621+00:00 stderr F I1212 16:27:05.863713 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 
2025-12-12T16:27:05.881320426+00:00 stderr F I1212 16:27:05.881254 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:05.912926486+00:00 stderr F I1212 16:27:05.912866 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:05.992619553+00:00 stderr F I1212 16:27:05.992559 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="service-telemetry/elastic-operator" err="Operation cannot be fulfilled on deployments.apps \"elastic-operator\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:05.999414375+00:00 stderr F I1212 16:27:05.999364 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.034229386+00:00 stderr F I1212 16:27:06.033565 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.065982120+00:00 stderr F I1212 16:27:06.065848 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.102517875+00:00 stderr F I1212 16:27:06.102450 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.118282014+00:00 stderr F I1212 16:27:06.118160 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.131774825+00:00 stderr F I1212 16:27:06.131718 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.144054236+00:00 stderr F I1212 16:27:06.142797 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.144054236+00:00 stderr F I1212 16:27:06.143942 1 garbagecollector.go:203] "syncing garbage collector with updated resources from discovery" logger="garbage-collector-controller" diff="added: [agent.k8s.elastic.co/v1alpha1, Resource=agents apm.k8s.elastic.co/v1, Resource=apmservers autoscaling.k8s.elastic.co/v1alpha1, Resource=elasticsearchautoscalers beat.k8s.elastic.co/v1beta1, Resource=beats elasticsearch.k8s.elastic.co/v1, Resource=elasticsearches enterprisesearch.k8s.elastic.co/v1, Resource=enterprisesearches kibana.k8s.elastic.co/v1, Resource=kibanas logstash.k8s.elastic.co/v1alpha1, Resource=logstashes maps.k8s.elastic.co/v1alpha1, Resource=elasticmapsservers monitoring.rhobs/v1, Resource=alertmanagers monitoring.rhobs/v1, Resource=podmonitors monitoring.rhobs/v1, Resource=probes monitoring.rhobs/v1, Resource=prometheuses monitoring.rhobs/v1, Resource=prometheusrules monitoring.rhobs/v1, Resource=servicemonitors monitoring.rhobs/v1, Resource=thanosrulers monitoring.rhobs/v1alpha1, Resource=alertmanagerconfigs monitoring.rhobs/v1alpha1, Resource=monitoringstacks monitoring.rhobs/v1alpha1, Resource=prometheusagents monitoring.rhobs/v1alpha1, Resource=scrapeconfigs monitoring.rhobs/v1alpha1, Resource=thanosqueriers 
observability.openshift.io/v1alpha1, Resource=observabilityinstallers observability.openshift.io/v1alpha1, Resource=uiplugins perses.dev/v1alpha1, Resource=perses perses.dev/v1alpha1, Resource=persesdashboards perses.dev/v1alpha1, Resource=persesdatasources stackconfigpolicy.k8s.elastic.co/v1alpha1, Resource=stackconfigpolicies], removed: []" 2025-12-12T16:27:06.155721961+00:00 stderr F I1212 16:27:06.155658 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.157648390+00:00 stderr F I1212 16:27:06.156599 1 shared_informer.go:350] "Waiting for caches to sync" controller="garbage collector" 2025-12-12T16:27:06.167540760+00:00 stderr F I1212 16:27:06.167474 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.182446988+00:00 stderr F I1212 16:27:06.182380 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.206002044+00:00 stderr F I1212 16:27:06.205932 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.261550960+00:00 stderr F I1212 16:27:06.261483 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.286103381+00:00 stderr F I1212 16:27:06.286040 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:27:06.292740619+00:00 stderr F I1212 16:27:06.292651 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.302518987+00:00 stderr F I1212 16:27:06.302465 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.323305353+00:00 stderr F I1212 16:27:06.320628 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.357738324+00:00 stderr F I1212 16:27:06.357651 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.374459357+00:00 stderr F I1212 16:27:06.374365 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.428322840+00:00 stderr F I1212 16:27:06.425024 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:06.455772115+00:00 stderr F I1212 16:27:06.455656 1 shared_informer.go:357] "Caches are synced" controller="resource quota" 2025-12-12T16:27:06.455772115+00:00 stderr F I1212 16:27:06.455688 1 resource_quota_controller.go:502] "synced quota controller" logger="resourcequota-controller" 2025-12-12T16:27:06.457015647+00:00 stderr F I1212 16:27:06.456980 1 shared_informer.go:357] "Caches are synced" controller="garbage collector" 2025-12-12T16:27:06.457015647+00:00 stderr F I1212 
16:27:06.456999 1 garbagecollector.go:235] "synced garbage collector" logger="garbage-collector-controller" 2025-12-12T16:27:07.288247934+00:00 stderr F I1212 16:27:07.288131 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" nextSyncIn="9m45.711882199s" 2025-12-12T16:27:07.308318672+00:00 stderr F I1212 16:27:07.306802 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:27:08.308232109+00:00 stderr F I1212 16:27:08.308125 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" nextSyncIn="9m44.691893024s" 2025-12-12T16:27:08.417056643+00:00 stderr F I1212 16:27:08.411864 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:27:09.412707401+00:00 stderr F I1212 16:27:09.412569 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. Will sync this job again" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" nextSyncIn="9m43.587447693s" 2025-12-12T16:27:09.780087379+00:00 stderr F I1212 16:27:09.777284 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:27:10.795118308+00:00 stderr F I1212 16:27:10.793539 1 job_controller.go:945] "Job has activeDeadlineSeconds configuration. 
Will sync this job again" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" nextSyncIn="9m42.206478112s" 2025-12-12T16:27:10.802854794+00:00 stderr F I1212 16:27:10.802773 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:27:10.815575526+00:00 stderr F I1212 16:27:10.814762 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3" delay="1s" 2025-12-12T16:27:22.726080013+00:00 stderr F I1212 16:27:22.725921 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager-operator/cert-manager-operator-controller-manager-64c74584c4" need=1 creating=1 2025-12-12T16:27:22.742659053+00:00 stderr F I1212 16:27:22.742583 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="cert-manager-operator/cert-manager-operator-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"cert-manager-operator-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:32.900529464+00:00 stderr F I1212 16:27:32.900447 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: agents.agent.k8s.elastic.co-v1alpha1-admin, uid: 990cdffb-4a69-4ce9-b501-12d223bf45fa]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: agents.agent.k8s.elastic.co, uid: 80e8fdc8-6be7-45b2-bfc7-f86334e99030]" 2025-12-12T16:27:32.900581885+00:00 stderr F I1212 16:27:32.900502 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: agents.agent.k8s.elastic.co-v1alpha1-admin, uid: 990cdffb-4a69-4ce9-b501-12d223bf45fa]" virtual=false 2025-12-12T16:27:32.904503054+00:00 stderr F I1212 16:27:32.904420 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: agents.agent.k8s.elastic.co-v1alpha1-edit, uid: ea7631ed-b13b-4a9c-9e3f-4e032eef1f7d]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: agents.agent.k8s.elastic.co, uid: 80e8fdc8-6be7-45b2-bfc7-f86334e99030]" 2025-12-12T16:27:32.904577106+00:00 stderr F I1212 16:27:32.904511 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: agents.agent.k8s.elastic.co-v1alpha1-edit, uid: ea7631ed-b13b-4a9c-9e3f-4e032eef1f7d]" virtual=false 2025-12-12T16:27:32.913323978+00:00 stderr F I1212 16:27:32.913229 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: agents.agent.k8s.elastic.co-v1alpha1-admin, uid: 990cdffb-4a69-4ce9-b501-12d223bf45fa]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"agents.agent.k8s.elastic.co","uid":"80e8fdc8-6be7-45b2-bfc7-f86334e99030","controller":false,"blockOwnerDeletion":false}] 
2025-12-12T16:27:32.919321809+00:00 stderr F I1212 16:27:32.919200 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: agents.agent.k8s.elastic.co-v1alpha1-view, uid: 6f55234a-d356-4230-a3ee-96ad0fa3fafc]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: agents.agent.k8s.elastic.co, uid: 80e8fdc8-6be7-45b2-bfc7-f86334e99030]" 2025-12-12T16:27:32.919355020+00:00 stderr F I1212 16:27:32.919277 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: agents.agent.k8s.elastic.co-v1alpha1-view, uid: 6f55234a-d356-4230-a3ee-96ad0fa3fafc]" virtual=false 2025-12-12T16:27:32.921211957+00:00 stderr F I1212 16:27:32.920970 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: agents.agent.k8s.elastic.co-v1alpha1-edit, uid: ea7631ed-b13b-4a9c-9e3f-4e032eef1f7d]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"agents.agent.k8s.elastic.co","uid":"80e8fdc8-6be7-45b2-bfc7-f86334e99030","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:32.931467267+00:00 stderr F I1212 16:27:32.928901 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: agents.agent.k8s.elastic.co-v1alpha1-crdview, uid: 25fb8b2d-ca0e-425b-96f4-df53f73bb83a]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: agents.agent.k8s.elastic.co, uid: 80e8fdc8-6be7-45b2-bfc7-f86334e99030]" 2025-12-12T16:27:32.931467267+00:00 stderr F I1212 16:27:32.928997 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: agents.agent.k8s.elastic.co-v1alpha1-crdview, uid: 25fb8b2d-ca0e-425b-96f4-df53f73bb83a]" virtual=false 2025-12-12T16:27:32.931467267+00:00 stderr F I1212 16:27:32.930703 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: agents.agent.k8s.elastic.co-v1alpha1-view, uid: 6f55234a-d356-4230-a3ee-96ad0fa3fafc]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"agents.agent.k8s.elastic.co","uid":"80e8fdc8-6be7-45b2-bfc7-f86334e99030","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:32.941859660+00:00 stderr F I1212 16:27:32.941229 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: apmservers.apm.k8s.elastic.co-v1-admin, uid: d3371d14-f923-4881-9797-300a1a751336]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: apmservers.apm.k8s.elastic.co, uid: a6b32a36-7a86-44fc-a7e2-8950c55c1296]" 2025-12-12T16:27:32.941859660+00:00 stderr F I1212 16:27:32.941297 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
apmservers.apm.k8s.elastic.co-v1-admin, uid: d3371d14-f923-4881-9797-300a1a751336]" virtual=false 2025-12-12T16:27:32.944715092+00:00 stderr F I1212 16:27:32.944120 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: apmservers.apm.k8s.elastic.co-v1-edit, uid: b1cfb385-8798-4066-8f35-f1a9aaec48c3]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: apmservers.apm.k8s.elastic.co, uid: a6b32a36-7a86-44fc-a7e2-8950c55c1296]" 2025-12-12T16:27:32.944715092+00:00 stderr F I1212 16:27:32.944362 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: apmservers.apm.k8s.elastic.co-v1-edit, uid: b1cfb385-8798-4066-8f35-f1a9aaec48c3]" virtual=false 2025-12-12T16:27:32.945112322+00:00 stderr F I1212 16:27:32.945067 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: agents.agent.k8s.elastic.co-v1alpha1-crdview, uid: 25fb8b2d-ca0e-425b-96f4-df53f73bb83a]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"agents.agent.k8s.elastic.co","uid":"80e8fdc8-6be7-45b2-bfc7-f86334e99030","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:32.949165775+00:00 stderr F I1212 16:27:32.949095 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: apmservers.apm.k8s.elastic.co-v1-view, uid: e0b11e33-5f66-45e0-aff5-28ba5ff1546c]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: apmservers.apm.k8s.elastic.co, uid: a6b32a36-7a86-44fc-a7e2-8950c55c1296]" 2025-12-12T16:27:32.949263917+00:00 stderr F I1212 16:27:32.949160 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: apmservers.apm.k8s.elastic.co-v1-view, uid: e0b11e33-5f66-45e0-aff5-28ba5ff1546c]" virtual=false 2025-12-12T16:27:32.953070184+00:00 stderr F I1212 16:27:32.953001 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: apmservers.apm.k8s.elastic.co-v1-admin, uid: d3371d14-f923-4881-9797-300a1a751336]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"apmservers.apm.k8s.elastic.co","uid":"a6b32a36-7a86-44fc-a7e2-8950c55c1296","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:32.957533067+00:00 stderr F I1212 16:27:32.957396 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: apmservers.apm.k8s.elastic.co-v1-crdview, uid: 0fa3c6cb-8299-421e-93ca-f9d643fc1ee4]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: apmservers.apm.k8s.elastic.co, uid: a6b32a36-7a86-44fc-a7e2-8950c55c1296]" 2025-12-12T16:27:32.957533067+00:00 stderr F I1212 16:27:32.957451 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: apmservers.apm.k8s.elastic.co-v1-crdview, uid: 0fa3c6cb-8299-421e-93ca-f9d643fc1ee4]" virtual=false 2025-12-12T16:27:32.959263770+00:00 stderr F I1212 16:27:32.957961 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: apmservers.apm.k8s.elastic.co-v1-view, uid: e0b11e33-5f66-45e0-aff5-28ba5ff1546c]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"apmservers.apm.k8s.elastic.co","uid":"a6b32a36-7a86-44fc-a7e2-8950c55c1296","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:32.959263770+00:00 stderr F I1212 16:27:32.958038 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: apmservers.apm.k8s.elastic.co-v1-edit, uid: b1cfb385-8798-4066-8f35-f1a9aaec48c3]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"apmservers.apm.k8s.elastic.co","uid":"a6b32a36-7a86-44fc-a7e2-8950c55c1296","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:32.975196644+00:00 stderr F I1212 16:27:32.975085 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: beats.beat.k8s.elastic.co-v1beta1-admin, uid: c5e9c0ca-7fd7-4e6e-8bae-42641432c51a]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: beats.beat.k8s.elastic.co, uid: 9ba97ad7-3500-4447-9937-5b974f093a81]" 2025-12-12T16:27:32.975253295+00:00 stderr F I1212 16:27:32.975144 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: beats.beat.k8s.elastic.co-v1beta1-admin, uid: c5e9c0ca-7fd7-4e6e-8bae-42641432c51a]" virtual=false 2025-12-12T16:27:32.975529202+00:00 stderr F I1212 16:27:32.975435 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: apmservers.apm.k8s.elastic.co-v1-crdview, uid: 0fa3c6cb-8299-421e-93ca-f9d643fc1ee4]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"apmservers.apm.k8s.elastic.co","uid":"a6b32a36-7a86-44fc-a7e2-8950c55c1296","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:32.981302518+00:00 stderr F I1212 16:27:32.980916 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: beats.beat.k8s.elastic.co-v1beta1-edit, uid: 97043862-81a8-454a-8fce-f81b4968fd1a]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: beats.beat.k8s.elastic.co, uid: 9ba97ad7-3500-4447-9937-5b974f093a81]" 2025-12-12T16:27:32.981302518+00:00 stderr F I1212 16:27:32.980992 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: beats.beat.k8s.elastic.co-v1beta1-edit, uid: 97043862-81a8-454a-8fce-f81b4968fd1a]" virtual=false 
2025-12-12T16:27:32.986263454+00:00 stderr F I1212 16:27:32.986115 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: beats.beat.k8s.elastic.co-v1beta1-admin, uid: c5e9c0ca-7fd7-4e6e-8bae-42641432c51a]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"beats.beat.k8s.elastic.co","uid":"9ba97ad7-3500-4447-9937-5b974f093a81","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:32.989453884+00:00 stderr F I1212 16:27:32.989279 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: beats.beat.k8s.elastic.co-v1beta1-view, uid: 656a34f7-0824-4278-942d-7a62e9021643]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: beats.beat.k8s.elastic.co, uid: 9ba97ad7-3500-4447-9937-5b974f093a81]" 2025-12-12T16:27:32.989453884+00:00 stderr F I1212 16:27:32.989353 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: beats.beat.k8s.elastic.co-v1beta1-view, uid: 656a34f7-0824-4278-942d-7a62e9021643]" virtual=false 2025-12-12T16:27:32.993519487+00:00 stderr F I1212 16:27:32.993072 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: beats.beat.k8s.elastic.co-v1beta1-edit, uid: 97043862-81a8-454a-8fce-f81b4968fd1a]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"beats.beat.k8s.elastic.co","uid":"9ba97ad7-3500-4447-9937-5b974f093a81","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:32.993519487+00:00 stderr F I1212 16:27:32.993144 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: beats.beat.k8s.elastic.co-v1beta1-crdview, uid: 3b19f137-b08d-472f-8995-f547b108ed86]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: beats.beat.k8s.elastic.co, uid: 9ba97ad7-3500-4447-9937-5b974f093a81]" 2025-12-12T16:27:32.993519487+00:00 stderr F I1212 16:27:32.993252 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: beats.beat.k8s.elastic.co-v1beta1-crdview, uid: 3b19f137-b08d-472f-8995-f547b108ed86]" virtual=false 2025-12-12T16:27:33.001612812+00:00 stderr F I1212 16:27:33.001529 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticmapsservers.maps.k8s.elastic.co-v1alpha1-admin, uid: 06acb629-0d18-4e73-ba4f-02a6ec5821ef]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: elasticmapsservers.maps.k8s.elastic.co, uid: 5167abdf-a93d-477f-b5cd-d9d3680622d4]" 2025-12-12T16:27:33.001659083+00:00 stderr F I1212 16:27:33.001590 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
elasticmapsservers.maps.k8s.elastic.co-v1alpha1-admin, uid: 06acb629-0d18-4e73-ba4f-02a6ec5821ef]" virtual=false 2025-12-12T16:27:33.001834228+00:00 stderr F I1212 16:27:33.001738 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: beats.beat.k8s.elastic.co-v1beta1-view, uid: 656a34f7-0824-4278-942d-7a62e9021643]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"beats.beat.k8s.elastic.co","uid":"9ba97ad7-3500-4447-9937-5b974f093a81","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.003975002+00:00 stderr F I1212 16:27:33.003914 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticmapsservers.maps.k8s.elastic.co-v1alpha1-edit, uid: b94f7f54-fe4e-4d77-bacf-b586facc3de8]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: elasticmapsservers.maps.k8s.elastic.co, uid: 5167abdf-a93d-477f-b5cd-d9d3680622d4]" 2025-12-12T16:27:33.004073954+00:00 stderr F I1212 16:27:33.003969 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticmapsservers.maps.k8s.elastic.co-v1alpha1-edit, uid: b94f7f54-fe4e-4d77-bacf-b586facc3de8]" virtual=false 2025-12-12T16:27:33.008072016+00:00 stderr F I1212 16:27:33.007995 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: beats.beat.k8s.elastic.co-v1beta1-crdview, uid: 3b19f137-b08d-472f-8995-f547b108ed86]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"beats.beat.k8s.elastic.co","uid":"9ba97ad7-3500-4447-9937-5b974f093a81","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.019047273+00:00 stderr F I1212 16:27:33.018940 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticmapsservers.maps.k8s.elastic.co-v1alpha1-admin, uid: 06acb629-0d18-4e73-ba4f-02a6ec5821ef]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"elasticmapsservers.maps.k8s.elastic.co","uid":"5167abdf-a93d-477f-b5cd-d9d3680622d4","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.019142036+00:00 stderr F I1212 16:27:33.018979 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticmapsservers.maps.k8s.elastic.co-v1alpha1-edit, uid: b94f7f54-fe4e-4d77-bacf-b586facc3de8]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"elasticmapsservers.maps.k8s.elastic.co","uid":"5167abdf-a93d-477f-b5cd-d9d3680622d4","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.020772197+00:00 stderr F I1212 16:27:33.020699 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
elasticmapsservers.maps.k8s.elastic.co-v1alpha1-view, uid: a77b4cf2-aa8a-4672-86f0-d2d6f2854e62]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: elasticmapsservers.maps.k8s.elastic.co, uid: 5167abdf-a93d-477f-b5cd-d9d3680622d4]" 2025-12-12T16:27:33.021632019+00:00 stderr F I1212 16:27:33.020778 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticmapsservers.maps.k8s.elastic.co-v1alpha1-view, uid: a77b4cf2-aa8a-4672-86f0-d2d6f2854e62]" virtual=false 2025-12-12T16:27:33.027518738+00:00 stderr F I1212 16:27:33.027456 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticmapsservers.maps.k8s.elastic.co-v1alpha1-crdview, uid: 4d28de65-6c8a-4d83-bdde-e719fcf710ef]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: elasticmapsservers.maps.k8s.elastic.co, uid: 5167abdf-a93d-477f-b5cd-d9d3680622d4]" 2025-12-12T16:27:33.027617870+00:00 stderr F I1212 16:27:33.027576 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticmapsservers.maps.k8s.elastic.co-v1alpha1-crdview, uid: 4d28de65-6c8a-4d83-bdde-e719fcf710ef]" virtual=false 2025-12-12T16:27:33.033715105+00:00 stderr F I1212 16:27:33.033623 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticmapsservers.maps.k8s.elastic.co-v1alpha1-view, uid: a77b4cf2-aa8a-4672-86f0-d2d6f2854e62]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"elasticmapsservers.maps.k8s.elastic.co","uid":"5167abdf-a93d-477f-b5cd-d9d3680622d4","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.038206838+00:00 stderr F I1212 16:27:33.036871 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticmapsservers.maps.k8s.elastic.co-v1alpha1-crdview, uid: 4d28de65-6c8a-4d83-bdde-e719fcf710ef]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"elasticmapsservers.maps.k8s.elastic.co","uid":"5167abdf-a93d-477f-b5cd-d9d3680622d4","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.038206838+00:00 stderr F I1212 16:27:33.037030 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co-v1alpha1-view, uid: 6655555d-732a-40a1-a538-1aa1a6525174]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co, uid: e3f95fc0-8259-4b63-8743-b65f6fba926d]" 2025-12-12T16:27:33.038206838+00:00 stderr F I1212 16:27:33.037094 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co-v1alpha1-view, uid: 6655555d-732a-40a1-a538-1aa1a6525174]" virtual=false 
2025-12-12T16:27:33.047026332+00:00 stderr F I1212 16:27:33.046923 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co-v1alpha1-admin, uid: 158616cc-4382-44e9-9983-e9382825670b]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co, uid: e3f95fc0-8259-4b63-8743-b65f6fba926d]" 2025-12-12T16:27:33.047072893+00:00 stderr F I1212 16:27:33.046993 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co-v1alpha1-admin, uid: 158616cc-4382-44e9-9983-e9382825670b]" virtual=false 2025-12-12T16:27:33.047226257+00:00 stderr F I1212 16:27:33.047140 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co-v1alpha1-view, uid: 6655555d-732a-40a1-a538-1aa1a6525174]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"elasticsearchautoscalers.autoscaling.k8s.elastic.co","uid":"e3f95fc0-8259-4b63-8743-b65f6fba926d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.051423313+00:00 stderr F I1212 16:27:33.051340 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co-v1alpha1-edit, uid: ffb147e3-39ba-4deb-88a9-35c2e0ec3396]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co, uid: e3f95fc0-8259-4b63-8743-b65f6fba926d]" 2025-12-12T16:27:33.051455174+00:00 stderr F I1212 16:27:33.051411 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co-v1alpha1-edit, uid: ffb147e3-39ba-4deb-88a9-35c2e0ec3396]" virtual=false 2025-12-12T16:27:33.053668870+00:00 stderr F I1212 16:27:33.053608 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co-v1alpha1-admin, uid: 158616cc-4382-44e9-9983-e9382825670b]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"elasticsearchautoscalers.autoscaling.k8s.elastic.co","uid":"e3f95fc0-8259-4b63-8743-b65f6fba926d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.067663914+00:00 stderr F I1212 16:27:33.067419 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co-v1alpha1-edit, uid: ffb147e3-39ba-4deb-88a9-35c2e0ec3396]" 
owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"elasticsearchautoscalers.autoscaling.k8s.elastic.co","uid":"e3f95fc0-8259-4b63-8743-b65f6fba926d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.074613450+00:00 stderr F I1212 16:27:33.074473 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co-v1alpha1-crdview, uid: 6dd53952-9fb4-4ec3-b19e-7ecc8c4fce90]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co, uid: e3f95fc0-8259-4b63-8743-b65f6fba926d]" 2025-12-12T16:27:33.074739023+00:00 stderr F I1212 16:27:33.074701 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co-v1alpha1-crdview, uid: 6dd53952-9fb4-4ec3-b19e-7ecc8c4fce90]" virtual=false 2025-12-12T16:27:33.078482528+00:00 stderr F I1212 16:27:33.078352 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co-v1-edit, uid: 5418d682-fab2-4da4-b113-a4b9dbf62258]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co, uid: d13c3251-6397-486d-b294-14b9f4c30762]" 2025-12-12T16:27:33.078482528+00:00 stderr F I1212 16:27:33.078451 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co-v1-edit, uid: 5418d682-fab2-4da4-b113-a4b9dbf62258]" virtual=false 2025-12-12T16:27:33.078889518+00:00 stderr F I1212 16:27:33.078831 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co-v1-view, uid: 51d3959a-deef-4ce6-892f-ed7e477972f1]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co, uid: d13c3251-6397-486d-b294-14b9f4c30762]" 2025-12-12T16:27:33.078923809+00:00 stderr F I1212 16:27:33.078891 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co-v1-admin, uid: 822cd633-34d2-47f5-a49d-e107f23899c6]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co, uid: d13c3251-6397-486d-b294-14b9f4c30762]" 2025-12-12T16:27:33.078957230+00:00 stderr F I1212 16:27:33.078927 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co-v1-view, uid: 51d3959a-deef-4ce6-892f-ed7e477972f1]" virtual=false 2025-12-12T16:27:33.079243517+00:00 stderr F I1212 16:27:33.079205 1 garbagecollector.go:501] "Processing 
item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co-v1-admin, uid: 822cd633-34d2-47f5-a49d-e107f23899c6]" virtual=false 2025-12-12T16:27:33.095190381+00:00 stderr F I1212 16:27:33.095048 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co-v1-crdview, uid: b5d734d3-8b74-4f0f-80c9-4c9b2e05fc11]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co, uid: d13c3251-6397-486d-b294-14b9f4c30762]" 2025-12-12T16:27:33.095190381+00:00 stderr F I1212 16:27:33.095119 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co-v1-crdview, uid: b5d734d3-8b74-4f0f-80c9-4c9b2e05fc11]" virtual=false 2025-12-12T16:27:33.095388476+00:00 stderr F I1212 16:27:33.095337 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearchautoscalers.autoscaling.k8s.elastic.co-v1alpha1-crdview, uid: 6dd53952-9fb4-4ec3-b19e-7ecc8c4fce90]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"elasticsearchautoscalers.autoscaling.k8s.elastic.co","uid":"e3f95fc0-8259-4b63-8743-b65f6fba926d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.099075199+00:00 stderr F I1212 16:27:33.098972 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co-v1-admin, uid: 822cd633-34d2-47f5-a49d-e107f23899c6]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"elasticsearches.elasticsearch.k8s.elastic.co","uid":"d13c3251-6397-486d-b294-14b9f4c30762","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.113715039+00:00 stderr F I1212 16:27:33.113593 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co-v1-view, uid: 51d3959a-deef-4ce6-892f-ed7e477972f1]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"elasticsearches.elasticsearch.k8s.elastic.co","uid":"d13c3251-6397-486d-b294-14b9f4c30762","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.117429243+00:00 stderr F I1212 16:27:33.113920 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co-v1-crdview, uid: b5d734d3-8b74-4f0f-80c9-4c9b2e05fc11]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"elasticsearches.elasticsearch.k8s.elastic.co","uid":"d13c3251-6397-486d-b294-14b9f4c30762","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.117429243+00:00 stderr F I1212 
16:27:33.113973 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: elasticsearches.elasticsearch.k8s.elastic.co-v1-edit, uid: 5418d682-fab2-4da4-b113-a4b9dbf62258]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"elasticsearches.elasticsearch.k8s.elastic.co","uid":"d13c3251-6397-486d-b294-14b9f4c30762","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.117429243+00:00 stderr F I1212 16:27:33.114892 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co-v1-admin, uid: b550b429-9128-4725-9aa2-7b270f847c1c]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co, uid: 5cef7994-84ad-4a87-b9e2-163fb5db039e]" 2025-12-12T16:27:33.117429243+00:00 stderr F I1212 16:27:33.114920 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co-v1-edit, uid: ce434f16-1915-47dd-8bd0-5f19353d8098]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co, uid: 5cef7994-84ad-4a87-b9e2-163fb5db039e]" 2025-12-12T16:27:33.117429243+00:00 stderr F I1212 16:27:33.114934 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co-v1-view, uid: 46313832-aeea-4e79-be52-d51cc26c1129]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co, uid: 5cef7994-84ad-4a87-b9e2-163fb5db039e]" 2025-12-12T16:27:33.117429243+00:00 stderr F I1212 16:27:33.114949 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co-v1-admin, uid: b550b429-9128-4725-9aa2-7b270f847c1c]" virtual=false 2025-12-12T16:27:33.117429243+00:00 stderr F I1212 16:27:33.115062 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co-v1-edit, uid: ce434f16-1915-47dd-8bd0-5f19353d8098]" virtual=false 2025-12-12T16:27:33.117429243+00:00 stderr F I1212 16:27:33.115150 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co-v1-view, uid: 46313832-aeea-4e79-be52-d51cc26c1129]" virtual=false 2025-12-12T16:27:33.124500042+00:00 stderr F I1212 16:27:33.123667 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
enterprisesearches.enterprisesearch.k8s.elastic.co-v1-crdview, uid: 87eb1a97-88b5-4729-a75c-d9db2980be35]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co, uid: 5cef7994-84ad-4a87-b9e2-163fb5db039e]" 2025-12-12T16:27:33.124500042+00:00 stderr F I1212 16:27:33.123730 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co-v1-crdview, uid: 87eb1a97-88b5-4729-a75c-d9db2980be35]" virtual=false 2025-12-12T16:27:33.125604130+00:00 stderr F I1212 16:27:33.125425 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co-v1-admin, uid: b550b429-9128-4725-9aa2-7b270f847c1c]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"enterprisesearches.enterprisesearch.k8s.elastic.co","uid":"5cef7994-84ad-4a87-b9e2-163fb5db039e","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.131306365+00:00 stderr F I1212 16:27:33.127752 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co-v1-view, uid: 46313832-aeea-4e79-be52-d51cc26c1129]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"enterprisesearches.enterprisesearch.k8s.elastic.co","uid":"5cef7994-84ad-4a87-b9e2-163fb5db039e","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.135137452+00:00 stderr F I1212 16:27:33.135044 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: kibanas.kibana.k8s.elastic.co-v1-admin, uid: 603753a5-4b1c-4597-a90d-c127c68ad946]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kibanas.kibana.k8s.elastic.co, uid: e8dc7587-af57-416c-9cfb-1702fe87860e]" 2025-12-12T16:27:33.135137452+00:00 stderr F I1212 16:27:33.135104 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: kibanas.kibana.k8s.elastic.co-v1-admin, uid: 603753a5-4b1c-4597-a90d-c127c68ad946]" virtual=false 2025-12-12T16:27:33.144456677+00:00 stderr F I1212 16:27:33.133331 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: enterprisesearches.enterprisesearch.k8s.elastic.co-v1-crdview, uid: 87eb1a97-88b5-4729-a75c-d9db2980be35]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"enterprisesearches.enterprisesearch.k8s.elastic.co","uid":"5cef7994-84ad-4a87-b9e2-163fb5db039e","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.145372731+00:00 stderr F I1212 16:27:33.145262 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
enterprisesearches.enterprisesearch.k8s.elastic.co-v1-edit, uid: ce434f16-1915-47dd-8bd0-5f19353d8098]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"enterprisesearches.enterprisesearch.k8s.elastic.co","uid":"5cef7994-84ad-4a87-b9e2-163fb5db039e","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.146433107+00:00 stderr F I1212 16:27:33.146317 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: kibanas.kibana.k8s.elastic.co-v1-edit, uid: d2de5832-bd69-49ad-82c0-edef66106c28]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kibanas.kibana.k8s.elastic.co, uid: e8dc7587-af57-416c-9cfb-1702fe87860e]" 2025-12-12T16:27:33.146469098+00:00 stderr F I1212 16:27:33.146428 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: kibanas.kibana.k8s.elastic.co-v1-edit, uid: d2de5832-bd69-49ad-82c0-edef66106c28]" virtual=false 2025-12-12T16:27:33.146600622+00:00 stderr F I1212 16:27:33.146542 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: kibanas.kibana.k8s.elastic.co-v1-view, uid: fa01132a-b680-4801-8bdc-d9cf9b8b28b1]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kibanas.kibana.k8s.elastic.co, uid: e8dc7587-af57-416c-9cfb-1702fe87860e]" 2025-12-12T16:27:33.146633632+00:00 stderr F I1212 16:27:33.146583 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: kibanas.kibana.k8s.elastic.co-v1-view, uid: fa01132a-b680-4801-8bdc-d9cf9b8b28b1]" virtual=false 2025-12-12T16:27:33.149396342+00:00 stderr F I1212 16:27:33.149323 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: kibanas.kibana.k8s.elastic.co-v1-crdview, uid: c2274b7c-f9c0-4d44-a02e-007061dd29e7]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kibanas.kibana.k8s.elastic.co, uid: e8dc7587-af57-416c-9cfb-1702fe87860e]" 2025-12-12T16:27:33.149415933+00:00 stderr F I1212 16:27:33.149386 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: kibanas.kibana.k8s.elastic.co-v1-crdview, uid: c2274b7c-f9c0-4d44-a02e-007061dd29e7]" virtual=false 2025-12-12T16:27:33.155829145+00:00 stderr F I1212 16:27:33.155733 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: kibanas.kibana.k8s.elastic.co-v1-admin, uid: 603753a5-4b1c-4597-a90d-c127c68ad946]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"kibanas.kibana.k8s.elastic.co","uid":"e8dc7587-af57-416c-9cfb-1702fe87860e","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.159266612+00:00 stderr F I1212 16:27:33.157667 1 graph_builder.go:456] "item references an owner with coordinates that do not match the 
observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: logstashes.logstash.k8s.elastic.co-v1alpha1-view, uid: 3305cc4b-0a5b-4798-b4f8-75d983f32cff]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: logstashes.logstash.k8s.elastic.co, uid: d8ed4e16-d503-4f18-a616-d7a8cc685317]" 2025-12-12T16:27:33.159266612+00:00 stderr F I1212 16:27:33.157724 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: logstashes.logstash.k8s.elastic.co-v1alpha1-view, uid: 3305cc4b-0a5b-4798-b4f8-75d983f32cff]" virtual=false 2025-12-12T16:27:33.167468750+00:00 stderr F I1212 16:27:33.164454 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: kibanas.kibana.k8s.elastic.co-v1-crdview, uid: c2274b7c-f9c0-4d44-a02e-007061dd29e7]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"kibanas.kibana.k8s.elastic.co","uid":"e8dc7587-af57-416c-9cfb-1702fe87860e","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.167468750+00:00 stderr F I1212 16:27:33.164604 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: kibanas.kibana.k8s.elastic.co-v1-edit, uid: d2de5832-bd69-49ad-82c0-edef66106c28]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"kibanas.kibana.k8s.elastic.co","uid":"e8dc7587-af57-416c-9cfb-1702fe87860e","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.167468750+00:00 stderr F I1212 16:27:33.164711 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: logstashes.logstash.k8s.elastic.co-v1alpha1-admin, uid: 83fa7d60-c300-49f7-91a3-832a3990486f]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: logstashes.logstash.k8s.elastic.co, uid: d8ed4e16-d503-4f18-a616-d7a8cc685317]" 2025-12-12T16:27:33.167468750+00:00 stderr F I1212 16:27:33.164756 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: logstashes.logstash.k8s.elastic.co-v1alpha1-admin, uid: 83fa7d60-c300-49f7-91a3-832a3990486f]" virtual=false 2025-12-12T16:27:33.167468750+00:00 stderr F I1212 16:27:33.164916 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: kibanas.kibana.k8s.elastic.co-v1-view, uid: fa01132a-b680-4801-8bdc-d9cf9b8b28b1]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"kibanas.kibana.k8s.elastic.co","uid":"e8dc7587-af57-416c-9cfb-1702fe87860e","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.173341908+00:00 stderr F I1212 16:27:33.173243 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
logstashes.logstash.k8s.elastic.co-v1alpha1-edit, uid: 4be1f8f1-1f9d-4f8e-9cb8-b3e62c4854b5]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: logstashes.logstash.k8s.elastic.co, uid: d8ed4e16-d503-4f18-a616-d7a8cc685317]" 2025-12-12T16:27:33.173341908+00:00 stderr F I1212 16:27:33.173310 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: logstashes.logstash.k8s.elastic.co-v1alpha1-edit, uid: 4be1f8f1-1f9d-4f8e-9cb8-b3e62c4854b5]" virtual=false 2025-12-12T16:27:33.175310768+00:00 stderr F I1212 16:27:33.173727 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: logstashes.logstash.k8s.elastic.co-v1alpha1-view, uid: 3305cc4b-0a5b-4798-b4f8-75d983f32cff]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"logstashes.logstash.k8s.elastic.co","uid":"d8ed4e16-d503-4f18-a616-d7a8cc685317","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.180166951+00:00 stderr F I1212 16:27:33.180097 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: logstashes.logstash.k8s.elastic.co-v1alpha1-crdview, uid: cca280fb-6c53-4a38-8490-65f4aaeebe6e]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: logstashes.logstash.k8s.elastic.co, uid: d8ed4e16-d503-4f18-a616-d7a8cc685317]" 2025-12-12T16:27:33.180216342+00:00 stderr F I1212 16:27:33.180157 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: logstashes.logstash.k8s.elastic.co-v1alpha1-crdview, uid: cca280fb-6c53-4a38-8490-65f4aaeebe6e]" virtual=false 2025-12-12T16:27:33.188897652+00:00 stderr F I1212 16:27:33.187140 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co-v1alpha1-admin, uid: 6ad0f182-d40f-48f1-82bb-a73cbaa1347d]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co, uid: 9d325e42-4d4f-4e8c-92e9-081dd02c7489]" 2025-12-12T16:27:33.188897652+00:00 stderr F I1212 16:27:33.187217 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co-v1alpha1-admin, uid: 6ad0f182-d40f-48f1-82bb-a73cbaa1347d]" virtual=false 2025-12-12T16:27:33.191674692+00:00 stderr F I1212 16:27:33.189915 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: logstashes.logstash.k8s.elastic.co-v1alpha1-edit, uid: 4be1f8f1-1f9d-4f8e-9cb8-b3e62c4854b5]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"logstashes.logstash.k8s.elastic.co","uid":"d8ed4e16-d503-4f18-a616-d7a8cc685317","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.191674692+00:00 stderr F 
I1212 16:27:33.190126 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: logstashes.logstash.k8s.elastic.co-v1alpha1-admin, uid: 83fa7d60-c300-49f7-91a3-832a3990486f]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"logstashes.logstash.k8s.elastic.co","uid":"d8ed4e16-d503-4f18-a616-d7a8cc685317","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.191674692+00:00 stderr F I1212 16:27:33.191057 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co-v1alpha1-edit, uid: 3029b8f3-08f4-438d-924d-2d7915eaee81]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co, uid: 9d325e42-4d4f-4e8c-92e9-081dd02c7489]" 2025-12-12T16:27:33.191674692+00:00 stderr F I1212 16:27:33.191100 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co-v1alpha1-edit, uid: 3029b8f3-08f4-438d-924d-2d7915eaee81]" virtual=false 2025-12-12T16:27:33.199616203+00:00 stderr F I1212 16:27:33.199348 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co-v1alpha1-view, uid: 865f4090-c8d1-4897-8c86-6d60a8bcb752]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co, uid: 9d325e42-4d4f-4e8c-92e9-081dd02c7489]" 2025-12-12T16:27:33.199616203+00:00 stderr F I1212 16:27:33.199424 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co-v1alpha1-view, uid: 865f4090-c8d1-4897-8c86-6d60a8bcb752]" virtual=false 2025-12-12T16:27:33.199724416+00:00 stderr F I1212 16:27:33.199615 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: logstashes.logstash.k8s.elastic.co-v1alpha1-crdview, uid: cca280fb-6c53-4a38-8490-65f4aaeebe6e]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"logstashes.logstash.k8s.elastic.co","uid":"d8ed4e16-d503-4f18-a616-d7a8cc685317","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.201813669+00:00 stderr F I1212 16:27:33.201732 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co-v1alpha1-crdview, uid: fd06bf93-46da-40ec-b00b-2ec10a14d6f6]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co, uid: 9d325e42-4d4f-4e8c-92e9-081dd02c7489]" 2025-12-12T16:27:33.201813669+00:00 stderr F 
I1212 16:27:33.201769 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co-v1alpha1-crdview, uid: fd06bf93-46da-40ec-b00b-2ec10a14d6f6]" virtual=false 2025-12-12T16:27:33.202383603+00:00 stderr F I1212 16:27:33.202281 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co-v1alpha1-admin, uid: 6ad0f182-d40f-48f1-82bb-a73cbaa1347d]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"stackconfigpolicies.stackconfigpolicy.k8s.elastic.co","uid":"9d325e42-4d4f-4e8c-92e9-081dd02c7489","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.208587140+00:00 stderr F I1212 16:27:33.208047 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co-v1alpha1-edit, uid: 3029b8f3-08f4-438d-924d-2d7915eaee81]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"stackconfigpolicies.stackconfigpolicy.k8s.elastic.co","uid":"9d325e42-4d4f-4e8c-92e9-081dd02c7489","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.218272306+00:00 stderr F I1212 16:27:33.217629 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co-v1alpha1-view, uid: 865f4090-c8d1-4897-8c86-6d60a8bcb752]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"stackconfigpolicies.stackconfigpolicy.k8s.elastic.co","uid":"9d325e42-4d4f-4e8c-92e9-081dd02c7489","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:33.218272306+00:00 stderr F I1212 16:27:33.217700 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: stackconfigpolicies.stackconfigpolicy.k8s.elastic.co-v1alpha1-crdview, uid: fd06bf93-46da-40ec-b00b-2ec10a14d6f6]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"stackconfigpolicies.stackconfigpolicy.k8s.elastic.co","uid":"9d325e42-4d4f-4e8c-92e9-081dd02c7489","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.149400588+00:00 stderr F I1212 16:27:36.149280 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificaterequests.cert-manager.io-v1-view, uid: 85defb55-1fa2-4966-a30e-d27814d62cd7]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: certificaterequests.cert-manager.io, uid: abba6923-874f-484f-bffa-8b88b15e0ec5]" 2025-12-12T16:27:36.149400588+00:00 stderr F I1212 16:27:36.149361 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificaterequests.cert-manager.io-v1-view, uid: 
85defb55-1fa2-4966-a30e-d27814d62cd7]" virtual=false 2025-12-12T16:27:36.161015492+00:00 stderr F I1212 16:27:36.160929 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificaterequests.cert-manager.io-v1-admin, uid: 4d2c2afa-6db3-422e-a6f3-cc6023cb38f5]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: certificaterequests.cert-manager.io, uid: abba6923-874f-484f-bffa-8b88b15e0ec5]" 2025-12-12T16:27:36.161015492+00:00 stderr F I1212 16:27:36.160987 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificaterequests.cert-manager.io-v1-admin, uid: 4d2c2afa-6db3-422e-a6f3-cc6023cb38f5]" virtual=false 2025-12-12T16:27:36.183006148+00:00 stderr F I1212 16:27:36.182495 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificaterequests.cert-manager.io-v1-edit, uid: 8b0e0588-5801-4048-a320-b41750b76186]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: certificaterequests.cert-manager.io, uid: abba6923-874f-484f-bffa-8b88b15e0ec5]" 2025-12-12T16:27:36.183006148+00:00 stderr F I1212 16:27:36.182549 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificaterequests.cert-manager.io-v1-edit, uid: 8b0e0588-5801-4048-a320-b41750b76186]" virtual=false 2025-12-12T16:27:36.186704732+00:00 stderr F I1212 16:27:36.186599 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificaterequests.cert-manager.io-v1-crdview, uid: 33708f5a-b7d2-4911-bfed-5041f9af4f03]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: certificaterequests.cert-manager.io, uid: abba6923-874f-484f-bffa-8b88b15e0ec5]" 2025-12-12T16:27:36.186704732+00:00 stderr F I1212 16:27:36.186683 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificaterequests.cert-manager.io-v1-crdview, uid: 33708f5a-b7d2-4911-bfed-5041f9af4f03]" virtual=false 2025-12-12T16:27:36.209694554+00:00 stderr F I1212 16:27:36.209232 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificates.cert-manager.io-v1-edit, uid: 58b58a0b-e1ca-4ab9-97dc-278483efd0c5]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: certificates.cert-manager.io, uid: aab8e068-e644-44c0-9698-798f9b3ccb2a]" 2025-12-12T16:27:36.209694554+00:00 stderr F I1212 16:27:36.209288 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificates.cert-manager.io-v1-edit, uid: 58b58a0b-e1ca-4ab9-97dc-278483efd0c5]" virtual=false 2025-12-12T16:27:36.210318560+00:00 stderr F I1212 16:27:36.209993 1 garbagecollector.go:567] "item has at least one 
existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificaterequests.cert-manager.io-v1-edit, uid: 8b0e0588-5801-4048-a320-b41750b76186]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"certificaterequests.cert-manager.io","uid":"abba6923-874f-484f-bffa-8b88b15e0ec5","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.210318560+00:00 stderr F I1212 16:27:36.210085 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificaterequests.cert-manager.io-v1-admin, uid: 4d2c2afa-6db3-422e-a6f3-cc6023cb38f5]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"certificaterequests.cert-manager.io","uid":"abba6923-874f-484f-bffa-8b88b15e0ec5","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.210318560+00:00 stderr F I1212 16:27:36.210132 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificaterequests.cert-manager.io-v1-view, uid: 85defb55-1fa2-4966-a30e-d27814d62cd7]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"certificaterequests.cert-manager.io","uid":"abba6923-874f-484f-bffa-8b88b15e0ec5","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.213049349+00:00 stderr F I1212 16:27:36.210602 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificaterequests.cert-manager.io-v1-crdview, uid: 33708f5a-b7d2-4911-bfed-5041f9af4f03]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"certificaterequests.cert-manager.io","uid":"abba6923-874f-484f-bffa-8b88b15e0ec5","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.231252440+00:00 stderr F I1212 16:27:36.230372 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificates.cert-manager.io-v1-edit, uid: 58b58a0b-e1ca-4ab9-97dc-278483efd0c5]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"certificates.cert-manager.io","uid":"aab8e068-e644-44c0-9698-798f9b3ccb2a","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.240694508+00:00 stderr F I1212 16:27:36.239743 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificates.cert-manager.io-v1-view, uid: 1567ec45-4097-4a6a-abf2-fcc85e23da0a]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: certificates.cert-manager.io, uid: aab8e068-e644-44c0-9698-798f9b3ccb2a]" 2025-12-12T16:27:36.240694508+00:00 stderr F I1212 16:27:36.239808 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificates.cert-manager.io-v1-view, uid: 1567ec45-4097-4a6a-abf2-fcc85e23da0a]" 
virtual=false 2025-12-12T16:27:36.257745400+00:00 stderr F I1212 16:27:36.257372 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificates.cert-manager.io-v1-admin, uid: eaaa6de2-6453-4550-a1d0-d45a0782a164]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: certificates.cert-manager.io, uid: aab8e068-e644-44c0-9698-798f9b3ccb2a]" 2025-12-12T16:27:36.257745400+00:00 stderr F I1212 16:27:36.257440 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificates.cert-manager.io-v1-admin, uid: eaaa6de2-6453-4550-a1d0-d45a0782a164]" virtual=false 2025-12-12T16:27:36.262944922+00:00 stderr F I1212 16:27:36.261230 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificates.cert-manager.io-v1-view, uid: 1567ec45-4097-4a6a-abf2-fcc85e23da0a]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"certificates.cert-manager.io","uid":"aab8e068-e644-44c0-9698-798f9b3ccb2a","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.279931092+00:00 stderr F I1212 16:27:36.279842 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificates.cert-manager.io-v1-admin, uid: eaaa6de2-6453-4550-a1d0-d45a0782a164]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"certificates.cert-manager.io","uid":"aab8e068-e644-44c0-9698-798f9b3ccb2a","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.283878851+00:00 stderr F I1212 16:27:36.283782 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificates.cert-manager.io-v1-crdview, uid: 2786618a-c81b-4912-a076-f550d84dccd8]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: certificates.cert-manager.io, uid: aab8e068-e644-44c0-9698-798f9b3ccb2a]" 2025-12-12T16:27:36.283878851+00:00 stderr F I1212 16:27:36.283846 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificates.cert-manager.io-v1-crdview, uid: 2786618a-c81b-4912-a076-f550d84dccd8]" virtual=false 2025-12-12T16:27:36.309217733+00:00 stderr F I1212 16:27:36.309061 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certificates.cert-manager.io-v1-crdview, uid: 2786618a-c81b-4912-a076-f550d84dccd8]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"certificates.cert-manager.io","uid":"aab8e068-e644-44c0-9698-798f9b3ccb2a","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.329499886+00:00 stderr F I1212 16:27:36.328825 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" 
logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certmanagers.operator.openshift.io-v1alpha1-admin, uid: c11ccf86-7100-4aea-bccf-13aeef4c8e81]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: certmanagers.operator.openshift.io, uid: 64290f3e-e0ed-4371-8a36-eb1177441a9f]" 2025-12-12T16:27:36.329499886+00:00 stderr F I1212 16:27:36.328905 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certmanagers.operator.openshift.io-v1alpha1-admin, uid: c11ccf86-7100-4aea-bccf-13aeef4c8e81]" virtual=false 2025-12-12T16:27:36.329499886+00:00 stderr F I1212 16:27:36.329113 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certmanagers.operator.openshift.io-v1alpha1-edit, uid: fdc6b869-558c-4728-a556-8350b0d822aa]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: certmanagers.operator.openshift.io, uid: 64290f3e-e0ed-4371-8a36-eb1177441a9f]" 2025-12-12T16:27:36.329499886+00:00 stderr F I1212 16:27:36.329235 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certmanagers.operator.openshift.io-v1alpha1-edit, uid: fdc6b869-558c-4728-a556-8350b0d822aa]" virtual=false 2025-12-12T16:27:36.329562588+00:00 stderr F I1212 16:27:36.329512 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certmanagers.operator.openshift.io-v1alpha1-view, uid: dd8105a7-382f-439e-90c6-1c24808d5b70]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: certmanagers.operator.openshift.io, uid: 64290f3e-e0ed-4371-8a36-eb1177441a9f]" 2025-12-12T16:27:36.329607309+00:00 stderr F I1212 16:27:36.329566 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certmanagers.operator.openshift.io-v1alpha1-view, uid: dd8105a7-382f-439e-90c6-1c24808d5b70]" virtual=false 2025-12-12T16:27:36.337637462+00:00 stderr F I1212 16:27:36.337371 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certmanagers.operator.openshift.io-v1alpha1-crdview, uid: bef7000c-f4cc-4e9e-9f25-ab8dc57c3300]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: certmanagers.operator.openshift.io, uid: 64290f3e-e0ed-4371-8a36-eb1177441a9f]" 2025-12-12T16:27:36.337637462+00:00 stderr F I1212 16:27:36.337444 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certmanagers.operator.openshift.io-v1alpha1-crdview, uid: bef7000c-f4cc-4e9e-9f25-ab8dc57c3300]" virtual=false 2025-12-12T16:27:36.340064943+00:00 stderr F I1212 16:27:36.340009 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
certmanagers.operator.openshift.io-v1alpha1-admin, uid: c11ccf86-7100-4aea-bccf-13aeef4c8e81]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"certmanagers.operator.openshift.io","uid":"64290f3e-e0ed-4371-8a36-eb1177441a9f","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.354664833+00:00 stderr F I1212 16:27:36.352382 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: challenges.acme.cert-manager.io-v1-admin, uid: 29e5919b-b831-4684-9ca8-48dd68235601]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: challenges.acme.cert-manager.io, uid: c37a6a6b-19a9-4531-a625-f6fee1016993]" 2025-12-12T16:27:36.354759805+00:00 stderr F I1212 16:27:36.354664 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: challenges.acme.cert-manager.io-v1-admin, uid: 29e5919b-b831-4684-9ca8-48dd68235601]" virtual=false 2025-12-12T16:27:36.355730880+00:00 stderr F I1212 16:27:36.355003 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certmanagers.operator.openshift.io-v1alpha1-view, uid: dd8105a7-382f-439e-90c6-1c24808d5b70]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"certmanagers.operator.openshift.io","uid":"64290f3e-e0ed-4371-8a36-eb1177441a9f","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.360339897+00:00 stderr F I1212 16:27:36.360254 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: challenges.acme.cert-manager.io-v1-edit, uid: bc1f4c64-2be0-4293-89fb-9322ffb41132]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: challenges.acme.cert-manager.io, uid: c37a6a6b-19a9-4531-a625-f6fee1016993]" 2025-12-12T16:27:36.360433079+00:00 stderr F I1212 16:27:36.360397 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: challenges.acme.cert-manager.io-v1-view, uid: 6dbd8ec4-a576-473e-a4b5-f0717e13fa2c]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: challenges.acme.cert-manager.io, uid: c37a6a6b-19a9-4531-a625-f6fee1016993]" 2025-12-12T16:27:36.360526921+00:00 stderr F I1212 16:27:36.360469 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: challenges.acme.cert-manager.io-v1-edit, uid: bc1f4c64-2be0-4293-89fb-9322ffb41132]" virtual=false 2025-12-12T16:27:36.360704036+00:00 stderr F I1212 16:27:36.360639 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: challenges.acme.cert-manager.io-v1-view, uid: 6dbd8ec4-a576-473e-a4b5-f0717e13fa2c]" virtual=false 2025-12-12T16:27:36.360983753+00:00 stderr F I1212 16:27:36.360935 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" 
replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" need=1 creating=1 2025-12-12T16:27:36.362827770+00:00 stderr F I1212 16:27:36.361332 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certmanagers.operator.openshift.io-v1alpha1-crdview, uid: bef7000c-f4cc-4e9e-9f25-ab8dc57c3300]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"certmanagers.operator.openshift.io","uid":"64290f3e-e0ed-4371-8a36-eb1177441a9f","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.362981093+00:00 stderr F I1212 16:27:36.362935 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: certmanagers.operator.openshift.io-v1alpha1-edit, uid: fdc6b869-558c-4728-a556-8350b0d822aa]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"certmanagers.operator.openshift.io","uid":"64290f3e-e0ed-4371-8a36-eb1177441a9f","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.372799912+00:00 stderr F I1212 16:27:36.372712 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: challenges.acme.cert-manager.io-v1-crdview, uid: bb14d9be-4e8c-415c-83d9-88d1a85cf1ea]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: challenges.acme.cert-manager.io, uid: c37a6a6b-19a9-4531-a625-f6fee1016993]" 2025-12-12T16:27:36.372966916+00:00 stderr F I1212 16:27:36.372902 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: challenges.acme.cert-manager.io-v1-crdview, uid: bb14d9be-4e8c-415c-83d9-88d1a85cf1ea]" virtual=false 2025-12-12T16:27:36.373543591+00:00 stderr F I1212 16:27:36.373511 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" 2025-12-12T16:27:36.378962388+00:00 stderr F I1212 16:27:36.378820 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: challenges.acme.cert-manager.io-v1-admin, uid: 29e5919b-b831-4684-9ca8-48dd68235601]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"challenges.acme.cert-manager.io","uid":"c37a6a6b-19a9-4531-a625-f6fee1016993","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.379601294+00:00 stderr F I1212 16:27:36.379521 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: challenges.acme.cert-manager.io-v1-edit, uid: bc1f4c64-2be0-4293-89fb-9322ffb41132]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"challenges.acme.cert-manager.io","uid":"c37a6a6b-19a9-4531-a625-f6fee1016993","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.385905894+00:00 stderr F I1212 16:27:36.385793 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="cert-manager/cert-manager-cainjector" err="Operation cannot be fulfilled on deployments.apps \"cert-manager-cainjector\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:36.386150160+00:00 stderr F E1212 16:27:36.386115 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-cainjector-7dbf76d5c8\" failed with pods \"cert-manager-cainjector-7dbf76d5c8-\" is forbidden: error looking up service account cert-manager/cert-manager-cainjector: serviceaccount \"cert-manager-cainjector\" not found" logger="UnhandledError" 2025-12-12T16:27:36.387965766+00:00 stderr F I1212 16:27:36.387925 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" need=1 creating=1 2025-12-12T16:27:36.389239638+00:00 stderr F I1212 16:27:36.389192 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: challenges.acme.cert-manager.io-v1-crdview, uid: bb14d9be-4e8c-415c-83d9-88d1a85cf1ea]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"challenges.acme.cert-manager.io","uid":"c37a6a6b-19a9-4531-a625-f6fee1016993","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.394041450+00:00 stderr F I1212 16:27:36.393904 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" 2025-12-12T16:27:36.394041450+00:00 stderr F E1212 16:27:36.393968 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-cainjector-7dbf76d5c8\" failed with pods \"cert-manager-cainjector-7dbf76d5c8-\" is forbidden: error looking up service account cert-manager/cert-manager-cainjector: serviceaccount \"cert-manager-cainjector\" not found" logger="UnhandledError" 2025-12-12T16:27:36.396839790+00:00 stderr F I1212 16:27:36.396723 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: challenges.acme.cert-manager.io-v1-view, uid: 6dbd8ec4-a576-473e-a4b5-f0717e13fa2c]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"challenges.acme.cert-manager.io","uid":"c37a6a6b-19a9-4531-a625-f6fee1016993","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.396934713+00:00 stderr F I1212 16:27:36.396893 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" need=1 creating=1 2025-12-12T16:27:36.398144273+00:00 stderr F I1212 16:27:36.398086 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: clusterissuers.cert-manager.io-v1-view, uid: f814f39b-13fe-4c4d-8343-f1e44786f069]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterissuers.cert-manager.io, uid: 5006263a-fa87-46c8-a48c-e3ecaf997c2d]" 2025-12-12T16:27:36.398164724+00:00 stderr F I1212 16:27:36.398136 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: clusterissuers.cert-manager.io-v1-view, uid: f814f39b-13fe-4c4d-8343-f1e44786f069]" virtual=false 2025-12-12T16:27:36.404367041+00:00 stderr F I1212 16:27:36.404273 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" 2025-12-12T16:27:36.404417472+00:00 stderr F E1212 16:27:36.404354 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-cainjector-7dbf76d5c8\" failed with pods \"cert-manager-cainjector-7dbf76d5c8-\" is forbidden: error looking up service account cert-manager/cert-manager-cainjector: serviceaccount \"cert-manager-cainjector\" not found" logger="UnhandledError" 2025-12-12T16:27:36.408652039+00:00 stderr F I1212 16:27:36.408564 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" need=1 creating=1 2025-12-12T16:27:36.408994438+00:00 stderr F I1212 16:27:36.408932 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: clusterissuers.cert-manager.io-v1-admin, uid: e7846b46-ea81-4f71-932f-d37d110f220d]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterissuers.cert-manager.io, uid: 5006263a-fa87-46c8-a48c-e3ecaf997c2d]" 2025-12-12T16:27:36.409039299+00:00 stderr F I1212 16:27:36.409002 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: clusterissuers.cert-manager.io-v1-admin, uid: e7846b46-ea81-4f71-932f-d37d110f220d]" virtual=false 2025-12-12T16:27:36.411222694+00:00 stderr F I1212 16:27:36.411062 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: clusterissuers.cert-manager.io-v1-view, uid: f814f39b-13fe-4c4d-8343-f1e44786f069]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"clusterissuers.cert-manager.io","uid":"5006263a-fa87-46c8-a48c-e3ecaf997c2d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.419252018+00:00 stderr F I1212 16:27:36.419073 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" 2025-12-12T16:27:36.419252018+00:00 stderr F E1212 16:27:36.419156 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-cainjector-7dbf76d5c8\" failed with pods \"cert-manager-cainjector-7dbf76d5c8-\" is forbidden: error looking up service account cert-manager/cert-manager-cainjector: serviceaccount \"cert-manager-cainjector\" not found" logger="UnhandledError" 2025-12-12T16:27:36.424578592+00:00 stderr F I1212 16:27:36.424446 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: clusterissuers.cert-manager.io-v1-edit, uid: 1416fda1-3a2a-4546-8b76-a3ded6ecdcb7]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterissuers.cert-manager.io, uid: 5006263a-fa87-46c8-a48c-e3ecaf997c2d]" 2025-12-12T16:27:36.424578592+00:00 stderr F I1212 16:27:36.424516 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: clusterissuers.cert-manager.io-v1-edit, uid: 1416fda1-3a2a-4546-8b76-a3ded6ecdcb7]" virtual=false 2025-12-12T16:27:36.429405265+00:00 stderr F I1212 16:27:36.429320 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" need=1 creating=1 2025-12-12T16:27:36.443589194+00:00 stderr F I1212 16:27:36.443477 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: clusterissuers.cert-manager.io-v1-admin, uid: e7846b46-ea81-4f71-932f-d37d110f220d]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"clusterissuers.cert-manager.io","uid":"5006263a-fa87-46c8-a48c-e3ecaf997c2d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.445765169+00:00 stderr F I1212 16:27:36.445543 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: clusterissuers.cert-manager.io-v1-crdview, uid: 8218bf19-f84f-41ab-b85d-fdb70862a761]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterissuers.cert-manager.io, uid: 5006263a-fa87-46c8-a48c-e3ecaf997c2d]" 2025-12-12T16:27:36.445765169+00:00 stderr F I1212 16:27:36.445593 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: clusterissuers.cert-manager.io-v1-crdview, uid: 8218bf19-f84f-41ab-b85d-fdb70862a761]" virtual=false 2025-12-12T16:27:36.449359440+00:00 stderr F I1212 16:27:36.449309 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" 2025-12-12T16:27:36.449502723+00:00 stderr F E1212 16:27:36.449478 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-cainjector-7dbf76d5c8\" failed with pods \"cert-manager-cainjector-7dbf76d5c8-\" is forbidden: error looking up service account cert-manager/cert-manager-cainjector: serviceaccount \"cert-manager-cainjector\" not found" logger="UnhandledError" 2025-12-12T16:27:36.451213526+00:00 stderr F I1212 16:27:36.451097 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: clusterissuers.cert-manager.io-v1-edit, uid: 1416fda1-3a2a-4546-8b76-a3ded6ecdcb7]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"clusterissuers.cert-manager.io","uid":"5006263a-fa87-46c8-a48c-e3ecaf997c2d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.460497421+00:00 stderr F I1212 16:27:36.460412 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: issuers.cert-manager.io-v1-admin, uid: 8f9c5d7a-b898-4c6c-ae05-e645628d669e]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: issuers.cert-manager.io, uid: 32d3fee6-fdb0-4d88-a830-f6c154b45f59]" 2025-12-12T16:27:36.460660286+00:00 stderr F I1212 16:27:36.460630 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: issuers.cert-manager.io-v1-admin, uid: 8f9c5d7a-b898-4c6c-ae05-e645628d669e]" virtual=false 2025-12-12T16:27:36.467035997+00:00 stderr F I1212 16:27:36.466971 1 resource_quota_controller.go:476] "syncing resource quota controller with updated resources from discovery" logger="resourcequota-controller" diff="added: [acme.cert-manager.io/v1, Resource=challenges acme.cert-manager.io/v1, Resource=orders cert-manager.io/v1, Resource=certificaterequests cert-manager.io/v1, Resource=certificates cert-manager.io/v1, Resource=issuers operator.openshift.io/v1alpha1, Resource=istiocsrs], removed: []" 2025-12-12T16:27:36.467572120+00:00 stderr F I1212 16:27:36.467540 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: issuers.cert-manager.io-v1-edit, uid: bd02d914-9309-4355-9952-8f6a262557d9]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: issuers.cert-manager.io, uid: 32d3fee6-fdb0-4d88-a830-f6c154b45f59]" 2025-12-12T16:27:36.467666833+00:00 stderr F I1212 16:27:36.467616 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: issuers.cert-manager.io-v1-edit, uid: bd02d914-9309-4355-9952-8f6a262557d9]" virtual=false 2025-12-12T16:27:36.468755900+00:00 stderr F I1212 16:27:36.468720 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
clusterissuers.cert-manager.io-v1-crdview, uid: 8218bf19-f84f-41ab-b85d-fdb70862a761]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"clusterissuers.cert-manager.io","uid":"5006263a-fa87-46c8-a48c-e3ecaf997c2d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.468885494+00:00 stderr F I1212 16:27:36.468868 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="certificates.cert-manager.io" 2025-12-12T16:27:36.468947555+00:00 stderr F I1212 16:27:36.468936 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="issuers.cert-manager.io" 2025-12-12T16:27:36.469017027+00:00 stderr F I1212 16:27:36.469006 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="orders.acme.cert-manager.io" 2025-12-12T16:27:36.469071418+00:00 stderr F I1212 16:27:36.469060 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="istiocsrs.operator.openshift.io" 2025-12-12T16:27:36.469115860+00:00 stderr F I1212 16:27:36.469106 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="challenges.acme.cert-manager.io" 2025-12-12T16:27:36.469196672+00:00 stderr F I1212 16:27:36.469163 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="certificaterequests.cert-manager.io" 2025-12-12T16:27:36.469409497+00:00 stderr F I1212 16:27:36.469397 1 shared_informer.go:350] "Waiting for caches to sync" controller="resource quota" 2025-12-12T16:27:36.473666415+00:00 stderr F I1212 16:27:36.473607 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:36.473862720+00:00 stderr F I1212 16:27:36.473840 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:36.474260630+00:00 stderr F I1212 16:27:36.474136 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: issuers.cert-manager.io-v1-view, uid: 90898f38-15fd-45db-9b76-bd69d89a6153]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: issuers.cert-manager.io, uid: 32d3fee6-fdb0-4d88-a830-f6c154b45f59]" 2025-12-12T16:27:36.474311051+00:00 stderr F I1212 16:27:36.474282 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: issuers.cert-manager.io-v1-view, uid: 90898f38-15fd-45db-9b76-bd69d89a6153]" virtual=false 2025-12-12T16:27:36.480953399+00:00 stderr F I1212 16:27:36.480877 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: issuers.cert-manager.io-v1-crdview, uid: da6c873f-e410-4167-abac-93b54bb887c2]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: issuers.cert-manager.io, uid: 32d3fee6-fdb0-4d88-a830-f6c154b45f59]" 2025-12-12T16:27:36.480953399+00:00 
stderr F I1212 16:27:36.480932 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: issuers.cert-manager.io-v1-crdview, uid: da6c873f-e410-4167-abac-93b54bb887c2]" virtual=false 2025-12-12T16:27:36.481321729+00:00 stderr F I1212 16:27:36.481133 1 garbagecollector.go:203] "syncing garbage collector with updated resources from discovery" logger="garbage-collector-controller" diff="added: [acme.cert-manager.io/v1, Resource=challenges acme.cert-manager.io/v1, Resource=orders cert-manager.io/v1, Resource=certificaterequests cert-manager.io/v1, Resource=certificates cert-manager.io/v1, Resource=clusterissuers cert-manager.io/v1, Resource=issuers operator.openshift.io/v1alpha1, Resource=certmanagers operator.openshift.io/v1alpha1, Resource=istiocsrs], removed: []" 2025-12-12T16:27:36.487290580+00:00 stderr F I1212 16:27:36.487166 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: issuers.cert-manager.io-v1-admin, uid: 8f9c5d7a-b898-4c6c-ae05-e645628d669e]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"issuers.cert-manager.io","uid":"32d3fee6-fdb0-4d88-a830-f6c154b45f59","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.508630570+00:00 stderr F I1212 16:27:36.508439 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:36.510431295+00:00 stderr F I1212 16:27:36.509605 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: issuers.cert-manager.io-v1-view, uid: 90898f38-15fd-45db-9b76-bd69d89a6153]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"issuers.cert-manager.io","uid":"32d3fee6-fdb0-4d88-a830-f6c154b45f59","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.510431295+00:00 stderr F I1212 16:27:36.509692 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: issuers.cert-manager.io-v1-edit, uid: bd02d914-9309-4355-9952-8f6a262557d9]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"issuers.cert-manager.io","uid":"32d3fee6-fdb0-4d88-a830-f6c154b45f59","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.510527908+00:00 stderr F I1212 16:27:36.510487 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: istiocsrs.operator.openshift.io-v1alpha1-admin, uid: fdfc648c-1a9b-4260-93c7-03aa5e636e68]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: istiocsrs.operator.openshift.io, uid: 94bab050-5608-4dea-8b7a-818c3dd95e28]" 2025-12-12T16:27:36.510580869+00:00 stderr F I1212 16:27:36.510527 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: istiocsrs.operator.openshift.io-v1alpha1-admin, uid: 
fdfc648c-1a9b-4260-93c7-03aa5e636e68]" virtual=false 2025-12-12T16:27:36.513902083+00:00 stderr F I1212 16:27:36.513811 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: istiocsrs.operator.openshift.io-v1alpha1-edit, uid: 0449053f-78c5-458e-a620-295365aba791]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: istiocsrs.operator.openshift.io, uid: 94bab050-5608-4dea-8b7a-818c3dd95e28]" 2025-12-12T16:27:36.513935974+00:00 stderr F I1212 16:27:36.513911 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: istiocsrs.operator.openshift.io-v1alpha1-edit, uid: 0449053f-78c5-458e-a620-295365aba791]" virtual=false 2025-12-12T16:27:36.518725415+00:00 stderr F I1212 16:27:36.518654 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:36.531433577+00:00 stderr F I1212 16:27:36.531239 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" need=1 creating=1 2025-12-12T16:27:36.534822223+00:00 stderr F I1212 16:27:36.534692 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: istiocsrs.operator.openshift.io-v1alpha1-view, uid: c084e82a-c651-4265-8f3c-2a2dd26b6bf1]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: istiocsrs.operator.openshift.io, uid: 94bab050-5608-4dea-8b7a-818c3dd95e28]" 2025-12-12T16:27:36.534868704+00:00 stderr F I1212 16:27:36.534773 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: istiocsrs.operator.openshift.io-v1alpha1-view, uid: c084e82a-c651-4265-8f3c-2a2dd26b6bf1]" virtual=false 2025-12-12T16:27:36.547663988+00:00 stderr F I1212 16:27:36.547576 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" 2025-12-12T16:27:36.547738399+00:00 stderr F E1212 16:27:36.547709 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-cainjector-7dbf76d5c8\" failed with pods \"cert-manager-cainjector-7dbf76d5c8-\" is forbidden: error looking up service account cert-manager/cert-manager-cainjector: serviceaccount \"cert-manager-cainjector\" not found" logger="UnhandledError" 2025-12-12T16:27:36.548139430+00:00 stderr F I1212 16:27:36.548043 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: istiocsrs.operator.openshift.io-v1alpha1-crdview, uid: e9d6e0bb-061e-4016-9ff5-f2886f30dcbf]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: istiocsrs.operator.openshift.io, uid: 94bab050-5608-4dea-8b7a-818c3dd95e28]" 2025-12-12T16:27:36.548215092+00:00 stderr F I1212 16:27:36.548155 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: istiocsrs.operator.openshift.io-v1alpha1-crdview, uid: e9d6e0bb-061e-4016-9ff5-f2886f30dcbf]" virtual=false 2025-12-12T16:27:36.554653314+00:00 stderr F I1212 16:27:36.554416 1 shared_informer.go:350] "Waiting for caches to sync" controller="garbage collector" 2025-12-12T16:27:36.561490127+00:00 stderr F I1212 16:27:36.560672 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: orders.acme.cert-manager.io-v1-admin, uid: 9567ff8c-9e4a-49fa-9f27-1500dd1c1ffa]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: orders.acme.cert-manager.io, uid: 78dcf4d0-0da8-4159-8dad-203bc627f29a]" 2025-12-12T16:27:36.561490127+00:00 stderr F I1212 16:27:36.560725 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: orders.acme.cert-manager.io-v1-admin, uid: 9567ff8c-9e4a-49fa-9f27-1500dd1c1ffa]" virtual=false 2025-12-12T16:27:36.564254887+00:00 stderr F I1212 16:27:36.561738 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:36.564254887+00:00 stderr F I1212 16:27:36.563940 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: istiocsrs.operator.openshift.io-v1alpha1-view, uid: c084e82a-c651-4265-8f3c-2a2dd26b6bf1]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"istiocsrs.operator.openshift.io","uid":"94bab050-5608-4dea-8b7a-818c3dd95e28","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.564352500+00:00 stderr F I1212 16:27:36.564293 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: istiocsrs.operator.openshift.io-v1alpha1-edit, uid: 0449053f-78c5-458e-a620-295365aba791]" 
owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"istiocsrs.operator.openshift.io","uid":"94bab050-5608-4dea-8b7a-818c3dd95e28","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.565086408+00:00 stderr F I1212 16:27:36.564917 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: issuers.cert-manager.io-v1-crdview, uid: da6c873f-e410-4167-abac-93b54bb887c2]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"issuers.cert-manager.io","uid":"32d3fee6-fdb0-4d88-a830-f6c154b45f59","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.571733837+00:00 stderr F I1212 16:27:36.571659 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:36.572889056+00:00 stderr F I1212 16:27:36.572841 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: orders.acme.cert-manager.io-v1-edit, uid: 473d6a17-452a-45ff-bc06-af645b7dbb48]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: orders.acme.cert-manager.io, uid: 78dcf4d0-0da8-4159-8dad-203bc627f29a]" 2025-12-12T16:27:36.572964738+00:00 stderr F I1212 16:27:36.572938 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: orders.acme.cert-manager.io-v1-edit, uid: 473d6a17-452a-45ff-bc06-af645b7dbb48]" virtual=false 2025-12-12T16:27:36.577648766+00:00 stderr F I1212 16:27:36.577545 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: istiocsrs.operator.openshift.io-v1alpha1-admin, uid: fdfc648c-1a9b-4260-93c7-03aa5e636e68]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"istiocsrs.operator.openshift.io","uid":"94bab050-5608-4dea-8b7a-818c3dd95e28","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.581153665+00:00 stderr F I1212 16:27:36.581068 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: orders.acme.cert-manager.io-v1-admin, uid: 9567ff8c-9e4a-49fa-9f27-1500dd1c1ffa]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"orders.acme.cert-manager.io","uid":"78dcf4d0-0da8-4159-8dad-203bc627f29a","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.585268629+00:00 stderr F I1212 16:27:36.581415 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: istiocsrs.operator.openshift.io-v1alpha1-crdview, uid: e9d6e0bb-061e-4016-9ff5-f2886f30dcbf]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"istiocsrs.operator.openshift.io","uid":"94bab050-5608-4dea-8b7a-818c3dd95e28","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.585472684+00:00 stderr F 
I1212 16:27:36.585418 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: orders.acme.cert-manager.io-v1-view, uid: 1f5b5014-bf22-4dbd-94af-f964faf60a4d]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: orders.acme.cert-manager.io, uid: 78dcf4d0-0da8-4159-8dad-203bc627f29a]" 2025-12-12T16:27:36.585487225+00:00 stderr F I1212 16:27:36.585466 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: orders.acme.cert-manager.io-v1-view, uid: 1f5b5014-bf22-4dbd-94af-f964faf60a4d]" virtual=false 2025-12-12T16:27:36.592584584+00:00 stderr F I1212 16:27:36.592444 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: orders.acme.cert-manager.io-v1-edit, uid: 473d6a17-452a-45ff-bc06-af645b7dbb48]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"orders.acme.cert-manager.io","uid":"78dcf4d0-0da8-4159-8dad-203bc627f29a","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.595824546+00:00 stderr F I1212 16:27:36.595752 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: orders.acme.cert-manager.io-v1-crdview, uid: b9065f37-7a9a-41e5-89c5-953beb9299f7]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: orders.acme.cert-manager.io, uid: 78dcf4d0-0da8-4159-8dad-203bc627f29a]" 2025-12-12T16:27:36.595878648+00:00 stderr F I1212 16:27:36.595811 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: orders.acme.cert-manager.io-v1-crdview, uid: b9065f37-7a9a-41e5-89c5-953beb9299f7]" virtual=false 2025-12-12T16:27:36.606705992+00:00 stderr F I1212 16:27:36.605805 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: orders.acme.cert-manager.io-v1-view, uid: 1f5b5014-bf22-4dbd-94af-f964faf60a4d]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"orders.acme.cert-manager.io","uid":"78dcf4d0-0da8-4159-8dad-203bc627f29a","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.617511265+00:00 stderr F I1212 16:27:36.616313 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:36.617511265+00:00 stderr F I1212 16:27:36.617238 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: orders.acme.cert-manager.io-v1-crdview, uid: b9065f37-7a9a-41e5-89c5-953beb9299f7]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"orders.acme.cert-manager.io","uid":"78dcf4d0-0da8-4159-8dad-203bc627f29a","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:36.646689954+00:00 stderr F I1212 
16:27:36.646617 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:27:36.655017175+00:00 stderr F I1212 16:27:36.654932 1 shared_informer.go:357] "Caches are synced" controller="garbage collector" 2025-12-12T16:27:36.655017175+00:00 stderr F I1212 16:27:36.654964 1 garbagecollector.go:235] "synced garbage collector" logger="garbage-collector-controller" 2025-12-12T16:27:36.669795338+00:00 stderr F I1212 16:27:36.669712 1 shared_informer.go:357] "Caches are synced" controller="resource quota" 2025-12-12T16:27:36.669795338+00:00 stderr F I1212 16:27:36.669750 1 resource_quota_controller.go:502] "synced quota controller" logger="resourcequota-controller" 2025-12-12T16:27:36.710719614+00:00 stderr F I1212 16:27:36.710617 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" need=1 creating=1 2025-12-12T16:27:36.724008801+00:00 stderr F I1212 16:27:36.723676 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" 2025-12-12T16:27:36.724753989+00:00 stderr F E1212 16:27:36.724705 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-cainjector-7dbf76d5c8\" failed with pods \"cert-manager-cainjector-7dbf76d5c8-\" is forbidden: error looking up service account cert-manager/cert-manager-cainjector: serviceaccount \"cert-manager-cainjector\" not found" logger="UnhandledError" 2025-12-12T16:27:37.052310489+00:00 stderr F I1212 16:27:37.051383 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" need=1 creating=1 2025-12-12T16:27:37.056780123+00:00 stderr F I1212 16:27:37.056736 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" 2025-12-12T16:27:37.056800963+00:00 stderr F E1212 16:27:37.056787 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-cainjector-7dbf76d5c8\" failed with pods \"cert-manager-cainjector-7dbf76d5c8-\" is forbidden: error looking up service account cert-manager/cert-manager-cainjector: serviceaccount \"cert-manager-cainjector\" not found" logger="UnhandledError" 2025-12-12T16:27:37.314233107+00:00 stderr F I1212 16:27:37.314034 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" need=1 creating=1 2025-12-12T16:27:37.320980118+00:00 stderr F I1212 16:27:37.320735 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" 2025-12-12T16:27:37.326067357+00:00 stderr F I1212 16:27:37.325823 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="cert-manager/cert-manager-webhook" err="Operation cannot be fulfilled on deployments.apps \"cert-manager-webhook\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:37.327542424+00:00 stderr F E1212 16:27:37.327452 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-webhook-7894b5b9b4\" failed with pods \"cert-manager-webhook-7894b5b9b4-\" is forbidden: error looking up service account cert-manager/cert-manager-webhook: serviceaccount \"cert-manager-webhook\" not found" logger="UnhandledError" 2025-12-12T16:27:37.330089199+00:00 stderr F I1212 16:27:37.330027 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" need=1 creating=1 2025-12-12T16:27:37.336333857+00:00 stderr F I1212 16:27:37.334837 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" 2025-12-12T16:27:37.336333857+00:00 stderr F E1212 16:27:37.334905 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-webhook-7894b5b9b4\" failed with pods \"cert-manager-webhook-7894b5b9b4-\" is forbidden: error looking up service account cert-manager/cert-manager-webhook: serviceaccount \"cert-manager-webhook\" not found" logger="UnhandledError" 2025-12-12T16:27:37.337854335+00:00 stderr F I1212 16:27:37.336549 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" need=1 creating=1 2025-12-12T16:27:37.340968254+00:00 stderr F I1212 16:27:37.340913 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" 2025-12-12T16:27:37.340988265+00:00 stderr F E1212 16:27:37.340976 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-webhook-7894b5b9b4\" failed with pods \"cert-manager-webhook-7894b5b9b4-\" is forbidden: error looking up service account cert-manager/cert-manager-webhook: serviceaccount \"cert-manager-webhook\" not found" logger="UnhandledError" 2025-12-12T16:27:37.346809132+00:00 stderr F I1212 16:27:37.346645 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" need=1 creating=1 2025-12-12T16:27:37.351677575+00:00 stderr F I1212 16:27:37.351574 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" 2025-12-12T16:27:37.351677575+00:00 stderr F E1212 16:27:37.351636 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-webhook-7894b5b9b4\" failed with pods \"cert-manager-webhook-7894b5b9b4-\" is forbidden: error looking up service account cert-manager/cert-manager-webhook: serviceaccount \"cert-manager-webhook\" not found" logger="UnhandledError" 2025-12-12T16:27:37.393285518+00:00 stderr F I1212 16:27:37.393130 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" need=1 creating=1 2025-12-12T16:27:37.398059579+00:00 stderr F I1212 16:27:37.398010 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" 2025-12-12T16:27:37.398092990+00:00 stderr F E1212 16:27:37.398079 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-webhook-7894b5b9b4\" failed with pods \"cert-manager-webhook-7894b5b9b4-\" is forbidden: error looking up service account cert-manager/cert-manager-webhook: serviceaccount \"cert-manager-webhook\" not found" logger="UnhandledError" 2025-12-12T16:27:37.481465990+00:00 stderr F I1212 16:27:37.481330 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" need=1 creating=1 2025-12-12T16:27:37.485966104+00:00 stderr F I1212 16:27:37.485877 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" 2025-12-12T16:27:37.486089407+00:00 stderr F E1212 16:27:37.486039 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-webhook-7894b5b9b4\" failed with pods \"cert-manager-webhook-7894b5b9b4-\" is forbidden: error looking up service account cert-manager/cert-manager-webhook: serviceaccount \"cert-manager-webhook\" not found" logger="UnhandledError" 2025-12-12T16:27:37.649251966+00:00 stderr F I1212 16:27:37.648511 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" need=1 creating=1 2025-12-12T16:27:37.657230998+00:00 stderr F I1212 16:27:37.656546 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" 2025-12-12T16:27:37.657230998+00:00 stderr F E1212 16:27:37.656658 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-webhook-7894b5b9b4\" failed with pods \"cert-manager-webhook-7894b5b9b4-\" is forbidden: error looking up service account cert-manager/cert-manager-webhook: serviceaccount \"cert-manager-webhook\" not found" logger="UnhandledError" 2025-12-12T16:27:37.700227117+00:00 stderr F I1212 16:27:37.699899 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" need=1 creating=1 2025-12-12T16:27:37.707226594+00:00 stderr F I1212 16:27:37.706923 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" 2025-12-12T16:27:37.707226594+00:00 stderr F E1212 16:27:37.707004 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-cainjector-7dbf76d5c8\" failed with pods \"cert-manager-cainjector-7dbf76d5c8-\" is forbidden: error looking up service account cert-manager/cert-manager-cainjector: serviceaccount \"cert-manager-cainjector\" not found" logger="UnhandledError" 2025-12-12T16:27:37.978281734+00:00 stderr F I1212 16:27:37.978166 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-webhook-7894b5b9b4" need=1 creating=1 2025-12-12T16:27:38.989339782+00:00 stderr F I1212 16:27:38.989259 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" need=1 creating=1 2025-12-12T16:27:38.995317254+00:00 stderr F I1212 16:27:38.995283 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" 2025-12-12T16:27:38.995386755+00:00 stderr F E1212 16:27:38.995345 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-cainjector-7dbf76d5c8\" failed with pods \"cert-manager-cainjector-7dbf76d5c8-\" is forbidden: error looking up service account cert-manager/cert-manager-cainjector: serviceaccount \"cert-manager-cainjector\" not found" logger="UnhandledError" 2025-12-12T16:27:41.119689698+00:00 stderr F I1212 16:27:41.119533 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-858d87f86b" need=1 creating=1 2025-12-12T16:27:41.125638319+00:00 stderr F I1212 16:27:41.125550 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-858d87f86b" 2025-12-12T16:27:41.136068593+00:00 stderr F E1212 16:27:41.135983 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-858d87f86b\" failed with pods \"cert-manager-858d87f86b-\" is forbidden: error looking up service account cert-manager/cert-manager: serviceaccount \"cert-manager\" not found" logger="UnhandledError" 2025-12-12T16:27:41.137727194+00:00 stderr F I1212 16:27:41.137590 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-858d87f86b" need=1 creating=1 2025-12-12T16:27:41.144278650+00:00 stderr F I1212 16:27:41.143730 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="cert-manager/cert-manager" err="Operation cannot be fulfilled on deployments.apps \"cert-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:41.144278650+00:00 stderr F I1212 16:27:41.144139 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-858d87f86b" 2025-12-12T16:27:41.144278650+00:00 stderr F E1212 16:27:41.144241 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-858d87f86b\" failed with pods \"cert-manager-858d87f86b-\" is forbidden: error looking up service account cert-manager/cert-manager: serviceaccount \"cert-manager\" not found" logger="UnhandledError" 2025-12-12T16:27:41.145645005+00:00 stderr F I1212 16:27:41.145587 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-858d87f86b" need=1 creating=1 2025-12-12T16:27:41.150021796+00:00 stderr F I1212 16:27:41.149962 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-858d87f86b" 2025-12-12T16:27:41.150067447+00:00 stderr F E1212 16:27:41.150033 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-858d87f86b\" failed with pods \"cert-manager-858d87f86b-\" is forbidden: error looking up service account cert-manager/cert-manager: serviceaccount \"cert-manager\" not found" logger="UnhandledError" 2025-12-12T16:27:41.155714570+00:00 stderr F I1212 16:27:41.155652 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-858d87f86b" need=1 creating=1 2025-12-12T16:27:41.155736740+00:00 stderr F I1212 16:27:41.155696 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="cert-manager/cert-manager" err="Operation cannot be fulfilled on deployments.apps \"cert-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:41.160579293+00:00 stderr F I1212 16:27:41.160492 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-858d87f86b" 2025-12-12T16:27:41.160667285+00:00 stderr F E1212 16:27:41.160635 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-858d87f86b\" failed with pods \"cert-manager-858d87f86b-\" is forbidden: error looking up service account cert-manager/cert-manager: serviceaccount \"cert-manager\" not found" logger="UnhandledError" 2025-12-12T16:27:41.203753216+00:00 stderr F I1212 16:27:41.203532 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-858d87f86b" need=1 creating=1 2025-12-12T16:27:41.207883760+00:00 stderr F I1212 16:27:41.207822 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-858d87f86b" 2025-12-12T16:27:41.207922881+00:00 stderr F E1212 16:27:41.207890 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-858d87f86b\" failed with pods \"cert-manager-858d87f86b-\" is forbidden: error looking up service account cert-manager/cert-manager: serviceaccount \"cert-manager\" not found" logger="UnhandledError" 2025-12-12T16:27:41.290046839+00:00 stderr F I1212 16:27:41.289935 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-858d87f86b" need=1 creating=1 2025-12-12T16:27:41.295800875+00:00 stderr F I1212 16:27:41.295754 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-858d87f86b" 2025-12-12T16:27:41.295852626+00:00 stderr F E1212 16:27:41.295833 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-858d87f86b\" failed with pods \"cert-manager-858d87f86b-\" is forbidden: error looking up service account cert-manager/cert-manager: serviceaccount \"cert-manager\" not found" logger="UnhandledError" 2025-12-12T16:27:41.458132434+00:00 stderr F I1212 16:27:41.457997 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-858d87f86b" need=1 creating=1 2025-12-12T16:27:41.463081259+00:00 stderr F I1212 16:27:41.462572 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-858d87f86b" 2025-12-12T16:27:41.463081259+00:00 stderr F E1212 16:27:41.462664 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-858d87f86b\" failed with pods \"cert-manager-858d87f86b-\" is forbidden: error looking up service account cert-manager/cert-manager: serviceaccount \"cert-manager\" not found" logger="UnhandledError" 2025-12-12T16:27:41.557880598+00:00 stderr F I1212 16:27:41.557769 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-cainjector-7dbf76d5c8" need=1 creating=1 2025-12-12T16:27:41.784741110+00:00 stderr F I1212 16:27:41.784649 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-858d87f86b" need=1 creating=1 2025-12-12T16:27:41.789735256+00:00 stderr F I1212 16:27:41.789637 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-858d87f86b" 2025-12-12T16:27:41.789826328+00:00 stderr F E1212 16:27:41.789722 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-858d87f86b\" failed with pods \"cert-manager-858d87f86b-\" is forbidden: error looking up service account cert-manager/cert-manager: serviceaccount \"cert-manager\" not found" logger="UnhandledError" 2025-12-12T16:27:42.432368482+00:00 stderr F I1212 16:27:42.431581 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-858d87f86b" need=1 creating=1 2025-12-12T16:27:42.437227685+00:00 stderr F I1212 16:27:42.437141 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-858d87f86b" 2025-12-12T16:27:42.437249495+00:00 stderr F E1212 16:27:42.437235 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-858d87f86b\" failed with pods \"cert-manager-858d87f86b-\" is forbidden: error looking up service account cert-manager/cert-manager: serviceaccount \"cert-manager\" not found" logger="UnhandledError" 2025-12-12T16:27:43.719918939+00:00 stderr F I1212 16:27:43.719027 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-858d87f86b" need=1 creating=1 2025-12-12T16:27:43.726158917+00:00 stderr F I1212 16:27:43.726067 1 replica_set.go:615] "Slow-start failure. Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-858d87f86b" 2025-12-12T16:27:43.726250729+00:00 stderr F E1212 16:27:43.726210 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-858d87f86b\" failed with pods \"cert-manager-858d87f86b-\" is forbidden: error looking up service account cert-manager/cert-manager: serviceaccount \"cert-manager\" not found" logger="UnhandledError" 2025-12-12T16:27:44.376682050+00:00 stderr F I1212 16:27:44.376565 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagerconfigs.monitoring.rhobs-v1alpha1-admin, uid: 3fa8104f-1cef-4ef1-a43c-a5185cffba5f]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertmanagerconfigs.monitoring.rhobs, uid: bd4f0381-0524-4296-8264-181d723362df]" 2025-12-12T16:27:44.376682050+00:00 stderr F I1212 16:27:44.376628 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagerconfigs.monitoring.rhobs-v1alpha1-admin, uid: 3fa8104f-1cef-4ef1-a43c-a5185cffba5f]" virtual=false 2025-12-12T16:27:44.381390079+00:00 stderr F I1212 16:27:44.381320 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagerconfigs.monitoring.rhobs-v1alpha1-edit, uid: c62ce700-bdc5-4654-9e0f-f6ead4e3043f]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertmanagerconfigs.monitoring.rhobs, uid: bd4f0381-0524-4296-8264-181d723362df]" 2025-12-12T16:27:44.381448071+00:00 stderr F I1212 16:27:44.381376 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagerconfigs.monitoring.rhobs-v1alpha1-edit, uid: c62ce700-bdc5-4654-9e0f-f6ead4e3043f]" virtual=false 2025-12-12T16:27:44.385796981+00:00 stderr F I1212 16:27:44.384164 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagerconfigs.monitoring.rhobs-v1alpha1-view, uid: 14e13099-b910-4d95-a307-02bfb1de13b0]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: 
alertmanagerconfigs.monitoring.rhobs, uid: bd4f0381-0524-4296-8264-181d723362df]" 2025-12-12T16:27:44.386010326+00:00 stderr F I1212 16:27:44.385946 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagerconfigs.monitoring.rhobs-v1alpha1-view, uid: 14e13099-b910-4d95-a307-02bfb1de13b0]" virtual=false 2025-12-12T16:27:44.390999302+00:00 stderr F I1212 16:27:44.390622 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagerconfigs.monitoring.rhobs-v1alpha1-crdview, uid: e7ed0cd7-72c8-4b62-b028-fc5d6140b84d]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertmanagerconfigs.monitoring.rhobs, uid: bd4f0381-0524-4296-8264-181d723362df]" 2025-12-12T16:27:44.390999302+00:00 stderr F I1212 16:27:44.390702 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagerconfigs.monitoring.rhobs-v1alpha1-crdview, uid: e7ed0cd7-72c8-4b62-b028-fc5d6140b84d]" virtual=false 2025-12-12T16:27:44.418173990+00:00 stderr F I1212 16:27:44.414921 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagerconfigs.monitoring.rhobs-v1alpha1-edit, uid: c62ce700-bdc5-4654-9e0f-f6ead4e3043f]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"alertmanagerconfigs.monitoring.rhobs","uid":"bd4f0381-0524-4296-8264-181d723362df","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.422865409+00:00 stderr F I1212 16:27:44.422711 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagerconfigs.monitoring.rhobs-v1alpha1-admin, uid: 3fa8104f-1cef-4ef1-a43c-a5185cffba5f]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"alertmanagerconfigs.monitoring.rhobs","uid":"bd4f0381-0524-4296-8264-181d723362df","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.426817749+00:00 stderr F I1212 16:27:44.426740 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagers.monitoring.rhobs-v1-admin, uid: a83caa0f-7a5c-4546-9677-e31f968bd69d]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertmanagers.monitoring.rhobs, uid: 4fda672f-3605-4bfa-85f4-c346ddf5fbba]" 2025-12-12T16:27:44.426980153+00:00 stderr F I1212 16:27:44.426902 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagers.monitoring.rhobs-v1-admin, uid: a83caa0f-7a5c-4546-9677-e31f968bd69d]" virtual=false 2025-12-12T16:27:44.430426410+00:00 stderr F I1212 16:27:44.430365 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
alertmanagerconfigs.monitoring.rhobs-v1alpha1-view, uid: 14e13099-b910-4d95-a307-02bfb1de13b0]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"alertmanagerconfigs.monitoring.rhobs","uid":"bd4f0381-0524-4296-8264-181d723362df","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.438695769+00:00 stderr F I1212 16:27:44.438128 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagerconfigs.monitoring.rhobs-v1alpha1-crdview, uid: e7ed0cd7-72c8-4b62-b028-fc5d6140b84d]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"alertmanagerconfigs.monitoring.rhobs","uid":"bd4f0381-0524-4296-8264-181d723362df","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.439853209+00:00 stderr F I1212 16:27:44.439736 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagers.monitoring.rhobs-v1-edit, uid: 1f37355e-1638-4b46-9c7a-b654e21bde0e]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertmanagers.monitoring.rhobs, uid: 4fda672f-3605-4bfa-85f4-c346ddf5fbba]" 2025-12-12T16:27:44.439937951+00:00 stderr F I1212 16:27:44.439861 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagers.monitoring.rhobs-v1-edit, uid: 1f37355e-1638-4b46-9c7a-b654e21bde0e]" virtual=false 2025-12-12T16:27:44.458341387+00:00 stderr F I1212 16:27:44.455699 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagers.monitoring.rhobs-v1-view, uid: f5257388-1fcd-43c8-8a10-7cf5bec499ef]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertmanagers.monitoring.rhobs, uid: 4fda672f-3605-4bfa-85f4-c346ddf5fbba]" 2025-12-12T16:27:44.458341387+00:00 stderr F I1212 16:27:44.455806 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagers.monitoring.rhobs-v1-view, uid: f5257388-1fcd-43c8-8a10-7cf5bec499ef]" virtual=false 2025-12-12T16:27:44.458341387+00:00 stderr F I1212 16:27:44.456709 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagers.monitoring.rhobs-v1-crdview, uid: cb072484-8d83-406e-b63e-ff0d0f025f79]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertmanagers.monitoring.rhobs, uid: 4fda672f-3605-4bfa-85f4-c346ddf5fbba]" 2025-12-12T16:27:44.458341387+00:00 stderr F I1212 16:27:44.456789 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagers.monitoring.rhobs-v1-crdview, uid: cb072484-8d83-406e-b63e-ff0d0f025f79]" virtual=false 2025-12-12T16:27:44.458341387+00:00 stderr F I1212 16:27:44.457871 1 graph_builder.go:456] "item references an owner with coordinates that do not 
match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: monitoringstacks.monitoring.rhobs-v1alpha1-admin, uid: ea86536a-c388-4c70-936e-4579c708e85b]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: monitoringstacks.monitoring.rhobs, uid: fb546113-65aa-41e9-a2aa-3c4e44e778da]" 2025-12-12T16:27:44.458341387+00:00 stderr F I1212 16:27:44.457926 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: monitoringstacks.monitoring.rhobs-v1alpha1-admin, uid: ea86536a-c388-4c70-936e-4579c708e85b]" virtual=false 2025-12-12T16:27:44.459795393+00:00 stderr F I1212 16:27:44.459728 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagers.monitoring.rhobs-v1-admin, uid: a83caa0f-7a5c-4546-9677-e31f968bd69d]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"alertmanagers.monitoring.rhobs","uid":"4fda672f-3605-4bfa-85f4-c346ddf5fbba","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.466611526+00:00 stderr F I1212 16:27:44.466505 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: monitoringstacks.monitoring.rhobs-v1alpha1-edit, uid: 0cccfea2-86cd-4517-85ff-93f8cf04c356]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: monitoringstacks.monitoring.rhobs, uid: fb546113-65aa-41e9-a2aa-3c4e44e778da]" 2025-12-12T16:27:44.466781200+00:00 stderr F I1212 16:27:44.466703 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: monitoringstacks.monitoring.rhobs-v1alpha1-edit, uid: 0cccfea2-86cd-4517-85ff-93f8cf04c356]" virtual=false 2025-12-12T16:27:44.473879750+00:00 stderr F I1212 16:27:44.473744 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: monitoringstacks.monitoring.rhobs-v1alpha1-admin, uid: ea86536a-c388-4c70-936e-4579c708e85b]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"monitoringstacks.monitoring.rhobs","uid":"fb546113-65aa-41e9-a2aa-3c4e44e778da","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.481458572+00:00 stderr F I1212 16:27:44.481348 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: monitoringstacks.monitoring.rhobs-v1alpha1-view, uid: 0ce2cf4c-e576-4def-9057-0bea944574ab]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: monitoringstacks.monitoring.rhobs, uid: fb546113-65aa-41e9-a2aa-3c4e44e778da]" 2025-12-12T16:27:44.481458572+00:00 stderr F I1212 16:27:44.481408 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: monitoringstacks.monitoring.rhobs-v1alpha1-view, uid: 0ce2cf4c-e576-4def-9057-0bea944574ab]" 
virtual=false 2025-12-12T16:27:44.482043326+00:00 stderr F I1212 16:27:44.481977 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagers.monitoring.rhobs-v1-crdview, uid: cb072484-8d83-406e-b63e-ff0d0f025f79]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"alertmanagers.monitoring.rhobs","uid":"4fda672f-3605-4bfa-85f4-c346ddf5fbba","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.483021441+00:00 stderr F I1212 16:27:44.482973 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagers.monitoring.rhobs-v1-edit, uid: 1f37355e-1638-4b46-9c7a-b654e21bde0e]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"alertmanagers.monitoring.rhobs","uid":"4fda672f-3605-4bfa-85f4-c346ddf5fbba","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.485474573+00:00 stderr F I1212 16:27:44.485410 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: monitoringstacks.monitoring.rhobs-v1alpha1-edit, uid: 0cccfea2-86cd-4517-85ff-93f8cf04c356]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"monitoringstacks.monitoring.rhobs","uid":"fb546113-65aa-41e9-a2aa-3c4e44e778da","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.501579411+00:00 stderr F I1212 16:27:44.491952 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: alertmanagers.monitoring.rhobs-v1-view, uid: f5257388-1fcd-43c8-8a10-7cf5bec499ef]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"alertmanagers.monitoring.rhobs","uid":"4fda672f-3605-4bfa-85f4-c346ddf5fbba","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.501579411+00:00 stderr F I1212 16:27:44.493570 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: monitoringstacks.monitoring.rhobs-v1alpha1-view, uid: 0ce2cf4c-e576-4def-9057-0bea944574ab]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"monitoringstacks.monitoring.rhobs","uid":"fb546113-65aa-41e9-a2aa-3c4e44e778da","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.501579411+00:00 stderr F I1212 16:27:44.497860 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: monitoringstacks.monitoring.rhobs-v1alpha1-crdview, uid: c503c810-77e2-4710-ba81-8a0179345ced]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: monitoringstacks.monitoring.rhobs, uid: fb546113-65aa-41e9-a2aa-3c4e44e778da]" 2025-12-12T16:27:44.501579411+00:00 stderr F I1212 16:27:44.497910 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: monitoringstacks.monitoring.rhobs-v1alpha1-crdview, uid: c503c810-77e2-4710-ba81-8a0179345ced]" virtual=false 2025-12-12T16:27:44.507557402+00:00 stderr F I1212 16:27:44.506003 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: observabilityinstallers.observability.openshift.io-v1alpha1-admin, uid: 0e40b052-fe16-4ba3-8a76-fcc24ecca465]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: observabilityinstallers.observability.openshift.io, uid: 1a6d05b1-1bc0-412b-814b-1f39e3847d86]" 2025-12-12T16:27:44.507557402+00:00 stderr F I1212 16:27:44.506073 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: observabilityinstallers.observability.openshift.io-v1alpha1-admin, uid: 0e40b052-fe16-4ba3-8a76-fcc24ecca465]" virtual=false 2025-12-12T16:27:44.511868481+00:00 stderr F I1212 16:27:44.511726 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: observabilityinstallers.observability.openshift.io-v1alpha1-edit, uid: 32782267-1a99-4323-814e-b17d3a23692c]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: observabilityinstallers.observability.openshift.io, uid: 1a6d05b1-1bc0-412b-814b-1f39e3847d86]" 2025-12-12T16:27:44.511868481+00:00 stderr F I1212 16:27:44.511796 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: observabilityinstallers.observability.openshift.io-v1alpha1-edit, uid: 32782267-1a99-4323-814e-b17d3a23692c]" virtual=false 2025-12-12T16:27:44.513397950+00:00 stderr F I1212 16:27:44.513336 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: monitoringstacks.monitoring.rhobs-v1alpha1-crdview, uid: c503c810-77e2-4710-ba81-8a0179345ced]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"monitoringstacks.monitoring.rhobs","uid":"fb546113-65aa-41e9-a2aa-3c4e44e778da","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.517632557+00:00 stderr F I1212 16:27:44.517334 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: observabilityinstallers.observability.openshift.io-v1alpha1-admin, uid: 0e40b052-fe16-4ba3-8a76-fcc24ecca465]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"observabilityinstallers.observability.openshift.io","uid":"1a6d05b1-1bc0-412b-814b-1f39e3847d86","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.519581467+00:00 stderr F I1212 16:27:44.519053 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: observabilityinstallers.observability.openshift.io-v1alpha1-view, uid: 
83206542-9ba9-4bca-b9e5-ac460beebb74]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: observabilityinstallers.observability.openshift.io, uid: 1a6d05b1-1bc0-412b-814b-1f39e3847d86]" 2025-12-12T16:27:44.519581467+00:00 stderr F I1212 16:27:44.519092 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: observabilityinstallers.observability.openshift.io-v1alpha1-view, uid: 83206542-9ba9-4bca-b9e5-ac460beebb74]" virtual=false 2025-12-12T16:27:44.523554557+00:00 stderr F I1212 16:27:44.522127 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: observabilityinstallers.observability.openshift.io-v1alpha1-edit, uid: 32782267-1a99-4323-814e-b17d3a23692c]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"observabilityinstallers.observability.openshift.io","uid":"1a6d05b1-1bc0-412b-814b-1f39e3847d86","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.523554557+00:00 stderr F I1212 16:27:44.523414 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: observabilityinstallers.observability.openshift.io-v1alpha1-crdview, uid: ba83996c-c2c8-496e-9ac3-ab755c621b12]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: observabilityinstallers.observability.openshift.io, uid: 1a6d05b1-1bc0-412b-814b-1f39e3847d86]" 2025-12-12T16:27:44.523617419+00:00 stderr F I1212 16:27:44.523503 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: observabilityinstallers.observability.openshift.io-v1alpha1-crdview, uid: ba83996c-c2c8-496e-9ac3-ab755c621b12]" virtual=false 2025-12-12T16:27:44.532482223+00:00 stderr F I1212 16:27:44.532368 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: observabilityinstallers.observability.openshift.io-v1alpha1-view, uid: 83206542-9ba9-4bca-b9e5-ac460beebb74]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"observabilityinstallers.observability.openshift.io","uid":"1a6d05b1-1bc0-412b-814b-1f39e3847d86","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.547258567+00:00 stderr F I1212 16:27:44.544705 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: perses.perses.dev-v1alpha1-admin, uid: 0a4a2622-a9b2-4c28-aac3-bce8ac41387e]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: perses.perses.dev, uid: beeb7515-1cf5-4863-8d90-a15c34feb74d]" 2025-12-12T16:27:44.547258567+00:00 stderr F I1212 16:27:44.544761 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: perses.perses.dev-v1alpha1-admin, uid: 0a4a2622-a9b2-4c28-aac3-bce8ac41387e]" virtual=false 2025-12-12T16:27:44.557702741+00:00 stderr F I1212 
16:27:44.556695 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: observabilityinstallers.observability.openshift.io-v1alpha1-crdview, uid: ba83996c-c2c8-496e-9ac3-ab755c621b12]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"observabilityinstallers.observability.openshift.io","uid":"1a6d05b1-1bc0-412b-814b-1f39e3847d86","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.562164844+00:00 stderr F I1212 16:27:44.557928 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: perses.perses.dev-v1alpha1-edit, uid: 8bf4a6bf-f8b9-493d-a4b3-8736a35d5021]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: perses.perses.dev, uid: beeb7515-1cf5-4863-8d90-a15c34feb74d]" 2025-12-12T16:27:44.562164844+00:00 stderr F I1212 16:27:44.558003 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: perses.perses.dev-v1alpha1-edit, uid: 8bf4a6bf-f8b9-493d-a4b3-8736a35d5021]" virtual=false 2025-12-12T16:27:44.583483594+00:00 stderr F I1212 16:27:44.580470 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: perses.perses.dev-v1alpha1-admin, uid: 0a4a2622-a9b2-4c28-aac3-bce8ac41387e]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"perses.perses.dev","uid":"beeb7515-1cf5-4863-8d90-a15c34feb74d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.584774426+00:00 stderr F I1212 16:27:44.584637 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: perses.perses.dev-v1alpha1-view, uid: 44910846-fba9-4543-bb3f-c8a9423b49f6]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: perses.perses.dev, uid: beeb7515-1cf5-4863-8d90-a15c34feb74d]" 2025-12-12T16:27:44.584774426+00:00 stderr F I1212 16:27:44.584679 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: perses.perses.dev-v1alpha1-view, uid: 44910846-fba9-4543-bb3f-c8a9423b49f6]" virtual=false 2025-12-12T16:27:44.586046749+00:00 stderr F I1212 16:27:44.585198 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: perses.perses.dev-v1alpha1-edit, uid: 8bf4a6bf-f8b9-493d-a4b3-8736a35d5021]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"perses.perses.dev","uid":"beeb7515-1cf5-4863-8d90-a15c34feb74d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.593529278+00:00 stderr F I1212 16:27:44.593434 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
perses.perses.dev-v1alpha1-crdview, uid: e116f156-abbd-451d-a24c-f27bd548bc3d]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: perses.perses.dev, uid: beeb7515-1cf5-4863-8d90-a15c34feb74d]" 2025-12-12T16:27:44.593586060+00:00 stderr F I1212 16:27:44.593492 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: perses.perses.dev-v1alpha1-crdview, uid: e116f156-abbd-451d-a24c-f27bd548bc3d]" virtual=false 2025-12-12T16:27:44.608545478+00:00 stderr F I1212 16:27:44.608259 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: perses.perses.dev-v1alpha1-view, uid: 44910846-fba9-4543-bb3f-c8a9423b49f6]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"perses.perses.dev","uid":"beeb7515-1cf5-4863-8d90-a15c34feb74d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.608545478+00:00 stderr F I1212 16:27:44.608516 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdashboards.perses.dev-v1alpha1-edit, uid: 94e60ad5-15f7-49cf-be73-403ca5722c84]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: persesdashboards.perses.dev, uid: 8e418188-c56d-4d7a-ba7d-60c68ef34f31]" 2025-12-12T16:27:44.608629270+00:00 stderr F I1212 16:27:44.608551 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdashboards.perses.dev-v1alpha1-edit, uid: 94e60ad5-15f7-49cf-be73-403ca5722c84]" virtual=false 2025-12-12T16:27:44.616599942+00:00 stderr F I1212 16:27:44.616365 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdashboards.perses.dev-v1alpha1-view, uid: a6a5384b-f39f-4dde-91e4-62f3d4b8f40a]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: persesdashboards.perses.dev, uid: 8e418188-c56d-4d7a-ba7d-60c68ef34f31]" 2025-12-12T16:27:44.616599942+00:00 stderr F I1212 16:27:44.616426 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdashboards.perses.dev-v1alpha1-view, uid: a6a5384b-f39f-4dde-91e4-62f3d4b8f40a]" virtual=false 2025-12-12T16:27:44.619624589+00:00 stderr F I1212 16:27:44.619549 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: perses.perses.dev-v1alpha1-crdview, uid: e116f156-abbd-451d-a24c-f27bd548bc3d]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"perses.perses.dev","uid":"beeb7515-1cf5-4863-8d90-a15c34feb74d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.624510242+00:00 stderr F I1212 16:27:44.623671 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , 
name: persesdashboards.perses.dev-v1alpha1-edit, uid: 94e60ad5-15f7-49cf-be73-403ca5722c84]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"persesdashboards.perses.dev","uid":"8e418188-c56d-4d7a-ba7d-60c68ef34f31","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.627521758+00:00 stderr F I1212 16:27:44.627434 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdashboards.perses.dev-v1alpha1-admin, uid: 94761e07-c18b-4a9b-8567-d31fcf0af274]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: persesdashboards.perses.dev, uid: 8e418188-c56d-4d7a-ba7d-60c68ef34f31]" 2025-12-12T16:27:44.627521758+00:00 stderr F I1212 16:27:44.627481 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdashboards.perses.dev-v1alpha1-admin, uid: 94761e07-c18b-4a9b-8567-d31fcf0af274]" virtual=false 2025-12-12T16:27:44.634469104+00:00 stderr F I1212 16:27:44.633770 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdashboards.perses.dev-v1alpha1-view, uid: a6a5384b-f39f-4dde-91e4-62f3d4b8f40a]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"persesdashboards.perses.dev","uid":"8e418188-c56d-4d7a-ba7d-60c68ef34f31","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.641107962+00:00 stderr F I1212 16:27:44.640781 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdashboards.perses.dev-v1alpha1-crdview, uid: 9e12b658-1ef7-4ddc-a7ef-0d8af27a4861]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: persesdashboards.perses.dev, uid: 8e418188-c56d-4d7a-ba7d-60c68ef34f31]" 2025-12-12T16:27:44.641107962+00:00 stderr F I1212 16:27:44.640857 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdashboards.perses.dev-v1alpha1-crdview, uid: 9e12b658-1ef7-4ddc-a7ef-0d8af27a4861]" virtual=false 2025-12-12T16:27:44.647578856+00:00 stderr F I1212 16:27:44.647459 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdashboards.perses.dev-v1alpha1-admin, uid: 94761e07-c18b-4a9b-8567-d31fcf0af274]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"persesdashboards.perses.dev","uid":"8e418188-c56d-4d7a-ba7d-60c68ef34f31","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.654919432+00:00 stderr F I1212 16:27:44.653671 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdatasources.perses.dev-v1alpha1-admin, uid: 049f05d2-d3fa-4b7f-8fad-d3220aa1be28]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , 
name: persesdatasources.perses.dev, uid: 60307fce-12a4-4d2e-bf4f-022ada40564c]" 2025-12-12T16:27:44.654919432+00:00 stderr F I1212 16:27:44.653765 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdatasources.perses.dev-v1alpha1-admin, uid: 049f05d2-d3fa-4b7f-8fad-d3220aa1be28]" virtual=false 2025-12-12T16:27:44.683902715+00:00 stderr F I1212 16:27:44.683752 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdashboards.perses.dev-v1alpha1-crdview, uid: 9e12b658-1ef7-4ddc-a7ef-0d8af27a4861]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"persesdashboards.perses.dev","uid":"8e418188-c56d-4d7a-ba7d-60c68ef34f31","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.701960542+00:00 stderr F I1212 16:27:44.701802 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdatasources.perses.dev-v1alpha1-edit, uid: 9e8e0038-5cac-46c3-9675-42f4602bcc79]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: persesdatasources.perses.dev, uid: 60307fce-12a4-4d2e-bf4f-022ada40564c]" 2025-12-12T16:27:44.702144557+00:00 stderr F I1212 16:27:44.702094 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdatasources.perses.dev-v1alpha1-edit, uid: 9e8e0038-5cac-46c3-9675-42f4602bcc79]" virtual=false 2025-12-12T16:27:44.703092461+00:00 stderr F I1212 16:27:44.703059 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdatasources.perses.dev-v1alpha1-view, uid: 3d49904d-a2c0-4507-ad6f-684cc7f345f1]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: persesdatasources.perses.dev, uid: 60307fce-12a4-4d2e-bf4f-022ada40564c]" 2025-12-12T16:27:44.703236935+00:00 stderr F I1212 16:27:44.703176 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdatasources.perses.dev-v1alpha1-view, uid: 3d49904d-a2c0-4507-ad6f-684cc7f345f1]" virtual=false 2025-12-12T16:27:44.704599289+00:00 stderr F I1212 16:27:44.704483 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdatasources.perses.dev-v1alpha1-crdview, uid: 36cdfc8b-fae0-4cc3-8053-576310a15dc8]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: persesdatasources.perses.dev, uid: 60307fce-12a4-4d2e-bf4f-022ada40564c]" 2025-12-12T16:27:44.704651930+00:00 stderr F I1212 16:27:44.704606 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: podmonitors.monitoring.rhobs-v1-edit, uid: ba1adf09-b4f2-49e0-8d5f-260985b63a65]" 
owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: podmonitors.monitoring.rhobs, uid: 6cedb4a3-6858-46ca-95a0-f572d788af1d]" 2025-12-12T16:27:44.704694021+00:00 stderr F I1212 16:27:44.704654 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdatasources.perses.dev-v1alpha1-crdview, uid: 36cdfc8b-fae0-4cc3-8053-576310a15dc8]" virtual=false 2025-12-12T16:27:44.705083491+00:00 stderr F I1212 16:27:44.705044 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: podmonitors.monitoring.rhobs-v1-edit, uid: ba1adf09-b4f2-49e0-8d5f-260985b63a65]" virtual=false 2025-12-12T16:27:44.716235044+00:00 stderr F I1212 16:27:44.714885 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: podmonitors.monitoring.rhobs-v1-view, uid: a9636cf5-effa-4d7b-8332-548c3c0bc998]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: podmonitors.monitoring.rhobs, uid: 6cedb4a3-6858-46ca-95a0-f572d788af1d]" 2025-12-12T16:27:44.716235044+00:00 stderr F I1212 16:27:44.715031 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: podmonitors.monitoring.rhobs-v1-view, uid: a9636cf5-effa-4d7b-8332-548c3c0bc998]" virtual=false 2025-12-12T16:27:44.720344558+00:00 stderr F I1212 16:27:44.700837 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdatasources.perses.dev-v1alpha1-admin, uid: 049f05d2-d3fa-4b7f-8fad-d3220aa1be28]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"persesdatasources.perses.dev","uid":"60307fce-12a4-4d2e-bf4f-022ada40564c","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.720878101+00:00 stderr F I1212 16:27:44.720813 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: podmonitors.monitoring.rhobs-v1-admin, uid: 3ab22d3a-5085-4cc2-8d59-cca5cc6bb702]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: podmonitors.monitoring.rhobs, uid: 6cedb4a3-6858-46ca-95a0-f572d788af1d]" 2025-12-12T16:27:44.725506358+00:00 stderr F I1212 16:27:44.725391 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: podmonitors.monitoring.rhobs-v1-crdview, uid: eda91768-63be-4c96-9baa-0e1f7715465f]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: podmonitors.monitoring.rhobs, uid: 6cedb4a3-6858-46ca-95a0-f572d788af1d]" 2025-12-12T16:27:44.726241087+00:00 stderr F I1212 16:27:44.726105 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: podmonitors.monitoring.rhobs-v1-admin, uid: 3ab22d3a-5085-4cc2-8d59-cca5cc6bb702]" virtual=false 
2025-12-12T16:27:44.726676678+00:00 stderr F I1212 16:27:44.722501 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdatasources.perses.dev-v1alpha1-crdview, uid: 36cdfc8b-fae0-4cc3-8053-576310a15dc8]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"persesdatasources.perses.dev","uid":"60307fce-12a4-4d2e-bf4f-022ada40564c","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.726710169+00:00 stderr F I1212 16:27:44.726676 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: podmonitors.monitoring.rhobs-v1-crdview, uid: eda91768-63be-4c96-9baa-0e1f7715465f]" virtual=false 2025-12-12T16:27:44.726921744+00:00 stderr F I1212 16:27:44.722655 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: podmonitors.monitoring.rhobs-v1-edit, uid: ba1adf09-b4f2-49e0-8d5f-260985b63a65]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"podmonitors.monitoring.rhobs","uid":"6cedb4a3-6858-46ca-95a0-f572d788af1d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.737998574+00:00 stderr F I1212 16:27:44.737860 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: probes.monitoring.rhobs-v1-admin, uid: 97b599af-837a-4652-a326-633e2430d323]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: probes.monitoring.rhobs, uid: 983e0d5d-5976-4e10-b23b-08afae82208a]" 2025-12-12T16:27:44.737998574+00:00 stderr F I1212 16:27:44.737946 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: probes.monitoring.rhobs-v1-admin, uid: 97b599af-837a-4652-a326-633e2430d323]" virtual=false 2025-12-12T16:27:44.738544428+00:00 stderr F I1212 16:27:44.738495 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: probes.monitoring.rhobs-v1-edit, uid: 1ed67edf-fa93-484d-909f-26f6c49af29f]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: probes.monitoring.rhobs, uid: 983e0d5d-5976-4e10-b23b-08afae82208a]" 2025-12-12T16:27:44.738586249+00:00 stderr F I1212 16:27:44.738556 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: probes.monitoring.rhobs-v1-edit, uid: 1ed67edf-fa93-484d-909f-26f6c49af29f]" virtual=false 2025-12-12T16:27:44.739741318+00:00 stderr F I1212 16:27:44.739649 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdatasources.perses.dev-v1alpha1-view, uid: 3d49904d-a2c0-4507-ad6f-684cc7f345f1]" 
owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"persesdatasources.perses.dev","uid":"60307fce-12a4-4d2e-bf4f-022ada40564c","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.740414236+00:00 stderr F I1212 16:27:44.740380 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: persesdatasources.perses.dev-v1alpha1-edit, uid: 9e8e0038-5cac-46c3-9675-42f4602bcc79]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"persesdatasources.perses.dev","uid":"60307fce-12a4-4d2e-bf4f-022ada40564c","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.741590945+00:00 stderr F I1212 16:27:44.741538 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: podmonitors.monitoring.rhobs-v1-view, uid: a9636cf5-effa-4d7b-8332-548c3c0bc998]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"podmonitors.monitoring.rhobs","uid":"6cedb4a3-6858-46ca-95a0-f572d788af1d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.749665910+00:00 stderr F I1212 16:27:44.747394 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: probes.monitoring.rhobs-v1-view, uid: dcf8b82b-25a7-42ba-bcc8-f03a8a3792ff]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: probes.monitoring.rhobs, uid: 983e0d5d-5976-4e10-b23b-08afae82208a]" 2025-12-12T16:27:44.749665910+00:00 stderr F I1212 16:27:44.747487 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: probes.monitoring.rhobs-v1-view, uid: dcf8b82b-25a7-42ba-bcc8-f03a8a3792ff]" virtual=false 2025-12-12T16:27:44.749665910+00:00 stderr F I1212 16:27:44.747972 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: podmonitors.monitoring.rhobs-v1-admin, uid: 3ab22d3a-5085-4cc2-8d59-cca5cc6bb702]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"podmonitors.monitoring.rhobs","uid":"6cedb4a3-6858-46ca-95a0-f572d788af1d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.757887868+00:00 stderr F I1212 16:27:44.757776 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: podmonitors.monitoring.rhobs-v1-crdview, uid: eda91768-63be-4c96-9baa-0e1f7715465f]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"podmonitors.monitoring.rhobs","uid":"6cedb4a3-6858-46ca-95a0-f572d788af1d","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.758822141+00:00 stderr F I1212 16:27:44.758777 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
probes.monitoring.rhobs-v1-admin, uid: 97b599af-837a-4652-a326-633e2430d323]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"probes.monitoring.rhobs","uid":"983e0d5d-5976-4e10-b23b-08afae82208a","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.764687360+00:00 stderr F I1212 16:27:44.764578 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: probes.monitoring.rhobs-v1-crdview, uid: b44b623d-37e2-4c21-bc62-3b4b0551539e]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: probes.monitoring.rhobs, uid: 983e0d5d-5976-4e10-b23b-08afae82208a]" 2025-12-12T16:27:44.764869174+00:00 stderr F I1212 16:27:44.764804 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: probes.monitoring.rhobs-v1-crdview, uid: b44b623d-37e2-4c21-bc62-3b4b0551539e]" virtual=false 2025-12-12T16:27:44.766042174+00:00 stderr F I1212 16:27:44.765989 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: probes.monitoring.rhobs-v1-edit, uid: 1ed67edf-fa93-484d-909f-26f6c49af29f]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"probes.monitoring.rhobs","uid":"983e0d5d-5976-4e10-b23b-08afae82208a","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.766211648+00:00 stderr F I1212 16:27:44.766157 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: probes.monitoring.rhobs-v1-view, uid: dcf8b82b-25a7-42ba-bcc8-f03a8a3792ff]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"probes.monitoring.rhobs","uid":"983e0d5d-5976-4e10-b23b-08afae82208a","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.782219064+00:00 stderr F I1212 16:27:44.782070 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusagents.monitoring.rhobs-v1alpha1-view, uid: 288b2c80-e9e4-45bf-a015-eb4488e58532]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheusagents.monitoring.rhobs, uid: 6663e2d5-d2a5-4596-9d63-b5e473c7aa47]" 2025-12-12T16:27:44.782219064+00:00 stderr F I1212 16:27:44.782152 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusagents.monitoring.rhobs-v1alpha1-view, uid: 288b2c80-e9e4-45bf-a015-eb4488e58532]" virtual=false 2025-12-12T16:27:44.782720006+00:00 stderr F I1212 16:27:44.782602 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusagents.monitoring.rhobs-v1alpha1-admin, uid: 64201c57-81f8-4281-9c4e-29687f8cbdbb]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheusagents.monitoring.rhobs, uid: 
6663e2d5-d2a5-4596-9d63-b5e473c7aa47]" 2025-12-12T16:27:44.782822469+00:00 stderr F I1212 16:27:44.782790 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusagents.monitoring.rhobs-v1alpha1-admin, uid: 64201c57-81f8-4281-9c4e-29687f8cbdbb]" virtual=false 2025-12-12T16:27:44.791866908+00:00 stderr F I1212 16:27:44.790377 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusagents.monitoring.rhobs-v1alpha1-edit, uid: 41bfdda9-9e24-4fc0-80bc-058de5775325]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheusagents.monitoring.rhobs, uid: 6663e2d5-d2a5-4596-9d63-b5e473c7aa47]" 2025-12-12T16:27:44.791866908+00:00 stderr F I1212 16:27:44.790464 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusagents.monitoring.rhobs-v1alpha1-edit, uid: 41bfdda9-9e24-4fc0-80bc-058de5775325]" virtual=false 2025-12-12T16:27:44.791866908+00:00 stderr F I1212 16:27:44.791168 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: probes.monitoring.rhobs-v1-crdview, uid: b44b623d-37e2-4c21-bc62-3b4b0551539e]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"probes.monitoring.rhobs","uid":"983e0d5d-5976-4e10-b23b-08afae82208a","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.798627309+00:00 stderr F I1212 16:27:44.798526 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusagents.monitoring.rhobs-v1alpha1-crdview, uid: 23a21a96-9110-480f-80ab-3990b820e602]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheusagents.monitoring.rhobs, uid: 6663e2d5-d2a5-4596-9d63-b5e473c7aa47]" 2025-12-12T16:27:44.798710401+00:00 stderr F I1212 16:27:44.798617 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusagents.monitoring.rhobs-v1alpha1-crdview, uid: 23a21a96-9110-480f-80ab-3990b820e602]" virtual=false 2025-12-12T16:27:44.803364819+00:00 stderr F I1212 16:27:44.803285 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheuses.monitoring.rhobs-v1-admin, uid: e9fa162b-3bd6-4bce-9dec-1e0dae1d0da1]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheuses.monitoring.rhobs, uid: 0838044d-3d38-4d96-9cb5-2e0fadccc5d3]" 2025-12-12T16:27:44.803364819+00:00 stderr F I1212 16:27:44.803326 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheuses.monitoring.rhobs-v1-admin, uid: e9fa162b-3bd6-4bce-9dec-1e0dae1d0da1]" virtual=false 2025-12-12T16:27:44.823101578+00:00 stderr F I1212 16:27:44.821001 1 
graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheuses.monitoring.rhobs-v1-edit, uid: af1368f8-c805-4eae-b4f6-db0e703133f7]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheuses.monitoring.rhobs, uid: 0838044d-3d38-4d96-9cb5-2e0fadccc5d3]" 2025-12-12T16:27:44.823101578+00:00 stderr F I1212 16:27:44.821066 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheuses.monitoring.rhobs-v1-edit, uid: af1368f8-c805-4eae-b4f6-db0e703133f7]" virtual=false 2025-12-12T16:27:44.838298833+00:00 stderr F I1212 16:27:44.837517 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheuses.monitoring.rhobs-v1-view, uid: 5f86dc6d-4067-4dce-8431-aafc054905f1]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheuses.monitoring.rhobs, uid: 0838044d-3d38-4d96-9cb5-2e0fadccc5d3]" 2025-12-12T16:27:44.838298833+00:00 stderr F I1212 16:27:44.837628 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheuses.monitoring.rhobs-v1-view, uid: 5f86dc6d-4067-4dce-8431-aafc054905f1]" virtual=false 2025-12-12T16:27:44.842353745+00:00 stderr F I1212 16:27:44.842260 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusagents.monitoring.rhobs-v1alpha1-view, uid: 288b2c80-e9e4-45bf-a015-eb4488e58532]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"prometheusagents.monitoring.rhobs","uid":"6663e2d5-d2a5-4596-9d63-b5e473c7aa47","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.870468187+00:00 stderr F I1212 16:27:44.870296 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusagents.monitoring.rhobs-v1alpha1-crdview, uid: 23a21a96-9110-480f-80ab-3990b820e602]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"prometheusagents.monitoring.rhobs","uid":"6663e2d5-d2a5-4596-9d63-b5e473c7aa47","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.871251557+00:00 stderr F I1212 16:27:44.871168 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheuses.monitoring.rhobs-v1-crdview, uid: be971807-04fd-494d-87dc-1c67cdd17950]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheuses.monitoring.rhobs, uid: 0838044d-3d38-4d96-9cb5-2e0fadccc5d3]" 2025-12-12T16:27:44.871367260+00:00 stderr F I1212 16:27:44.871335 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheuses.monitoring.rhobs-v1-crdview, uid: 
be971807-04fd-494d-87dc-1c67cdd17950]" virtual=false 2025-12-12T16:27:44.871681248+00:00 stderr F I1212 16:27:44.871633 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusagents.monitoring.rhobs-v1alpha1-edit, uid: 41bfdda9-9e24-4fc0-80bc-058de5775325]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"prometheusagents.monitoring.rhobs","uid":"6663e2d5-d2a5-4596-9d63-b5e473c7aa47","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.871861532+00:00 stderr F I1212 16:27:44.871785 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusrules.monitoring.rhobs-v1-admin, uid: 480e1d6c-a063-4dc1-b819-dde4e23ac12b]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheusrules.monitoring.rhobs, uid: f9661705-9c1b-41fe-b5f5-2834a2f233da]" 2025-12-12T16:27:44.871861532+00:00 stderr F I1212 16:27:44.871818 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusrules.monitoring.rhobs-v1-admin, uid: 480e1d6c-a063-4dc1-b819-dde4e23ac12b]" virtual=false 2025-12-12T16:27:44.874367256+00:00 stderr F I1212 16:27:44.874311 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheuses.monitoring.rhobs-v1-admin, uid: e9fa162b-3bd6-4bce-9dec-1e0dae1d0da1]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"prometheuses.monitoring.rhobs","uid":"0838044d-3d38-4d96-9cb5-2e0fadccc5d3","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.874654293+00:00 stderr F I1212 16:27:44.874514 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusagents.monitoring.rhobs-v1alpha1-admin, uid: 64201c57-81f8-4281-9c4e-29687f8cbdbb]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"prometheusagents.monitoring.rhobs","uid":"6663e2d5-d2a5-4596-9d63-b5e473c7aa47","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.875934635+00:00 stderr F I1212 16:27:44.875888 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusrules.monitoring.rhobs-v1-edit, uid: 2ace560e-a969-4d2c-8f8a-9f86c223185c]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheusrules.monitoring.rhobs, uid: f9661705-9c1b-41fe-b5f5-2834a2f233da]" 2025-12-12T16:27:44.875951306+00:00 stderr F I1212 16:27:44.875924 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusrules.monitoring.rhobs-v1-edit, uid: 2ace560e-a969-4d2c-8f8a-9f86c223185c]" virtual=false 2025-12-12T16:27:44.879065125+00:00 stderr F I1212 16:27:44.879008 1 graph_builder.go:456] "item 
references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusrules.monitoring.rhobs-v1-view, uid: ef71d218-f93b-4fae-acae-4dae22eff226]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheusrules.monitoring.rhobs, uid: f9661705-9c1b-41fe-b5f5-2834a2f233da]" 2025-12-12T16:27:44.879065125+00:00 stderr F I1212 16:27:44.879047 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusrules.monitoring.rhobs-v1-view, uid: ef71d218-f93b-4fae-acae-4dae22eff226]" virtual=false 2025-12-12T16:27:44.894549736+00:00 stderr F I1212 16:27:44.893033 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusrules.monitoring.rhobs-v1-admin, uid: 480e1d6c-a063-4dc1-b819-dde4e23ac12b]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"prometheusrules.monitoring.rhobs","uid":"f9661705-9c1b-41fe-b5f5-2834a2f233da","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.894549736+00:00 stderr F I1212 16:27:44.893808 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheuses.monitoring.rhobs-v1-edit, uid: af1368f8-c805-4eae-b4f6-db0e703133f7]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"prometheuses.monitoring.rhobs","uid":"0838044d-3d38-4d96-9cb5-2e0fadccc5d3","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.897536602+00:00 stderr F I1212 16:27:44.897438 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusrules.monitoring.rhobs-v1-view, uid: ef71d218-f93b-4fae-acae-4dae22eff226]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"prometheusrules.monitoring.rhobs","uid":"f9661705-9c1b-41fe-b5f5-2834a2f233da","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.899333198+00:00 stderr F I1212 16:27:44.897677 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusrules.monitoring.rhobs-v1-edit, uid: 2ace560e-a969-4d2c-8f8a-9f86c223185c]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"prometheusrules.monitoring.rhobs","uid":"f9661705-9c1b-41fe-b5f5-2834a2f233da","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.899880151+00:00 stderr F I1212 16:27:44.899819 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusrules.monitoring.rhobs-v1-crdview, uid: 804fdabc-36eb-4ea7-b06d-c6e5d08d0ad5]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheusrules.monitoring.rhobs, uid: f9661705-9c1b-41fe-b5f5-2834a2f233da]" 
2025-12-12T16:27:44.899926233+00:00 stderr F I1212 16:27:44.899894 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusrules.monitoring.rhobs-v1-crdview, uid: 804fdabc-36eb-4ea7-b06d-c6e5d08d0ad5]" virtual=false 2025-12-12T16:27:44.903207846+00:00 stderr F I1212 16:27:44.902753 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: scrapeconfigs.monitoring.rhobs-v1alpha1-admin, uid: d3dd9b15-01e6-43a0-bcee-520e9208a419]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: scrapeconfigs.monitoring.rhobs, uid: 2b96ae9f-37d3-4b5e-8598-42b080191429]" 2025-12-12T16:27:44.903207846+00:00 stderr F I1212 16:27:44.902838 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: scrapeconfigs.monitoring.rhobs-v1alpha1-admin, uid: d3dd9b15-01e6-43a0-bcee-520e9208a419]" virtual=false 2025-12-12T16:27:44.905599186+00:00 stderr F I1212 16:27:44.904949 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheuses.monitoring.rhobs-v1-view, uid: 5f86dc6d-4067-4dce-8431-aafc054905f1]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"prometheuses.monitoring.rhobs","uid":"0838044d-3d38-4d96-9cb5-2e0fadccc5d3","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.916076481+00:00 stderr F I1212 16:27:44.915297 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: scrapeconfigs.monitoring.rhobs-v1alpha1-edit, uid: 89e0e9ff-5b96-4c52-aab8-38551e6cd511]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: scrapeconfigs.monitoring.rhobs, uid: 2b96ae9f-37d3-4b5e-8598-42b080191429]" 2025-12-12T16:27:44.916076481+00:00 stderr F I1212 16:27:44.915396 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: scrapeconfigs.monitoring.rhobs-v1alpha1-edit, uid: 89e0e9ff-5b96-4c52-aab8-38551e6cd511]" virtual=false 2025-12-12T16:27:44.917740513+00:00 stderr F I1212 16:27:44.916223 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheusrules.monitoring.rhobs-v1-crdview, uid: 804fdabc-36eb-4ea7-b06d-c6e5d08d0ad5]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"prometheusrules.monitoring.rhobs","uid":"f9661705-9c1b-41fe-b5f5-2834a2f233da","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.920705439+00:00 stderr F I1212 16:27:44.920624 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: scrapeconfigs.monitoring.rhobs-v1alpha1-view, uid: c0a1989a-2a30-4197-a60d-8862a592f389]" 
owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: scrapeconfigs.monitoring.rhobs, uid: 2b96ae9f-37d3-4b5e-8598-42b080191429]" 2025-12-12T16:27:44.920770190+00:00 stderr F I1212 16:27:44.920746 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: scrapeconfigs.monitoring.rhobs-v1alpha1-view, uid: c0a1989a-2a30-4197-a60d-8862a592f389]" virtual=false 2025-12-12T16:27:44.926060064+00:00 stderr F I1212 16:27:44.925966 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheuses.monitoring.rhobs-v1-crdview, uid: be971807-04fd-494d-87dc-1c67cdd17950]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"prometheuses.monitoring.rhobs","uid":"0838044d-3d38-4d96-9cb5-2e0fadccc5d3","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.936630662+00:00 stderr F I1212 16:27:44.936469 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: scrapeconfigs.monitoring.rhobs-v1alpha1-admin, uid: d3dd9b15-01e6-43a0-bcee-520e9208a419]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"scrapeconfigs.monitoring.rhobs","uid":"2b96ae9f-37d3-4b5e-8598-42b080191429","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.937076243+00:00 stderr F I1212 16:27:44.937035 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: scrapeconfigs.monitoring.rhobs-v1alpha1-crdview, uid: aa727189-de8c-4816-8e1b-a07fdafdf4d8]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: scrapeconfigs.monitoring.rhobs, uid: 2b96ae9f-37d3-4b5e-8598-42b080191429]" 2025-12-12T16:27:44.937118664+00:00 stderr F I1212 16:27:44.937088 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: scrapeconfigs.monitoring.rhobs-v1alpha1-crdview, uid: aa727189-de8c-4816-8e1b-a07fdafdf4d8]" virtual=false 2025-12-12T16:27:44.937198656+00:00 stderr F I1212 16:27:44.937142 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: servicemonitors.monitoring.rhobs-v1-admin, uid: 38656845-4dba-4155-8981-d99d836ef737]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: servicemonitors.monitoring.rhobs, uid: f16a2947-4eb3-463d-b2b6-dcd2f33c38dd]" 2025-12-12T16:27:44.937241747+00:00 stderr F I1212 16:27:44.937210 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: servicemonitors.monitoring.rhobs-v1-admin, uid: 38656845-4dba-4155-8981-d99d836ef737]" virtual=false 2025-12-12T16:27:44.951268402+00:00 stderr F I1212 16:27:44.948538 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" 
item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: servicemonitors.monitoring.rhobs-v1-edit, uid: fcfbf601-af5b-41c9-afea-891a06e51609]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: servicemonitors.monitoring.rhobs, uid: f16a2947-4eb3-463d-b2b6-dcd2f33c38dd]" 2025-12-12T16:27:44.951268402+00:00 stderr F I1212 16:27:44.948631 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: servicemonitors.monitoring.rhobs-v1-edit, uid: fcfbf601-af5b-41c9-afea-891a06e51609]" virtual=false 2025-12-12T16:27:44.958797353+00:00 stderr F I1212 16:27:44.958687 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: servicemonitors.monitoring.rhobs-v1-view, uid: 19a05750-0b71-41dd-ab40-05860a8852d0]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: servicemonitors.monitoring.rhobs, uid: f16a2947-4eb3-463d-b2b6-dcd2f33c38dd]" 2025-12-12T16:27:44.958797353+00:00 stderr F I1212 16:27:44.958761 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: servicemonitors.monitoring.rhobs-v1-view, uid: 19a05750-0b71-41dd-ab40-05860a8852d0]" virtual=false 2025-12-12T16:27:44.959386617+00:00 stderr F I1212 16:27:44.959329 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: servicemonitors.monitoring.rhobs-v1-admin, uid: 38656845-4dba-4155-8981-d99d836ef737]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"servicemonitors.monitoring.rhobs","uid":"f16a2947-4eb3-463d-b2b6-dcd2f33c38dd","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.959971002+00:00 stderr F I1212 16:27:44.959916 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: scrapeconfigs.monitoring.rhobs-v1alpha1-view, uid: c0a1989a-2a30-4197-a60d-8862a592f389]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"scrapeconfigs.monitoring.rhobs","uid":"2b96ae9f-37d3-4b5e-8598-42b080191429","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.962475286+00:00 stderr F I1212 16:27:44.962398 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: servicemonitors.monitoring.rhobs-v1-edit, uid: fcfbf601-af5b-41c9-afea-891a06e51609]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"servicemonitors.monitoring.rhobs","uid":"f16a2947-4eb3-463d-b2b6-dcd2f33c38dd","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.962610049+00:00 stderr F I1212 16:27:44.962571 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: scrapeconfigs.monitoring.rhobs-v1alpha1-edit, uid: 89e0e9ff-5b96-4c52-aab8-38551e6cd511]" 
owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"scrapeconfigs.monitoring.rhobs","uid":"2b96ae9f-37d3-4b5e-8598-42b080191429","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.962788294+00:00 stderr F I1212 16:27:44.962739 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: servicemonitors.monitoring.rhobs-v1-crdview, uid: 890d57cd-e8a3-4264-9c6b-d9765aec3b77]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: servicemonitors.monitoring.rhobs, uid: f16a2947-4eb3-463d-b2b6-dcd2f33c38dd]" 2025-12-12T16:27:44.962823794+00:00 stderr F I1212 16:27:44.962793 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: servicemonitors.monitoring.rhobs-v1-crdview, uid: 890d57cd-e8a3-4264-9c6b-d9765aec3b77]" virtual=false 2025-12-12T16:27:44.966450636+00:00 stderr F I1212 16:27:44.966381 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: servicemonitors.monitoring.rhobs-v1-view, uid: 19a05750-0b71-41dd-ab40-05860a8852d0]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"servicemonitors.monitoring.rhobs","uid":"f16a2947-4eb3-463d-b2b6-dcd2f33c38dd","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.969834452+00:00 stderr F I1212 16:27:44.969770 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosqueriers.monitoring.rhobs-v1alpha1-edit, uid: b920f976-dec3-43ba-a14c-c34e24698e35]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: thanosqueriers.monitoring.rhobs, uid: 407bed2f-5c09-40a3-b0c8-7de8b58e5e6f]" 2025-12-12T16:27:44.969947755+00:00 stderr F I1212 16:27:44.969925 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosqueriers.monitoring.rhobs-v1alpha1-edit, uid: b920f976-dec3-43ba-a14c-c34e24698e35]" virtual=false 2025-12-12T16:27:44.983339344+00:00 stderr F I1212 16:27:44.983195 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: servicemonitors.monitoring.rhobs-v1-crdview, uid: 890d57cd-e8a3-4264-9c6b-d9765aec3b77]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"servicemonitors.monitoring.rhobs","uid":"f16a2947-4eb3-463d-b2b6-dcd2f33c38dd","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.993874850+00:00 stderr F I1212 16:27:44.993777 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: scrapeconfigs.monitoring.rhobs-v1alpha1-crdview, uid: aa727189-de8c-4816-8e1b-a07fdafdf4d8]" 
owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"scrapeconfigs.monitoring.rhobs","uid":"2b96ae9f-37d3-4b5e-8598-42b080191429","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.993960992+00:00 stderr F I1212 16:27:44.993855 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosqueriers.monitoring.rhobs-v1alpha1-view, uid: 8f51abd2-6611-4f82-9836-6e0b7ba26cb3]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: thanosqueriers.monitoring.rhobs, uid: 407bed2f-5c09-40a3-b0c8-7de8b58e5e6f]" 2025-12-12T16:27:44.993960992+00:00 stderr F I1212 16:27:44.993898 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosqueriers.monitoring.rhobs-v1alpha1-admin, uid: a34bca6f-ea00-49fe-aa11-1124ef04d49d]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: thanosqueriers.monitoring.rhobs, uid: 407bed2f-5c09-40a3-b0c8-7de8b58e5e6f]" 2025-12-12T16:27:44.993960992+00:00 stderr F I1212 16:27:44.993918 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosqueriers.monitoring.rhobs-v1alpha1-view, uid: 8f51abd2-6611-4f82-9836-6e0b7ba26cb3]" virtual=false 2025-12-12T16:27:44.994042105+00:00 stderr F I1212 16:27:44.993960 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosqueriers.monitoring.rhobs-v1alpha1-edit, uid: b920f976-dec3-43ba-a14c-c34e24698e35]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"thanosqueriers.monitoring.rhobs","uid":"407bed2f-5c09-40a3-b0c8-7de8b58e5e6f","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:44.994100956+00:00 stderr F I1212 16:27:44.994048 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosqueriers.monitoring.rhobs-v1alpha1-admin, uid: a34bca6f-ea00-49fe-aa11-1124ef04d49d]" virtual=false 2025-12-12T16:27:45.012023830+00:00 stderr F I1212 16:27:45.011909 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosqueriers.monitoring.rhobs-v1alpha1-crdview, uid: b70bdf30-022b-422d-9eb5-9fc204a56629]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: thanosqueriers.monitoring.rhobs, uid: 407bed2f-5c09-40a3-b0c8-7de8b58e5e6f]" 2025-12-12T16:27:45.012023830+00:00 stderr F I1212 16:27:45.011993 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosqueriers.monitoring.rhobs-v1alpha1-crdview, uid: b70bdf30-022b-422d-9eb5-9fc204a56629]" virtual=false 2025-12-12T16:27:45.012604094+00:00 stderr F I1212 16:27:45.012565 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" 
item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosrulers.monitoring.rhobs-v1-admin, uid: ed27b1ae-7161-438b-99f3-c04970297096]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: thanosrulers.monitoring.rhobs, uid: d1ed0e9d-d220-4ec7-a444-381282aa8c79]" 2025-12-12T16:27:45.012622645+00:00 stderr F I1212 16:27:45.012597 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosrulers.monitoring.rhobs-v1-admin, uid: ed27b1ae-7161-438b-99f3-c04970297096]" virtual=false 2025-12-12T16:27:45.019279223+00:00 stderr F I1212 16:27:45.017119 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosrulers.monitoring.rhobs-v1-edit, uid: 99c2bf72-72a2-4904-9f51-16237743d79a]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: thanosrulers.monitoring.rhobs, uid: d1ed0e9d-d220-4ec7-a444-381282aa8c79]" 2025-12-12T16:27:45.019279223+00:00 stderr F I1212 16:27:45.017201 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosrulers.monitoring.rhobs-v1-edit, uid: 99c2bf72-72a2-4904-9f51-16237743d79a]" virtual=false 2025-12-12T16:27:45.020441763+00:00 stderr F I1212 16:27:45.020347 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosqueriers.monitoring.rhobs-v1alpha1-view, uid: 8f51abd2-6611-4f82-9836-6e0b7ba26cb3]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"thanosqueriers.monitoring.rhobs","uid":"407bed2f-5c09-40a3-b0c8-7de8b58e5e6f","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:45.023510570+00:00 stderr F I1212 16:27:45.020672 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosqueriers.monitoring.rhobs-v1alpha1-admin, uid: a34bca6f-ea00-49fe-aa11-1124ef04d49d]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"thanosqueriers.monitoring.rhobs","uid":"407bed2f-5c09-40a3-b0c8-7de8b58e5e6f","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:45.023510570+00:00 stderr F I1212 16:27:45.022949 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosrulers.monitoring.rhobs-v1-view, uid: 0b424693-3e7f-4d86-baf0-769d21c80448]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: thanosrulers.monitoring.rhobs, uid: d1ed0e9d-d220-4ec7-a444-381282aa8c79]" 2025-12-12T16:27:45.023510570+00:00 stderr F I1212 16:27:45.023026 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosrulers.monitoring.rhobs-v1-view, uid: 0b424693-3e7f-4d86-baf0-769d21c80448]" virtual=false 2025-12-12T16:27:45.027788489+00:00 stderr F I1212 16:27:45.027717 1 garbagecollector.go:567] "item has at least one existing 
owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosqueriers.monitoring.rhobs-v1alpha1-crdview, uid: b70bdf30-022b-422d-9eb5-9fc204a56629]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"thanosqueriers.monitoring.rhobs","uid":"407bed2f-5c09-40a3-b0c8-7de8b58e5e6f","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:45.029743178+00:00 stderr F I1212 16:27:45.029652 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosrulers.monitoring.rhobs-v1-crdview, uid: b9a6dfe3-bd78-4c9b-bd73-66087e5de740]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: thanosrulers.monitoring.rhobs, uid: d1ed0e9d-d220-4ec7-a444-381282aa8c79]" 2025-12-12T16:27:45.029791159+00:00 stderr F I1212 16:27:45.029756 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosrulers.monitoring.rhobs-v1-crdview, uid: b9a6dfe3-bd78-4c9b-bd73-66087e5de740]" virtual=false 2025-12-12T16:27:45.032961460+00:00 stderr F I1212 16:27:45.032906 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: uiplugins.observability.openshift.io-v1alpha1-admin, uid: ea1787a2-78ae-43cb-a9d4-550861a69382]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: uiplugins.observability.openshift.io, uid: 2bc3cdf5-608d-41c2-a740-69086fb4b14e]" 2025-12-12T16:27:45.033039381+00:00 stderr F I1212 16:27:45.033012 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: uiplugins.observability.openshift.io-v1alpha1-admin, uid: ea1787a2-78ae-43cb-a9d4-550861a69382]" virtual=false 2025-12-12T16:27:45.043052275+00:00 stderr F I1212 16:27:45.042102 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: uiplugins.observability.openshift.io-v1alpha1-edit, uid: 55e1faad-5135-4ace-b487-7fd647bd6566]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: uiplugins.observability.openshift.io, uid: 2bc3cdf5-608d-41c2-a740-69086fb4b14e]" 2025-12-12T16:27:45.043052275+00:00 stderr F I1212 16:27:45.042159 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: uiplugins.observability.openshift.io-v1alpha1-edit, uid: 55e1faad-5135-4ace-b487-7fd647bd6566]" virtual=false 2025-12-12T16:27:45.046639916+00:00 stderr F I1212 16:27:45.046525 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: uiplugins.observability.openshift.io-v1alpha1-admin, uid: ea1787a2-78ae-43cb-a9d4-550861a69382]" 
owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"uiplugins.observability.openshift.io","uid":"2bc3cdf5-608d-41c2-a740-69086fb4b14e","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:45.051841547+00:00 stderr F I1212 16:27:45.051751 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosrulers.monitoring.rhobs-v1-view, uid: 0b424693-3e7f-4d86-baf0-769d21c80448]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"thanosrulers.monitoring.rhobs","uid":"d1ed0e9d-d220-4ec7-a444-381282aa8c79","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:45.051884328+00:00 stderr F I1212 16:27:45.051823 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: uiplugins.observability.openshift.io-v1alpha1-edit, uid: 55e1faad-5135-4ace-b487-7fd647bd6566]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"uiplugins.observability.openshift.io","uid":"2bc3cdf5-608d-41c2-a740-69086fb4b14e","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:45.051884328+00:00 stderr F I1212 16:27:45.051852 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosrulers.monitoring.rhobs-v1-admin, uid: ed27b1ae-7161-438b-99f3-c04970297096]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"thanosrulers.monitoring.rhobs","uid":"d1ed0e9d-d220-4ec7-a444-381282aa8c79","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:45.052106764+00:00 stderr F I1212 16:27:45.052041 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: uiplugins.observability.openshift.io-v1alpha1-view, uid: 17aa4711-405b-41a4-8002-f1c71112c363]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: uiplugins.observability.openshift.io, uid: 2bc3cdf5-608d-41c2-a740-69086fb4b14e]" 2025-12-12T16:27:45.052141575+00:00 stderr F I1212 16:27:45.052111 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: uiplugins.observability.openshift.io-v1alpha1-view, uid: 17aa4711-405b-41a4-8002-f1c71112c363]" virtual=false 2025-12-12T16:27:45.055022848+00:00 stderr F I1212 16:27:45.054929 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: uiplugins.observability.openshift.io-v1alpha1-crdview, uid: c4c89424-92c9-4539-a520-56e1d4c11fa9]" owner="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: uiplugins.observability.openshift.io, uid: 2bc3cdf5-608d-41c2-a740-69086fb4b14e]" 2025-12-12T16:27:45.055044018+00:00 stderr F I1212 16:27:45.055021 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: 
uiplugins.observability.openshift.io-v1alpha1-crdview, uid: c4c89424-92c9-4539-a520-56e1d4c11fa9]" virtual=false 2025-12-12T16:27:45.064831536+00:00 stderr F I1212 16:27:45.064716 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: uiplugins.observability.openshift.io-v1alpha1-view, uid: 17aa4711-405b-41a4-8002-f1c71112c363]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"uiplugins.observability.openshift.io","uid":"2bc3cdf5-608d-41c2-a740-69086fb4b14e","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:45.068751665+00:00 stderr F I1212 16:27:45.068656 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: uiplugins.observability.openshift.io-v1alpha1-crdview, uid: c4c89424-92c9-4539-a520-56e1d4c11fa9]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"uiplugins.observability.openshift.io","uid":"2bc3cdf5-608d-41c2-a740-69086fb4b14e","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:45.068858468+00:00 stderr F I1212 16:27:45.068791 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosrulers.monitoring.rhobs-v1-crdview, uid: b9a6dfe3-bd78-4c9b-bd73-66087e5de740]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"thanosrulers.monitoring.rhobs","uid":"d1ed0e9d-d220-4ec7-a444-381282aa8c79","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:45.068858468+00:00 stderr F I1212 16:27:45.068827 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: thanosrulers.monitoring.rhobs-v1-edit, uid: 99c2bf72-72a2-4904-9f51-16237743d79a]" owner=[{"apiVersion":"apiextensions.k8s.io/v1","kind":"customresourcedefinition","name":"thanosrulers.monitoring.rhobs","uid":"d1ed0e9d-d220-4ec7-a444-381282aa8c79","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:27:46.288689001+00:00 stderr F I1212 16:27:46.287753 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-858d87f86b" need=1 creating=1 2025-12-12T16:27:46.297343890+00:00 stderr F I1212 16:27:46.296587 1 replica_set.go:615] "Slow-start failure. 
Skipping creation of pods, decrementing expectations" logger="replicaset-controller" podsSkipped=1 kind="ReplicaSet" replicaSet="cert-manager/cert-manager-858d87f86b" 2025-12-12T16:27:46.297343890+00:00 stderr F E1212 16:27:46.296691 1 replica_set.go:562] "Unhandled Error" err="sync \"cert-manager/cert-manager-858d87f86b\" failed with pods \"cert-manager-858d87f86b-\" is forbidden: error looking up service account cert-manager/cert-manager: serviceaccount \"cert-manager\" not found" logger="UnhandledError" 2025-12-12T16:27:51.422098492+00:00 stderr F I1212 16:27:51.421906 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="cert-manager/cert-manager-858d87f86b" need=1 creating=1 2025-12-12T16:27:51.459705694+00:00 stderr F I1212 16:27:51.459063 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="cert-manager/cert-manager" err="Operation cannot be fulfilled on deployments.apps \"cert-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:28:27.479703450+00:00 stderr F I1212 16:28:27.478894 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-1-global-ca, uid: 87190996-18a3-4be0-a00f-21a29046c5d5]" virtual=false 2025-12-12T16:28:27.479703450+00:00 stderr F I1212 16:28:27.479516 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-1-ca, uid: cb9d11d1-674c-491f-b7bc-1f8591f44b6a]" virtual=false 2025-12-12T16:28:27.479703450+00:00 stderr F I1212 16:28:27.479552 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-1-sys-config, uid: 088141f1-db83-46df-b813-317ac73ccea1]" virtual=false 2025-12-12T16:28:27.492693229+00:00 stderr F I1212 16:28:27.492616 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-1-ca, uid: cb9d11d1-674c-491f-b7bc-1f8591f44b6a]" propagationPolicy="Background" 2025-12-12T16:28:27.492915875+00:00 stderr F I1212 16:28:27.492894 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-1-global-ca, uid: 87190996-18a3-4be0-a00f-21a29046c5d5]" propagationPolicy="Background" 2025-12-12T16:28:27.493126440+00:00 stderr F I1212 16:28:27.493103 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-1-sys-config, uid: 088141f1-db83-46df-b813-317ac73ccea1]" propagationPolicy="Background" 2025-12-12T16:29:00.733723265+00:00 stderr F I1212 16:29:00.733464 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-2-ca, uid: 9c6a22c9-b73d-4849-b5e9-9ee25313739e]" virtual=false 2025-12-12T16:29:00.733860709+00:00 stderr F I1212 16:29:00.733821 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-2-sys-config, uid: 
7d17cf04-af30-4f6b-a2df-146b35d3fa9e]" virtual=false 2025-12-12T16:29:00.735424398+00:00 stderr F I1212 16:29:00.733883 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-2-global-ca, uid: b7a2af5d-4d40-49c7-8464-f86c1bb207b9]" virtual=false 2025-12-12T16:29:00.750095290+00:00 stderr F I1212 16:29:00.747515 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-2-global-ca, uid: b7a2af5d-4d40-49c7-8464-f86c1bb207b9]" propagationPolicy="Background" 2025-12-12T16:29:00.750095290+00:00 stderr F I1212 16:29:00.748067 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-2-sys-config, uid: 7d17cf04-af30-4f6b-a2df-146b35d3fa9e]" propagationPolicy="Background" 2025-12-12T16:29:00.750095290+00:00 stderr F I1212 16:29:00.748215 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-2-ca, uid: 9c6a22c9-b73d-4849-b5e9-9ee25313739e]" propagationPolicy="Background" 2025-12-12T16:29:09.857272833+00:00 stderr F I1212 16:29:09.857129 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-3-global-ca, uid: 3c858481-30c0-4fc4-9393-4a1687ec0889]" virtual=false 2025-12-12T16:29:09.857366335+00:00 stderr F I1212 16:29:09.857129 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-3-sys-config, uid: 39b45f69-60b7-40f0-903e-04b322903f84]" virtual=false 2025-12-12T16:29:09.857491958+00:00 stderr F I1212 16:29:09.857355 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-3-ca, uid: 45d86912-2d31-4f21-a33e-965f482ad5e5]" virtual=false 2025-12-12T16:29:09.871402629+00:00 stderr F I1212 16:29:09.871062 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-3-global-ca, uid: 3c858481-30c0-4fc4-9393-4a1687ec0889]" propagationPolicy="Background" 2025-12-12T16:29:09.871402629+00:00 stderr F I1212 16:29:09.871082 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-3-sys-config, uid: 39b45f69-60b7-40f0-903e-04b322903f84]" propagationPolicy="Background" 2025-12-12T16:29:09.872979099+00:00 stderr F I1212 16:29:09.871809 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-3-ca, uid: 45d86912-2d31-4f21-a33e-965f482ad5e5]" propagationPolicy="Background" 2025-12-12T16:29:23.965314966+00:00 stderr F I1212 16:29:23.965061 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-4-ca, uid: 2347e1ed-7503-4a9d-9272-af754e3d58f8]" virtual=false 
2025-12-12T16:29:23.965314966+00:00 stderr F I1212 16:29:23.965129 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-4-sys-config, uid: 78601b72-1851-4556-aaf2-3255e715b90c]" virtual=false 2025-12-12T16:29:23.965375937+00:00 stderr F I1212 16:29:23.965304 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-4-global-ca, uid: efeff3ab-c354-4f55-8b31-9faadd4669d8]" virtual=false 2025-12-12T16:29:23.968427184+00:00 stderr F I1212 16:29:23.968372 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-4-ca, uid: 2347e1ed-7503-4a9d-9272-af754e3d58f8]" propagationPolicy="Background" 2025-12-12T16:29:23.968702251+00:00 stderr F I1212 16:29:23.968665 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-4-global-ca, uid: efeff3ab-c354-4f55-8b31-9faadd4669d8]" propagationPolicy="Background" 2025-12-12T16:29:23.968772633+00:00 stderr F I1212 16:29:23.968730 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: service-telemetry, name: service-telemetry-framework-index-4-sys-config, uid: 78601b72-1851-4556-aaf2-3255e715b90c]" propagationPolicy="Background" 2025-12-12T16:30:00.138009729+00:00 stderr F I1212 16:30:00.137427 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="0s" 2025-12-12T16:30:00.149008226+00:00 stderr F I1212 16:30:00.148908 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:00.154801613+00:00 stderr F I1212 16:30:00.154401 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:00.154880124+00:00 stderr F I1212 16:30:00.154834 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:00.166389025+00:00 stderr F I1212 16:30:00.165630 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:00.209252435+00:00 stderr F I1212 16:30:00.208827 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:00.965468059+00:00 stderr F I1212 16:30:00.965387 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:01.237516558+00:00 stderr F I1212 16:30:01.237446 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:02.246881455+00:00 stderr F I1212 16:30:02.246751 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:02.249958972+00:00 
stderr F I1212 16:30:02.249882 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:03.253971494+00:00 stderr F I1212 16:30:03.253893 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:03.497351150+00:00 stderr F I1212 16:30:03.495483 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:04.264841378+00:00 stderr F I1212 16:30:04.263033 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:04.272596104+00:00 stderr F I1212 16:30:04.272460 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425950" delay="1s" 2025-12-12T16:30:04.283156840+00:00 stderr F I1212 16:30:04.283078 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29369355" delay="0s" 2025-12-12T16:43:04.726338977+00:00 stderr F I1212 16:43:04.726139 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: must-gather-k5chq, uid: 9b7fddba-067d-4250-a782-d5a57f8fd401]" virtual=false 2025-12-12T16:43:04.739325624+00:00 stderr F I1212 16:43:04.739211 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: must-gather-k5chq, uid: 9b7fddba-067d-4250-a782-d5a57f8fd401]" propagationPolicy="Background" 2025-12-12T16:43:09.715535062+00:00 stderr F I1212 16:43:09.715457 1 namespace_controller.go:187] "Namespace has been deleted" logger="namespace-controller" namespace="openshift-must-gather-2sjxj"

home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager/0.log

2025-12-12T16:15:01.386709813+00:00 stderr F + timeout 3m /bin/bash -exuo pipefail -c 'while [ -n "$(ss -Htanop \( sport = 10257 \))" ]; do sleep 1; done' 2025-12-12T16:15:01.390407003+00:00 stderr F ++ ss -Htanop '(' sport = 10257 ')' 2025-12-12T16:15:01.407120097+00:00 stderr F + '[' -n '' ']' 2025-12-12T16:15:01.407944075+00:00 stderr F + '[' -f /etc/kubernetes/static-pod-certs/configmaps/trusted-ca-bundle/ca-bundle.crt ']' 2025-12-12T16:15:01.408217101+00:00 stdout F Copying system trust bundle 2025-12-12T16:15:01.408229781+00:00 stderr F + echo 'Copying system trust bundle' 2025-12-12T16:15:01.408229781+00:00 stderr F + cp -f /etc/kubernetes/static-pod-certs/configmaps/trusted-ca-bundle/ca-bundle.crt /etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem 2025-12-12T16:15:01.415910958+00:00 stderr F + '[' -f /etc/kubernetes/static-pod-resources/configmaps/cloud-config/ca-bundle.pem ']' 2025-12-12T16:15:01.416509751+00:00 stderr F + exec hyperkube kube-controller-manager
--openshift-config=/etc/kubernetes/static-pod-resources/configmaps/config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --authentication-kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --authorization-kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --client-ca-file=/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt --requestheader-client-ca-file=/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt -v=2 --tls-cert-file=/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt --tls-private-key-file=/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key --allocate-node-cidrs=false --cert-dir=/var/run/kubernetes --cloud-provider=external --cluster-cidr=10.217.0.0/22 --cluster-name=crc-rzkkk --cluster-signing-cert-file=/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt --cluster-signing-duration=720h --cluster-signing-key-file=/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key '--controllers=*' --controllers=-bootstrapsigner --controllers=-tokencleaner --controllers=-ttl --controllers=selinux-warning-controller --enable-dynamic-provisioning=true --feature-gates=AWSClusterHostedDNS=false --feature-gates=AWSClusterHostedDNSInstall=false --feature-gates=AWSDedicatedHosts=false --feature-gates=AWSServiceLBNetworkSecurityGroup=false --feature-gates=AdditionalRoutingCapabilities=true --feature-gates=AdminNetworkPolicy=true --feature-gates=AlibabaPlatform=true --feature-gates=AutomatedEtcdBackup=false --feature-gates=AzureClusterHostedDNSInstall=false --feature-gates=AzureDedicatedHosts=false --feature-gates=AzureMultiDisk=false --feature-gates=AzureWorkloadIdentity=true --feature-gates=BootImageSkewEnforcement=false --feature-gates=BootcNodeManagement=false --feature-gates=BuildCSIVolumes=true --feature-gates=CPMSMachineNamePrefix=true --feature-gates=ClusterAPIInstall=false --feature-gates=ClusterAPIInstallIBMCloud=false --feature-gates=ClusterMonitoringConfig=false --feature-gates=ClusterVersionOperatorConfiguration=false --feature-gates=ConsolePluginContentSecurityPolicy=true --feature-gates=DNSNameResolver=false --feature-gates=DualReplica=false --feature-gates=DyanmicServiceEndpointIBMCloud=false --feature-gates=DynamicResourceAllocation=false --feature-gates=EtcdBackendQuota=false --feature-gates=EventedPLEG=false --feature-gates=Example2=false --feature-gates=Example=false --feature-gates=ExternalOIDC=false --feature-gates=ExternalOIDCWithUIDAndExtraClaimMappings=false --feature-gates=ExternalSnapshotMetadata=false --feature-gates=GCPClusterHostedDNS=false --feature-gates=GCPClusterHostedDNSInstall=false --feature-gates=GCPCustomAPIEndpoints=false --feature-gates=GCPCustomAPIEndpointsInstall=false --feature-gates=GatewayAPI=true --feature-gates=GatewayAPIController=true --feature-gates=HighlyAvailableArbiter=true --feature-gates=ImageModeStatusReporting=false --feature-gates=ImageStreamImportMode=false --feature-gates=ImageVolume=true --feature-gates=IngressControllerDynamicConfigurationManager=false --feature-gates=IngressControllerLBSubnetsAWS=true --feature-gates=InsightsConfig=false --feature-gates=InsightsConfigAPI=false --feature-gates=InsightsOnDemandDataGather=false --feature-gates=IrreconcilableMachineConfig=false --feature-gates=KMSEncryptionProvider=false --feature-gates=KMSv1=true --feature-gates=MachineAPIMigration=false 
--feature-gates=MachineAPIOperatorDisableMachineHealthCheckController=false --feature-gates=MachineConfigNodes=true --feature-gates=ManagedBootImages=true --feature-gates=ManagedBootImagesAWS=true --feature-gates=ManagedBootImagesAzure=false --feature-gates=ManagedBootImagesvSphere=false --feature-gates=MaxUnavailableStatefulSet=false --feature-gates=MetricsCollectionProfiles=true --feature-gates=MinimumKubeletVersion=false --feature-gates=MixedCPUsAllocation=false --feature-gates=MultiArchInstallAzure=false --feature-gates=MultiDiskSetup=false --feature-gates=MutatingAdmissionPolicy=false --feature-gates=NetworkDiagnosticsConfig=true --feature-gates=NetworkLiveMigration=true --feature-gates=NetworkSegmentation=true --feature-gates=NewOLM=true --feature-gates=NewOLMCatalogdAPIV1Metas=false --feature-gates=NewOLMOwnSingleNamespace=false --feature-gates=NewOLMPreflightPermissionChecks=false --feature-gates=NewOLMWebhookProviderOpenshiftServiceCA=false --feature-gates=NoRegistryClusterOperations=false --feature-gates=NodeSwap=false --feature-gates=NutanixMultiSubnets=false --feature-gates=OVNObservability=false --feature-gates=OpenShiftPodSecurityAdmission=false --feature-gates=PinnedImages=true --feature-gates=PreconfiguredUDNAddresses=false --feature-gates=ProcMountType=true --feature-gates=RouteAdvertisements=true --feature-gates=RouteExternalCertificate=true --feature-gates=SELinuxMount=false --feature-gates=ServiceAccountTokenNodeBinding=true --feature-gates=SetEIPForNLBIngressController=true --feature-gates=ShortCertRotation=false --feature-gates=SignatureStores=false --feature-gates=SigstoreImageVerification=true --feature-gates=SigstoreImageVerificationPKI=false --feature-gates=StoragePerformantSecurityPolicy=true --feature-gates=TranslateStreamCloseWebsocketRequests=false --feature-gates=UpgradeStatus=true --feature-gates=UserNamespacesPodSecurityStandards=true --feature-gates=UserNamespacesSupport=true --feature-gates=VSphereConfigurableMaxAllowedBlockVolumesPerNode=false --feature-gates=VSphereHostVMGroupZonal=false --feature-gates=VSphereMixedNodeEnv=false --feature-gates=VSphereMultiDisk=true --feature-gates=VSphereMultiNetworks=true --feature-gates=VolumeAttributesClass=false --feature-gates=VolumeGroupSnapshot=false --flex-volume-plugin-dir=/etc/kubernetes/kubelet-plugins/volume/exec --kube-api-burst=300 --kube-api-qps=150 --leader-elect-renew-deadline=12s --leader-elect-resource-lock=leases --leader-elect-retry-period=3s --leader-elect=true --pv-recycler-pod-template-filepath-hostpath=/etc/kubernetes/static-pod-resources/configmaps/recycler-config/recycler-pod.yaml --pv-recycler-pod-template-filepath-nfs=/etc/kubernetes/static-pod-resources/configmaps/recycler-config/recycler-pod.yaml --root-ca-file=/etc/kubernetes/static-pod-resources/configmaps/serviceaccount-ca/ca-bundle.crt --secure-port=10257 --service-account-private-key-file=/etc/kubernetes/static-pod-resources/secrets/service-account-private-key/service-account.key --service-cluster-ip-range=10.217.4.0/23 --use-service-account-credentials=true --tls-cipher-suites=TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 --tls-min-version=VersionTLS12 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.652818 1 feature_gate.go:328] unrecognized feature 
gate: ClusterAPIInstall 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653048 1 feature_gate.go:328] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653053 1 feature_gate.go:328] unrecognized feature gate: InsightsOnDemandDataGather 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653056 1 feature_gate.go:328] unrecognized feature gate: ExternalOIDC 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653059 1 feature_gate.go:328] unrecognized feature gate: AzureWorkloadIdentity 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653066 1 feature_gate.go:328] unrecognized feature gate: IngressControllerLBSubnetsAWS 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653068 1 feature_gate.go:328] unrecognized feature gate: CPMSMachineNamePrefix 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653071 1 feature_gate.go:328] unrecognized feature gate: AdditionalRoutingCapabilities 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653074 1 feature_gate.go:328] unrecognized feature gate: MultiDiskSetup 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653076 1 feature_gate.go:328] unrecognized feature gate: InsightsConfigAPI 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653078 1 feature_gate.go:328] unrecognized feature gate: NoRegistryClusterOperations 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653081 1 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNSInstall 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653083 1 feature_gate.go:328] unrecognized feature gate: VSphereHostVMGroupZonal 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653085 1 feature_gate.go:328] unrecognized feature gate: ImageStreamImportMode 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653087 1 feature_gate.go:328] unrecognized feature gate: InsightsConfig 2025-12-12T16:15:01.653095569+00:00 stderr F W1212 16:15:01.653090 1 feature_gate.go:328] unrecognized feature gate: SignatureStores 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653092 1 feature_gate.go:328] unrecognized feature gate: UpgradeStatus 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653095 1 feature_gate.go:328] unrecognized feature gate: AzureDedicatedHosts 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653097 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImages 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653099 1 feature_gate.go:328] unrecognized feature gate: NetworkSegmentation 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653101 1 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerificationPKI 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653104 1 feature_gate.go:328] unrecognized feature gate: NutanixMultiSubnets 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653106 1 feature_gate.go:328] unrecognized feature gate: BootImageSkewEnforcement 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653109 1 feature_gate.go:328] unrecognized feature gate: AutomatedEtcdBackup 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653111 1 feature_gate.go:328] unrecognized feature gate: ShortCertRotation 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653115 1 feature_gate.go:328] unrecognized feature gate: DyanmicServiceEndpointIBMCloud 
2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653117 1 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpoints 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653119 1 feature_gate.go:328] unrecognized feature gate: GatewayAPIController 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653122 1 feature_gate.go:328] unrecognized feature gate: NewOLMCatalogdAPIV1Metas 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653130 1 feature_gate.go:328] unrecognized feature gate: NewOLM 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653133 1 feature_gate.go:328] unrecognized feature gate: VSphereMultiDisk 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653136 1 feature_gate.go:328] unrecognized feature gate: ConsolePluginContentSecurityPolicy 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653138 1 feature_gate.go:328] unrecognized feature gate: ClusterMonitoringConfig 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653140 1 feature_gate.go:328] unrecognized feature gate: GatewayAPI 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653143 1 feature_gate.go:328] unrecognized feature gate: DualReplica 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653149 1 feature_gate.go:351] Setting GA feature gate ServiceAccountTokenNodeBinding=true. It will be removed in a future release. 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653153 1 feature_gate.go:328] unrecognized feature gate: IngressControllerDynamicConfigurationManager 2025-12-12T16:15:01.653161551+00:00 stderr F W1212 16:15:01.653156 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAWS 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653158 1 feature_gate.go:328] unrecognized feature gate: HighlyAvailableArbiter 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653162 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAzure 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653164 1 feature_gate.go:328] unrecognized feature gate: AzureMultiDisk 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653167 1 feature_gate.go:328] unrecognized feature gate: MixedCPUsAllocation 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653169 1 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNSInstall 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653171 1 feature_gate.go:328] unrecognized feature gate: VSphereMixedNodeEnv 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653188 1 feature_gate.go:328] unrecognized feature gate: ExternalSnapshotMetadata 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653190 1 feature_gate.go:328] unrecognized feature gate: NewOLMOwnSingleNamespace 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653193 1 feature_gate.go:328] unrecognized feature gate: ExternalOIDCWithUIDAndExtraClaimMappings 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653195 1 feature_gate.go:328] unrecognized feature gate: MultiArchInstallAzure 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653197 1 feature_gate.go:328] unrecognized feature gate: MachineConfigNodes 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653200 1 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNS 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653202 1 feature_gate.go:328] unrecognized feature gate: OpenShiftPodSecurityAdmission 
2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653204 1 feature_gate.go:328] unrecognized feature gate: ClusterVersionOperatorConfiguration 2025-12-12T16:15:01.653211952+00:00 stderr F W1212 16:15:01.653206 1 feature_gate.go:328] unrecognized feature gate: BuildCSIVolumes 2025-12-12T16:15:01.653233272+00:00 stderr F W1212 16:15:01.653209 1 feature_gate.go:328] unrecognized feature gate: PinnedImages 2025-12-12T16:15:01.653233272+00:00 stderr F W1212 16:15:01.653212 1 feature_gate.go:328] unrecognized feature gate: VolumeGroupSnapshot 2025-12-12T16:15:01.653233272+00:00 stderr F W1212 16:15:01.653215 1 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerification 2025-12-12T16:15:01.653233272+00:00 stderr F W1212 16:15:01.653217 1 feature_gate.go:328] unrecognized feature gate: VSphereConfigurableMaxAllowedBlockVolumesPerNode 2025-12-12T16:15:01.653233272+00:00 stderr F W1212 16:15:01.653219 1 feature_gate.go:328] unrecognized feature gate: EtcdBackendQuota 2025-12-12T16:15:01.653233272+00:00 stderr F W1212 16:15:01.653222 1 feature_gate.go:328] unrecognized feature gate: SetEIPForNLBIngressController 2025-12-12T16:15:01.653233272+00:00 stderr F W1212 16:15:01.653224 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesvSphere 2025-12-12T16:15:01.653233272+00:00 stderr F W1212 16:15:01.653227 1 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNS 2025-12-12T16:15:01.653233272+00:00 stderr F W1212 16:15:01.653229 1 feature_gate.go:328] unrecognized feature gate: NetworkDiagnosticsConfig 2025-12-12T16:15:01.653249672+00:00 stderr F W1212 16:15:01.653231 1 feature_gate.go:328] unrecognized feature gate: IrreconcilableMachineConfig 2025-12-12T16:15:01.653249672+00:00 stderr F W1212 16:15:01.653234 1 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstallIBMCloud 2025-12-12T16:15:01.653249672+00:00 stderr F W1212 16:15:01.653236 1 feature_gate.go:328] unrecognized feature gate: OVNObservability 2025-12-12T16:15:01.653249672+00:00 stderr F W1212 16:15:01.653239 1 feature_gate.go:328] unrecognized feature gate: Example2 2025-12-12T16:15:01.653249672+00:00 stderr F W1212 16:15:01.653241 1 feature_gate.go:328] unrecognized feature gate: KMSEncryptionProvider 2025-12-12T16:15:01.653249672+00:00 stderr F W1212 16:15:01.653243 1 feature_gate.go:328] unrecognized feature gate: NewOLMPreflightPermissionChecks 2025-12-12T16:15:01.653249672+00:00 stderr F W1212 16:15:01.653245 1 feature_gate.go:328] unrecognized feature gate: NewOLMWebhookProviderOpenshiftServiceCA 2025-12-12T16:15:01.653258663+00:00 stderr F W1212 16:15:01.653248 1 feature_gate.go:328] unrecognized feature gate: PreconfiguredUDNAddresses 2025-12-12T16:15:01.653258663+00:00 stderr F W1212 16:15:01.653250 1 feature_gate.go:328] unrecognized feature gate: AlibabaPlatform 2025-12-12T16:15:01.653258663+00:00 stderr F W1212 16:15:01.653253 1 feature_gate.go:328] unrecognized feature gate: RouteAdvertisements 2025-12-12T16:15:01.653258663+00:00 stderr F W1212 16:15:01.653255 1 feature_gate.go:328] unrecognized feature gate: ImageModeStatusReporting 2025-12-12T16:15:01.653267233+00:00 stderr F W1212 16:15:01.653258 1 feature_gate.go:328] unrecognized feature gate: AzureClusterHostedDNSInstall 2025-12-12T16:15:01.653267233+00:00 stderr F W1212 16:15:01.653260 1 feature_gate.go:328] unrecognized feature gate: MachineAPIMigration 2025-12-12T16:15:01.653267233+00:00 stderr F W1212 16:15:01.653262 1 feature_gate.go:328] unrecognized feature gate: AdminNetworkPolicy 
2025-12-12T16:15:01.653267233+00:00 stderr F W1212 16:15:01.653265 1 feature_gate.go:328] unrecognized feature gate: AWSServiceLBNetworkSecurityGroup 2025-12-12T16:15:01.653274983+00:00 stderr F W1212 16:15:01.653267 1 feature_gate.go:328] unrecognized feature gate: BootcNodeManagement 2025-12-12T16:15:01.653274983+00:00 stderr F W1212 16:15:01.653271 1 feature_gate.go:328] unrecognized feature gate: MetricsCollectionProfiles 2025-12-12T16:15:01.653281973+00:00 stderr F W1212 16:15:01.653274 1 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpointsInstall 2025-12-12T16:15:01.653281973+00:00 stderr F W1212 16:15:01.653277 1 feature_gate.go:349] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 2025-12-12T16:15:01.653290123+00:00 stderr F W1212 16:15:01.653281 1 feature_gate.go:328] unrecognized feature gate: AWSDedicatedHosts 2025-12-12T16:15:01.653290123+00:00 stderr F W1212 16:15:01.653283 1 feature_gate.go:328] unrecognized feature gate: Example 2025-12-12T16:15:01.653290123+00:00 stderr F W1212 16:15:01.653285 1 feature_gate.go:328] unrecognized feature gate: DNSNameResolver 2025-12-12T16:15:01.653299134+00:00 stderr F W1212 16:15:01.653288 1 feature_gate.go:328] unrecognized feature gate: NetworkLiveMigration 2025-12-12T16:15:01.653299134+00:00 stderr F W1212 16:15:01.653291 1 feature_gate.go:328] unrecognized feature gate: VSphereMultiNetworks 2025-12-12T16:15:01.653544389+00:00 stderr F I1212 16:15:01.653517 1 flags.go:64] FLAG: --allocate-node-cidrs="false" 2025-12-12T16:15:01.653544389+00:00 stderr F I1212 16:15:01.653535 1 flags.go:64] FLAG: --allow-metric-labels="[]" 2025-12-12T16:15:01.653557859+00:00 stderr F I1212 16:15:01.653542 1 flags.go:64] FLAG: --allow-metric-labels-manifest="" 2025-12-12T16:15:01.653557859+00:00 stderr F I1212 16:15:01.653553 1 flags.go:64] FLAG: --allow-untagged-cloud="false" 2025-12-12T16:15:01.653566699+00:00 stderr F I1212 16:15:01.653556 1 flags.go:64] FLAG: --attach-detach-reconcile-sync-period="1m0s" 2025-12-12T16:15:01.653566699+00:00 stderr F I1212 16:15:01.653562 1 flags.go:64] FLAG: --authentication-kubeconfig="/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig" 2025-12-12T16:15:01.653574530+00:00 stderr F I1212 16:15:01.653565 1 flags.go:64] FLAG: --authentication-skip-lookup="false" 2025-12-12T16:15:01.653574530+00:00 stderr F I1212 16:15:01.653568 1 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="10s" 2025-12-12T16:15:01.653574530+00:00 stderr F I1212 16:15:01.653571 1 flags.go:64] FLAG: --authentication-tolerate-lookup-failure="false" 2025-12-12T16:15:01.653582360+00:00 stderr F I1212 16:15:01.653574 1 flags.go:64] FLAG: --authorization-always-allow-paths="[/healthz,/readyz,/livez]" 2025-12-12T16:15:01.653589350+00:00 stderr F I1212 16:15:01.653580 1 flags.go:64] FLAG: --authorization-kubeconfig="/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig" 2025-12-12T16:15:01.653589350+00:00 stderr F I1212 16:15:01.653584 1 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="10s" 2025-12-12T16:15:01.653589350+00:00 stderr F I1212 16:15:01.653586 1 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="10s" 2025-12-12T16:15:01.653597190+00:00 stderr F I1212 16:15:01.653589 1 flags.go:64] FLAG: --bind-address="0.0.0.0" 2025-12-12T16:15:01.653604300+00:00 stderr F I1212 16:15:01.653594 1 flags.go:64] FLAG: --cert-dir="/var/run/kubernetes" 2025-12-12T16:15:01.653604300+00:00 stderr F I1212 
16:15:01.653598 1 flags.go:64] FLAG: --cidr-allocator-type="RangeAllocator" 2025-12-12T16:15:01.653604300+00:00 stderr F I1212 16:15:01.653601 1 flags.go:64] FLAG: --client-ca-file="/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:15:01.653612280+00:00 stderr F I1212 16:15:01.653604 1 flags.go:64] FLAG: --cloud-config="" 2025-12-12T16:15:01.653612280+00:00 stderr F I1212 16:15:01.653607 1 flags.go:64] FLAG: --cloud-provider="external" 2025-12-12T16:15:01.653620831+00:00 stderr F I1212 16:15:01.653609 1 flags.go:64] FLAG: --cluster-cidr="10.217.0.0/22" 2025-12-12T16:15:01.653620831+00:00 stderr F I1212 16:15:01.653613 1 flags.go:64] FLAG: --cluster-name="crc-rzkkk" 2025-12-12T16:15:01.653620831+00:00 stderr F I1212 16:15:01.653616 1 flags.go:64] FLAG: --cluster-signing-cert-file="/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt" 2025-12-12T16:15:01.653630101+00:00 stderr F I1212 16:15:01.653619 1 flags.go:64] FLAG: --cluster-signing-duration="720h0m0s" 2025-12-12T16:15:01.653630101+00:00 stderr F I1212 16:15:01.653622 1 flags.go:64] FLAG: --cluster-signing-key-file="/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:15:01.653630101+00:00 stderr F I1212 16:15:01.653626 1 flags.go:64] FLAG: --cluster-signing-kube-apiserver-client-cert-file="" 2025-12-12T16:15:01.653639491+00:00 stderr F I1212 16:15:01.653628 1 flags.go:64] FLAG: --cluster-signing-kube-apiserver-client-key-file="" 2025-12-12T16:15:01.653639491+00:00 stderr F I1212 16:15:01.653631 1 flags.go:64] FLAG: --cluster-signing-kubelet-client-cert-file="" 2025-12-12T16:15:01.653639491+00:00 stderr F I1212 16:15:01.653634 1 flags.go:64] FLAG: --cluster-signing-kubelet-client-key-file="" 2025-12-12T16:15:01.653639491+00:00 stderr F I1212 16:15:01.653636 1 flags.go:64] FLAG: --cluster-signing-kubelet-serving-cert-file="" 2025-12-12T16:15:01.653656431+00:00 stderr F I1212 16:15:01.653639 1 flags.go:64] FLAG: --cluster-signing-kubelet-serving-key-file="" 2025-12-12T16:15:01.653656431+00:00 stderr F I1212 16:15:01.653642 1 flags.go:64] FLAG: --cluster-signing-legacy-unknown-cert-file="" 2025-12-12T16:15:01.653656431+00:00 stderr F I1212 16:15:01.653645 1 flags.go:64] FLAG: --cluster-signing-legacy-unknown-key-file="" 2025-12-12T16:15:01.653656431+00:00 stderr F I1212 16:15:01.653647 1 flags.go:64] FLAG: --concurrent-cron-job-syncs="5" 2025-12-12T16:15:01.653656431+00:00 stderr F I1212 16:15:01.653651 1 flags.go:64] FLAG: --concurrent-daemonset-syncs="2" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653654 1 flags.go:64] FLAG: --concurrent-deployment-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653657 1 flags.go:64] FLAG: --concurrent-endpoint-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653660 1 flags.go:64] FLAG: --concurrent-ephemeralvolume-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653662 1 flags.go:64] FLAG: --concurrent-gc-syncs="20" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653667 1 flags.go:64] FLAG: --concurrent-horizontal-pod-autoscaler-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653670 1 flags.go:64] FLAG: --concurrent-job-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653673 1 flags.go:64] FLAG: --concurrent-namespace-syncs="10" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653675 1 flags.go:64] FLAG: --concurrent-rc-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653678 1 
flags.go:64] FLAG: --concurrent-replicaset-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653680 1 flags.go:64] FLAG: --concurrent-resource-quota-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653683 1 flags.go:64] FLAG: --concurrent-service-endpoint-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653685 1 flags.go:64] FLAG: --concurrent-service-syncs="1" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653688 1 flags.go:64] FLAG: --concurrent-serviceaccount-token-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653690 1 flags.go:64] FLAG: --concurrent-statefulset-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653693 1 flags.go:64] FLAG: --concurrent-ttl-after-finished-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653695 1 flags.go:64] FLAG: --concurrent-validating-admission-policy-status-syncs="5" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653698 1 flags.go:64] FLAG: --configure-cloud-routes="true" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653700 1 flags.go:64] FLAG: --contention-profiling="false" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653703 1 flags.go:64] FLAG: --controller-start-interval="0s" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653705 1 flags.go:64] FLAG: --controllers="[*,-bootstrapsigner,-tokencleaner,-ttl,selinux-warning-controller]" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653710 1 flags.go:64] FLAG: --disable-attach-detach-reconcile-sync="false" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653712 1 flags.go:64] FLAG: --disable-force-detach-on-timeout="false" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653715 1 flags.go:64] FLAG: --disable-http2-serving="false" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653718 1 flags.go:64] FLAG: --disabled-metrics="[]" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653721 1 flags.go:64] FLAG: --emulated-version="[]" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653724 1 flags.go:64] FLAG: --enable-dynamic-provisioning="true" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653727 1 flags.go:64] FLAG: --enable-garbage-collector="true" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653729 1 flags.go:64] FLAG: --enable-hostpath-provisioner="false" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653732 1 flags.go:64] FLAG: --enable-leader-migration="false" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653735 1 flags.go:64] FLAG: --endpoint-updates-batch-period="0s" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653737 1 flags.go:64] FLAG: --endpointslice-updates-batch-period="0s" 2025-12-12T16:15:01.653769934+00:00 stderr F I1212 16:15:01.653740 1 flags.go:64] FLAG: --external-cloud-volume-plugin="" 2025-12-12T16:15:01.653810065+00:00 stderr F I1212 16:15:01.653742 1 flags.go:64] FLAG: 
--feature-gates=":AWSClusterHostedDNS=false,:AWSClusterHostedDNSInstall=false,:AWSDedicatedHosts=false,:AWSServiceLBNetworkSecurityGroup=false,:AdditionalRoutingCapabilities=true,:AdminNetworkPolicy=true,:AlibabaPlatform=true,:AutomatedEtcdBackup=false,:AzureClusterHostedDNSInstall=false,:AzureDedicatedHosts=false,:AzureMultiDisk=false,:AzureWorkloadIdentity=true,:BootImageSkewEnforcement=false,:BootcNodeManagement=false,:BuildCSIVolumes=true,:CPMSMachineNamePrefix=true,:ClusterAPIInstall=false,:ClusterAPIInstallIBMCloud=false,:ClusterMonitoringConfig=false,:ClusterVersionOperatorConfiguration=false,:ConsolePluginContentSecurityPolicy=true,:DNSNameResolver=false,:DualReplica=false,:DyanmicServiceEndpointIBMCloud=false,:DynamicResourceAllocation=false,:EtcdBackendQuota=false,:EventedPLEG=false,:Example2=false,:Example=false,:ExternalOIDC=false,:ExternalOIDCWithUIDAndExtraClaimMappings=false,:ExternalSnapshotMetadata=false,:GCPClusterHostedDNS=false,:GCPClusterHostedDNSInstall=false,:GCPCustomAPIEndpoints=false,:GCPCustomAPIEndpointsInstall=false,:GatewayAPI=true,:GatewayAPIController=true,:HighlyAvailableArbiter=true,:ImageModeStatusReporting=false,:ImageStreamImportMode=false,:ImageVolume=true,:IngressControllerDynamicConfigurationManager=false,:IngressControllerLBSubnetsAWS=true,:InsightsConfig=false,:InsightsConfigAPI=false,:InsightsOnDemandDataGather=false,:IrreconcilableMachineConfig=false,:KMSEncryptionProvider=false,:KMSv1=true,:MachineAPIMigration=false,:MachineAPIOperatorDisableMachineHealthCheckController=false,:MachineConfigNodes=true,:ManagedBootImages=true,:ManagedBootImagesAWS=true,:ManagedBootImagesAzure=false,:ManagedBootImagesvSphere=false,:MaxUnavailableStatefulSet=false,:MetricsCollectionProfiles=true,:MinimumKubeletVersion=false,:MixedCPUsAllocation=false,:MultiArchInstallAzure=false,:MultiDiskSetup=false,:MutatingAdmissionPolicy=false,:NetworkDiagnosticsConfig=true,:NetworkLiveMigration=true,:NetworkSegmentation=true,:NewOLM=true,:NewOLMCatalogdAPIV1Metas=false,:NewOLMOwnSingleNamespace=false,:NewOLMPreflightPermissionChecks=false,:NewOLMWebhookProviderOpenshiftServiceCA=false,:NoRegistryClusterOperations=false,:NodeSwap=false,:NutanixMultiSubnets=false,:OVNObservability=false,:OpenShiftPodSecurityAdmission=false,:PinnedImages=true,:PreconfiguredUDNAddresses=false,:ProcMountType=true,:RouteAdvertisements=true,:RouteExternalCertificate=true,:SELinuxMount=false,:ServiceAccountTokenNodeBinding=true,:SetEIPForNLBIngressController=true,:ShortCertRotation=false,:SignatureStores=false,:SigstoreImageVerification=true,:SigstoreImageVerificationPKI=false,:StoragePerformantSecurityPolicy=true,:TranslateStreamCloseWebsocketRequests=false,:UpgradeStatus=true,:UserNamespacesPodSecurityStandards=true,:UserNamespacesSupport=true,:VSphereConfigurableMaxAllowedBlockVolumesPerNode=false,:VSphereHostVMGroupZonal=false,:VSphereMixedNodeEnv=false,:VSphereMultiDisk=true,:VSphereMultiNetworks=true,:VolumeAttributesClass=false,:VolumeGroupSnapshot=false" 2025-12-12T16:15:01.653810065+00:00 stderr F I1212 16:15:01.653799 1 flags.go:64] FLAG: --flex-volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" 2025-12-12T16:15:01.653810065+00:00 stderr F I1212 16:15:01.653803 1 flags.go:64] FLAG: --help="false" 2025-12-12T16:15:01.653810065+00:00 stderr F I1212 16:15:01.653806 1 flags.go:64] FLAG: --horizontal-pod-autoscaler-cpu-initialization-period="5m0s" 2025-12-12T16:15:01.653822805+00:00 stderr F I1212 16:15:01.653809 1 flags.go:64] FLAG: 
--horizontal-pod-autoscaler-downscale-stabilization="5m0s" 2025-12-12T16:15:01.653822805+00:00 stderr F I1212 16:15:01.653813 1 flags.go:64] FLAG: --horizontal-pod-autoscaler-initial-readiness-delay="30s" 2025-12-12T16:15:01.653822805+00:00 stderr F I1212 16:15:01.653815 1 flags.go:64] FLAG: --horizontal-pod-autoscaler-sync-period="15s" 2025-12-12T16:15:01.653832345+00:00 stderr F I1212 16:15:01.653818 1 flags.go:64] FLAG: --horizontal-pod-autoscaler-tolerance="0.1" 2025-12-12T16:15:01.653839285+00:00 stderr F I1212 16:15:01.653829 1 flags.go:64] FLAG: --http2-max-streams-per-connection="0" 2025-12-12T16:15:01.653839285+00:00 stderr F I1212 16:15:01.653833 1 flags.go:64] FLAG: --kube-api-burst="300" 2025-12-12T16:15:01.653839285+00:00 stderr F I1212 16:15:01.653836 1 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" 2025-12-12T16:15:01.653847785+00:00 stderr F I1212 16:15:01.653839 1 flags.go:64] FLAG: --kube-api-qps="150" 2025-12-12T16:15:01.653847785+00:00 stderr F I1212 16:15:01.653843 1 flags.go:64] FLAG: --kubeconfig="/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig" 2025-12-12T16:15:01.653855386+00:00 stderr F I1212 16:15:01.653847 1 flags.go:64] FLAG: --large-cluster-size-threshold="50" 2025-12-12T16:15:01.653855386+00:00 stderr F I1212 16:15:01.653850 1 flags.go:64] FLAG: --leader-elect="true" 2025-12-12T16:15:01.653862526+00:00 stderr F I1212 16:15:01.653853 1 flags.go:64] FLAG: --leader-elect-lease-duration="15s" 2025-12-12T16:15:01.653862526+00:00 stderr F I1212 16:15:01.653856 1 flags.go:64] FLAG: --leader-elect-renew-deadline="12s" 2025-12-12T16:15:01.653862526+00:00 stderr F I1212 16:15:01.653859 1 flags.go:64] FLAG: --leader-elect-resource-lock="leases" 2025-12-12T16:15:01.653870136+00:00 stderr F I1212 16:15:01.653862 1 flags.go:64] FLAG: --leader-elect-resource-name="kube-controller-manager" 2025-12-12T16:15:01.653870136+00:00 stderr F I1212 16:15:01.653865 1 flags.go:64] FLAG: --leader-elect-resource-namespace="kube-system" 2025-12-12T16:15:01.653870136+00:00 stderr F I1212 16:15:01.653867 1 flags.go:64] FLAG: --leader-elect-retry-period="3s" 2025-12-12T16:15:01.653877686+00:00 stderr F I1212 16:15:01.653870 1 flags.go:64] FLAG: --leader-migration-config="" 2025-12-12T16:15:01.653877686+00:00 stderr F I1212 16:15:01.653873 1 flags.go:64] FLAG: --legacy-service-account-token-clean-up-period="8760h0m0s" 2025-12-12T16:15:01.653884846+00:00 stderr F I1212 16:15:01.653876 1 flags.go:64] FLAG: --log-flush-frequency="5s" 2025-12-12T16:15:01.653884846+00:00 stderr F I1212 16:15:01.653879 1 flags.go:64] FLAG: --log-json-info-buffer-size="0" 2025-12-12T16:15:01.653892036+00:00 stderr F I1212 16:15:01.653884 1 flags.go:64] FLAG: --log-json-split-stream="false" 2025-12-12T16:15:01.653892036+00:00 stderr F I1212 16:15:01.653887 1 flags.go:64] FLAG: --log-text-info-buffer-size="0" 2025-12-12T16:15:01.653892036+00:00 stderr F I1212 16:15:01.653889 1 flags.go:64] FLAG: --log-text-split-stream="false" 2025-12-12T16:15:01.653899547+00:00 stderr F I1212 16:15:01.653893 1 flags.go:64] FLAG: --logging-format="text" 2025-12-12T16:15:01.653899547+00:00 stderr F I1212 16:15:01.653895 1 flags.go:64] FLAG: --master="" 2025-12-12T16:15:01.653906697+00:00 stderr F I1212 16:15:01.653898 1 flags.go:64] FLAG: --max-endpoints-per-slice="100" 2025-12-12T16:15:01.653906697+00:00 stderr F I1212 16:15:01.653901 1 flags.go:64] FLAG: --min-resync-period="12h0m0s" 2025-12-12T16:15:01.653906697+00:00 stderr F I1212 16:15:01.653903 1 
flags.go:64] FLAG: --mirroring-concurrent-service-endpoint-syncs="5" 2025-12-12T16:15:01.653917707+00:00 stderr F I1212 16:15:01.653906 1 flags.go:64] FLAG: --mirroring-endpointslice-updates-batch-period="0s" 2025-12-12T16:15:01.653917707+00:00 stderr F I1212 16:15:01.653909 1 flags.go:64] FLAG: --mirroring-max-endpoints-per-subset="1000" 2025-12-12T16:15:01.653917707+00:00 stderr F I1212 16:15:01.653912 1 flags.go:64] FLAG: --namespace-sync-period="5m0s" 2025-12-12T16:15:01.653917707+00:00 stderr F I1212 16:15:01.653914 1 flags.go:64] FLAG: --node-cidr-mask-size="0" 2025-12-12T16:15:01.653925577+00:00 stderr F I1212 16:15:01.653917 1 flags.go:64] FLAG: --node-cidr-mask-size-ipv4="0" 2025-12-12T16:15:01.653925577+00:00 stderr F I1212 16:15:01.653920 1 flags.go:64] FLAG: --node-cidr-mask-size-ipv6="0" 2025-12-12T16:15:01.653925577+00:00 stderr F I1212 16:15:01.653922 1 flags.go:64] FLAG: --node-eviction-rate="0.1" 2025-12-12T16:15:01.653933337+00:00 stderr F I1212 16:15:01.653925 1 flags.go:64] FLAG: --node-monitor-grace-period="50s" 2025-12-12T16:15:01.653933337+00:00 stderr F I1212 16:15:01.653928 1 flags.go:64] FLAG: --node-monitor-period="5s" 2025-12-12T16:15:01.653940457+00:00 stderr F I1212 16:15:01.653931 1 flags.go:64] FLAG: --node-startup-grace-period="1m0s" 2025-12-12T16:15:01.653940457+00:00 stderr F I1212 16:15:01.653935 1 flags.go:64] FLAG: --node-sync-period="0s" 2025-12-12T16:15:01.653940457+00:00 stderr F I1212 16:15:01.653937 1 flags.go:64] FLAG: --openshift-config="/etc/kubernetes/static-pod-resources/configmaps/config/config.yaml" 2025-12-12T16:15:01.653948208+00:00 stderr F I1212 16:15:01.653940 1 flags.go:64] FLAG: --permit-address-sharing="false" 2025-12-12T16:15:01.653948208+00:00 stderr F I1212 16:15:01.653943 1 flags.go:64] FLAG: --permit-port-sharing="false" 2025-12-12T16:15:01.653955328+00:00 stderr F I1212 16:15:01.653946 1 flags.go:64] FLAG: --profiling="true" 2025-12-12T16:15:01.653955328+00:00 stderr F I1212 16:15:01.653949 1 flags.go:64] FLAG: --pv-recycler-increment-timeout-nfs="30" 2025-12-12T16:15:01.653955328+00:00 stderr F I1212 16:15:01.653952 1 flags.go:64] FLAG: --pv-recycler-minimum-timeout-hostpath="60" 2025-12-12T16:15:01.653962798+00:00 stderr F I1212 16:15:01.653954 1 flags.go:64] FLAG: --pv-recycler-minimum-timeout-nfs="300" 2025-12-12T16:15:01.653962798+00:00 stderr F I1212 16:15:01.653957 1 flags.go:64] FLAG: --pv-recycler-pod-template-filepath-hostpath="/etc/kubernetes/static-pod-resources/configmaps/recycler-config/recycler-pod.yaml" 2025-12-12T16:15:01.653969988+00:00 stderr F I1212 16:15:01.653960 1 flags.go:64] FLAG: --pv-recycler-pod-template-filepath-nfs="/etc/kubernetes/static-pod-resources/configmaps/recycler-config/recycler-pod.yaml" 2025-12-12T16:15:01.653969988+00:00 stderr F I1212 16:15:01.653964 1 flags.go:64] FLAG: --pv-recycler-timeout-increment-hostpath="30" 2025-12-12T16:15:01.653969988+00:00 stderr F I1212 16:15:01.653966 1 flags.go:64] FLAG: --pvclaimbinder-sync-period="15s" 2025-12-12T16:15:01.653977498+00:00 stderr F I1212 16:15:01.653969 1 flags.go:64] FLAG: --requestheader-allowed-names="[]" 2025-12-12T16:15:01.653977498+00:00 stderr F I1212 16:15:01.653972 1 flags.go:64] FLAG: --requestheader-client-ca-file="/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" 2025-12-12T16:15:01.653984628+00:00 stderr F I1212 16:15:01.653976 1 flags.go:64] FLAG: --requestheader-extra-headers-prefix="[x-remote-extra-]" 2025-12-12T16:15:01.653984628+00:00 stderr F I1212 16:15:01.653980 1 flags.go:64] FLAG: 
--requestheader-group-headers="[x-remote-group]" 2025-12-12T16:15:01.653991789+00:00 stderr F I1212 16:15:01.653983 1 flags.go:64] FLAG: --requestheader-uid-headers="[]" 2025-12-12T16:15:01.653991789+00:00 stderr F I1212 16:15:01.653987 1 flags.go:64] FLAG: --requestheader-username-headers="[x-remote-user]" 2025-12-12T16:15:01.654003759+00:00 stderr F I1212 16:15:01.653992 1 flags.go:64] FLAG: --resource-quota-sync-period="5m0s" 2025-12-12T16:15:01.654003759+00:00 stderr F I1212 16:15:01.653994 1 flags.go:64] FLAG: --root-ca-file="/etc/kubernetes/static-pod-resources/configmaps/serviceaccount-ca/ca-bundle.crt" 2025-12-12T16:15:01.654003759+00:00 stderr F I1212 16:15:01.653998 1 flags.go:64] FLAG: --route-reconciliation-period="10s" 2025-12-12T16:15:01.654003759+00:00 stderr F I1212 16:15:01.654000 1 flags.go:64] FLAG: --secondary-node-eviction-rate="0.01" 2025-12-12T16:15:01.654013199+00:00 stderr F I1212 16:15:01.654004 1 flags.go:64] FLAG: --secure-port="10257" 2025-12-12T16:15:01.654013199+00:00 stderr F I1212 16:15:01.654006 1 flags.go:64] FLAG: --service-account-private-key-file="/etc/kubernetes/static-pod-resources/secrets/service-account-private-key/service-account.key" 2025-12-12T16:15:01.654013199+00:00 stderr F I1212 16:15:01.654010 1 flags.go:64] FLAG: --service-cluster-ip-range="10.217.4.0/23" 2025-12-12T16:15:01.654028009+00:00 stderr F I1212 16:15:01.654012 1 flags.go:64] FLAG: --show-hidden-metrics-for-version="" 2025-12-12T16:15:01.654028009+00:00 stderr F I1212 16:15:01.654015 1 flags.go:64] FLAG: --terminated-pod-gc-threshold="12500" 2025-12-12T16:15:01.654028009+00:00 stderr F I1212 16:15:01.654018 1 flags.go:64] FLAG: --tls-cert-file="/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt" 2025-12-12T16:15:01.654036620+00:00 stderr F I1212 16:15:01.654021 1 flags.go:64] FLAG: --tls-cipher-suites="[TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256]" 2025-12-12T16:15:01.654036620+00:00 stderr F I1212 16:15:01.654030 1 flags.go:64] FLAG: --tls-min-version="VersionTLS12" 2025-12-12T16:15:01.654036620+00:00 stderr F I1212 16:15:01.654033 1 flags.go:64] FLAG: --tls-private-key-file="/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" 2025-12-12T16:15:01.654044190+00:00 stderr F I1212 16:15:01.654037 1 flags.go:64] FLAG: --tls-sni-cert-key="[]" 2025-12-12T16:15:01.654044190+00:00 stderr F I1212 16:15:01.654040 1 flags.go:64] FLAG: --unhealthy-zone-threshold="0.55" 2025-12-12T16:15:01.654051470+00:00 stderr F I1212 16:15:01.654044 1 flags.go:64] FLAG: --unsupported-kube-api-over-localhost="false" 2025-12-12T16:15:01.654051470+00:00 stderr F I1212 16:15:01.654047 1 flags.go:64] FLAG: --use-service-account-credentials="true" 2025-12-12T16:15:01.654058560+00:00 stderr F I1212 16:15:01.654049 1 flags.go:64] FLAG: --v="2" 2025-12-12T16:15:01.654058560+00:00 stderr F I1212 16:15:01.654054 1 flags.go:64] FLAG: --version="false" 2025-12-12T16:15:01.654065630+00:00 stderr F I1212 16:15:01.654057 1 flags.go:64] FLAG: --vmodule="" 2025-12-12T16:15:01.654216373+00:00 stderr F W1212 16:15:01.654166 1 feature_gate.go:328] unrecognized feature gate: Example2 2025-12-12T16:15:01.654216373+00:00 stderr F W1212 16:15:01.654190 1 feature_gate.go:328] unrecognized feature gate: KMSEncryptionProvider 
2025-12-12T16:15:01.654216373+00:00 stderr F W1212 16:15:01.654193 1 feature_gate.go:328] unrecognized feature gate: NewOLMPreflightPermissionChecks 2025-12-12T16:15:01.654216373+00:00 stderr F W1212 16:15:01.654196 1 feature_gate.go:328] unrecognized feature gate: NewOLMWebhookProviderOpenshiftServiceCA 2025-12-12T16:15:01.654216373+00:00 stderr F W1212 16:15:01.654199 1 feature_gate.go:328] unrecognized feature gate: PreconfiguredUDNAddresses 2025-12-12T16:15:01.654216373+00:00 stderr F W1212 16:15:01.654202 1 feature_gate.go:328] unrecognized feature gate: AlibabaPlatform 2025-12-12T16:15:01.654216373+00:00 stderr F W1212 16:15:01.654204 1 feature_gate.go:328] unrecognized feature gate: RouteAdvertisements 2025-12-12T16:15:01.654216373+00:00 stderr F W1212 16:15:01.654207 1 feature_gate.go:328] unrecognized feature gate: ImageModeStatusReporting 2025-12-12T16:15:01.654216373+00:00 stderr F W1212 16:15:01.654210 1 feature_gate.go:328] unrecognized feature gate: AzureClusterHostedDNSInstall 2025-12-12T16:15:01.654216373+00:00 stderr F W1212 16:15:01.654212 1 feature_gate.go:328] unrecognized feature gate: MachineAPIMigration 2025-12-12T16:15:01.654245804+00:00 stderr F W1212 16:15:01.654214 1 feature_gate.go:328] unrecognized feature gate: AdminNetworkPolicy 2025-12-12T16:15:01.654245804+00:00 stderr F W1212 16:15:01.654217 1 feature_gate.go:328] unrecognized feature gate: AWSServiceLBNetworkSecurityGroup 2025-12-12T16:15:01.654245804+00:00 stderr F W1212 16:15:01.654219 1 feature_gate.go:328] unrecognized feature gate: BootcNodeManagement 2025-12-12T16:15:01.654245804+00:00 stderr F W1212 16:15:01.654221 1 feature_gate.go:328] unrecognized feature gate: MetricsCollectionProfiles 2025-12-12T16:15:01.654245804+00:00 stderr F W1212 16:15:01.654224 1 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpointsInstall 2025-12-12T16:15:01.654245804+00:00 stderr F W1212 16:15:01.654227 1 feature_gate.go:349] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
2025-12-12T16:15:01.654245804+00:00 stderr F W1212 16:15:01.654230 1 feature_gate.go:328] unrecognized feature gate: AWSDedicatedHosts 2025-12-12T16:15:01.654245804+00:00 stderr F W1212 16:15:01.654232 1 feature_gate.go:328] unrecognized feature gate: Example 2025-12-12T16:15:01.654245804+00:00 stderr F W1212 16:15:01.654235 1 feature_gate.go:328] unrecognized feature gate: DNSNameResolver 2025-12-12T16:15:01.654245804+00:00 stderr F W1212 16:15:01.654237 1 feature_gate.go:328] unrecognized feature gate: NetworkLiveMigration 2025-12-12T16:15:01.654245804+00:00 stderr F W1212 16:15:01.654240 1 feature_gate.go:328] unrecognized feature gate: VSphereMultiNetworks 2025-12-12T16:15:01.654245804+00:00 stderr F W1212 16:15:01.654242 1 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstall 2025-12-12T16:15:01.654259434+00:00 stderr F W1212 16:15:01.654244 1 feature_gate.go:328] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController 2025-12-12T16:15:01.654259434+00:00 stderr F W1212 16:15:01.654251 1 feature_gate.go:328] unrecognized feature gate: InsightsOnDemandDataGather 2025-12-12T16:15:01.654259434+00:00 stderr F W1212 16:15:01.654253 1 feature_gate.go:328] unrecognized feature gate: ExternalOIDC 2025-12-12T16:15:01.654259434+00:00 stderr F W1212 16:15:01.654256 1 feature_gate.go:328] unrecognized feature gate: AzureWorkloadIdentity 2025-12-12T16:15:01.654269675+00:00 stderr F W1212 16:15:01.654258 1 feature_gate.go:328] unrecognized feature gate: IngressControllerLBSubnetsAWS 2025-12-12T16:15:01.654269675+00:00 stderr F W1212 16:15:01.654260 1 feature_gate.go:328] unrecognized feature gate: CPMSMachineNamePrefix 2025-12-12T16:15:01.654269675+00:00 stderr F W1212 16:15:01.654263 1 feature_gate.go:328] unrecognized feature gate: AdditionalRoutingCapabilities 2025-12-12T16:15:01.654269675+00:00 stderr F W1212 16:15:01.654265 1 feature_gate.go:328] unrecognized feature gate: MultiDiskSetup 2025-12-12T16:15:01.654279485+00:00 stderr F W1212 16:15:01.654267 1 feature_gate.go:328] unrecognized feature gate: InsightsConfigAPI 2025-12-12T16:15:01.654279485+00:00 stderr F W1212 16:15:01.654270 1 feature_gate.go:328] unrecognized feature gate: NoRegistryClusterOperations 2025-12-12T16:15:01.654279485+00:00 stderr F W1212 16:15:01.654273 1 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNSInstall 2025-12-12T16:15:01.654279485+00:00 stderr F W1212 16:15:01.654275 1 feature_gate.go:328] unrecognized feature gate: VSphereHostVMGroupZonal 2025-12-12T16:15:01.654288185+00:00 stderr F W1212 16:15:01.654277 1 feature_gate.go:328] unrecognized feature gate: ImageStreamImportMode 2025-12-12T16:15:01.654288185+00:00 stderr F W1212 16:15:01.654280 1 feature_gate.go:328] unrecognized feature gate: InsightsConfig 2025-12-12T16:15:01.654288185+00:00 stderr F W1212 16:15:01.654282 1 feature_gate.go:328] unrecognized feature gate: SignatureStores 2025-12-12T16:15:01.654288185+00:00 stderr F W1212 16:15:01.654285 1 feature_gate.go:328] unrecognized feature gate: UpgradeStatus 2025-12-12T16:15:01.654299965+00:00 stderr F W1212 16:15:01.654287 1 feature_gate.go:328] unrecognized feature gate: AzureDedicatedHosts 2025-12-12T16:15:01.654299965+00:00 stderr F W1212 16:15:01.654290 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImages 2025-12-12T16:15:01.654299965+00:00 stderr F W1212 16:15:01.654292 1 feature_gate.go:328] unrecognized feature gate: NetworkSegmentation 2025-12-12T16:15:01.654299965+00:00 stderr F W1212 16:15:01.654294 1 feature_gate.go:328] 
unrecognized feature gate: SigstoreImageVerificationPKI 2025-12-12T16:15:01.654299965+00:00 stderr F W1212 16:15:01.654296 1 feature_gate.go:328] unrecognized feature gate: NutanixMultiSubnets 2025-12-12T16:15:01.654308115+00:00 stderr F W1212 16:15:01.654299 1 feature_gate.go:328] unrecognized feature gate: BootImageSkewEnforcement 2025-12-12T16:15:01.654308115+00:00 stderr F W1212 16:15:01.654302 1 feature_gate.go:328] unrecognized feature gate: AutomatedEtcdBackup 2025-12-12T16:15:01.654308115+00:00 stderr F W1212 16:15:01.654305 1 feature_gate.go:328] unrecognized feature gate: ShortCertRotation 2025-12-12T16:15:01.654315656+00:00 stderr F W1212 16:15:01.654307 1 feature_gate.go:328] unrecognized feature gate: DyanmicServiceEndpointIBMCloud 2025-12-12T16:15:01.654315656+00:00 stderr F W1212 16:15:01.654310 1 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpoints 2025-12-12T16:15:01.654315656+00:00 stderr F W1212 16:15:01.654312 1 feature_gate.go:328] unrecognized feature gate: GatewayAPIController 2025-12-12T16:15:01.654323426+00:00 stderr F W1212 16:15:01.654314 1 feature_gate.go:328] unrecognized feature gate: NewOLMCatalogdAPIV1Metas 2025-12-12T16:15:01.654323426+00:00 stderr F W1212 16:15:01.654317 1 feature_gate.go:328] unrecognized feature gate: NewOLM 2025-12-12T16:15:01.654323426+00:00 stderr F W1212 16:15:01.654319 1 feature_gate.go:328] unrecognized feature gate: VSphereMultiDisk 2025-12-12T16:15:01.654332636+00:00 stderr F W1212 16:15:01.654321 1 feature_gate.go:328] unrecognized feature gate: ConsolePluginContentSecurityPolicy 2025-12-12T16:15:01.654332636+00:00 stderr F W1212 16:15:01.654324 1 feature_gate.go:328] unrecognized feature gate: ClusterMonitoringConfig 2025-12-12T16:15:01.654332636+00:00 stderr F W1212 16:15:01.654326 1 feature_gate.go:328] unrecognized feature gate: GatewayAPI 2025-12-12T16:15:01.654332636+00:00 stderr F W1212 16:15:01.654329 1 feature_gate.go:328] unrecognized feature gate: DualReplica 2025-12-12T16:15:01.654341776+00:00 stderr F W1212 16:15:01.654333 1 feature_gate.go:351] Setting GA feature gate ServiceAccountTokenNodeBinding=true. It will be removed in a future release. 
2025-12-12T16:15:01.654341776+00:00 stderr F W1212 16:15:01.654336 1 feature_gate.go:328] unrecognized feature gate: IngressControllerDynamicConfigurationManager 2025-12-12T16:15:01.654350886+00:00 stderr F W1212 16:15:01.654341 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAWS 2025-12-12T16:15:01.654350886+00:00 stderr F W1212 16:15:01.654344 1 feature_gate.go:328] unrecognized feature gate: HighlyAvailableArbiter 2025-12-12T16:15:01.654350886+00:00 stderr F W1212 16:15:01.654346 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAzure 2025-12-12T16:15:01.654359307+00:00 stderr F W1212 16:15:01.654349 1 feature_gate.go:328] unrecognized feature gate: AzureMultiDisk 2025-12-12T16:15:01.654359307+00:00 stderr F W1212 16:15:01.654352 1 feature_gate.go:328] unrecognized feature gate: MixedCPUsAllocation 2025-12-12T16:15:01.654359307+00:00 stderr F W1212 16:15:01.654354 1 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNSInstall 2025-12-12T16:15:01.654359307+00:00 stderr F W1212 16:15:01.654356 1 feature_gate.go:328] unrecognized feature gate: VSphereMixedNodeEnv 2025-12-12T16:15:01.654371247+00:00 stderr F W1212 16:15:01.654359 1 feature_gate.go:328] unrecognized feature gate: ExternalSnapshotMetadata 2025-12-12T16:15:01.654371247+00:00 stderr F W1212 16:15:01.654361 1 feature_gate.go:328] unrecognized feature gate: NewOLMOwnSingleNamespace 2025-12-12T16:15:01.654371247+00:00 stderr F W1212 16:15:01.654364 1 feature_gate.go:328] unrecognized feature gate: ExternalOIDCWithUIDAndExtraClaimMappings 2025-12-12T16:15:01.654371247+00:00 stderr F W1212 16:15:01.654366 1 feature_gate.go:328] unrecognized feature gate: MultiArchInstallAzure 2025-12-12T16:15:01.654371247+00:00 stderr F W1212 16:15:01.654368 1 feature_gate.go:328] unrecognized feature gate: MachineConfigNodes 2025-12-12T16:15:01.654379827+00:00 stderr F W1212 16:15:01.654371 1 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNS 2025-12-12T16:15:01.654379827+00:00 stderr F W1212 16:15:01.654373 1 feature_gate.go:328] unrecognized feature gate: OpenShiftPodSecurityAdmission 2025-12-12T16:15:01.654379827+00:00 stderr F W1212 16:15:01.654375 1 feature_gate.go:328] unrecognized feature gate: ClusterVersionOperatorConfiguration 2025-12-12T16:15:01.654387717+00:00 stderr F W1212 16:15:01.654378 1 feature_gate.go:328] unrecognized feature gate: BuildCSIVolumes 2025-12-12T16:15:01.654387717+00:00 stderr F W1212 16:15:01.654381 1 feature_gate.go:328] unrecognized feature gate: PinnedImages 2025-12-12T16:15:01.654387717+00:00 stderr F W1212 16:15:01.654383 1 feature_gate.go:328] unrecognized feature gate: VolumeGroupSnapshot 2025-12-12T16:15:01.654387717+00:00 stderr F W1212 16:15:01.654385 1 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerification 2025-12-12T16:15:01.654395577+00:00 stderr F W1212 16:15:01.654388 1 feature_gate.go:328] unrecognized feature gate: VSphereConfigurableMaxAllowedBlockVolumesPerNode 2025-12-12T16:15:01.654395577+00:00 stderr F W1212 16:15:01.654390 1 feature_gate.go:328] unrecognized feature gate: EtcdBackendQuota 2025-12-12T16:15:01.654395577+00:00 stderr F W1212 16:15:01.654392 1 feature_gate.go:328] unrecognized feature gate: SetEIPForNLBIngressController 2025-12-12T16:15:01.654403098+00:00 stderr F W1212 16:15:01.654395 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesvSphere 2025-12-12T16:15:01.654403098+00:00 stderr F W1212 16:15:01.654397 1 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNS 
2025-12-12T16:15:01.654403098+00:00 stderr F W1212 16:15:01.654400 1 feature_gate.go:328] unrecognized feature gate: NetworkDiagnosticsConfig 2025-12-12T16:15:01.654410528+00:00 stderr F W1212 16:15:01.654402 1 feature_gate.go:328] unrecognized feature gate: IrreconcilableMachineConfig 2025-12-12T16:15:01.654410528+00:00 stderr F W1212 16:15:01.654404 1 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstallIBMCloud 2025-12-12T16:15:01.654410528+00:00 stderr F W1212 16:15:01.654407 1 feature_gate.go:328] unrecognized feature gate: OVNObservability 2025-12-12T16:15:01.663933225+00:00 stderr F I1212 16:15:01.663867 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" 2025-12-12T16:15:02.380381734+00:00 stderr F I1212 16:15:02.380323 1 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:15:02.380702261+00:00 stderr F I1212 16:15:02.380678 1 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" 2025-12-12T16:15:02.388228044+00:00 stderr F I1212 16:15:02.388117 1 controllermanager.go:203] "Starting" version="v1.33.5" 2025-12-12T16:15:02.388228044+00:00 stderr F I1212 16:15:02.388163 1 controllermanager.go:205] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" 2025-12-12T16:15:02.390814091+00:00 stderr F I1212 16:15:02.390771 1 dynamic_cafile_content.go:161] "Starting controller" name="request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" 2025-12-12T16:15:02.391189939+00:00 stderr F I1212 16:15:02.390822 1 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:15:02.392147410+00:00 stderr F I1212 16:15:02.392106 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" 2025-12-12T16:15:02.392232462+00:00 stderr F I1212 16:15:02.392148 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:15:02.392122979 +0000 UTC))" 2025-12-12T16:15:02.392247962+00:00 stderr F I1212 16:15:02.392234 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:15:02.392224951 +0000 UTC))" 2025-12-12T16:15:02.392277633+00:00 stderr F I1212 16:15:02.392254 1 tlsconfig.go:181] "Loaded client CA" index=2 
certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:15:02.392246542 +0000 UTC))" 2025-12-12T16:15:02.392277633+00:00 stderr F I1212 16:15:02.392270 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:15:02.392265992 +0000 UTC))" 2025-12-12T16:15:02.392303163+00:00 stderr F I1212 16:15:02.392283 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:15:02.392277563 +0000 UTC))" 2025-12-12T16:15:02.392312063+00:00 stderr F I1212 16:15:02.392299 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:15:02.392295203 +0000 UTC))" 2025-12-12T16:15:02.392331134+00:00 stderr F I1212 16:15:02.392314 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:15:02.392308303 +0000 UTC))" 2025-12-12T16:15:02.392358234+00:00 stderr F I1212 16:15:02.392333 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:15:02.392324964 +0000 UTC))" 2025-12-12T16:15:02.392517858+00:00 stderr F I1212 16:15:02.392490 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"kube-controller-manager.openshift-kube-controller-manager.svc\" [serving] validServingFor=[kube-controller-manager.openshift-kube-controller-manager.svc,kube-controller-manager.openshift-kube-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:15:02.392481977 
+0000 UTC))" 2025-12-12T16:15:02.392657641+00:00 stderr F I1212 16:15:02.392634 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556102\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556102\" (2025-12-12 15:15:01 +0000 UTC to 2028-12-12 15:15:01 +0000 UTC (now=2025-12-12 16:15:02.39262723 +0000 UTC))" 2025-12-12T16:15:02.392683911+00:00 stderr F I1212 16:15:02.392664 1 secure_serving.go:211] Serving securely on [::]:10257 2025-12-12T16:15:02.392943427+00:00 stderr F I1212 16:15:02.392906 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:15:02.393052469+00:00 stderr F I1212 16:15:02.393030 1 leaderelection.go:257] attempting to acquire leader lease kube-system/kube-controller-manager... 2025-12-12T16:15:02.394223215+00:00 stderr F E1212 16:15:02.394167 1 leaderelection.go:436] error retrieving resource lock kube-system/kube-controller-manager: Get "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-system/leases/kube-controller-manager?timeout=6s": dial tcp 38.102.83.180:6443: connect: connection refused 2025-12-12T16:15:13.308199657+00:00 stderr F E1212 16:15:13.307978 1 leaderelection.go:436] error retrieving resource lock kube-system/kube-controller-manager: Get "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-system/leases/kube-controller-manager?timeout=6s": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers) 2025-12-12T16:15:13.308199657+00:00 stderr F I1212 16:15:13.307980 1 transport.go:356] "Warning: unable to cancel request" roundTripperType="*app.rejectIfNotReadyHeaderRT" 2025-12-12T16:15:17.032701134+00:00 stderr F I1212 16:15:17.032632 1 leaderelection.go:271] successfully acquired lease kube-system/kube-controller-manager 2025-12-12T16:15:17.032990761+00:00 stderr F I1212 16:15:17.032870 1 event.go:389] "Event occurred" object="kube-system/kube-controller-manager" fieldPath="" kind="Lease" apiVersion="coordination.k8s.io/v1" type="Normal" reason="LeaderElection" message="crc_1f58dfcf-aafc-4c2b-9b31-87d6b1fec2af became leader" 2025-12-12T16:15:17.036473455+00:00 stderr F I1212 16:15:17.036438 1 controllermanager.go:796] "Starting controller" controller="serviceaccount-token-controller" 2025-12-12T16:15:17.039771364+00:00 stderr F I1212 16:15:17.039683 1 controllermanager.go:827] "Started controller" controller="serviceaccount-token-controller" 2025-12-12T16:15:17.039771364+00:00 stderr F I1212 16:15:17.039713 1 controllermanager.go:796] "Starting controller" controller="certificatesigningrequest-cleaner-controller" 2025-12-12T16:15:17.039892757+00:00 stderr F I1212 16:15:17.039863 1 shared_informer.go:350] "Waiting for caches to sync" controller="tokens" 2025-12-12T16:15:17.044040237+00:00 stderr F I1212 16:15:17.043973 1 controllermanager.go:827] "Started controller" controller="certificatesigningrequest-cleaner-controller" 2025-12-12T16:15:17.044040237+00:00 stderr F I1212 16:15:17.044025 1 controllermanager.go:796] "Starting controller" controller="persistentvolume-attach-detach-controller" 2025-12-12T16:15:17.044156120+00:00 stderr F I1212 16:15:17.044121 1 cleaner.go:83] "Starting CSR cleaner controller" logger="certificatesigningrequest-cleaner-controller" 2025-12-12T16:15:17.051198010+00:00 stderr F I1212 16:15:17.051104 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" 
reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.055336449+00:00 stderr F W1212 16:15:17.055289 1 probe.go:272] Flexvolume plugin directory at /etc/kubernetes/kubelet-plugins/volume/exec does not exist. Recreating. 2025-12-12T16:15:17.058919236+00:00 stderr F I1212 16:15:17.058874 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/fc" 2025-12-12T16:15:17.058937696+00:00 stderr F I1212 16:15:17.058931 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" 2025-12-12T16:15:17.058985177+00:00 stderr F I1212 16:15:17.058966 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/csi" 2025-12-12T16:15:17.059303425+00:00 stderr F I1212 16:15:17.059278 1 controllermanager.go:827] "Started controller" controller="persistentvolume-attach-detach-controller" 2025-12-12T16:15:17.059385627+00:00 stderr F I1212 16:15:17.059307 1 controllermanager.go:779] "Controller is disabled by a feature gate" controller="storageversion-garbage-collector-controller" requiredFeatureGates=["APIServerIdentity","StorageVersionAPI"] 2025-12-12T16:15:17.059395707+00:00 stderr F I1212 16:15:17.059384 1 controllermanager.go:796] "Starting controller" controller="resourcequota-controller" 2025-12-12T16:15:17.059936600+00:00 stderr F I1212 16:15:17.059874 1 attach_detach_controller.go:338] "Starting attach detach controller" logger="persistentvolume-attach-detach-controller" 2025-12-12T16:15:17.059949830+00:00 stderr F I1212 16:15:17.059935 1 shared_informer.go:350] "Waiting for caches to sync" controller="attach detach" 2025-12-12T16:15:17.096744357+00:00 stderr F E1212 16:15:17.096659 1 resource_quota_controller.go:175] "Unhandled Error" err="initial discovery check failure, continuing and counting on future sync update: unable to retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, oauth.openshift.io/v1: stale GroupVersion discovery: oauth.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1, user.openshift.io/v1: stale GroupVersion discovery: user.openshift.io/v1" logger="UnhandledError" 2025-12-12T16:15:17.098333125+00:00 stderr F I1212 16:15:17.098298 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="controllerrevisions.apps" 2025-12-12T16:15:17.098399837+00:00 stderr F I1212 16:15:17.098377 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="httproutes.gateway.networking.k8s.io" 2025-12-12T16:15:17.098429797+00:00 stderr F I1212 16:15:17.098414 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="servicemonitors.monitoring.coreos.com" 2025-12-12T16:15:17.098459108+00:00 stderr F I1212 
16:15:17.098444 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="thanosrulers.monitoring.coreos.com" 2025-12-12T16:15:17.098486879+00:00 stderr F I1212 16:15:17.098470 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="statefulsets.apps" 2025-12-12T16:15:17.098549390+00:00 stderr F I1212 16:15:17.098523 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="egressrouters.network.operator.openshift.io" 2025-12-12T16:15:17.098630682+00:00 stderr F I1212 16:15:17.098606 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="subscriptions.operators.coreos.com" 2025-12-12T16:15:17.098653763+00:00 stderr F I1212 16:15:17.098637 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="replicasets.apps" 2025-12-12T16:15:17.098676713+00:00 stderr F I1212 16:15:17.098662 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="cronjobs.batch" 2025-12-12T16:15:17.098717474+00:00 stderr F I1212 16:15:17.098702 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="projecthelmchartrepositories.helm.openshift.io" 2025-12-12T16:15:17.098737215+00:00 stderr F I1212 16:15:17.098724 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="egressfirewalls.k8s.ovn.org" 2025-12-12T16:15:17.098765296+00:00 stderr F I1212 16:15:17.098750 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="catalogsources.operators.coreos.com" 2025-12-12T16:15:17.098788886+00:00 stderr F I1212 16:15:17.098774 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="metal3remediationtemplates.infrastructure.cluster.x-k8s.io" 2025-12-12T16:15:17.098808317+00:00 stderr F I1212 16:15:17.098795 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="metal3remediations.infrastructure.cluster.x-k8s.io" 2025-12-12T16:15:17.099040452+00:00 stderr F I1212 16:15:17.099010 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="ingresses.networking.k8s.io" 2025-12-12T16:15:17.099237387+00:00 stderr F I1212 16:15:17.099215 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="roles.rbac.authorization.k8s.io" 2025-12-12T16:15:17.099247917+00:00 stderr F I1212 16:15:17.099241 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="egressqoses.k8s.ovn.org" 2025-12-12T16:15:17.099319279+00:00 stderr F I1212 16:15:17.099298 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="dnsrecords.ingress.operator.openshift.io" 2025-12-12T16:15:17.099326469+00:00 stderr F I1212 16:15:17.099321 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="podmonitors.monitoring.coreos.com" 
2025-12-12T16:15:17.099355020+00:00 stderr F I1212 16:15:17.099340 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="operatorpkis.network.operator.openshift.io" 2025-12-12T16:15:17.099382940+00:00 stderr F I1212 16:15:17.099368 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="operatorconditions.operators.coreos.com" 2025-12-12T16:15:17.099407251+00:00 stderr F I1212 16:15:17.099393 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="deployments.apps" 2025-12-12T16:15:17.099445082+00:00 stderr F I1212 16:15:17.099430 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="networkpolicies.networking.k8s.io" 2025-12-12T16:15:17.099464712+00:00 stderr F I1212 16:15:17.099449 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="leases.coordination.k8s.io" 2025-12-12T16:15:17.099490783+00:00 stderr F I1212 16:15:17.099472 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="network-attachment-definitions.k8s.cni.cncf.io" 2025-12-12T16:15:17.099515764+00:00 stderr F I1212 16:15:17.099501 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="userdefinednetworks.k8s.ovn.org" 2025-12-12T16:15:17.099546504+00:00 stderr F I1212 16:15:17.099531 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="nodeslicepools.whereabouts.cni.cncf.io" 2025-12-12T16:15:17.099555165+00:00 stderr F I1212 16:15:17.099529 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.099787910+00:00 stderr F I1212 16:15:17.099764 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="serviceaccounts" 2025-12-12T16:15:17.099787910+00:00 stderr F I1212 16:15:17.099784 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="csistoragecapacities.storage.k8s.io" 2025-12-12T16:15:17.099827501+00:00 stderr F I1212 16:15:17.099804 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="machines.machine.openshift.io" 2025-12-12T16:15:17.099834521+00:00 stderr F I1212 16:15:17.099826 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="alertingrules.monitoring.openshift.io" 2025-12-12T16:15:17.099864772+00:00 stderr F I1212 16:15:17.099849 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="alertrelabelconfigs.monitoring.openshift.io" 2025-12-12T16:15:17.099871332+00:00 stderr F I1212 16:15:17.099866 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="horizontalpodautoscalers.autoscaling" 2025-12-12T16:15:17.099921263+00:00 stderr F I1212 16:15:17.099900 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" 
resource="machineautoscalers.autoscaling.openshift.io" 2025-12-12T16:15:17.099955374+00:00 stderr F I1212 16:15:17.099940 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="podnetworkconnectivitychecks.controlplane.operator.openshift.io" 2025-12-12T16:15:17.099985245+00:00 stderr F I1212 16:15:17.099969 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="operatorgroups.operators.coreos.com" 2025-12-12T16:15:17.099991845+00:00 stderr F I1212 16:15:17.099985 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="endpoints" 2025-12-12T16:15:17.100015556+00:00 stderr F I1212 16:15:17.100001 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="podtemplates" 2025-12-12T16:15:17.100045326+00:00 stderr F I1212 16:15:17.100030 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="gateways.gateway.networking.k8s.io" 2025-12-12T16:15:17.100064727+00:00 stderr F I1212 16:15:17.100052 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="installplans.operators.coreos.com" 2025-12-12T16:15:17.100091727+00:00 stderr F I1212 16:15:17.100077 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="daemonsets.apps" 2025-12-12T16:15:17.100118888+00:00 stderr F I1212 16:15:17.100104 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="grpcroutes.gateway.networking.k8s.io" 2025-12-12T16:15:17.100140559+00:00 stderr F I1212 16:15:17.100124 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="ipaddresses.ipam.cluster.x-k8s.io" 2025-12-12T16:15:17.100161519+00:00 stderr F I1212 16:15:17.100146 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="ipamclaims.k8s.cni.cncf.io" 2025-12-12T16:15:17.100202200+00:00 stderr F I1212 16:15:17.100168 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="machinesets.machine.openshift.io" 2025-12-12T16:15:17.100213910+00:00 stderr F I1212 16:15:17.100207 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="prometheusrules.monitoring.coreos.com" 2025-12-12T16:15:17.100247021+00:00 stderr F I1212 16:15:17.100229 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="imagepolicies.config.openshift.io" 2025-12-12T16:15:17.100275412+00:00 stderr F I1212 16:15:17.100254 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="referencegrants.gateway.networking.k8s.io" 2025-12-12T16:15:17.100283932+00:00 stderr F I1212 16:15:17.100275 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="probes.monitoring.coreos.com" 2025-12-12T16:15:17.100327183+00:00 stderr F I1212 16:15:17.100300 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" 
logger="resourcequota-controller" resource="prometheuses.monitoring.coreos.com" 2025-12-12T16:15:17.100336153+00:00 stderr F I1212 16:15:17.100327 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="jobs.batch" 2025-12-12T16:15:17.100377344+00:00 stderr F I1212 16:15:17.100358 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="clusterserviceversions.operators.coreos.com" 2025-12-12T16:15:17.100452876+00:00 stderr F I1212 16:15:17.100387 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="ippools.whereabouts.cni.cncf.io" 2025-12-12T16:15:17.100509928+00:00 stderr F I1212 16:15:17.100493 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="controlplanemachinesets.machine.openshift.io" 2025-12-12T16:15:17.100532528+00:00 stderr F I1212 16:15:17.100518 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="machinehealthchecks.machine.openshift.io" 2025-12-12T16:15:17.100583879+00:00 stderr F I1212 16:15:17.100568 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="alertmanagers.monitoring.coreos.com" 2025-12-12T16:15:17.100590950+00:00 stderr F I1212 16:15:17.100584 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="limitranges" 2025-12-12T16:15:17.100617780+00:00 stderr F I1212 16:15:17.100603 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="poddisruptionbudgets.policy" 2025-12-12T16:15:17.100634311+00:00 stderr F I1212 16:15:17.100618 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="rolebindings.rbac.authorization.k8s.io" 2025-12-12T16:15:17.100664581+00:00 stderr F I1212 16:15:17.100637 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="endpointslices.discovery.k8s.io" 2025-12-12T16:15:17.100683662+00:00 stderr F I1212 16:15:17.100672 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="egressservices.k8s.ovn.org" 2025-12-12T16:15:17.100707582+00:00 stderr F I1212 16:15:17.100693 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="alertmanagerconfigs.monitoring.coreos.com" 2025-12-12T16:15:17.100726813+00:00 stderr F I1212 16:15:17.100715 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="overlappingrangeipreservations.whereabouts.cni.cncf.io" 2025-12-12T16:15:17.100750943+00:00 stderr F I1212 16:15:17.100736 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="ipaddressclaims.ipam.cluster.x-k8s.io" 2025-12-12T16:15:17.100775094+00:00 stderr F I1212 16:15:17.100758 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="ingresscontrollers.operator.openshift.io" 2025-12-12T16:15:17.100781664+00:00 stderr F I1212 16:15:17.100775 1 
controllermanager.go:827] "Started controller" controller="resourcequota-controller" 2025-12-12T16:15:17.100788114+00:00 stderr F I1212 16:15:17.100782 1 controllermanager.go:796] "Starting controller" controller="garbage-collector-controller" 2025-12-12T16:15:17.100858566+00:00 stderr F I1212 16:15:17.100820 1 resource_quota_controller.go:300] "Starting resource quota controller" logger="resourcequota-controller" 2025-12-12T16:15:17.100914517+00:00 stderr F I1212 16:15:17.100891 1 shared_informer.go:350] "Waiting for caches to sync" controller="resource quota" 2025-12-12T16:15:17.100946648+00:00 stderr F I1212 16:15:17.100927 1 resource_quota_monitor.go:308] "QuotaMonitor running" logger="resourcequota-controller" 2025-12-12T16:15:17.109119105+00:00 stderr F E1212 16:15:17.109015 1 resource_quota_controller.go:446] "Unhandled Error" err="unable to retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, oauth.openshift.io/v1: stale GroupVersion discovery: oauth.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1, user.openshift.io/v1: stale GroupVersion discovery: user.openshift.io/v1" logger="UnhandledError" 2025-12-12T16:15:17.109203097+00:00 stderr F I1212 16:15:17.109155 1 controllermanager.go:827] "Started controller" controller="garbage-collector-controller" 2025-12-12T16:15:17.109217017+00:00 stderr F I1212 16:15:17.109175 1 controllermanager.go:796] "Starting controller" controller="replicaset-controller" 2025-12-12T16:15:17.109246168+00:00 stderr F I1212 16:15:17.109160 1 garbagecollector.go:144] "Starting controller" logger="garbage-collector-controller" controller="garbagecollector" 2025-12-12T16:15:17.109302239+00:00 stderr F I1212 16:15:17.109288 1 shared_informer.go:350] "Waiting for caches to sync" controller="garbage collector" 2025-12-12T16:15:17.109346010+00:00 stderr F I1212 16:15:17.109317 1 graph_builder.go:351] "Running" logger="garbage-collector-controller" component="GraphBuilder" 2025-12-12T16:15:17.110693533+00:00 stderr F I1212 16:15:17.110543 1 resource_quota_controller.go:476] "syncing resource quota controller with updated resources from discovery" logger="resourcequota-controller" diff="added: [/v1, Resource=configmaps /v1, Resource=endpoints /v1, Resource=events /v1, Resource=limitranges /v1, Resource=persistentvolumeclaims /v1, Resource=pods /v1, Resource=podtemplates /v1, Resource=replicationcontrollers /v1, Resource=resourcequotas /v1, Resource=secrets /v1, Resource=serviceaccounts /v1, Resource=services apps/v1, Resource=controllerrevisions apps/v1, Resource=daemonsets apps/v1, Resource=deployments apps/v1, Resource=replicasets apps/v1, Resource=statefulsets autoscaling.openshift.io/v1beta1, Resource=machineautoscalers autoscaling/v2, Resource=horizontalpodautoscalers batch/v1, Resource=cronjobs batch/v1, 
Resource=jobs config.openshift.io/v1, Resource=imagepolicies controlplane.operator.openshift.io/v1alpha1, Resource=podnetworkconnectivitychecks coordination.k8s.io/v1, Resource=leases discovery.k8s.io/v1, Resource=endpointslices events.k8s.io/v1, Resource=events gateway.networking.k8s.io/v1, Resource=gateways gateway.networking.k8s.io/v1, Resource=grpcroutes gateway.networking.k8s.io/v1, Resource=httproutes gateway.networking.k8s.io/v1beta1, Resource=referencegrants helm.openshift.io/v1beta1, Resource=projecthelmchartrepositories infrastructure.cluster.x-k8s.io/v1beta1, Resource=metal3remediations infrastructure.cluster.x-k8s.io/v1beta1, Resource=metal3remediationtemplates ingress.operator.openshift.io/v1, Resource=dnsrecords ipam.cluster.x-k8s.io/v1beta1, Resource=ipaddressclaims ipam.cluster.x-k8s.io/v1beta1, Resource=ipaddresses k8s.cni.cncf.io/v1, Resource=network-attachment-definitions k8s.cni.cncf.io/v1alpha1, Resource=ipamclaims k8s.ovn.org/v1, Resource=egressfirewalls k8s.ovn.org/v1, Resource=egressqoses k8s.ovn.org/v1, Resource=egressservices k8s.ovn.org/v1, Resource=userdefinednetworks machine.openshift.io/v1, Resource=controlplanemachinesets machine.openshift.io/v1beta1, Resource=machinehealthchecks machine.openshift.io/v1beta1, Resource=machines machine.openshift.io/v1beta1, Resource=machinesets monitoring.coreos.com/v1, Resource=alertmanagers monitoring.coreos.com/v1, Resource=podmonitors monitoring.coreos.com/v1, Resource=probes monitoring.coreos.com/v1, Resource=prometheuses monitoring.coreos.com/v1, Resource=prometheusrules monitoring.coreos.com/v1, Resource=servicemonitors monitoring.coreos.com/v1, Resource=thanosrulers monitoring.coreos.com/v1beta1, Resource=alertmanagerconfigs monitoring.openshift.io/v1, Resource=alertingrules monitoring.openshift.io/v1, Resource=alertrelabelconfigs network.operator.openshift.io/v1, Resource=egressrouters network.operator.openshift.io/v1, Resource=operatorpkis networking.k8s.io/v1, Resource=ingresses networking.k8s.io/v1, Resource=networkpolicies operator.openshift.io/v1, Resource=ingresscontrollers operators.coreos.com/v1, Resource=operatorgroups operators.coreos.com/v1alpha1, Resource=catalogsources operators.coreos.com/v1alpha1, Resource=clusterserviceversions operators.coreos.com/v1alpha1, Resource=installplans operators.coreos.com/v1alpha1, Resource=subscriptions operators.coreos.com/v2, Resource=operatorconditions policy/v1, Resource=poddisruptionbudgets rbac.authorization.k8s.io/v1, Resource=rolebindings rbac.authorization.k8s.io/v1, Resource=roles storage.k8s.io/v1, Resource=csistoragecapacities whereabouts.cni.cncf.io/v1alpha1, Resource=ippools whereabouts.cni.cncf.io/v1alpha1, Resource=nodeslicepools whereabouts.cni.cncf.io/v1alpha1, Resource=overlappingrangeipreservations], removed: []" 2025-12-12T16:15:17.111931103+00:00 stderr F I1212 16:15:17.111860 1 controllermanager.go:827] "Started controller" controller="replicaset-controller" 2025-12-12T16:15:17.111931103+00:00 stderr F I1212 16:15:17.111878 1 controllermanager.go:796] "Starting controller" controller="horizontal-pod-autoscaler-controller" 2025-12-12T16:15:17.112248130+00:00 stderr F I1212 16:15:17.112021 1 replica_set.go:219] "Starting controller" logger="replicaset-controller" name="replicaset" 2025-12-12T16:15:17.112248130+00:00 stderr F I1212 16:15:17.112032 1 shared_informer.go:350] "Waiting for caches to sync" controller="ReplicaSet" 2025-12-12T16:15:17.118804428+00:00 stderr F I1212 16:15:17.118748 1 controllermanager.go:827] "Started controller" 
controller="horizontal-pod-autoscaler-controller" 2025-12-12T16:15:17.118804428+00:00 stderr F I1212 16:15:17.118769 1 controllermanager.go:796] "Starting controller" controller="disruption-controller" 2025-12-12T16:15:17.118829689+00:00 stderr F I1212 16:15:17.118793 1 horizontal.go:204] "Starting HPA controller" logger="horizontal-pod-autoscaler-controller" 2025-12-12T16:15:17.118829689+00:00 stderr F I1212 16:15:17.118810 1 shared_informer.go:350] "Waiting for caches to sync" controller="HPA" 2025-12-12T16:15:17.122410485+00:00 stderr F I1212 16:15:17.122355 1 controllermanager.go:827] "Started controller" controller="disruption-controller" 2025-12-12T16:15:17.122410485+00:00 stderr F I1212 16:15:17.122383 1 controllermanager.go:790] "Warning: controller is disabled" controller="token-cleaner-controller" 2025-12-12T16:15:17.122410485+00:00 stderr F I1212 16:15:17.122392 1 controllermanager.go:796] "Starting controller" controller="clusterrole-aggregation-controller" 2025-12-12T16:15:17.123006470+00:00 stderr F I1212 16:15:17.122960 1 disruption.go:455] "Sending events to api server." logger="disruption-controller" 2025-12-12T16:15:17.123021400+00:00 stderr F I1212 16:15:17.123015 1 disruption.go:466] "Starting disruption controller" logger="disruption-controller" 2025-12-12T16:15:17.123028710+00:00 stderr F I1212 16:15:17.123021 1 shared_informer.go:350] "Waiting for caches to sync" controller="disruption" 2025-12-12T16:15:17.124043554+00:00 stderr F I1212 16:15:17.123996 1 controllermanager.go:827] "Started controller" controller="clusterrole-aggregation-controller" 2025-12-12T16:15:17.124043554+00:00 stderr F I1212 16:15:17.124013 1 controllermanager.go:796] "Starting controller" controller="persistentvolumeclaim-protection-controller" 2025-12-12T16:15:17.124137767+00:00 stderr F I1212 16:15:17.124104 1 clusterroleaggregation_controller.go:194] "Starting ClusterRoleAggregator controller" logger="clusterrole-aggregation-controller" 2025-12-12T16:15:17.124137767+00:00 stderr F I1212 16:15:17.124122 1 shared_informer.go:350] "Waiting for caches to sync" controller="ClusterRoleAggregator" 2025-12-12T16:15:17.126600236+00:00 stderr F I1212 16:15:17.126554 1 controllermanager.go:827] "Started controller" controller="persistentvolumeclaim-protection-controller" 2025-12-12T16:15:17.126600236+00:00 stderr F I1212 16:15:17.126574 1 controllermanager.go:796] "Starting controller" controller="namespace-controller" 2025-12-12T16:15:17.126727059+00:00 stderr F I1212 16:15:17.126680 1 pvc_protection_controller.go:168] "Starting PVC protection controller" logger="persistentvolumeclaim-protection-controller" 2025-12-12T16:15:17.126727059+00:00 stderr F I1212 16:15:17.126716 1 shared_informer.go:350] "Waiting for caches to sync" controller="PVC protection" 2025-12-12T16:15:17.137265363+00:00 stderr F I1212 16:15:17.137136 1 garbagecollector.go:787] "failed to discover some groups" logger="garbage-collector-controller" groups="map[\"apps.openshift.io/v1\":\"stale GroupVersion discovery: apps.openshift.io/v1\" \"authorization.openshift.io/v1\":\"stale GroupVersion discovery: authorization.openshift.io/v1\" \"build.openshift.io/v1\":\"stale GroupVersion discovery: build.openshift.io/v1\" \"image.openshift.io/v1\":\"stale GroupVersion discovery: image.openshift.io/v1\" \"oauth.openshift.io/v1\":\"stale GroupVersion discovery: oauth.openshift.io/v1\" \"packages.operators.coreos.com/v1\":\"stale GroupVersion discovery: packages.operators.coreos.com/v1\" \"project.openshift.io/v1\":\"stale GroupVersion 
discovery: project.openshift.io/v1\" \"quota.openshift.io/v1\":\"stale GroupVersion discovery: quota.openshift.io/v1\" \"route.openshift.io/v1\":\"stale GroupVersion discovery: route.openshift.io/v1\" \"security.openshift.io/v1\":\"stale GroupVersion discovery: security.openshift.io/v1\" \"template.openshift.io/v1\":\"stale GroupVersion discovery: template.openshift.io/v1\" \"user.openshift.io/v1\":\"stale GroupVersion discovery: user.openshift.io/v1\"]" 2025-12-12T16:15:17.138216026+00:00 stderr P I1212 16:15:17.138016 1 garbagecollector.go:203] "syncing garbage collector with updated resources from discovery" logger="garbage-collector-controller" diff="added: [/v1, Resource=configmaps /v1, Resource=endpoints /v1, Resource=events /v1, Resource=limitranges /v1, Resource=namespaces /v1, Resource=nodes /v1, Resource=persistentvolumeclaims /v1, Resource=persistentvolumes /v1, Resource=pods /v1, Resource=podtemplates /v1, Resource=replicationcontrollers /v1, Resource=resourcequotas /v1, Resource=secrets /v1, Resource=serviceaccounts /v1, Resource=services admissionregistration.k8s.io/v1, Resource=mutatingwebhookconfigurations admissionregistration.k8s.io/v1, Resource=validatingadmissionpolicies admissionregistration.k8s.io/v1, Resource=validatingadmissionpolicybindings admissionregistration.k8s.io/v1, Resource=validatingwebhookconfigurations apiextensions.k8s.io/v1, Resource=customresourcedefinitions apiregistration.k8s.io/v1, Resource=apiservices apiserver.openshift.io/v1, Resource=apirequestcounts apps/v1, Resource=controllerrevisions apps/v1, Resource=daemonsets apps/v1, Resource=deployments apps/v1, Resource=replicasets apps/v1, Resource=statefulsets autoscaling.openshift.io/v1, Resource=clusterautoscalers autoscaling.openshift.io/v1beta1, Resource=machineautoscalers autoscaling/v2, Resource=horizontalpodautoscalers batch/v1, Resource=cronjobs batch/v1, Resource=jobs certificates.k8s.io/v1, Resource=certificatesigningrequests config.openshift.io/v1, Resource=apiservers config.openshift.io/v1, Resource=authentications config.openshift.io/v1, Resource=builds config.openshift.io/v1, Resource=clusterimagepolicies config.openshift.io/v1, Resource=clusteroperators config.openshift.io/v1, Resource=clusterversions config.openshift.io/v1, Resource=consoles config.openshift.io/v1, Resource=dnses config.openshift.io/v1, Resource=featuregates config.openshift.io/v1, Resource=imagecontentpolicies config.openshift.io/v1, Resource=imagedigestmirrorsets config.openshift.io/v1, Resource=imagepolicies config.openshift.io/v1, Resource=images config.openshift.io/v1, Resource=imagetagmirrorsets config.openshift.io/v1, Resource=infrastructures config.openshift.io/v1, Resource=ingresses config.openshift.io/v1, Resource=networks config.openshift.io/v1, Resource=nodes config.openshift.io/v1, Resource=oauths config.openshift.io/v1, Resource=operatorhubs config.openshift.io/v1, Resource=projects config.openshift.io/v1, Resource=proxies config.openshift.io/v1, Resource=schedulers console.openshift.io/v1, Resource=consoleclidownloads console.openshift.io/v1, Resource=consoleexternalloglinks console.openshift.io/v1, Resource=consolelinks console.openshift.io/v1, Resource=consolenotifications console.openshift.io/v1, Resource=consoleplugins console.openshift.io/v1, Resource=consolequickstarts console.openshift.io/v1, Resource=consolesamples console.openshift.io/v1, Resource=consoleyamlsamples controlplane.operator.openshift.io/v1alpha1, Resource=podnetworkconnectivitychecks coordination.k8s.io/v1, Resource=leases 
discovery.k8s.io/v1, Resource=endpointslices events.k8s.io/v1, Resource=events flowcontrol.apiserver.k8s.io/v1, Resource=flowschemas flowcontrol.apiserver.k8s.io/v1, Resource=prioritylevelconfigurations gateway.networking.k8s.io/v1, Resource=gatewayclasses gateway.networking.k8s.io/v1, Resource=gateways gateway.networking.k8s.io/v1, Resource=grpcroutes gateway.networking.k8s.io/v1, Resource=httproutes gateway.networking.k8s.io/v1beta1, Resource=referencegrants helm.openshift.io/v1beta1, Resource=helmchartrepositories helm.openshift.io/v1beta1, Resource=projecthelmchartrepositories imageregistry.operator.openshift.io/v1, Resource=configs imageregistry.operator.openshift.io/v1, Resource=imagepruners infrastructure.cluster.x-k8s.io/v1beta1, Resource=metal3remediations infrastructure.cluster.x-k8s.io/v1beta1, Resource=metal3remediationtemplates ingress.operator.openshift.io/v1, Resource=dnsrecords ipam.cluster.x-k8s.io/v1beta1, Resource=ipaddressclaims ipam.cluster.x-k8s.io/v1beta1, Resource=ipaddresses k8s.cni.cncf.io/v1, Resource=network-attachment-definitions k8s.cni.cncf.io/v1alpha1, Resource=ipamclaims k8s.ovn.org/v1, Resource=adminpolicybasedexternalroutes k8s.ovn.org/v1, Resource=clusteruserdefinednetworks k8s.ovn.org/v1, Resource=egressfirewalls k8s.ovn.org/v1, Resource=egressips k8s.ovn.org/v1, Resource=egressqoses k8s.ovn.org/v1, Resource=egressservices k8s.ovn.org/v1, Resource=userdefinednetworks machine.openshift.io/v1, Resource=controlplanemachinesets machine.openshift.io/v1beta1, Resource=machinehealthchecks machine.openshift.io/v1beta1, Resource=machines machine.openshift.io/v1beta1, Resource=machinesets machineconfiguration.openshift.io/v1, Resource=containerruntimeconfigs machineconfiguration.openshift.io/v1, Resource=controllerconfigs machineconfiguration.openshift.io/v1, Resource=kubeletconfigs machineconfiguration.openshift.io/v1, Resource=machineconfignodes machineconfiguration.openshift.io/v1, Resource=machineconfigpools machineconfiguration.openshift.io/v1, Resource=machineconfigs machineconfiguration.openshift.io/v1, Resource=machineosbuilds machineconfiguration.openshift.io/v1, Resource=machineosconfigs machineconfiguration.openshift.io/v1, Resource=pinnedimagesets migration.k8s.io/v1alpha1, Resource=storagestates migration.k8s.io/v1alpha1, Resource=storageversionmigrations monitoring.coreos.com/v1, Resource=alertmanagers monitoring.coreos.com/v1, Resource=podmonitors monitoring.coreos.com/v1, Resource=probes monitoring.coreos.com/v1, Resource=prometheuses monitoring.coreos.com/v1, Resource=prometheusrules monitoring.coreos.com/v1, Resource=servicemonitors monitoring.coreos.com/v1, Resource=thanosrulers monitoring.coreos.com/v1beta1, Resource=alertmanagerconfigs monitoring.openshift.io/v1, Resource=alertingrules monitoring.openshift.io/v1, Resource=alertrelabelconfigs network.operator.openshift.io/v1, Resource=egressrouters network.operator.openshift.io/v1, Resource=operatorpkis networking.k8s.io/v1, Resource=ingressclasses networking.k8s.io/v1, Resource=ingresses networking.k8s.io/v1, Resource=ipaddresses networking.k8s.io/v1, Resource=networkpolicies networking.k8s.io/v1, Resource=servicecidrs node.k8s.io/v1, Resource=runtimeclasses operator.openshift.io/v1, Resource=authentications operator.openshift.io/v1, Resource=clustercsidrivers operator.openshift.io/v1, Resource=configs operator.openshift.io/v1, Resource=consoles operator.openshift.io/v1, Resource=csisnapshotcontrollers operator.openshift.io/v1, Resource=dnses operator.openshift.io/v1, Resource=etcds 
operator.openshift.io/v1, Resource=ingresscontrollers operator.openshift.io/v1, Resource=kubeapiservers operator.openshift.io/v1, Resource=kubecontrollermanagers operator.openshift.io/v1, Resource=kubeschedulers operator.openshift.io/v1, Resource=kubestorageversionmigrators operator.openshift.io/v1, Resource=machineconfigurations operator.openshift.io/v1, Resource=networks operator.openshift.io/v1, Resource=openshiftapiservers operator.openshift.io/v1, Resource=openshiftcontrollermanagers operator.openshift.io/v1, Resource=servicecas operator.openshift.io/v1, Resource=storages operator.openshift.io/v1alpha1, Resource=imagecontentsourcepolicies operators.coreos.com/v1, Resource=olmconfigs operators.coreos.com/v1, Resource=operatorgroups operators.coreos.com/v1, Resource=operators operators.coreos.com/v1alpha1, Resource=catalogsources operators.coreos.com/v1alpha1, Resource=clusterserviceversions operators.coreos.com/v1alpha1, Resource=installplans operators.coreos.com/v1alpha1, Resource=subscriptions operators.coreos.com/v2, Resource=operatorconditions policy.networking.k8s.io/v1alpha1, Resource=adminnetworkpolicies policy.networking.k8s.io/v1alpha1, Resource=baselineadminnetworkpolicies policy/v1, Resource=poddisruptionbudgets rbac.authorization.k8s.io/v1, Resource=clusterrolebindings rbac.authorization.k8s.io/v1, Resource=clusterroles rbac.authorization.k8s.io/v1, Resource=rolebindings rbac.authorization.k8s.io/v1, Resource=roles samples.operator.openshift.io/v1, Resource=configs scheduling.k8s.io/v1, Resource=priorityclasses security.internal.openshift.io/v1, Resource= 2025-12-12T16:15:17.138264837+00:00 stderr F rangeallocations storage.k8s.io/v1, Resource=csidrivers storage.k8s.io/v1, Resource=csinodes storage.k8s.io/v1, Resource=csistoragecapacities storage.k8s.io/v1, Resource=storageclasses storage.k8s.io/v1, Resource=volumeattachments whereabouts.cni.cncf.io/v1alpha1, Resource=ippools whereabouts.cni.cncf.io/v1alpha1, Resource=nodeslicepools whereabouts.cni.cncf.io/v1alpha1, Resource=overlappingrangeipreservations], removed: []" 2025-12-12T16:15:17.140576603+00:00 stderr F I1212 16:15:17.140505 1 shared_informer.go:357] "Caches are synced" controller="tokens" 2025-12-12T16:15:17.151434844+00:00 stderr F E1212 16:15:17.151323 1 namespaced_resources_deleter.go:164] "Unhandled Error" err="unable to get all supported resources from server: unable to retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, oauth.openshift.io/v1: stale GroupVersion discovery: oauth.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1, user.openshift.io/v1: stale GroupVersion discovery: user.openshift.io/v1" logger="UnhandledError" 2025-12-12T16:15:17.152902110+00:00 stderr F I1212 16:15:17.152835 1 controllermanager.go:827] "Started controller" 
controller="namespace-controller" 2025-12-12T16:15:17.152920470+00:00 stderr F I1212 16:15:17.152897 1 controllermanager.go:796] "Starting controller" controller="statefulset-controller" 2025-12-12T16:15:17.153007772+00:00 stderr F I1212 16:15:17.152957 1 namespace_controller.go:202] "Starting namespace controller" logger="namespace-controller" 2025-12-12T16:15:17.153007772+00:00 stderr F I1212 16:15:17.152993 1 shared_informer.go:350] "Waiting for caches to sync" controller="namespace" 2025-12-12T16:15:17.157360007+00:00 stderr F I1212 16:15:17.157289 1 controllermanager.go:827] "Started controller" controller="statefulset-controller" 2025-12-12T16:15:17.157360007+00:00 stderr F I1212 16:15:17.157320 1 controllermanager.go:796] "Starting controller" controller="certificatesigningrequest-approving-controller" 2025-12-12T16:15:17.157502741+00:00 stderr F I1212 16:15:17.157453 1 stateful_set.go:166] "Starting stateful set controller" logger="statefulset-controller" 2025-12-12T16:15:17.157502741+00:00 stderr F I1212 16:15:17.157472 1 shared_informer.go:350] "Waiting for caches to sync" controller="stateful set" 2025-12-12T16:15:17.160721858+00:00 stderr F I1212 16:15:17.160621 1 controllermanager.go:827] "Started controller" controller="certificatesigningrequest-approving-controller" 2025-12-12T16:15:17.160721858+00:00 stderr F I1212 16:15:17.160650 1 controllermanager.go:790] "Warning: controller is disabled" controller="ttl-controller" 2025-12-12T16:15:17.160721858+00:00 stderr F I1212 16:15:17.160658 1 controllermanager.go:790] "Warning: controller is disabled" controller="bootstrap-signer-controller" 2025-12-12T16:15:17.160721858+00:00 stderr F I1212 16:15:17.160666 1 controllermanager.go:796] "Starting controller" controller="persistentvolume-protection-controller" 2025-12-12T16:15:17.160860661+00:00 stderr F I1212 16:15:17.160806 1 certificate_controller.go:120] "Starting certificate controller" logger="certificatesigningrequest-approving-controller" name="csrapproving" 2025-12-12T16:15:17.160860661+00:00 stderr F I1212 16:15:17.160835 1 shared_informer.go:350] "Waiting for caches to sync" controller="certificate-csrapproving" 2025-12-12T16:15:17.163952376+00:00 stderr F I1212 16:15:17.163901 1 controllermanager.go:827] "Started controller" controller="persistentvolume-protection-controller" 2025-12-12T16:15:17.163952376+00:00 stderr F I1212 16:15:17.163927 1 controllermanager.go:796] "Starting controller" controller="ttl-after-finished-controller" 2025-12-12T16:15:17.163971956+00:00 stderr F I1212 16:15:17.163949 1 pv_protection_controller.go:81] "Starting PV protection controller" logger="persistentvolume-protection-controller" 2025-12-12T16:15:17.163971956+00:00 stderr F I1212 16:15:17.163965 1 shared_informer.go:350] "Waiting for caches to sync" controller="PV protection" 2025-12-12T16:15:17.166951968+00:00 stderr F I1212 16:15:17.166894 1 controllermanager.go:827] "Started controller" controller="ttl-after-finished-controller" 2025-12-12T16:15:17.166951968+00:00 stderr F I1212 16:15:17.166923 1 controllermanager.go:779] "Controller is disabled by a feature gate" controller="kube-apiserver-serving-clustertrustbundle-publisher-controller" requiredFeatureGates=["ClusterTrustBundle"] 2025-12-12T16:15:17.166977239+00:00 stderr F I1212 16:15:17.166951 1 controllermanager.go:796] "Starting controller" controller="replicationcontroller-controller" 2025-12-12T16:15:17.167062621+00:00 stderr F I1212 16:15:17.167021 1 ttlafterfinished_controller.go:112] "Starting TTL after finished 
controller" logger="ttl-after-finished-controller" 2025-12-12T16:15:17.167062621+00:00 stderr F I1212 16:15:17.167038 1 shared_informer.go:350] "Waiting for caches to sync" controller="TTL after finished" 2025-12-12T16:15:17.169100110+00:00 stderr F I1212 16:15:17.169065 1 controllermanager.go:827] "Started controller" controller="replicationcontroller-controller" 2025-12-12T16:15:17.169100110+00:00 stderr F I1212 16:15:17.169079 1 controllermanager.go:796] "Starting controller" controller="deployment-controller" 2025-12-12T16:15:17.169756696+00:00 stderr F I1212 16:15:17.169266 1 replica_set.go:219] "Starting controller" logger="replicationcontroller-controller" name="replicationcontroller" 2025-12-12T16:15:17.169756696+00:00 stderr F I1212 16:15:17.169289 1 shared_informer.go:350] "Waiting for caches to sync" controller="ReplicationController" 2025-12-12T16:15:17.171687172+00:00 stderr F I1212 16:15:17.171643 1 controllermanager.go:827] "Started controller" controller="deployment-controller" 2025-12-12T16:15:17.171687172+00:00 stderr F I1212 16:15:17.171673 1 controllermanager.go:796] "Starting controller" controller="node-ipam-controller" 2025-12-12T16:15:17.171707153+00:00 stderr F I1212 16:15:17.171686 1 controllermanager.go:805] "Warning: skipping controller" controller="node-ipam-controller" 2025-12-12T16:15:17.171715993+00:00 stderr F I1212 16:15:17.171697 1 controllermanager.go:779] "Controller is disabled by a feature gate" controller="volumeattributesclass-protection-controller" requiredFeatureGates=["VolumeAttributesClass"] 2025-12-12T16:15:17.171724203+00:00 stderr F I1212 16:15:17.171715 1 controllermanager.go:796] "Starting controller" controller="service-ca-certificate-publisher-controller" 2025-12-12T16:15:17.171860426+00:00 stderr F I1212 16:15:17.171814 1 deployment_controller.go:173] "Starting controller" logger="deployment-controller" controller="deployment" 2025-12-12T16:15:17.171860426+00:00 stderr F I1212 16:15:17.171836 1 shared_informer.go:350] "Waiting for caches to sync" controller="deployment" 2025-12-12T16:15:17.174075070+00:00 stderr F I1212 16:15:17.174013 1 controllermanager.go:827] "Started controller" controller="service-ca-certificate-publisher-controller" 2025-12-12T16:15:17.174075070+00:00 stderr F I1212 16:15:17.174043 1 controllermanager.go:779] "Controller is disabled by a feature gate" controller="resourceclaim-controller" requiredFeatureGates=["DynamicResourceAllocation"] 2025-12-12T16:15:17.174075070+00:00 stderr F I1212 16:15:17.174062 1 controllermanager.go:779] "Controller is disabled by a feature gate" controller="device-taint-eviction-controller" requiredFeatureGates=["DynamicResourceAllocation","DRADeviceTaints"] 2025-12-12T16:15:17.174099860+00:00 stderr F I1212 16:15:17.174068 1 publisher.go:80] Starting service CA certificate configmap publisher 2025-12-12T16:15:17.174099860+00:00 stderr F I1212 16:15:17.174074 1 controllermanager.go:796] "Starting controller" controller="legacy-serviceaccount-token-cleaner-controller" 2025-12-12T16:15:17.174099860+00:00 stderr F I1212 16:15:17.174082 1 shared_informer.go:350] "Waiting for caches to sync" controller="crt configmap" 2025-12-12T16:15:17.176199641+00:00 stderr F I1212 16:15:17.176158 1 controllermanager.go:827] "Started controller" controller="legacy-serviceaccount-token-cleaner-controller" 2025-12-12T16:15:17.176218592+00:00 stderr F I1212 16:15:17.176200 1 controllermanager.go:796] "Starting controller" controller="pod-garbage-collector-controller" 2025-12-12T16:15:17.176218592+00:00 
stderr F I1212 16:15:17.176204 1 legacy_serviceaccount_token_cleaner.go:103] "Starting legacy service account token cleaner controller" logger="legacy-serviceaccount-token-cleaner-controller" 2025-12-12T16:15:17.176226542+00:00 stderr F I1212 16:15:17.176219 1 shared_informer.go:350] "Waiting for caches to sync" controller="legacy-service-account-token-cleaner" 2025-12-12T16:15:17.178313662+00:00 stderr F I1212 16:15:17.178203 1 controllermanager.go:827] "Started controller" controller="pod-garbage-collector-controller" 2025-12-12T16:15:17.178313662+00:00 stderr F I1212 16:15:17.178244 1 controllermanager.go:796] "Starting controller" controller="daemonset-controller" 2025-12-12T16:15:17.178313662+00:00 stderr F I1212 16:15:17.178250 1 gc_controller.go:99] "Starting GC controller" logger="pod-garbage-collector-controller" 2025-12-12T16:15:17.178313662+00:00 stderr F I1212 16:15:17.178277 1 shared_informer.go:350] "Waiting for caches to sync" controller="GC" 2025-12-12T16:15:17.180020003+00:00 stderr F I1212 16:15:17.179965 1 controllermanager.go:827] "Started controller" controller="daemonset-controller" 2025-12-12T16:15:17.180020003+00:00 stderr F I1212 16:15:17.179981 1 controllermanager.go:796] "Starting controller" controller="cronjob-controller" 2025-12-12T16:15:17.180168247+00:00 stderr F I1212 16:15:17.180115 1 daemon_controller.go:316] "Starting daemon sets controller" logger="daemonset-controller" 2025-12-12T16:15:17.180168247+00:00 stderr F I1212 16:15:17.180146 1 shared_informer.go:350] "Waiting for caches to sync" controller="daemon sets" 2025-12-12T16:15:17.181775985+00:00 stderr F I1212 16:15:17.181721 1 controllermanager.go:827] "Started controller" controller="cronjob-controller" 2025-12-12T16:15:17.181775985+00:00 stderr F I1212 16:15:17.181739 1 controllermanager.go:785] "Skipping a cloud provider controller" controller="cloud-node-lifecycle-controller" 2025-12-12T16:15:17.181775985+00:00 stderr F I1212 16:15:17.181747 1 controllermanager.go:796] "Starting controller" controller="ephemeral-volume-controller" 2025-12-12T16:15:17.181950260+00:00 stderr F I1212 16:15:17.181903 1 cronjob_controllerv2.go:145] "Starting cronjob controller v2" logger="cronjob-controller" 2025-12-12T16:15:17.181950260+00:00 stderr F I1212 16:15:17.181923 1 shared_informer.go:350] "Waiting for caches to sync" controller="cronjob" 2025-12-12T16:15:17.183659441+00:00 stderr F I1212 16:15:17.183611 1 controllermanager.go:827] "Started controller" controller="ephemeral-volume-controller" 2025-12-12T16:15:17.183659441+00:00 stderr F I1212 16:15:17.183628 1 controllermanager.go:796] "Starting controller" controller="validatingadmissionpolicy-status-controller" 2025-12-12T16:15:17.183793194+00:00 stderr F I1212 16:15:17.183749 1 controller.go:173] "Starting ephemeral volume controller" logger="ephemeral-volume-controller" 2025-12-12T16:15:17.183793194+00:00 stderr F I1212 16:15:17.183766 1 shared_informer.go:350] "Waiting for caches to sync" controller="ephemeral" 2025-12-12T16:15:17.198349955+00:00 stderr F I1212 16:15:17.198260 1 controllermanager.go:827] "Started controller" controller="validatingadmissionpolicy-status-controller" 2025-12-12T16:15:17.198349955+00:00 stderr F I1212 16:15:17.198328 1 controllermanager.go:796] "Starting controller" controller="storage-version-migrator-controller" 2025-12-12T16:15:17.198349955+00:00 stderr F I1212 16:15:17.198338 1 controllermanager.go:805] "Warning: skipping controller" controller="storage-version-migrator-controller" 2025-12-12T16:15:17.198402206+00:00 
stderr F I1212 16:15:17.198344 1 controllermanager.go:796] "Starting controller" controller="selinux-warning-controller" 2025-12-12T16:15:17.198530309+00:00 stderr F I1212 16:15:17.198465 1 shared_informer.go:350] "Waiting for caches to sync" controller="validatingadmissionpolicy-status" 2025-12-12T16:15:17.200843075+00:00 stderr F I1212 16:15:17.200790 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/host-path" 2025-12-12T16:15:17.200843075+00:00 stderr F I1212 16:15:17.200809 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/nfs" 2025-12-12T16:15:17.200843075+00:00 stderr F I1212 16:15:17.200817 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/fc" 2025-12-12T16:15:17.200843075+00:00 stderr F I1212 16:15:17.200825 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" 2025-12-12T16:15:17.200868055+00:00 stderr F I1212 16:15:17.200847 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/csi" 2025-12-12T16:15:17.200868055+00:00 stderr F I1212 16:15:17.200862 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" 2025-12-12T16:15:17.201020889+00:00 stderr F I1212 16:15:17.200990 1 controllermanager.go:827] "Started controller" controller="selinux-warning-controller" 2025-12-12T16:15:17.201020889+00:00 stderr F I1212 16:15:17.201005 1 controllermanager.go:796] "Starting controller" controller="endpointslice-controller" 2025-12-12T16:15:17.201123101+00:00 stderr F I1212 16:15:17.201082 1 selinux_warning_controller.go:348] "Starting SELinux warning controller" logger="selinux-warning-controller" 2025-12-12T16:15:17.201234504+00:00 stderr F I1212 16:15:17.201173 1 shared_informer.go:350] "Waiting for caches to sync" controller="selinux_warning" 2025-12-12T16:15:17.202883984+00:00 stderr F I1212 16:15:17.202824 1 controllermanager.go:827] "Started controller" controller="endpointslice-controller" 2025-12-12T16:15:17.202883984+00:00 stderr F I1212 16:15:17.202845 1 controllermanager.go:796] "Starting controller" controller="serviceaccount-controller" 2025-12-12T16:15:17.203049948+00:00 stderr F I1212 16:15:17.203003 1 endpointslice_controller.go:281] "Starting endpoint slice controller" logger="endpointslice-controller" 2025-12-12T16:15:17.203049948+00:00 stderr F I1212 16:15:17.203020 1 shared_informer.go:350] "Waiting for caches to sync" controller="endpoint_slice" 2025-12-12T16:15:17.204830621+00:00 stderr F I1212 16:15:17.204788 1 controllermanager.go:827] "Started controller" controller="serviceaccount-controller" 2025-12-12T16:15:17.204830621+00:00 stderr F I1212 16:15:17.204805 1 controllermanager.go:796] "Starting controller" controller="root-ca-certificate-publisher-controller" 2025-12-12T16:15:17.204991295+00:00 stderr F I1212 16:15:17.204942 1 serviceaccounts_controller.go:114] "Starting service account controller" logger="serviceaccount-controller" 2025-12-12T16:15:17.204991295+00:00 stderr F I1212 16:15:17.204965 1 shared_informer.go:350] "Waiting for caches to sync" controller="service account" 2025-12-12T16:15:17.209157145+00:00 stderr F I1212 16:15:17.209080 1 controllermanager.go:827] "Started controller" controller="root-ca-certificate-publisher-controller" 2025-12-12T16:15:17.209157145+00:00 stderr F I1212 16:15:17.209129 1 controllermanager.go:796] "Starting controller" controller="service-cidr-controller" 2025-12-12T16:15:17.209240367+00:00 stderr F I1212 16:15:17.209168 1 publisher.go:107] "Starting root CA cert publisher controller" 
logger="root-ca-certificate-publisher-controller" 2025-12-12T16:15:17.209240367+00:00 stderr F I1212 16:15:17.209230 1 shared_informer.go:350] "Waiting for caches to sync" controller="crt configmap" 2025-12-12T16:15:17.212902195+00:00 stderr F I1212 16:15:17.212813 1 controllermanager.go:827] "Started controller" controller="service-cidr-controller" 2025-12-12T16:15:17.212902195+00:00 stderr F I1212 16:15:17.212837 1 controllermanager.go:785] "Skipping a cloud provider controller" controller="service-lb-controller" 2025-12-12T16:15:17.212902195+00:00 stderr F I1212 16:15:17.212850 1 controllermanager.go:796] "Starting controller" controller="endpoints-controller" 2025-12-12T16:15:17.213353996+00:00 stderr F I1212 16:15:17.213323 1 servicecidrs_controller.go:136] "Starting" logger="service-cidr-controller" controller="service-cidr-controller" 2025-12-12T16:15:17.213353996+00:00 stderr F I1212 16:15:17.213343 1 shared_informer.go:350] "Waiting for caches to sync" controller="service-cidr-controller" 2025-12-12T16:15:17.216937962+00:00 stderr F I1212 16:15:17.216866 1 controllermanager.go:827] "Started controller" controller="endpoints-controller" 2025-12-12T16:15:17.216937962+00:00 stderr F I1212 16:15:17.216910 1 controllermanager.go:796] "Starting controller" controller="endpointslice-mirroring-controller" 2025-12-12T16:15:17.217230389+00:00 stderr F I1212 16:15:17.217203 1 endpoints_controller.go:187] "Starting endpoint controller" logger="endpoints-controller" 2025-12-12T16:15:17.217230389+00:00 stderr F I1212 16:15:17.217220 1 shared_informer.go:350] "Waiting for caches to sync" controller="endpoint" 2025-12-12T16:15:17.220614891+00:00 stderr F I1212 16:15:17.220527 1 controllermanager.go:827] "Started controller" controller="endpointslice-mirroring-controller" 2025-12-12T16:15:17.220614891+00:00 stderr F I1212 16:15:17.220566 1 controllermanager.go:796] "Starting controller" controller="certificatesigningrequest-signing-controller" 2025-12-12T16:15:17.220934069+00:00 stderr F I1212 16:15:17.220859 1 endpointslicemirroring_controller.go:227] "Starting EndpointSliceMirroring controller" logger="endpointslice-mirroring-controller" 2025-12-12T16:15:17.220934069+00:00 stderr F I1212 16:15:17.220907 1 shared_informer.go:350] "Waiting for caches to sync" controller="endpoint_slice_mirroring" 2025-12-12T16:15:17.224721040+00:00 stderr F I1212 16:15:17.224666 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:15:17.225069858+00:00 stderr F I1212 16:15:17.225029 1 certificate_controller.go:120] "Starting certificate controller" logger="certificatesigningrequest-signing-controller" name="csrsigning-kubelet-serving" 2025-12-12T16:15:17.225069858+00:00 stderr F I1212 16:15:17.225056 1 shared_informer.go:350] "Waiting for caches to sync" controller="certificate-csrsigning-kubelet-serving" 2025-12-12T16:15:17.225119840+00:00 stderr F I1212 16:15:17.225092 1 dynamic_serving_content.go:135] "Starting controller" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:15:17.225199801+00:00 stderr F I1212 16:15:17.225152 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 
2025-12-12T16:15:17.225497879+00:00 stderr F I1212 16:15:17.225462 1 certificate_controller.go:120] "Starting certificate controller" logger="certificatesigningrequest-signing-controller" name="csrsigning-kubelet-client" 2025-12-12T16:15:17.225497879+00:00 stderr F I1212 16:15:17.225482 1 shared_informer.go:350] "Waiting for caches to sync" controller="certificate-csrsigning-kubelet-client" 2025-12-12T16:15:17.225512479+00:00 stderr F I1212 16:15:17.225500 1 dynamic_serving_content.go:135] "Starting controller" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:15:17.225609341+00:00 stderr F I1212 16:15:17.225581 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:15:17.225980810+00:00 stderr F I1212 16:15:17.225910 1 certificate_controller.go:120] "Starting certificate controller" logger="certificatesigningrequest-signing-controller" name="csrsigning-kube-apiserver-client" 2025-12-12T16:15:17.225980810+00:00 stderr F I1212 16:15:17.225954 1 shared_informer.go:350] "Waiting for caches to sync" controller="certificate-csrsigning-kube-apiserver-client" 2025-12-12T16:15:17.225995671+00:00 stderr F I1212 16:15:17.225976 1 dynamic_serving_content.go:135] "Starting controller" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:15:17.226004711+00:00 stderr F I1212 16:15:17.225989 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:15:17.226259177+00:00 stderr F I1212 16:15:17.226229 1 controllermanager.go:827] "Started controller" controller="certificatesigningrequest-signing-controller" 2025-12-12T16:15:17.226259177+00:00 stderr F I1212 16:15:17.226251 1 controllermanager.go:796] "Starting controller" controller="persistentvolume-expander-controller" 2025-12-12T16:15:17.226296268+00:00 stderr F I1212 16:15:17.226282 1 certificate_controller.go:120] "Starting certificate controller" logger="certificatesigningrequest-signing-controller" name="csrsigning-legacy-unknown" 2025-12-12T16:15:17.226305788+00:00 stderr F I1212 16:15:17.226292 1 shared_informer.go:350] "Waiting for caches to sync" controller="certificate-csrsigning-legacy-unknown" 2025-12-12T16:15:17.226316578+00:00 stderr F I1212 16:15:17.226308 1 dynamic_serving_content.go:135] "Starting controller" name="csr-controller::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.crt::/etc/kubernetes/static-pod-certs/secrets/csr-signer/tls.key" 2025-12-12T16:15:17.229959876+00:00 stderr F I1212 16:15:17.229935 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" 2025-12-12T16:15:17.230083169+00:00 stderr F I1212 16:15:17.230033 1 controllermanager.go:827] "Started controller" controller="persistentvolume-expander-controller" 2025-12-12T16:15:17.230083169+00:00 stderr F I1212 16:15:17.230056 1 controllermanager.go:796] "Starting controller" controller="taint-eviction-controller" 2025-12-12T16:15:17.230171511+00:00 stderr F I1212 16:15:17.230146 1 expand_controller.go:329] "Starting expand controller" logger="persistentvolume-expander-controller" 
2025-12-12T16:15:17.230237323+00:00 stderr F I1212 16:15:17.230219 1 shared_informer.go:350] "Waiting for caches to sync" controller="expand" 2025-12-12T16:15:17.233072961+00:00 stderr F I1212 16:15:17.233012 1 controllermanager.go:827] "Started controller" controller="taint-eviction-controller" 2025-12-12T16:15:17.233072961+00:00 stderr F I1212 16:15:17.233046 1 controllermanager.go:796] "Starting controller" controller="job-controller" 2025-12-12T16:15:17.233194564+00:00 stderr F I1212 16:15:17.233150 1 taint_eviction.go:282] "Starting" logger="taint-eviction-controller" controller="taint-eviction-controller" 2025-12-12T16:15:17.233284736+00:00 stderr F I1212 16:15:17.233260 1 taint_eviction.go:288] "Sending events to api server" logger="taint-eviction-controller" 2025-12-12T16:15:17.233296597+00:00 stderr F I1212 16:15:17.233289 1 shared_informer.go:350] "Waiting for caches to sync" controller="taint-eviction-controller" 2025-12-12T16:15:17.236077504+00:00 stderr F I1212 16:15:17.236028 1 controllermanager.go:827] "Started controller" controller="job-controller" 2025-12-12T16:15:17.236077504+00:00 stderr F I1212 16:15:17.236045 1 controllermanager.go:796] "Starting controller" controller="node-lifecycle-controller" 2025-12-12T16:15:17.236227767+00:00 stderr F I1212 16:15:17.236197 1 job_controller.go:243] "Starting job controller" logger="job-controller" 2025-12-12T16:15:17.236275218+00:00 stderr F I1212 16:15:17.236260 1 shared_informer.go:350] "Waiting for caches to sync" controller="job" 2025-12-12T16:15:17.238405360+00:00 stderr F I1212 16:15:17.238357 1 node_lifecycle_controller.go:419] "Controller will reconcile labels" logger="node-lifecycle-controller" 2025-12-12T16:15:17.238405360+00:00 stderr F I1212 16:15:17.238396 1 controllermanager.go:827] "Started controller" controller="node-lifecycle-controller" 2025-12-12T16:15:17.238433890+00:00 stderr F I1212 16:15:17.238405 1 controllermanager.go:785] "Skipping a cloud provider controller" controller="node-route-controller" 2025-12-12T16:15:17.238433890+00:00 stderr F I1212 16:15:17.238414 1 controllermanager.go:796] "Starting controller" controller="persistentvolume-binder-controller" 2025-12-12T16:15:17.238576474+00:00 stderr F I1212 16:15:17.238544 1 node_lifecycle_controller.go:453] "Sending events to api server" logger="node-lifecycle-controller" 2025-12-12T16:15:17.238597334+00:00 stderr F I1212 16:15:17.238584 1 node_lifecycle_controller.go:464] "Starting node controller" logger="node-lifecycle-controller" 2025-12-12T16:15:17.238619445+00:00 stderr F I1212 16:15:17.238595 1 shared_informer.go:350] "Waiting for caches to sync" controller="taint" 2025-12-12T16:15:17.241585476+00:00 stderr F I1212 16:15:17.241538 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/host-path" 2025-12-12T16:15:17.241585476+00:00 stderr F I1212 16:15:17.241560 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/nfs" 2025-12-12T16:15:17.241585476+00:00 stderr F I1212 16:15:17.241574 1 plugins.go:616] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" 2025-12-12T16:15:17.241651538+00:00 stderr F I1212 16:15:17.241618 1 controllermanager.go:827] "Started controller" controller="persistentvolume-binder-controller" 2025-12-12T16:15:17.241779031+00:00 stderr F I1212 16:15:17.241742 1 pv_controller_base.go:308] "Starting persistent volume controller" logger="persistentvolume-binder-controller" 2025-12-12T16:15:17.241779031+00:00 stderr F I1212 16:15:17.241763 1 shared_informer.go:350] "Waiting for caches to 
sync" controller="persistent volume" 2025-12-12T16:15:17.250151843+00:00 stderr F I1212 16:15:17.246591 1 shared_informer.go:350] "Waiting for caches to sync" controller="resource quota" 2025-12-12T16:15:17.250151843+00:00 stderr F I1212 16:15:17.249789 1 reflector.go:430] "Caches populated" type="*v1.PodDisruptionBudget" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.253047472+00:00 stderr F I1212 16:15:17.251330 1 reflector.go:430] "Caches populated" type="*v2.HorizontalPodAutoscaler" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.253047472+00:00 stderr F I1212 16:15:17.251900 1 reflector.go:430] "Caches populated" type="*v1.Ingress" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.253047472+00:00 stderr F I1212 16:15:17.251966 1 reflector.go:430] "Caches populated" type="*v1.CSIStorageCapacity" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.253047472+00:00 stderr F I1212 16:15:17.252029 1 reflector.go:430] "Caches populated" type="*v1.StatefulSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.253047472+00:00 stderr F I1212 16:15:17.252085 1 reflector.go:430] "Caches populated" type="*v1.PersistentVolumeClaim" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.253047472+00:00 stderr F I1212 16:15:17.252382 1 reflector.go:430] "Caches populated" type="*v1.VolumeAttachment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.253047472+00:00 stderr F I1212 16:15:17.252460 1 reflector.go:430] "Caches populated" type="*v1.ServiceCIDR" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.253047472+00:00 stderr F I1212 16:15:17.252385 1 reflector.go:430] "Caches populated" type="*v1.CSINode" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.253047472+00:00 stderr F I1212 16:15:17.252611 1 reflector.go:430] "Caches populated" type="*v1.LimitRange" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.253047472+00:00 stderr F I1212 16:15:17.253028 1 reflector.go:430] "Caches populated" type="*v1.ValidatingAdmissionPolicy" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.255556523+00:00 stderr F I1212 16:15:17.254086 1 reflector.go:430] "Caches populated" type="*v1.ReplicationController" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.255556523+00:00 stderr F I1212 16:15:17.254395 1 reflector.go:430] "Caches populated" type="*v1.Job" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.255556523+00:00 stderr F I1212 16:15:17.254590 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-machine-config-operator/machine-config-nodes-crd-cleanup-29367829" delay="0s" 2025-12-12T16:15:17.255556523+00:00 stderr F I1212 16:15:17.254681 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29369340" delay="0s" 2025-12-12T16:15:17.255556523+00:00 stderr F I1212 16:15:17.254690 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29369355" delay="0s" 2025-12-12T16:15:17.255556523+00:00 stderr F I1212 16:15:17.254699 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29369370" delay="0s" 2025-12-12T16:15:17.256735371+00:00 stderr F I1212 16:15:17.256686 1 
reflector.go:430] "Caches populated" type="*v1.PersistentVolume" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.258569905+00:00 stderr F I1212 16:15:17.258532 1 reflector.go:430] "Caches populated" type="*v1.PodTemplate" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.258842212+00:00 stderr F I1212 16:15:17.258809 1 reflector.go:430] "Caches populated" type="*v1.StorageClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.258994996+00:00 stderr F I1212 16:15:17.258953 1 reflector.go:430] "Caches populated" type="*v1.CSIDriver" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.259642321+00:00 stderr F I1212 16:15:17.259570 1 reflector.go:430] "Caches populated" type="*v1.Lease" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.260735928+00:00 stderr F I1212 16:15:17.260677 1 reflector.go:430] "Caches populated" type="*v1.NetworkPolicy" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.260909562+00:00 stderr F I1212 16:15:17.260857 1 reflector.go:430] "Caches populated" type="*v1.ResourceQuota" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.264262553+00:00 stderr F I1212 16:15:17.263675 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.264262553+00:00 stderr F I1212 16:15:17.264022 1 actual_state_of_world.go:541] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"crc\" does not exist" 2025-12-12T16:15:17.264410396+00:00 stderr F I1212 16:15:17.264392 1 shared_informer.go:357] "Caches are synced" controller="PV protection" 2025-12-12T16:15:17.265675527+00:00 stderr F I1212 16:15:17.265096 1 reflector.go:430] "Caches populated" type="*v1.CertificateSigningRequest" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.265675527+00:00 stderr F I1212 16:15:17.265618 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.265693587+00:00 stderr F I1212 16:15:17.265671 1 reflector.go:430] "Caches populated" type="*v1.CronJob" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.266734612+00:00 stderr F I1212 16:15:17.266710 1 reflector.go:430] "Caches populated" type="*v1.IPAddress" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.267564612+00:00 stderr F I1212 16:15:17.267371 1 shared_informer.go:357] "Caches are synced" controller="TTL after finished" 2025-12-12T16:15:17.267564612+00:00 stderr F I1212 16:15:17.267435 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.268391972+00:00 stderr F I1212 16:15:17.268297 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.269793136+00:00 stderr F I1212 16:15:17.269739 1 shared_informer.go:357] "Caches are synced" controller="ReplicationController" 2025-12-12T16:15:17.269946920+00:00 stderr F I1212 16:15:17.269922 1 reflector.go:430] "Caches populated" type="*v1.EndpointSlice" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.271304122+00:00 stderr F I1212 16:15:17.271277 1 reflector.go:430] "Caches populated" 
type="*v1.Role" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.272643315+00:00 stderr F I1212 16:15:17.272599 1 reflector.go:430] "Caches populated" type="*v1.ControllerRevision" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.274165231+00:00 stderr F I1212 16:15:17.274136 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.277159503+00:00 stderr F I1212 16:15:17.277103 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.277352348+00:00 stderr F I1212 16:15:17.277334 1 shared_informer.go:357] "Caches are synced" controller="legacy-service-account-token-cleaner" 2025-12-12T16:15:17.278614158+00:00 stderr F I1212 16:15:17.278568 1 shared_informer.go:357] "Caches are synced" controller="GC" 2025-12-12T16:15:17.282330618+00:00 stderr F I1212 16:15:17.282246 1 shared_informer.go:357] "Caches are synced" controller="cronjob" 2025-12-12T16:15:17.283056655+00:00 stderr F I1212 16:15:17.283015 1 utils.go:221] "too many missed times" logger="cronjob-controller" cronjob="openshift-operator-lifecycle-manager/collect-profiles" 2025-12-12T16:15:17.283892065+00:00 stderr F I1212 16:15:17.283852 1 shared_informer.go:357] "Caches are synced" controller="ephemeral" 2025-12-12T16:15:17.286445017+00:00 stderr F I1212 16:15:17.286349 1 reflector.go:430] "Caches populated" type="*v1.DaemonSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.293617800+00:00 stderr F I1212 16:15:17.291882 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.293617800+00:00 stderr F I1212 16:15:17.293423 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.296040828+00:00 stderr F I1212 16:15:17.295358 1 reflector.go:430] "Caches populated" type="*v1.ClusterRole" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.296040828+00:00 stderr F I1212 16:15:17.295600 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.302251418+00:00 stderr F I1212 16:15:17.298692 1 shared_informer.go:357] "Caches are synced" controller="validatingadmissionpolicy-status" 2025-12-12T16:15:17.302251418+00:00 stderr F I1212 16:15:17.301269 1 shared_informer.go:357] "Caches are synced" controller="selinux_warning" 2025-12-12T16:15:17.306225374+00:00 stderr F I1212 16:15:17.303217 1 shared_informer.go:357] "Caches are synced" controller="endpoint_slice" 2025-12-12T16:15:17.306225374+00:00 stderr F I1212 16:15:17.303241 1 endpointslice_controller.go:288] "Starting service queue worker threads" logger="endpointslice-controller" total=5 2025-12-12T16:15:17.306225374+00:00 stderr F I1212 16:15:17.303266 1 endpointslice_controller.go:292] "Starting topology queue worker threads" logger="endpointslice-controller" total=1 2025-12-12T16:15:17.306225374+00:00 stderr F I1212 16:15:17.303316 1 topologycache.go:253] "Insufficient node info for topology hints" logger="endpointslice-controller" totalZones=0 totalCPU="0" sufficientNodeInfo=true 2025-12-12T16:15:17.308989570+00:00 stderr F I1212 16:15:17.307278 1 shared_informer.go:357] "Caches are synced" controller="service account" 2025-12-12T16:15:17.310419855+00:00 stderr F I1212 
16:15:17.309621 1 reflector.go:430] "Caches populated" type="*v1.ReplicaSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.317869904+00:00 stderr F I1212 16:15:17.317781 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.320980529+00:00 stderr F I1212 16:15:17.320921 1 shared_informer.go:357] "Caches are synced" controller="service-cidr-controller" 2025-12-12T16:15:17.321886671+00:00 stderr F I1212 16:15:17.321848 1 shared_informer.go:350] "Waiting for caches to sync" controller="garbage collector" 2025-12-12T16:15:17.322397093+00:00 stderr F I1212 16:15:17.322374 1 shared_informer.go:357] "Caches are synced" controller="endpoint" 2025-12-12T16:15:17.323002478+00:00 stderr F I1212 16:15:17.322748 1 shared_informer.go:357] "Caches are synced" controller="endpoint_slice_mirroring" 2025-12-12T16:15:17.323002478+00:00 stderr F I1212 16:15:17.322766 1 endpointslicemirroring_controller.go:234] "Starting worker threads" logger="endpointslice-mirroring-controller" total=5 2025-12-12T16:15:17.323455419+00:00 stderr F I1212 16:15:17.323397 1 shared_informer.go:357] "Caches are synced" controller="HPA" 2025-12-12T16:15:17.323672814+00:00 stderr F I1212 16:15:17.323639 1 shared_informer.go:357] "Caches are synced" controller="ReplicaSet" 2025-12-12T16:15:17.324690798+00:00 stderr F I1212 16:15:17.324652 1 shared_informer.go:357] "Caches are synced" controller="disruption" 2025-12-12T16:15:17.325150189+00:00 stderr F I1212 16:15:17.325110 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.325533149+00:00 stderr F I1212 16:15:17.325481 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-apiserver-operator/openshift-apiserver-operator-846cbfc458" need=1 creating=1 2025-12-12T16:15:17.326890651+00:00 stderr F I1212 16:15:17.326856 1 shared_informer.go:357] "Caches are synced" controller="PVC protection" 2025-12-12T16:15:17.326935902+00:00 stderr F I1212 16:15:17.326888 1 shared_informer.go:357] "Caches are synced" controller="certificate-csrsigning-legacy-unknown" 2025-12-12T16:15:17.327300621+00:00 stderr F I1212 16:15:17.327217 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-apiserver/apiserver-9ddfb9f55" need=1 creating=1 2025-12-12T16:15:17.327333442+00:00 stderr F I1212 16:15:17.327291 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-authentication-operator/authentication-operator-7f5c659b84" need=1 creating=1 2025-12-12T16:15:17.327663730+00:00 stderr F I1212 16:15:17.327612 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-authentication/oauth-openshift-66458b6674" need=1 creating=1 2025-12-12T16:15:17.327928146+00:00 stderr F I1212 16:15:17.327865 1 shared_informer.go:357] "Caches are synced" controller="ClusterRoleAggregator" 2025-12-12T16:15:17.330956109+00:00 stderr F I1212 16:15:17.329840 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-cluster-machine-approver/machine-approver-54c688565" need=1 creating=1 2025-12-12T16:15:17.330956109+00:00 stderr F I1212 16:15:17.330241 1 shared_informer.go:357] "Caches are synced" controller="certificate-csrsigning-kubelet-client" 2025-12-12T16:15:17.330956109+00:00 stderr F I1212 
16:15:17.330332 1 shared_informer.go:357] "Caches are synced" controller="expand" 2025-12-12T16:15:17.330956109+00:00 stderr F I1212 16:15:17.330349 1 shared_informer.go:357] "Caches are synced" controller="certificate-csrsigning-kubelet-serving" 2025-12-12T16:15:17.330956109+00:00 stderr F I1212 16:15:17.330790 1 shared_informer.go:357] "Caches are synced" controller="certificate-csrsigning-kube-apiserver-client" 2025-12-12T16:15:17.335298174+00:00 stderr F I1212 16:15:17.335217 1 shared_informer.go:357] "Caches are synced" controller="taint-eviction-controller" 2025-12-12T16:15:17.336649376+00:00 stderr F I1212 16:15:17.336509 1 shared_informer.go:357] "Caches are synced" controller="job" 2025-12-12T16:15:17.337788844+00:00 stderr F I1212 16:15:17.337739 1 reflector.go:430] "Caches populated" type="*v1.RuntimeClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.338051830+00:00 stderr F I1212 16:15:17.338016 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.340298824+00:00 stderr F I1212 16:15:17.340253 1 shared_informer.go:357] "Caches are synced" controller="taint" 2025-12-12T16:15:17.340410127+00:00 stderr F I1212 16:15:17.340345 1 node_lifecycle_controller.go:675] "Controller observed a new Node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:15:17.342347544+00:00 stderr F I1212 16:15:17.342312 1 shared_informer.go:357] "Caches are synced" controller="persistent volume" 2025-12-12T16:15:17.342981429+00:00 stderr F I1212 16:15:17.342920 1 controller_utils.go:173] "Recording event message for node" logger="node-lifecycle-controller" event="Registered Node crc in Controller" node="crc" 2025-12-12T16:15:17.343050641+00:00 stderr F I1212 16:15:17.343029 1 node_lifecycle_controller.go:1221] "Initializing eviction metric for zone" logger="node-lifecycle-controller" zone="" 2025-12-12T16:15:17.343204844+00:00 stderr F I1212 16:15:17.343160 1 node_lifecycle_controller.go:873] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:15:17.343298557+00:00 stderr F I1212 16:15:17.343230 1 node_lifecycle_controller.go:791] "Node is unresponsive. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:15:17.343219335 +0000 UTC m=+15.900939804" 2025-12-12T16:15:17.343366058+00:00 stderr F I1212 16:15:17.343327 1 node_lifecycle_controller.go:1025] "Controller detected that all Nodes are not-Ready. 
Entering master disruption mode" logger="node-lifecycle-controller" 2025-12-12T16:15:17.346621637+00:00 stderr F I1212 16:15:17.346577 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.348046591+00:00 stderr F I1212 16:15:17.348014 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.348227145+00:00 stderr F I1212 16:15:17.348192 1 reflector.go:430] "Caches populated" type="*v1.PriorityClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.348355268+00:00 stderr F I1212 16:15:17.348318 1 reflector.go:430] "Caches populated" type="*v1.IngressClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.348667916+00:00 stderr F I1212 16:15:17.348639 1 reflector.go:430] "Caches populated" type="*v1.ValidatingAdmissionPolicyBinding" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.348759588+00:00 stderr F I1212 16:15:17.348736 1 reflector.go:430] "Caches populated" type="*v1.ValidatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.349379243+00:00 stderr F I1212 16:15:17.349355 1 reflector.go:430] "Caches populated" type="*v1.PriorityLevelConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.349973287+00:00 stderr F I1212 16:15:17.349947 1 reflector.go:430] "Caches populated" type="*v1.MutatingWebhookConfiguration" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.350128681+00:00 stderr F I1212 16:15:17.350104 1 reflector.go:430] "Caches populated" type="*v1.ClusterRoleBinding" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.351033853+00:00 stderr F I1212 16:15:17.351007 1 reflector.go:430] "Caches populated" type="*v1.FlowSchema" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:17.352388096+00:00 stderr F I1212 16:15:17.352356 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.353503603+00:00 stderr F I1212 16:15:17.353470 1 shared_informer.go:357] "Caches are synced" controller="namespace" 2025-12-12T16:15:17.360254625+00:00 stderr F I1212 16:15:17.359733 1 shared_informer.go:357] "Caches are synced" controller="stateful set" 2025-12-12T16:15:17.360254625+00:00 stderr F I1212 16:15:17.360120 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.360484911+00:00 stderr F I1212 16:15:17.360440 1 shared_informer.go:357] "Caches are synced" controller="attach detach" 2025-12-12T16:15:17.361928345+00:00 stderr F I1212 16:15:17.361738 1 shared_informer.go:357] "Caches are synced" controller="certificate-csrapproving" 2025-12-12T16:15:17.372694115+00:00 stderr F I1212 16:15:17.372242 1 shared_informer.go:357] "Caches are synced" controller="deployment" 2025-12-12T16:15:17.373685649+00:00 stderr F I1212 16:15:17.373645 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.375474722+00:00 stderr F I1212 16:15:17.375435 1 shared_informer.go:357] "Caches are synced" controller="crt configmap" 2025-12-12T16:15:17.380773929+00:00 stderr F 
I1212 16:15:17.380439 1 shared_informer.go:357] "Caches are synced" controller="daemon sets" 2025-12-12T16:15:17.380773929+00:00 stderr F I1212 16:15:17.380462 1 shared_informer.go:350] "Waiting for caches to sync" controller="daemon sets" 2025-12-12T16:15:17.380773929+00:00 stderr F I1212 16:15:17.380469 1 shared_informer.go:357] "Caches are synced" controller="daemon sets" 2025-12-12T16:15:17.408606690+00:00 stderr F I1212 16:15:17.408556 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.409806689+00:00 stderr F I1212 16:15:17.409767 1 shared_informer.go:357] "Caches are synced" controller="crt configmap" 2025-12-12T16:15:17.424769759+00:00 stderr F I1212 16:15:17.424712 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.457700303+00:00 stderr F I1212 16:15:17.455684 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.462212801+00:00 stderr F I1212 16:15:17.460762 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.571527935+00:00 stderr F I1212 16:15:17.571450 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.635587488+00:00 stderr F I1212 16:15:17.635497 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.645877446+00:00 stderr F I1212 16:15:17.645824 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.665882268+00:00 stderr F I1212 16:15:17.665772 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.678791169+00:00 stderr F I1212 16:15:17.678694 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.700694867+00:00 stderr F I1212 16:15:17.700622 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.704858257+00:00 stderr F I1212 16:15:17.704564 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.709932659+00:00 stderr F I1212 16:15:17.709846 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.718395083+00:00 stderr F I1212 16:15:17.718329 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.726797566+00:00 stderr F I1212 16:15:17.726758 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.735517546+00:00 stderr F I1212 16:15:17.735256 1 reflector.go:430] "Caches 
populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.744975474+00:00 stderr F I1212 16:15:17.744913 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.755692782+00:00 stderr F I1212 16:15:17.755631 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.800833629+00:00 stderr F I1212 16:15:17.800478 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.815809280+00:00 stderr F I1212 16:15:17.815725 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.841883778+00:00 stderr F I1212 16:15:17.841830 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.859027951+00:00 stderr F I1212 16:15:17.858945 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.875490718+00:00 stderr F I1212 16:15:17.875438 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.884977316+00:00 stderr F I1212 16:15:17.884930 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.894709911+00:00 stderr F I1212 16:15:17.894651 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.922852969+00:00 stderr F I1212 16:15:17.922786 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.972043694+00:00 stderr F I1212 16:15:17.971983 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.982069425+00:00 stderr F I1212 16:15:17.981993 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:17.991897572+00:00 stderr F I1212 16:15:17.991782 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.001845602+00:00 stderr F I1212 16:15:18.001797 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.014007515+00:00 stderr F I1212 16:15:18.013970 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.059558542+00:00 stderr F I1212 16:15:18.059521 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.076580772+00:00 stderr F I1212 16:15:18.076503 1 
reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.085471816+00:00 stderr F I1212 16:15:18.085343 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.108405399+00:00 stderr F I1212 16:15:18.108332 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.149386516+00:00 stderr F I1212 16:15:18.149284 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.157533643+00:00 stderr F I1212 16:15:18.157472 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.173703952+00:00 stderr F I1212 16:15:18.173581 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.207967488+00:00 stderr F I1212 16:15:18.207833 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.215857558+00:00 stderr F I1212 16:15:18.215715 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.229531527+00:00 stderr F I1212 16:15:18.229439 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.240606004+00:00 stderr F I1212 16:15:18.240541 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.247628743+00:00 stderr F I1212 16:15:18.247591 1 shared_informer.go:357] "Caches are synced" controller="resource quota" 2025-12-12T16:15:18.247708625+00:00 stderr F I1212 16:15:18.247691 1 resource_quota_controller.go:502] "synced quota controller" logger="resourcequota-controller" 2025-12-12T16:15:18.250802790+00:00 stderr F I1212 16:15:18.250726 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.276567450+00:00 stderr F I1212 16:15:18.276480 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.291084290+00:00 stderr F I1212 16:15:18.289433 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.305269332+00:00 stderr F I1212 16:15:18.301691 1 shared_informer.go:357] "Caches are synced" controller="resource quota" 2025-12-12T16:15:18.323249345+00:00 stderr F I1212 16:15:18.322788 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.346084595+00:00 stderr F I1212 16:15:18.345997 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 
2025-12-12T16:15:18.371378914+00:00 stderr F I1212 16:15:18.371301 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.377702247+00:00 stderr F I1212 16:15:18.377649 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.387899782+00:00 stderr F I1212 16:15:18.387812 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.409285148+00:00 stderr F I1212 16:15:18.409197 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.409932703+00:00 stderr F I1212 16:15:18.409816 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[config.openshift.io/v1/ClusterVersion, namespace: openshift-machine-api, name: version, uid: 81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503]" observed="[config.openshift.io/v1/ClusterVersion, namespace: , name: version, uid: 81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503]" 2025-12-12T16:15:18.419689838+00:00 stderr F I1212 16:15:18.419643 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.420942488+00:00 stderr F I1212 16:15:18.420906 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.432024255+00:00 stderr F I1212 16:15:18.431940 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.447865177+00:00 stderr F I1212 16:15:18.447763 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.467199963+00:00 stderr F I1212 16:15:18.467114 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.477164503+00:00 stderr F I1212 16:15:18.477038 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.485471353+00:00 stderr F I1212 16:15:18.485436 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.498409815+00:00 stderr F I1212 16:15:18.498310 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.507589846+00:00 stderr F I1212 16:15:18.507545 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.523152751+00:00 stderr F I1212 16:15:18.523089 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.533273415+00:00 stderr F I1212 16:15:18.533208 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" 
reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.562092549+00:00 stderr F I1212 16:15:18.561962 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.579498228+00:00 stderr F I1212 16:15:18.579409 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.586830815+00:00 stderr F I1212 16:15:18.586771 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.610550946+00:00 stderr F I1212 16:15:18.610466 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.627432283+00:00 stderr F I1212 16:15:18.627365 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.634316669+00:00 stderr F I1212 16:15:18.634050 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.658778708+00:00 stderr F I1212 16:15:18.658705 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.658928712+00:00 stderr F I1212 16:15:18.658866 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[operator.openshift.io/v1/Console, namespace: openshift-console, name: cluster, uid: 72c9b389-7361-48f0-8bf6-56fe26546245]" observed="[operator.openshift.io/v1/Console, namespace: , name: cluster, uid: 72c9b389-7361-48f0-8bf6-56fe26546245]" 2025-12-12T16:15:18.669003475+00:00 stderr F I1212 16:15:18.668931 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.698375712+00:00 stderr F I1212 16:15:18.698297 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.709080070+00:00 stderr F I1212 16:15:18.709002 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.709168452+00:00 stderr F I1212 16:15:18.709125 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[operator.openshift.io/v1/DNS, namespace: openshift-dns, name: default, uid: 0f9755ef-acf2-4bc6-a6fc-f491e28e635f]" observed="[operator.openshift.io/v1/DNS, namespace: , name: default, uid: 0f9755ef-acf2-4bc6-a6fc-f491e28e635f]" 2025-12-12T16:15:18.719634534+00:00 stderr F I1212 16:15:18.719537 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.736974412+00:00 stderr F I1212 16:15:18.736875 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.753124301+00:00 stderr F I1212 16:15:18.753036 1 reflector.go:430] "Caches populated" 
type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.790276616+00:00 stderr F I1212 16:15:18.790164 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.809401857+00:00 stderr F I1212 16:15:18.809324 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.818993958+00:00 stderr F I1212 16:15:18.818934 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.826920889+00:00 stderr F I1212 16:15:18.826868 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.842064114+00:00 stderr F I1212 16:15:18.841769 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.850412105+00:00 stderr F I1212 16:15:18.850318 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.865832186+00:00 stderr F I1212 16:15:18.865734 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.877203520+00:00 stderr F I1212 16:15:18.877070 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.916322603+00:00 stderr F I1212 16:15:18.913441 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.936228682+00:00 stderr F I1212 16:15:18.935503 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.954994254+00:00 stderr F I1212 16:15:18.954910 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.972039505+00:00 stderr F I1212 16:15:18.971932 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.983138962+00:00 stderr F I1212 16:15:18.983061 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:18.993783449+00:00 stderr F I1212 16:15:18.993704 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.012244104+00:00 stderr F I1212 16:15:19.012135 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.022361747+00:00 stderr F I1212 16:15:19.022272 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.091817801+00:00 stderr F I1212 16:15:19.091719 1 reflector.go:430] 
"Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.100996512+00:00 stderr F I1212 16:15:19.100911 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.112008767+00:00 stderr F I1212 16:15:19.111917 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.124711453+00:00 stderr F I1212 16:15:19.124638 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.134219892+00:00 stderr F I1212 16:15:19.134136 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.148883175+00:00 stderr F I1212 16:15:19.148801 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.149044049+00:00 stderr F I1212 16:15:19.148968 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[machineconfiguration.openshift.io/v1/MachineConfigPool, namespace: openshift-machine-api, name: master, uid: 3b9df6d6-bacd-4862-b99f-10ec7fcf29ac]" observed="[machineconfiguration.openshift.io/v1/MachineConfigPool, namespace: , name: master, uid: 3b9df6d6-bacd-4862-b99f-10ec7fcf29ac]" 2025-12-12T16:15:19.149060190+00:00 stderr F I1212 16:15:19.149035 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[machineconfiguration.openshift.io/v1/MachineConfigPool, namespace: openshift-machine-api, name: worker, uid: 633fcfae-03e0-4a3a-8d5c-de9a658e82f6]" observed="[machineconfiguration.openshift.io/v1/MachineConfigPool, namespace: , name: worker, uid: 633fcfae-03e0-4a3a-8d5c-de9a658e82f6]" 2025-12-12T16:15:19.211966135+00:00 stderr F I1212 16:15:19.211869 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.217487328+00:00 stderr F I1212 16:15:19.217443 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.226523106+00:00 stderr F I1212 16:15:19.226464 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.237782547+00:00 stderr F I1212 16:15:19.237730 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.245889752+00:00 stderr F I1212 16:15:19.245828 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.255987906+00:00 stderr F I1212 16:15:19.255912 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.266169901+00:00 stderr F I1212 16:15:19.266091 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" 
reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.276986852+00:00 stderr F I1212 16:15:19.276922 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.283742884+00:00 stderr F I1212 16:15:19.283694 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.307033375+00:00 stderr F I1212 16:15:19.306940 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.307303932+00:00 stderr F I1212 16:15:19.307232 1 graph_builder.go:728] "replacing virtual item with observed item" logger="garbage-collector-controller" virtual="[operator.openshift.io/v1/Network, namespace: openshift-host-network, name: cluster, uid: d56acc66-d25c-4e5c-aa52-5418dd270c94]" observed="[operator.openshift.io/v1/Network, namespace: , name: cluster, uid: d56acc66-d25c-4e5c-aa52-5418dd270c94]" 2025-12-12T16:15:19.317921568+00:00 stderr F I1212 16:15:19.317829 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.337146331+00:00 stderr F I1212 16:15:19.337046 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.354138930+00:00 stderr F I1212 16:15:19.354059 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.363928256+00:00 stderr F I1212 16:15:19.363861 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.373433405+00:00 stderr F I1212 16:15:19.373355 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.380937826+00:00 stderr F I1212 16:15:19.380859 1 reflector.go:430] "Caches populated" type="*v1.PartialObjectMetadata" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" 2025-12-12T16:15:19.409780661+00:00 stderr F I1212 16:15:19.409704 1 shared_informer.go:357] "Caches are synced" controller="garbage collector" 2025-12-12T16:15:19.409780661+00:00 stderr F I1212 16:15:19.409733 1 garbagecollector.go:154] "Garbage collector: all resource monitors have synced" logger="garbage-collector-controller" 2025-12-12T16:15:19.409780661+00:00 stderr F I1212 16:15:19.409740 1 garbagecollector.go:157] "Proceeding to collect garbage" logger="garbage-collector-controller" 2025-12-12T16:15:19.409889103+00:00 stderr F I1212 16:15:19.409823 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: certified-operators, uid: 04c2c69e-a9e9-447b-aff2-c2db7de0ee83]" virtual=false 2025-12-12T16:15:19.409911254+00:00 stderr F I1212 16:15:19.409850 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: redhat-marketplace, uid: 647ee808-5841-49cb-96be-0e8080859241]" virtual=false 
2025-12-12T16:15:19.410017817+00:00 stderr F I1212 16:15:19.409922 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: redhat-operators, uid: ca744265-3ae3-4482-8c3d-b10e28fe1042]" virtual=false 2025-12-12T16:15:19.410017817+00:00 stderr F I1212 16:15:19.409959 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: unpack-bundles, uid: d387f0e1-78f2-4f21-8b20-eb44e79f2d4e]" virtual=false 2025-12-12T16:15:19.410017817+00:00 stderr F I1212 16:15:19.409989 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: default-deny-all-traffic, uid: 5d7de421-b53a-4577-ad68-ea7393dc755c]" virtual=false 2025-12-12T16:15:19.410148000+00:00 stderr F I1212 16:15:19.410082 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: olm-operator, uid: 85980716-45a2-4c0f-be28-4217783526a0]" virtual=false 2025-12-12T16:15:19.410148000+00:00 stderr F I1212 16:15:19.410060 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: community-operators, uid: 88a656bd-c52a-4813-892e-7e3363ba9ac0]" virtual=false 2025-12-12T16:15:19.410238632+00:00 stderr F I1212 16:15:19.410200 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: marketplace-operator, uid: 9a95e898-e71f-46f4-ab23-671d4fbd8588]" virtual=false 2025-12-12T16:15:19.410298623+00:00 stderr F I1212 16:15:19.410095 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: package-server-manager, uid: 6e667886-db08-4af0-9937-668c3a1a44aa]" virtual=false 2025-12-12T16:15:19.410319774+00:00 stderr F I1212 16:15:19.410290 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-dns, name: dns-default, uid: d5a0398f-8bff-46a5-9d7c-f2f4e26b3879]" virtual=false 2025-12-12T16:15:19.410346564+00:00 stderr F I1212 16:15:19.410284 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-network-operator, name: iptables-alerter, uid: 04364458-be97-459c-9e4e-c10e4ed7a89c]" virtual=false 2025-12-12T16:15:19.410356045+00:00 stderr F I1212 16:15:19.410206 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: default-deny-all, uid: 26158c82-b93b-4b40-ad84-d46310003f34]" virtual=false 2025-12-12T16:15:19.410387475+00:00 stderr F I1212 16:15:19.410109 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: packageserver, uid: 91cc0f79-c51b-424a-a293-e2750a385ac6]" virtual=false 2025-12-12T16:15:19.410396846+00:00 stderr F I1212 16:15:19.409960 1 garbagecollector.go:501] "Processing 
item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 60dc7d80-6fce-4c91-9304-81fc23842033]" virtual=false 2025-12-12T16:15:19.410481508+00:00 stderr F I1212 16:15:19.410205 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operators, name: default-allow-all, uid: 232e5193-54e7-43f0-ad09-44fb764c3765]" virtual=false 2025-12-12T16:15:19.410481508+00:00 stderr F I1212 16:15:19.410264 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-network-node-identity, name: network-node-identity, uid: af047b3a-df8e-4f5f-bbf5-258cdd2c977b]" virtual=false 2025-12-12T16:15:19.410481508+00:00 stderr F I1212 16:15:19.410236 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-machine-config-operator, name: machine-config-server, uid: d1ca07c1-cb4d-45e8-b23b-37a1f3a3f651]" virtual=false 2025-12-12T16:15:19.410517739+00:00 stderr F I1212 16:15:19.410238 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-network-diagnostics, name: network-check-target, uid: 9914de7f-6a1d-49fb-87e3-f0d03b5893ec]" virtual=false 2025-12-12T16:15:19.410569640+00:00 stderr F I1212 16:15:19.410290 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-dns, name: node-resolver, uid: 7a3a6da6-fe06-4125-a5c7-e5f524871af3]" virtual=false 2025-12-12T16:15:19.410683193+00:00 stderr F I1212 16:15:19.410218 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: catalog-operator, uid: 9af2c58b-44d2-43f2-bfa6-abf8a256a724]" virtual=false 2025-12-12T16:15:19.413149482+00:00 stderr F I1212 16:15:19.413082 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: community-operators, uid: 88a656bd-c52a-4813-892e-7e3363ba9ac0]" 2025-12-12T16:15:19.413149482+00:00 stderr F I1212 16:15:19.413124 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-machine-config-operator, name: machine-config-daemon, uid: 2fe756fc-41fb-44a6-ab78-d1fdba7d2669]" virtual=false 2025-12-12T16:15:19.413396058+00:00 stderr F I1212 16:15:19.413366 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: certified-operators, uid: 04c2c69e-a9e9-447b-aff2-c2db7de0ee83]" 2025-12-12T16:15:19.413396058+00:00 stderr F I1212 16:15:19.413381 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-multus, name: multus-additional-cni-plugins, uid: db614dd0-5d3f-4079-bdf6-87e2d4631507]" virtual=false 2025-12-12T16:15:19.413994642+00:00 stderr F I1212 16:15:19.413821 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, 
namespace: openshift-machine-config-operator, name: machine-config-server, uid: d1ca07c1-cb4d-45e8-b23b-37a1f3a3f651]" 2025-12-12T16:15:19.413994642+00:00 stderr F I1212 16:15:19.413852 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-ovn-kubernetes, name: ovnkube-node, uid: 68675fbc-dce1-4e0f-93b3-f0fa287b1edd]" virtual=false 2025-12-12T16:15:19.414798742+00:00 stderr F I1212 16:15:19.414750 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-machine-config-operator, name: machine-config-daemon, uid: 2fe756fc-41fb-44a6-ab78-d1fdba7d2669]" 2025-12-12T16:15:19.414816212+00:00 stderr F I1212 16:15:19.414788 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-image-registry, name: node-ca, uid: 191786f0-2ab8-4a50-a3fb-f9953399f287]" virtual=false 2025-12-12T16:15:19.414928055+00:00 stderr F I1212 16:15:19.414907 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: redhat-operators, uid: ca744265-3ae3-4482-8c3d-b10e28fe1042]" 2025-12-12T16:15:19.414941975+00:00 stderr F I1212 16:15:19.414924 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-multus, name: network-metrics-daemon, uid: 9623fe22-300e-4e9e-9241-5a539b90f3a7]" virtual=false 2025-12-12T16:15:19.416831731+00:00 stderr F I1212 16:15:19.416763 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: marketplace-operator, uid: 9a95e898-e71f-46f4-ab23-671d4fbd8588]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.416831731+00:00 stderr F I1212 16:15:19.416806 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-ingress-canary, name: ingress-canary, uid: 77896bcd-d1f7-46a2-984f-9205a544fb94]" virtual=false 2025-12-12T16:15:19.419363332+00:00 stderr F I1212 16:15:19.419244 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: package-server-manager, uid: 6e667886-db08-4af0-9937-668c3a1a44aa]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.419363332+00:00 stderr F I1212 16:15:19.419273 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-image-registry, name: node-ca, uid: 191786f0-2ab8-4a50-a3fb-f9953399f287]" 2025-12-12T16:15:19.419363332+00:00 stderr F I1212 16:15:19.419306 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-multus, name: multus, uid: c6bd5b69-2014-4db0-b123-1bdb423140f1]" virtual=false 
2025-12-12T16:15:19.419363332+00:00 stderr F I1212 16:15:19.419314 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: hostpath-provisioner, name: csi-hostpathplugin, uid: 22984672-0d0d-46da-9df6-54a721b7b6bf]" virtual=false 2025-12-12T16:15:19.419694170+00:00 stderr F I1212 16:15:19.419631 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: packageserver, uid: 91cc0f79-c51b-424a-a293-e2750a385ac6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.419694170+00:00 stderr F I1212 16:15:19.419663 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-dns, name: dns-default, uid: d5a0398f-8bff-46a5-9d7c-f2f4e26b3879]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"DNS","name":"default","uid":"0f9755ef-acf2-4bc6-a6fc-f491e28e635f","controller":true}] 2025-12-12T16:15:19.419694170+00:00 stderr F I1212 16:15:19.419678 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicy, namespace: , name: openshift-storage-policy-validation, uid: 6a1e4dfe-42b3-45a0-8dce-5d973a54d3b1]" virtual=false 2025-12-12T16:15:19.419711120+00:00 stderr F I1212 16:15:19.419690 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-apiserver-operator, uid: 7b7e1a01-4e5a-4c5f-87de-07cde1d85301]" virtual=false 2025-12-12T16:15:19.419957276+00:00 stderr F I1212 16:15:19.419905 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: unpack-bundles, uid: d387f0e1-78f2-4f21-8b20-eb44e79f2d4e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.419957276+00:00 stderr F I1212 16:15:19.419929 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-network-operator, name: iptables-alerter, uid: 04364458-be97-459c-9e4e-c10e4ed7a89c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.419957276+00:00 stderr F I1212 16:15:19.419941 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-storage-operator, uid: 197d9352-caa7-4358-80dd-a56a8cceb99f]" virtual=false 2025-12-12T16:15:19.419970786+00:00 stderr F I1212 16:15:19.419954 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-console-user-settings, uid: 04f36abf-3d91-487f-ba90-02965aa17b74]" virtual=false 2025-12-12T16:15:19.420239133+00:00 stderr F I1212 16:15:19.420120 1 garbagecollector.go:567] "item has at least one existing owner, 
will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: default-deny-all-traffic, uid: 5d7de421-b53a-4577-ad68-ea7393dc755c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.420239133+00:00 stderr F I1212 16:15:19.420148 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kni-infra, uid: 4fc76540-3eed-4c8e-b1c4-9a8744b7c4fe]" virtual=false 2025-12-12T16:15:19.420258143+00:00 stderr F I1212 16:15:19.420231 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-network-diagnostics, name: network-check-target, uid: 9914de7f-6a1d-49fb-87e3-f0d03b5893ec]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.420266833+00:00 stderr F I1212 16:15:19.420255 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-controller-manager-operator, uid: 6498c809-5d1f-41f3-9a77-71207d8b4490]" virtual=false 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420302 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-multus, name: multus-additional-cni-plugins, uid: db614dd0-5d3f-4079-bdf6-87e2d4631507]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420334 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-machine-config-operator, uid: b4fb1cd2-d1ab-4a46-a6a5-d18dc4abe3ff]" virtual=false 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420391 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/CatalogSource, namespace: openshift-marketplace, name: redhat-marketplace, uid: 647ee808-5841-49cb-96be-0e8080859241]" 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420411 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-openstack-infra, uid: 9b523a2a-29c2-47a4-8d2f-638bdb038f6b]" virtual=false 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420121 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: catalog-operator, uid: 9af2c58b-44d2-43f2-bfa6-abf8a256a724]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420502 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , 
name: openshift-nutanix-infra, uid: 8f2c7d6d-b7e9-4b3b-b558-7c4d41319c79]" virtual=false 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420585 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-network-node-identity, name: network-node-identity, uid: af047b3a-df8e-4f5f-bbf5-258cdd2c977b]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420606 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-dns-operator, uid: 2828084a-f369-4ab4-8c97-5bac5fa50528]" virtual=false 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420639 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-dns, name: node-resolver, uid: 7a3a6da6-fe06-4125-a5c7-e5f524871af3]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"DNS","name":"default","uid":"0f9755ef-acf2-4bc6-a6fc-f491e28e635f","controller":true}] 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420662 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-console, uid: ce6b46bc-daaa-4079-95d7-fcbba6af00f1]" virtual=false 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420752 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-ingress-canary, name: ingress-canary, uid: 77896bcd-d1f7-46a2-984f-9205a544fb94]" 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420772 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-node-identity, uid: 58202f40-7743-4217-8ef0-132a1f911124]" virtual=false 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.420991 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 60dc7d80-6fce-4c91-9304-81fc23842033]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.421016 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-ovn-kubernetes, uid: 14f4405e-2ee4-486f-b504-e6eb6327dd6e]" virtual=false 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.421087 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operator-lifecycle-manager, name: olm-operator, uid: 85980716-45a2-4c0f-be28-4217783526a0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.421112 
1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-vsphere-infra, uid: 2ecc0567-f8a0-41d2-89f1-0b731ad62fbb]" virtual=false 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.421170 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-marketplace, name: default-deny-all, uid: 26158c82-b93b-4b40-ad84-d46310003f34]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.421248 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-console-operator, uid: 41451894-a97b-448d-aef0-4bc5d55372c1]" virtual=false 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.421331 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[networking.k8s.io/v1/NetworkPolicy, namespace: openshift-operators, name: default-allow-all, uid: 232e5193-54e7-43f0-ad09-44fb764c3765]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.421402651+00:00 stderr F I1212 16:15:19.421356 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-etcd-operator, uid: e1772d22-6f09-435f-b8d7-78fdc6a9bc4a]" virtual=false 2025-12-12T16:15:19.423222005+00:00 stderr F I1212 16:15:19.422131 1 shared_informer.go:357] "Caches are synced" controller="garbage collector" 2025-12-12T16:15:19.423222005+00:00 stderr F I1212 16:15:19.422154 1 garbagecollector.go:235] "synced garbage collector" logger="garbage-collector-controller" 2025-12-12T16:15:19.423388609+00:00 stderr F I1212 16:15:19.423342 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: hostpath-provisioner, name: csi-hostpathplugin, uid: 22984672-0d0d-46da-9df6-54a721b7b6bf]" 2025-12-12T16:15:19.423397889+00:00 stderr F I1212 16:15:19.423378 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-storage-version-migrator-operator, uid: 7c712a8d-4b1a-4eef-a1f6-050630ee028f]" virtual=false 2025-12-12T16:15:19.424354602+00:00 stderr F I1212 16:15:19.424164 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-multus, name: multus, uid: c6bd5b69-2014-4db0-b123-1bdb423140f1]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.424354602+00:00 stderr F I1212 16:15:19.424222 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-machine-approver, uid: bf242ec0-9a9d-43a2-a357-f6d487eabb56]" virtual=false 2025-12-12T16:15:19.425169532+00:00 stderr F I1212 16:15:19.425121 1 garbagecollector.go:567] "item has at least one existing 
owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-ovn-kubernetes, name: ovnkube-node, uid: 68675fbc-dce1-4e0f-93b3-f0fa287b1edd]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.425169532+00:00 stderr F I1212 16:15:19.425149 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-controller-manager-operator, uid: 8bc063ab-9aa2-42ac-a00e-f4a74b01a855]" virtual=false 2025-12-12T16:15:19.425420028+00:00 stderr F I1212 16:15:19.425358 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-apiserver-operator, uid: 7b7e1a01-4e5a-4c5f-87de-07cde1d85301]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.425452758+00:00 stderr F I1212 16:15:19.425425 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-host-network, uid: 4b542650-5442-4772-92b3-4649992d4842]" virtual=false 2025-12-12T16:15:19.425508420+00:00 stderr F I1212 16:15:19.425463 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/DaemonSet, namespace: openshift-multus, name: network-metrics-daemon, uid: 9623fe22-300e-4e9e-9241-5a539b90f3a7]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.426142395+00:00 stderr F I1212 16:15:19.425508 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-monitoring, uid: 07c07c05-6e0f-4083-ae8f-f743734c6b19]" virtual=false 2025-12-12T16:15:19.426142395+00:00 stderr F I1212 16:15:19.425358 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-console-user-settings, uid: 04f36abf-3d91-487f-ba90-02965aa17b74]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.426142395+00:00 stderr F I1212 16:15:19.425663 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-operator-lifecycle-manager, uid: fae25a02-86eb-4906-aa64-3c8f5894eb51]" virtual=false 2025-12-12T16:15:19.426142395+00:00 stderr F I1212 16:15:19.425809 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-storage-operator, uid: 197d9352-caa7-4358-80dd-a56a8cceb99f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.426142395+00:00 stderr F I1212 16:15:19.425827 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, 
namespace: , name: openshift-cluster-version, uid: 00c20f60-51f9-4756-b585-93ed93b6029b]" virtual=false 2025-12-12T16:15:19.426556225+00:00 stderr F I1212 16:15:19.426516 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-controller-manager-operator, uid: 6498c809-5d1f-41f3-9a77-71207d8b4490]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.426609306+00:00 stderr F I1212 16:15:19.426591 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-config-managed, uid: addccfaa-40f8-4cbc-86ad-29a8e9b5ef78]" virtual=false 2025-12-12T16:15:19.426839612+00:00 stderr F I1212 16:15:19.426820 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kni-infra, uid: 4fc76540-3eed-4c8e-b1c4-9a8744b7c4fe]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.426884453+00:00 stderr F I1212 16:15:19.426864 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-image-registry, uid: 46fdad02-53ed-489c-bc37-661151dcc55f]" virtual=false 2025-12-12T16:15:19.427113928+00:00 stderr F I1212 16:15:19.427092 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicy, namespace: , name: openshift-storage-policy-validation, uid: 6a1e4dfe-42b3-45a0-8dce-5d973a54d3b1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.427159079+00:00 stderr F I1212 16:15:19.427144 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-ingress-operator, uid: dafbc0ce-18be-4af5-8fd3-13010bc7349e]" virtual=false 2025-12-12T16:15:19.429688990+00:00 stderr F I1212 16:15:19.429640 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-etcd-operator, uid: e1772d22-6f09-435f-b8d7-78fdc6a9bc4a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.429746992+00:00 stderr F I1212 16:15:19.429730 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-scheduler-operator, uid: e597c7c3-88eb-47f8-b1a0-825e57afdf8a]" virtual=false 2025-12-12T16:15:19.430311495+00:00 stderr F I1212 16:15:19.430266 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-dns-operator, uid: 2828084a-f369-4ab4-8c97-5bac5fa50528]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.430311495+00:00 stderr F I1212 16:15:19.430261 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-vsphere-infra, uid: 2ecc0567-f8a0-41d2-89f1-0b731ad62fbb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.430311495+00:00 stderr F I1212 16:15:19.430296 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-ovirt-infra, uid: 96a755c5-7f75-4b72-995e-f9dddfb24440]" virtual=false 2025-12-12T16:15:19.430330306+00:00 stderr F I1212 16:15:19.430309 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-authentication-operator, uid: 6912c967-af53-4ba4-8d3d-98a1cb2e9bdc]" virtual=false 2025-12-12T16:15:19.430454899+00:00 stderr F I1212 16:15:19.430432 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-openstack-infra, uid: 9b523a2a-29c2-47a4-8d2f-638bdb038f6b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.430493530+00:00 stderr F I1212 16:15:19.430471 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-console-operator, uid: 41451894-a97b-448d-aef0-4bc5d55372c1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.430501330+00:00 stderr F I1212 16:15:19.430490 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-config, uid: ef6ba319-bb5c-4df3-99ee-e0d4da656faf]" virtual=false 2025-12-12T16:15:19.430530181+00:00 stderr F I1212 16:15:19.430478 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-multus, uid: b98014e1-4b49-40b7-a8a0-5258b064c959]" virtual=false 2025-12-12T16:15:19.430593202+00:00 stderr F I1212 16:15:19.430565 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-console, uid: ce6b46bc-daaa-4079-95d7-fcbba6af00f1]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.430602822+00:00 stderr F I1212 16:15:19.430590 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-marketplace, uid: 9165b720-653d-498f-9378-91ee6a28934f]" virtual=false 2025-12-12T16:15:19.430677784+00:00 stderr F I1212 16:15:19.430436 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" 
logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-nutanix-infra, uid: 8f2c7d6d-b7e9-4b3b-b558-7c4d41319c79]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.430687365+00:00 stderr F I1212 16:15:19.430673 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-samples-operator, uid: c06aeb09-cb69-4bbf-8dde-7145ef6be96d]" virtual=false 2025-12-12T16:15:19.430934750+00:00 stderr F I1212 16:15:19.430911 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-machine-config-operator, uid: b4fb1cd2-d1ab-4a46-a6a5-d18dc4abe3ff]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.430973531+00:00 stderr F I1212 16:15:19.430958 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-machine-api, uid: 79bcd8e8-bf75-45da-8612-c49cfd9d13df]" virtual=false 2025-12-12T16:15:19.432003046+00:00 stderr F I1212 16:15:19.431979 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-node-identity, uid: 58202f40-7743-4217-8ef0-132a1f911124]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.432045127+00:00 stderr F I1212 16:15:19.432031 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-diagnostics, uid: 3991475a-57bf-40e9-9954-a84192d5f5e1]" virtual=false 2025-12-12T16:15:19.432217071+00:00 stderr F I1212 16:15:19.432196 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-machine-approver, uid: bf242ec0-9a9d-43a2-a357-f6d487eabb56]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.432257802+00:00 stderr F I1212 16:15:19.432243 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-operator, uid: faea3de1-356e-4f94-8518-0aca76aba835]" virtual=false 2025-12-12T16:15:19.434773893+00:00 stderr F I1212 16:15:19.434278 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-host-network, uid: 4b542650-5442-4772-92b3-4649992d4842]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.434773893+00:00 stderr F I1212 16:15:19.434327 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: 
openshift-controller-manager-operator, uid: 8bc063ab-9aa2-42ac-a00e-f4a74b01a855]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.434773893+00:00 stderr F I1212 16:15:19.434361 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-console, uid: 6c0a940d-a53d-4fc4-ad13-cda3ca9fe0f4]" virtual=false 2025-12-12T16:15:19.434773893+00:00 stderr F I1212 16:15:19.434336 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cloud-network-config-controller, uid: af3d201f-bde4-4a45-ae31-631a3c175cdc]" virtual=false 2025-12-12T16:15:19.434773893+00:00 stderr F I1212 16:15:19.434653 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-monitoring, uid: 07c07c05-6e0f-4083-ae8f-f743734c6b19]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.434773893+00:00 stderr F I1212 16:15:19.434673 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-ovn-kubernetes, uid: 14f4405e-2ee4-486f-b504-e6eb6327dd6e]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.434773893+00:00 stderr F I1212 16:15:19.434682 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-operators, uid: bef0fff6-6eed-4771-85c8-e0dd5bdde9e0]" virtual=false 2025-12-12T16:15:19.434773893+00:00 stderr F I1212 16:15:19.434699 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-user-workload-monitoring, uid: 576d3059-0feb-4d0f-8353-b02539f1c62f]" virtual=false 2025-12-12T16:15:19.434859475+00:00 stderr F I1212 16:15:19.434830 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cluster-version, uid: 00c20f60-51f9-4756-b585-93ed93b6029b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.434870225+00:00 stderr F I1212 16:15:19.434857 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-config-operator, uid: 1dc5d509-40a4-4bd8-a98c-cfe8b82b4f57]" virtual=false 2025-12-12T16:15:19.434981758+00:00 stderr F I1212 16:15:19.434957 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-storage-version-migrator-operator, uid: 7c712a8d-4b1a-4eef-a1f6-050630ee028f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.435030929+00:00 stderr F I1212 
16:15:19.435010 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-apiserver-operator, uid: cd464d22-ebb5-4c4d-8bbb-1d343035fd2f]" virtual=false 2025-12-12T16:15:19.440371678+00:00 stderr F I1212 16:15:19.440282 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-operator-lifecycle-manager, uid: fae25a02-86eb-4906-aa64-3c8f5894eb51]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.440371678+00:00 stderr F I1212 16:15:19.440351 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-service-ca-operator, uid: 6c679b53-4beb-47e3-bcb3-3c8a99b44665]" virtual=false 2025-12-12T16:15:19.440784468+00:00 stderr F I1212 16:15:19.440752 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-config-managed, uid: addccfaa-40f8-4cbc-86ad-29a8e9b5ef78]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.440827829+00:00 stderr F I1212 16:15:19.440811 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cloud-platform-infra, uid: 1d3ed5f4-a054-412f-81c5-af793a208a1f]" virtual=false 2025-12-12T16:15:19.440975252+00:00 stderr F I1212 16:15:19.440956 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-ingress-operator, uid: dafbc0ce-18be-4af5-8fd3-13010bc7349e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.441015013+00:00 stderr F I1212 16:15:19.441000 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicy, namespace: , name: user-defined-networks-namespace-label, uid: 0f7de771-84f1-494d-ab22-4f5de9678262]" virtual=false 2025-12-12T16:15:19.441128956+00:00 stderr F I1212 16:15:19.441107 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-image-registry, uid: 46fdad02-53ed-489c-bc37-661151dcc55f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.441163567+00:00 stderr F I1212 16:15:19.441147 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicy, namespace: , name: openshift-ingress-operator-gatewayapi-crd-admission, uid: bf5fe191-baad-427b-99ce-a2fea2958a87]" virtual=false 2025-12-12T16:15:19.441251269+00:00 stderr F I1212 16:15:19.440810 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , 
name: openshift-cluster-samples-operator, uid: c06aeb09-cb69-4bbf-8dde-7145ef6be96d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.441292310+00:00 stderr F I1212 16:15:19.441276 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: attachdetach-controller, uid: 6968ff6b-3db4-449b-ad16-12a6be3c8000]" virtual=false 2025-12-12T16:15:19.441416143+00:00 stderr F I1212 16:15:19.441371 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-config, uid: ef6ba319-bb5c-4df3-99ee-e0d4da656faf]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.441426543+00:00 stderr F I1212 16:15:19.441412 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cloud-network-config-controller, name: deployer, uid: bffdd3e4-53a0-4c1c-a7b6-3bb66fa31203]" virtual=false 2025-12-12T16:15:19.441483305+00:00 stderr F I1212 16:15:19.441450 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-ovirt-infra, uid: 96a755c5-7f75-4b72-995e-f9dddfb24440]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.441518095+00:00 stderr F I1212 16:15:19.441485 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator, name: builder, uid: ec911173-d1cc-4f54-b641-30b99ebf4c2e]" virtual=false 2025-12-12T16:15:19.441906965+00:00 stderr F I1212 16:15:19.441883 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-scheduler-operator, uid: e597c7c3-88eb-47f8-b1a0-825e57afdf8a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.441945166+00:00 stderr F I1212 16:15:19.441931 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: node-controller, uid: 3355bcfa-b640-468e-a73c-b84ee6396fcd]" virtual=false 2025-12-12T16:15:19.444615600+00:00 stderr F I1212 16:15:19.444583 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-marketplace, uid: 9165b720-653d-498f-9378-91ee6a28934f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.444667691+00:00 stderr F I1212 16:15:19.444651 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-6, uid: 7fbbc739-803e-4e42-8e9d-30d2b33db770]" virtual=false 
2025-12-12T16:15:19.444720843+00:00 stderr F I1212 16:15:19.444684 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-multus, uid: b98014e1-4b49-40b7-a8a0-5258b064c959]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.444753633+00:00 stderr F I1212 16:15:19.444725 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: multus-ac, uid: 8730bc0a-fa8f-4fcf-9d74-e54b3e4f4363]" virtual=false 2025-12-12T16:15:19.444814255+00:00 stderr F I1212 16:15:19.444778 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-diagnostics, uid: 3991475a-57bf-40e9-9954-a84192d5f5e1]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.444814255+00:00 stderr F I1212 16:15:19.444768 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-machine-api, uid: 79bcd8e8-bf75-45da-8612-c49cfd9d13df]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.444829005+00:00 stderr F I1212 16:15:19.444811 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager-operator, name: builder, uid: 74608fd6-aa80-4f4a-a1c6-0b417b4a94b2]" virtual=false 2025-12-12T16:15:19.444864226+00:00 stderr F I1212 16:15:19.444832 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: deployer, uid: 99cd421d-ded6-4fde-8b07-ab1bdcc36d56]" virtual=false 2025-12-12T16:15:19.444885497+00:00 stderr F I1212 16:15:19.444578 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-operators, uid: bef0fff6-6eed-4771-85c8-e0dd5bdde9e0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.444959328+00:00 stderr F I1212 16:15:19.444942 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: builder, uid: 8230aa49-42ea-46aa-818b-6ae8b93c6a67]" virtual=false 2025-12-12T16:15:19.445160983+00:00 stderr F I1212 16:15:19.445138 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-user-workload-monitoring, uid: 576d3059-0feb-4d0f-8353-b02539f1c62f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.445218505+00:00 stderr F I1212 
16:15:19.445200 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-controller-manager, name: controller-manager, uid: 9ad527d9-1183-453c-8e5c-6dec4324ad58]" virtual=false 2025-12-12T16:15:19.445423349+00:00 stderr F I1212 16:15:19.445401 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-authentication-operator, uid: 6912c967-af53-4ba4-8d3d-98a1cb2e9bdc]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.445459820+00:00 stderr F I1212 16:15:19.445446 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd, name: etcd-backup-sa, uid: 5c7e92ed-fca6-4a76-b1f2-8e10a5724c44]" virtual=false 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.449470 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: attachdetach-controller, uid: 6968ff6b-3db4-449b-ad16-12a6be3c8000]" 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.449519 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: cluster-csr-approver-controller, uid: 63fa0ac0-decc-48c6-8165-57a92f1d5959]" virtual=false 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.449569 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-kube-apiserver-operator, uid: cd464d22-ebb5-4c4d-8bbb-1d343035fd2f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.449603 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: node-bootstrapper, uid: 3841d532-da4c-4bbe-af54-78f84a63d1fd]" virtual=false 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.449470 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager-operator, name: builder, uid: 74608fd6-aa80-4f4a-a1c6-0b417b4a94b2]" 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.449758 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: multus, uid: 8ed5854e-19c6-4934-895c-1ff820fbc84c]" virtual=false 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.449831 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-console, uid: 6c0a940d-a53d-4fc4-ad13-cda3ca9fe0f4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.449855 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: iptables-alerter, uid: 67f6de68-003a-46d5-b769-d3ebce26fdf7]" virtual=false 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.449894 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: builder, uid: 8230aa49-42ea-46aa-818b-6ae8b93c6a67]" 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.449912 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: ttl-after-finished-controller, uid: 3ecc9919-f132-47b5-940a-3826fd2fc3b6]" virtual=false 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.450025 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicy, namespace: , name: openshift-ingress-operator-gatewayapi-crd-admission, uid: bf5fe191-baad-427b-99ce-a2fea2958a87]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.450046 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-canary, name: default, uid: f804373b-a2e1-453e-88a8-4653f85e801a]" virtual=false 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.450131 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-controller-manager, name: controller-manager, uid: 9ad527d9-1183-453c-8e5c-6dec4324ad58]" 2025-12-12T16:15:19.450211475+00:00 stderr F I1212 16:15:19.450144 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver-operator, name: builder, uid: b003ff11-a6f2-4294-9ce0-2d32dbc0937e]" virtual=false 2025-12-12T16:15:19.454305163+00:00 stderr F I1212 16:15:19.454252 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator, name: builder, uid: ec911173-d1cc-4f54-b641-30b99ebf4c2e]" 2025-12-12T16:15:19.454384845+00:00 stderr F I1212 16:15:19.454360 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-authentication-operator, name: metrics, uid: 9d158722-c9d8-4574-9c8d-76aff39b1405]" virtual=false 2025-12-12T16:15:19.454448797+00:00 stderr F I1212 16:15:19.454408 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-service-ca-operator, uid: 6c679b53-4beb-47e3-bcb3-3c8a99b44665]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.454475318+00:00 stderr F I1212 16:15:19.454450 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-user-settings, name: deployer, uid: 
d08a82ce-b43a-49cd-9d77-e1e7b9a41fef]" virtual=false 2025-12-12T16:15:19.454618621+00:00 stderr F I1212 16:15:19.454597 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-canary, name: default, uid: f804373b-a2e1-453e-88a8-4653f85e801a]" 2025-12-12T16:15:19.454655172+00:00 stderr F I1212 16:15:19.454630 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: node-controller, uid: 3355bcfa-b640-468e-a73c-b84ee6396fcd]" 2025-12-12T16:15:19.454676532+00:00 stderr F I1212 16:15:19.454654 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler, name: default, uid: 34ba90e3-6c5e-439e-a67b-eac970fe524a]" virtual=false 2025-12-12T16:15:19.454699173+00:00 stderr F I1212 16:15:19.454669 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cloud-network-config-controller, uid: af3d201f-bde4-4a45-ae31-631a3c175cdc]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.454724524+00:00 stderr F I1212 16:15:19.454700 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator, uid: dab4d42f-1426-4619-af00-f3b882989b05]" virtual=false 2025-12-12T16:15:19.454742904+00:00 stderr F I1212 16:15:19.454640 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-10, uid: abc26192-80da-4229-9267-91d2e7c7eb5f]" virtual=false 2025-12-12T16:15:19.454831766+00:00 stderr F I1212 16:15:19.454805 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: deployer, uid: 99cd421d-ded6-4fde-8b07-ab1bdcc36d56]" 2025-12-12T16:15:19.454846597+00:00 stderr F I1212 16:15:19.454829 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-1, uid: 4768d56f-c284-4d80-a18a-7f2fc97d36f1]" virtual=false 2025-12-12T16:15:19.454880457+00:00 stderr F I1212 16:15:19.454295 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cloud-network-config-controller, name: deployer, uid: bffdd3e4-53a0-4c1c-a7b6-3bb66fa31203]" 2025-12-12T16:15:19.454916958+00:00 stderr F I1212 16:15:19.454902 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-node-lease, name: deployer, uid: 8e233062-664d-460d-8b67-92708f75ac0e]" virtual=false 2025-12-12T16:15:19.455009900+00:00 stderr F I1212 16:15:19.454979 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-cloud-platform-infra, uid: 1d3ed5f4-a054-412f-81c5-af793a208a1f]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.455020041+00:00 stderr F I1212 16:15:19.455008 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-version, name: builder, uid: 30c745f9-96b1-4602-a9e9-08b77d9bfa46]" virtual=false 2025-12-12T16:15:19.455049261+00:00 stderr F I1212 16:15:19.454873 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-6, uid: 7fbbc739-803e-4e42-8e9d-30d2b33db770]" 2025-12-12T16:15:19.455090692+00:00 stderr F I1212 16:15:19.455059 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: node-bootstrapper, uid: 3841d532-da4c-4bbe-af54-78f84a63d1fd]" 2025-12-12T16:15:19.455211275+00:00 stderr F I1212 16:15:19.455164 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-network-operator, uid: faea3de1-356e-4f94-8518-0aca76aba835]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.455265707+00:00 stderr F I1212 16:15:19.455245 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: pruner, uid: 2f88f37f-e5cf-4c37-b18a-4197a4cdaad7]" virtual=false 2025-12-12T16:15:19.455357559+00:00 stderr F I1212 16:15:19.455332 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd, name: etcd-backup-sa, uid: 5c7e92ed-fca6-4a76-b1f2-8e10a5724c44]" 2025-12-12T16:15:19.455365929+00:00 stderr F I1212 16:15:19.455341 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver-operator, name: builder, uid: b003ff11-a6f2-4294-9ce0-2d32dbc0937e]" 2025-12-12T16:15:19.455386780+00:00 stderr F I1212 16:15:19.455359 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kni-infra, name: deployer, uid: 0c4c21d2-5525-4487-aede-378a6b890668]" virtual=false 2025-12-12T16:15:19.455386780+00:00 stderr F I1212 16:15:19.455374 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-operator, uid: a990d1c8-6964-4853-af7b-2ce1eec0c42d]" virtual=false 2025-12-12T16:15:19.455444011+00:00 stderr F I1212 16:15:19.455209 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: cluster-csr-approver-controller, uid: 63fa0ac0-decc-48c6-8165-57a92f1d5959]" 2025-12-12T16:15:19.455454951+00:00 stderr F I1212 16:15:19.455437 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca, name: service-ca, uid: 
21659ae5-e446-496e-b437-26ee7996654c]" virtual=false 2025-12-12T16:15:19.455486272+00:00 stderr F I1212 16:15:19.455071 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager, name: builder, uid: b9ad08ba-c898-498f-9515-ea7a023d885c]" virtual=false 2025-12-12T16:15:19.455568164+00:00 stderr F I1212 16:15:19.455206 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: ttl-after-finished-controller, uid: 3ecc9919-f132-47b5-940a-3826fd2fc3b6]" 2025-12-12T16:15:19.455607475+00:00 stderr F I1212 16:15:19.455591 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-operator, name: deployer, uid: 73934604-7573-45bd-89b7-75f566b9a856]" virtual=false 2025-12-12T16:15:19.455645366+00:00 stderr F I1212 16:15:19.455622 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager-operator, name: builder, uid: f6d1b89d-2c58-40a5-b998-a82962dad07c]" virtual=false 2025-12-12T16:15:19.455708897+00:00 stderr F I1212 16:15:19.454782 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Namespace, namespace: , name: openshift-config-operator, uid: 1dc5d509-40a4-4bd8-a98c-cfe8b82b4f57]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.455717037+00:00 stderr F I1212 16:15:19.455705 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager-operator, name: deployer, uid: 27ba4832-30a5-4a65-b948-d4315002c6a6]" virtual=false 2025-12-12T16:15:19.458801442+00:00 stderr F I1212 16:15:19.458738 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kni-infra, name: deployer, uid: 0c4c21d2-5525-4487-aede-378a6b890668]" 2025-12-12T16:15:19.458801442+00:00 stderr F I1212 16:15:19.458761 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager-operator, name: builder, uid: f6d1b89d-2c58-40a5-b998-a82962dad07c]" 2025-12-12T16:15:19.458801442+00:00 stderr F I1212 16:15:19.458789 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca, name: service-ca, uid: 21659ae5-e446-496e-b437-26ee7996654c]" 2025-12-12T16:15:19.458828952+00:00 stderr F I1212 16:15:19.458791 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-5, uid: c91d53ac-6902-4aea-bc79-72ae21f68713]" virtual=false 2025-12-12T16:15:19.458978336+00:00 stderr F I1212 16:15:19.458928 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-operator, name: deployer, uid: 73934604-7573-45bd-89b7-75f566b9a856]" 
2025-12-12T16:15:19.458978336+00:00 stderr F I1212 16:15:19.458961 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-operator, name: openshift-config-operator, uid: bde8337f-c80d-4cd7-8e1c-8853e023fdb8]" virtual=false 2025-12-12T16:15:19.459034847+00:00 stderr F I1212 16:15:19.458799 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: builder, uid: 41e1a7f9-ba62-44a4-b535-61e07661f89c]" virtual=false 2025-12-12T16:15:19.459098069+00:00 stderr F I1212 16:15:19.459071 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-10, uid: abc26192-80da-4229-9267-91d2e7c7eb5f]" 2025-12-12T16:15:19.459108479+00:00 stderr F I1212 16:15:19.459093 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd, name: revision-status-2, uid: c41b0f0a-724d-401f-b8bd-abaae84f2c11]" virtual=false 2025-12-12T16:15:19.459163990+00:00 stderr F I1212 16:15:19.458753 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-1, uid: 4768d56f-c284-4d80-a18a-7f2fc97d36f1]" 2025-12-12T16:15:19.459194441+00:00 stderr F I1212 16:15:19.459156 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: podsecurity-admission-label-syncer-controller, uid: bca97e5e-d0d8-4c5a-bfdd-c504f693249a]" virtual=false 2025-12-12T16:15:19.459231302+00:00 stderr F I1212 16:15:19.458765 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver, name: installer-sa, uid: 2d96571f-07cb-4d3b-bfac-0fabca946128]" virtual=false 2025-12-12T16:15:19.459404026+00:00 stderr F I1212 16:15:19.459358 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler, name: default, uid: 34ba90e3-6c5e-439e-a67b-eac970fe524a]" 2025-12-12T16:15:19.459404026+00:00 stderr F I1212 16:15:19.459383 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-node-lease, name: default, uid: d9f4e90f-b5c8-4648-9667-8756cb55b5d7]" virtual=false 2025-12-12T16:15:19.459459478+00:00 stderr F I1212 16:15:19.459432 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager, name: builder, uid: b9ad08ba-c898-498f-9515-ea7a023d885c]" 2025-12-12T16:15:19.459505619+00:00 stderr F I1212 16:15:19.459478 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication, name: default, uid: 4750690e-0c8b-491f-b998-69169ddd243f]" virtual=false 2025-12-12T16:15:19.459514949+00:00 stderr F I1212 16:15:19.459491 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicy, namespace: , 
name: user-defined-networks-namespace-label, uid: 0f7de771-84f1-494d-ab22-4f5de9678262]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.459529309+00:00 stderr F I1212 16:15:19.459518 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: builder, uid: 5b589cef-0493-4f73-af95-5fe83afd7105]" virtual=false 2025-12-12T16:15:19.459646682+00:00 stderr F I1212 16:15:19.459608 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: multus, uid: 8ed5854e-19c6-4934-895c-1ff820fbc84c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.459646682+00:00 stderr F I1212 16:15:19.459632 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: machine-config-server, uid: 49c42c21-31e5-48a2-98d0-b5a4558999a6]" virtual=false 2025-12-12T16:15:19.461473396+00:00 stderr F I1212 16:15:19.461414 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: multus-ac, uid: 8730bc0a-fa8f-4fcf-9d74-e54b3e4f4363]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.461473396+00:00 stderr F I1212 16:15:19.461447 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-monitoring, name: deployer, uid: fea4ac7c-a289-46cc-aeb8-859d4badb806]" virtual=false 2025-12-12T16:15:19.461595799+00:00 stderr F I1212 16:15:19.461550 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-user-settings, name: deployer, uid: d08a82ce-b43a-49cd-9d77-e1e7b9a41fef]" 2025-12-12T16:15:19.461595799+00:00 stderr F I1212 16:15:19.461572 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: daemon-set-controller, uid: 5286a185-4b83-40c2-9098-07ec4294efae]" virtual=false 2025-12-12T16:15:19.461741243+00:00 stderr F I1212 16:15:19.461694 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: pruner, uid: 2f88f37f-e5cf-4c37-b18a-4197a4cdaad7]" 2025-12-12T16:15:19.461741243+00:00 stderr F I1212 16:15:19.461718 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-node-identity, name: builder, uid: 98afd01c-22e1-4a9e-93cc-230323e7b742]" virtual=false 2025-12-12T16:15:19.461887946+00:00 stderr F I1212 16:15:19.461842 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-node-lease, name: 
deployer, uid: 8e233062-664d-460d-8b67-92708f75ac0e]" 2025-12-12T16:15:19.461887946+00:00 stderr F I1212 16:15:19.461861 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-node-identity, name: deployer, uid: cc3f31d4-968e-4a1d-a628-98fdafdf6d5d]" virtual=false 2025-12-12T16:15:19.462540742+00:00 stderr F I1212 16:15:19.462070 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-version, name: builder, uid: 30c745f9-96b1-4602-a9e9-08b77d9bfa46]" 2025-12-12T16:15:19.462540742+00:00 stderr F I1212 16:15:19.462092 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-node, name: builder, uid: 77ae6968-b99d-48e1-a5b8-d503785185e9]" virtual=false 2025-12-12T16:15:19.462540742+00:00 stderr F I1212 16:15:19.462261 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager-operator, name: deployer, uid: 27ba4832-30a5-4a65-b948-d4315002c6a6]" 2025-12-12T16:15:19.462540742+00:00 stderr F I1212 16:15:19.462275 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-console, name: console, uid: ec398901-ff88-4cfa-a0a2-0ea4b56db3c6]" virtual=false 2025-12-12T16:15:19.462614884+00:00 stderr F I1212 16:15:19.462579 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator, uid: dab4d42f-1426-4619-af00-f3b882989b05]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.462614884+00:00 stderr F I1212 16:15:19.462605 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-8, uid: 389efd57-822c-4936-b5ef-48f1f85dba11]" virtual=false 2025-12-12T16:15:19.462814749+00:00 stderr F I1212 16:15:19.462772 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: iptables-alerter, uid: 67f6de68-003a-46d5-b769-d3ebce26fdf7]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.462814749+00:00 stderr F I1212 16:15:19.462799 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-9, uid: eb1cf43c-e507-46f9-97cb-9059b8b2bdd3]" virtual=false 2025-12-12T16:15:19.464847177+00:00 stderr F I1212 16:15:19.464764 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-authentication-operator, name: metrics, uid: 9d158722-c9d8-4574-9c8d-76aff39b1405]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.464847177+00:00 stderr F I1212 16:15:19.464802 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift, name: deployer, uid: 21b98e04-9608-4aa4-bfeb-28654b0bf211]" virtual=false 2025-12-12T16:15:19.465095683+00:00 stderr F I1212 16:15:19.464973 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd, name: revision-status-2, uid: c41b0f0a-724d-401f-b8bd-abaae84f2c11]" 2025-12-12T16:15:19.465095683+00:00 stderr F I1212 16:15:19.464996 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-machine-approver, name: builder, uid: c56aa2a5-1091-4819-a115-cd7706917838]" virtual=false 2025-12-12T16:15:19.465158065+00:00 stderr F I1212 16:15:19.465131 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver, name: installer-sa, uid: 2d96571f-07cb-4d3b-bfac-0fabca946128]" 2025-12-12T16:15:19.465195896+00:00 stderr F I1212 16:15:19.465154 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-etcd, name: etcd, uid: 053c6064-de9c-4946-a066-c34d3c19a2bc]" virtual=false 2025-12-12T16:15:19.465387350+00:00 stderr F I1212 16:15:19.465341 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: podsecurity-admission-label-syncer-controller, uid: bca97e5e-d0d8-4c5a-bfdd-c504f693249a]" 2025-12-12T16:15:19.465407941+00:00 stderr F I1212 16:15:19.465366 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver-operator, name: deployer, uid: bd81c6a1-7155-4ca5-a869-31d1faf5e3a0]" virtual=false 2025-12-12T16:15:19.465549254+00:00 stderr F I1212 16:15:19.465509 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-node-lease, name: default, uid: d9f4e90f-b5c8-4648-9667-8756cb55b5d7]" 2025-12-12T16:15:19.465549254+00:00 stderr F I1212 16:15:19.465532 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator, name: default, uid: f4608606-d88c-4ec8-89ee-21f93c136594]" virtual=false 2025-12-12T16:15:19.465664887+00:00 stderr F I1212 16:15:19.465623 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication, name: default, uid: 4750690e-0c8b-491f-b998-69169ddd243f]" 2025-12-12T16:15:19.465664887+00:00 stderr F I1212 16:15:19.465646 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: deployer, uid: c584619d-8d7f-4592-a18b-91bce24258ef]" virtual=false 2025-12-12T16:15:19.466035836+00:00 stderr F I1212 16:15:19.465748 1 garbagecollector.go:548] "item doesn't have an owner, 
continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: builder, uid: 5b589cef-0493-4f73-af95-5fe83afd7105]" 2025-12-12T16:15:19.466035836+00:00 stderr F I1212 16:15:19.465769 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: certificate-controller, uid: 4dcb62bd-673a-401c-b9fc-796c8546ed21]" virtual=false 2025-12-12T16:15:19.466035836+00:00 stderr F I1212 16:15:19.465795 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: builder, uid: 41e1a7f9-ba62-44a4-b535-61e07661f89c]" 2025-12-12T16:15:19.466035836+00:00 stderr F I1212 16:15:19.465839 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: registry, uid: 2bb6e75e-26ef-4d59-985f-240fe5c7eab9]" virtual=false 2025-12-12T16:15:19.466035836+00:00 stderr F I1212 16:15:19.465872 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: machine-config-server, uid: 49c42c21-31e5-48a2-98d0-b5a4558999a6]" 2025-12-12T16:15:19.466035836+00:00 stderr F I1212 16:15:19.465890 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager-operator, name: default, uid: 1e020d8e-2d67-4a6e-8549-c09b8401a246]" virtual=false 2025-12-12T16:15:19.466035836+00:00 stderr F I1212 16:15:19.465981 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-monitoring, name: deployer, uid: fea4ac7c-a289-46cc-aeb8-859d4badb806]" 2025-12-12T16:15:19.466035836+00:00 stderr F I1212 16:15:19.465993 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: builder, uid: 535dee8a-3b2f-4921-89dd-12724351ac12]" virtual=false 2025-12-12T16:15:19.466196720+00:00 stderr F I1212 16:15:19.466161 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-console, name: console, uid: ec398901-ff88-4cfa-a0a2-0ea4b56db3c6]" 2025-12-12T16:15:19.466294042+00:00 stderr F I1212 16:15:19.466270 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: endpointslice-controller, uid: 256f8856-13ea-4fda-bd71-93e35c11ea76]" virtual=false 2025-12-12T16:15:19.466294042+00:00 stderr F I1212 16:15:19.466269 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-node-identity, name: builder, uid: 98afd01c-22e1-4a9e-93cc-230323e7b742]" 2025-12-12T16:15:19.466321113+00:00 stderr F I1212 16:15:19.466295 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console, name: builder, uid: 1fa2d12d-ea3b-4db9-88a2-07f56ec49bb0]" virtual=false 2025-12-12T16:15:19.466419355+00:00 stderr F I1212 16:15:19.466396 1 garbagecollector.go:548] "item 
doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-node, name: builder, uid: 77ae6968-b99d-48e1-a5b8-d503785185e9]" 2025-12-12T16:15:19.466446486+00:00 stderr F I1212 16:15:19.466426 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress, name: default, uid: 96e46814-f893-4d12-b45c-187d516b4df8]" virtual=false 2025-12-12T16:15:19.466495167+00:00 stderr F I1212 16:15:19.466394 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: daemon-set-controller, uid: 5286a185-4b83-40c2-9098-07ec4294efae]" 2025-12-12T16:15:19.466545078+00:00 stderr F I1212 16:15:19.466522 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-11, uid: c121b5bf-996c-44ca-8254-1a98965ff795]" virtual=false 2025-12-12T16:15:19.467027310+00:00 stderr F I1212 16:15:19.466786 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-node-identity, name: deployer, uid: cc3f31d4-968e-4a1d-a628-98fdafdf6d5d]" 2025-12-12T16:15:19.467027310+00:00 stderr F I1212 16:15:19.466812 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator-operator, name: deployer, uid: b11864d9-2918-448f-8b39-4aa70f8e52eb]" virtual=false 2025-12-12T16:15:19.467027310+00:00 stderr F I1212 16:15:19.466879 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-5, uid: c91d53ac-6902-4aea-bc79-72ae21f68713]" 2025-12-12T16:15:19.467027310+00:00 stderr F I1212 16:15:19.466898 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator, name: deployer, uid: 2f7baea1-de2c-48b2-a1a9-0f5ec6b8701e]" virtual=false 2025-12-12T16:15:19.467027310+00:00 stderr F I1212 16:15:19.466878 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-machine-approver, name: builder, uid: c56aa2a5-1091-4819-a115-cd7706917838]" 2025-12-12T16:15:19.467027310+00:00 stderr F I1212 16:15:19.467001 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-operator, uid: a990d1c8-6964-4853-af7b-2ce1eec0c42d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.467070211+00:00 stderr F I1212 16:15:19.467039 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca-operator, name: service-ca-operator, uid: c63e31c2-61a6-4920-b002-afb13dcebab4]" virtual=false 2025-12-12T16:15:19.467147593+00:00 stderr F I1212 16:15:19.467116 1 garbagecollector.go:548] "item doesn't have an owner, continue on next 
item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-etcd, name: etcd, uid: 053c6064-de9c-4946-a066-c34d3c19a2bc]" 2025-12-12T16:15:19.467147593+00:00 stderr F I1212 16:15:19.467017 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: builder, uid: f54a854d-010d-4fb2-8a51-cf377b2b0778]" virtual=false 2025-12-12T16:15:19.467191464+00:00 stderr F I1212 16:15:19.467145 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication, name: builder, uid: 26ead4a8-7e03-4515-9b9f-d0bf058e93a8]" virtual=false 2025-12-12T16:15:19.467219615+00:00 stderr F I1212 16:15:19.467196 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift, name: deployer, uid: 21b98e04-9608-4aa4-bfeb-28654b0bf211]" 2025-12-12T16:15:19.467239445+00:00 stderr F I1212 16:15:19.467221 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-openstack-infra, name: deployer, uid: 4b384042-32f4-458b-9760-11d0021a0497]" virtual=false 2025-12-12T16:15:19.467323797+00:00 stderr F I1212 16:15:19.467302 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-9, uid: eb1cf43c-e507-46f9-97cb-9059b8b2bdd3]" 2025-12-12T16:15:19.467339127+00:00 stderr F I1212 16:15:19.467324 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-service-ca-operator, name: metrics, uid: fda33e4c-938e-4672-b83c-6f709efbd0d6]" virtual=false 2025-12-12T16:15:19.468000253+00:00 stderr F I1212 16:15:19.467959 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-8, uid: 389efd57-822c-4936-b5ef-48f1f85dba11]" 2025-12-12T16:15:19.468000253+00:00 stderr F I1212 16:15:19.467985 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver-operator, name: builder, uid: 141754eb-3b33-4705-88f3-c88de6c55bd2]" virtual=false 2025-12-12T16:15:19.468161977+00:00 stderr F I1212 16:15:19.468128 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver-operator, name: deployer, uid: bd81c6a1-7155-4ca5-a869-31d1faf5e3a0]" 2025-12-12T16:15:19.468194238+00:00 stderr F I1212 16:15:19.468154 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-6, uid: 9ceb9b72-2527-47c6-ae4e-f1d92f0aee7d]" virtual=false 2025-12-12T16:15:19.469723255+00:00 stderr F I1212 16:15:19.469453 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: builder, uid: 535dee8a-3b2f-4921-89dd-12724351ac12]" 2025-12-12T16:15:19.469723255+00:00 stderr F I1212 16:15:19.469478 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: endpoint-controller, uid: a3ede342-01d8-4abe-bad8-245d175763a0]" virtual=false 2025-12-12T16:15:19.469723255+00:00 stderr F I1212 16:15:19.469613 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: certificate-controller, uid: 4dcb62bd-673a-401c-b9fc-796c8546ed21]" 2025-12-12T16:15:19.469723255+00:00 stderr F I1212 16:15:19.469626 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: default, uid: 75534293-587d-4dcf-8e0f-fd50688bde1a]" virtual=false 2025-12-12T16:15:19.469765866+00:00 stderr F I1212 16:15:19.469721 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator, name: default, uid: f4608606-d88c-4ec8-89ee-21f93c136594]" 2025-12-12T16:15:19.469765866+00:00 stderr F I1212 16:15:19.469733 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: default, name: deployer, uid: 59cee1be-932f-478e-b7d2-f409aaad4cee]" virtual=false 2025-12-12T16:15:19.469844528+00:00 stderr F I1212 16:15:19.469813 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: deployer, uid: c584619d-8d7f-4592-a18b-91bce24258ef]" 2025-12-12T16:15:19.469856108+00:00 stderr F I1212 16:15:19.469839 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-machine-approver, name: machine-approver-sa, uid: 9d2e2fd5-f689-4ff8-bed1-c7547bf698d4]" virtual=false 2025-12-12T16:15:19.470321769+00:00 stderr F I1212 16:15:19.469963 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-6, uid: 9ceb9b72-2527-47c6-ae4e-f1d92f0aee7d]" 2025-12-12T16:15:19.470321769+00:00 stderr F I1212 16:15:19.469971 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: endpointslice-controller, uid: 256f8856-13ea-4fda-bd71-93e35c11ea76]" 2025-12-12T16:15:19.470321769+00:00 stderr F I1212 16:15:19.469980 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager-operator, name: default, uid: 1ca9819b-33bf-4c12-bb80-ca69ea6953f3]" virtual=false 2025-12-12T16:15:19.470321769+00:00 stderr F I1212 16:15:19.469989 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-vsphere-infra, name: builder, uid: e1b42a9f-097e-44db-96a5-7014216c9a84]" virtual=false 2025-12-12T16:15:19.470321769+00:00 stderr F I1212 16:15:19.470112 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator-operator, name: deployer, uid: b11864d9-2918-448f-8b39-4aa70f8e52eb]" 
2025-12-12T16:15:19.470321769+00:00 stderr F I1212 16:15:19.470128 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-vsphere-infra, name: default, uid: 4819c0d7-85c1-48eb-9fc3-5faf6bcb2879]" virtual=false 2025-12-12T16:15:19.470321769+00:00 stderr F I1212 16:15:19.470219 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver-operator, name: builder, uid: 141754eb-3b33-4705-88f3-c88de6c55bd2]" 2025-12-12T16:15:19.470321769+00:00 stderr F I1212 16:15:19.470238 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: service-ca-cert-publisher, uid: bd10e027-a312-4be4-ae7f-3a8b9df9a1cf]" virtual=false 2025-12-12T16:15:19.470357760+00:00 stderr F I1212 16:15:19.470320 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-11, uid: c121b5bf-996c-44ca-8254-1a98965ff795]" 2025-12-12T16:15:19.470357760+00:00 stderr F I1212 16:15:19.470336 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-storage-operator, name: deployer, uid: e85511fa-055a-4a09-ab9a-df3a1556f403]" virtual=false 2025-12-12T16:15:19.470432642+00:00 stderr F I1212 16:15:19.470140 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console, name: builder, uid: 1fa2d12d-ea3b-4db9-88a2-07f56ec49bb0]" 2025-12-12T16:15:19.470432642+00:00 stderr F I1212 16:15:19.470414 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: template-instance-controller, uid: fb858338-9c08-4096-a4d6-425d6a71dff3]" virtual=false 2025-12-12T16:15:19.470619636+00:00 stderr F I1212 16:15:19.470572 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager-operator, name: default, uid: 1e020d8e-2d67-4a6e-8549-c09b8401a246]" 2025-12-12T16:15:19.470619636+00:00 stderr F I1212 16:15:19.470585 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication, name: builder, uid: 26ead4a8-7e03-4515-9b9f-d0bf058e93a8]" 2025-12-12T16:15:19.470619636+00:00 stderr F I1212 16:15:19.470592 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-2, uid: 8901d31a-3867-4570-b0e5-ae6a39cff91d]" virtual=false 2025-12-12T16:15:19.470619636+00:00 stderr F I1212 16:15:19.470598 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-dns-operator, name: metrics, uid: 7038e591-b4c2-4e44-9589-0decde72039e]" virtual=false 2025-12-12T16:15:19.470703799+00:00 stderr F I1212 16:15:19.470679 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress, name: 
default, uid: 96e46814-f893-4d12-b45c-187d516b4df8]" 2025-12-12T16:15:19.470703799+00:00 stderr F I1212 16:15:19.470694 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns, name: builder, uid: 52a5ceba-610e-4157-b81b-fab5cec351a1]" virtual=false 2025-12-12T16:15:19.470749210+00:00 stderr F I1212 16:15:19.470726 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: registry, uid: 2bb6e75e-26ef-4d59-985f-240fe5c7eab9]" 2025-12-12T16:15:19.470749210+00:00 stderr F I1212 16:15:19.470742 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Secret, namespace: openshift-etcd-operator, name: etcd-client, uid: 3f64a35c-cf8e-418c-a4a0-bf68cea2beb1]" virtual=false 2025-12-12T16:15:19.470873093+00:00 stderr F I1212 16:15:19.470836 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-operator, name: openshift-config-operator, uid: bde8337f-c80d-4cd7-8e1c-8853e023fdb8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.470885823+00:00 stderr F I1212 16:15:19.470868 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-4, uid: 3c0c45f7-a4ba-4891-8b19-1900e7bfb64c]" virtual=false 2025-12-12T16:15:19.471069437+00:00 stderr F I1212 16:15:19.471030 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator, name: deployer, uid: 2f7baea1-de2c-48b2-a1a9-0f5ec6b8701e]" 2025-12-12T16:15:19.471142049+00:00 stderr F I1212 16:15:19.471111 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-config-operator, name: machine-config-daemon, uid: 09b33468-17e9-49a7-b6f4-38686a6730e9]" virtual=false 2025-12-12T16:15:19.472388479+00:00 stderr F I1212 16:15:19.472336 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-openstack-infra, name: deployer, uid: 4b384042-32f4-458b-9760-11d0021a0497]" 2025-12-12T16:15:19.472388479+00:00 stderr F I1212 16:15:19.472364 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operators, name: builder, uid: f1c95839-cff2-4f0b-9cc9-cfc42629ac11]" virtual=false 2025-12-12T16:15:19.472709057+00:00 stderr F I1212 16:15:19.472584 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: builder, uid: f54a854d-010d-4fb2-8a51-cf377b2b0778]" 2025-12-12T16:15:19.472709057+00:00 stderr F I1212 16:15:19.472603 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-node, uid: a1d81936-4e38-446d-819b-61c0b05df947]" virtual=false 2025-12-12T16:15:19.472726057+00:00 
stderr F I1212 16:15:19.472714 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: endpoint-controller, uid: a3ede342-01d8-4abe-bad8-245d175763a0]" 2025-12-12T16:15:19.472751788+00:00 stderr F I1212 16:15:19.472728 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-3, uid: ae4fdaf7-2628-4d35-8d24-e03e4228a048]" virtual=false 2025-12-12T16:15:19.472889831+00:00 stderr F I1212 16:15:19.472823 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: default, name: deployer, uid: 59cee1be-932f-478e-b7d2-f409aaad4cee]" 2025-12-12T16:15:19.472889831+00:00 stderr F I1212 16:15:19.472841 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-operator, name: metrics, uid: 62887ae0-bdbb-4eca-867b-a51f8a3fa46b]" virtual=false 2025-12-12T16:15:19.472999944+00:00 stderr F I1212 16:15:19.472973 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-vsphere-infra, name: builder, uid: e1b42a9f-097e-44db-96a5-7014216c9a84]" 2025-12-12T16:15:19.472999944+00:00 stderr F I1212 16:15:19.472987 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operators, name: deployer, uid: 87f6c7e9-8f98-4ad0-9fa2-f53dc9bc5c5e]" virtual=false 2025-12-12T16:15:19.473101056+00:00 stderr F I1212 16:15:19.473072 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: default, uid: 75534293-587d-4dcf-8e0f-fd50688bde1a]" 2025-12-12T16:15:19.473101056+00:00 stderr F I1212 16:15:19.473087 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: serviceaccount-controller, uid: d0fd0fda-69d2-4c6f-8617-5d28edec2d50]" virtual=false 2025-12-12T16:15:19.473265550+00:00 stderr F I1212 16:15:19.473233 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-storage-operator, name: deployer, uid: e85511fa-055a-4a09-ab9a-df3a1556f403]" 2025-12-12T16:15:19.473265550+00:00 stderr F I1212 16:15:19.473251 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-route-controller-manager, name: builder, uid: 4d5f18cf-0ea1-4661-9219-294422bd4361]" virtual=false 2025-12-12T16:15:19.473436184+00:00 stderr F I1212 16:15:19.473404 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: service-ca-cert-publisher, uid: bd10e027-a312-4be4-ae7f-3a8b9df9a1cf]" 2025-12-12T16:15:19.473436184+00:00 stderr F I1212 16:15:19.473422 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca-operator, name: deployer, uid: 1e29eb01-b81d-423f-be31-d0ab89b33598]" virtual=false 
2025-12-12T16:15:19.473562917+00:00 stderr F I1212 16:15:19.473532 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager-operator, name: default, uid: 1ca9819b-33bf-4c12-bb80-ca69ea6953f3]" 2025-12-12T16:15:19.473562917+00:00 stderr F I1212 16:15:19.473552 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-machine-approver, name: deployer, uid: 7b61d502-c32a-4f7c-92ea-7d55dcdd294d]" virtual=false 2025-12-12T16:15:19.473669050+00:00 stderr F I1212 16:15:19.473641 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-vsphere-infra, name: default, uid: 4819c0d7-85c1-48eb-9fc3-5faf6bcb2879]" 2025-12-12T16:15:19.473669050+00:00 stderr F I1212 16:15:19.473657 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-monitoring, name: default, uid: 4227edbe-7eba-4d20-8e34-86220ddd361c]" virtual=false 2025-12-12T16:15:19.473875345+00:00 stderr F I1212 16:15:19.473843 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns, name: builder, uid: 52a5ceba-610e-4157-b81b-fab5cec351a1]" 2025-12-12T16:15:19.473875345+00:00 stderr F I1212 16:15:19.473858 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-canary, name: deployer, uid: c52bc5c3-9e4f-4837-bfbc-be7e88fb7589]" virtual=false 2025-12-12T16:15:19.474007058+00:00 stderr F I1212 16:15:19.473978 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: template-instance-controller, uid: fb858338-9c08-4096-a4d6-425d6a71dff3]" 2025-12-12T16:15:19.474007058+00:00 stderr F I1212 16:15:19.473994 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-control-plane, uid: 502d0892-a804-4fae-ad3f-cb342f49e7aa]" virtual=false 2025-12-12T16:15:19.474359877+00:00 stderr F I1212 16:15:19.474311 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-2, uid: 8901d31a-3867-4570-b0e5-ae6a39cff91d]" 2025-12-12T16:15:19.474359877+00:00 stderr F I1212 16:15:19.474335 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca, name: default, uid: eb652dd7-7f1c-4207-bf18-7b4f57f0e355]" virtual=false 2025-12-12T16:15:19.474488980+00:00 stderr F I1212 16:15:19.474451 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-4, uid: 3c0c45f7-a4ba-4891-8b19-1900e7bfb64c]" 2025-12-12T16:15:19.474488980+00:00 stderr F I1212 16:15:19.474470 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: redhat-marketplace, 
uid: 4349139a-211f-4b82-a867-d0f135c54aa4]" virtual=false 2025-12-12T16:15:19.474667564+00:00 stderr F I1212 16:15:19.474579 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca-operator, name: service-ca-operator, uid: c63e31c2-61a6-4920-b002-afb13dcebab4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.474667564+00:00 stderr F I1212 16:15:19.474607 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-console, name: deployer, uid: 93625b35-b76b-43d4-a090-a41dcee374f8]" virtual=false 2025-12-12T16:15:19.474978852+00:00 stderr F I1212 16:15:19.474935 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operators, name: builder, uid: f1c95839-cff2-4f0b-9cc9-cfc42629ac11]" 2025-12-12T16:15:19.474978852+00:00 stderr F I1212 16:15:19.474956 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-nutanix-infra, name: builder, uid: b522b271-bbe5-4154-8cd2-270b129c956a]" virtual=false 2025-12-12T16:15:19.475268789+00:00 stderr F I1212 16:15:19.475232 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-3, uid: ae4fdaf7-2628-4d35-8d24-e03e4228a048]" 2025-12-12T16:15:19.475268789+00:00 stderr F I1212 16:15:19.475251 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: generic-garbage-collector, uid: 59a1862f-75d5-4511-b116-6e331f654ef5]" virtual=false 2025-12-12T16:15:19.476336594+00:00 stderr F I1212 16:15:19.475387 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-service-ca-operator, name: metrics, uid: fda33e4c-938e-4672-b83c-6f709efbd0d6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.476336594+00:00 stderr F I1212 16:15:19.475407 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: namespace-controller, uid: 696c89b6-8579-4031-8b5d-ec42982f7b7f]" virtual=false 2025-12-12T16:15:19.477264517+00:00 stderr F I1212 16:15:19.477227 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-route-controller-manager, name: builder, uid: 4d5f18cf-0ea1-4661-9219-294422bd4361]" 2025-12-12T16:15:19.477264517+00:00 stderr F I1212 16:15:19.477247 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns, name: default, uid: 41b145ee-82ad-44b2-a3ab-5aa6e5bc7b66]" virtual=false 2025-12-12T16:15:19.477305218+00:00 stderr F I1212 16:15:19.477213 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" 
item="[v1/ServiceAccount, namespace: openshift-operators, name: deployer, uid: 87f6c7e9-8f98-4ad0-9fa2-f53dc9bc5c5e]" 2025-12-12T16:15:19.477380579+00:00 stderr F I1212 16:15:19.477345 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver-operator, name: default, uid: 6ea81864-1580-468b-affd-dc4ea77910fd]" virtual=false 2025-12-12T16:15:19.477429731+00:00 stderr F I1212 16:15:19.477404 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-monitoring, name: default, uid: 4227edbe-7eba-4d20-8e34-86220ddd361c]" 2025-12-12T16:15:19.477440251+00:00 stderr F I1212 16:15:19.477422 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication-operator, name: default, uid: 2e4de228-a792-4e8b-9c08-41f2db6ea4ab]" virtual=false 2025-12-12T16:15:19.477566154+00:00 stderr F I1212 16:15:19.477538 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-console, name: deployer, uid: 93625b35-b76b-43d4-a090-a41dcee374f8]" 2025-12-12T16:15:19.477566154+00:00 stderr F I1212 16:15:19.477555 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd, name: default, uid: c532884c-2f3d-4df5-a458-fdfd553f285c]" virtual=false 2025-12-12T16:15:19.477803340+00:00 stderr F I1212 16:15:19.477777 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-machine-approver, name: deployer, uid: 7b61d502-c32a-4f7c-92ea-7d55dcdd294d]" 2025-12-12T16:15:19.477867811+00:00 stderr F I1212 16:15:19.477844 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver, name: deployer, uid: c5a8b010-8e77-4249-854a-5bd6ebf0714a]" virtual=false 2025-12-12T16:15:19.477936573+00:00 stderr F I1212 16:15:19.477908 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: namespace-controller, uid: 696c89b6-8579-4031-8b5d-ec42982f7b7f]" 2025-12-12T16:15:19.477947603+00:00 stderr F I1212 16:15:19.477930 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns-operator, name: dns-operator, uid: ededd462-781c-45db-afa8-736b78e4df88]" virtual=false 2025-12-12T16:15:19.478089546+00:00 stderr F I1212 16:15:19.478062 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-canary, name: deployer, uid: c52bc5c3-9e4f-4837-bfbc-be7e88fb7589]" 2025-12-12T16:15:19.478089546+00:00 stderr F I1212 16:15:19.478078 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: deployer, uid: 26ccb77a-9190-41d3-81ec-a3448b0ac222]" virtual=false 2025-12-12T16:15:19.478128957+00:00 stderr F I1212 16:15:19.478107 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" 
logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: serviceaccount-controller, uid: d0fd0fda-69d2-4c6f-8617-5d28edec2d50]" 2025-12-12T16:15:19.478197579+00:00 stderr F I1212 16:15:19.478154 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-multus, name: network-metrics-service, uid: 221650ca-5ea9-450a-b2e3-01b15c386136]" virtual=false 2025-12-12T16:15:19.478250730+00:00 stderr F I1212 16:15:19.478226 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca, name: default, uid: eb652dd7-7f1c-4207-bf18-7b4f57f0e355]" 2025-12-12T16:15:19.478260861+00:00 stderr F I1212 16:15:19.478245 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: default, uid: c653164a-e793-44b0-b9cf-75230bf56767]" virtual=false 2025-12-12T16:15:19.478342733+00:00 stderr F I1212 16:15:19.478168 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca-operator, name: deployer, uid: 1e29eb01-b81d-423f-be31-d0ab89b33598]" 2025-12-12T16:15:19.478392744+00:00 stderr F I1212 16:15:19.478371 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-ingress, name: router-default, uid: 6445a1ec-8ec2-4ec8-b191-9cc7fa235148]" virtual=false 2025-12-12T16:15:19.478624049+00:00 stderr F I1212 16:15:19.478587 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-machine-approver, name: machine-approver-sa, uid: 9d2e2fd5-f689-4ff8-bed1-c7547bf698d4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.478677591+00:00 stderr F I1212 16:15:19.478654 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager, name: builder, uid: e50d61e7-e2e4-460d-9e05-25ff6de3cd32]" virtual=false 2025-12-12T16:15:19.479668074+00:00 stderr F I1212 16:15:19.479544 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: generic-garbage-collector, uid: 59a1862f-75d5-4511-b116-6e331f654ef5]" 2025-12-12T16:15:19.479668074+00:00 stderr F I1212 16:15:19.479567 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: certified-operators, uid: 11b4506b-4dd0-4b2c-9e06-8d4d70c20ebd]" virtual=false 2025-12-12T16:15:19.479688495+00:00 stderr F I1212 16:15:19.479675 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-node, uid: a1d81936-4e38-446d-819b-61c0b05df947]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 
2025-12-12T16:15:19.479722226+00:00 stderr F I1212 16:15:19.479692 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-node, name: deployer, uid: fccddf5a-affb-4433-8780-42c795c69011]" virtual=false 2025-12-12T16:15:19.479860809+00:00 stderr F I1212 16:15:19.479830 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Secret, namespace: openshift-etcd-operator, name: etcd-client, uid: 3f64a35c-cf8e-418c-a4a0-bf68cea2beb1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:19.479860809+00:00 stderr F I1212 16:15:19.479850 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-openstack-infra, name: default, uid: 2bcfe359-f9ba-45a9-ab05-b91838f64af3]" virtual=false 2025-12-12T16:15:19.479890180+00:00 stderr F I1212 16:15:19.479826 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-dns-operator, name: metrics, uid: 7038e591-b4c2-4e44-9589-0decde72039e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.479940031+00:00 stderr F I1212 16:15:19.479917 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-user-settings, name: builder, uid: 990b7f43-c7e3-4a7a-b557-411acbd2ad67]" virtual=false 2025-12-12T16:15:19.480866853+00:00 stderr F I1212 16:15:19.480806 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-nutanix-infra, name: builder, uid: b522b271-bbe5-4154-8cd2-270b129c956a]" 2025-12-12T16:15:19.480925225+00:00 stderr F I1212 16:15:19.480902 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager, name: kube-controller-manager-sa, uid: 9c41cd61-c9eb-4bea-a70a-c5c79ef5115b]" virtual=false 2025-12-12T16:15:19.481061308+00:00 stderr F I1212 16:15:19.480920 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns, name: default, uid: 41b145ee-82ad-44b2-a3ab-5aa6e5bc7b66]" 2025-12-12T16:15:19.481075338+00:00 stderr F I1212 16:15:19.481055 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-multus, name: multus-admission-controller, uid: 1c31f0d5-3fc0-4cca-8214-9358f0570149]" virtual=false 2025-12-12T16:15:19.481246443+00:00 stderr F I1212 16:15:19.481224 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-ingress, name: router-default, uid: 6445a1ec-8ec2-4ec8-b191-9cc7fa235148]" 2025-12-12T16:15:19.481318234+00:00 stderr F I1212 16:15:19.481296 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd-operator, name: etcd-operator, uid: 
0ce3e4fb-39b1-4367-9741-b9e539e4cdc1]" virtual=false 2025-12-12T16:15:19.482421441+00:00 stderr F I1212 16:15:19.482134 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: redhat-marketplace, uid: 4349139a-211f-4b82-a867-d0f135c54aa4]" owner=[{"apiVersion":"operators.coreos.com/v1alpha1","kind":"CatalogSource","name":"redhat-marketplace","uid":"647ee808-5841-49cb-96be-0e8080859241","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.482421441+00:00 stderr F I1212 16:15:19.482163 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-diagnostics, name: deployer, uid: 7d501b4a-ea8d-4956-a1bb-b411c9a0acd7]" virtual=false 2025-12-12T16:15:19.482421441+00:00 stderr F I1212 16:15:19.482218 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager, name: builder, uid: e50d61e7-e2e4-460d-9e05-25ff6de3cd32]" 2025-12-12T16:15:19.482421441+00:00 stderr F I1212 16:15:19.482237 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-oauth-apiserver, name: builder, uid: 27b77f41-8696-456f-9113-a11045bd9102]" virtual=false 2025-12-12T16:15:19.482421441+00:00 stderr F I1212 16:15:19.482298 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd, name: default, uid: c532884c-2f3d-4df5-a458-fdfd553f285c]" 2025-12-12T16:15:19.482421441+00:00 stderr F I1212 16:15:19.482308 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-apiserver, name: api, uid: 1acd3f91-f33f-4387-ba78-a29b03a6d672]" virtual=false 2025-12-12T16:15:19.482421441+00:00 stderr F I1212 16:15:19.482348 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-control-plane, uid: 502d0892-a804-4fae-ad3f-cb342f49e7aa]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.482421441+00:00 stderr F I1212 16:15:19.482363 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ingress-operator, name: metrics, uid: 0f5c5226-4139-4bd9-a1f1-4819358f4b44]" virtual=false 2025-12-12T16:15:19.482421441+00:00 stderr F I1212 16:15:19.482403 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: default, uid: c653164a-e793-44b0-b9cf-75230bf56767]" 2025-12-12T16:15:19.482421441+00:00 stderr F I1212 16:15:19.482414 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver-operator, name: deployer, uid: aab8c518-8e90-460e-bb56-9187630afadb]" virtual=false 2025-12-12T16:15:19.482476812+00:00 stderr F I1212 16:15:19.482453 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" 
logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver, name: deployer, uid: c5a8b010-8e77-4249-854a-5bd6ebf0714a]" 2025-12-12T16:15:19.482485032+00:00 stderr F I1212 16:15:19.482476 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver, name: default, uid: 7acc8277-85c1-4f2c-92af-00e9499b3c15]" virtual=false 2025-12-12T16:15:19.482513323+00:00 stderr F I1212 16:15:19.482493 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: deployer, uid: 26ccb77a-9190-41d3-81ec-a3448b0ac222]" 2025-12-12T16:15:19.482520923+00:00 stderr F I1212 16:15:19.482509 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cloud-network-config-controller, name: default, uid: 29b08631-3854-474e-9847-d54b952f0d3c]" virtual=false 2025-12-12T16:15:19.482587015+00:00 stderr F I1212 16:15:19.482567 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver-operator, name: default, uid: 6ea81864-1580-468b-affd-dc4ea77910fd]" 2025-12-12T16:15:19.482595375+00:00 stderr F I1212 16:15:19.482582 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-samples-operator, name: deployer, uid: 006511ee-4a16-4f23-b041-b5856dc37b32]" virtual=false 2025-12-12T16:15:19.482669217+00:00 stderr F I1212 16:15:19.482644 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication-operator, name: default, uid: 2e4de228-a792-4e8b-9c08-41f2db6ea4ab]" 2025-12-12T16:15:19.482678027+00:00 stderr F I1212 16:15:19.482667 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-scheduler, name: scheduler, uid: 627baa67-c865-4f19-b78e-0050eed41c83]" virtual=false 2025-12-12T16:15:19.482846561+00:00 stderr F I1212 16:15:19.482822 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-node, name: deployer, uid: fccddf5a-affb-4433-8780-42c795c69011]" 2025-12-12T16:15:19.482857091+00:00 stderr F I1212 16:15:19.482841 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: machine-api-controllers, uid: 9c65bbd1-8044-4ed7-b28a-8adb920c184f]" virtual=false 2025-12-12T16:15:19.482988545+00:00 stderr F I1212 16:15:19.482967 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-openstack-infra, name: default, uid: 2bcfe359-f9ba-45a9-ab05-b91838f64af3]" 2025-12-12T16:15:19.482998285+00:00 stderr F I1212 16:15:19.482985 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: builder, uid: e10d0e66-4cc7-416b-8916-7c7433bb9322]" virtual=false 2025-12-12T16:15:19.483376534+00:00 stderr F I1212 16:15:19.483326 1 garbagecollector.go:567] "item has at least one 
existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-operator, name: metrics, uid: 62887ae0-bdbb-4eca-867b-a51f8a3fa46b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.483376534+00:00 stderr F I1212 16:15:19.483358 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-node, name: default, uid: 79b18594-4b7c-43b7-8c56-14f835f6ee85]" virtual=false 2025-12-12T16:15:19.483544118+00:00 stderr F I1212 16:15:19.483504 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-user-settings, name: builder, uid: 990b7f43-c7e3-4a7a-b557-411acbd2ad67]" 2025-12-12T16:15:19.483544118+00:00 stderr F I1212 16:15:19.483525 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: hostpath-provisioner, name: builder, uid: c1778f80-a165-4eef-a00e-3ca4e2307a0c]" virtual=false 2025-12-12T16:15:19.483910327+00:00 stderr F I1212 16:15:19.483867 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver-operator, name: deployer, uid: aab8c518-8e90-460e-bb56-9187630afadb]" 2025-12-12T16:15:19.483910327+00:00 stderr F I1212 16:15:19.483890 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: replicaset-controller, uid: 1285aecd-e85c-4c37-847e-58b996f0c28e]" virtual=false 2025-12-12T16:15:19.484144112+00:00 stderr F I1212 16:15:19.484098 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-config-operator, name: machine-config-daemon, uid: 09b33468-17e9-49a7-b6f4-38686a6730e9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.484144112+00:00 stderr F I1212 16:15:19.484122 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: default, name: default, uid: 88c22f04-d880-4e96-aadc-be4db6947872]" virtual=false 2025-12-12T16:15:19.484326717+00:00 stderr F I1212 16:15:19.484287 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-apiserver, name: api, uid: 1acd3f91-f33f-4387-ba78-a29b03a6d672]" 2025-12-12T16:15:19.484326717+00:00 stderr F I1212 16:15:19.484306 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: horizontal-pod-autoscaler, uid: 9ebeaa7e-c7cc-4591-9b58-fb1bda9e433f]" virtual=false 2025-12-12T16:15:19.484908781+00:00 stderr F I1212 16:15:19.484871 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cloud-network-config-controller, name: default, uid: 29b08631-3854-474e-9847-d54b952f0d3c]" 2025-12-12T16:15:19.484908781+00:00 stderr F I1212 16:15:19.484893 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: machine-os-builder, uid: 3af30073-f45e-46b3-90a8-855946223929]" virtual=false 2025-12-12T16:15:19.485261169+00:00 stderr F I1212 16:15:19.485225 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-samples-operator, name: deployer, uid: 006511ee-4a16-4f23-b041-b5856dc37b32]" 2025-12-12T16:15:19.485261169+00:00 stderr F I1212 16:15:19.485247 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: job-controller, uid: ae9289c4-7e54-44c6-92f5-86a0979df2e7]" virtual=false 2025-12-12T16:15:19.485420453+00:00 stderr F I1212 16:15:19.485388 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-scheduler, name: scheduler, uid: 627baa67-c865-4f19-b78e-0050eed41c83]" 2025-12-12T16:15:19.485420453+00:00 stderr F I1212 16:15:19.485407 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-operator, name: console-operator, uid: 49c566ed-810f-4e34-89c0-e69f4ba1f5fe]" virtual=false 2025-12-12T16:15:19.485591237+00:00 stderr F I1212 16:15:19.485548 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: replicaset-controller, uid: 1285aecd-e85c-4c37-847e-58b996f0c28e]" 2025-12-12T16:15:19.485591237+00:00 stderr F I1212 16:15:19.485568 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress, name: builder, uid: dd83b211-cd45-4da9-a56d-2f65e0514554]" virtual=false 2025-12-12T16:15:19.485754831+00:00 stderr F I1212 16:15:19.485724 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver, name: default, uid: 7acc8277-85c1-4f2c-92af-00e9499b3c15]" 2025-12-12T16:15:19.485754831+00:00 stderr F I1212 16:15:19.485742 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift, name: builder, uid: c58d2dd0-5671-4827-bf24-a42c609a9378]" virtual=false 2025-12-12T16:15:19.486646323+00:00 stderr F I1212 16:15:19.486596 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: horizontal-pod-autoscaler, uid: 9ebeaa7e-c7cc-4591-9b58-fb1bda9e433f]" 2025-12-12T16:15:19.486646323+00:00 stderr F I1212 16:15:19.486621 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-7, uid: b08accdd-34d1-43ec-b9f0-be52165b199c]" virtual=false 2025-12-12T16:15:19.486873028+00:00 stderr F I1212 16:15:19.486827 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-multus, name: network-metrics-service, uid: 221650ca-5ea9-450a-b2e3-01b15c386136]" 
owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.486873028+00:00 stderr F I1212 16:15:19.486851 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-5, uid: b4c3ff81-9e93-4c5b-b530-e6a169c05a01]" virtual=false 2025-12-12T16:15:19.487112554+00:00 stderr F I1212 16:15:19.487069 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns-operator, name: dns-operator, uid: ededd462-781c-45db-afa8-736b78e4df88]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.487112554+00:00 stderr F I1212 16:15:19.487092 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: node-bootstrapper, uid: fbe6cfae-a3b8-4429-8f59-393641a59a1e]" virtual=false 2025-12-12T16:15:19.487660567+00:00 stderr F I1212 16:15:19.487297 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: certified-operators, uid: 11b4506b-4dd0-4b2c-9e06-8d4d70c20ebd]" owner=[{"apiVersion":"operators.coreos.com/v1alpha1","kind":"CatalogSource","name":"certified-operators","uid":"04c2c69e-a9e9-447b-aff2-c2db7de0ee83","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.487660567+00:00 stderr F I1212 16:15:19.487323 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: olm-operator-serviceaccount, uid: 0d23d03f-8d1c-40a1-b029-bb73930758f7]" virtual=false 2025-12-12T16:15:19.487660567+00:00 stderr F I1212 16:15:19.487488 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-multus, name: multus-admission-controller, uid: 1c31f0d5-3fc0-4cca-8214-9358f0570149]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.487660567+00:00 stderr F I1212 16:15:19.487510 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-samples-operator, name: default, uid: b8cfd92c-ea4c-4a21-8faf-9e038360268d]" virtual=false 2025-12-12T16:15:19.488142779+00:00 stderr F I1212 16:15:19.487719 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift, name: builder, uid: c58d2dd0-5671-4827-bf24-a42c609a9378]" 2025-12-12T16:15:19.488142779+00:00 stderr F I1212 16:15:19.487742 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-storage-operator, name: default, uid: 0a9cea8b-2224-42da-871d-533f9dd4c4b4]" virtual=false 2025-12-12T16:15:19.488142779+00:00 stderr F I1212 16:15:19.487884 1 
garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager, name: kube-controller-manager-sa, uid: 9c41cd61-c9eb-4bea-a70a-c5c79ef5115b]" 2025-12-12T16:15:19.488142779+00:00 stderr F I1212 16:15:19.487904 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-public, name: builder, uid: 67844281-406c-422f-b0bb-da1067afd0e0]" virtual=false 2025-12-12T16:15:19.488369604+00:00 stderr F I1212 16:15:19.488324 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress, name: builder, uid: dd83b211-cd45-4da9-a56d-2f65e0514554]" 2025-12-12T16:15:19.488369604+00:00 stderr F I1212 16:15:19.488345 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns, name: deployer, uid: a4109aea-02c1-4104-b362-beb4ea292f94]" virtual=false 2025-12-12T16:15:19.488513308+00:00 stderr F I1212 16:15:19.488480 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver, name: revision-status-7, uid: b08accdd-34d1-43ec-b9f0-be52165b199c]" 2025-12-12T16:15:19.488513308+00:00 stderr F I1212 16:15:19.488500 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-image-registry, name: image-registry, uid: efe87d9e-0016-4f12-92f7-1b72e08ea4e2]" virtual=false 2025-12-12T16:15:19.489334517+00:00 stderr F I1212 16:15:19.489137 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: default, name: default, uid: 88c22f04-d880-4e96-aadc-be4db6947872]" 2025-12-12T16:15:19.489334517+00:00 stderr F I1212 16:15:19.489161 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: deployer, uid: 6a0f9184-6333-4122-9f10-431a888d6bc2]" virtual=false 2025-12-12T16:15:19.489397899+00:00 stderr F I1212 16:15:19.489365 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: hostpath-provisioner, name: builder, uid: c1778f80-a165-4eef-a00e-3ca4e2307a0c]" 2025-12-12T16:15:19.489409609+00:00 stderr F I1212 16:15:19.489390 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-route-controller-manager, name: route-controller-manager, uid: 0db65fef-8487-4125-8403-ec14a5aec225]" virtual=false 2025-12-12T16:15:19.489497431+00:00 stderr F I1212 16:15:19.489466 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: job-controller, uid: ae9289c4-7e54-44c6-92f5-86a0979df2e7]" 2025-12-12T16:15:19.489497431+00:00 stderr F I1212 16:15:19.489488 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: replication-controller, uid: b814a108-82b6-4bca-aeea-c38b76f4e56d]" virtual=false 2025-12-12T16:15:19.489532422+00:00 stderr F I1212 16:15:19.489477 1 
garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-node, name: default, uid: 79b18594-4b7c-43b7-8c56-14f835f6ee85]" 2025-12-12T16:15:19.489566503+00:00 stderr F I1212 16:15:19.489543 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: machine-os-builder, uid: 3af30073-f45e-46b3-90a8-855946223929]" 2025-12-12T16:15:19.489576863+00:00 stderr F I1212 16:15:19.489561 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-oauth-apiserver, name: default, uid: 277588f5-5477-400c-866d-4a85eae127fa]" virtual=false 2025-12-12T16:15:19.489615004+00:00 stderr F I1212 16:15:19.489592 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-canary, name: builder, uid: b634b16e-44ca-4e13-8634-a25da31fdc22]" virtual=false 2025-12-12T16:15:19.489668855+00:00 stderr F I1212 16:15:19.489644 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-diagnostics, name: deployer, uid: 7d501b4a-ea8d-4956-a1bb-b411c9a0acd7]" 2025-12-12T16:15:19.489679876+00:00 stderr F I1212 16:15:19.489664 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovirt-infra, name: default, uid: 0f80c18d-5e40-454e-bc43-ab2b1ee7548b]" virtual=false 2025-12-12T16:15:19.489736577+00:00 stderr F I1212 16:15:19.489611 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: builder, uid: e10d0e66-4cc7-416b-8916-7c7433bb9322]" 2025-12-12T16:15:19.489784998+00:00 stderr F I1212 16:15:19.489759 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: node-bootstrapper, uid: fbe6cfae-a3b8-4429-8f59-393641a59a1e]" 2025-12-12T16:15:19.489802119+00:00 stderr F I1212 16:15:19.489779 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-operator, name: builder, uid: 7093ccc9-dd20-4057-ae61-9cb86cbf6bc8]" virtual=false 2025-12-12T16:15:19.489823609+00:00 stderr F I1212 16:15:19.489762 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-version, name: deployer, uid: c8ceac88-130c-477d-8681-cff1e4530893]" virtual=false 2025-12-12T16:15:19.489900321+00:00 stderr F I1212 16:15:19.489875 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-5, uid: b4c3ff81-9e93-4c5b-b530-e6a169c05a01]" 2025-12-12T16:15:19.489911681+00:00 stderr F I1212 16:15:19.489895 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns, name: node-resolver, uid: ddba61e6-462c-4cc2-8ef5-ee39f8169405]" virtual=false 2025-12-12T16:15:19.489938912+00:00 
stderr F I1212 16:15:19.489729 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-oauth-apiserver, name: builder, uid: 27b77f41-8696-456f-9113-a11045bd9102]" 2025-12-12T16:15:19.489979763+00:00 stderr F I1212 16:15:19.489960 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-etcd-operator, name: metrics, uid: 518336b3-7008-41e5-9cec-2c1c85f2ff09]" virtual=false 2025-12-12T16:15:19.490335351+00:00 stderr F I1212 16:15:19.490310 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-storage-operator, name: default, uid: 0a9cea8b-2224-42da-871d-533f9dd4c4b4]" 2025-12-12T16:15:19.490390713+00:00 stderr F I1212 16:15:19.490368 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: deployment-controller, uid: b7f2405f-911b-41e6-998c-c321f2b05ce4]" virtual=false 2025-12-12T16:15:19.490547237+00:00 stderr F I1212 16:15:19.490514 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns, name: deployer, uid: a4109aea-02c1-4104-b362-beb4ea292f94]" 2025-12-12T16:15:19.490561257+00:00 stderr F I1212 16:15:19.490540 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns, name: dns, uid: 21d3359e-cdba-4451-aebe-88c2eb5c63eb]" virtual=false 2025-12-12T16:15:19.490609078+00:00 stderr F I1212 16:15:19.490588 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-samples-operator, name: default, uid: b8cfd92c-ea4c-4a21-8faf-9e038360268d]" 2025-12-12T16:15:19.490656299+00:00 stderr F I1212 16:15:19.490633 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-operator-lifecycle-manager, name: olm-operator-metrics, uid: 5f438b20-bbb9-4020-9829-dc86fe8ca8bd]" virtual=false 2025-12-12T16:15:19.490695650+00:00 stderr F I1212 16:15:19.490670 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-public, name: builder, uid: 67844281-406c-422f-b0bb-da1067afd0e0]" 2025-12-12T16:15:19.490706310+00:00 stderr F I1212 16:15:19.490691 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-cluster-samples-operator, name: metrics, uid: c016741c-0788-45bd-a328-6a4aa719a9ee]" virtual=false 2025-12-12T16:15:19.490753492+00:00 stderr F I1212 16:15:19.490641 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: machine-api-controllers, uid: 9c65bbd1-8044-4ed7-b28a-8adb920c184f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.490764432+00:00 stderr F I1212 16:15:19.490748 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[v1/ServiceAccount, namespace: openshift-host-network, name: deployer, uid: e0ea96ae-8f17-4a55-860f-74ab46922e86]" virtual=false 2025-12-12T16:15:19.490801973+00:00 stderr F I1212 16:15:19.490623 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ingress-operator, name: metrics, uid: 0f5c5226-4139-4bd9-a1f1-4819358f4b44]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.490848294+00:00 stderr F I1212 16:15:19.490827 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler-operator, name: builder, uid: 5f6c6c3a-8dd4-44a3-9712-d93b9daeb8ca]" virtual=false 2025-12-12T16:15:19.491023738+00:00 stderr F I1212 16:15:19.490989 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-image-registry, name: image-registry, uid: efe87d9e-0016-4f12-92f7-1b72e08ea4e2]" 2025-12-12T16:15:19.491126321+00:00 stderr F I1212 16:15:19.491096 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: machine-api-termination-handler, uid: 1567c8f7-61db-45bd-a1b6-c1f4b610e91b]" virtual=false 2025-12-12T16:15:19.491198012+00:00 stderr F I1212 16:15:19.491072 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd-operator, name: etcd-operator, uid: 0ce3e4fb-39b1-4367-9741-b9e539e4cdc1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.491240603+00:00 stderr F I1212 16:15:19.491218 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 749fdd6c-f7a2-4743-bbd6-4c00d16e8776]" virtual=false 2025-12-12T16:15:19.491284984+00:00 stderr F I1212 16:15:19.491003 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: deployer, uid: 6a0f9184-6333-4122-9f10-431a888d6bc2]" 2025-12-12T16:15:19.491307575+00:00 stderr F I1212 16:15:19.491286 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: legacy-service-account-token-cleaner, uid: 62ca79ff-82a8-42a3-b628-684ac0c27fce]" virtual=false 2025-12-12T16:15:19.491900889+00:00 stderr F I1212 16:15:19.491861 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-operator, name: console-operator, uid: 49c566ed-810f-4e34-89c0-e69f4ba1f5fe]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.491920320+00:00 stderr F I1212 16:15:19.491891 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, 
namespace: openshift-cluster-version, name: cluster-version-operator, uid: 3a493813-5327-4484-bc96-a108bced6093]" virtual=false 2025-12-12T16:15:19.491979181+00:00 stderr F I1212 16:15:19.491854 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-route-controller-manager, name: route-controller-manager, uid: 0db65fef-8487-4125-8403-ec14a5aec225]" 2025-12-12T16:15:19.491979181+00:00 stderr F I1212 16:15:19.491960 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: default, uid: 6477c597-7d3e-45d6-b6c3-df5d0fe750c2]" virtual=false 2025-12-12T16:15:19.492444212+00:00 stderr F I1212 16:15:19.492405 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: replication-controller, uid: b814a108-82b6-4bca-aeea-c38b76f4e56d]" 2025-12-12T16:15:19.492444212+00:00 stderr F I1212 16:15:19.492425 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication, name: deployer, uid: 61d00f4c-0ecb-40a9-a3df-40e6892b2fd4]" virtual=false 2025-12-12T16:15:19.494909242+00:00 stderr F I1212 16:15:19.494843 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-oauth-apiserver, name: default, uid: 277588f5-5477-400c-866d-4a85eae127fa]" 2025-12-12T16:15:19.494909242+00:00 stderr F I1212 16:15:19.494882 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: origin-namespace-controller, uid: 1feba1fd-f979-4e52-b531-960e423f549c]" virtual=false 2025-12-12T16:15:19.498433167+00:00 stderr F I1212 16:15:19.498389 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovirt-infra, name: default, uid: 0f80c18d-5e40-454e-bc43-ab2b1ee7548b]" 2025-12-12T16:15:19.498433167+00:00 stderr F I1212 16:15:19.498416 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: pv-protection-controller, uid: 4c8ece48-10b2-479d-864b-2ef45c15b424]" virtual=false 2025-12-12T16:15:19.502585777+00:00 stderr F I1212 16:15:19.502533 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-canary, name: builder, uid: b634b16e-44ca-4e13-8634-a25da31fdc22]" 2025-12-12T16:15:19.502602547+00:00 stderr F I1212 16:15:19.502579 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console, name: deployer, uid: 0541a888-d3f0-4404-b4c6-1404da55b02b]" virtual=false 2025-12-12T16:15:19.505258951+00:00 stderr F I1212 16:15:19.505218 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-operator, name: builder, uid: 7093ccc9-dd20-4057-ae61-9cb86cbf6bc8]" 2025-12-12T16:15:19.505274381+00:00 stderr F I1212 16:15:19.505258 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: pv-recycler-controller, uid: 1a3f23df-b74a-4fa9-a7fb-139dd63611c4]" virtual=false 2025-12-12T16:15:19.508609732+00:00 stderr F I1212 16:15:19.508570 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-version, name: deployer, uid: c8ceac88-130c-477d-8681-cff1e4530893]" 2025-12-12T16:15:19.508609732+00:00 stderr F I1212 16:15:19.508593 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca, name: deployer, uid: 895a6560-2e4c-46a5-9c34-4ade34dcb3f8]" virtual=false 2025-12-12T16:15:19.512762832+00:00 stderr F I1212 16:15:19.512724 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns, name: node-resolver, uid: ddba61e6-462c-4cc2-8ef5-ee39f8169405]" 2025-12-12T16:15:19.512762832+00:00 stderr F I1212 16:15:19.512748 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 1d6e0c49-5022-4407-b7e2-606925e10c95]" virtual=false 2025-12-12T16:15:19.520081988+00:00 stderr F I1212 16:15:19.520021 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: olm-operator-serviceaccount, uid: 0d23d03f-8d1c-40a1-b029-bb73930758f7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.520081988+00:00 stderr F I1212 16:15:19.520056 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler-operator, name: default, uid: 01496a3e-ea05-4212-b512-413db278590d]" virtual=false 2025-12-12T16:15:19.521858571+00:00 stderr F I1212 16:15:19.521819 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: deployment-controller, uid: b7f2405f-911b-41e6-998c-c321f2b05ce4]" 2025-12-12T16:15:19.521858571+00:00 stderr F I1212 16:15:19.521844 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-controllers, uid: 3304c944-7c53-445a-9f17-9e3e5f75226c]" virtual=false 2025-12-12T16:15:19.527418835+00:00 stderr F I1212 16:15:19.527358 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns, name: dns, uid: 21d3359e-cdba-4451-aebe-88c2eb5c63eb]" 2025-12-12T16:15:19.527436725+00:00 stderr F I1212 16:15:19.527408 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns-operator, name: deployer, uid: 66a28d36-1418-4409-9421-9a994e302b8d]" virtual=false 2025-12-12T16:15:19.531910853+00:00 stderr F I1212 16:15:19.531858 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, 
namespace: openshift-host-network, name: deployer, uid: e0ea96ae-8f17-4a55-860f-74ab46922e86]" 2025-12-12T16:15:19.531927643+00:00 stderr F I1212 16:15:19.531904 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: machine-config-controller, uid: c114bd17-4943-43d2-bc7d-81a1438e3da6]" virtual=false 2025-12-12T16:15:19.539253550+00:00 stderr F I1212 16:15:19.539167 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler-operator, name: builder, uid: 5f6c6c3a-8dd4-44a3-9712-d93b9daeb8ca]" 2025-12-12T16:15:19.539253550+00:00 stderr F I1212 16:15:19.539220 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: redhat-operators, uid: d505240d-ca61-4a03-8c73-ff6e1be09703]" virtual=false 2025-12-12T16:15:19.546330570+00:00 stderr F I1212 16:15:19.546249 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: legacy-service-account-token-cleaner, uid: 62ca79ff-82a8-42a3-b628-684ac0c27fce]" 2025-12-12T16:15:19.546330570+00:00 stderr F I1212 16:15:19.546299 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-console, name: builder, uid: d7f99f28-2029-4d1f-b19f-4a162817a9f5]" virtual=false 2025-12-12T16:15:19.555222825+00:00 stderr F I1212 16:15:19.555141 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: default, uid: 6477c597-7d3e-45d6-b6c3-df5d0fe750c2]" 2025-12-12T16:15:19.555268556+00:00 stderr F I1212 16:15:19.555209 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-operator-lifecycle-manager, name: catalog-operator-metrics, uid: c608aa3d-4067-43ca-a0d6-9d04be3b853c]" virtual=false 2025-12-12T16:15:19.559193790+00:00 stderr F I1212 16:15:19.559123 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication, name: deployer, uid: 61d00f4c-0ecb-40a9-a3df-40e6892b2fd4]" 2025-12-12T16:15:19.559193790+00:00 stderr F I1212 16:15:19.559156 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-route-controller-manager, name: route-controller-manager-sa, uid: 18f12a6b-71e4-45c9-8aaf-7f850a0cdb2d]" virtual=false 2025-12-12T16:15:19.562562671+00:00 stderr F I1212 16:15:19.562501 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: origin-namespace-controller, uid: 1feba1fd-f979-4e52-b531-960e423f549c]" 2025-12-12T16:15:19.562562671+00:00 stderr F I1212 16:15:19.562546 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: default, uid: 58ea1264-dd37-4e71-95d6-26d5cc0aadb6]" virtual=false 2025-12-12T16:15:19.566532387+00:00 stderr F I1212 16:15:19.566477 1 garbagecollector.go:548] "item 
doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: pv-protection-controller, uid: 4c8ece48-10b2-479d-864b-2ef45c15b424]" 2025-12-12T16:15:19.566532387+00:00 stderr F I1212 16:15:19.566504 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-route-controller-manager, name: deployer, uid: 474f8420-7409-4c5f-979a-197c284438f4]" virtual=false 2025-12-12T16:15:19.569542750+00:00 stderr F I1212 16:15:19.569491 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console, name: deployer, uid: 0541a888-d3f0-4404-b4c6-1404da55b02b]" 2025-12-12T16:15:19.569542750+00:00 stderr F I1212 16:15:19.569518 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: disruption-controller, uid: b4536208-f02c-4182-9783-6d7fd28f52e7]" virtual=false 2025-12-12T16:15:19.572730896+00:00 stderr F I1212 16:15:19.572658 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: pv-recycler-controller, uid: 1a3f23df-b74a-4fa9-a7fb-139dd63611c4]" 2025-12-12T16:15:19.572730896+00:00 stderr F I1212 16:15:19.572681 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-public, name: default, uid: 025bb28b-0604-400c-9e57-a9a804c76def]" virtual=false 2025-12-12T16:15:19.576042006+00:00 stderr F I1212 16:15:19.575985 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca, name: deployer, uid: 895a6560-2e4c-46a5-9c34-4ade34dcb3f8]" 2025-12-12T16:15:19.576059987+00:00 stderr F I1212 16:15:19.576036 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-operator, name: default, uid: e2774e29-b4be-4136-9ff2-1f69111749c7]" virtual=false 2025-12-12T16:15:19.584546041+00:00 stderr F I1212 16:15:19.584476 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-etcd-operator, name: metrics, uid: 518336b3-7008-41e5-9cec-2c1c85f2ff09]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.584546041+00:00 stderr F I1212 16:15:19.584522 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler, name: openshift-kube-scheduler-sa, uid: 9b9f949b-a1b1-4ef7-b595-1264eb5298e2]" virtual=false 2025-12-12T16:15:19.586144110+00:00 stderr F I1212 16:15:19.586097 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler-operator, name: default, uid: 01496a3e-ea05-4212-b512-413db278590d]" 2025-12-12T16:15:19.586144110+00:00 stderr F I1212 16:15:19.586117 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: 
openshift-machine-api, name: control-plane-machine-set-operator, uid: bd3ae2d6-78bd-48d1-be9f-a77e56ba96c8]" virtual=false 2025-12-12T16:15:19.592728198+00:00 stderr F I1212 16:15:19.592634 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns-operator, name: deployer, uid: 66a28d36-1418-4409-9421-9a994e302b8d]" 2025-12-12T16:15:19.592728198+00:00 stderr F I1212 16:15:19.592656 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: machine-config-daemon, uid: 6bd1068a-6d01-406e-a2dd-325d2b2e1302]" virtual=false 2025-12-12T16:15:19.597306299+00:00 stderr F I1212 16:15:19.597237 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-cluster-samples-operator, name: metrics, uid: c016741c-0788-45bd-a328-6a4aa719a9ee]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.597306299+00:00 stderr F I1212 16:15:19.597281 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver, name: openshift-apiserver-sa, uid: 6b549422-a252-4d94-afa8-af96c926b923]" virtual=false 2025-12-12T16:15:19.599433480+00:00 stderr F I1212 16:15:19.599383 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: machine-config-controller, uid: c114bd17-4943-43d2-bc7d-81a1438e3da6]" 2025-12-12T16:15:19.599433480+00:00 stderr F I1212 16:15:19.599411 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-7, uid: 36f6c219-47c9-453e-8adc-81a163318ca3]" virtual=false 2025-12-12T16:15:19.603959509+00:00 stderr F I1212 16:15:19.603858 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-operator-lifecycle-manager, name: olm-operator-metrics, uid: 5f438b20-bbb9-4020-9829-dc86fe8ca8bd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.603959509+00:00 stderr F I1212 16:15:19.603901 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-2, uid: f5063641-afc0-44e4-b98e-2062cca37149]" virtual=false 2025-12-12T16:15:19.609582264+00:00 stderr F I1212 16:15:19.609493 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: machine-api-termination-handler, uid: 1567c8f7-61db-45bd-a1b6-c1f4b610e91b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.609582264+00:00 stderr F I1212 16:15:19.609538 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovirt-infra, name: builder, uid: 9804a365-b973-4ba3-8a2d-cba7c2d38a4d]" virtual=false 2025-12-12T16:15:19.613411276+00:00 stderr F I1212 16:15:19.613335 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-console, name: builder, uid: d7f99f28-2029-4d1f-b19f-4a162817a9f5]" 2025-12-12T16:15:19.613411276+00:00 stderr F I1212 16:15:19.613363 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: root-ca-cert-publisher, uid: d990bc47-3cb7-4d58-92f0-77fa50856da8]" virtual=false 2025-12-12T16:15:19.618457878+00:00 stderr F I1212 16:15:19.618168 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 749fdd6c-f7a2-4743-bbd6-4c00d16e8776]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.618457878+00:00 stderr F I1212 16:15:19.618251 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-operator, name: builder, uid: bcae120c-248e-4d63-824a-cc2fa5a8758a]" virtual=false 2025-12-12T16:15:19.621335817+00:00 stderr F I1212 16:15:19.621227 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-cluster-version, name: cluster-version-operator, uid: 3a493813-5327-4484-bc96-a108bced6093]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.621372588+00:00 stderr F I1212 16:15:19.621322 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns-operator, name: default, uid: bf36925f-d839-4909-beaf-656f7c2514b0]" virtual=false 2025-12-12T16:15:19.626278787+00:00 stderr F I1212 16:15:19.626137 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-route-controller-manager, name: route-controller-manager-sa, uid: 18f12a6b-71e4-45c9-8aaf-7f850a0cdb2d]" 2025-12-12T16:15:19.626309837+00:00 stderr F I1212 16:15:19.626260 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress, name: deployer, uid: e9c7a76e-810d-465a-9a26-fdb45a5a0c7c]" virtual=false 2025-12-12T16:15:19.629361271+00:00 stderr F I1212 16:15:19.629300 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: default, uid: 58ea1264-dd37-4e71-95d6-26d5cc0aadb6]" 2025-12-12T16:15:19.629361271+00:00 stderr F I1212 16:15:19.629332 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 180b3a1a-50c2-445c-8650-162b0f3a1d99]" 
virtual=false 2025-12-12T16:15:19.632229770+00:00 stderr F I1212 16:15:19.632131 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-route-controller-manager, name: deployer, uid: 474f8420-7409-4c5f-979a-197c284438f4]" 2025-12-12T16:15:19.632250230+00:00 stderr F I1212 16:15:19.632222 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-console, name: networking-console-plugin, uid: 99d5d1b5-3679-4a1e-a5a0-7df1601fc793]" virtual=false 2025-12-12T16:15:19.640893849+00:00 stderr F I1212 16:15:19.640810 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: disruption-controller, uid: b4536208-f02c-4182-9783-6d7fd28f52e7]" 2025-12-12T16:15:19.640919139+00:00 stderr F I1212 16:15:19.640885 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: build-controller, uid: 97fd2ec1-aa60-43c2-8b13-cec09978bd80]" virtual=false 2025-12-12T16:15:19.646377031+00:00 stderr F I1212 16:15:19.646310 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-public, name: default, uid: 025bb28b-0604-400c-9e57-a9a804c76def]" 2025-12-12T16:15:19.646377031+00:00 stderr F I1212 16:15:19.646344 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: default, name: builder, uid: d1c7e051-3e11-4ab1-998c-b5c0893c3a41]" virtual=false 2025-12-12T16:15:19.649407654+00:00 stderr F I1212 16:15:19.649316 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 1d6e0c49-5022-4407-b7e2-606925e10c95]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.649429834+00:00 stderr F I1212 16:15:19.649401 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-network-operator, name: network-operator-7bdcf4f5bd, uid: 3ae821d5-a7b7-467b-837d-641fc04a72a9]" virtual=false 2025-12-12T16:15:19.653078642+00:00 stderr F I1212 16:15:19.653015 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-operator, name: default, uid: e2774e29-b4be-4136-9ff2-1f69111749c7]" 2025-12-12T16:15:19.653078642+00:00 stderr F I1212 16:15:19.653054 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-network-console, name: networking-console-plugin-5ff7774fd9, uid: 665f1706-d7e5-4a5a-9e31-d19dda2d76da]" virtual=false 2025-12-12T16:15:19.653157714+00:00 stderr F I1212 16:15:19.653010 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler, name: openshift-kube-scheduler-sa, uid: 9b9f949b-a1b1-4ef7-b595-1264eb5298e2]" 
2025-12-12T16:15:19.653157714+00:00 stderr F I1212 16:15:19.653143 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: service-account-controller, uid: fb7876b2-a9e4-4d25-972f-aab5978eae7b]" virtual=false 2025-12-12T16:15:19.663058213+00:00 stderr F I1212 16:15:19.662949 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-controllers, uid: 3304c944-7c53-445a-9f17-9e3e5f75226c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.663058213+00:00 stderr F I1212 16:15:19.663010 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication-operator, name: deployer, uid: 73648bb4-df93-476c-9d76-8367e13474a8]" virtual=false 2025-12-12T16:15:19.670077542+00:00 stderr F I1212 16:15:19.670008 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver, name: openshift-apiserver-sa, uid: 6b549422-a252-4d94-afa8-af96c926b923]" 2025-12-12T16:15:19.670077542+00:00 stderr F I1212 16:15:19.670050 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-oauth-apiserver, name: oauth-apiserver-sa, uid: fbcf342b-4904-4fcd-a105-e497db8721fd]" virtual=false 2025-12-12T16:15:19.678288320+00:00 stderr F I1212 16:15:19.677330 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler, name: revision-status-2, uid: f5063641-afc0-44e4-b98e-2062cca37149]" 2025-12-12T16:15:19.678288320+00:00 stderr F I1212 16:15:19.677368 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: resourcequota-controller, uid: 7569002a-ef9d-493e-a834-64656e35e490]" virtual=false 2025-12-12T16:15:19.678288320+00:00 stderr F I1212 16:15:19.677461 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-7, uid: 36f6c219-47c9-453e-8adc-81a163318ca3]" 2025-12-12T16:15:19.678288320+00:00 stderr F I1212 16:15:19.677475 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager, name: default, uid: b9c3d9d6-d96e-4cf5-84fe-8886eb72a589]" virtual=false 2025-12-12T16:15:19.678288320+00:00 stderr F I1212 16:15:19.677527 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: machine-config-daemon, uid: 6bd1068a-6d01-406e-a2dd-325d2b2e1302]" 2025-12-12T16:15:19.678288320+00:00 stderr F I1212 16:15:19.677540 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kni-infra, name: default, uid: 2cefe3a9-ea0d-4bf6-92f1-a5b66d76c95b]" virtual=false 2025-12-12T16:15:19.681964348+00:00 stderr F I1212 
16:15:19.681897 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovirt-infra, name: builder, uid: 9804a365-b973-4ba3-8a2d-cba7c2d38a4d]" 2025-12-12T16:15:19.681964348+00:00 stderr F I1212 16:15:19.681935 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: marketplace-operator, uid: c100705c-9ecd-449a-969b-a207f023c1b0]" virtual=false 2025-12-12T16:15:19.682079091+00:00 stderr F I1212 16:15:19.682029 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: redhat-operators, uid: d505240d-ca61-4a03-8c73-ff6e1be09703]" owner=[{"apiVersion":"operators.coreos.com/v1alpha1","kind":"CatalogSource","name":"redhat-operators","uid":"ca744265-3ae3-4482-8c3d-b10e28fe1042","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.682079091+00:00 stderr F I1212 16:15:19.682065 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca, name: builder, uid: de6c7c71-9b95-462b-b648-f33e58be581a]" virtual=false 2025-12-12T16:15:19.688148217+00:00 stderr F I1212 16:15:19.688047 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: root-ca-cert-publisher, uid: d990bc47-3cb7-4d58-92f0-77fa50856da8]" 2025-12-12T16:15:19.688148217+00:00 stderr F I1212 16:15:19.688118 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator, uid: 736e29a2-1ce3-47d8-ac5c-ddba4e238245]" virtual=false 2025-12-12T16:15:19.699323616+00:00 stderr F I1212 16:15:19.699247 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns-operator, name: default, uid: bf36925f-d839-4909-beaf-656f7c2514b0]" 2025-12-12T16:15:19.699323616+00:00 stderr F I1212 16:15:19.699297 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-user-settings, name: default, uid: c1f5b030-718b-46e9-a528-46147d477b1a]" virtual=false 2025-12-12T16:15:19.699465150+00:00 stderr F I1212 16:15:19.699442 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-operator, name: builder, uid: bcae120c-248e-4d63-824a-cc2fa5a8758a]" 2025-12-12T16:15:19.699475220+00:00 stderr F I1212 16:15:19.699462 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-host-network, name: builder, uid: 7858bc9b-2ccd-410f-bebe-7a916adc5224]" virtual=false 2025-12-12T16:15:19.716154632+00:00 stderr F I1212 16:15:19.716078 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress, name: deployer, uid: e9c7a76e-810d-465a-9a26-fdb45a5a0c7c]" 2025-12-12T16:15:19.716154632+00:00 stderr F I1212 16:15:19.716122 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager, name: installer-sa, uid: dacaf78a-32d5-4b31-bcc1-af9d3559c427]" virtual=false 2025-12-12T16:15:19.720764193+00:00 stderr F I1212 16:15:19.718974 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: build-controller, uid: 97fd2ec1-aa60-43c2-8b13-cec09978bd80]" 2025-12-12T16:15:19.720764193+00:00 stderr F I1212 16:15:19.719020 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: multus-ancillary-tools, uid: c21b8763-d204-40e9-8b00-8f8a8767dd88]" virtual=false 2025-12-12T16:15:19.720764193+00:00 stderr F I1212 16:15:19.719144 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: default, name: builder, uid: d1c7e051-3e11-4ab1-998c-b5c0893c3a41]" 2025-12-12T16:15:19.720764193+00:00 stderr F I1212 16:15:19.719159 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-operator-lifecycle-manager, name: package-server-manager-metrics, uid: 199e5ec3-0fe1-4929-8be5-eeb222b837d4]" virtual=false 2025-12-12T16:15:19.726854459+00:00 stderr F I1212 16:15:19.724038 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-operator-lifecycle-manager, name: catalog-operator-metrics, uid: c608aa3d-4067-43ca-a0d6-9d04be3b853c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.726854459+00:00 stderr F I1212 16:15:19.724080 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-managed, name: builder, uid: 2594c989-f7ee-4ae2-9c02-84fba44f645a]" virtual=false 2025-12-12T16:15:19.726854459+00:00 stderr F I1212 16:15:19.724117 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:15:19.740628811+00:00 stderr F I1212 16:15:19.740531 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: service-account-controller, uid: fb7876b2-a9e4-4d25-972f-aab5978eae7b]" 2025-12-12T16:15:19.740628811+00:00 stderr F I1212 16:15:19.740589 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager, name: deployer, uid: 5ec3e3b8-2bff-4bc6-bdca-00f6f083190a]" virtual=false 2025-12-12T16:15:19.741513863+00:00 stderr F I1212 16:15:19.741456 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager, name: default, uid: b9c3d9d6-d96e-4cf5-84fe-8886eb72a589]" 2025-12-12T16:15:19.741513863+00:00 stderr F I1212 16:15:19.741498 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-4, uid: 
916d9208-ead5-4b5c-997f-82c9f885ea3e]" virtual=false 2025-12-12T16:15:19.741681217+00:00 stderr F I1212 16:15:19.741592 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: bd3ae2d6-78bd-48d1-be9f-a77e56ba96c8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.741811070+00:00 stderr F I1212 16:15:19.741678 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: default, uid: b18ddf31-d59f-4827-b2be-0829aba08292]" virtual=false 2025-12-12T16:15:19.742426525+00:00 stderr F I1212 16:15:19.742379 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kni-infra, name: default, uid: 2cefe3a9-ea0d-4bf6-92f1-a5b66d76c95b]" 2025-12-12T16:15:19.742448295+00:00 stderr F I1212 16:15:19.742422 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: machine-os-puller, uid: 9a9dc95b-8b66-409b-82dd-85e1e2eaef67]" virtual=false 2025-12-12T16:15:19.742756033+00:00 stderr F I1212 16:15:19.742711 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-cluster-samples-operator/cluster-samples-operator-6b564684c8" need=1 creating=1 2025-12-12T16:15:19.742891406+00:00 stderr F I1212 16:15:19.742851 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-cluster-version/cluster-version-operator-7c9b9cfd6" need=1 creating=1 2025-12-12T16:15:19.743510901+00:00 stderr F I1212 16:15:19.743477 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-config-operator/openshift-config-operator-5777786469" need=1 creating=1 2025-12-12T16:15:19.743581242+00:00 stderr F I1212 16:15:19.743548 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-oauth-apiserver, name: oauth-apiserver-sa, uid: fbcf342b-4904-4fcd-a105-e497db8721fd]" 2025-12-12T16:15:19.743592463+00:00 stderr F I1212 16:15:19.743578 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd-operator, name: deployer, uid: 9db3cbe6-cced-4e06-8f6d-1270a6e44e1d]" virtual=false 2025-12-12T16:15:19.743656164+00:00 stderr F I1212 16:15:19.743524 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication-operator, name: deployer, uid: 73648bb4-df93-476c-9d76-8367e13474a8]" 2025-12-12T16:15:19.743666475+00:00 stderr F I1212 16:15:19.743648 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager, name: default, uid: d7f17504-d3e7-4cf7-9576-9082d656eb82]" virtual=false 2025-12-12T16:15:19.743801668+00:00 stderr F I1212 16:15:19.743498 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" 
item="[v1/ServiceAccount, namespace: kube-system, name: resourcequota-controller, uid: 7569002a-ef9d-493e-a834-64656e35e490]" 2025-12-12T16:15:19.743832459+00:00 stderr F I1212 16:15:19.743801 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-authentication, name: oauth-openshift, uid: 7299e9ca-868e-4f2a-8bab-86445465b6e7]" virtual=false 2025-12-12T16:15:19.749960896+00:00 stderr F I1212 16:15:19.748857 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-console-operator/console-operator-67c89758df" need=1 creating=1 2025-12-12T16:15:19.749960896+00:00 stderr F I1212 16:15:19.749324 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca, name: builder, uid: de6c7c71-9b95-462b-b648-f33e58be581a]" 2025-12-12T16:15:19.749960896+00:00 stderr F I1212 16:15:19.749359 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-3, uid: f02d59da-bbe4-4f8b-a03b-6d3ee409478c]" virtual=false 2025-12-12T16:15:19.783749100+00:00 stderr F I1212 16:15:19.783692 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-user-settings, name: default, uid: c1f5b030-718b-46e9-a528-46147d477b1a]" 2025-12-12T16:15:19.783853283+00:00 stderr F I1212 16:15:19.783801 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-storage-operator, name: builder, uid: 7cf688aa-d2b2-4ed4-b662-ca72cacf51d6]" virtual=false 2025-12-12T16:15:19.784467838+00:00 stderr F I1212 16:15:19.784223 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-host-network, name: builder, uid: 7858bc9b-2ccd-410f-bebe-7a916adc5224]" 2025-12-12T16:15:19.784528959+00:00 stderr F I1212 16:15:19.784505 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: endpointslicemirroring-controller, uid: 82732f53-5027-447a-9494-37cb459120ba]" virtual=false 2025-12-12T16:15:19.785890672+00:00 stderr F I1212 16:15:19.785474 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 180b3a1a-50c2-445c-8650-162b0f3a1d99]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.785890672+00:00 stderr F I1212 16:15:19.785560 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication, name: oauth-openshift, uid: 06d4a926-27d0-49ac-b5dd-b15c69b33bae]" virtual=false 2025-12-12T16:15:19.785890672+00:00 stderr F I1212 16:15:19.785813 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-console, name: networking-console-plugin, uid: 
99d5d1b5-3679-4a1e-a5a0-7df1601fc793]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.785890672+00:00 stderr F I1212 16:15:19.785839 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-operator-webhook, uid: 3d8d8642-1cd0-4ece-918a-8ae8e150b269]" virtual=false 2025-12-12T16:15:19.786893676+00:00 stderr F I1212 16:15:19.786846 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager, name: installer-sa, uid: dacaf78a-32d5-4b31-bcc1-af9d3559c427]" 2025-12-12T16:15:19.786893676+00:00 stderr F I1212 16:15:19.786877 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: persistent-volume-binder, uid: ee43d172-bfed-432e-851b-8ec90da1e682]" virtual=false 2025-12-12T16:15:19.790571945+00:00 stderr F I1212 16:15:19.790432 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-4, uid: 916d9208-ead5-4b5c-997f-82c9f885ea3e]" 2025-12-12T16:15:19.790571945+00:00 stderr F I1212 16:15:19.790471 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-operator, name: deployer, uid: a8772516-08ea-467a-a65b-9f9dd79ac240]" virtual=false 2025-12-12T16:15:19.796307763+00:00 stderr F I1212 16:15:19.796247 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-console/console-64d44f6ddf" need=1 creating=1 2025-12-12T16:15:19.800239417+00:00 stderr F I1212 16:15:19.800167 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager, name: deployer, uid: 5ec3e3b8-2bff-4bc6-bdca-00f6f083190a]" 2025-12-12T16:15:19.805067164+00:00 stderr F I1212 16:15:19.800328 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-managed, name: builder, uid: 2594c989-f7ee-4ae2-9c02-84fba44f645a]" 2025-12-12T16:15:19.805103415+00:00 stderr F I1212 16:15:19.805060 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager-operator, name: deployer, uid: ad265540-0b1d-4c38-a635-b761945529bc]" virtual=false 2025-12-12T16:15:19.805207257+00:00 stderr F I1212 16:15:19.800480 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-network-console, name: networking-console-plugin-5ff7774fd9, uid: 665f1706-d7e5-4a5a-9e31-d19dda2d76da]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"networking-console-plugin","uid":"e8047e30-a40e-4ced-ae42-eea4288c975a","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.805207257+00:00 stderr F I1212 16:15:19.805192 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[v1/ServiceAccount, namespace: openshift-network-console, name: default, uid: 3c7b4d13-2251-45a6-9967-d84afc2cb185]" virtual=false 2025-12-12T16:15:19.805248128+00:00 stderr F I1212 16:15:19.800429 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-network-operator, name: network-operator-7bdcf4f5bd, uid: 3ae821d5-a7b7-467b-837d-641fc04a72a9]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"network-operator","uid":"2c897060-d3cf-4d7f-8d38-ef464b7a697a","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.805248128+00:00 stderr F I1212 16:15:19.805241 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-diagnostics, name: builder, uid: bead93dc-3918-448e-827b-aba7ae88a412]" virtual=false 2025-12-12T16:15:19.805370981+00:00 stderr F I1212 16:15:19.805312 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd-operator, name: deployer, uid: 9db3cbe6-cced-4e06-8f6d-1270a6e44e1d]" 2025-12-12T16:15:19.805420902+00:00 stderr F I1212 16:15:19.805380 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver, name: builder, uid: a72f137a-0cef-49e7-ad2f-7de2e3f9e3db]" virtual=false 2025-12-12T16:15:19.805799941+00:00 stderr F I1212 16:15:19.804950 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: build-config-change-controller, uid: 2728b9f5-a4cd-4c68-baf7-f8ba8bfec130]" virtual=false 2025-12-12T16:15:19.812116744+00:00 stderr F I1212 16:15:19.812032 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: default, uid: b18ddf31-d59f-4827-b2be-0829aba08292]" 2025-12-12T16:15:19.812116744+00:00 stderr F I1212 16:15:19.812064 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-config-operator, name: machine-config-operator, uid: 67b62412-7f65-4222-ba31-74c21dcee1b1]" virtual=false 2025-12-12T16:15:19.812252887+00:00 stderr F I1212 16:15:19.812224 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: machine-os-puller, uid: 9a9dc95b-8b66-409b-82dd-85e1e2eaef67]" 2025-12-12T16:15:19.812252887+00:00 stderr F I1212 16:15:19.812240 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ingress, name: router-internal-default, uid: 5f75d74e-bfd6-48a0-89a7-729f8cffaebd]" virtual=false 2025-12-12T16:15:19.814297726+00:00 stderr F I1212 16:15:19.813747 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager, name: default, uid: d7f17504-d3e7-4cf7-9576-9082d656eb82]" 2025-12-12T16:15:19.814297726+00:00 stderr F I1212 16:15:19.813771 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: 
openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 78bb60a8-531d-48f1-b653-8764a30ad047]" virtual=false 2025-12-12T16:15:19.814680595+00:00 stderr F I1212 16:15:19.814640 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-console/downloads-747b44746d" need=1 creating=1 2025-12-12T16:15:19.829749288+00:00 stderr F I1212 16:15:19.829610 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager, name: revision-status-3, uid: f02d59da-bbe4-4f8b-a03b-6d3ee409478c]" 2025-12-12T16:15:19.838557241+00:00 stderr F I1212 16:15:19.838358 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-managed, name: default, uid: 97c1835e-d31e-490f-adb5-0fa15e1fc5c1]" virtual=false 2025-12-12T16:15:19.838557241+00:00 stderr F I1212 16:15:19.834113 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager-operator/openshift-controller-manager-operator-686468bdd5" need=1 creating=1 2025-12-12T16:15:19.838653213+00:00 stderr F I1212 16:15:19.833995 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: marketplace-operator, uid: c100705c-9ecd-449a-969b-a207f023c1b0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.838691554+00:00 stderr F I1212 16:15:19.838654 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator-operator, name: default, uid: 86fb5851-a892-48de-a31a-ecdeaeb55b9f]" virtual=false 2025-12-12T16:15:19.838767246+00:00 stderr F I1212 16:15:19.835870 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: endpointslicemirroring-controller, uid: 82732f53-5027-447a-9494-37cb459120ba]" 2025-12-12T16:15:19.838767246+00:00 stderr F I1212 16:15:19.838717 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: statefulset-controller, uid: d7312284-dc0c-47e0-872e-968ccd8291ad]" virtual=false 2025-12-12T16:15:19.838767246+00:00 stderr F I1212 16:15:19.836034 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication, name: oauth-openshift, uid: 06d4a926-27d0-49ac-b5dd-b15c69b33bae]" 2025-12-12T16:15:19.838788746+00:00 stderr F I1212 16:15:19.838765 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-network-diagnostics, name: network-check-source-5bb8f5cd97, uid: 1df99967-ec1a-4576-a622-ef7910592096]" virtual=false 2025-12-12T16:15:19.838825177+00:00 stderr F I1212 16:15:19.834225 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-authentication, name: oauth-openshift, uid: 7299e9ca-868e-4f2a-8bab-86445465b6e7]" 
2025-12-12T16:15:19.838835447+00:00 stderr F I1212 16:15:19.838822 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-version, name: default, uid: 80c0c1cd-233e-4b90-9995-e6b99f63549a]" virtual=false 2025-12-12T16:15:19.838925759+00:00 stderr F I1212 16:15:19.834893 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-storage-operator, name: builder, uid: 7cf688aa-d2b2-4ed4-b662-ca72cacf51d6]" 2025-12-12T16:15:19.838925759+00:00 stderr F I1212 16:15:19.838912 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-public, name: deployer, uid: 7beb48aa-b6de-4c2e-8d8d-e04c864573b5]" virtual=false 2025-12-12T16:15:19.843095320+00:00 stderr F I1212 16:15:19.843049 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-operator, name: deployer, uid: a8772516-08ea-467a-a65b-9f9dd79ac240]" 2025-12-12T16:15:19.843119251+00:00 stderr F I1212 16:15:19.843089 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler, name: localhost-recovery-client, uid: 02402ae1-1813-4bc8-bb77-523d295fe6dc]" virtual=false 2025-12-12T16:15:19.843286145+00:00 stderr F I1212 16:15:19.843253 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: persistent-volume-binder, uid: ee43d172-bfed-432e-851b-8ec90da1e682]" 2025-12-12T16:15:19.843286145+00:00 stderr F I1212 16:15:19.843277 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-apiserver-operator, name: metrics, uid: c89a6cf8-b1f5-433c-a98e-6433ab2d8604]" virtual=false 2025-12-12T16:15:19.843550141+00:00 stderr F I1212 16:15:19.843510 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator, uid: 736e29a2-1ce3-47d8-ac5c-ddba4e238245]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.843563661+00:00 stderr F I1212 16:15:19.843549 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager, name: localhost-recovery-client, uid: c39f32e4-2bf7-4ab1-a758-6a3e9a19f741]" virtual=false 2025-12-12T16:15:19.844655298+00:00 stderr F I1212 16:15:19.844498 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-65b6cccf98" need=1 creating=1 2025-12-12T16:15:19.848227044+00:00 stderr F I1212 16:15:19.845682 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: multus-ancillary-tools, uid: c21b8763-d204-40e9-8b00-8f8a8767dd88]" 
owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.848227044+00:00 stderr F I1212 16:15:19.845742 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-scheduler-operator, name: metrics, uid: 9c082bd0-69fc-4adf-bbca-de0862ba049d]" virtual=false 2025-12-12T16:15:19.866403551+00:00 stderr F I1212 16:15:19.864890 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager-operator, name: deployer, uid: ad265540-0b1d-4c38-a635-b761945529bc]" 2025-12-12T16:15:19.866403551+00:00 stderr F I1212 16:15:19.864937 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler, name: installer-sa, uid: 5c5fde73-5cc6-40bc-bd3e-0995f75828e3]" virtual=false 2025-12-12T16:15:19.866403551+00:00 stderr F I1212 16:15:19.865092 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-console, name: default, uid: 3c7b4d13-2251-45a6-9967-d84afc2cb185]" 2025-12-12T16:15:19.866403551+00:00 stderr F I1212 16:15:19.865127 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config, name: builder, uid: af779aa9-bc34-420d-b9dd-3f64a8cd8795]" virtual=false 2025-12-12T16:15:19.866403551+00:00 stderr F I1212 16:15:19.866190 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-operator-lifecycle-manager, name: package-server-manager-metrics, uid: 199e5ec3-0fe1-4929-8be5-eeb222b837d4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.866403551+00:00 stderr F I1212 16:15:19.866237 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-samples-operator, name: builder, uid: 8588f92a-2dbe-4ae3-a8f8-29c0e1cbb10c]" virtual=false 2025-12-12T16:15:19.880773137+00:00 stderr F I1212 16:15:19.875452 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver, name: builder, uid: a72f137a-0cef-49e7-ad2f-7de2e3f9e3db]" 2025-12-12T16:15:19.880773137+00:00 stderr F I1212 16:15:19.880644 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-image-registry, name: image-registry-operator, uid: 21252561-db9f-4519-becf-9b4daef38a73]" virtual=false 2025-12-12T16:15:19.881362941+00:00 stderr F I1212 16:15:19.881326 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-managed, name: default, uid: 97c1835e-d31e-490f-adb5-0fa15e1fc5c1]" 2025-12-12T16:15:19.881548915+00:00 stderr F I1212 16:15:19.881520 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: 
openshift-kube-controller-manager, name: deployer, uid: b0f75917-032e-4c0f-8f3b-14f5f8f5a41b]" virtual=false 2025-12-12T16:15:19.883127073+00:00 stderr F I1212 16:15:19.883052 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-diagnostics, name: builder, uid: bead93dc-3918-448e-827b-aba7ae88a412]" 2025-12-12T16:15:19.883127073+00:00 stderr F I1212 16:15:19.883081 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver, name: deployer, uid: 566a48b1-9358-49b2-a5b6-83230406f41f]" virtual=false 2025-12-12T16:15:19.883145854+00:00 stderr F I1212 16:15:19.883101 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: build-config-change-controller, uid: 2728b9f5-a4cd-4c68-baf7-f8ba8bfec130]" 2025-12-12T16:15:19.883196435+00:00 stderr F I1212 16:15:19.883151 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: image-trigger-controller, uid: df02ec41-3450-4126-8633-b087bad850de]" virtual=false 2025-12-12T16:15:19.892484689+00:00 stderr F I1212 16:15:19.888865 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-version, name: default, uid: 80c0c1cd-233e-4b90-9995-e6b99f63549a]" 2025-12-12T16:15:19.892484689+00:00 stderr F I1212 16:15:19.888902 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver, name: localhost-recovery-client, uid: 65bb636e-ab4f-4b40-9181-a17ceca447ed]" virtual=false 2025-12-12T16:15:19.897575751+00:00 stderr F I1212 16:15:19.897501 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: statefulset-controller, uid: d7312284-dc0c-47e0-872e-968ccd8291ad]" 2025-12-12T16:15:19.897575751+00:00 stderr F I1212 16:15:19.897546 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-monitoring, name: cluster-monitoring-operator, uid: 55006a20-e0ea-4a38-a3bc-8ae4f1472858]" virtual=false 2025-12-12T16:15:19.898082664+00:00 stderr F I1212 16:15:19.898050 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-dns-operator/dns-operator-799b87ffcd" need=1 creating=1 2025-12-12T16:15:19.898511464+00:00 stderr F I1212 16:15:19.898455 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler, name: localhost-recovery-client, uid: 02402ae1-1813-4bc8-bb77-523d295fe6dc]" 2025-12-12T16:15:19.898533995+00:00 stderr F I1212 16:15:19.898506 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: service-cidrs-controller, uid: 1227546e-33d8-47f3-9540-121cff5e5f9f]" virtual=false 2025-12-12T16:15:19.898683548+00:00 stderr F I1212 16:15:19.898652 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" 
item="[v1/ServiceAccount, namespace: kube-public, name: deployer, uid: 7beb48aa-b6de-4c2e-8d8d-e04c864573b5]" 2025-12-12T16:15:19.898697058+00:00 stderr F I1212 16:15:19.898677 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: ingress-to-route-controller, uid: bdde9720-b4df-4c5f-a27b-e2c4558d3e7a]" virtual=false 2025-12-12T16:15:19.909026467+00:00 stderr F I1212 16:15:19.908934 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator-operator, name: default, uid: 86fb5851-a892-48de-a31a-ecdeaeb55b9f]" 2025-12-12T16:15:19.909026467+00:00 stderr F I1212 16:15:19.908973 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-image-registry/cluster-image-registry-operator-86c45576b9" need=1 creating=1 2025-12-12T16:15:19.909814136+00:00 stderr F I1212 16:15:19.908982 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator-operator, name: kube-storage-version-migrator-operator, uid: 5d04164f-45ca-4c60-8a08-c4459300ecda]" virtual=false 2025-12-12T16:15:19.909814136+00:00 stderr F I1212 16:15:19.908945 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-etcd-operator/etcd-operator-69b85846b6" need=1 creating=1 2025-12-12T16:15:19.910096583+00:00 stderr F I1212 16:15:19.909896 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-image-registry/image-registry-66587d64c8" need=1 creating=1 2025-12-12T16:15:19.918829423+00:00 stderr F I1212 16:15:19.918763 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager, name: localhost-recovery-client, uid: c39f32e4-2bf7-4ab1-a758-6a3e9a19f741]" 2025-12-12T16:15:19.918855664+00:00 stderr F I1212 16:15:19.918811 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: deployer, uid: ddec520b-6891-4c8b-9a52-f9fabdf5f688]" virtual=false 2025-12-12T16:15:19.918987657+00:00 stderr F I1212 16:15:19.918957 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler, name: installer-sa, uid: 5c5fde73-5cc6-40bc-bd3e-0995f75828e3]" 2025-12-12T16:15:19.919001338+00:00 stderr F I1212 16:15:19.918981 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: pvc-protection-controller, uid: b6d23f97-28e9-41bc-9c26-22bcc2ec90ed]" virtual=false 2025-12-12T16:15:19.919075559+00:00 stderr F I1212 16:15:19.919046 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config, name: builder, uid: af779aa9-bc34-420d-b9dd-3f64a8cd8795]" 2025-12-12T16:15:19.919088420+00:00 stderr F I1212 16:15:19.919068 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: image-import-controller, uid: 
1dc06cca-7293-417c-81e2-5e6cce6b9533]" virtual=false 2025-12-12T16:15:19.919440618+00:00 stderr F I1212 16:15:19.919377 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-config-operator, name: machine-config-operator, uid: 67b62412-7f65-4222-ba31-74c21dcee1b1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.919460079+00:00 stderr F I1212 16:15:19.919434 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-openstack-infra, name: builder, uid: 2eb2c036-e615-4b2e-b9dc-38cc6b9c7670]" virtual=false 2025-12-12T16:15:19.919672454+00:00 stderr F I1212 16:15:19.919643 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-samples-operator, name: builder, uid: 8588f92a-2dbe-4ae3-a8f8-29c0e1cbb10c]" 2025-12-12T16:15:19.919672454+00:00 stderr F I1212 16:15:19.919661 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca-operator, name: default, uid: 7c7b0e94-0077-4963-a37b-1e26fbcf8d6b]" virtual=false 2025-12-12T16:15:19.920698518+00:00 stderr F I1212 16:15:19.920658 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: machine-api-operator-webhook, uid: 3d8d8642-1cd0-4ece-918a-8ae8e150b269]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.920698518+00:00 stderr F I1212 16:15:19.920682 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-controller-manager-operator, name: metrics, uid: 4a7bd676-8b0b-4244-9e34-29c3edb8bb40]" virtual=false 2025-12-12T16:15:19.920798841+00:00 stderr F I1212 16:15:19.920745 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ingress, name: router-internal-default, uid: 5f75d74e-bfd6-48a0-89a7-729f8cffaebd]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"router-default","uid":"6445a1ec-8ec2-4ec8-b191-9cc7fa235148","controller":true}] 2025-12-12T16:15:19.920812171+00:00 stderr F I1212 16:15:19.920794 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-storage-version-migrator-operator, name: metrics, uid: e5bc231d-9114-40a5-a422-584788726a16]" virtual=false 2025-12-12T16:15:19.940693410+00:00 stderr F I1212 16:15:19.940609 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-apiserver, name: deployer, uid: 566a48b1-9358-49b2-a5b6-83230406f41f]" 2025-12-12T16:15:19.940693410+00:00 stderr F I1212 16:15:19.940631 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-controller-manager, name: deployer, uid: 
b0f75917-032e-4c0f-8f3b-14f5f8f5a41b]" 2025-12-12T16:15:19.940693410+00:00 stderr F I1212 16:15:19.940657 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-config-operator, name: machine-config-controller, uid: c7d72a1b-faa9-4eb8-96e4-0297bb74850e]" virtual=false 2025-12-12T16:15:19.940757222+00:00 stderr F I1212 16:15:19.940683 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver, name: default, uid: 2b13ea26-a053-40ad-b8a8-2802624b8f79]" virtual=false 2025-12-12T16:15:19.944416500+00:00 stderr F I1212 16:15:19.941020 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 78bb60a8-531d-48f1-b653-8764a30ad047]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.944416500+00:00 stderr F I1212 16:15:19.941055 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-nutanix-infra, name: deployer, uid: 3792a84f-7f79-4eb2-a968-75ccee612176]" virtual=false 2025-12-12T16:15:19.950227890+00:00 stderr F I1212 16:15:19.949216 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver, name: localhost-recovery-client, uid: 65bb636e-ab4f-4b40-9181-a17ceca447ed]" 2025-12-12T16:15:19.950227890+00:00 stderr F I1212 16:15:19.949261 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: pod-garbage-collector, uid: 5ce54231-742f-4dfb-829b-81771f654767]" virtual=false 2025-12-12T16:15:19.950227890+00:00 stderr F I1212 16:15:19.949465 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: service-cidrs-controller, uid: 1227546e-33d8-47f3-9540-121cff5e5f9f]" 2025-12-12T16:15:19.950227890+00:00 stderr F I1212 16:15:19.949480 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: clusterrole-aggregation-controller, uid: 5f605669-b9d9-4714-88c8-ddf0857592b2]" virtual=false 2025-12-12T16:15:19.950227890+00:00 stderr F I1212 16:15:19.949752 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: image-trigger-controller, uid: df02ec41-3450-4126-8633-b087bad850de]" 2025-12-12T16:15:19.950227890+00:00 stderr F I1212 16:15:19.949767 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-config-operator, name: metrics, uid: d0b70160-b97a-47c1-8814-0419134941de]" virtual=false 2025-12-12T16:15:19.950227890+00:00 stderr F I1212 16:15:19.949821 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: ingress-to-route-controller, uid: bdde9720-b4df-4c5f-a27b-e2c4558d3e7a]" 
2025-12-12T16:15:19.950227890+00:00 stderr F I1212 16:15:19.949831 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: unidling-controller, uid: fe05a460-6bad-4ddd-9ca0-bd504b0531c6]" virtual=false 2025-12-12T16:15:19.958474819+00:00 stderr F I1212 16:15:19.958366 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/ReplicaSet, namespace: openshift-network-diagnostics, name: network-check-source-5bb8f5cd97, uid: 1df99967-ec1a-4576-a622-ef7910592096]" owner=[{"apiVersion":"apps/v1","kind":"Deployment","name":"network-check-source","uid":"e3b48335-28bd-49bf-9cf0-82069658b68a","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:19.958517790+00:00 stderr F I1212 16:15:19.958458 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: a79ef72a-9de8-4bd8-ab71-7e0b71724a57]" virtual=false 2025-12-12T16:15:19.958517790+00:00 stderr F I1212 16:15:19.958487 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-apiserver-operator, name: metrics, uid: c89a6cf8-b1f5-433c-a98e-6433ab2d8604]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.958639713+00:00 stderr F I1212 16:15:19.958522 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: validatingadmissionpolicy-status-controller, uid: 0dfeba7a-60b3-42d1-a7b5-f1253ef2665f]" virtual=false 2025-12-12T16:15:19.959707318+00:00 stderr F I1212 16:15:19.959617 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-ingress-operator/ingress-operator-6b9cb4dbcf" need=1 creating=1 2025-12-12T16:15:19.960081557+00:00 stderr F I1212 16:15:19.960042 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-ingress/router-default-68cf44c8b8" need=1 creating=1 2025-12-12T16:15:19.968540041+00:00 stderr F I1212 16:15:19.968474 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: pvc-protection-controller, uid: b6d23f97-28e9-41bc-9c26-22bcc2ec90ed]" 2025-12-12T16:15:19.968540041+00:00 stderr F I1212 16:15:19.968515 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kni-infra, name: builder, uid: 4f34ca53-a9e9-4877-b0b3-71ed10ae5796]" virtual=false 2025-12-12T16:15:19.968589222+00:00 stderr F I1212 16:15:19.968534 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: deployer, uid: ddec520b-6891-4c8b-9a52-f9fabdf5f688]" 2025-12-12T16:15:19.968741406+00:00 stderr F I1212 16:15:19.968586 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-controller-manager-operator, name: metrics, uid: 756dc7f9-c733-4561-89e6-0982cec51bd4]" virtual=false 
2025-12-12T16:15:19.970110659+00:00 stderr F I1212 16:15:19.970057 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: image-import-controller, uid: 1dc06cca-7293-417c-81e2-5e6cce6b9533]" 2025-12-12T16:15:19.970131789+00:00 stderr F I1212 16:15:19.970112 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: deployer, uid: c7569911-15b2-499c-bcfc-63c922e616c4]" virtual=false 2025-12-12T16:15:19.974590877+00:00 stderr F I1212 16:15:19.974480 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-scheduler-operator, name: metrics, uid: 9c082bd0-69fc-4adf-bbca-de0862ba049d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.974590877+00:00 stderr F I1212 16:15:19.974569 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: metrics-daemon-sa, uid: abce43d0-2a82-4a99-9c68-e5d3d0e59581]" virtual=false 2025-12-12T16:15:19.978865400+00:00 stderr F I1212 16:15:19.978626 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-openstack-infra, name: builder, uid: 2eb2c036-e615-4b2e-b9dc-38cc6b9c7670]" 2025-12-12T16:15:19.978865400+00:00 stderr F I1212 16:15:19.978674 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-apiserver-operator, name: metrics, uid: 67098b90-387a-4f7b-8a68-e484e6889ec7]" virtual=false 2025-12-12T16:15:19.984451244+00:00 stderr F I1212 16:15:19.984365 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca-operator, name: default, uid: 7c7b0e94-0077-4963-a37b-1e26fbcf8d6b]" 2025-12-12T16:15:19.984451244+00:00 stderr F I1212 16:15:19.984405 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd, name: etcd-sa, uid: cf8da280-4a93-405d-8000-4e3ef2f1a7e3]" virtual=false 2025-12-12T16:15:19.985386777+00:00 stderr F I1212 16:15:19.984977 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-kube-controller-manager-operator/kube-controller-manager-operator-69d5f845f8" need=1 creating=1 2025-12-12T16:15:19.985386777+00:00 stderr F I1212 16:15:19.985001 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-kube-apiserver-operator/kube-apiserver-operator-575994946d" need=1 creating=1 2025-12-12T16:15:19.990223143+00:00 stderr F I1212 16:15:19.987660 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-54f497555d" need=1 creating=1 2025-12-12T16:15:19.991224998+00:00 stderr F I1212 16:15:19.991075 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-565b79b866" need=1 creating=1 
2025-12-12T16:15:19.993429181+00:00 stderr F I1212 16:15:19.991530 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-kube-storage-version-migrator/migrator-866fcbc849" need=1 creating=1 2025-12-12T16:15:19.993429181+00:00 stderr F I1212 16:15:19.991894 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-image-registry, name: image-registry-operator, uid: 21252561-db9f-4519-becf-9b4daef38a73]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:19.993429181+00:00 stderr F I1212 16:15:19.991923 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: default, uid: 1b283572-9af0-442d-8a15-9ee9027190a6]" virtual=false 2025-12-12T16:15:19.996784901+00:00 stderr F I1212 16:15:19.996729 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver, name: default, uid: 2b13ea26-a053-40ad-b8a8-2802624b8f79]" 2025-12-12T16:15:19.996828112+00:00 stderr F I1212 16:15:19.996771 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-node, uid: c758def5-7362-4985-b540-393f26fc97a6]" virtual=false 2025-12-12T16:15:19.999351003+00:00 stderr F I1212 16:15:19.998986 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-nutanix-infra, name: deployer, uid: 3792a84f-7f79-4eb2-a968-75ccee612176]" 2025-12-12T16:15:19.999351003+00:00 stderr F I1212 16:15:19.999038 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator-operator, name: builder, uid: 6619c0cb-fa6a-456e-b3a8-856dcf11cb1a]" virtual=false 2025-12-12T16:15:20.001694680+00:00 stderr F I1212 16:15:20.001525 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: pod-garbage-collector, uid: 5ce54231-742f-4dfb-829b-81771f654767]" 2025-12-12T16:15:20.001694680+00:00 stderr F I1212 16:15:20.001559 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: default, uid: 386d11da-de12-4a63-b9ab-9b4c97ada569]" virtual=false 2025-12-12T16:15:20.005745667+00:00 stderr F I1212 16:15:20.005702 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: clusterrole-aggregation-controller, uid: 5f605669-b9d9-4714-88c8-ddf0857592b2]" 2025-12-12T16:15:20.005765978+00:00 stderr F I1212 16:15:20.005734 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: deployer, uid: cbf3ed8b-68e4-4187-bb1a-d2bb1ce31f0f]" virtual=false 2025-12-12T16:15:20.012520761+00:00 stderr F I1212 16:15:20.012463 1 garbagecollector.go:548] "item doesn't have an owner, continue 
on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: unidling-controller, uid: fe05a460-6bad-4ddd-9ca0-bd504b0531c6]" 2025-12-12T16:15:20.012540641+00:00 stderr F I1212 16:15:20.012506 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator, uid: d89f5418-8a04-45bf-a138-83af4c524742]" virtual=false 2025-12-12T16:15:20.018239948+00:00 stderr F I1212 16:15:20.018133 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: validatingadmissionpolicy-status-controller, uid: 0dfeba7a-60b3-42d1-a7b5-f1253ef2665f]" 2025-12-12T16:15:20.018270959+00:00 stderr F I1212 16:15:20.018241 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-control-plane, uid: e7d12ea0-5793-4ac6-b96e-798934641d22]" virtual=false 2025-12-12T16:15:20.023969706+00:00 stderr F I1212 16:15:20.023776 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator-operator, name: kube-storage-version-migrator-operator, uid: 5d04164f-45ca-4c60-8a08-c4459300ecda]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.023969706+00:00 stderr F I1212 16:15:20.023826 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-user-workload-monitoring, name: builder, uid: 0bfd41d1-728f-4d9e-ad31-08b91bcd63af]" virtual=false 2025-12-12T16:15:20.027080301+00:00 stderr F I1212 16:15:20.026944 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-monitoring, name: cluster-monitoring-operator, uid: 55006a20-e0ea-4a38-a3bc-8ae4f1472858]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.027142063+00:00 stderr F I1212 16:15:20.027123 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: deployer-controller, uid: 713e7c7c-42e2-4427-a84d-9fd6516e6601]" virtual=false 2025-12-12T16:15:20.028725681+00:00 stderr F I1212 16:15:20.028549 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kni-infra, name: builder, uid: 4f34ca53-a9e9-4877-b0b3-71ed10ae5796]" 2025-12-12T16:15:20.028725681+00:00 stderr F I1212 16:15:20.028593 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-node-lease, name: builder, uid: 1312ac94-adc8-4d42-b0bf-e4b5b221e38f]" virtual=false 2025-12-12T16:15:20.037452491+00:00 stderr F I1212 16:15:20.037352 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: 
openshift-infra, name: deployer, uid: c7569911-15b2-499c-bcfc-63c922e616c4]" 2025-12-12T16:15:20.037452491+00:00 stderr F I1212 16:15:20.037392 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: default, uid: 2745b706-0414-4a3c-adbc-cc32f3ad4f80]" virtual=false 2025-12-12T16:15:20.044801298+00:00 stderr F I1212 16:15:20.044754 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd, name: etcd-sa, uid: cf8da280-4a93-405d-8000-4e3ef2f1a7e3]" 2025-12-12T16:15:20.044841519+00:00 stderr F I1212 16:15:20.044793 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-route-controller-manager, name: default, uid: aa3b7c25-ebec-443a-88d6-5bedeb19d655]" virtual=false 2025-12-12T16:15:20.052941274+00:00 stderr F I1212 16:15:20.052520 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-controller-manager-operator, name: metrics, uid: 4a7bd676-8b0b-4244-9e34-29c3edb8bb40]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.052941274+00:00 stderr F I1212 16:15:20.052611 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-vsphere-infra, name: deployer, uid: cd1166ae-7ef0-44af-b045-20be9069087c]" virtual=false 2025-12-12T16:15:20.054012740+00:00 stderr F I1212 16:15:20.053404 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-storage-version-migrator-operator, name: metrics, uid: e5bc231d-9114-40a5-a422-584788726a16]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.054032131+00:00 stderr F I1212 16:15:20.054004 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-operator, name: default, uid: 2e3ec735-7dc5-461b-8247-6194400cea62]" virtual=false 2025-12-12T16:15:20.056263164+00:00 stderr F I1212 16:15:20.056219 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: default, uid: 1b283572-9af0-442d-8a15-9ee9027190a6]" 2025-12-12T16:15:20.056333746+00:00 stderr F I1212 16:15:20.056308 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-marketplace, name: marketplace-operator-metrics, uid: 94337474-19e9-47ef-a63f-a5db85f82770]" virtual=false 2025-12-12T16:15:20.062538276+00:00 stderr F I1212 16:15:20.062449 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-config-operator, name: machine-config-controller, uid: c7d72a1b-faa9-4eb8-96e4-0297bb74850e]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.062566256+00:00 stderr F I1212 16:15:20.062521 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 6ba7c917-27fc-4114-85d9-07825a840abc]" virtual=false 2025-12-12T16:15:20.068030268+00:00 stderr F I1212 16:15:20.067888 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator-operator, name: builder, uid: 6619c0cb-fa6a-456e-b3a8-856dcf11cb1a]" 2025-12-12T16:15:20.068030268+00:00 stderr F I1212 16:15:20.067928 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: machine-config-operator, uid: bfac9eb9-68bb-4da9-afc6-a17734d95032]" virtual=false 2025-12-12T16:15:20.068857148+00:00 stderr F I1212 16:15:20.068813 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: default, uid: 386d11da-de12-4a63-b9ab-9b4c97ada569]" 2025-12-12T16:15:20.068857148+00:00 stderr F I1212 16:15:20.068837 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-apiserver, name: apiserver, uid: 0b3c16e1-fe8b-4e27-bd29-ca7bbf3dc547]" virtual=false 2025-12-12T16:15:20.075328084+00:00 stderr F I1212 16:15:20.075077 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: deployer, uid: cbf3ed8b-68e4-4187-bb1a-d2bb1ce31f0f]" 2025-12-12T16:15:20.075328084+00:00 stderr F I1212 16:15:20.075140 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd, name: builder, uid: ba69f219-cf03-41f6-b902-7b668aa1a5cd]" virtual=false 2025-12-12T16:15:20.077517656+00:00 stderr F I1212 16:15:20.076597 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-config-operator, name: metrics, uid: d0b70160-b97a-47c1-8814-0419134941de]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.077517656+00:00 stderr F I1212 16:15:20.076644 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-diagnostics, name: default, uid: 3103673c-4e1b-493f-aed9-3a7d847c8fa4]" virtual=false 2025-12-12T16:15:20.083902480+00:00 stderr F I1212 16:15:20.083828 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-machine-api/control-plane-machine-set-operator-75ffdb6fcd" need=1 creating=1 2025-12-12T16:15:20.084032683+00:00 stderr F I1212 16:15:20.083946 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-machine-api/machine-api-operator-755bb95488" need=1 creating=1 2025-12-12T16:15:20.084393452+00:00 stderr F 
I1212 16:15:20.084333 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-machine-config-operator/machine-config-controller-f9cdd68f7" need=1 creating=1 2025-12-12T16:15:20.084544396+00:00 stderr F I1212 16:15:20.084513 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-machine-config-operator/machine-config-operator-67c9d58cbb" need=1 creating=1 2025-12-12T16:15:20.085428147+00:00 stderr F I1212 16:15:20.085405 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-marketplace/marketplace-operator-547dbd544d" need=1 creating=1 2025-12-12T16:15:20.090032728+00:00 stderr F I1212 16:15:20.089893 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: a79ef72a-9de8-4bd8-ab71-7e0b71724a57]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.090032728+00:00 stderr F I1212 16:15:20.089951 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: default, uid: aab42295-a624-4ccc-972a-b41ad1114219]" virtual=false 2025-12-12T16:15:20.093422280+00:00 stderr F I1212 16:15:20.093328 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-user-workload-monitoring, name: builder, uid: 0bfd41d1-728f-4d9e-ad31-08b91bcd63af]" 2025-12-12T16:15:20.093422280+00:00 stderr F I1212 16:15:20.093360 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config, name: default, uid: 1624f01e-85f4-4361-9b08-5b557e6efb22]" virtual=false 2025-12-12T16:15:20.094518796+00:00 stderr F I1212 16:15:20.094468 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: deployer-controller, uid: 713e7c7c-42e2-4427-a84d-9fd6516e6601]" 2025-12-12T16:15:20.094591668+00:00 stderr F I1212 16:15:20.094571 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: deploymentconfig-controller, uid: cc06b50e-5feb-470f-b4c7-d7406b801911]" virtual=false 2025-12-12T16:15:20.098333298+00:00 stderr F I1212 16:15:20.098274 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-node-lease, name: builder, uid: 1312ac94-adc8-4d42-b0bf-e4b5b221e38f]" 2025-12-12T16:15:20.098361869+00:00 stderr F I1212 16:15:20.098321 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: cluster-autoscaler, uid: 56382e05-4f3e-45e2-9065-468d4f668091]" virtual=false 2025-12-12T16:15:20.099136317+00:00 stderr F I1212 16:15:20.099099 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-multus/multus-admission-controller-69db94689b" need=1 creating=1 2025-12-12T16:15:20.099463755+00:00 stderr F I1212 16:15:20.099428 1 garbagecollector.go:567] "item 
has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-controller-manager-operator, name: metrics, uid: 756dc7f9-c733-4561-89e6-0982cec51bd4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.099479855+00:00 stderr F I1212 16:15:20.099462 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication-operator, name: builder, uid: ec18956d-7ea3-460b-b795-ae2e221e5597]" virtual=false 2025-12-12T16:15:20.100874959+00:00 stderr F I1212 16:15:20.100831 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-oauth-apiserver/apiserver-8596bd845d" need=1 creating=1 2025-12-12T16:15:20.103721598+00:00 stderr F I1212 16:15:20.102817 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: default, uid: 2745b706-0414-4a3c-adbc-cc32f3ad4f80]" 2025-12-12T16:15:20.103721598+00:00 stderr F I1212 16:15:20.102868 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-node-identity, name: default, uid: 3ff99114-6df1-4a97-8009-239e69ead330]" virtual=false 2025-12-12T16:15:20.107249523+00:00 stderr F I1212 16:15:20.106920 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: metrics-daemon-sa, uid: abce43d0-2a82-4a99-9c68-e5d3d0e59581]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.107249523+00:00 stderr F I1212 16:15:20.106960 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-nutanix-infra, name: default, uid: 8c879ee5-2d71-4a75-bcbc-90f06c3b5306]" virtual=false 2025-12-12T16:15:20.107249523+00:00 stderr F I1212 16:15:20.106975 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-operator-lifecycle-manager/catalog-operator-75ff9f647d" need=1 creating=1 2025-12-12T16:15:20.111069035+00:00 stderr F I1212 16:15:20.111012 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-apiserver-operator, name: metrics, uid: 67098b90-387a-4f7b-8a68-e484e6889ec7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.111098585+00:00 stderr F I1212 16:15:20.111057 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-machine-approver, name: default, uid: 4e4e9461-ea5d-4d99-9505-35dbe519d6e6]" virtual=false 2025-12-12T16:15:20.112354006+00:00 stderr F I1212 16:15:20.112308 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-route-controller-manager, name: default, uid: 
aa3b7c25-ebec-443a-88d6-5bedeb19d655]" 2025-12-12T16:15:20.112354006+00:00 stderr F I1212 16:15:20.112337 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-oauth-apiserver, name: api, uid: a4a2a4a5-c090-496f-ba96-7e7759b7e76e]" virtual=false 2025-12-12T16:15:20.117069489+00:00 stderr F I1212 16:15:20.116993 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-vsphere-infra, name: deployer, uid: cd1166ae-7ef0-44af-b045-20be9069087c]" 2025-12-12T16:15:20.117069489+00:00 stderr F I1212 16:15:20.117038 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: builder, uid: ac24149d-bbb5-409a-98e5-e9c9bb94b98b]" virtual=false 2025-12-12T16:15:20.118306139+00:00 stderr F I1212 16:15:20.118258 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-operator, name: default, uid: 2e3ec735-7dc5-461b-8247-6194400cea62]" 2025-12-12T16:15:20.118323659+00:00 stderr F I1212 16:15:20.118293 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: default, uid: 99601b99-6f8e-4cd7-85b2-edc53b26c14b]" virtual=false 2025-12-12T16:15:20.125800250+00:00 stderr F I1212 16:15:20.125733 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-node, uid: c758def5-7362-4985-b540-393f26fc97a6]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.125800250+00:00 stderr F I1212 16:15:20.125770 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: privileged-namespaces-psa-label-syncer, uid: e7389ded-b23c-4fc0-b3ab-5d33f7e9d342]" virtual=false 2025-12-12T16:15:20.136473817+00:00 stderr F I1212 16:15:20.136253 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-kube-apiserver, name: apiserver, uid: 0b3c16e1-fe8b-4e27-bd29-ca7bbf3dc547]" 2025-12-12T16:15:20.136473817+00:00 stderr F I1212 16:15:20.136294 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver, name: builder, uid: 2ac4350b-fa20-4664-b7ac-18f9ac8040a0]" virtual=false 2025-12-12T16:15:20.141304623+00:00 stderr F I1212 16:15:20.141070 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd, name: builder, uid: ba69f219-cf03-41f6-b902-7b668aa1a5cd]" 2025-12-12T16:15:20.141304623+00:00 stderr F I1212 16:15:20.141101 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-operator, name: default, uid: 9272d409-3498-4e8d-8e7d-cbba64cffa29]" virtual=false 2025-12-12T16:15:20.142378399+00:00 stderr F I1212 16:15:20.142326 1 
garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-diagnostics, name: default, uid: 3103673c-4e1b-493f-aed9-3a7d847c8fa4]" 2025-12-12T16:15:20.142378399+00:00 stderr F I1212 16:15:20.142359 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-dns, name: dns-default, uid: e794a656-06b8-46f9-9107-71acf4782fc7]" virtual=false 2025-12-12T16:15:20.150890244+00:00 stderr F I1212 16:15:20.150768 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator, uid: d89f5418-8a04-45bf-a138-83af4c524742]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.150890244+00:00 stderr F I1212 16:15:20.150851 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd, name: installer-sa, uid: 1d552f28-335e-4a85-bc2d-d51d26fc26bc]" virtual=false 2025-12-12T16:15:20.151989621+00:00 stderr F I1212 16:15:20.151917 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: ovn-kubernetes-control-plane, uid: e7d12ea0-5793-4ac6-b96e-798934641d22]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.152030061+00:00 stderr F I1212 16:15:20.151999 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver-operator, name: default, uid: 55d30c96-0171-4cea-81c8-1bc7dde775fa]" virtual=false 2025-12-12T16:15:20.152205136+00:00 stderr F I1212 16:15:20.152137 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operator-lifecycle-manager, name: default, uid: aab42295-a624-4ccc-972a-b41ad1114219]" 2025-12-12T16:15:20.152219346+00:00 stderr F I1212 16:15:20.152191 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler, name: builder, uid: db0f51ea-3d5e-44aa-b8fc-e26b02a67203]" virtual=false 2025-12-12T16:15:20.157870512+00:00 stderr F I1212 16:15:20.157793 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config, name: default, uid: 1624f01e-85f4-4361-9b08-5b557e6efb22]" 2025-12-12T16:15:20.157870512+00:00 stderr F I1212 16:15:20.157835 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Secret, namespace: openshift-operator-lifecycle-manager, name: pprof-cert, uid: 78c31177-72ae-4588-82df-59ba321a257b]" virtual=false 2025-12-12T16:15:20.160542607+00:00 stderr F I1212 16:15:20.160485 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, 
name: deploymentconfig-controller, uid: cc06b50e-5feb-470f-b4c7-d7406b801911]" 2025-12-12T16:15:20.160542607+00:00 stderr F I1212 16:15:20.160520 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: cronjob-controller, uid: d10b5490-76e5-4a6b-b20d-081a7fd10ac0]" virtual=false 2025-12-12T16:15:20.165899776+00:00 stderr F I1212 16:15:20.165757 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication-operator, name: builder, uid: ec18956d-7ea3-460b-b795-ae2e221e5597]" 2025-12-12T16:15:20.165899776+00:00 stderr F I1212 16:15:20.165792 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console, name: console, uid: 324553c0-d8c0-43b0-bbae-07283a98bcf1]" virtual=false 2025-12-12T16:15:20.170199559+00:00 stderr F I1212 16:15:20.169995 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-node-identity, name: default, uid: 3ff99114-6df1-4a97-8009-239e69ead330]" 2025-12-12T16:15:20.170199559+00:00 stderr F I1212 16:15:20.170036 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd, name: revision-status-1, uid: f0431d0b-c8bb-453e-adcc-46f809a3bc04]" virtual=false 2025-12-12T16:15:20.172224608+00:00 stderr F I1212 16:15:20.171831 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-nutanix-infra, name: default, uid: 8c879ee5-2d71-4a75-bcbc-90f06c3b5306]" 2025-12-12T16:15:20.173646312+00:00 stderr F I1212 16:15:20.171901 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: node-ca, uid: 1476d897-3740-45c6-b3a2-3403be584014]" virtual=false 2025-12-12T16:15:20.180901827+00:00 stderr F I1212 16:15:20.180436 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-oauth-apiserver, name: api, uid: a4a2a4a5-c090-496f-ba96-7e7759b7e76e]" 2025-12-12T16:15:20.180901827+00:00 stderr F I1212 16:15:20.180486 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-marketplace, name: redhat-operators, uid: 0cfb03d5-61fc-4569-8be6-5f028c9f7f5d]" virtual=false 2025-12-12T16:15:20.180901827+00:00 stderr F I1212 16:15:20.180489 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-machine-approver, name: default, uid: 4e4e9461-ea5d-4d99-9505-35dbe519d6e6]" 2025-12-12T16:15:20.180901827+00:00 stderr F I1212 16:15:20.180544 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-console-operator, name: metrics, uid: 899e5eff-60ef-429f-9533-9c263e2d0ddb]" virtual=false 2025-12-12T16:15:20.181013080+00:00 stderr F I1212 16:15:20.180992 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-operator-lifecycle-manager/olm-operator-5cdf44d969" need=1 creating=1 
2025-12-12T16:15:20.183499040+00:00 stderr F I1212 16:15:20.182434 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-operator-lifecycle-manager/package-server-manager-77f986bd66" need=1 creating=1 2025-12-12T16:15:20.183666534+00:00 stderr F I1212 16:15:20.183610 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: builder, uid: ac24149d-bbb5-409a-98e5-e9c9bb94b98b]" 2025-12-12T16:15:20.183666534+00:00 stderr F I1212 16:15:20.183649 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-diagnostics, name: network-check-source, uid: 5ad92a4d-4ca9-422b-8abe-d013f8c3121c]" virtual=false 2025-12-12T16:15:20.187211859+00:00 stderr F I1212 16:15:20.183771 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-operator-lifecycle-manager/packageserver-7d4fc7d867" need=1 creating=1 2025-12-12T16:15:20.190328294+00:00 stderr F I1212 16:15:20.188645 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-ovn-kubernetes/ovnkube-control-plane-57b78d8988" need=1 creating=1 2025-12-12T16:15:20.203514892+00:00 stderr F I1212 16:15:20.202675 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-config-operator, name: machine-config-operator, uid: bfac9eb9-68bb-4da9-afc6-a17734d95032]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.203514892+00:00 stderr F I1212 16:15:20.202731 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-apiserver, name: check-endpoints, uid: d0180f4d-9eb3-45e2-8586-df212e67c7f6]" virtual=false 2025-12-12T16:15:20.203514892+00:00 stderr F I1212 16:15:20.202868 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 6ba7c917-27fc-4114-85d9-07825a840abc]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.203514892+00:00 stderr F I1212 16:15:20.202922 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-diagnostics, name: network-check-target, uid: 2870f12f-1c16-412d-8d85-3f66a56def0d]" virtual=false 2025-12-12T16:15:20.203514892+00:00 stderr F I1212 16:15:20.202983 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: privileged-namespaces-psa-label-syncer, uid: e7389ded-b23c-4fc0-b3ab-5d33f7e9d342]" 2025-12-12T16:15:20.203514892+00:00 stderr F I1212 16:15:20.202994 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ingress-canary, name: ingress-canary, uid: 0888ad56-e63e-40f9-9ab5-bdfe12ee18ef]" virtual=false 2025-12-12T16:15:20.204567307+00:00 stderr F I1212 
16:15:20.203682 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-marketplace, name: marketplace-operator-metrics, uid: 94337474-19e9-47ef-a63f-a5db85f82770]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.204567307+00:00 stderr F I1212 16:15:20.203706 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-monitoring, name: cluster-monitoring-operator, uid: 5ecbdbba-0aac-4ca1-9bd5-63c3dd666779]" virtual=false 2025-12-12T16:15:20.204567307+00:00 stderr F I1212 16:15:20.202605 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-776cdc94d6" need=1 creating=1 2025-12-12T16:15:20.208603274+00:00 stderr F I1212 16:15:20.208544 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: default, uid: 99601b99-6f8e-4cd7-85b2-edc53b26c14b]" 2025-12-12T16:15:20.208603274+00:00 stderr F I1212 16:15:20.208585 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/ClusterServiceVersion, namespace: openshift-operator-lifecycle-manager, name: packageserver, uid: 09b3d4b2-fc47-4ee0-a331-67a39502cf21]" virtual=false 2025-12-12T16:15:20.208682256+00:00 stderr F I1212 16:15:20.208544 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver, name: builder, uid: 2ac4350b-fa20-4664-b7ac-18f9ac8040a0]" 2025-12-12T16:15:20.208694267+00:00 stderr F I1212 16:15:20.208675 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: console-operator, uid: 1ef3c1b1-685a-4417-9b53-23d61c410f1e]" virtual=false 2025-12-12T16:15:20.208812930+00:00 stderr F I1212 16:15:20.208772 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-operator, name: default, uid: 9272d409-3498-4e8d-8e7d-cbba64cffa29]" 2025-12-12T16:15:20.208812930+00:00 stderr F I1212 16:15:20.208797 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: console-public, uid: a4c72545-652f-48d3-b484-7d5f7a310e7e]" virtual=false 2025-12-12T16:15:20.216764131+00:00 stderr F I1212 16:15:20.216106 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd, name: installer-sa, uid: 1d552f28-335e-4a85-bc2d-d51d26fc26bc]" 2025-12-12T16:15:20.216764131+00:00 stderr F I1212 16:15:20.216152 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-config-operator, name: prometheus-k8s, uid: 01b524f2-7ddc-4878-9076-8f4881346e07]" virtual=false 2025-12-12T16:15:20.216764131+00:00 stderr F I1212 16:15:20.216295 1 replica_set.go:590] 
"Too few replicas" logger="replicaset-controller" replicaSet="openshift-service-ca-operator/service-ca-operator-5b9c976747" need=1 creating=1 2025-12-12T16:15:20.216764131+00:00 stderr F I1212 16:15:20.216372 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-service-ca/service-ca-74545575db" need=1 creating=1 2025-12-12T16:15:20.223945214+00:00 stderr F I1212 16:15:20.221593 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler, name: builder, uid: db0f51ea-3d5e-44aa-b8fc-e26b02a67203]" 2025-12-12T16:15:20.223945214+00:00 stderr F I1212 16:15:20.221637 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-multus, name: whereabouts-cni, uid: 3e189f77-ec81-4804-a69e-3406cea72d88]" virtual=false 2025-12-12T16:15:20.223945214+00:00 stderr F I1212 16:15:20.221748 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-apiserver-operator, name: default, uid: 55d30c96-0171-4cea-81c8-1bc7dde775fa]" 2025-12-12T16:15:20.223945214+00:00 stderr F I1212 16:15:20.221767 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-diagnostics, name: network-diagnostics, uid: edc23d92-6948-48f7-b843-ef543c0aec83]" virtual=false 2025-12-12T16:15:20.227730355+00:00 stderr F I1212 16:15:20.227461 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: cronjob-controller, uid: d10b5490-76e5-4a6b-b20d-081a7fd10ac0]" 2025-12-12T16:15:20.227730355+00:00 stderr F I1212 16:15:20.227510 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: machine-api-controllers, uid: 8913b400-f591-4bff-8f47-c4026984f25f]" virtual=false 2025-12-12T16:15:20.232104831+00:00 stderr F I1212 16:15:20.231256 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: cluster-autoscaler, uid: 56382e05-4f3e-45e2-9065-468d4f668091]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.232104831+00:00 stderr F I1212 16:15:20.231318 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-controller-manager-operator, name: prometheus-k8s, uid: 4d74cb46-6482-4707-bff6-6547ff546015]" virtual=false 2025-12-12T16:15:20.235730348+00:00 stderr F I1212 16:15:20.235660 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd, name: revision-status-1, uid: f0431d0b-c8bb-453e-adcc-46f809a3bc04]" 2025-12-12T16:15:20.235730348+00:00 stderr F I1212 16:15:20.235706 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-multus, name: 
prometheus-k8s, uid: ef46d29c-9cc9-4ab9-bcf0-8c1a79052bc9]" virtual=false 2025-12-12T16:15:20.268884157+00:00 stderr F I1212 16:15:20.268778 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[operators.coreos.com/v1alpha1/ClusterServiceVersion, namespace: openshift-operator-lifecycle-manager, name: packageserver, uid: 09b3d4b2-fc47-4ee0-a331-67a39502cf21]" 2025-12-12T16:15:20.268884157+00:00 stderr F I1212 16:15:20.268839 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-operator-lifecycle-manager, name: operator-lifecycle-manager-metrics, uid: 74c4b64c-8513-4ace-b791-ecc4897b9a04]" virtual=false 2025-12-12T16:15:20.281493100+00:00 stderr F I1212 16:15:20.280372 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-dns, name: dns-default, uid: e794a656-06b8-46f9-9107-71acf4782fc7]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"DNS","name":"default","uid":"0f9755ef-acf2-4bc6-a6fc-f491e28e635f","controller":true}] 2025-12-12T16:15:20.281493100+00:00 stderr F I1212 16:15:20.280458 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console, name: console-operator, uid: 770c2fcf-bc84-4b08-9f10-7f8e1853cee5]" virtual=false 2025-12-12T16:15:20.294268558+00:00 stderr F I1212 16:15:20.294112 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Secret, namespace: openshift-operator-lifecycle-manager, name: pprof-cert, uid: 78c31177-72ae-4588-82df-59ba321a257b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:20.294268558+00:00 stderr F I1212 16:15:20.294235 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-marketplace, name: openshift-marketplace-metrics, uid: 39ffab32-69c7-4cde-83a4-37704d2add4d]" virtual=false 2025-12-12T16:15:20.305978910+00:00 stderr F I1212 16:15:20.303597 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console, name: console, uid: 324553c0-d8c0-43b0-bbae-07283a98bcf1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.305978910+00:00 stderr F I1212 16:15:20.303655 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-control-plane-limited, uid: f119fd7b-c9f9-41a3-9f6b-6b849a2877ff]" virtual=false 2025-12-12T16:15:20.306937613+00:00 stderr F I1212 16:15:20.306876 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-marketplace, name: redhat-operators, uid: 0cfb03d5-61fc-4569-8be6-5f028c9f7f5d]" 
owner=[{"apiVersion":"operators.coreos.com/v1alpha1","kind":"CatalogSource","name":"redhat-operators","uid":"ca744265-3ae3-4482-8c3d-b10e28fe1042","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:15:20.306937613+00:00 stderr F I1212 16:15:20.306922 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-vsphere-infra, name: host-networking-services, uid: 13dc3db2-b779-4912-a196-ee77feacea00]" virtual=false 2025-12-12T16:15:20.314267070+00:00 stderr F I1212 16:15:20.314216 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-diagnostics, name: network-check-source, uid: 5ad92a4d-4ca9-422b-8abe-d013f8c3121c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.314333492+00:00 stderr F I1212 16:15:20.314316 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift, name: copied-csv-viewer, uid: 1487dd1c-4a3b-4683-a18c-da8fd82bf4e5]" virtual=false 2025-12-12T16:15:20.320893320+00:00 stderr F I1212 16:15:20.320673 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-apiserver, name: check-endpoints, uid: d0180f4d-9eb3-45e2-8586-df212e67c7f6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.320893320+00:00 stderr F I1212 16:15:20.320695 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-console-operator, name: metrics, uid: 899e5eff-60ef-429f-9533-9c263e2d0ddb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.320893320+00:00 stderr F I1212 16:15:20.320726 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-machine-approver, name: prometheus-k8s, uid: a79b075f-5ddc-4304-a7df-de0caa322fa5]" virtual=false 2025-12-12T16:15:20.320893320+00:00 stderr F I1212 16:15:20.320742 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console-operator, name: console-operator, uid: ca4d2ee6-7668-4b43-9cca-59d0d06f542a]" virtual=false 2025-12-12T16:15:20.321004432+00:00 stderr F I1212 16:15:20.320976 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: node-ca, uid: 1476d897-3740-45c6-b3a2-3403be584014]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.321050893+00:00 stderr F I1212 16:15:20.321030 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: 
openshift-nutanix-infra, name: host-networking-services, uid: 20a538b5-51d1-4f13-90b5-d59b3dc493bb]" virtual=false 2025-12-12T16:15:20.326003103+00:00 stderr F I1212 16:15:20.325928 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-ingress-canary, name: ingress-canary, uid: 0888ad56-e63e-40f9-9ab5-bdfe12ee18ef]" owner=[{"apiVersion":"apps/v1","kind":"daemonset","name":"ingress-canary","uid":"77896bcd-d1f7-46a2-984f-9205a544fb94","controller":true}] 2025-12-12T16:15:20.326003103+00:00 stderr F I1212 16:15:20.325983 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v2/OperatorCondition, namespace: openshift-operator-lifecycle-manager, name: packageserver, uid: d15615f2-2624-4c4a-9ddd-e19c1181e778]" virtual=false 2025-12-12T16:15:20.329891356+00:00 stderr F I1212 16:15:20.329815 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-network-diagnostics, name: network-check-target, uid: 2870f12f-1c16-412d-8d85-3f66a56def0d]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.329891356+00:00 stderr F I1212 16:15:20.329871 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: 77e7db24-b3cb-49de-8ddd-4832c5da528e]" virtual=false 2025-12-12T16:15:20.333554735+00:00 stderr F I1212 16:15:20.333491 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/Service, namespace: openshift-monitoring, name: cluster-monitoring-operator, uid: 5ecbdbba-0aac-4ca1-9bd5-63c3dd666779]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.333632007+00:00 stderr F I1212 16:15:20.333613 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-monitoring, name: cluster-monitoring-operator-alert-customization, uid: 7a0fca50-57b4-41d8-922f-f52b7051910d]" virtual=false 2025-12-12T16:15:20.339269552+00:00 stderr F I1212 16:15:20.339060 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: console-operator, uid: 1ef3c1b1-685a-4417-9b53-23d61c410f1e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.339269552+00:00 stderr F I1212 16:15:20.339120 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-service-ca-operator, name: prometheus-k8s, uid: d5f0400c-c866-4461-91ca-1674c874bd8b]" virtual=false 2025-12-12T16:15:20.344164240+00:00 stderr F I1212 16:15:20.342894 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" 
logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: console-public, uid: a4c72545-652f-48d3-b484-7d5f7a310e7e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.344164240+00:00 stderr F I1212 16:15:20.342942 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: console-operator, uid: 094d60d7-e5b5-4603-ad96-cde7975bd83f]" virtual=false 2025-12-12T16:15:20.347653004+00:00 stderr F I1212 16:15:20.347532 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-machine-config-operator, name: prometheus-k8s, uid: 01b524f2-7ddc-4878-9076-8f4881346e07]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.347691685+00:00 stderr F I1212 16:15:20.347637 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-dns-operator, name: dns-operator, uid: 79c48787-e4ef-45fe-9cb2-8707e9cd7d61]" virtual=false 2025-12-12T16:15:20.349658113+00:00 stderr F I1212 16:15:20.349620 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="0s" 2025-12-12T16:15:20.359764086+00:00 stderr F I1212 16:15:20.359674 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-diagnostics, name: network-diagnostics, uid: edc23d92-6948-48f7-b843-ef543c0aec83]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.359764086+00:00 stderr F I1212 16:15:20.359735 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-marketplace, name: marketplace-operator, uid: dcd874ef-723a-40d7-9425-f4383187a07d]" virtual=false 2025-12-12T16:15:20.359990092+00:00 stderr F I1212 16:15:20.359930 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-multus, name: whereabouts-cni, uid: 3e189f77-ec81-4804-a69e-3406cea72d88]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.360004272+00:00 stderr F I1212 16:15:20.359989 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-authentication, name: prometheus-k8s, uid: 2a54be00-974b-4c50-8d1c-f7162000d609]" virtual=false 2025-12-12T16:15:20.362486802+00:00 stderr F I1212 16:15:20.362342 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, 
namespace: openshift-controller-manager-operator, name: prometheus-k8s, uid: 4d74cb46-6482-4707-bff6-6547ff546015]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.362486802+00:00 stderr F I1212 16:15:20.362379 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-samples-operator, name: prometheus-k8s, uid: 39466c9d-67a7-445a-b328-b0ff5f22d5e2]" virtual=false 2025-12-12T16:15:20.365779791+00:00 stderr F I1212 16:15:20.365566 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: machine-api-controllers, uid: 8913b400-f591-4bff-8f47-c4026984f25f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.365779791+00:00 stderr F I1212 16:15:20.365616 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-apiserver, name: prometheus-k8s, uid: eae1ab86-3aac-4e4d-844b-b4aab669be67]" virtual=false 2025-12-12T16:15:20.370357801+00:00 stderr F I1212 16:15:20.370298 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-multus, name: prometheus-k8s, uid: ef46d29c-9cc9-4ab9-bcf0-8c1a79052bc9]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.370432373+00:00 stderr F I1212 16:15:20.370407 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-openstack-infra, name: host-networking-services, uid: ba93ff46-031f-415f-9480-767006d718e7]" virtual=false 2025-12-12T16:15:20.374898811+00:00 stderr F I1212 16:15:20.374846 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:15:20.381225953+00:00 stderr F I1212 16:15:20.381151 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:15:20.400141959+00:00 stderr F I1212 16:15:20.400072 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-operator-lifecycle-manager, name: operator-lifecycle-manager-metrics, uid: 74c4b64c-8513-4ace-b791-ecc4897b9a04]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.400243871+00:00 stderr F I1212 16:15:20.400219 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: 8a2a7e9d-fa82-48c8-842c-214567ad94ec]" virtual=false 2025-12-12T16:15:20.413446279+00:00 stderr F I1212 
16:15:20.413369 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console, name: console-operator, uid: 770c2fcf-bc84-4b08-9f10-7f8e1853cee5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.413446279+00:00 stderr F I1212 16:15:20.413423 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: machine-approver, uid: b908a159-2cd6-4878-ab3a-f9388352a4d6]" virtual=false 2025-12-12T16:15:20.416410061+00:00 stderr F I1212 16:15:20.416325 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-marketplace, name: openshift-marketplace-metrics, uid: 39ffab32-69c7-4cde-83a4-37704d2add4d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.416410061+00:00 stderr F I1212 16:15:20.416381 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console-operator, name: prometheus-k8s, uid: 253a0556-00ed-477b-b11e-727c668659f4]" virtual=false 2025-12-12T16:15:20.432948839+00:00 stderr F I1212 16:15:20.432882 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-control-plane-limited, uid: f119fd7b-c9f9-41a3-9f6b-6b849a2877ff]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.433021601+00:00 stderr F I1212 16:15:20.433002 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-oauth-apiserver, name: prometheus-k8s, uid: 2e1f82c6-2d8a-417d-af7a-bc440909ea57]" virtual=false 2025-12-12T16:15:20.440104532+00:00 stderr F I1212 16:15:20.440037 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-vsphere-infra, name: host-networking-services, uid: 13dc3db2-b779-4912-a196-ee77feacea00]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.440197164+00:00 stderr F I1212 16:15:20.440159 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 2c65389f-32ed-4b66-b0bf-f697ede62460]" virtual=false 2025-12-12T16:15:20.443113354+00:00 stderr F I1212 16:15:20.443062 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift, name: copied-csv-viewer, uid: 
1487dd1c-4a3b-4683-a18c-da8fd82bf4e5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.443193096+00:00 stderr F I1212 16:15:20.443156 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-version, name: prometheus-k8s, uid: ffda4cc2-e551-454c-8bf1-be5336168d3f]" virtual=false 2025-12-12T16:15:20.446086636+00:00 stderr F I1212 16:15:20.446044 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-machine-approver, name: prometheus-k8s, uid: a79b075f-5ddc-4304-a7df-de0caa322fa5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.446150467+00:00 stderr F I1212 16:15:20.446132 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-controller-manager-operator, name: prometheus-k8s, uid: 902067e6-576b-4c67-b503-46ba31250666]" virtual=false 2025-12-12T16:15:20.450115693+00:00 stderr F I1212 16:15:20.450074 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console-operator, name: console-operator, uid: ca4d2ee6-7668-4b43-9cca-59d0d06f542a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.450197785+00:00 stderr F I1212 16:15:20.450161 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-operator, name: prometheus-k8s, uid: 5dbba0e9-dd0c-4e5b-8f0d-878be7cd91a7]" virtual=false 2025-12-12T16:15:20.454347485+00:00 stderr F I1212 16:15:20.454287 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-nutanix-infra, name: host-networking-services, uid: 20a538b5-51d1-4f13-90b5-d59b3dc493bb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.454430737+00:00 stderr F I1212 16:15:20.454405 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ovn-kubernetes, name: prometheus-k8s, uid: 7a8de694-4980-44f0-938f-b8112d953aa4]" virtual=false 2025-12-12T16:15:20.458363171+00:00 stderr F I1212 16:15:20.458325 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[operators.coreos.com/v2/OperatorCondition, namespace: openshift-operator-lifecycle-manager, name: packageserver, uid: d15615f2-2624-4c4a-9ddd-e19c1181e778]" owner=[{"apiVersion":"operators.coreos.com/v1alpha1","kind":"ClusterServiceVersion","name":"packageserver","uid":"09b3d4b2-fc47-4ee0-a331-67a39502cf21","controller":true,"blockOwnerDeletion":false}] 2025-12-12T16:15:20.458439343+00:00 
stderr F I1212 16:15:20.458417 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-apiserver-operator, name: prometheus-k8s, uid: 6ff38600-b4d6-452f-82fc-0d24fdec9101]" virtual=false 2025-12-12T16:15:20.461689912+00:00 stderr F I1212 16:15:20.461562 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: 77e7db24-b3cb-49de-8ddd-4832c5da528e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.461689912+00:00 stderr F I1212 16:15:20.461627 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ingress-operator, name: ingress-operator, uid: 9ef68a54-5a41-4cec-8a8e-0de1dd9c9610]" virtual=false 2025-12-12T16:15:20.464099470+00:00 stderr F I1212 16:15:20.464026 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-monitoring, name: cluster-monitoring-operator-alert-customization, uid: 7a0fca50-57b4-41d8-922f-f52b7051910d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.464137351+00:00 stderr F I1212 16:15:20.464087 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ingress-operator, name: prometheus-k8s, uid: d6fad1f4-41ae-4595-bdb0-0d02b479944b]" virtual=false 2025-12-12T16:15:20.469851738+00:00 stderr F I1212 16:15:20.469792 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-service-ca-operator, name: prometheus-k8s, uid: d5f0400c-c866-4461-91ca-1674c874bd8b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.469898019+00:00 stderr F I1212 16:15:20.469825 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-diagnostics, name: prometheus-k8s, uid: 6b65bfe8-9060-45f8-b70c-21d30115c6f6]" virtual=false 2025-12-12T16:15:20.473501016+00:00 stderr F I1212 16:15:20.473454 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: console-operator, uid: 094d60d7-e5b5-4603-ad96-cde7975bd83f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.473580078+00:00 stderr F I1212 16:15:20.473557 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-controller-manager, name: prometheus-k8s, uid: 7dff5aea-b0b2-4e11-8d0a-aaee6bc5c894]" 
virtual=false 2025-12-12T16:15:20.476521219+00:00 stderr F I1212 16:15:20.476473 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-dns-operator, name: dns-operator, uid: 79c48787-e4ef-45fe-9cb2-8707e9cd7d61]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.476602971+00:00 stderr F I1212 16:15:20.476582 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-operator, name: prometheus-k8s, uid: ac0827c7-174d-4310-a132-b4ec6df8afc8]" virtual=false 2025-12-12T16:15:20.486571181+00:00 stderr F I1212 16:15:20.486499 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-marketplace, name: marketplace-operator, uid: dcd874ef-723a-40d7-9425-f4383187a07d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.486596562+00:00 stderr F I1212 16:15:20.486557 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console-user-settings, name: console-user-settings-admin, uid: 8a7d0ef2-903e-4505-8a80-ea97c68b4a10]" virtual=false 2025-12-12T16:15:20.492073604+00:00 stderr F I1212 16:15:20.491639 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-authentication, name: prometheus-k8s, uid: 2a54be00-974b-4c50-8d1c-f7162000d609]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.492073604+00:00 stderr F I1212 16:15:20.491709 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-controller-manager, name: prometheus-k8s, uid: ee712995-32ca-4cd3-addd-56c3dc98ac1b]" virtual=false 2025-12-12T16:15:20.493220171+00:00 stderr F I1212 16:15:20.493154 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-samples-operator, name: prometheus-k8s, uid: 39466c9d-67a7-445a-b328-b0ff5f22d5e2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.493240822+00:00 stderr F I1212 16:15:20.493224 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-dns-operator, name: prometheus-k8s, uid: c584e4f7-dc9a-4554-8568-6104eac0033f]" virtual=false 2025-12-12T16:15:20.498052128+00:00 stderr F I1212 16:15:20.497479 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-apiserver, name: 
prometheus-k8s, uid: eae1ab86-3aac-4e4d-844b-b4aab669be67]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.498052128+00:00 stderr F I1212 16:15:20.497540 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kni-infra, name: host-networking-services, uid: 9814d6e2-5d57-4f6a-a185-4f4b991702ec]" virtual=false 2025-12-12T16:15:20.505360964+00:00 stderr F I1212 16:15:20.505024 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-openstack-infra, name: host-networking-services, uid: ba93ff46-031f-415f-9480-767006d718e7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.505360964+00:00 stderr F I1212 16:15:20.505090 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-node-identity, name: network-node-identity-leases, uid: b346ce36-850b-4b91-84fd-9009ca037189]" virtual=false 2025-12-12T16:15:20.533761348+00:00 stderr F I1212 16:15:20.533506 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: 8a2a7e9d-fa82-48c8-842c-214567ad94ec]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.533761348+00:00 stderr F I1212 16:15:20.533730 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: console-configmap-reader, uid: 5f8e3d3f-6c13-40f0-a1c4-0c2b0cee9aba]" virtual=false 2025-12-12T16:15:20.547300354+00:00 stderr F I1212 16:15:20.547232 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: machine-approver, uid: b908a159-2cd6-4878-ab3a-f9388352a4d6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.547382676+00:00 stderr F I1212 16:15:20.547351 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console, name: prometheus-k8s, uid: 56ff8849-354a-4ede-88f2-4436b0a3bde5]" virtual=false 2025-12-12T16:15:20.550508301+00:00 stderr F I1212 16:15:20.550428 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console-operator, name: prometheus-k8s, uid: 253a0556-00ed-477b-b11e-727c668659f4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.550508301+00:00 stderr F I1212 16:15:20.550480 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-etcd-operator, name: prometheus-k8s, uid: d0fac19c-3e45-4cfd-a8ca-f121bc469295]" virtual=false 2025-12-12T16:15:20.567411609+00:00 stderr F I1212 16:15:20.567279 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-oauth-apiserver, name: prometheus-k8s, uid: 2e1f82c6-2d8a-417d-af7a-bc440909ea57]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.567411609+00:00 stderr F I1212 16:15:20.567395 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-image-registry, name: node-ca, uid: 7fd7f4d9-78fa-4f5a-9f7c-d1f6264ee09a]" virtual=false 2025-12-12T16:15:20.574104420+00:00 stderr F I1212 16:15:20.574042 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 2c65389f-32ed-4b66-b0bf-f697ede62460]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.574127890+00:00 stderr F I1212 16:15:20.574113 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-apiserver-operator, name: prometheus-k8s, uid: f52bc923-0764-4ed9-8390-8f23448cb6a5]" virtual=false 2025-12-12T16:15:20.576918518+00:00 stderr F I1212 16:15:20.576858 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-version, name: prometheus-k8s, uid: ffda4cc2-e551-454c-8bf1-be5336168d3f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.576938498+00:00 stderr F I1212 16:15:20.576906 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-apiserver, name: prometheus-k8s, uid: 6dfe5fa1-4be3-47c2-aa7e-e055b2344b88]" virtual=false 2025-12-12T16:15:20.580026403+00:00 stderr F I1212 16:15:20.579968 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-controller-manager-operator, name: prometheus-k8s, uid: 902067e6-576b-4c67-b503-46ba31250666]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.580026403+00:00 stderr F I1212 16:15:20.580009 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-storage-operator, name: csi-snapshot-controller-operator-role, uid: b75388c6-a2ac-4323-97aa-74085d52c30a]" virtual=false 2025-12-12T16:15:20.583322412+00:00 
stderr F I1212 16:15:20.583234 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-operator, name: prometheus-k8s, uid: 5dbba0e9-dd0c-4e5b-8f0d-878be7cd91a7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.583343682+00:00 stderr F I1212 16:15:20.583317 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-authentication-operator, name: prometheus-k8s, uid: a3099dea-12c5-441e-b16c-2f2c07408c1a]" virtual=false 2025-12-12T16:15:20.587034661+00:00 stderr F I1212 16:15:20.585933 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:15:20.587034661+00:00 stderr F I1212 16:15:20.586530 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ovn-kubernetes, name: prometheus-k8s, uid: 7a8de694-4980-44f0-938f-b8112d953aa4]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.587034661+00:00 stderr F I1212 16:15:20.586659 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:15:20.587034661+00:00 stderr F I1212 16:15:20.586564 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: machine-api-controllers, uid: 909fe707-d07c-45ed-ac20-11669b612d43]" virtual=false 2025-12-12T16:15:20.588022945+00:00 stderr F I1212 16:15:20.587999 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:15:20.592291728+00:00 stderr F I1212 16:15:20.592247 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:15:20.593357564+00:00 stderr F I1212 16:15:20.593292 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-apiserver-operator, name: prometheus-k8s, uid: 6ff38600-b4d6-452f-82fc-0d24fdec9101]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.593377064+00:00 stderr F I1212 16:15:20.593354 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: openshift-network-public-role, uid: f85f6638-bb46-4041-bf1e-8d05c6621e59]" virtual=false 2025-12-12T16:15:20.596350546+00:00 stderr F I1212 16:15:20.596284 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ingress-operator, name: ingress-operator, uid: 9ef68a54-5a41-4cec-8a8e-0de1dd9c9610]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.596350546+00:00 stderr F I1212 16:15:20.596334 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: ingress-operator, uid: 32745b86-26b1-4411-83c8-6769afe0ef84]" virtual=false 2025-12-12T16:15:20.596782576+00:00 stderr F I1212 16:15:20.596712 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ingress-operator, name: prometheus-k8s, uid: d6fad1f4-41ae-4595-bdb0-0d02b479944b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.596796797+00:00 stderr F I1212 16:15:20.596772 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-scheduler, name: prometheus-k8s, uid: 8fcc503b-4599-415b-b68a-1f79d1f6a02d]" virtual=false 2025-12-12T16:15:20.604228476+00:00 stderr F I1212 16:15:20.604125 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-diagnostics, name: prometheus-k8s, uid: 6b65bfe8-9060-45f8-b70c-21d30115c6f6]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.604254786+00:00 stderr F I1212 16:15:20.604225 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-node-limited, uid: 2e9cfa70-01d3-48a5-af7e-ac6d0f70489f]" virtual=false 2025-12-12T16:15:20.607392792+00:00 stderr F I1212 16:15:20.607316 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-controller-manager, name: prometheus-k8s, uid: 7dff5aea-b0b2-4e11-8d0a-aaee6bc5c894]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.607421703+00:00 stderr F I1212 16:15:20.607387 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-route-controller-manager, name: prometheus-k8s, uid: b42e1181-7f9e-4c64-b0f6-f8615e0c9572]" virtual=false 2025-12-12T16:15:20.610414455+00:00 stderr F I1212 16:15:20.610367 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-operator, name: prometheus-k8s, uid: ac0827c7-174d-4310-a132-b4ec6df8afc8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.610479516+00:00 stderr F I1212 16:15:20.610457 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: coreos-pull-secret-reader, uid: 60a0a208-9961-463e-986f-3c7302769df7]" virtual=false 2025-12-12T16:15:20.619763570+00:00 stderr F I1212 16:15:20.619724 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console-user-settings, name: console-user-settings-admin, uid: 8a7d0ef2-903e-4505-8a80-ea97c68b4a10]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.619763570+00:00 stderr F I1212 16:15:20.619753 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-image-registry, name: prometheus-k8s, uid: eac9d6d9-611e-4c5c-8436-393910521b09]" virtual=false 2025-12-12T16:15:20.624123785+00:00 stderr F I1212 16:15:20.624086 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-controller-manager, name: prometheus-k8s, uid: ee712995-32ca-4cd3-addd-56c3dc98ac1b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.624161146+00:00 stderr F I1212 16:15:20.624116 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-scheduler-operator, name: prometheus-k8s, uid: 8906de79-0e02-4f0a-b8fe-92706ec66a89]" virtual=false 2025-12-12T16:15:20.626628995+00:00 stderr F I1212 16:15:20.626597 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-dns-operator, name: prometheus-k8s, uid: c584e4f7-dc9a-4554-8568-6104eac0033f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.626681786+00:00 stderr F I1212 16:15:20.626662 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-monitoring, name: console-operator, uid: e74750bf-8b67-4097-800a-1f62f1d728e2]" virtual=false 2025-12-12T16:15:20.629962866+00:00 stderr F I1212 16:15:20.629891 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kni-infra, name: host-networking-services, uid: 9814d6e2-5d57-4f6a-a185-4f4b991702ec]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.629991096+00:00 stderr F I1212 16:15:20.629951 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-operator, name: builder, uid: 932a7d93-0cf8-4013-aafa-59c7b76d6a1d]" virtual=false 2025-12-12T16:15:20.637593059+00:00 stderr F I1212 16:15:20.637507 1 garbagecollector.go:567] "item has at least one existing 
owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-network-node-identity, name: network-node-identity-leases, uid: b346ce36-850b-4b91-84fd-9009ca037189]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.637613210+00:00 stderr F I1212 16:15:20.637589 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-host-network, name: default, uid: f84fe34b-744e-46bf-afe7-3381a323ac37]" virtual=false 2025-12-12T16:15:20.667350416+00:00 stderr F I1212 16:15:20.667018 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: console-configmap-reader, uid: 5f8e3d3f-6c13-40f0-a1c4-0c2b0cee9aba]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.667350416+00:00 stderr F I1212 16:15:20.667084 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: default-rolebindings-controller, uid: d237a291-87b4-4b61-a21f-21eb8763bb75]" virtual=false 2025-12-12T16:15:20.681692132+00:00 stderr F I1212 16:15:20.681576 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-console, name: prometheus-k8s, uid: 56ff8849-354a-4ede-88f2-4436b0a3bde5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.681692132+00:00 stderr F I1212 16:15:20.681636 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: deployer, uid: 16cc2a81-f4f2-4ba2-90f7-d01906541d49]" virtual=false 2025-12-12T16:15:20.682399909+00:00 stderr F I1212 16:15:20.682344 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-etcd-operator, name: prometheus-k8s, uid: d0fac19c-3e45-4cfd-a8ca-f121bc469295]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.682399909+00:00 stderr F I1212 16:15:20.682373 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: builder, uid: 6d40b6a5-e179-40d9-b101-be6419d6686b]" virtual=false 2025-12-12T16:15:20.696224572+00:00 stderr F I1212 16:15:20.696115 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console-operator, name: builder, uid: 932a7d93-0cf8-4013-aafa-59c7b76d6a1d]" 2025-12-12T16:15:20.696224572+00:00 stderr F I1212 16:15:20.696171 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift, name: 
default, uid: 820c3ad3-c6d2-4af1-86d8-c02aaa2ead06]" virtual=false 2025-12-12T16:15:20.700099565+00:00 stderr F I1212 16:15:20.700038 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-image-registry, name: node-ca, uid: 7fd7f4d9-78fa-4f5a-9f7c-d1f6264ee09a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.700117576+00:00 stderr F I1212 16:15:20.700087 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-diagnostics, name: network-diagnostics, uid: 35a6df22-e2af-4660-b4c6-16bf8a43042f]" virtual=false 2025-12-12T16:15:20.701852167+00:00 stderr F I1212 16:15:20.701799 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-host-network, name: default, uid: f84fe34b-744e-46bf-afe7-3381a323ac37]" 2025-12-12T16:15:20.701871618+00:00 stderr F I1212 16:15:20.701840 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: cluster-network-operator, uid: 4474bfa7-4a4c-4eef-9e88-1d3ba71df974]" virtual=false 2025-12-12T16:15:20.706986521+00:00 stderr F I1212 16:15:20.706914 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-apiserver-operator, name: prometheus-k8s, uid: f52bc923-0764-4ed9-8390-8f23448cb6a5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.707006952+00:00 stderr F I1212 16:15:20.706979 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: builder, uid: fe20f19e-5b44-4e3f-a77e-1e90fee53827]" virtual=false 2025-12-12T16:15:20.710374623+00:00 stderr F I1212 16:15:20.710283 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-apiserver, name: prometheus-k8s, uid: 6dfe5fa1-4be3-47c2-aa7e-e055b2344b88]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.710415114+00:00 stderr F I1212 16:15:20.710382 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: hostpath-provisioner, name: csi-provisioner, uid: 35357870-7105-4e62-b521-ee8b9f90ec5b]" virtual=false 2025-12-12T16:15:20.713505788+00:00 stderr F I1212 16:15:20.713432 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-cluster-storage-operator, name: csi-snapshot-controller-operator-role, uid: b75388c6-a2ac-4323-97aa-74085d52c30a]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.713525199+00:00 stderr F I1212 16:15:20.713506 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd-operator, name: default, uid: 2c71f921-1f35-40de-9b11-87b84cef1c72]" virtual=false 2025-12-12T16:15:20.716859519+00:00 stderr F I1212 16:15:20.716751 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-authentication-operator, name: prometheus-k8s, uid: a3099dea-12c5-441e-b16c-2f2c07408c1a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.716859519+00:00 stderr F I1212 16:15:20.716825 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: hostpath-provisioner, name: default, uid: f2f75e6f-55e5-4223-abf9-a967d80eb782]" virtual=false 2025-12-12T16:15:20.719445071+00:00 stderr F I1212 16:15:20.719387 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: machine-api-controllers, uid: 909fe707-d07c-45ed-ac20-11669b612d43]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.719445071+00:00 stderr F I1212 16:15:20.719430 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-user-workload-monitoring, name: deployer, uid: 8b69c2f4-60c0-4f11-91d8-b8d929dc8794]" virtual=false 2025-12-12T16:15:20.723491359+00:00 stderr F I1212 16:15:20.723365 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config-managed, name: openshift-network-public-role, uid: f85f6638-bb46-4041-bf1e-8d05c6621e59]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.723529550+00:00 stderr F I1212 16:15:20.723479 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: hostpath-provisioner, name: deployer, uid: 38bd145e-b1ff-4829-bbec-b1987d8bc12b]" virtual=false 2025-12-12T16:15:20.727944456+00:00 stderr F I1212 16:15:20.727791 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: ingress-operator, uid: 32745b86-26b1-4411-83c8-6769afe0ef84]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.727944456+00:00 stderr F I1212 16:15:20.727870 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager, name: 
openshift-controller-manager-sa, uid: 91dce84a-6fb8-432b-b1df-63d4b1472da3]" virtual=false 2025-12-12T16:15:20.732146137+00:00 stderr F I1212 16:15:20.732069 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-scheduler, name: prometheus-k8s, uid: 8fcc503b-4599-415b-b68a-1f79d1f6a02d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.732146137+00:00 stderr F I1212 16:15:20.732122 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns-operator, name: builder, uid: 75c97165-efb7-4a2c-ba82-cbfeda443516]" virtual=false 2025-12-12T16:15:20.734393761+00:00 stderr F I1212 16:15:20.734344 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: default-rolebindings-controller, uid: d237a291-87b4-4b61-a21f-21eb8763bb75]" 2025-12-12T16:15:20.734393761+00:00 stderr F I1212 16:15:20.734366 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: serviceaccount-pull-secrets-controller, uid: 6772bf38-bda1-4a64-87a9-8f36f5977a73]" virtual=false 2025-12-12T16:15:20.737796913+00:00 stderr F I1212 16:15:20.737729 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-node-limited, uid: 2e9cfa70-01d3-48a5-af7e-ac6d0f70489f]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.737796913+00:00 stderr F I1212 16:15:20.737782 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: machine-api-operator, uid: 27ab1b40-7038-45bc-a0c3-f51f5fd8e027]" virtual=false 2025-12-12T16:15:20.740241782+00:00 stderr F I1212 16:15:20.740195 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-route-controller-manager, name: prometheus-k8s, uid: b42e1181-7f9e-4c64-b0f6-f8615e0c9572]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.740241782+00:00 stderr F I1212 16:15:20.740231 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cloud-platform-infra, name: default, uid: a86e4681-eff1-481d-9666-b232c0959b96]" virtual=false 2025-12-12T16:15:20.743738027+00:00 stderr F I1212 16:15:20.743675 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-config, name: coreos-pull-secret-reader, uid: 60a0a208-9961-463e-986f-3c7302769df7]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.743738027+00:00 stderr F I1212 16:15:20.743720 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd-operator, name: builder, uid: 9a3cb002-3f9b-49d4-8cd8-60153deea63b]" virtual=false 2025-12-12T16:15:20.747313103+00:00 stderr F I1212 16:15:20.747258 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: deployer, uid: 16cc2a81-f4f2-4ba2-90f7-d01906541d49]" 2025-12-12T16:15:20.747313103+00:00 stderr F I1212 16:15:20.747289 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator, name: kube-storage-version-migrator-sa, uid: a86cc13a-3c63-4d22-8c58-64d5ad74ab39]" virtual=false 2025-12-12T16:15:20.750589462+00:00 stderr F I1212 16:15:20.750501 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-multus, name: builder, uid: 6d40b6a5-e179-40d9-b101-be6419d6686b]" 2025-12-12T16:15:20.750622462+00:00 stderr F I1212 16:15:20.750587 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca-operator, name: builder, uid: bcdd7145-7c24-467b-be3e-1d4ad61ea327]" virtual=false 2025-12-12T16:15:20.755016048+00:00 stderr F I1212 16:15:20.754897 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-image-registry, name: prometheus-k8s, uid: eac9d6d9-611e-4c5c-8436-393910521b09]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.755016048+00:00 stderr F I1212 16:15:20.754972 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: 8c7ce863-bde8-4efe-8b60-31d289bbf1f9]" virtual=false 2025-12-12T16:15:20.757967539+00:00 stderr F I1212 16:15:20.757860 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-kube-scheduler-operator, name: prometheus-k8s, uid: 8906de79-0e02-4f0a-b8fe-92706ec66a89]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.757967539+00:00 stderr F I1212 16:15:20.757930 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: cluster-quota-reconciliation-controller, uid: 858d60d0-adc0-4333-9a8e-34b5913c78bc]" virtual=false 2025-12-12T16:15:20.761881984+00:00 stderr F I1212 16:15:20.761786 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/Role, namespace: openshift-monitoring, name: 
console-operator, uid: e74750bf-8b67-4097-800a-1f62f1d728e2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.761907114+00:00 stderr F I1212 16:15:20.761865 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: resourcequota-controller, uid: 677c5ec8-4da3-451d-9322-f6cbe335b49d]" virtual=false 2025-12-12T16:15:20.763514853+00:00 stderr F I1212 16:15:20.763466 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift, name: default, uid: 820c3ad3-c6d2-4af1-86d8-c02aaa2ead06]" 2025-12-12T16:15:20.763543804+00:00 stderr F I1212 16:15:20.763515 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress, name: router, uid: 964dbda5-aa30-44a8-8c0d-731c78113ffb]" virtual=false 2025-12-12T16:15:20.773945614+00:00 stderr F I1212 16:15:20.773833 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovn-kubernetes, name: builder, uid: fe20f19e-5b44-4e3f-a77e-1e90fee53827]" 2025-12-12T16:15:20.773993355+00:00 stderr F I1212 16:15:20.773943 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-monitoring, name: builder, uid: 4c9f0d11-94bb-4d3c-9f30-17a17f7a5bbe]" virtual=false 2025-12-12T16:15:20.776199559+00:00 stderr F I1212 16:15:20.776125 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: hostpath-provisioner, name: csi-provisioner, uid: 35357870-7105-4e62-b521-ee8b9f90ec5b]" 2025-12-12T16:15:20.776199559+00:00 stderr F I1212 16:15:20.776161 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cloud-network-config-controller, name: builder, uid: 1eeaef8d-c0df-4387-8359-c6822fc9be23]" virtual=false 2025-12-12T16:15:20.780300557+00:00 stderr F I1212 16:15:20.780244 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd-operator, name: default, uid: 2c71f921-1f35-40de-9b11-87b84cef1c72]" 2025-12-12T16:15:20.780318598+00:00 stderr F I1212 16:15:20.780293 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd, name: deployer, uid: 347e3ee2-c2df-4c2c-a508-817f5354ebbd]" virtual=false 2025-12-12T16:15:20.782174682+00:00 stderr F I1212 16:15:20.782115 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: hostpath-provisioner, name: default, uid: f2f75e6f-55e5-4223-abf9-a967d80eb782]" 2025-12-12T16:15:20.782212063+00:00 stderr F I1212 16:15:20.782161 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler, name: deployer, uid: 41b412e2-bdc3-4383-a74c-d64f4b637a72]" virtual=false 2025-12-12T16:15:20.787843969+00:00 stderr F I1212 16:15:20.787756 1 garbagecollector.go:548] "item 
doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-user-workload-monitoring, name: deployer, uid: 8b69c2f4-60c0-4f11-91d8-b8d929dc8794]" 2025-12-12T16:15:20.787874250+00:00 stderr F I1212 16:15:20.787834 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-node-identity, name: network-node-identity, uid: 020e6dbc-bbf9-4bb0-9224-ebe047b05265]" virtual=false 2025-12-12T16:15:20.790067953+00:00 stderr F I1212 16:15:20.789968 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: hostpath-provisioner, name: deployer, uid: 38bd145e-b1ff-4829-bbec-b1987d8bc12b]" 2025-12-12T16:15:20.790092773+00:00 stderr F I1212 16:15:20.790055 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operators, name: default, uid: 83f20ffe-7f51-477b-ae91-2c6b88546374]" virtual=false 2025-12-12T16:15:20.792436880+00:00 stderr F I1212 16:15:20.792387 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-controller-manager, name: openshift-controller-manager-sa, uid: 91dce84a-6fb8-432b-b1df-63d4b1472da3]" 2025-12-12T16:15:20.792436880+00:00 stderr F I1212 16:15:20.792422 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: deployer, uid: cda8e090-dddf-4799-8824-a2d06f2890e9]" virtual=false 2025-12-12T16:15:20.795137815+00:00 stderr F I1212 16:15:20.795055 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-dns-operator, name: builder, uid: 75c97165-efb7-4a2c-ba82-cbfeda443516]" 2025-12-12T16:15:20.795137815+00:00 stderr F I1212 16:15:20.795085 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: ephemeral-volume-controller, uid: 715b5262-7a19-4d9f-8f92-28fc87712b12]" virtual=false 2025-12-12T16:15:20.799900320+00:00 stderr F I1212 16:15:20.799811 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: serviceaccount-pull-secrets-controller, uid: 6772bf38-bda1-4a64-87a9-8f36f5977a73]" 2025-12-12T16:15:20.799900320+00:00 stderr F I1212 16:15:20.799844 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication-operator, name: authentication-operator, uid: 063bc733-6edc-4f47-a43c-73cbfb5c3c8d]" virtual=false 2025-12-12T16:15:20.806505239+00:00 stderr F I1212 16:15:20.806417 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cloud-platform-infra, name: default, uid: a86e4681-eff1-481d-9666-b232c0959b96]" 2025-12-12T16:15:20.806505239+00:00 stderr F I1212 16:15:20.806465 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: deployer, uid: 38814c50-c598-4665-a1a1-db15c372256c]" virtual=false 
2025-12-12T16:15:20.808621660+00:00 stderr F I1212 16:15:20.808517 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd-operator, name: builder, uid: 9a3cb002-3f9b-49d4-8cd8-60153deea63b]" 2025-12-12T16:15:20.808621660+00:00 stderr F I1212 16:15:20.808562 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler-operator, name: deployer, uid: 0f674f22-a7ff-492e-a8b7-fec1c64d3988]" virtual=false 2025-12-12T16:15:20.813116278+00:00 stderr F I1212 16:15:20.813065 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-storage-version-migrator, name: kube-storage-version-migrator-sa, uid: a86cc13a-3c63-4d22-8c58-64d5ad74ab39]" 2025-12-12T16:15:20.813116278+00:00 stderr F I1212 16:15:20.813099 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-user-workload-monitoring, name: default, uid: 10a7fa91-62ce-48cd-8e05-98eaa149f77d]" virtual=false 2025-12-12T16:15:20.816093610+00:00 stderr F I1212 16:15:20.816033 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-service-ca-operator, name: builder, uid: bcdd7145-7c24-467b-be3e-1d4ad61ea327]" 2025-12-12T16:15:20.816093610+00:00 stderr F I1212 16:15:20.816077 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: builder, uid: bf8a3b65-bd12-4df3-a6c8-0466a4e43300]" virtual=false 2025-12-12T16:15:20.821670584+00:00 stderr F I1212 16:15:20.821634 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: cluster-quota-reconciliation-controller, uid: 858d60d0-adc0-4333-9a8e-34b5913c78bc]" 2025-12-12T16:15:20.821692684+00:00 stderr F I1212 16:15:20.821663 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: template-instance-finalizer-controller, uid: b1e2739a-f205-41a0-b196-d35c4b74bd6d]" virtual=false 2025-12-12T16:15:20.825840264+00:00 stderr F I1212 16:15:20.825787 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: resourcequota-controller, uid: 677c5ec8-4da3-451d-9322-f6cbe335b49d]" 2025-12-12T16:15:20.825840264+00:00 stderr F I1212 16:15:20.825817 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: 0918fba0-c5ef-42c6-ab99-2fb4dcb34871]" virtual=false 2025-12-12T16:15:20.829298848+00:00 stderr F I1212 16:15:20.829236 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress, name: router, uid: 964dbda5-aa30-44a8-8c0d-731c78113ffb]" 2025-12-12T16:15:20.829298848+00:00 stderr F I1212 16:15:20.829281 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[v1/ServiceAccount, namespace: openshift-marketplace, name: community-operators, uid: d77f270c-6187-402f-85c4-c59b8121fce6]" virtual=false 2025-12-12T16:15:20.833651123+00:00 stderr F I1212 16:15:20.833569 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-diagnostics, name: network-diagnostics, uid: 35a6df22-e2af-4660-b4c6-16bf8a43042f]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.833651123+00:00 stderr F I1212 16:15:20.833610 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: hostpath-provisioner, name: csi-hostpath-provisioner-sa, uid: 905dd134-a0d7-451e-a0c8-3756680dd7db]" virtual=false 2025-12-12T16:15:20.837237859+00:00 stderr F I1212 16:15:20.837075 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-operator, name: cluster-network-operator, uid: 4474bfa7-4a4c-4eef-9e88-1d3ba71df974]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.837355782+00:00 stderr F I1212 16:15:20.837303 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-managed, name: deployer, uid: 4e689c25-8385-420e-8f95-dc68e131a386]" virtual=false 2025-12-12T16:15:20.842298541+00:00 stderr F I1212 16:15:20.842193 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-monitoring, name: builder, uid: 4c9f0d11-94bb-4d3c-9f30-17a17f7a5bbe]" 2025-12-12T16:15:20.842298541+00:00 stderr F I1212 16:15:20.842259 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: selinux-warning-controller, uid: 3caf07b7-0e8d-44e0-8c7c-6d37439ba95a]" virtual=false 2025-12-12T16:15:20.847607779+00:00 stderr F I1212 16:15:20.847511 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-etcd, name: deployer, uid: 347e3ee2-c2df-4c2c-a508-817f5354ebbd]" 2025-12-12T16:15:20.847607779+00:00 stderr F I1212 16:15:20.847537 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cloud-network-config-controller, name: builder, uid: 1eeaef8d-c0df-4387-8359-c6822fc9be23]" 2025-12-12T16:15:20.847607779+00:00 stderr F I1212 16:15:20.847564 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console, name: default, uid: 2b38afcf-128a-4af7-bc79-6b6889f497bf]" virtual=false 2025-12-12T16:15:20.847607779+00:00 stderr F I1212 16:15:20.847579 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: namespace-security-allocation-controller, uid: e72e8646-4643-4bf4-931b-e517fc0fe5b3]" virtual=false 
2025-12-12T16:15:20.849077344+00:00 stderr F I1212 16:15:20.849029 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler, name: deployer, uid: 41b412e2-bdc3-4383-a74c-d64f4b637a72]" 2025-12-12T16:15:20.849077344+00:00 stderr F I1212 16:15:20.849061 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: expand-controller, uid: 8331c465-09ca-4963-b41b-c7f4bf6b8de8]" virtual=false 2025-12-12T16:15:20.855641902+00:00 stderr F I1212 16:15:20.855582 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-operators, name: default, uid: 83f20ffe-7f51-477b-ae91-2c6b88546374]" 2025-12-12T16:15:20.855641902+00:00 stderr F I1212 16:15:20.855626 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config, name: deployer, uid: 7cfe1c72-0eeb-4e6d-ab8d-3d8c61e61be4]" virtual=false 2025-12-12T16:15:20.859194998+00:00 stderr F I1212 16:15:20.859085 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: deployer, uid: cda8e090-dddf-4799-8824-a2d06f2890e9]" 2025-12-12T16:15:20.859229259+00:00 stderr F I1212 16:15:20.859159 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: 1281df22-53c2-4b26-aa63-b3d41b4761b8]" virtual=false 2025-12-12T16:15:20.861832162+00:00 stderr F I1212 16:15:20.861777 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: ephemeral-volume-controller, uid: 715b5262-7a19-4d9f-8f92-28fc87712b12]" 2025-12-12T16:15:20.861832162+00:00 stderr F I1212 16:15:20.861816 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-operator, name: ingress-operator, uid: 75f07586-843e-4cd5-a497-25f3abe799ec]" virtual=false 2025-12-12T16:15:20.870438179+00:00 stderr F I1212 16:15:20.870327 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: machine-api-operator, uid: 27ab1b40-7038-45bc-a0c3-f51f5fd8e027]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.870438179+00:00 stderr F I1212 16:15:20.870381 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-oauth-apiserver, name: deployer, uid: d9038ebe-ecee-4d9e-9c96-9e2047d7902a]" virtual=false 2025-12-12T16:15:20.872204281+00:00 stderr F I1212 16:15:20.872114 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: deployer, uid: 38814c50-c598-4665-a1a1-db15c372256c]" 2025-12-12T16:15:20.872225002+00:00 stderr F I1212 16:15:20.872199 1 garbagecollector.go:501] 
"Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovirt-infra, name: deployer, uid: 9535ffe2-8039-4653-8c59-34f20d858271]" virtual=false 2025-12-12T16:15:20.876847493+00:00 stderr F I1212 16:15:20.876764 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-kube-scheduler-operator, name: deployer, uid: 0f674f22-a7ff-492e-a8b7-fec1c64d3988]" 2025-12-12T16:15:20.876847493+00:00 stderr F I1212 16:15:20.876805 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cloud-platform-infra, name: deployer, uid: eaf59551-1ed7-4d92-aa27-0bb33e4d035d]" virtual=false 2025-12-12T16:15:20.879893767+00:00 stderr F I1212 16:15:20.879826 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-user-workload-monitoring, name: default, uid: 10a7fa91-62ce-48cd-8e05-98eaa149f77d]" 2025-12-12T16:15:20.879893767+00:00 stderr F I1212 16:15:20.879859 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-operator, name: deployer, uid: 1e2d702b-d257-47e9-89ec-a4280c329ee3]" virtual=false 2025-12-12T16:15:20.882963111+00:00 stderr F I1212 16:15:20.882811 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: builder, uid: bf8a3b65-bd12-4df3-a6c8-0466a4e43300]" 2025-12-12T16:15:20.882963111+00:00 stderr F I1212 16:15:20.882850 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cloud-platform-infra, name: builder, uid: 55185e74-7385-4f31-aa7e-acec94ba9477]" virtual=false 2025-12-12T16:15:20.889000146+00:00 stderr F I1212 16:15:20.888878 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: 8c7ce863-bde8-4efe-8b60-31d289bbf1f9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.889000146+00:00 stderr F I1212 16:15:20.888922 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: aggregate-olm-edit, uid: fe0e4574-d6f5-4af2-98b5-20f45c6b917f]" virtual=false 2025-12-12T16:15:20.889449577+00:00 stderr F I1212 16:15:20.889386 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: template-instance-finalizer-controller, uid: b1e2739a-f205-41a0-b196-d35c4b74bd6d]" 2025-12-12T16:15:20.889449577+00:00 stderr F I1212 16:15:20.889420 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: aggregate-olm-view, uid: 42a12b36-f4f3-4176-9c93-3cc5279e3f50]" virtual=false 2025-12-12T16:15:20.903492145+00:00 stderr F I1212 16:15:20.903414 1 garbagecollector.go:548] "item doesn't have an owner, continue on next 
item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: hostpath-provisioner, name: csi-hostpath-provisioner-sa, uid: 905dd134-a0d7-451e-a0c8-3756680dd7db]" 2025-12-12T16:15:20.903492145+00:00 stderr F I1212 16:15:20.903471 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-autoscaler, uid: 86f423b5-d7c6-42a4-89ff-2089943b04e6]" virtual=false 2025-12-12T16:15:20.906733213+00:00 stderr F I1212 16:15:20.906659 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config-managed, name: deployer, uid: 4e689c25-8385-420e-8f95-dc68e131a386]" 2025-12-12T16:15:20.906733213+00:00 stderr F I1212 16:15:20.906708 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-autoscaler-operator, uid: c95714ce-ec97-4ab5-b450-51c3a885f6d9]" virtual=false 2025-12-12T16:15:20.906927588+00:00 stderr F I1212 16:15:20.906894 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: selinux-warning-controller, uid: 3caf07b7-0e8d-44e0-8c7c-6d37439ba95a]" 2025-12-12T16:15:20.906940828+00:00 stderr F I1212 16:15:20.906924 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-autoscaler-operator:cluster-reader, uid: ae390e5b-3179-4936-a6f1-0691a189c71f]" virtual=false 2025-12-12T16:15:20.916445947+00:00 stderr F I1212 16:15:20.916281 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-console, name: default, uid: 2b38afcf-128a-4af7-bc79-6b6889f497bf]" 2025-12-12T16:15:20.916445947+00:00 stderr F I1212 16:15:20.916336 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-image-registry-operator, uid: e50e785b-1640-4641-b0bb-e24ae1240524]" virtual=false 2025-12-12T16:15:20.927174626+00:00 stderr F I1212 16:15:20.926984 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-infra, name: namespace-security-allocation-controller, uid: e72e8646-4643-4bf4-931b-e517fc0fe5b3]" 2025-12-12T16:15:20.927174626+00:00 stderr F I1212 16:15:20.927088 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-monitoring-operator, uid: 02e02888-69ca-4c76-a391-38374550c227]" virtual=false 2025-12-12T16:15:20.927421662+00:00 stderr F I1212 16:15:20.927348 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: kube-system, name: expand-controller, uid: 8331c465-09ca-4963-b41b-c7f4bf6b8de8]" 2025-12-12T16:15:20.927435442+00:00 stderr F I1212 16:15:20.927408 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-monitoring-operator-namespaced, uid: 
4e5afe0e-92fd-4bf8-bfe5-922660926418]" virtual=false 2025-12-12T16:15:20.934670286+00:00 stderr F I1212 16:15:20.932721 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-config, name: deployer, uid: 7cfe1c72-0eeb-4e6d-ab8d-3d8c61e61be4]" 2025-12-12T16:15:20.934670286+00:00 stderr F I1212 16:15:20.932828 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-samples-operator, uid: cfa32561-3685-4d18-b1f6-a98f564bd317]" virtual=false 2025-12-12T16:15:20.939069492+00:00 stderr F I1212 16:15:20.937455 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-network-node-identity, name: network-node-identity, uid: 020e6dbc-bbf9-4bb0-9224-ebe047b05265]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.939069492+00:00 stderr F I1212 16:15:20.937507 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-samples-operator-imageconfig-reader, uid: e4dcdbb5-4304-4535-a6dd-bc550e8fbcc7]" virtual=false 2025-12-12T16:15:20.943336135+00:00 stderr F I1212 16:15:20.942323 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-oauth-apiserver, name: deployer, uid: d9038ebe-ecee-4d9e-9c96-9e2047d7902a]" 2025-12-12T16:15:20.943336135+00:00 stderr F I1212 16:15:20.942379 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-samples-operator-proxy-reader, uid: f1935f50-12ea-48e5-b0aa-1bc11540b309]" virtual=false 2025-12-12T16:15:20.964302120+00:00 stderr F I1212 16:15:20.963686 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-authentication-operator, name: authentication-operator, uid: 063bc733-6edc-4f47-a43c-73cbfb5c3c8d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.964302120+00:00 stderr F I1212 16:15:20.963746 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: console, uid: b95b8768-0164-4998-b6e5-6165e7ba01ef]" virtual=false 2025-12-12T16:15:20.964302120+00:00 stderr F I1212 16:15:20.963849 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cloud-platform-infra, name: deployer, uid: eaf59551-1ed7-4d92-aa27-0bb33e4d035d]" 2025-12-12T16:15:20.964302120+00:00 stderr F I1212 16:15:20.963861 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: console-extensions-reader, uid: dbc7b83f-434c-4c39-8efd-1c96226d30d7]" virtual=false 2025-12-12T16:15:20.964302120+00:00 stderr F 
I1212 16:15:20.963905 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ovirt-infra, name: deployer, uid: 9535ffe2-8039-4653-8c59-34f20d858271]" 2025-12-12T16:15:20.964302120+00:00 stderr F I1212 16:15:20.963917 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: console-operator, uid: 5f877ff6-9616-4569-8c9d-cf1b56e92de2]" virtual=false 2025-12-12T16:15:20.964302120+00:00 stderr F I1212 16:15:20.963961 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-operator, name: deployer, uid: 1e2d702b-d257-47e9-89ec-a4280c329ee3]" 2025-12-12T16:15:20.964302120+00:00 stderr F I1212 16:15:20.963973 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: control-plane-machine-set-operator, uid: 8df6d918-3869-408e-a9d1-00413e2161b7]" virtual=false 2025-12-12T16:15:20.978305157+00:00 stderr F I1212 16:15:20.975077 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-cloud-platform-infra, name: builder, uid: 55185e74-7385-4f31-aa7e-acec94ba9477]" 2025-12-12T16:15:20.978305157+00:00 stderr F I1212 16:15:20.975131 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: csi-snapshot-controller-operator-clusterrole, uid: c9893e84-c318-43a5-8920-61611f43e22f]" virtual=false 2025-12-12T16:15:20.978305157+00:00 stderr F I1212 16:15:20.975284 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-marketplace, name: community-operators, uid: d77f270c-6187-402f-85c4-c59b8121fce6]" owner=[{"apiVersion":"operators.coreos.com/v1alpha1","kind":"CatalogSource","name":"community-operators","uid":"88a656bd-c52a-4813-892e-7e3363ba9ac0","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:20.978305157+00:00 stderr F I1212 16:15:20.975310 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: helm-chartrepos-viewer, uid: e7ccf99c-35cd-4925-bccf-00341a7c0226]" virtual=false 2025-12-12T16:15:20.978305157+00:00 stderr F I1212 16:15:20.977038 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-machine-api, name: cluster-autoscaler-operator, uid: 0918fba0-c5ef-42c6-ab99-2fb4dcb34871]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.978305157+00:00 stderr F I1212 16:15:20.977074 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-controllers, uid: c45cbe9d-a666-4cc5-897d-358f136ded5d]" virtual=false 2025-12-12T16:15:20.997092040+00:00 stderr F I1212 16:15:20.996960 1 garbagecollector.go:567] "item has at least one existing owner, 
will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: 1281df22-53c2-4b26-aa63-b3d41b4761b8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.997092040+00:00 stderr F I1212 16:15:20.997025 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-controllers-metal3-remediation, uid: 27107a82-f68f-46f0-8c16-6ac47755c9a1]" virtual=false 2025-12-12T16:15:20.997204253+00:00 stderr F I1212 16:15:20.997139 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ServiceAccount, namespace: openshift-ingress-operator, name: ingress-operator, uid: 75f07586-843e-4cd5-a497-25f3abe799ec]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:20.997280065+00:00 stderr F I1212 16:15:20.997214 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-controllers-metal3-remediation-aggregation, uid: e9123d81-af7a-48d7-9f87-0a4a73f32564]" virtual=false 2025-12-12T16:15:21.016293323+00:00 stderr F I1212 16:15:21.016152 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: aggregate-olm-view, uid: 42a12b36-f4f3-4176-9c93-3cc5279e3f50]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.016293323+00:00 stderr F I1212 16:15:21.016224 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-operator, uid: 54d437f7-f4ad-48ff-96fe-ad923eacc808]" virtual=false 2025-12-12T16:15:21.020651188+00:00 stderr F I1212 16:15:21.020564 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-autoscaler, uid: 86f423b5-d7c6-42a4-89ff-2089943b04e6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.020651188+00:00 stderr F I1212 16:15:21.020627 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-operator-ext-remediation, uid: dce779d1-65c4-47c8-9017-bf892bf21cd9]" virtual=false 2025-12-12T16:15:21.026776255+00:00 stderr F I1212 16:15:21.026603 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: aggregate-olm-edit, uid: fe0e4574-d6f5-4af2-98b5-20f45c6b917f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:15:21.026776255+00:00 stderr F I1212 16:15:21.026656 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-operator:cluster-reader, uid: b2272cd1-5862-4269-bf93-2f3dacd4d7f5]" virtual=false 2025-12-12T16:15:21.033993009+00:00 stderr F I1212 16:15:21.033879 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-autoscaler-operator, uid: c95714ce-ec97-4ab5-b450-51c3a885f6d9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.033993009+00:00 stderr F I1212 16:15:21.033933 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: marketplace-operator, uid: 6c513536-1398-4479-bcf3-0cb139fcf12b]" virtual=false 2025-12-12T16:15:21.036450528+00:00 stderr F I1212 16:15:21.036403 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-autoscaler-operator:cluster-reader, uid: ae390e5b-3179-4936-a6f1-0691a189c71f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.036450528+00:00 stderr F I1212 16:15:21.036431 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: metrics-daemon-role, uid: cd81010d-36e3-4272-9c11-bc2a176cac77]" virtual=false 2025-12-12T16:15:21.042364001+00:00 stderr F I1212 16:15:21.042309 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-monitoring-operator, uid: 02e02888-69ca-4c76-a391-38374550c227]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.042364001+00:00 stderr F I1212 16:15:21.042343 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: console, uid: 6292b407-20e8-47a5-be99-77a5e1f7a896]" virtual=false 2025-12-12T16:15:21.046233374+00:00 stderr F I1212 16:15:21.046167 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-monitoring-operator-namespaced, uid: 4e5afe0e-92fd-4bf8-bfe5-922660926418]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.046233374+00:00 stderr F I1212 16:15:21.046209 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: multus, uid: 3776dc79-8fd8-4cdf-b093-35c526784a28]" virtual=false 2025-12-12T16:15:21.049756169+00:00 stderr F I1212 16:15:21.049705 1 
garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-samples-operator, uid: cfa32561-3685-4d18-b1f6-a98f564bd317]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.049756169+00:00 stderr F I1212 16:15:21.049736 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: console-operator, uid: dedf5f86-0501-4a1d-b312-0cf452e2800c]" virtual=false 2025-12-12T16:15:21.053586501+00:00 stderr F I1212 16:15:21.053533 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-image-registry-operator, uid: e50e785b-1640-4641-b0bb-e24ae1240524]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.053586501+00:00 stderr F I1212 16:15:21.053560 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: multus-admission-controller-webhook, uid: 1d70faff-2868-472e-86ee-7d454da7084b]" virtual=false 2025-12-12T16:15:21.064639837+00:00 stderr F I1212 16:15:21.064531 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-samples-operator-proxy-reader, uid: f1935f50-12ea-48e5-b0aa-1bc11540b309]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.064639837+00:00 stderr F I1212 16:15:21.064591 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: multus-ancillary-tools, uid: f41f1829-7b7c-453d-9fac-5a6645faf319]" virtual=false 2025-12-12T16:15:21.068015259+00:00 stderr F I1212 16:15:21.067925 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: cluster-samples-operator-imageconfig-reader, uid: e4dcdbb5-4304-4535-a6dd-bc550e8fbcc7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.068015259+00:00 stderr F I1212 16:15:21.067999 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: csi-snapshot-controller-operator-authentication-reader, uid: 7fd23cab-d9ed-41e1-b2d7-92b6985e2dd1]" virtual=false 2025-12-12T16:15:21.071003941+00:00 stderr F I1212 16:15:21.070936 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: console, uid: b95b8768-0164-4998-b6e5-6165e7ba01ef]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.071003941+00:00 stderr F I1212 16:15:21.070985 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: network-diagnostics, uid: b6e21cd4-4c3d-401f-92cc-7c008ff856b3]" virtual=false 2025-12-12T16:15:21.074247739+00:00 stderr F I1212 16:15:21.074173 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: console-extensions-reader, uid: dbc7b83f-434c-4c39-8efd-1c96226d30d7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.074247739+00:00 stderr F I1212 16:15:21.074232 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: net-attach-def-project, uid: 8ea7b21b-7f89-4964-98a8-62069f159aa7]" virtual=false 2025-12-12T16:15:21.077106698+00:00 stderr F I1212 16:15:21.077032 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: console-operator, uid: 5f877ff6-9616-4569-8c9d-cf1b56e92de2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.077125968+00:00 stderr F I1212 16:15:21.077096 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: network-diagnostics, uid: b3eadd94-78e3-4cab-ac8f-94a3b05bb801]" virtual=false 2025-12-12T16:15:21.079903535+00:00 stderr F I1212 16:15:21.079819 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: control-plane-machine-set-operator, uid: 8df6d918-3869-408e-a9d1-00413e2161b7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.079903535+00:00 stderr F I1212 16:15:21.079870 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: network-node-identity, uid: d7ddb0cd-f3ce-4f72-aac7-7fd1c95d3152]" virtual=false 2025-12-12T16:15:21.089494516+00:00 stderr F I1212 16:15:21.089423 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: csi-snapshot-controller-operator-clusterrole, uid: c9893e84-c318-43a5-8920-61611f43e22f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.089494516+00:00 stderr F I1212 16:15:21.089475 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: 
openshift-apiserver-operator, name: prometheus-k8s, uid: 376b7f3a-eb8d-4641-a54e-19994092fb5d]" virtual=false 2025-12-12T16:15:21.096348151+00:00 stderr F I1212 16:15:21.096291 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: helm-chartrepos-viewer, uid: e7ccf99c-35cd-4925-bccf-00341a7c0226]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.096408013+00:00 stderr F I1212 16:15:21.096383 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-apiserver, name: prometheus-k8s, uid: 0d74114b-fee4-4b23-b205-94074f133ad6]" virtual=false 2025-12-12T16:15:21.106308741+00:00 stderr F I1212 16:15:21.106243 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-controllers, uid: c45cbe9d-a666-4cc5-897d-358f136ded5d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.106308741+00:00 stderr F I1212 16:15:21.106291 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-dns-operator, uid: 09d806fd-c04d-48b5-8033-c43fde394f29]" virtual=false 2025-12-12T16:15:21.122638865+00:00 stderr F I1212 16:15:21.122490 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-controllers-metal3-remediation, uid: 27107a82-f68f-46f0-8c16-6ac47755c9a1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.122638865+00:00 stderr F I1212 16:15:21.122546 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ingress-operator, uid: fc4659c5-9661-4e74-b2d9-41b9ec9d8f6b]" virtual=false 2025-12-12T16:15:21.126229761+00:00 stderr F I1212 16:15:21.126152 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-controllers-metal3-remediation-aggregation, uid: e9123d81-af7a-48d7-9f87-0a4a73f32564]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.126229761+00:00 stderr F I1212 16:15:21.126204 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-iptables-alerter, uid: 50c71944-8927-4a26-8fdf-453e314e3135]" virtual=false 2025-12-12T16:15:21.150987638+00:00 stderr F I1212 16:15:21.150868 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" 
item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-operator, uid: 54d437f7-f4ad-48ff-96fe-ad923eacc808]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.150987638+00:00 stderr F I1212 16:15:21.150932 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-cluster-reader, uid: 8a39b760-1b0e-474e-9e1b-f6c769f5ed13]" virtual=false 2025-12-12T16:15:21.153980210+00:00 stderr F I1212 16:15:21.153888 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-operator-ext-remediation, uid: dce779d1-65c4-47c8-9017-bf892bf21cd9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.153980210+00:00 stderr F I1212 16:15:21.153946 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-control-plane-limited, uid: 8d0bf8dc-ddf1-4412-a8d3-d2cca7aae8a6]" virtual=false 2025-12-12T16:15:21.161352977+00:00 stderr F I1212 16:15:21.161150 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: machine-api-operator:cluster-reader, uid: b2272cd1-5862-4269-bf93-2f3dacd4d7f5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.161352977+00:00 stderr F I1212 16:15:21.161250 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-authentication-operator, name: prometheus-k8s, uid: fef91a3e-8916-4ea4-9554-535d6fd3b728]" virtual=false 2025-12-12T16:15:21.166379188+00:00 stderr F I1212 16:15:21.166327 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: marketplace-operator, uid: 6c513536-1398-4479-bcf3-0cb139fcf12b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.166396709+00:00 stderr F I1212 16:15:21.166369 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-authentication, name: prometheus-k8s, uid: 53fed5e4-a2fc-47bb-8401-e9530ad5c771]" virtual=false 2025-12-12T16:15:21.170105518+00:00 stderr F I1212 16:15:21.170054 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: metrics-daemon-role, uid: cd81010d-36e3-4272-9c11-bc2a176cac77]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 
2025-12-12T16:15:21.170120459+00:00 stderr F I1212 16:15:21.170097 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-kube-rbac-proxy, uid: 6000bf1a-c327-42b3-89bf-6737effbc9cc]" virtual=false 2025-12-12T16:15:21.176586704+00:00 stderr F I1212 16:15:21.176505 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: console, uid: 6292b407-20e8-47a5-be99-77a5e1f7a896]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.176586704+00:00 stderr F I1212 16:15:21.176569 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-node-limited, uid: d46dbbf0-e5a5-4718-9cf2-73a3c26e0e10]" virtual=false 2025-12-12T16:15:21.180139250+00:00 stderr F I1212 16:15:21.179873 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: multus, uid: 3776dc79-8fd8-4cdf-b093-35c526784a28]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.180139250+00:00 stderr F I1212 16:15:21.179914 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-udn-editor, uid: 6a005bc8-0efa-4343-81c3-19838aa8a393]" virtual=false 2025-12-12T16:15:21.183161753+00:00 stderr F I1212 16:15:21.183098 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: console-operator, uid: dedf5f86-0501-4a1d-b312-0cf452e2800c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.183161753+00:00 stderr F I1212 16:15:21.183126 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-udn-viewer, uid: 18062659-d954-4db4-a1b2-1563fcdc226e]" virtual=false 2025-12-12T16:15:21.186821391+00:00 stderr F I1212 16:15:21.186756 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: multus-admission-controller-webhook, uid: 1d70faff-2868-472e-86ee-7d454da7084b]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.186821391+00:00 stderr F I1212 16:15:21.186791 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: operatorhub-config-reader, uid: 3a3873d7-8caa-496e-b208-16e11bbb5222]" virtual=false 
2025-12-12T16:15:21.196309500+00:00 stderr F I1212 16:15:21.196214 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: multus-ancillary-tools, uid: f41f1829-7b7c-453d-9fac-5a6645faf319]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.196309500+00:00 stderr F I1212 16:15:21.196269 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiregistration.k8s.io/v1/APIService, namespace: , name: v1.packages.operators.coreos.com, uid: 1eb523e0-0a92-4ae6-84a4-16192bab1fca]" virtual=false 2025-12-12T16:15:21.199709901+00:00 stderr F I1212 16:15:21.199638 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: csi-snapshot-controller-operator-authentication-reader, uid: 7fd23cab-d9ed-41e1-b2d7-92b6985e2dd1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.199709901+00:00 stderr F I1212 16:15:21.199681 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: e384bcab-90cf-46b3-b3fc-3faa6575f457]" virtual=false 2025-12-12T16:15:21.203043692+00:00 stderr F I1212 16:15:21.202994 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: kube-system, name: network-diagnostics, uid: b6e21cd4-4c3d-401f-92cc-7c008ff856b3]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.203043692+00:00 stderr F I1212 16:15:21.203031 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-machine-approver, name: prometheus-k8s, uid: e276478b-d4fd-47f7-887b-1c90c397d03e]" virtual=false 2025-12-12T16:15:21.206782322+00:00 stderr F I1212 16:15:21.206730 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: net-attach-def-project, uid: 8ea7b21b-7f89-4964-98a8-62069f159aa7]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.206804692+00:00 stderr F I1212 16:15:21.206765 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: d9ba455d-d0ef-4279-b91b-2bb734b7a230]" virtual=false 2025-12-12T16:15:21.208806891+00:00 stderr F I1212 16:15:21.208762 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" 
item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: network-diagnostics, uid: b3eadd94-78e3-4cab-ac8f-94a3b05bb801]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.208806891+00:00 stderr F I1212 16:15:21.208791 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-samples-operator, name: prometheus-k8s, uid: b09df175-99ba-4d3c-bace-ce35d4e83009]" virtual=false 2025-12-12T16:15:21.213647467+00:00 stderr F I1212 16:15:21.213583 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: network-node-identity, uid: d7ddb0cd-f3ce-4f72-aac7-7fd1c95d3152]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.213647467+00:00 stderr F I1212 16:15:21.213618 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: project-helm-chartrepository-editor, uid: 0e911b54-6ce2-4e53-81c1-7bde477cb878]" virtual=false 2025-12-12T16:15:21.223028303+00:00 stderr F I1212 16:15:21.222942 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-apiserver-operator, name: prometheus-k8s, uid: 376b7f3a-eb8d-4641-a54e-19994092fb5d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.223028303+00:00 stderr F I1212 16:15:21.222994 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-storage-operator, name: csi-snapshot-controller-operator-role, uid: 675d5d01-bbab-41f4-b22a-cb301a5bfd68]" virtual=false 2025-12-12T16:15:21.229159501+00:00 stderr F I1212 16:15:21.229089 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-apiserver, name: prometheus-k8s, uid: 0d74114b-fee4-4b23-b205-94074f133ad6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.229159501+00:00 stderr F I1212 16:15:21.229134 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheus-k8s-scheduler-resources, uid: 713d6049-bed4-42d4-9469-836c10bbf2a2]" virtual=false 2025-12-12T16:15:21.239214973+00:00 stderr F I1212 16:15:21.239055 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-dns-operator, uid: 09d806fd-c04d-48b5-8033-c43fde394f29]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.239214973+00:00 stderr F I1212 16:15:21.239115 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-version, name: prometheus-k8s, uid: 8406bc8f-05b0-490b-b240-1ec3ce9cbd3f]" virtual=false 2025-12-12T16:15:21.257579816+00:00 stderr F I1212 16:15:21.257476 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ingress-operator, uid: fc4659c5-9661-4e74-b2d9-41b9ec9d8f6b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.257579816+00:00 stderr F I1212 16:15:21.257533 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: console-configmap-reader, uid: e3dfcc46-dd9b-4c39-b4af-1331760e7f87]" virtual=false 2025-12-12T16:15:21.259440340+00:00 stderr F I1212 16:15:21.259372 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-iptables-alerter, uid: 50c71944-8927-4a26-8fdf-453e314e3135]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.259459361+00:00 stderr F I1212 16:15:21.259433 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: console-operator, uid: ccb991ad-450a-4068-adbb-b37825b8e6da]" virtual=false 2025-12-12T16:15:21.262043063+00:00 stderr F I1212 16:15:21.261901 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apiregistration.k8s.io/v1/APIService, namespace: , name: v1.packages.operators.coreos.com, uid: 1eb523e0-0a92-4ae6-84a4-16192bab1fca]" 2025-12-12T16:15:21.262043063+00:00 stderr F I1212 16:15:21.261939 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: console-public, uid: 6a7460b0-4e35-480e-a71b-377d30ca36bd]" virtual=false 2025-12-12T16:15:21.283821648+00:00 stderr F I1212 16:15:21.283653 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-cluster-reader, uid: 8a39b760-1b0e-474e-9e1b-f6c769f5ed13]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.283821648+00:00 stderr F I1212 16:15:21.283722 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: registry-monitoring, uid: 
f680858f-9ffc-4198-be65-7a886972bb9c]" virtual=false 2025-12-12T16:15:21.286116583+00:00 stderr F I1212 16:15:21.286032 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-control-plane-limited, uid: 8d0bf8dc-ddf1-4412-a8d3-d2cca7aae8a6]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.286116583+00:00 stderr F I1212 16:15:21.286090 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: machine-api-controllers, uid: 344d915b-b955-4603-a766-f7a1f3fbba7e]" virtual=false 2025-12-12T16:15:21.293961182+00:00 stderr F I1212 16:15:21.293855 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-authentication-operator, name: prometheus-k8s, uid: fef91a3e-8916-4ea4-9554-535d6fd3b728]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.293961182+00:00 stderr F I1212 16:15:21.293912 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: machine-approver, uid: 3ffb919c-31ad-497c-b082-f862e1b96a30]" virtual=false 2025-12-12T16:15:21.299701700+00:00 stderr F I1212 16:15:21.299593 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-authentication, name: prometheus-k8s, uid: 53fed5e4-a2fc-47bb-8401-e9530ad5c771]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.299701700+00:00 stderr F I1212 16:15:21.299658 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: openshift-network-public-role-binding, uid: 3f544260-b7f2-4307-aa10-76c309175c0a]" virtual=false 2025-12-12T16:15:21.305622243+00:00 stderr F I1212 16:15:21.305542 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-kube-rbac-proxy, uid: 6000bf1a-c327-42b3-89bf-6737effbc9cc]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.305622243+00:00 stderr F I1212 16:15:21.305592 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-operator, name: prometheus-k8s, uid: 398479e3-c16d-452b-8f5e-f3c97e9918d2]" virtual=false 2025-12-12T16:15:21.308835800+00:00 stderr F I1212 16:15:21.308784 1 garbagecollector.go:567] "item has at least one existing owner, will 
not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-node-limited, uid: d46dbbf0-e5a5-4718-9cf2-73a3c26e0e10]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.308835800+00:00 stderr F I1212 16:15:21.308817 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: cluster-samples-operator-openshift-config-secret-reader, uid: 8a0fff18-901e-44e5-a8ee-842ba68cfac0]" virtual=false 2025-12-12T16:15:21.313090043+00:00 stderr F I1212 16:15:21.313027 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-udn-editor, uid: 6a005bc8-0efa-4343-81c3-19838aa8a393]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.313090043+00:00 stderr F I1212 16:15:21.313064 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: console-operator, uid: d129014f-8b06-4143-b4fa-3a0f098c6559]" virtual=false 2025-12-12T16:15:21.316395183+00:00 stderr F I1212 16:15:21.316300 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: openshift-ovn-kubernetes-udn-viewer, uid: 18062659-d954-4db4-a1b2-1563fcdc226e]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.316395183+00:00 stderr F I1212 16:15:21.316376 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: ingress-operator, uid: a361b360-6755-4cb9-b68b-2998a0713612]" virtual=false 2025-12-12T16:15:21.320161883+00:00 stderr F I1212 16:15:21.320095 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: operatorhub-config-reader, uid: 3a3873d7-8caa-496e-b208-16e11bbb5222]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.320161883+00:00 stderr F I1212 16:15:21.320143 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: machine-api-controllers, uid: 25963d68-1e2f-4b0e-a08f-0ce996f7b51f]" virtual=false 2025-12-12T16:15:21.333537306+00:00 stderr F I1212 16:15:21.333412 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 
e384bcab-90cf-46b3-b3fc-3faa6575f457]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.333537306+00:00 stderr F I1212 16:15:21.333482 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console-operator, name: console-operator, uid: 6ee80bf7-3aef-44c3-8ae6-babe6e24ccf2]" virtual=false 2025-12-12T16:15:21.336295442+00:00 stderr F I1212 16:15:21.336239 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-machine-approver, name: prometheus-k8s, uid: e276478b-d4fd-47f7-887b-1c90c397d03e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.336295442+00:00 stderr F I1212 16:15:21.336274 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console-operator, name: prometheus-k8s, uid: 5702339a-1e79-4ada-a460-4e2eff82de4d]" virtual=false 2025-12-12T16:15:21.340231307+00:00 stderr F I1212 16:15:21.340103 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: d9ba455d-d0ef-4279-b91b-2bb734b7a230]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.340231307+00:00 stderr F I1212 16:15:21.340143 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console-user-settings, name: console-user-settings-admin, uid: eb68d863-03ac-4a8d-8ebf-53c9f9785f85]" virtual=false 2025-12-12T16:15:21.342540612+00:00 stderr F I1212 16:15:21.342490 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-samples-operator, name: prometheus-k8s, uid: b09df175-99ba-4d3c-bace-ce35d4e83009]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.342540612+00:00 stderr F I1212 16:15:21.342519 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[batch/v1/CronJob, namespace: openshift-machine-config-operator, name: machine-config-nodes-crd-cleanup, uid: 2f1b5315-e7f1-4f76-a4c7-7be559488f49]" virtual=false 2025-12-12T16:15:21.346025176+00:00 stderr F I1212 16:15:21.345979 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: project-helm-chartrepository-editor, uid: 0e911b54-6ce2-4e53-81c1-7bde477cb878]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:15:21.346025176+00:00 stderr F I1212 16:15:21.346005 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[batch/v1/CronJob, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: a64f7156-efdf-4f2f-bdb6-f498fe674093]" virtual=false 2025-12-12T16:15:21.357164955+00:00 stderr F I1212 16:15:21.357054 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-storage-operator, name: csi-snapshot-controller-operator-role, uid: 675d5d01-bbab-41f4-b22a-cb301a5bfd68]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.357164955+00:00 stderr F I1212 16:15:21.357126 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console, name: console-operator, uid: 1d0d1e48-a9e1-4e4a-8a48-181bf893de48]" virtual=false 2025-12-12T16:15:21.363237381+00:00 stderr F I1212 16:15:21.363127 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: prometheus-k8s-scheduler-resources, uid: 713d6049-bed4-42d4-9469-836c10bbf2a2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.363237381+00:00 stderr F I1212 16:15:21.363189 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console, name: prometheus-k8s, uid: 6da3f11b-9f14-4506-a933-bb3207d8d635]" virtual=false 2025-12-12T16:15:21.374082822+00:00 stderr F I1212 16:15:21.373985 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-cluster-version, name: prometheus-k8s, uid: 8406bc8f-05b0-490b-b240-1ec3ce9cbd3f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.374082822+00:00 stderr F I1212 16:15:21.374047 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[policy/v1/PodDisruptionBudget, namespace: openshift-operator-lifecycle-manager, name: packageserver-pdb, uid: e1546b15-314b-4240-8e11-d5e915c472c7]" virtual=false 2025-12-12T16:15:21.390688062+00:00 stderr F I1212 16:15:21.390587 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: console-configmap-reader, uid: e3dfcc46-dd9b-4c39-b4af-1331760e7f87]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.390688062+00:00 stderr F I1212 16:15:21.390659 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-controller-manager-operator, name: 
prometheus-k8s, uid: 902b45ff-a672-4079-bc39-9c3db9fbed24]" virtual=false 2025-12-12T16:15:21.393147752+00:00 stderr F I1212 16:15:21.393070 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: console-operator, uid: ccb991ad-450a-4068-adbb-b37825b8e6da]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.393166472+00:00 stderr F I1212 16:15:21.393137 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-controller-manager, name: prometheus-k8s, uid: 2adafd1c-c1b3-462c-9c6f-384653b668a8]" virtual=false 2025-12-12T16:15:21.395604831+00:00 stderr F I1212 16:15:21.395554 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: console-public, uid: 6a7460b0-4e35-480e-a71b-377d30ca36bd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.395604831+00:00 stderr F I1212 16:15:21.395582 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-dns-operator, name: dns-operator, uid: 27eea001-0ad1-463e-b0b1-338bbbd06608]" virtual=false 2025-12-12T16:15:21.415895980+00:00 stderr F I1212 16:15:21.415787 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: registry-monitoring, uid: f680858f-9ffc-4198-be65-7a886972bb9c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.415895980+00:00 stderr F I1212 16:15:21.415853 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-dns-operator, name: prometheus-k8s, uid: 2ee6fe08-e91c-466a-ba8b-85813c903312]" virtual=false 2025-12-12T16:15:21.419588859+00:00 stderr F I1212 16:15:21.419520 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: machine-api-controllers, uid: 344d915b-b955-4603-a766-f7a1f3fbba7e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.419588859+00:00 stderr F I1212 16:15:21.419564 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-etcd-operator, name: prometheus-k8s, uid: 5bdcbcd3-fe52-498d-ae10-72a5939afa87]" virtual=false 2025-12-12T16:15:21.427171931+00:00 stderr F I1212 16:15:21.427094 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" 
item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: machine-approver, uid: 3ffb919c-31ad-497c-b082-f862e1b96a30]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.427171931+00:00 stderr F I1212 16:15:21.427141 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:controller:operator-lifecycle-manager, uid: 396b0df4-1cea-4e77-93d2-c31f13f44462]" virtual=false 2025-12-12T16:15:21.433170646+00:00 stderr F I1212 16:15:21.433074 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-managed, name: openshift-network-public-role-binding, uid: 3f544260-b7f2-4307-aa10-76c309175c0a]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.433170646+00:00 stderr F I1212 16:15:21.433145 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: 28e1dbac-2835-4e00-bbcd-c559366575bb]" virtual=false 2025-12-12T16:15:21.436922936+00:00 stderr F I1212 16:15:21.436849 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config-operator, name: prometheus-k8s, uid: 398479e3-c16d-452b-8f5e-f3c97e9918d2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.436922936+00:00 stderr F I1212 16:15:21.436896 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-image-registry, name: node-ca, uid: 5f291cdd-c99d-4124-aa53-a0a0ab505e3f]" virtual=false 2025-12-12T16:15:21.444028617+00:00 stderr F I1212 16:15:21.443931 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: cluster-samples-operator-openshift-config-secret-reader, uid: 8a0fff18-901e-44e5-a8ee-842ba68cfac0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.444028617+00:00 stderr F I1212 16:15:21.443996 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-image-registry, name: prometheus-k8s, uid: 531f27d1-3010-40b8-a5d8-aaca67f7d382]" virtual=false 2025-12-12T16:15:21.447204934+00:00 stderr F I1212 16:15:21.447120 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: console-operator, uid: d129014f-8b06-4143-b4fa-3a0f098c6559]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.447204934+00:00 stderr F I1212 16:15:21.447154 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ingress-operator, name: ingress-operator, uid: 58ce10a7-6f25-4811-bff2-bc4a3cc330ee]" virtual=false 2025-12-12T16:15:21.450754289+00:00 stderr F I1212 16:15:21.450674 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: ingress-operator, uid: a361b360-6755-4cb9-b68b-2998a0713612]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.450754289+00:00 stderr F I1212 16:15:21.450731 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ingress-operator, name: prometheus-k8s, uid: 7627ebcc-7f0f-44e2-a90a-25d7f7f4fe20]" virtual=false 2025-12-12T16:15:21.453680900+00:00 stderr F I1212 16:15:21.453624 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-config, name: machine-api-controllers, uid: 25963d68-1e2f-4b0e-a08f-0ce996f7b51f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.453680900+00:00 stderr F I1212 16:15:21.453652 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kni-infra, name: host-networking-system-node, uid: 67f45f81-1023-4da0-9870-d9545d0217d4]" virtual=false 2025-12-12T16:15:21.467335689+00:00 stderr F I1212 16:15:21.467207 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console-operator, name: console-operator, uid: 6ee80bf7-3aef-44c3-8ae6-babe6e24ccf2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.467335689+00:00 stderr F I1212 16:15:21.467283 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-apiserver-operator, name: prometheus-k8s, uid: 6be26842-4041-4228-90d5-6e63e6300edd]" virtual=false 2025-12-12T16:15:21.469739897+00:00 stderr F I1212 16:15:21.469670 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console-operator, name: prometheus-k8s, uid: 5702339a-1e79-4ada-a460-4e2eff82de4d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.469739897+00:00 stderr F I1212 16:15:21.469703 1 garbagecollector.go:501] "Processing 
item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-apiserver, name: prometheus-k8s, uid: 0b6bc908-facb-4580-82f5-bff11fb34309]" virtual=false 2025-12-12T16:15:21.473260642+00:00 stderr F I1212 16:15:21.473203 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console-user-settings, name: console-user-settings-admin, uid: eb68d863-03ac-4a8d-8ebf-53c9f9785f85]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.473260642+00:00 stderr F I1212 16:15:21.473233 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-controller-manager-operator, name: prometheus-k8s, uid: 04f722e8-f3c8-48f9-8726-d3ed22a8f6db]" virtual=false 2025-12-12T16:15:21.475707771+00:00 stderr F I1212 16:15:21.475653 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[batch/v1/CronJob, namespace: openshift-machine-config-operator, name: machine-config-nodes-crd-cleanup, uid: 2f1b5315-e7f1-4f76-a4c7-7be559488f49]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:21.475707771+00:00 stderr F I1212 16:15:21.475678 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-controller-manager, name: prometheus-k8s, uid: a9588721-392a-4c35-83c5-11f82c4c1cad]" virtual=false 2025-12-12T16:15:21.479785099+00:00 stderr F I1212 16:15:21.479680 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[batch/v1/CronJob, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: a64f7156-efdf-4f2f-bdb6-f498fe674093]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.479785099+00:00 stderr F I1212 16:15:21.479754 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-scheduler-operator, name: prometheus-k8s, uid: 2ad12ea6-7b88-4bb1-ad15-8c6ea12eaa71]" virtual=false 2025-12-12T16:15:21.489692438+00:00 stderr F I1212 16:15:21.489571 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console, name: console-operator, uid: 1d0d1e48-a9e1-4e4a-8a48-181bf893de48]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.489692438+00:00 stderr F I1212 16:15:21.489626 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-scheduler, name: prometheus-k8s, uid: 6bd53a77-3f4a-4220-bb5c-858e7a9b0cd9]" virtual=false 2025-12-12T16:15:21.497035784+00:00 stderr F I1212 
16:15:21.496928 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-console, name: prometheus-k8s, uid: 6da3f11b-9f14-4506-a933-bb3207d8d635]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.497035784+00:00 stderr F I1212 16:15:21.496996 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:cluster-config-operator:cluster-reader, uid: 5adbf822-4950-4513-bbf5-d78ebf25fcb4]" virtual=false 2025-12-12T16:15:21.508012309+00:00 stderr F I1212 16:15:21.507906 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[policy/v1/PodDisruptionBudget, namespace: openshift-operator-lifecycle-manager, name: packageserver-pdb, uid: e1546b15-314b-4240-8e11-d5e915c472c7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.508012309+00:00 stderr F I1212 16:15:21.507967 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:cluster-samples-operator:cluster-reader, uid: 54b07952-e1de-488e-b287-f86269b8bd9c]" virtual=false 2025-12-12T16:15:21.523860011+00:00 stderr F I1212 16:15:21.523591 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-controller-manager-operator, name: prometheus-k8s, uid: 902b45ff-a672-4079-bc39-9c3db9fbed24]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.523860011+00:00 stderr F I1212 16:15:21.523651 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-config-operator, name: prometheus-k8s, uid: 052638af-4f81-4e6a-8c5e-960132e14b35]" virtual=false 2025-12-12T16:15:21.526869853+00:00 stderr F I1212 16:15:21.526816 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-controller-manager, name: prometheus-k8s, uid: 2adafd1c-c1b3-462c-9c6f-384653b668a8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.526869853+00:00 stderr F I1212 16:15:21.526851 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-marketplace, name: marketplace-operator, uid: 8f0da541-5375-4494-a502-94aa8540020e]" virtual=false 2025-12-12T16:15:21.529750923+00:00 stderr F I1212 16:15:21.529696 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: 
openshift-dns-operator, name: dns-operator, uid: 27eea001-0ad1-463e-b0b1-338bbbd06608]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.529770003+00:00 stderr F I1212 16:15:21.529726 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-marketplace, name: openshift-marketplace-metrics, uid: e0e93b2f-fa87-4069-8467-6366728d0a62]" virtual=false 2025-12-12T16:15:21.550657306+00:00 stderr F I1212 16:15:21.550434 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-dns-operator, name: prometheus-k8s, uid: 2ee6fe08-e91c-466a-ba8b-85813c903312]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.550657306+00:00 stderr F I1212 16:15:21.550503 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:controller:machine-approver, uid: 823e8161-9f7c-4be7-a752-8ff8412404eb]" virtual=false 2025-12-12T16:15:21.553420413+00:00 stderr F I1212 16:15:21.553381 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-etcd-operator, name: prometheus-k8s, uid: 5bdcbcd3-fe52-498d-ae10-72a5939afa87]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.553439603+00:00 stderr F I1212 16:15:21.553413 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:machine-config-operator:cluster-reader, uid: 630c9210-f467-4f1f-a8e8-0f29f67432a2]" virtual=false 2025-12-12T16:15:21.561339794+00:00 stderr F I1212 16:15:21.561067 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:controller:operator-lifecycle-manager, uid: 396b0df4-1cea-4e77-93d2-c31f13f44462]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.561339794+00:00 stderr F I1212 16:15:21.561139 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:anyuid, uid: fb1736f6-bcf6-428e-9b30-467c1e706809]" virtual=false 2025-12-12T16:15:21.566301873+00:00 stderr F I1212 16:15:21.566254 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: 28e1dbac-2835-4e00-bbcd-c559366575bb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:15:21.566359354+00:00 stderr F I1212 16:15:21.566341 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:hostaccess, uid: 66f630f7-0880-415b-9902-e5ca5beaa9e4]" virtual=false 2025-12-12T16:15:21.569712125+00:00 stderr F I1212 16:15:21.569684 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-image-registry, name: node-ca, uid: 5f291cdd-c99d-4124-aa53-a0a0ab505e3f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.569755086+00:00 stderr F I1212 16:15:21.569740 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:hostmount, uid: 4d23e07f-bae5-4ee7-b2f1-c6d638dbd6a9]" virtual=false 2025-12-12T16:15:21.575733450+00:00 stderr F I1212 16:15:21.575657 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-image-registry, name: prometheus-k8s, uid: 531f27d1-3010-40b8-a5d8-aaca67f7d382]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.575750641+00:00 stderr F I1212 16:15:21.575722 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:hostmount-anyuid, uid: 089e5aff-9300-48fb-939a-d108d2883fb4]" virtual=false 2025-12-12T16:15:21.580029414+00:00 stderr F I1212 16:15:21.579972 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ingress-operator, name: ingress-operator, uid: 58ce10a7-6f25-4811-bff2-bc4a3cc330ee]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.580029414+00:00 stderr F I1212 16:15:21.580017 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:hostmount-anyuid-v2, uid: c36c6246-d31c-4cbf-a1d6-a1614306dd1b]" virtual=false 2025-12-12T16:15:21.583139969+00:00 stderr F I1212 16:15:21.583080 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ingress-operator, name: prometheus-k8s, uid: 7627ebcc-7f0f-44e2-a90a-25d7f7f4fe20]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.583193640+00:00 stderr F I1212 16:15:21.583147 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:hostnetwork, uid: c0fd5578-c164-4651-bd91-7b97e9d5bb6a]" 
virtual=false 2025-12-12T16:15:21.586649463+00:00 stderr F I1212 16:15:21.586588 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kni-infra, name: host-networking-system-node, uid: 67f45f81-1023-4da0-9870-d9545d0217d4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.586671724+00:00 stderr F I1212 16:15:21.586643 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:hostnetwork-v2, uid: 56de00a9-85f5-4988-a3c2-e9758521894a]" virtual=false 2025-12-12T16:15:21.599966084+00:00 stderr F I1212 16:15:21.599888 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-apiserver-operator, name: prometheus-k8s, uid: 6be26842-4041-4228-90d5-6e63e6300edd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.599966084+00:00 stderr F I1212 16:15:21.599944 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:nested-container, uid: 210213ac-efde-4d55-a832-ce8a634e9e86]" virtual=false 2025-12-12T16:15:21.602996377+00:00 stderr F I1212 16:15:21.602924 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-apiserver, name: prometheus-k8s, uid: 0b6bc908-facb-4580-82f5-bff11fb34309]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.602996377+00:00 stderr F I1212 16:15:21.602967 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:nonroot, uid: a2c10611-bd90-4dd0-ada7-7617b531e742]" virtual=false 2025-12-12T16:15:21.606730507+00:00 stderr F I1212 16:15:21.606675 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-controller-manager-operator, name: prometheus-k8s, uid: 04f722e8-f3c8-48f9-8726-d3ed22a8f6db]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.606730507+00:00 stderr F I1212 16:15:21.606712 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:nonroot-v2, uid: 914e9996-ea72-4f6f-bd16-0cedb827aecc]" virtual=false 2025-12-12T16:15:21.609855462+00:00 stderr F I1212 16:15:21.609804 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: 
openshift-kube-controller-manager, name: prometheus-k8s, uid: a9588721-392a-4c35-83c5-11f82c4c1cad]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.609876513+00:00 stderr F I1212 16:15:21.609851 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:privileged, uid: 27591309-c68c-424b-bd9f-21c556cd6f93]" virtual=false 2025-12-12T16:15:21.612909396+00:00 stderr F I1212 16:15:21.612859 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-scheduler-operator, name: prometheus-k8s, uid: 2ad12ea6-7b88-4bb1-ad15-8c6ea12eaa71]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.612930506+00:00 stderr F I1212 16:15:21.612901 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:restricted, uid: 007a6d58-59f5-40e5-8ee9-c81ac33a0ce1]" virtual=false 2025-12-12T16:15:21.624037944+00:00 stderr F I1212 16:15:21.623968 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-kube-scheduler, name: prometheus-k8s, uid: 6bd53a77-3f4a-4220-bb5c-858e7a9b0cd9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.624037944+00:00 stderr F I1212 16:15:21.624021 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:restricted-v2, uid: 3b791359-05a1-41f2-9a61-6f35999837b0]" virtual=false 2025-12-12T16:15:21.630494290+00:00 stderr F I1212 16:15:21.630402 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:cluster-config-operator:cluster-reader, uid: 5adbf822-4950-4513-bbf5-d78ebf25fcb4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.630494290+00:00 stderr F I1212 16:15:21.630470 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:restricted-v3, uid: febb0e6f-0c55-4d8e-8517-036b929457ee]" virtual=false 2025-12-12T16:15:21.640827249+00:00 stderr F I1212 16:15:21.640733 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:cluster-samples-operator:cluster-reader, uid: 54b07952-e1de-488e-b287-f86269b8bd9c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:15:21.640827249+00:00 stderr F I1212 16:15:21.640806 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-monitoring, name: cluster-monitoring-operator, uid: 7f4699d7-5595-4e90-9a52-ed4769b0c986]" virtual=false 2025-12-12T16:15:21.657475400+00:00 stderr F I1212 16:15:21.657381 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-machine-config-operator, name: prometheus-k8s, uid: 052638af-4f81-4e6a-8c5e-960132e14b35]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.657475400+00:00 stderr F I1212 16:15:21.657436 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-monitoring, name: cluster-monitoring-operator-alert-customization, uid: fba13e8c-652c-4d18-9044-6893349b700d]" virtual=false 2025-12-12T16:15:21.660074702+00:00 stderr F I1212 16:15:21.660015 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-marketplace, name: marketplace-operator, uid: 8f0da541-5375-4494-a502-94aa8540020e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.660074702+00:00 stderr F I1212 16:15:21.660056 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-monitoring, name: console-operator, uid: babad3be-1b02-48ca-8bc3-64551155108c]" virtual=false 2025-12-12T16:15:21.662477450+00:00 stderr F I1212 16:15:21.662426 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-marketplace, name: openshift-marketplace-metrics, uid: e0e93b2f-fa87-4069-8467-6366728d0a62]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.662477450+00:00 stderr F I1212 16:15:21.662459 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-multus, name: multus-whereabouts, uid: 306d9ccf-da01-46ab-b78e-02458b50939f]" virtual=false 2025-12-12T16:15:21.684073530+00:00 stderr F I1212 16:15:21.683986 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:controller:machine-approver, uid: 823e8161-9f7c-4be7-a752-8ff8412404eb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.684073530+00:00 stderr F I1212 16:15:21.684046 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: 
openshift-multus, name: prometheus-k8s, uid: 5fec5a95-01ea-4349-8aeb-22c31f6322b6]" virtual=false 2025-12-12T16:15:21.687538864+00:00 stderr F I1212 16:15:21.686576 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:machine-config-operator:cluster-reader, uid: 630c9210-f467-4f1f-a8e8-0f29f67432a2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.687538864+00:00 stderr F I1212 16:15:21.686631 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-diagnostics, name: network-diagnostics, uid: 00a918ea-d345-4c69-8447-3b22d23e9f7b]" virtual=false 2025-12-12T16:15:21.693291833+00:00 stderr F I1212 16:15:21.693257 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:anyuid, uid: fb1736f6-bcf6-428e-9b30-467c1e706809]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.693345564+00:00 stderr F I1212 16:15:21.693325 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-diagnostics, name: prometheus-k8s, uid: 6695f592-c1ca-4af1-8eaf-0e0b0d8959fc]" virtual=false 2025-12-12T16:15:21.699609795+00:00 stderr F I1212 16:15:21.699560 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:hostaccess, uid: 66f630f7-0880-415b-9902-e5ca5beaa9e4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.699609795+00:00 stderr F I1212 16:15:21.699589 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: whereabouts-cni, uid: 81765a55-8ddf-4236-a0d9-3a707f7f82a2]" virtual=false 2025-12-12T16:15:21.703445097+00:00 stderr F I1212 16:15:21.703389 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:hostmount, uid: 4d23e07f-bae5-4ee7-b2f1-c6d638dbd6a9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.703469398+00:00 stderr F I1212 16:15:21.703430 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-node-identity, name: network-node-identity-leases, uid: 6546e654-a56c-40cd-b5e5-b0af53dc1922]" virtual=false 2025-12-12T16:15:21.710373734+00:00 stderr F I1212 16:15:21.710310 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" 
logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:hostmount-anyuid, uid: 089e5aff-9300-48fb-939a-d108d2883fb4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.710398525+00:00 stderr F I1212 16:15:21.710367 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-node-identity, name: system:openshift:scc:hostnetwork-v2, uid: 7e7e52cd-a43c-47dd-b267-7ac721bc6113]" virtual=false 2025-12-12T16:15:21.713530030+00:00 stderr F I1212 16:15:21.713476 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:hostmount-anyuid-v2, uid: c36c6246-d31c-4cbf-a1d6-a1614306dd1b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.713603222+00:00 stderr F I1212 16:15:21.713577 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-operator, name: prometheus-k8s, uid: 62b3747f-27ac-4810-bbdd-3307f382f740]" virtual=false 2025-12-12T16:15:21.716473251+00:00 stderr F I1212 16:15:21.716437 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:hostnetwork, uid: c0fd5578-c164-4651-bd91-7b97e9d5bb6a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.716534872+00:00 stderr F I1212 16:15:21.716513 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-nutanix-infra, name: host-networking-system-node, uid: 24dfe867-53c3-48b6-bbd7-5225bd207aed]" virtual=false 2025-12-12T16:15:21.719470823+00:00 stderr F I1212 16:15:21.719430 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:hostnetwork-v2, uid: 56de00a9-85f5-4988-a3c2-e9758521894a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.719470823+00:00 stderr F I1212 16:15:21.719456 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-oauth-apiserver, name: prometheus-k8s, uid: 5aa0e1a1-4826-4259-98cd-75c369bad7a7]" virtual=false 2025-12-12T16:15:21.733661325+00:00 stderr F I1212 16:15:21.733598 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:nested-container, uid: 210213ac-efde-4d55-a832-ce8a634e9e86]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.733744127+00:00 stderr F I1212 16:15:21.733723 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-openstack-infra, name: host-networking-system-node, uid: 46d26db4-f098-4515-a946-0219c7756c23]" virtual=false 2025-12-12T16:15:21.736093894+00:00 stderr F I1212 16:15:21.735967 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:nonroot, uid: a2c10611-bd90-4dd0-ada7-7617b531e742]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.736093894+00:00 stderr F I1212 16:15:21.735997 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 68a586c5-c3d1-4cfe-9783-ca5e2027b642]" virtual=false 2025-12-12T16:15:21.739973697+00:00 stderr F I1212 16:15:21.739916 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:nonroot-v2, uid: 914e9996-ea72-4f6f-bd16-0cedb827aecc]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.740040679+00:00 stderr F I1212 16:15:21.740021 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-operator-lifecycle-manager, name: operator-lifecycle-manager-metrics, uid: 1d42a0b6-dd31-46cb-947c-4809e3fc9a44]" virtual=false 2025-12-12T16:15:21.743708867+00:00 stderr F I1212 16:15:21.743634 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:privileged, uid: 27591309-c68c-424b-bd9f-21c556cd6f93]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.743740258+00:00 stderr F I1212 16:15:21.743705 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-control-plane-limited, uid: 80be51ee-dda4-483c-b1c5-948301f3c52e]" virtual=false 2025-12-12T16:15:21.747198451+00:00 stderr F I1212 16:15:21.747118 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:restricted, uid: 007a6d58-59f5-40e5-8ee9-c81ac33a0ce1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.747282453+00:00 stderr F I1212 16:15:21.747259 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-nodes-identity-limited, uid: e866be9e-7de1-4a23-9ef1-15d71a5333a5]" virtual=false 2025-12-12T16:15:21.757735535+00:00 stderr F I1212 16:15:21.757653 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:restricted-v2, uid: 3b791359-05a1-41f2-9a61-6f35999837b0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.757856688+00:00 stderr F I1212 16:15:21.757829 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ovn-kubernetes, name: prometheus-k8s, uid: 68c8d2f6-d2e7-4ba0-8c6d-3a210481e700]" virtual=false 2025-12-12T16:15:21.763748910+00:00 stderr F I1212 16:15:21.763555 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: system:openshift:scc:restricted-v3, uid: febb0e6f-0c55-4d8e-8517-036b929457ee]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.763748910+00:00 stderr F I1212 16:15:21.763629 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-route-controller-manager, name: prometheus-k8s, uid: d3fbd83b-5d77-4307-a4ba-c9c842b63b86]" virtual=false 2025-12-12T16:15:21.774169421+00:00 stderr F I1212 16:15:21.774077 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-monitoring, name: cluster-monitoring-operator, uid: 7f4699d7-5595-4e90-9a52-ed4769b0c986]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.774169421+00:00 stderr F I1212 16:15:21.774148 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-service-ca-operator, name: prometheus-k8s, uid: 51617a9b-76c0-4d9f-8068-7c7a521e3991]" virtual=false 2025-12-12T16:15:21.789522701+00:00 stderr F I1212 16:15:21.789420 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-monitoring, name: cluster-monitoring-operator-alert-customization, uid: fba13e8c-652c-4d18-9044-6893349b700d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.789522701+00:00 stderr F I1212 16:15:21.789489 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-user-workload-monitoring, name: 
cluster-monitoring-operator, uid: b9104e38-7421-465a-bf98-b17ca0561bc6]" virtual=false 2025-12-12T16:15:21.794065680+00:00 stderr F I1212 16:15:21.793977 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-monitoring, name: console-operator, uid: babad3be-1b02-48ca-8bc3-64551155108c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.794217804+00:00 stderr F I1212 16:15:21.794160 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-vsphere-infra, name: host-networking-system-node, uid: 3070c3a2-b2ab-4c6e-9a4b-bdcd112336c5]" virtual=false 2025-12-12T16:15:21.797065102+00:00 stderr F I1212 16:15:21.797023 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-multus, name: multus-whereabouts, uid: 306d9ccf-da01-46ab-b78e-02458b50939f]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.797126994+00:00 stderr F I1212 16:15:21.797104 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift, name: cluster-samples-operator-openshift-edit, uid: 995e7439-f4f4-4fa5-82e0-6afcc588fd52]" virtual=false 2025-12-12T16:15:21.817555026+00:00 stderr F I1212 16:15:21.817454 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-multus, name: prometheus-k8s, uid: 5fec5a95-01ea-4349-8aeb-22c31f6322b6]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.817555026+00:00 stderr F I1212 16:15:21.817535 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift, name: copied-csv-viewers, uid: 7426cf47-f08c-4e75-99c5-9b5462dc97c2]" virtual=false 2025-12-12T16:15:21.820520778+00:00 stderr F I1212 16:15:21.820415 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-diagnostics, name: network-diagnostics, uid: 00a918ea-d345-4c69-8447-3b22d23e9f7b]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.820520778+00:00 stderr F I1212 16:15:21.820493 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator, uid: f8199ef4-1467-44ed-9019-69c1f1737f70]" virtual=false 2025-12-12T16:15:21.826358908+00:00 stderr F I1212 16:15:21.826256 1 garbagecollector.go:567] "item has at least one existing owner, 
will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-diagnostics, name: prometheus-k8s, uid: 6695f592-c1ca-4af1-8eaf-0e0b0d8959fc]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.826358908+00:00 stderr F I1212 16:15:21.826311 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-apiserver, name: apiserver, uid: f913dfec-e49a-4051-9533-8f553abc8845]" virtual=false 2025-12-12T16:15:21.833752866+00:00 stderr F I1212 16:15:21.833651 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRole, namespace: , name: whereabouts-cni, uid: 81765a55-8ddf-4236-a0d9-3a707f7f82a2]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.833752866+00:00 stderr F I1212 16:15:21.833728 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-authentication-operator, name: authentication-operator, uid: 391a5d9a-ccb4-4c96-a945-870a508a19d6]" virtual=false 2025-12-12T16:15:21.836590925+00:00 stderr F I1212 16:15:21.836531 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-node-identity, name: network-node-identity-leases, uid: 6546e654-a56c-40cd-b5e5-b0af53dc1922]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.836590925+00:00 stderr F I1212 16:15:21.836565 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-authentication, name: oauth-openshift, uid: d3695806-c64f-4466-8682-9f2395f1448f]" virtual=false 2025-12-12T16:15:21.842920237+00:00 stderr F I1212 16:15:21.842845 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-node-identity, name: system:openshift:scc:hostnetwork-v2, uid: 7e7e52cd-a43c-47dd-b267-7ac721bc6113]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.842943918+00:00 stderr F I1212 16:15:21.842898 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 614226dc-6dfc-4b23-a9e9-54341ad46bc9]" virtual=false 2025-12-12T16:15:21.846942594+00:00 stderr F I1212 16:15:21.846850 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-network-operator, name: prometheus-k8s, uid: 62b3747f-27ac-4810-bbdd-3307f382f740]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.846942594+00:00 stderr F I1212 16:15:21.846923 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: 83079835-b3de-4de8-ad7d-f332ab909932]" virtual=false 2025-12-12T16:15:21.851266888+00:00 stderr F I1212 16:15:21.851125 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-nutanix-infra, name: host-networking-system-node, uid: 24dfe867-53c3-48b6-bbd7-5225bd207aed]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.851266888+00:00 stderr F I1212 16:15:21.851218 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-cluster-version, name: cluster-version-operator, uid: d5123c8d-63b9-4bc1-a443-acddb48f1d78]" virtual=false 2025-12-12T16:15:21.854707781+00:00 stderr F I1212 16:15:21.854650 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-oauth-apiserver, name: prometheus-k8s, uid: 5aa0e1a1-4826-4259-98cd-75c369bad7a7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.854707781+00:00 stderr F I1212 16:15:21.854685 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-config-operator, name: openshift-config-operator, uid: dc451fc9-e781-493f-8e7d-55e9072cc784]" virtual=false 2025-12-12T16:15:21.866897055+00:00 stderr F I1212 16:15:21.866792 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-openstack-infra, name: host-networking-system-node, uid: 46d26db4-f098-4515-a946-0219c7756c23]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.866897055+00:00 stderr F I1212 16:15:21.866872 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-console-operator, name: console-operator, uid: 4982b9f1-eaf4-44fa-a84a-bf9954aedcb1]" virtual=false 2025-12-12T16:15:21.870600684+00:00 stderr F I1212 16:15:21.870525 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-operator-lifecycle-manager, name: collect-profiles, uid: 68a586c5-c3d1-4cfe-9783-ca5e2027b642]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.870600684+00:00 stderr F I1212 16:15:21.870580 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-console, name: console, uid: 3263a8c6-5259-42d6-a8da-588894b3887d]" virtual=false 2025-12-12T16:15:21.873524515+00:00 stderr F I1212 16:15:21.873444 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-operator-lifecycle-manager, name: operator-lifecycle-manager-metrics, uid: 1d42a0b6-dd31-46cb-947c-4809e3fc9a44]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.873524515+00:00 stderr F I1212 16:15:21.873506 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-console, name: downloads, uid: 61eec0c1-c955-4ca2-b98d-b0e62696a08c]" virtual=false 2025-12-12T16:15:21.876695711+00:00 stderr F I1212 16:15:21.876598 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-control-plane-limited, uid: 80be51ee-dda4-483c-b1c5-948301f3c52e]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.876695711+00:00 stderr F I1212 16:15:21.876666 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator, uid: 5effb0d2-94d8-48b7-8c69-e538f7848429]" virtual=false 2025-12-12T16:15:21.880141914+00:00 stderr F I1212 16:15:21.880084 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ovn-kubernetes, name: openshift-ovn-kubernetes-nodes-identity-limited, uid: e866be9e-7de1-4a23-9ef1-15d71a5333a5]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.880165415+00:00 stderr F I1212 16:15:21.880135 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-controller-manager, name: controller-manager, uid: 2935ab56-0ed7-4afe-8c71-c57de10607f1]" virtual=false 2025-12-12T16:15:21.889825207+00:00 stderr F I1212 16:15:21.889731 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-ovn-kubernetes, name: prometheus-k8s, uid: 68c8d2f6-d2e7-4ba0-8c6d-3a210481e700]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:21.889825207+00:00 stderr F I1212 16:15:21.889793 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-dns-operator, name: dns-operator, uid: 75c9caa6-d284-4a97-95d2-2a04b51f093f]" virtual=false 
2025-12-12T16:15:21.893001644+00:00 stderr F I1212 16:15:21.892942 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-apiserver, name: apiserver, uid: f913dfec-e49a-4051-9533-8f553abc8845]" 2025-12-12T16:15:21.893001644+00:00 stderr F I1212 16:15:21.892985 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-etcd-operator, name: etcd-operator, uid: 7bcc9069-5a71-4f51-8970-90dddeee56b2]" virtual=false 2025-12-12T16:15:21.897799539+00:00 stderr F I1212 16:15:21.897701 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-route-controller-manager, name: prometheus-k8s, uid: d3fbd83b-5d77-4307-a4ba-c9c842b63b86]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.897799539+00:00 stderr F I1212 16:15:21.897770 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: a4c18a44-787c-4851-97ac-f3da87e8d0e3]" virtual=false 2025-12-12T16:15:21.903017855+00:00 stderr F I1212 16:15:21.902928 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-authentication, name: oauth-openshift, uid: d3695806-c64f-4466-8682-9f2395f1448f]" 2025-12-12T16:15:21.903017855+00:00 stderr F I1212 16:15:21.902983 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-image-registry, name: image-registry, uid: d3f5db75-c64f-496e-937b-26ce08f3d633]" virtual=false 2025-12-12T16:15:21.907584745+00:00 stderr F I1212 16:15:21.907494 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-service-ca-operator, name: prometheus-k8s, uid: 51617a9b-76c0-4d9f-8068-7c7a521e3991]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.907584745+00:00 stderr F I1212 16:15:21.907560 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-ingress-operator, name: ingress-operator, uid: dcd260b6-d741-4056-94e9-f063ec7db58c]" virtual=false 2025-12-12T16:15:21.923569710+00:00 stderr F I1212 16:15:21.923460 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-user-workload-monitoring, name: cluster-monitoring-operator, uid: b9104e38-7421-465a-bf98-b17ca0561bc6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.923569710+00:00 stderr F I1212 16:15:21.923527 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: 
openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 3a8705c5-b62b-40a4-8e43-30f0569fa490]" virtual=false 2025-12-12T16:15:21.926203974+00:00 stderr F I1212 16:15:21.926087 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift-vsphere-infra, name: host-networking-system-node, uid: 3070c3a2-b2ab-4c6e-9a4b-bdcd112336c5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.926249475+00:00 stderr F I1212 16:15:21.926212 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 09857aec-2c93-4f0d-9e38-a820bd5b8362]" virtual=false 2025-12-12T16:15:21.932373052+00:00 stderr F I1212 16:15:21.932231 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift, name: cluster-samples-operator-openshift-edit, uid: 995e7439-f4f4-4fa5-82e0-6afcc588fd52]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.932373052+00:00 stderr F I1212 16:15:21.932312 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator, uid: c3ff943a-b570-4a98-8388-1f8a3280a85a]" virtual=false 2025-12-12T16:15:21.947512077+00:00 stderr F I1212 16:15:21.947396 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-controller-manager, name: controller-manager, uid: 2935ab56-0ed7-4afe-8c71-c57de10607f1]" 2025-12-12T16:15:21.947512077+00:00 stderr F I1212 16:15:21.947450 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-storage-version-migrator-operator, name: kube-storage-version-migrator-operator, uid: af746821-921a-4842-94da-28c08769612a]" virtual=false 2025-12-12T16:15:21.949855623+00:00 stderr F I1212 16:15:21.949789 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/RoleBinding, namespace: openshift, name: copied-csv-viewers, uid: 7426cf47-f08c-4e75-99c5-9b5462dc97c2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.949855623+00:00 stderr F I1212 16:15:21.949823 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-storage-version-migrator, name: migrator, uid: e04da894-1c98-4971-8b8f-a1f4a381dbaf]" virtual=false 2025-12-12T16:15:21.953501101+00:00 stderr F I1212 16:15:21.953451 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-apiserver-operator, name: 
openshift-apiserver-operator, uid: f8199ef4-1467-44ed-9019-69c1f1737f70]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.953501101+00:00 stderr F I1212 16:15:21.953483 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: 7b943ba9-3321-444f-9be4-e7b351a28efa]" virtual=false 2025-12-12T16:15:21.967661602+00:00 stderr F I1212 16:15:21.967560 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-authentication-operator, name: authentication-operator, uid: 391a5d9a-ccb4-4c96-a945-870a508a19d6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.967661602+00:00 stderr F I1212 16:15:21.967615 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-machine-api, name: machine-api-operator, uid: 6e3281a2-74ca-4530-b743-ae9a62edcc78]" virtual=false 2025-12-12T16:15:21.969890906+00:00 stderr F I1212 16:15:21.969820 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-image-registry, name: image-registry, uid: d3f5db75-c64f-496e-937b-26ce08f3d633]" 2025-12-12T16:15:21.969910687+00:00 stderr F I1212 16:15:21.969888 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-machine-config-operator, name: machine-config-controller, uid: 12093a0c-63e4-4953-9f6e-fac6da714800]" virtual=false 2025-12-12T16:15:21.978752150+00:00 stderr F I1212 16:15:21.978572 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-cluster-machine-approver, name: machine-approver, uid: 614226dc-6dfc-4b23-a9e9-54341ad46bc9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.978752150+00:00 stderr F I1212 16:15:21.978639 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-machine-config-operator, name: machine-config-operator, uid: 7036b823-caf2-4fe7-9364-95791b080487]" virtual=false 2025-12-12T16:15:21.980388999+00:00 stderr F I1212 16:15:21.980339 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: 83079835-b3de-4de8-ad7d-f332ab909932]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.980388999+00:00 stderr F I1212 16:15:21.980371 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-marketplace, name: marketplace-operator, uid: d268648d-aa1b-439b-844b-8e7f98ea08a3]" 
virtual=false 2025-12-12T16:15:21.983146306+00:00 stderr F I1212 16:15:21.983083 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-cluster-version, name: cluster-version-operator, uid: d5123c8d-63b9-4bc1-a443-acddb48f1d78]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.983146306+00:00 stderr F I1212 16:15:21.983116 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-multus, name: multus-admission-controller, uid: add425b8-cb71-4a29-b746-fade1ff57eee]" virtual=false 2025-12-12T16:15:21.987655724+00:00 stderr F I1212 16:15:21.987590 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-config-operator, name: openshift-config-operator, uid: dc451fc9-e781-493f-8e7d-55e9072cc784]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:21.987655724+00:00 stderr F I1212 16:15:21.987616 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-network-console, name: networking-console-plugin, uid: e8047e30-a40e-4ced-ae42-eea4288c975a]" virtual=false 2025-12-12T16:15:22.000340190+00:00 stderr F I1212 16:15:22.000208 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-console-operator, name: console-operator, uid: 4982b9f1-eaf4-44fa-a84a-bf9954aedcb1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.000340190+00:00 stderr F I1212 16:15:22.000291 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-network-diagnostics, name: network-check-source, uid: e3b48335-28bd-49bf-9cf0-82069658b68a]" virtual=false 2025-12-12T16:15:22.005527485+00:00 stderr F I1212 16:15:22.005392 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-console, name: console, uid: 3263a8c6-5259-42d6-a8da-588894b3887d]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Console","name":"cluster","uid":"72c9b389-7361-48f0-8bf6-56fe26546245","controller":true}] 2025-12-12T16:15:22.005566936+00:00 stderr F I1212 16:15:22.005459 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-network-operator, name: network-operator, uid: 2c897060-d3cf-4d7f-8d38-ef464b7a697a]" virtual=false 2025-12-12T16:15:22.010035593+00:00 stderr F I1212 16:15:22.009947 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-console, name: downloads, uid: 61eec0c1-c955-4ca2-b98d-b0e62696a08c]" 
owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Console","name":"cluster","uid":"72c9b389-7361-48f0-8bf6-56fe26546245","controller":true}] 2025-12-12T16:15:22.010061104+00:00 stderr F I1212 16:15:22.010024 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-oauth-apiserver, name: apiserver, uid: 9752e4b7-10ac-4a96-9fb5-b71be5f959c7]" virtual=false 2025-12-12T16:15:22.011152430+00:00 stderr F I1212 16:15:22.011086 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator, uid: 5effb0d2-94d8-48b7-8c69-e538f7848429]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.011152430+00:00 stderr F I1212 16:15:22.011133 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: catalog-operator, uid: bc11b984-7cfa-489a-9f9a-5f2c0648078f]" virtual=false 2025-12-12T16:15:22.016316055+00:00 stderr F I1212 16:15:22.016204 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-storage-version-migrator, name: migrator, uid: e04da894-1c98-4971-8b8f-a1f4a381dbaf]" 2025-12-12T16:15:22.016316055+00:00 stderr F I1212 16:15:22.016256 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: olm-operator, uid: e6e8c1a2-3934-417b-9f46-0df6a0dbf8d9]" virtual=false 2025-12-12T16:15:22.023452697+00:00 stderr F I1212 16:15:22.023360 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-dns-operator, name: dns-operator, uid: 75c9caa6-d284-4a97-95d2-2a04b51f093f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.023473657+00:00 stderr F I1212 16:15:22.023448 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: package-server-manager, uid: 8043f85f-0f9a-4179-b841-9d68d3642aae]" virtual=false 2025-12-12T16:15:22.025720961+00:00 stderr F I1212 16:15:22.025651 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-etcd-operator, name: etcd-operator, uid: 7bcc9069-5a71-4f51-8970-90dddeee56b2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.025720961+00:00 stderr F I1212 16:15:22.025700 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: packageserver, uid: fd54a58c-a2c9-4287-ac4c-f1cc73f92a6e]" virtual=false 2025-12-12T16:15:22.029287237+00:00 stderr F I1212 16:15:22.029234 1 garbagecollector.go:567] 
"item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-image-registry, name: cluster-image-registry-operator, uid: a4c18a44-787c-4851-97ac-f3da87e8d0e3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.029287237+00:00 stderr F I1212 16:15:22.029262 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-ovn-kubernetes, name: ovnkube-control-plane, uid: 8bfd4bef-4292-4ca1-b90f-38cca09fb8f8]" virtual=false 2025-12-12T16:15:22.035906447+00:00 stderr F I1212 16:15:22.035865 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-machine-config-operator, name: machine-config-controller, uid: 12093a0c-63e4-4953-9f6e-fac6da714800]" 2025-12-12T16:15:22.035927747+00:00 stderr F I1212 16:15:22.035898 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-route-controller-manager, name: route-controller-manager, uid: d871271b-4355-4114-942d-3289298ff327]" virtual=false 2025-12-12T16:15:22.039667617+00:00 stderr F I1212 16:15:22.039603 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-ingress-operator, name: ingress-operator, uid: dcd260b6-d741-4056-94e9-f063ec7db58c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.039685098+00:00 stderr F I1212 16:15:22.039665 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-service-ca-operator, name: service-ca-operator, uid: 1703c560-9cd5-4273-a6b7-22510bce9318]" virtual=false 2025-12-12T16:15:22.055973610+00:00 stderr F I1212 16:15:22.055908 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 3a8705c5-b62b-40a4-8e43-30f0569fa490]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.056004201+00:00 stderr F I1212 16:15:22.055964 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-service-ca, name: service-ca, uid: c2be5831-b4ac-4748-a369-a82d422367a4]" virtual=false 2025-12-12T16:15:22.059573177+00:00 stderr F I1212 16:15:22.059513 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 09857aec-2c93-4f0d-9e38-a820bd5b8362]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.059573177+00:00 stderr F I1212 16:15:22.059550 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator-config, uid: 0460c09a-5deb-4529-9321-8eb18055c720]" virtual=false 2025-12-12T16:15:22.063770358+00:00 stderr F I1212 16:15:22.063715 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator, uid: c3ff943a-b570-4a98-8388-1f8a3280a85a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.063770358+00:00 stderr F I1212 16:15:22.063743 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-apiserver-operator, name: trusted-ca-bundle, uid: e4870e2c-b1a3-40ee-870d-6be3d8416fa3]" virtual=false 2025-12-12T16:15:22.072368435+00:00 stderr F I1212 16:15:22.072294 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-oauth-apiserver, name: apiserver, uid: 9752e4b7-10ac-4a96-9fb5-b71be5f959c7]" 2025-12-12T16:15:22.072368435+00:00 stderr F I1212 16:15:22.072342 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-apiserver, name: revision-status-1, uid: 98275840-6123-4655-aacc-f5208af82455]" virtual=false 2025-12-12T16:15:22.079746243+00:00 stderr F I1212 16:15:22.079676 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-kube-storage-version-migrator-operator, name: kube-storage-version-migrator-operator, uid: af746821-921a-4842-94da-28c08769612a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.079746243+00:00 stderr F I1212 16:15:22.079713 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-authentication-operator, name: authentication-operator-config, uid: 29e06f16-a1a0-4841-8ef4-6f319b3136e7]" virtual=false 2025-12-12T16:15:22.086220819+00:00 stderr F I1212 16:15:22.086124 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-machine-api, name: control-plane-machine-set-operator, uid: 7b943ba9-3321-444f-9be4-e7b351a28efa]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.086220819+00:00 stderr F I1212 16:15:22.086198 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-authentication-operator, name: service-ca-bundle, uid: 03ab4823-2e17-4cbb-91e3-53adebde54a8]" virtual=false 2025-12-12T16:15:22.100017401+00:00 stderr F I1212 16:15:22.099947 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-machine-api, name: machine-api-operator, uid: 
6e3281a2-74ca-4530-b743-ae9a62edcc78]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.100017401+00:00 stderr F I1212 16:15:22.100004 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-authentication-operator, name: trusted-ca-bundle, uid: 3793e7a8-8895-4cac-aa6f-506f52f527ac]" virtual=false 2025-12-12T16:15:22.102854369+00:00 stderr F I1212 16:15:22.102822 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-route-controller-manager, name: route-controller-manager, uid: d871271b-4355-4114-942d-3289298ff327]" 2025-12-12T16:15:22.102873590+00:00 stderr F I1212 16:15:22.102847 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-cluster-machine-approver, name: kube-rbac-proxy, uid: 78f8b8c9-a63a-46ff-8363-9dc3399ca474]" virtual=false 2025-12-12T16:15:22.109653153+00:00 stderr F I1212 16:15:22.109585 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-machine-config-operator, name: machine-config-operator, uid: 7036b823-caf2-4fe7-9364-95791b080487]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.109676034+00:00 stderr F I1212 16:15:22.109640 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: admin-gates, uid: 3680cec0-5c0f-4291-87b8-30dabaa1c6bf]" virtual=false 2025-12-12T16:15:22.112742908+00:00 stderr F I1212 16:15:22.112688 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-multus, name: multus-admission-controller, uid: add425b8-cb71-4a29-b746-fade1ff57eee]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.112760528+00:00 stderr F I1212 16:15:22.112732 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-cluster-total, uid: d92d6a2e-2713-4f23-af8b-f867e1f08d0f]" virtual=false 2025-12-12T16:15:22.116433687+00:00 stderr F I1212 16:15:22.116388 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-marketplace, name: marketplace-operator, uid: d268648d-aa1b-439b-844b-8e7f98ea08a3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.116433687+00:00 stderr F I1212 16:15:22.116420 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-cluster, uid: e5c9ba21-d583-4acb-b833-94f1198d5ab5]" virtual=false 2025-12-12T16:15:22.120292280+00:00 stderr F I1212 
16:15:22.120250 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-network-console, name: networking-console-plugin, uid: e8047e30-a40e-4ced-ae42-eea4288c975a]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.120292280+00:00 stderr F I1212 16:15:22.120278 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-namespace, uid: 8d2edc3f-4c5c-4ec5-9176-d9cc9534b507]" virtual=false 2025-12-12T16:15:22.122233266+00:00 stderr F I1212 16:15:22.122156 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-service-ca, name: service-ca, uid: c2be5831-b4ac-4748-a369-a82d422367a4]" 2025-12-12T16:15:22.122273837+00:00 stderr F I1212 16:15:22.122225 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-node, uid: d81965cd-f883-4273-8d60-72d4cb125594]" virtual=false 2025-12-12T16:15:22.133701033+00:00 stderr F I1212 16:15:22.133626 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-network-diagnostics, name: network-check-source, uid: e3b48335-28bd-49bf-9cf0-82069658b68a]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.133701033+00:00 stderr F I1212 16:15:22.133684 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-pod, uid: a6a05fcd-e149-4da2-8605-ec8dcde98967]" virtual=false 2025-12-12T16:15:22.135772062+00:00 stderr F I1212 16:15:22.135718 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-network-operator, name: network-operator, uid: 2c897060-d3cf-4d7f-8d38-ef464b7a697a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.135772062+00:00 stderr F I1212 16:15:22.135743 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-workload, uid: cb9cdd2c-716a-473e-b72e-73bf990cf716]" virtual=false 2025-12-12T16:15:22.138197221+00:00 stderr F I1212 16:15:22.138143 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-apiserver, name: revision-status-1, uid: 98275840-6123-4655-aacc-f5208af82455]" 2025-12-12T16:15:22.138197221+00:00 stderr F I1212 16:15:22.138168 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-workloads-namespace, uid: 
b8105616-68c4-435a-9245-55e9c46771ae]" virtual=false 2025-12-12T16:15:22.142843113+00:00 stderr F I1212 16:15:22.142779 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: catalog-operator, uid: bc11b984-7cfa-489a-9f9a-5f2c0648078f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.142843113+00:00 stderr F I1212 16:15:22.142818 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-namespace-by-pod, uid: 45990539-39a3-4ba8-a3c5-7f8a8d43720f]" virtual=false 2025-12-12T16:15:22.149859282+00:00 stderr F I1212 16:15:22.149811 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: olm-operator, uid: e6e8c1a2-3934-417b-9f46-0df6a0dbf8d9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.149859282+00:00 stderr F I1212 16:15:22.149839 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-node-cluster-rsrc-use, uid: fa3cd139-cca8-48f7-8dfa-a5b39e2abb93]" virtual=false 2025-12-12T16:15:22.155978749+00:00 stderr F I1212 16:15:22.155926 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: package-server-manager, uid: 8043f85f-0f9a-4179-b841-9d68d3642aae]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.155978749+00:00 stderr F I1212 16:15:22.155952 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-node-rsrc-use, uid: 569e3e4e-b085-47ef-9c8c-21ec4ca092f8]" virtual=false 2025-12-12T16:15:22.159935815+00:00 stderr F I1212 16:15:22.159888 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-operator-lifecycle-manager, name: packageserver, uid: fd54a58c-a2c9-4287-ac4c-f1cc73f92a6e]" owner=[{"apiVersion":"operators.coreos.com/v1alpha1","kind":"ClusterServiceVersion","name":"packageserver","uid":"09b3d4b2-fc47-4ee0-a331-67a39502cf21","controller":false,"blockOwnerDeletion":false}] 2025-12-12T16:15:22.159935815+00:00 stderr F I1212 16:15:22.159918 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-pod-total, uid: 453139e4-0319-4726-82b1-4f496e81434b]" virtual=false 2025-12-12T16:15:22.161940823+00:00 stderr F I1212 16:15:22.161905 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-ovn-kubernetes, name: 
ovnkube-control-plane, uid: 8bfd4bef-4292-4ca1-b90f-38cca09fb8f8]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.161982584+00:00 stderr F I1212 16:15:22.161966 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-prometheus, uid: 51e84290-d8c9-4d3b-9936-601863537791]" virtual=false 2025-12-12T16:15:22.172802485+00:00 stderr F I1212 16:15:22.172697 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apps/v1/Deployment, namespace: openshift-service-ca-operator, name: service-ca-operator, uid: 1703c560-9cd5-4273-a6b7-22510bce9318]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.172802485+00:00 stderr F I1212 16:15:22.172766 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: etcd-dashboard, uid: 25df2d95-6900-4ccf-ae26-d8063c384f29]" virtual=false 2025-12-12T16:15:22.196798943+00:00 stderr F I1212 16:15:22.196317 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator-config, uid: 0460c09a-5deb-4529-9321-8eb18055c720]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.196798943+00:00 stderr F I1212 16:15:22.196372 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: grafana-dashboard-apiserver-performance, uid: 4b2354d3-44f4-4ce2-aee8-a2fc3853a98b]" virtual=false 2025-12-12T16:15:22.200816819+00:00 stderr F I1212 16:15:22.199709 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-apiserver-operator, name: trusted-ca-bundle, uid: e4870e2c-b1a3-40ee-870d-6be3d8416fa3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:22.200816819+00:00 stderr F I1212 16:15:22.199740 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: node-cluster, uid: 22f15010-d1ca-4097-8c09-5f6abd7133cb]" virtual=false 2025-12-12T16:15:22.212879460+00:00 stderr F I1212 16:15:22.212823 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-authentication-operator, name: authentication-operator-config, uid: 29e06f16-a1a0-4841-8ef4-6f319b3136e7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.212879460+00:00 stderr F I1212 16:15:22.212861 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: 
openshift-config-managed, name: openshift-network-features, uid: 38e41668-0d59-4727-95be-557e985b00b4]" virtual=false 2025-12-12T16:15:22.219532640+00:00 stderr F I1212 16:15:22.219470 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-authentication-operator, name: service-ca-bundle, uid: 03ab4823-2e17-4cbb-91e3-53adebde54a8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.219532640+00:00 stderr F I1212 16:15:22.219499 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: release-verification, uid: 59da9e83-cee9-4730-b30e-ca58e8763a4b]" virtual=false 2025-12-12T16:15:22.232093673+00:00 stderr F I1212 16:15:22.232032 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-authentication-operator, name: trusted-ca-bundle, uid: 3793e7a8-8895-4cac-aa6f-506f52f527ac]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:22.232117664+00:00 stderr F I1212 16:15:22.232081 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config, name: installer-images, uid: 01612b6d-7428-4473-93ad-08016393a900]" virtual=false 2025-12-12T16:15:22.236646103+00:00 stderr F I1212 16:15:22.236602 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-cluster-machine-approver, name: kube-rbac-proxy, uid: 78f8b8c9-a63a-46ff-8363-9dc3399ca474]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.236664123+00:00 stderr F I1212 16:15:22.236645 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-console-operator, name: console-operator-config, uid: 5a9c5550-185f-4031-bbc1-1f01e01294a1]" virtual=false 2025-12-12T16:15:22.242539855+00:00 stderr F I1212 16:15:22.242465 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: admin-gates, uid: 3680cec0-5c0f-4291-87b8-30dabaa1c6bf]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.242539855+00:00 stderr F I1212 16:15:22.242495 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-console-operator, name: telemetry-config, uid: 6dcd23fc-7253-4882-bb1f-3352e899b423]" virtual=false 2025-12-12T16:15:22.246332216+00:00 stderr F I1212 16:15:22.246304 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-cluster-total, uid: d92d6a2e-2713-4f23-af8b-f867e1f08d0f]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.246359237+00:00 stderr F I1212 16:15:22.246327 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-console-operator, name: trusted-ca, uid: 78374417-3661-4c04-b1a1-0b4c5a1a6af7]" virtual=false 2025-12-12T16:15:22.249319808+00:00 stderr F I1212 16:15:22.249273 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-cluster, uid: e5c9ba21-d583-4acb-b833-94f1198d5ab5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.249319808+00:00 stderr F I1212 16:15:22.249302 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-images, uid: 3832d875-802b-4913-a66f-e8ac89956e81]" virtual=false 2025-12-12T16:15:22.252620427+00:00 stderr F I1212 16:15:22.252582 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-namespace, uid: 8d2edc3f-4c5c-4ec5-9176-d9cc9534b507]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.252620427+00:00 stderr F I1212 16:15:22.252607 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator-config, uid: c2cc6eb9-b241-447f-8aec-c5ac92d13d67]" virtual=false 2025-12-12T16:15:22.259564465+00:00 stderr F I1212 16:15:22.259507 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-node, uid: d81965cd-f883-4273-8d60-72d4cb125594]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.259564465+00:00 stderr F I1212 16:15:22.259532 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd-operator, name: etcd-ca-bundle, uid: 6b2c1752-2a08-4bc9-b635-9cfdbfe46be7]" virtual=false 2025-12-12T16:15:22.263465389+00:00 stderr F I1212 16:15:22.263416 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-pod, uid: a6a05fcd-e149-4da2-8605-ec8dcde98967]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.263465389+00:00 stderr F I1212 16:15:22.263441 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd-operator, name: 
etcd-operator-config, uid: d3443717-bd0b-4439-948e-c5a15bd7afe3]" virtual=false 2025-12-12T16:15:22.269609967+00:00 stderr F I1212 16:15:22.269536 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-workload, uid: cb9cdd2c-716a-473e-b72e-73bf990cf716]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.269609967+00:00 stderr F I1212 16:15:22.269565 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd-operator, name: etcd-service-ca-bundle, uid: 973b291d-38fb-414d-9a6e-45f5a3945bb0]" virtual=false 2025-12-12T16:15:22.272939917+00:00 stderr F I1212 16:15:22.272889 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-k8s-resources-workloads-namespace, uid: b8105616-68c4-435a-9245-55e9c46771ae]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.272939917+00:00 stderr F I1212 16:15:22.272914 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-image-registry, name: trusted-ca, uid: 9f4415b2-fbd5-47b3-8169-1efaed0d251e]" virtual=false 2025-12-12T16:15:22.275480658+00:00 stderr F I1212 16:15:22.275428 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-namespace-by-pod, uid: 45990539-39a3-4ba8-a3c5-7f8a8d43720f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.275480658+00:00 stderr F I1212 16:15:22.275455 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-ingress-operator, name: trusted-ca, uid: 4051881b-c1aa-4bd5-a631-cd6296ac1f64]" virtual=false 2025-12-12T16:15:22.283780208+00:00 stderr F I1212 16:15:22.283678 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-node-cluster-rsrc-use, uid: fa3cd139-cca8-48f7-8dfa-a5b39e2abb93]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.283799749+00:00 stderr F I1212 16:15:22.283779 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator-config, uid: 7f8e88a6-f42f-43ac-a331-9780bdd83dcd]" virtual=false 2025-12-12T16:15:22.289138047+00:00 stderr F I1212 16:15:22.289061 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-node-rsrc-use, uid: 
569e3e4e-b085-47ef-9c8c-21ec4ca092f8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.289138047+00:00 stderr F I1212 16:15:22.289091 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator-config, uid: ae00bb8f-1b2a-469a-aece-0cdda9885a20]" virtual=false 2025-12-12T16:15:22.292396586+00:00 stderr F I1212 16:15:22.292345 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-pod-total, uid: 453139e4-0319-4726-82b1-4f496e81434b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.292396586+00:00 stderr F I1212 16:15:22.292375 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator-config, uid: a0a8541b-3197-4955-b6a0-a48f553f3956]" virtual=false 2025-12-12T16:15:22.295902820+00:00 stderr F I1212 16:15:22.295843 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: dashboard-prometheus, uid: 51e84290-d8c9-4d3b-9936-601863537791]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.295902820+00:00 stderr F I1212 16:15:22.295876 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[scheduling.k8s.io/v1/PriorityClass, namespace: , name: openshift-user-critical, uid: 939331d2-6c2b-4c67-bde3-0cc8b90c1ff7]" virtual=false 2025-12-12T16:15:22.307340476+00:00 stderr F I1212 16:15:22.307283 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: etcd-dashboard, uid: 25df2d95-6900-4ccf-ae26-d8063c384f29]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.307340476+00:00 stderr F I1212 16:15:22.307309 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingWebhookConfiguration, namespace: , name: controlplanemachineset.machine.openshift.io, uid: 1068b539-f852-4d7e-84a0-55f568caa9e2]" virtual=false 2025-12-12T16:15:22.323925125+00:00 stderr F I1212 16:15:22.323814 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: grafana-dashboard-apiserver-performance, uid: 4b2354d3-44f4-4ce2-aee8-a2fc3853a98b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.323925125+00:00 stderr F I1212 16:15:22.323899 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingWebhookConfiguration, namespace: , name: monitoringconfigmaps.openshift.io, uid: bba97720-2c3c-4f37-90cb-dbc1dc32be7a]" virtual=false 2025-12-12T16:15:22.337779349+00:00 stderr F I1212 16:15:22.337677 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: node-cluster, uid: 22f15010-d1ca-4097-8c09-5f6abd7133cb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.337779349+00:00 stderr F I1212 16:15:22.337747 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingWebhookConfiguration, namespace: , name: multus.openshift.io, uid: 6af572ca-45a2-4100-8c12-1c0aea97f3b4]" virtual=false 2025-12-12T16:15:22.343764783+00:00 stderr F I1212 16:15:22.343696 1 node_lifecycle_controller.go:791] "Node is unresponsive. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:15:22.343678791 +0000 UTC m=+20.901399260" 2025-12-12T16:15:22.347107364+00:00 stderr F I1212 16:15:22.347033 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: openshift-network-features, uid: 38e41668-0d59-4727-95be-557e985b00b4]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.347107364+00:00 stderr F I1212 16:15:22.347092 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingWebhookConfiguration, namespace: , name: network-node-identity.openshift.io, uid: 5c2fe880-8ccc-45c8-8bc7-7f3bd0bb9e5f]" virtual=false 2025-12-12T16:15:22.355698511+00:00 stderr F I1212 16:15:22.355632 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config-managed, name: release-verification, uid: 59da9e83-cee9-4730-b30e-ca58e8763a4b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.355716031+00:00 stderr F I1212 16:15:22.355687 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-apiserver, uid: 6ae7e0b1-7285-4b91-8185-0f0d09bbeed0]" virtual=false 2025-12-12T16:15:22.366870080+00:00 stderr F I1212 16:15:22.366789 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-config, name: installer-images, uid: 01612b6d-7428-4473-93ad-08016393a900]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.366870080+00:00 stderr F I1212 16:15:22.366843 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-apiserver-operator, uid: 910e9b77-6993-4d88-919b-285b0d98dd8d]" virtual=false 2025-12-12T16:15:22.370713852+00:00 stderr F I1212 16:15:22.370614 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-console-operator, name: console-operator-config, uid: 5a9c5550-185f-4031-bbc1-1f01e01294a1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.370713852+00:00 stderr F I1212 16:15:22.370672 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-apiserver-sar, uid: 85ed3e5a-252f-49da-95e4-a9d3aa5416b3]" virtual=false 2025-12-12T16:15:22.379931765+00:00 stderr F I1212 16:15:22.379836 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-console-operator, name: trusted-ca, uid: 78374417-3661-4c04-b1a1-0b4c5a1a6af7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:22.379931765+00:00 stderr F I1212 16:15:22.379892 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-authentication-operator, uid: 72792e68-b956-413e-a42d-b3a7532c61e7]" virtual=false 2025-12-12T16:15:22.380305224+00:00 stderr F I1212 16:15:22.380253 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-console-operator, name: telemetry-config, uid: 6dcd23fc-7253-4882-bb1f-3352e899b423]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.380305224+00:00 stderr F I1212 16:15:22.380284 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-controller-manager, uid: 107f3a52-f2f2-42f9-9b7e-5bc54ef64b6d]" virtual=false 2025-12-12T16:15:22.382412634+00:00 stderr F I1212 16:15:22.382334 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-images, uid: 3832d875-802b-4913-a66f-e8ac89956e81]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.382412634+00:00 stderr F I1212 16:15:22.382385 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-etcd-operator, uid: 3f75dd62-0a83-41c0-9df3-02ad0e9e0ebf]" virtual=false 2025-12-12T16:15:22.387976478+00:00 stderr F I1212 16:15:22.387865 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: 
openshift-controller-manager-operator, name: openshift-controller-manager-operator-config, uid: c2cc6eb9-b241-447f-8aec-c5ac92d13d67]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.387976478+00:00 stderr F I1212 16:15:22.387912 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-kube-apiserver-operator, uid: 99fc4261-d4bb-4f96-8703-2e57100904d4]" virtual=false 2025-12-12T16:15:22.393138693+00:00 stderr F I1212 16:15:22.393005 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd-operator, name: etcd-ca-bundle, uid: 6b2c1752-2a08-4bc9-b635-9cfdbfe46be7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:22.393138693+00:00 stderr F I1212 16:15:22.393095 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-monitoring-metrics, uid: 882936af-d591-411a-b504-7d49ece6e25c]" virtual=false 2025-12-12T16:15:22.395926490+00:00 stderr F I1212 16:15:22.395844 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd-operator, name: etcd-operator-config, uid: d3443717-bd0b-4439-948e-c5a15bd7afe3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.395926490+00:00 stderr F I1212 16:15:22.395908 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-oauth-apiserver, uid: 842b50c1-3389-4ca9-880f-726a67f8b5b2]" virtual=false 2025-12-12T16:15:22.399953287+00:00 stderr F I1212 16:15:22.399876 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-etcd-operator, name: etcd-service-ca-bundle, uid: 973b291d-38fb-414d-9a6e-45f5a3945bb0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:22.399953287+00:00 stderr F I1212 16:15:22.399929 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-oauth-apiserver-sar, uid: 67d4f009-cc38-433f-8462-7b254bf70042]" virtual=false 2025-12-12T16:15:22.409362634+00:00 stderr F I1212 16:15:22.409126 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-image-registry, name: trusted-ca, uid: 9f4415b2-fbd5-47b3-8169-1efaed0d251e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:22.409362634+00:00 stderr F I1212 16:15:22.409204 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-oauth-server, uid: 6974b18b-698a-44c4-b91c-4986016ddc0e]" virtual=false 2025-12-12T16:15:22.413398721+00:00 stderr F I1212 16:15:22.413299 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-ingress-operator, name: trusted-ca, uid: 4051881b-c1aa-4bd5-a631-cd6296ac1f64]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:22.413398721+00:00 stderr F I1212 16:15:22.413366 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-ovn-kubernetes, uid: 76336446-d881-4c62-80aa-a8abd361631e]" virtual=false 2025-12-12T16:15:22.415779818+00:00 stderr F I1212 16:15:22.415725 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator-config, uid: 7f8e88a6-f42f-43ac-a331-9780bdd83dcd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.415779818+00:00 stderr F I1212 16:15:22.415764 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicyBinding, namespace: , name: openshift-ingress-operator-gatewayapi-crd-admission, uid: 3927d6c5-f736-455c-89c7-629d8deb5834]" virtual=false 2025-12-12T16:15:22.423130855+00:00 stderr F I1212 16:15:22.423040 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator-config, uid: ae00bb8f-1b2a-469a-aece-0cdda9885a20]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.423130855+00:00 stderr F I1212 16:15:22.423111 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicyBinding, namespace: , name: openshift-storage-policy-validation-binding, uid: 177a2f7d-d096-4fb9-9e5c-8c4f55729c5a]" virtual=false 2025-12-12T16:15:22.425750458+00:00 stderr F I1212 16:15:22.425629 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-scheduler-operator, name: openshift-kube-scheduler-operator-config, uid: a0a8541b-3197-4955-b6a0-a48f553f3956]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.425750458+00:00 stderr F I1212 16:15:22.425691 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicyBinding, namespace: , name: user-defined-networks-namespace-label-binding, uid: b35820f9-16e9-4c63-bcf1-aa21a6b55045]" virtual=false 2025-12-12T16:15:22.428768471+00:00 stderr F I1212 16:15:22.428700 1 
garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[scheduling.k8s.io/v1/PriorityClass, namespace: , name: openshift-user-critical, uid: 939331d2-6c2b-4c67-bde3-0cc8b90c1ff7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.428768471+00:00 stderr F I1212 16:15:22.428739 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/PriorityLevelConfiguration, namespace: , name: openshift-control-plane-operators, uid: 5235f471-c3f2-46fc-a34a-dc0d9c056f48]" virtual=false 2025-12-12T16:15:22.440127135+00:00 stderr F I1212 16:15:22.439971 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingWebhookConfiguration, namespace: , name: controlplanemachineset.machine.openshift.io, uid: 1068b539-f852-4d7e-84a0-55f568caa9e2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.440127135+00:00 stderr F I1212 16:15:22.440040 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-autoscaler, uid: a124e7f8-3a72-4563-ade1-2b93ffbbf4b1]" virtual=false 2025-12-12T16:15:22.456901699+00:00 stderr F I1212 16:15:22.456759 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingWebhookConfiguration, namespace: , name: monitoringconfigmaps.openshift.io, uid: bba97720-2c3c-4f37-90cb-dbc1dc32be7a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.456901699+00:00 stderr F I1212 16:15:22.456824 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-storage-version-migrator-operator, name: config, uid: 0f688556-c69f-4593-b345-7d4c88879b2a]" virtual=false 2025-12-12T16:15:22.469988504+00:00 stderr F I1212 16:15:22.469884 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingWebhookConfiguration, namespace: , name: multus.openshift.io, uid: 6af572ca-45a2-4100-8c12-1c0aea97f3b4]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.469988504+00:00 stderr F I1212 16:15:22.469938 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-machine-config-operator, name: coreos-bootimages, uid: 5a7d5808-6b9d-44d5-b5dd-f49d306526e9]" virtual=false 2025-12-12T16:15:22.476054310+00:00 stderr F I1212 16:15:22.475982 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingWebhookConfiguration, namespace: , name: network-node-identity.openshift.io, uid: 
5c2fe880-8ccc-45c8-8bc7-7f3bd0bb9e5f]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.476054310+00:00 stderr F I1212 16:15:22.476034 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-machine-config-operator, name: kube-rbac-proxy, uid: 6a3c3605-4908-48ca-85be-ba92a7f391f5]" virtual=false 2025-12-12T16:15:22.485947279+00:00 stderr F I1212 16:15:22.485879 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-apiserver, uid: 6ae7e0b1-7285-4b91-8185-0f0d09bbeed0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.485986449+00:00 stderr F I1212 16:15:22.485934 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-machine-config-operator, name: machine-config-operator-images, uid: 745ac27e-f118-42ee-9c62-98a4539acedf]" virtual=false 2025-12-12T16:15:22.500382256+00:00 stderr F I1212 16:15:22.500269 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-apiserver-operator, uid: 910e9b77-6993-4d88-919b-285b0d98dd8d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.500382256+00:00 stderr F I1212 16:15:22.500326 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-machine-config-operator, name: machine-config-osimageurl, uid: 329c3a31-e4b6-40dc-8f8b-5f52451f0d5d]" virtual=false 2025-12-12T16:15:22.503257106+00:00 stderr F I1212 16:15:22.503162 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-apiserver-sar, uid: 85ed3e5a-252f-49da-95e4-a9d3aa5416b3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.503257106+00:00 stderr F I1212 16:15:22.503231 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-marketplace, name: marketplace-trusted-ca, uid: 0eeb115f-95d1-4d38-85be-287d6bfd3afd]" virtual=false 2025-12-12T16:15:22.510348226+00:00 stderr F I1212 16:15:22.510216 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-authentication-operator, uid: 72792e68-b956-413e-a42d-b3a7532c61e7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.510348226+00:00 stderr F I1212 16:15:22.510313 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[v1/ConfigMap, namespace: openshift-monitoring, name: telemetry-config, uid: a1a27520-d042-4480-9b9e-33ba68422208]" virtual=false 2025-12-12T16:15:22.513361019+00:00 stderr F I1212 16:15:22.513298 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-controller-manager, uid: 107f3a52-f2f2-42f9-9b7e-5bc54ef64b6d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.513361019+00:00 stderr F I1212 16:15:22.513337 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: cni-copy-resources, uid: 29088134-82cd-49c9-891c-3834b37dd6f8]" virtual=false 2025-12-12T16:15:22.516471184+00:00 stderr F I1212 16:15:22.516344 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-etcd-operator, uid: 3f75dd62-0a83-41c0-9df3-02ad0e9e0ebf]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.516471184+00:00 stderr F I1212 16:15:22.516395 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: default-cni-sysctl-allowlist, uid: 5e80d595-c63d-44bf-914d-7f260b4d19dd]" virtual=false 2025-12-12T16:15:22.520821439+00:00 stderr F I1212 16:15:22.520771 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-kube-apiserver-operator, uid: 99fc4261-d4bb-4f96-8703-2e57100904d4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.520821439+00:00 stderr F I1212 16:15:22.520795 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: multus-daemon-config, uid: 46bee54e-7954-481f-b845-7ef5fd73b0e0]" virtual=false 2025-12-12T16:15:22.526622318+00:00 stderr F I1212 16:15:22.526560 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-monitoring-metrics, uid: 882936af-d591-411a-b504-7d49ece6e25c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.526622318+00:00 stderr F I1212 16:15:22.526596 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: whereabouts-flatfile-config, uid: 8db09e57-a56b-4b45-94d6-a2796260e9cd]" virtual=false 2025-12-12T16:15:22.528943634+00:00 stderr F I1212 16:15:22.528894 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: 
openshift-oauth-apiserver, uid: 842b50c1-3389-4ca9-880f-726a67f8b5b2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.528943634+00:00 stderr F I1212 16:15:22.528919 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-autoscaler-operator, uid: d2b3197c-3fdc-4e2f-a143-92ba96fc805f]" virtual=false 2025-12-12T16:15:22.533396122+00:00 stderr F I1212 16:15:22.533268 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-oauth-apiserver-sar, uid: 67d4f009-cc38-433f-8462-7b254bf70042]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.533396122+00:00 stderr F I1212 16:15:22.533309 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-monitoring-operator, uid: 0e5c0178-eb2c-4030-950d-7847f0fb7ea4]" virtual=false 2025-12-12T16:15:22.540578495+00:00 stderr F I1212 16:15:22.540521 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-oauth-server, uid: 6974b18b-698a-44c4-b91c-4986016ddc0e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.540578495+00:00 stderr F I1212 16:15:22.540552 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: adminnetworkpolicies.policy.networking.k8s.io, uid: 87d46d9b-cc3b-4f8c-b9fc-bb6ef4bbb955]" virtual=false 2025-12-12T16:15:22.545985395+00:00 stderr F I1212 16:15:22.545929 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/FlowSchema, namespace: , name: openshift-ovn-kubernetes, uid: 76336446-d881-4c62-80aa-a8abd361631e]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.545985395+00:00 stderr F I1212 16:15:22.545958 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-network-operator, uid: e15a63a7-52c5-44b1-b0d3-3eb520062810]" virtual=false 2025-12-12T16:15:22.549545311+00:00 stderr F I1212 16:15:22.549484 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicyBinding, namespace: , name: openshift-ingress-operator-gatewayapi-crd-admission, uid: 3927d6c5-f736-455c-89c7-629d8deb5834]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.549545311+00:00 
stderr F I1212 16:15:22.549519 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-samples-operator, uid: 7325fe91-95d0-4360-9d39-f878c0e5b0a3]" virtual=false 2025-12-12T16:15:22.555743350+00:00 stderr F I1212 16:15:22.555678 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicyBinding, namespace: , name: openshift-storage-policy-validation-binding, uid: 177a2f7d-d096-4fb9-9e5c-8c4f55729c5a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.555743350+00:00 stderr F I1212 16:15:22.555706 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-samples-operator-imageconfig-reader, uid: bcb2f6f8-f822-4c40-8b21-84cb4634069a]" virtual=false 2025-12-12T16:15:22.558877896+00:00 stderr F I1212 16:15:22.558802 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[admissionregistration.k8s.io/v1/ValidatingAdmissionPolicyBinding, namespace: , name: user-defined-networks-namespace-label-binding, uid: b35820f9-16e9-4c63-bcf1-aa21a6b55045]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.558877896+00:00 stderr F I1212 16:15:22.558850 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: adminpolicybasedexternalroutes.k8s.ovn.org, uid: bc1c1987-2ab3-4b3b-8a9d-c866725aa5d0]" virtual=false 2025-12-12T16:15:22.563424785+00:00 stderr F I1212 16:15:22.563368 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[flowcontrol.apiserver.k8s.io/v1/PriorityLevelConfiguration, namespace: , name: openshift-control-plane-operators, uid: 5235f471-c3f2-46fc-a34a-dc0d9c056f48]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.563424785+00:00 stderr F I1212 16:15:22.563395 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-samples-operator-proxy-reader, uid: e9888064-b19d-46b0-a18e-14f85962beb0]" virtual=false 2025-12-12T16:15:22.574348228+00:00 stderr F I1212 16:15:22.574248 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-autoscaler, uid: a124e7f8-3a72-4563-ade1-2b93ffbbf4b1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.574348228+00:00 stderr F I1212 16:15:22.574286 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: 
, name: alertingrules.monitoring.openshift.io, uid: c8a7adb5-5aa3-4f07-bb77-d2fa4a9f62ea]" virtual=false 2025-12-12T16:15:22.590167499+00:00 stderr F I1212 16:15:22.590051 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-kube-storage-version-migrator-operator, name: config, uid: 0f688556-c69f-4593-b345-7d4c88879b2a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.590167499+00:00 stderr F I1212 16:15:22.590110 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertmanagerconfigs.monitoring.coreos.com, uid: 25ba8d0f-7c32-4bec-9f8c-c86cff145eeb]" virtual=false 2025-12-12T16:15:22.603767237+00:00 stderr F I1212 16:15:22.603655 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-machine-config-operator, name: coreos-bootimages, uid: 5a7d5808-6b9d-44d5-b5dd-f49d306526e9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.603767237+00:00 stderr F I1212 16:15:22.603723 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertmanagers.monitoring.coreos.com, uid: f7fa5eed-3281-4e79-aab7-408e898e2345]" virtual=false 2025-12-12T16:15:22.611921383+00:00 stderr F I1212 16:15:22.611830 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-machine-config-operator, name: kube-rbac-proxy, uid: 6a3c3605-4908-48ca-85be-ba92a7f391f5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.611921383+00:00 stderr F I1212 16:15:22.611899 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertrelabelconfigs.monitoring.openshift.io, uid: 0e169ac1-f760-4483-9d70-ce979148354c]" virtual=false 2025-12-12T16:15:22.620031439+00:00 stderr F I1212 16:15:22.619938 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-machine-config-operator, name: machine-config-operator-images, uid: 745ac27e-f118-42ee-9c62-98a4539acedf]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.620031439+00:00 stderr F I1212 16:15:22.620006 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: apiservers.config.openshift.io, uid: 2d30434f-7494-4f21-a0e8-dbca87cebbc8]" virtual=false 2025-12-12T16:15:22.633134855+00:00 stderr F I1212 16:15:22.633073 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" 
item="[v1/ConfigMap, namespace: openshift-machine-config-operator, name: machine-config-osimageurl, uid: 329c3a31-e4b6-40dc-8f8b-5f52451f0d5d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.633134855+00:00 stderr F I1212 16:15:22.633117 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: authentications.config.openshift.io, uid: 19487e8b-4ab7-4b8b-adad-1e83b5559246]" virtual=false 2025-12-12T16:15:22.637013908+00:00 stderr F I1212 16:15:22.636950 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-marketplace, name: marketplace-trusted-ca, uid: 0eeb115f-95d1-4d38-85be-287d6bfd3afd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.637013908+00:00 stderr F I1212 16:15:22.636991 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: authentications.operator.openshift.io, uid: f4f34020-1d5a-4b7b-ba68-36790236dd3e]" virtual=false 2025-12-12T16:15:22.643339150+00:00 stderr F I1212 16:15:22.643234 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-monitoring, name: telemetry-config, uid: a1a27520-d042-4480-9b9e-33ba68422208]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.643339150+00:00 stderr F I1212 16:15:22.643291 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: baselineadminnetworkpolicies.policy.networking.k8s.io, uid: 6116d4d3-7fac-43fa-aa97-511ad4327c4f]" virtual=false 2025-12-12T16:15:22.646906266+00:00 stderr F I1212 16:15:22.646853 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: cni-copy-resources, uid: 29088134-82cd-49c9-891c-3834b37dd6f8]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.646906266+00:00 stderr F I1212 16:15:22.646884 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-version-operator, uid: f0cb5797-0be4-4eb6-8b26-3d7ec65640c0]" virtual=false 2025-12-12T16:15:22.648705560+00:00 stderr F I1212 16:15:22.648665 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: default-cni-sysctl-allowlist, uid: 5e80d595-c63d-44bf-914d-7f260b4d19dd]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 
2025-12-12T16:15:22.648705560+00:00 stderr F I1212 16:15:22.648692 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: console, uid: d6097847-d15a-458d-b448-c4ded0a76663]" virtual=false 2025-12-12T16:15:22.652683365+00:00 stderr F I1212 16:15:22.652641 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: multus-daemon-config, uid: 46bee54e-7954-481f-b845-7ef5fd73b0e0]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.652683365+00:00 stderr F I1212 16:15:22.652670 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: console-auth-delegator, uid: 8bb02142-6d0d-4a7f-bada-dc00ec93889b]" virtual=false 2025-12-12T16:15:22.659793067+00:00 stderr F I1212 16:15:22.659724 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-multus, name: whereabouts-flatfile-config, uid: 8db09e57-a56b-4b45-94d6-a2796260e9cd]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.659793067+00:00 stderr F I1212 16:15:22.659763 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: console-extensions-reader, uid: 5d21b33a-75bb-427e-a582-1be10ba2fb68]" virtual=false 2025-12-12T16:15:22.663796103+00:00 stderr F I1212 16:15:22.663738 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-autoscaler-operator, uid: d2b3197c-3fdc-4e2f-a143-92ba96fc805f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.663796103+00:00 stderr F I1212 16:15:22.663768 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: console-operator, uid: 262498fa-49f4-4958-bd59-987dc46ab208]" virtual=false 2025-12-12T16:15:22.665797291+00:00 stderr F I1212 16:15:22.665746 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-monitoring-operator, uid: 0e5c0178-eb2c-4030-950d-7847f0fb7ea4]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.665797291+00:00 stderr F I1212 16:15:22.665771 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: console-operator-auth-delegator, uid: 562c5b2d-97e4-41e8-82ba-efea4c1ae8a3]" virtual=false 2025-12-12T16:15:22.675476495+00:00 stderr 
F I1212 16:15:22.675403 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: adminnetworkpolicies.policy.networking.k8s.io, uid: 87d46d9b-cc3b-4f8c-b9fc-bb6ef4bbb955]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.675476495+00:00 stderr F I1212 16:15:22.675446 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: control-plane-machine-set-operator, uid: 9323ff6c-6ed3-4922-a4c6-00d20aed02df]" virtual=false 2025-12-12T16:15:22.684260156+00:00 stderr F I1212 16:15:22.684162 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-network-operator, uid: e15a63a7-52c5-44b1-b0d3-3eb520062810]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.684260156+00:00 stderr F I1212 16:15:22.684221 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: csi-snapshot-controller-operator-clusterrole, uid: e2fe778c-1ba2-499f-8bb1-3a6673066906]" virtual=false 2025-12-12T16:15:22.686609073+00:00 stderr F I1212 16:15:22.686532 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-samples-operator, uid: 7325fe91-95d0-4360-9d39-f878c0e5b0a3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.686626593+00:00 stderr F I1212 16:15:22.686595 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: custom-account-openshift-machine-config-operator, uid: 19b38dbe-7b76-4a58-9d39-08193fa24a58]" virtual=false 2025-12-12T16:15:22.689120913+00:00 stderr F I1212 16:15:22.689068 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-samples-operator-imageconfig-reader, uid: bcb2f6f8-f822-4c40-8b21-84cb4634069a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.689120913+00:00 stderr F I1212 16:15:22.689098 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: default-account-cluster-image-registry-operator, uid: 2d9b230e-fffb-45d4-af75-5d566fa0730d]" virtual=false 2025-12-12T16:15:22.693090879+00:00 stderr F I1212 16:15:22.693033 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" 
item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: adminpolicybasedexternalroutes.k8s.ovn.org, uid: bc1c1987-2ab3-4b3b-8a9d-c866725aa5d0]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.693090879+00:00 stderr F I1212 16:15:22.693062 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: helm-chartrepos-view, uid: 1a2aa76e-9e40-4b24-beda-1e6b18b370b6]" virtual=false 2025-12-12T16:15:22.696137112+00:00 stderr F I1212 16:15:22.696080 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-samples-operator-proxy-reader, uid: e9888064-b19d-46b0-a18e-14f85962beb0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.696137112+00:00 stderr F I1212 16:15:22.696116 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: machine-api-controllers, uid: 060ebb4f-8c4e-4977-a15f-ce1e43d9742b]" virtual=false 2025-12-12T16:15:22.705867787+00:00 stderr F I1212 16:15:22.705797 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertingrules.monitoring.openshift.io, uid: c8a7adb5-5aa3-4f07-bb77-d2fa4a9f62ea]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.705867787+00:00 stderr F I1212 16:15:22.705837 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: machine-api-controllers-baremetal, uid: 3d34ff23-98c5-4cd1-87f9-9f1e2ee80f7d]" virtual=false 2025-12-12T16:15:22.735988332+00:00 stderr F I1212 16:15:22.735872 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertrelabelconfigs.monitoring.openshift.io, uid: 0e169ac1-f760-4483-9d70-ce979148354c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.735988332+00:00 stderr F I1212 16:15:22.735942 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: machine-api-operator, uid: 5d642bd0-a312-4de3-9782-20ad763eab43]" virtual=false 2025-12-12T16:15:22.739637760+00:00 stderr F I1212 16:15:22.739556 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertmanagers.monitoring.coreos.com, uid: f7fa5eed-3281-4e79-aab7-408e898e2345]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.739637760+00:00 stderr F I1212 16:15:22.739608 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: machine-api-operator-ext-remediation, uid: ab2869e4-518a-412f-9009-04a1e52ca6af]" virtual=false 2025-12-12T16:15:22.746571687+00:00 stderr F I1212 16:15:22.746509 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: apiservers.config.openshift.io, uid: 2d30434f-7494-4f21-a0e8-dbca87cebbc8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.746571687+00:00 stderr F I1212 16:15:22.746556 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: marketplace-operator, uid: 08b74f39-863c-402b-a0d7-09bb37c4ba18]" virtual=false 2025-12-12T16:15:22.763279940+00:00 stderr F I1212 16:15:22.763167 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: authentications.config.openshift.io, uid: 19487e8b-4ab7-4b8b-adad-1e83b5559246]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.763279940+00:00 stderr F I1212 16:15:22.763239 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: metrics-daemon-sa-rolebinding, uid: 4c34a54f-0d8d-4ff2-be71-472375b8b859]" virtual=false 2025-12-12T16:15:22.765985775+00:00 stderr F I1212 16:15:22.765924 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: authentications.operator.openshift.io, uid: f4f34020-1d5a-4b7b-ba68-36790236dd3e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.765985775+00:00 stderr F I1212 16:15:22.765963 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-admission-controller-webhook, uid: 545e1457-64e7-48f5-b19a-0ddbbb2165b1]" virtual=false 2025-12-12T16:15:22.775628687+00:00 stderr F I1212 16:15:22.775520 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: baselineadminnetworkpolicies.policy.networking.k8s.io, uid: 6116d4d3-7fac-43fa-aa97-511ad4327c4f]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.775628687+00:00 stderr F I1212 16:15:22.775594 1 
garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-ancillary-tools, uid: 920cac91-1e97-4b78-9407-de7abd17435c]" virtual=false 2025-12-12T16:15:22.779092781+00:00 stderr F I1212 16:15:22.779005 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: cluster-version-operator, uid: f0cb5797-0be4-4eb6-8b26-3d7ec65640c0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.779092781+00:00 stderr F I1212 16:15:22.779077 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-cluster-readers, uid: f90bb80b-8164-47fa-a8d7-2f339bb24054]" virtual=false 2025-12-12T16:15:22.781128400+00:00 stderr F I1212 16:15:22.781076 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: console, uid: d6097847-d15a-458d-b448-c4ded0a76663]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.781128400+00:00 stderr F I1212 16:15:22.781112 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-group, uid: 55936cb0-8925-4d56-b4ed-afd17c252ccc]" virtual=false 2025-12-12T16:15:22.782792430+00:00 stderr F I1212 16:15:22.782718 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: console-auth-delegator, uid: 8bb02142-6d0d-4a7f-bada-dc00ec93889b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.782792430+00:00 stderr F I1212 16:15:22.782769 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-transient, uid: bdf36000-8acb-4bfb-9410-ceb68275d539]" virtual=false 2025-12-12T16:15:22.787104484+00:00 stderr F I1212 16:15:22.787040 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: alertmanagerconfigs.monitoring.coreos.com, uid: 25ba8d0f-7c32-4bec-9f8c-c86cff145eeb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.787104484+00:00 stderr F I1212 16:15:22.787076 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-whereabouts, uid: 84084398-4ced-4b04-aa3c-896feffb01cb]" virtual=false 2025-12-12T16:15:22.790168868+00:00 stderr F I1212 16:15:22.790107 1 garbagecollector.go:567] "item has at least one existing owner, 
will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: console-extensions-reader, uid: 5d21b33a-75bb-427e-a582-1be10ba2fb68]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.790168868+00:00 stderr F I1212 16:15:22.790141 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: network-diagnostics, uid: 99939742-bf08-407c-8e6b-b481dd120b5e]" virtual=false 2025-12-12T16:15:22.794033321+00:00 stderr F I1212 16:15:22.793972 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: console-operator, uid: 262498fa-49f4-4958-bd59-987dc46ab208]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.794033321+00:00 stderr F I1212 16:15:22.794004 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: network-node-identity, uid: 68a4cac1-791b-476a-b674-8f1dc0ccb944]" virtual=false 2025-12-12T16:15:22.796514740+00:00 stderr F I1212 16:15:22.796452 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: console-operator-auth-delegator, uid: 562c5b2d-97e4-41e8-82ba-efea4c1ae8a3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.796514740+00:00 stderr F I1212 16:15:22.796489 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: olm-operator-binding-openshift-operator-lifecycle-manager, uid: 2d9724b0-5df3-482d-a79f-d1bc7ae61999]" virtual=false 2025-12-12T16:15:22.799500493+00:00 stderr F I1212 16:15:22.799443 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: control-plane-machine-set-operator, uid: 9323ff6c-6ed3-4922-a4c6-00d20aed02df]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.799500493+00:00 stderr F I1212 16:15:22.799485 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: builds.config.openshift.io, uid: 850b051a-294e-45c2-9679-557867afe334]" virtual=false 2025-12-12T16:15:22.810266622+00:00 stderr F I1212 16:15:22.810147 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: csi-snapshot-controller-operator-clusterrole, uid: e2fe778c-1ba2-499f-8bb1-3a6673066906]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.810266622+00:00 stderr F I1212 16:15:22.810226 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: catalogsources.operators.coreos.com, uid: 7cb8b5cc-67d6-44a6-aa70-3542d4901e4f]" virtual=false 2025-12-12T16:15:22.816813009+00:00 stderr F I1212 16:15:22.816719 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: custom-account-openshift-machine-config-operator, uid: 19b38dbe-7b76-4a58-9d39-08193fa24a58]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.816813009+00:00 stderr F I1212 16:15:22.816751 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-console, name: networking-console-plugin, uid: 3aae3443-66e1-496f-98c2-65b5a65de071]" virtual=false 2025-12-12T16:15:22.819069464+00:00 stderr F I1212 16:15:22.819023 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: default-account-cluster-image-registry-operator, uid: 2d9b230e-fffb-45d4-af75-5d566fa0730d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.819069464+00:00 stderr F I1212 16:15:22.819055 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-dns-operator, uid: 1c6b97c1-bc64-472b-ab1a-fcf21b5f07f5]" virtual=false 2025-12-12T16:15:22.822831095+00:00 stderr F I1212 16:15:22.822775 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: helm-chartrepos-view, uid: 1a2aa76e-9e40-4b24-beda-1e6b18b370b6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:22.822831095+00:00 stderr F I1212 16:15:22.822801 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ingress-operator, uid: 79644b50-b6ff-4e91-869b-4b44ba8ea886]" virtual=false 2025-12-12T16:15:22.826793670+00:00 stderr F I1212 16:15:22.826742 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: machine-api-controllers, uid: 060ebb4f-8c4e-4977-a15f-ce1e43d9742b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.826793670+00:00 stderr F I1212 16:15:22.826771 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-iptables-alerter, uid: 7689b5d9-8b5c-4cf9-b2f1-14e7d8e26f1d]" virtual=false 2025-12-12T16:15:22.836441802+00:00 stderr F I1212 16:15:22.836343 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: machine-api-controllers-baremetal, uid: 3d34ff23-98c5-4cd1-87f9-9f1e2ee80f7d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.836566455+00:00 stderr F I1212 16:15:22.836505 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ovn-kubernetes-control-plane-limited, uid: 2bbdc40e-0150-42d0-aa51-03a941b0dd1c]" virtual=false 2025-12-12T16:15:22.870393580+00:00 stderr F I1212 16:15:22.870326 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: machine-api-operator, uid: 5d642bd0-a312-4de3-9782-20ad763eab43]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.870481242+00:00 stderr F I1212 16:15:22.870451 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterautoscalers.autoscaling.openshift.io, uid: 59ca3658-7574-46b0-b397-96f64de05f29]" virtual=false 2025-12-12T16:15:22.872322217+00:00 stderr F I1212 16:15:22.872292 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: machine-api-operator-ext-remediation, uid: ab2869e4-518a-412f-9009-04a1e52ca6af]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.872390148+00:00 stderr F I1212 16:15:22.872365 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ovn-kubernetes-node-identity-limited, uid: c9164259-3318-4e4b-af57-db18e27bc239]" virtual=false 2025-12-12T16:15:22.880334430+00:00 stderr F I1212 16:15:22.880286 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: marketplace-operator, uid: 08b74f39-863c-402b-a0d7-09bb37c4ba18]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.880419112+00:00 stderr F I1212 16:15:22.880395 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ovn-kubernetes-node-kube-rbac-proxy, uid: ed2f03f2-1418-497a-b850-35c5a0081d19]" virtual=false 2025-12-12T16:15:22.895598408+00:00 stderr F I1212 16:15:22.895525 1 
garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: metrics-daemon-sa-rolebinding, uid: 4c34a54f-0d8d-4ff2-be71-472375b8b859]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.895598408+00:00 stderr F I1212 16:15:22.895567 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: prometheus-k8s-scheduler-resources, uid: 2a047613-f058-4b89-9c8a-aa4452e87b34]" virtual=false 2025-12-12T16:15:22.899145633+00:00 stderr F I1212 16:15:22.899059 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-admission-controller-webhook, uid: 545e1457-64e7-48f5-b19a-0ddbbb2165b1]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.899145633+00:00 stderr F I1212 16:15:22.899089 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: registry-monitoring, uid: c5476be7-8934-4cea-8a20-25a18f96ab8d]" virtual=false 2025-12-12T16:15:22.907326080+00:00 stderr F I1212 16:15:22.907263 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-ancillary-tools, uid: 920cac91-1e97-4b78-9407-de7abd17435c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.907326080+00:00 stderr F I1212 16:15:22.907300 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:controller:machine-approver, uid: e139a108-cc6e-414b-885a-0fa2e4244003]" virtual=false 2025-12-12T16:15:22.909824300+00:00 stderr F I1212 16:15:22.909687 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-cluster-readers, uid: f90bb80b-8164-47fa-a8d7-2f339bb24054]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.909824300+00:00 stderr F I1212 16:15:22.909719 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:authentication, uid: 387d2ef9-6d33-4915-bad6-45ee9e63114d]" virtual=false 2025-12-12T16:15:22.912692329+00:00 stderr F I1212 16:15:22.912649 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: 
multus-group, uid: 55936cb0-8925-4d56-b4ed-afd17c252ccc]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.912692329+00:00 stderr F I1212 16:15:22.912678 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:cluster-kube-scheduler-operator, uid: d9fc97ff-84a5-45f2-9888-fcdb87dab225]" virtual=false 2025-12-12T16:15:22.916265065+00:00 stderr F I1212 16:15:22.916233 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-transient, uid: bdf36000-8acb-4bfb-9410-ceb68275d539]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.916285866+00:00 stderr F I1212 16:15:22.916259 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:etcd-operator, uid: bb4836d5-e8d4-4034-8f1c-796dbf28b3ed]" virtual=false 2025-12-12T16:15:22.919683928+00:00 stderr F I1212 16:15:22.919650 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: multus-whereabouts, uid: 84084398-4ced-4b04-aa3c-896feffb01cb]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.919701398+00:00 stderr F I1212 16:15:22.919678 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:kube-apiserver-operator, uid: 2594d8b5-792c-417c-b46a-ff938582365b]" virtual=false 2025-12-12T16:15:22.923579442+00:00 stderr F I1212 16:15:22.923488 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: network-diagnostics, uid: 99939742-bf08-407c-8e6b-b481dd120b5e]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.923579442+00:00 stderr F I1212 16:15:22.923517 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:kube-controller-manager-operator, uid: ff5f1c9c-adc9-4b1c-8ff4-19dd76e4f721]" virtual=false 2025-12-12T16:15:22.926252456+00:00 stderr F I1212 16:15:22.926209 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: network-node-identity, uid: 68a4cac1-791b-476a-b674-8f1dc0ccb944]" 
owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.926252456+00:00 stderr F I1212 16:15:22.926238 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:kube-storage-version-migrator-operator, uid: e05e9077-0af7-4dd5-ac74-d84175f2bcb7]" virtual=false 2025-12-12T16:15:22.932268001+00:00 stderr F I1212 16:15:22.932191 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: olm-operator-binding-openshift-operator-lifecycle-manager, uid: 2d9724b0-5df3-482d-a79f-d1bc7ae61999]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.932285651+00:00 stderr F I1212 16:15:22.932256 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:openshift-apiserver-operator, uid: a655e0b7-20a3-4719-9b9c-7f959a02212e]" virtual=false 2025-12-12T16:15:22.933386008+00:00 stderr F I1212 16:15:22.933335 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: builds.config.openshift.io, uid: 850b051a-294e-45c2-9679-557867afe334]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.933386008+00:00 stderr F I1212 16:15:22.933370 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:openshift-config-operator, uid: 7cea5f93-6c74-4224-9f7d-c1df49991c1e]" virtual=false 2025-12-12T16:15:22.942161219+00:00 stderr F I1212 16:15:22.942094 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: catalogsources.operators.coreos.com, uid: 7cb8b5cc-67d6-44a6-aa70-3542d4901e4f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.942199790+00:00 stderr F I1212 16:15:22.942156 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:openshift-controller-manager-operator, uid: 22384a9d-e561-4ab9-b2f7-ac4182cae46b]" virtual=false 2025-12-12T16:15:22.952226372+00:00 stderr F I1212 16:15:22.950211 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-console, name: networking-console-plugin, uid: 3aae3443-66e1-496f-98c2-65b5a65de071]" 
owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.952226372+00:00 stderr F I1212 16:15:22.950277 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:service-ca-operator, uid: 3c6c642f-b5c7-4491-82a9-fddee98f83d3]" virtual=false 2025-12-12T16:15:22.954568548+00:00 stderr F I1212 16:15:22.954499 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-dns-operator, uid: 1c6b97c1-bc64-472b-ab1a-fcf21b5f07f5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.954593739+00:00 stderr F I1212 16:15:22.954562 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:scc:restricted-v2, uid: 713a42f6-b069-4a3f-8409-2147ccd5b1ab]" virtual=false 2025-12-12T16:15:22.959904627+00:00 stderr F I1212 16:15:22.959850 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-iptables-alerter, uid: 7689b5d9-8b5c-4cf9-b2f1-14e7d8e26f1d]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.959973078+00:00 stderr F I1212 16:15:22.959947 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clustercsidrivers.operator.openshift.io, uid: 866b7fe3-462a-493e-b554-1ffbb3eed075]" virtual=false 2025-12-12T16:15:22.960680975+00:00 stderr F I1212 16:15:22.960502 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ingress-operator, uid: 79644b50-b6ff-4e91-869b-4b44ba8ea886]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:22.960680975+00:00 stderr F I1212 16:15:22.960567 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterimagepolicies.config.openshift.io, uid: 92736c74-712b-48df-a539-2808f4039e82]" virtual=false 2025-12-12T16:15:22.969483268+00:00 stderr F I1212 16:15:22.969454 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ovn-kubernetes-control-plane-limited, uid: 2bbdc40e-0150-42d0-aa51-03a941b0dd1c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:22.969531939+00:00 
stderr F I1212 16:15:22.969516 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusteroperators.config.openshift.io, uid: 6cfd918c-6640-483a-9c2b-3fcf16d219ed]" virtual=false 2025-12-12T16:15:23.002269937+00:00 stderr F I1212 16:15:23.002216 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterautoscalers.autoscaling.openshift.io, uid: 59ca3658-7574-46b0-b397-96f64de05f29]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.002269937+00:00 stderr F I1212 16:15:23.002256 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterresourcequotas.quota.openshift.io, uid: cb289573-c81f-4015-9894-43e9985e6fd8]" virtual=false 2025-12-12T16:15:23.006341636+00:00 stderr F I1212 16:15:23.006268 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ovn-kubernetes-node-identity-limited, uid: c9164259-3318-4e4b-af57-db18e27bc239]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.006366686+00:00 stderr F I1212 16:15:23.006334 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterserviceversions.operators.coreos.com, uid: c24f00d0-71ff-49f0-87ad-3270262884bd]" virtual=false 2025-12-12T16:15:23.012720859+00:00 stderr F I1212 16:15:23.012652 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: openshift-ovn-kubernetes-node-kube-rbac-proxy, uid: ed2f03f2-1418-497a-b850-35c5a0081d19]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.012739650+00:00 stderr F I1212 16:15:23.012717 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusteruserdefinednetworks.k8s.ovn.org, uid: 361e91d3-1d07-4e74-91e4-1c870c3e6e5e]" virtual=false 2025-12-12T16:15:23.030101988+00:00 stderr F I1212 16:15:23.030009 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: prometheus-k8s-scheduler-resources, uid: 2a047613-f058-4b89-9c8a-aa4452e87b34]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.030234551+00:00 stderr F I1212 16:15:23.030206 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterversions.config.openshift.io, uid: e2de9bbd-f707-49ff-9369-c533d98d8eaf]" virtual=false 2025-12-12T16:15:23.033961301+00:00 stderr F I1212 16:15:23.033911 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: registry-monitoring, uid: c5476be7-8934-4cea-8a20-25a18f96ab8d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.034024772+00:00 stderr F I1212 16:15:23.034006 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: configs.imageregistry.operator.openshift.io, uid: ccf6a361-a762-4e7c-aa50-8018ebbb76f0]" virtual=false 2025-12-12T16:15:23.039918474+00:00 stderr F I1212 16:15:23.039855 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:controller:machine-approver, uid: e139a108-cc6e-414b-885a-0fa2e4244003]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.039918474+00:00 stderr F I1212 16:15:23.039900 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: configs.operator.openshift.io, uid: 11c873bc-ca30-4569-8f6f-4e5e52b85bd7]" virtual=false 2025-12-12T16:15:23.042383744+00:00 stderr F I1212 16:15:23.042336 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:authentication, uid: 387d2ef9-6d33-4915-bad6-45ee9e63114d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.042383744+00:00 stderr F I1212 16:15:23.042367 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: configs.samples.operator.openshift.io, uid: 5ef022d1-0f85-4a07-a4a7-befb36614ee8]" virtual=false 2025-12-12T16:15:23.046035452+00:00 stderr F I1212 16:15:23.045931 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:cluster-kube-scheduler-operator, uid: d9fc97ff-84a5-45f2-9888-fcdb87dab225]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.046035452+00:00 stderr F I1212 16:15:23.045973 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consoleclidownloads.console.openshift.io, uid: ee8b7ce4-dedc-4861-8c84-50cd7b9d9e32]" virtual=false 2025-12-12T16:15:23.049567047+00:00 stderr F I1212 
16:15:23.049500 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:etcd-operator, uid: bb4836d5-e8d4-4034-8f1c-796dbf28b3ed]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.049587247+00:00 stderr F I1212 16:15:23.049557 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consoleexternalloglinks.console.openshift.io, uid: 462bfcf1-c3df-45ca-825c-6c0175ec71f0]" virtual=false 2025-12-12T16:15:23.052920658+00:00 stderr F I1212 16:15:23.052885 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:kube-apiserver-operator, uid: 2594d8b5-792c-417c-b46a-ff938582365b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.052950598+00:00 stderr F I1212 16:15:23.052911 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-node-identity, name: ovnkube-identity-cm, uid: 69be1ac7-0211-4526-80ec-483251460038]" virtual=false 2025-12-12T16:15:23.056297929+00:00 stderr F I1212 16:15:23.056231 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:kube-controller-manager-operator, uid: ff5f1c9c-adc9-4b1c-8ff4-19dd76e4f721]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.056297929+00:00 stderr F I1212 16:15:23.056263 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consolelinks.console.openshift.io, uid: e2ab037a-a6e5-4349-88f2-0031b99c6012]" virtual=false 2025-12-12T16:15:23.059455405+00:00 stderr F I1212 16:15:23.059406 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:kube-storage-version-migrator-operator, uid: e05e9077-0af7-4dd5-ac74-d84175f2bcb7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.059455405+00:00 stderr F I1212 16:15:23.059437 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consolenotifications.console.openshift.io, uid: 87102751-c66c-4fdb-9fed-0589761cf69f]" virtual=false 2025-12-12T16:15:23.062671803+00:00 stderr F I1212 16:15:23.062613 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" 
item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:openshift-apiserver-operator, uid: a655e0b7-20a3-4719-9b9c-7f959a02212e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.062671803+00:00 stderr F I1212 16:15:23.062644 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consoleplugins.console.openshift.io, uid: 35a721e7-c7f7-4f1b-9d69-cd059fa5477c]" virtual=false 2025-12-12T16:15:23.066108185+00:00 stderr F I1212 16:15:23.066055 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:openshift-config-operator, uid: 7cea5f93-6c74-4224-9f7d-c1df49991c1e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.066108185+00:00 stderr F I1212 16:15:23.066091 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consolequickstarts.console.openshift.io, uid: 72e3b8d1-19ac-4e57-9ec8-d67f176edcfd]" virtual=false 2025-12-12T16:15:23.076541837+00:00 stderr F I1212 16:15:23.076476 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:openshift-controller-manager-operator, uid: 22384a9d-e561-4ab9-b2f7-ac4182cae46b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.076604708+00:00 stderr F I1212 16:15:23.076586 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consoles.config.openshift.io, uid: 37b9304a-bf14-4b91-9e5e-2b973e67cf9a]" virtual=false 2025-12-12T16:15:23.083439473+00:00 stderr F I1212 16:15:23.083393 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:operator:service-ca-operator, uid: 3c6c642f-b5c7-4491-82a9-fddee98f83d3]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.083494734+00:00 stderr F I1212 16:15:23.083478 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consoles.operator.openshift.io, uid: 525020c2-726d-404b-b813-353f86301bbc]" virtual=false 2025-12-12T16:15:23.085663536+00:00 stderr F I1212 16:15:23.085618 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[rbac.authorization.k8s.io/v1/ClusterRoleBinding, namespace: , name: system:openshift:scc:restricted-v2, uid: 713a42f6-b069-4a3f-8409-2147ccd5b1ab]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.085663536+00:00 stderr F I1212 16:15:23.085643 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consolesamples.console.openshift.io, uid: c19ab109-c54c-4f92-b6a9-f1ed849ae554]" virtual=false 2025-12-12T16:15:23.089262563+00:00 stderr F I1212 16:15:23.089228 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clustercsidrivers.operator.openshift.io, uid: 866b7fe3-462a-493e-b554-1ffbb3eed075]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.089314284+00:00 stderr F I1212 16:15:23.089299 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consoleyamlsamples.console.openshift.io, uid: cf580cfa-567d-40c5-94c4-e0eed4982630]" virtual=false 2025-12-12T16:15:23.093227699+00:00 stderr F I1212 16:15:23.093136 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterimagepolicies.config.openshift.io, uid: 92736c74-712b-48df-a539-2808f4039e82]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.093252579+00:00 stderr F I1212 16:15:23.093216 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: containerruntimeconfigs.machineconfiguration.openshift.io, uid: 7dd5644d-a7c7-4ba9-8841-84f5beaf7a3d]" virtual=false 2025-12-12T16:15:23.103224060+00:00 stderr F I1212 16:15:23.103141 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusteroperators.config.openshift.io, uid: 6cfd918c-6640-483a-9c2b-3fcf16d219ed]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.103308632+00:00 stderr F I1212 16:15:23.103283 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: controllerconfigs.machineconfiguration.openshift.io, uid: 3ee78e08-4826-4f38-a94e-d34babad29eb]" virtual=false 2025-12-12T16:15:23.136352918+00:00 stderr F I1212 16:15:23.136283 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterresourcequotas.quota.openshift.io, uid: cb289573-c81f-4015-9894-43e9985e6fd8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.136433910+00:00 
stderr F I1212 16:15:23.136414 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: controlplanemachinesets.machine.openshift.io, uid: d195a2dc-5ebd-41a4-a85a-85d5e4262952]" virtual=false 2025-12-12T16:15:23.142684920+00:00 stderr F I1212 16:15:23.142644 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusteruserdefinednetworks.k8s.ovn.org, uid: 361e91d3-1d07-4e74-91e4-1c870c3e6e5e]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.142745732+00:00 stderr F I1212 16:15:23.142728 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: csisnapshotcontrollers.operator.openshift.io, uid: 63ca733d-f087-471c-9dad-1ab8ad42d7c6]" virtual=false 2025-12-12T16:15:23.146478392+00:00 stderr F I1212 16:15:23.146427 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:15:23.153839969+00:00 stderr F I1212 16:15:23.153762 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterserviceversions.operators.coreos.com, uid: c24f00d0-71ff-49f0-87ad-3270262884bd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.153839969+00:00 stderr F I1212 16:15:23.153817 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: dnses.config.openshift.io, uid: 9987e5da-908f-4faa-930d-d78df06e1e56]" virtual=false 2025-12-12T16:15:23.164722491+00:00 stderr F I1212 16:15:23.164643 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: clusterversions.config.openshift.io, uid: e2de9bbd-f707-49ff-9369-c533d98d8eaf]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.164825444+00:00 stderr F I1212 16:15:23.164798 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: dnses.operator.openshift.io, uid: 878d4725-ccfa-4700-a3b5-bbd62136d22d]" virtual=false 2025-12-12T16:15:23.166662608+00:00 stderr F I1212 16:15:23.166627 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: configs.imageregistry.operator.openshift.io, uid: ccf6a361-a762-4e7c-aa50-8018ebbb76f0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.166716729+00:00 stderr F I1212 
16:15:23.166696 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: dnsrecords.ingress.operator.openshift.io, uid: 348a8296-6e3d-46ea-abd1-cae46477ce67]" virtual=false 2025-12-12T16:15:23.173097473+00:00 stderr F I1212 16:15:23.172991 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: configs.operator.openshift.io, uid: 11c873bc-ca30-4569-8f6f-4e5e52b85bd7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.173130794+00:00 stderr F I1212 16:15:23.173094 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: egressfirewalls.k8s.ovn.org, uid: ffe6f352-fb5a-4e17-9e91-697bfb9c95af]" virtual=false 2025-12-12T16:15:23.175415559+00:00 stderr F I1212 16:15:23.175381 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: configs.samples.operator.openshift.io, uid: 5ef022d1-0f85-4a07-a4a7-befb36614ee8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.175474280+00:00 stderr F I1212 16:15:23.175452 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-operator, name: applied-cluster, uid: d45e81f6-f612-465e-aeaa-ac35b5a9786a]" virtual=false 2025-12-12T16:15:23.179932037+00:00 stderr F I1212 16:15:23.179894 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consoleclidownloads.console.openshift.io, uid: ee8b7ce4-dedc-4861-8c84-50cd7b9d9e32]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.179988269+00:00 stderr F I1212 16:15:23.179970 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: egressips.k8s.ovn.org, uid: de03b74b-158a-4480-a93b-44a428859320]" virtual=false 2025-12-12T16:15:23.182473049+00:00 stderr F I1212 16:15:23.182398 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consoleexternalloglinks.console.openshift.io, uid: 462bfcf1-c3df-45ca-825c-6c0175ec71f0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.182473049+00:00 stderr F I1212 16:15:23.182458 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: egressqoses.k8s.ovn.org, uid: 6012b40c-c7ad-4b14-8312-982944731211]" virtual=false 
2025-12-12T16:15:23.185229235+00:00 stderr F I1212 16:15:23.185188 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-node-identity, name: ovnkube-identity-cm, uid: 69be1ac7-0211-4526-80ec-483251460038]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.185229235+00:00 stderr F I1212 16:15:23.185216 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: egressrouters.network.operator.openshift.io, uid: a7408cdd-0e3a-4ddc-8751-cd603866ca5f]" virtual=false 2025-12-12T16:15:23.189113069+00:00 stderr F I1212 16:15:23.189073 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consolelinks.console.openshift.io, uid: e2ab037a-a6e5-4349-88f2-0031b99c6012]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.189113069+00:00 stderr F I1212 16:15:23.189098 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: egressservices.k8s.ovn.org, uid: b69783ee-7862-4364-816f-6e734c9cde3c]" virtual=false 2025-12-12T16:15:23.191950687+00:00 stderr F I1212 16:15:23.191914 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consolenotifications.console.openshift.io, uid: 87102751-c66c-4fdb-9fed-0589761cf69f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.191950687+00:00 stderr F I1212 16:15:23.191937 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: etcds.operator.openshift.io, uid: e0efca0b-7c4e-411a-be11-deff35d5c346]" virtual=false 2025-12-12T16:15:23.198290380+00:00 stderr F I1212 16:15:23.198219 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consoleplugins.console.openshift.io, uid: 35a721e7-c7f7-4f1b-9d69-cd059fa5477c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.198290380+00:00 stderr F I1212 16:15:23.198263 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: featuregates.config.openshift.io, uid: eadc8d2d-27ca-4258-ad18-eac95c3fb58c]" virtual=false 2025-12-12T16:15:23.200494093+00:00 stderr F I1212 16:15:23.200445 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, 
namespace: , name: consolequickstarts.console.openshift.io, uid: 72e3b8d1-19ac-4e57-9ec8-d67f176edcfd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.200559624+00:00 stderr F I1212 16:15:23.200541 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: helmchartrepositories.helm.openshift.io, uid: d24d8b1d-992d-4494-8556-78b671c59ba0]" virtual=false 2025-12-12T16:15:23.206327053+00:00 stderr F I1212 16:15:23.206244 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consoles.config.openshift.io, uid: 37b9304a-bf14-4b91-9e5e-2b973e67cf9a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.206327053+00:00 stderr F I1212 16:15:23.206291 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: imagecontentpolicies.config.openshift.io, uid: 1892dff0-d452-48ae-9476-292d8a8e4980]" virtual=false 2025-12-12T16:15:23.212349729+00:00 stderr F I1212 16:15:23.212281 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consoles.operator.openshift.io, uid: 525020c2-726d-404b-b813-353f86301bbc]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.212454811+00:00 stderr F I1212 16:15:23.212436 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: imagecontentsourcepolicies.operator.openshift.io, uid: a2528cbc-6de5-47da-905d-4a59d451737d]" virtual=false 2025-12-12T16:15:23.215321070+00:00 stderr F I1212 16:15:23.215293 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consolesamples.console.openshift.io, uid: c19ab109-c54c-4f92-b6a9-f1ed849ae554]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.215365881+00:00 stderr F I1212 16:15:23.215351 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: imagedigestmirrorsets.config.openshift.io, uid: 236fe1ea-b8f1-4ffc-84b4-833493738def]" virtual=false 2025-12-12T16:15:23.225692580+00:00 stderr F I1212 16:15:23.225630 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: consoleyamlsamples.console.openshift.io, uid: cf580cfa-567d-40c5-94c4-e0eed4982630]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.225718421+00:00 stderr F I1212 16:15:23.225689 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: imagepolicies.config.openshift.io, uid: 07ca605c-7483-4fc3-aaf5-2f0daba64b49]" virtual=false 2025-12-12T16:15:23.225812003+00:00 stderr F I1212 16:15:23.225782 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: containerruntimeconfigs.machineconfiguration.openshift.io, uid: 7dd5644d-a7c7-4ba9-8841-84f5beaf7a3d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.225857374+00:00 stderr F I1212 16:15:23.225839 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: imagepruners.imageregistry.operator.openshift.io, uid: e11aa77a-fb0b-41eb-ba82-5de70dd65a8a]" virtual=false 2025-12-12T16:15:23.239336559+00:00 stderr F I1212 16:15:23.239292 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: controllerconfigs.machineconfiguration.openshift.io, uid: 3ee78e08-4826-4f38-a94e-d34babad29eb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.239396460+00:00 stderr F I1212 16:15:23.239376 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: images.config.openshift.io, uid: d0c2aef5-34ac-41eb-8d13-0e760d54bcb8]" virtual=false 2025-12-12T16:15:23.270058769+00:00 stderr F I1212 16:15:23.269955 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: controlplanemachinesets.machine.openshift.io, uid: d195a2dc-5ebd-41a4-a85a-85d5e4262952]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.270058769+00:00 stderr F I1212 16:15:23.270019 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: imagetagmirrorsets.config.openshift.io, uid: f10ab076-d8e4-4be9-aada-ef5b0b81c11d]" virtual=false 2025-12-12T16:15:23.276781751+00:00 stderr F I1212 16:15:23.276743 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: csisnapshotcontrollers.operator.openshift.io, uid: 63ca733d-f087-471c-9dad-1ab8ad42d7c6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:15:23.276804461+00:00 stderr F I1212 16:15:23.276774 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: infrastructures.config.openshift.io, uid: 3f267746-ce3c-4380-b9bd-b3946d192d20]" virtual=false 2025-12-12T16:15:23.285498221+00:00 stderr F I1212 16:15:23.285463 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: dnses.config.openshift.io, uid: 9987e5da-908f-4faa-930d-d78df06e1e56]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.285561752+00:00 stderr F I1212 16:15:23.285532 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: ingresscontrollers.operator.openshift.io, uid: b8edff74-844c-499b-92fe-bef4ea12825a]" virtual=false 2025-12-12T16:15:23.300995854+00:00 stderr F I1212 16:15:23.300898 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: dnses.operator.openshift.io, uid: 878d4725-ccfa-4700-a3b5-bbd62136d22d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.300995854+00:00 stderr F I1212 16:15:23.300977 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: ingresses.config.openshift.io, uid: 85bc6705-7780-4abd-b1f6-a316241256ed]" virtual=false 2025-12-12T16:15:23.311161509+00:00 stderr F I1212 16:15:23.311087 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: dnsrecords.ingress.operator.openshift.io, uid: 348a8296-6e3d-46ea-abd1-cae46477ce67]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.311211230+00:00 stderr F I1212 16:15:23.311151 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: installplans.operators.coreos.com, uid: f96d13c2-7d7b-43c5-b33e-701618c38a23]" virtual=false 2025-12-12T16:15:23.311690062+00:00 stderr F I1212 16:15:23.311646 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: egressfirewalls.k8s.ovn.org, uid: ffe6f352-fb5a-4e17-9e91-697bfb9c95af]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.311690062+00:00 stderr F I1212 16:15:23.311673 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: 
ipaddressclaims.ipam.cluster.x-k8s.io, uid: 13138fe7-629f-4cdb-a331-c5be33d73b5b]" virtual=false 2025-12-12T16:15:23.311798974+00:00 stderr F I1212 16:15:23.311763 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-operator, name: applied-cluster, uid: d45e81f6-f612-465e-aeaa-ac35b5a9786a]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.311798974+00:00 stderr F I1212 16:15:23.311788 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: ipaddresses.ipam.cluster.x-k8s.io, uid: 957f3610-2225-4550-a3f5-8dfaee8fa002]" virtual=false 2025-12-12T16:15:23.315168156+00:00 stderr F I1212 16:15:23.315105 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: egressips.k8s.ovn.org, uid: de03b74b-158a-4480-a93b-44a428859320]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.315168156+00:00 stderr F I1212 16:15:23.315139 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: ipamclaims.k8s.cni.cncf.io, uid: 72121c58-362d-42d2-8366-b8812a968927]" virtual=false 2025-12-12T16:15:23.317469921+00:00 stderr F I1212 16:15:23.317401 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: egressqoses.k8s.ovn.org, uid: 6012b40c-c7ad-4b14-8312-982944731211]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.317469921+00:00 stderr F I1212 16:15:23.317445 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: ippools.whereabouts.cni.cncf.io, uid: b63b4e38-669f-44ad-ab98-76d5eaf71405]" virtual=false 2025-12-12T16:15:23.321662812+00:00 stderr F I1212 16:15:23.321595 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: egressrouters.network.operator.openshift.io, uid: a7408cdd-0e3a-4ddc-8751-cd603866ca5f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.321662812+00:00 stderr F I1212 16:15:23.321632 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kubeapiservers.operator.openshift.io, uid: ae659616-9944-465c-a47f-43bc8970fc87]" virtual=false 2025-12-12T16:15:23.328096927+00:00 stderr F I1212 16:15:23.326924 1 garbagecollector.go:567] "item has at least one existing owner, will not 
garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: egressservices.k8s.ovn.org, uid: b69783ee-7862-4364-816f-6e734c9cde3c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.328096927+00:00 stderr F I1212 16:15:23.326965 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kubecontrollermanagers.operator.openshift.io, uid: a15cd927-8cd2-44fa-bfe9-3f5c941aa4b8]" virtual=false 2025-12-12T16:15:23.328875096+00:00 stderr F I1212 16:15:23.328838 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: etcds.operator.openshift.io, uid: e0efca0b-7c4e-411a-be11-deff35d5c346]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.328893566+00:00 stderr F I1212 16:15:23.328868 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kubeletconfigs.machineconfiguration.openshift.io, uid: adb3f6f7-eda8-40e0-beca-678ac67912b6]" virtual=false 2025-12-12T16:15:23.330019063+00:00 stderr F I1212 16:15:23.329966 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: featuregates.config.openshift.io, uid: eadc8d2d-27ca-4258-ad18-eac95c3fb58c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.330019063+00:00 stderr F I1212 16:15:23.329991 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kubeschedulers.operator.openshift.io, uid: 4d4de4c9-6936-40ee-822c-5a1c4ba64863]" virtual=false 2025-12-12T16:15:23.333210570+00:00 stderr F I1212 16:15:23.333164 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: helmchartrepositories.helm.openshift.io, uid: d24d8b1d-992d-4494-8556-78b671c59ba0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.333210570+00:00 stderr F I1212 16:15:23.333201 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kubestorageversionmigrators.operator.openshift.io, uid: 23985b38-bdae-4cf8-a393-1b72629bd3f2]" virtual=false 2025-12-12T16:15:23.340802013+00:00 stderr F I1212 16:15:23.340691 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: imagecontentpolicies.config.openshift.io, uid: 
1892dff0-d452-48ae-9476-292d8a8e4980]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.340876045+00:00 stderr F I1212 16:15:23.340784 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineautoscalers.autoscaling.openshift.io, uid: fd77d279-dc79-4a17-bbd8-fe2e4f39e07d]" virtual=false 2025-12-12T16:15:23.346612353+00:00 stderr F I1212 16:15:23.346566 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: imagecontentsourcepolicies.operator.openshift.io, uid: a2528cbc-6de5-47da-905d-4a59d451737d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.346647694+00:00 stderr F I1212 16:15:23.346603 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineconfignodes.machineconfiguration.openshift.io, uid: 66d7f6eb-6706-4498-90ec-945aadc77195]" virtual=false 2025-12-12T16:15:23.349733238+00:00 stderr F I1212 16:15:23.349678 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: imagedigestmirrorsets.config.openshift.io, uid: 236fe1ea-b8f1-4ffc-84b4-833493738def]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.349733238+00:00 stderr F I1212 16:15:23.349707 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineconfigpools.machineconfiguration.openshift.io, uid: 4055e194-79f5-4823-89ad-e24952176abb]" virtual=false 2025-12-12T16:15:23.358444268+00:00 stderr F I1212 16:15:23.358367 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: imagepolicies.config.openshift.io, uid: 07ca605c-7483-4fc3-aaf5-2f0daba64b49]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.358468879+00:00 stderr F I1212 16:15:23.358438 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineconfigs.machineconfiguration.openshift.io, uid: c0990f08-2d2d-4c3f-9956-2632cb30f38d]" virtual=false 2025-12-12T16:15:23.364158486+00:00 stderr F I1212 16:15:23.364068 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: imagepruners.imageregistry.operator.openshift.io, uid: e11aa77a-fb0b-41eb-ba82-5de70dd65a8a]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.364218807+00:00 stderr F I1212 16:15:23.364142 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineconfigurations.operator.openshift.io, uid: 0a81857f-6f37-48f8-92e5-e8cd37b8d511]" virtual=false 2025-12-12T16:15:23.372101977+00:00 stderr F I1212 16:15:23.372015 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: images.config.openshift.io, uid: d0c2aef5-34ac-41eb-8d13-0e760d54bcb8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.372101977+00:00 stderr F I1212 16:15:23.372079 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machinehealthchecks.machine.openshift.io, uid: 7e55a2e7-f07d-4536-a123-9f25d00e259c]" virtual=false 2025-12-12T16:15:23.402638523+00:00 stderr F I1212 16:15:23.402533 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: imagetagmirrorsets.config.openshift.io, uid: f10ab076-d8e4-4be9-aada-ef5b0b81c11d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.402638523+00:00 stderr F I1212 16:15:23.402610 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineosbuilds.machineconfiguration.openshift.io, uid: 333d2790-aa6d-4adf-be39-678e6e016c80]" virtual=false 2025-12-12T16:15:23.412686725+00:00 stderr F I1212 16:15:23.412620 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: infrastructures.config.openshift.io, uid: 3f267746-ce3c-4380-b9bd-b3946d192d20]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.412686725+00:00 stderr F I1212 16:15:23.412649 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineosconfigs.machineconfiguration.openshift.io, uid: fa51a3a0-a331-484c-be1f-5cdd1b206d9f]" virtual=false 2025-12-12T16:15:23.420231427+00:00 stderr F I1212 16:15:23.419761 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: ingresscontrollers.operator.openshift.io, uid: b8edff74-844c-499b-92fe-bef4ea12825a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.420231427+00:00 stderr F I1212 
16:15:23.419795 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machines.machine.openshift.io, uid: ed1aeef5-befb-4c5b-98d7-719782d90e29]" virtual=false 2025-12-12T16:15:23.432714027+00:00 stderr F I1212 16:15:23.432627 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: ingresses.config.openshift.io, uid: 85bc6705-7780-4abd-b1f6-a316241256ed]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.432714027+00:00 stderr F I1212 16:15:23.432684 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machinesets.machine.openshift.io, uid: 9a18917c-99ea-43b3-bbe9-b849935b6584]" virtual=false 2025-12-12T16:15:23.458240751+00:00 stderr F I1212 16:15:23.455499 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: ipamclaims.k8s.cni.cncf.io, uid: 72121c58-362d-42d2-8366-b8812a968927]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.458240751+00:00 stderr F I1212 16:15:23.455554 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: metal3remediations.infrastructure.cluster.x-k8s.io, uid: 26177de7-fafe-4901-aa87-33e349c0ae2b]" virtual=false 2025-12-12T16:15:23.458240751+00:00 stderr F I1212 16:15:23.455766 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: ipaddressclaims.ipam.cluster.x-k8s.io, uid: 13138fe7-629f-4cdb-a331-c5be33d73b5b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.458240751+00:00 stderr F I1212 16:15:23.455784 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: metal3remediationtemplates.infrastructure.cluster.x-k8s.io, uid: 0a73937e-6485-4a6c-8ba9-e0f6ef27f034]" virtual=false 2025-12-12T16:15:23.458240751+00:00 stderr F I1212 16:15:23.455823 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kubeapiservers.operator.openshift.io, uid: ae659616-9944-465c-a47f-43bc8970fc87]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.458240751+00:00 stderr F I1212 16:15:23.455850 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: 
network-attachment-definitions.k8s.cni.cncf.io, uid: 61eefa62-520e-4825-a906-cf9dfb17cf1e]" virtual=false 2025-12-12T16:15:23.458240751+00:00 stderr F I1212 16:15:23.455913 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: ippools.whereabouts.cni.cncf.io, uid: b63b4e38-669f-44ad-ab98-76d5eaf71405]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.458240751+00:00 stderr F I1212 16:15:23.455926 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: networks.config.openshift.io, uid: 43c3625c-a9e5-4ccd-9777-3499f86b855d]" virtual=false 2025-12-12T16:15:23.458240751+00:00 stderr F I1212 16:15:23.455993 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: installplans.operators.coreos.com, uid: f96d13c2-7d7b-43c5-b33e-701618c38a23]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.458240751+00:00 stderr F I1212 16:15:23.456006 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: networks.operator.openshift.io, uid: 255404a6-81ea-445e-a00e-d3555cb37a10]" virtual=false 2025-12-12T16:15:23.458240751+00:00 stderr F I1212 16:15:23.456087 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: ipaddresses.ipam.cluster.x-k8s.io, uid: 957f3610-2225-4550-a3f5-8dfaee8fa002]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.458240751+00:00 stderr F I1212 16:15:23.456102 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: nodes.config.openshift.io, uid: e643d9d7-3873-4e06-b349-afc7c79c3045]" virtual=false 2025-12-12T16:15:23.462223097+00:00 stderr F I1212 16:15:23.461330 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kubecontrollermanagers.operator.openshift.io, uid: a15cd927-8cd2-44fa-bfe9-3f5c941aa4b8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.462223097+00:00 stderr F I1212 16:15:23.461368 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: nodeslicepools.whereabouts.cni.cncf.io, uid: 6d1c1342-2500-4d55-b87e-8dd2119f807e]" virtual=false 2025-12-12T16:15:23.466218493+00:00 stderr F I1212 16:15:23.465460 1 garbagecollector.go:567] "item has at least one existing owner, will 
not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kubeletconfigs.machineconfiguration.openshift.io, uid: adb3f6f7-eda8-40e0-beca-678ac67912b6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.466218493+00:00 stderr F I1212 16:15:23.465520 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: oauths.config.openshift.io, uid: bd28ddcd-9224-4bc3-8240-1a49c1854541]" virtual=false 2025-12-12T16:15:23.466218493+00:00 stderr F I1212 16:15:23.465654 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kubeschedulers.operator.openshift.io, uid: 4d4de4c9-6936-40ee-822c-5a1c4ba64863]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.466218493+00:00 stderr F I1212 16:15:23.465667 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: olmconfigs.operators.coreos.com, uid: e6039e18-27fc-4b20-947d-692bda721621]" virtual=false 2025-12-12T16:15:23.474221176+00:00 stderr F I1212 16:15:23.473114 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: kubestorageversionmigrators.operator.openshift.io, uid: 23985b38-bdae-4cf8-a393-1b72629bd3f2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.474221176+00:00 stderr F I1212 16:15:23.473192 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: openshiftapiservers.operator.openshift.io, uid: 058a2559-956f-46e4-89d3-baa87bbc6990]" virtual=false 2025-12-12T16:15:23.480936538+00:00 stderr F I1212 16:15:23.479531 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineautoscalers.autoscaling.openshift.io, uid: fd77d279-dc79-4a17-bbd8-fe2e4f39e07d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.480936538+00:00 stderr F I1212 16:15:23.479596 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: openshiftcontrollermanagers.operator.openshift.io, uid: b2265863-b590-444e-8c9c-2cb375745d0d]" virtual=false 2025-12-12T16:15:23.480936538+00:00 stderr F I1212 16:15:23.479755 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineconfignodes.machineconfiguration.openshift.io, uid: 
66d7f6eb-6706-4498-90ec-945aadc77195]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.480936538+00:00 stderr F I1212 16:15:23.479776 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: operatorconditions.operators.coreos.com, uid: 52bd2691-91b7-46cf-b18a-f7f7d4ca4100]" virtual=false 2025-12-12T16:15:23.489218698+00:00 stderr F I1212 16:15:23.488675 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineconfigpools.machineconfiguration.openshift.io, uid: 4055e194-79f5-4823-89ad-e24952176abb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.489218698+00:00 stderr F I1212 16:15:23.488745 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: operatorgroups.operators.coreos.com, uid: 0e8b5955-f13f-4be3-ae9c-61bcd320b18e]" virtual=false 2025-12-12T16:15:23.492264201+00:00 stderr F I1212 16:15:23.492206 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineconfigs.machineconfiguration.openshift.io, uid: c0990f08-2d2d-4c3f-9956-2632cb30f38d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.492282741+00:00 stderr F I1212 16:15:23.492257 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: operatorhubs.config.openshift.io, uid: 70e98ca5-4adc-403a-a86f-6c0b42b1662c]" virtual=false 2025-12-12T16:15:23.494598697+00:00 stderr F I1212 16:15:23.494542 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineconfigurations.operator.openshift.io, uid: 0a81857f-6f37-48f8-92e5-e8cd37b8d511]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.494598697+00:00 stderr F I1212 16:15:23.494568 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: operatorpkis.network.operator.openshift.io, uid: 3164274b-6f8f-4898-b812-65d961410f7d]" virtual=false 2025-12-12T16:15:23.514349093+00:00 stderr F I1212 16:15:23.514045 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machinehealthchecks.machine.openshift.io, uid: 7e55a2e7-f07d-4536-a123-9f25d00e259c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:15:23.514349093+00:00 stderr F I1212 16:15:23.514119 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: operators.operators.coreos.com, uid: 076c41ab-437f-447c-93dd-26fea64a70fd]" virtual=false 2025-12-12T16:15:23.538881994+00:00 stderr F I1212 16:15:23.538801 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineosbuilds.machineconfiguration.openshift.io, uid: 333d2790-aa6d-4adf-be39-678e6e016c80]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.538988287+00:00 stderr F I1212 16:15:23.538960 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: overlappingrangeipreservations.whereabouts.cni.cncf.io, uid: 0ff79e69-ef96-42c6-b746-dbc7d0703c42]" virtual=false 2025-12-12T16:15:23.546236751+00:00 stderr F I1212 16:15:23.546133 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machineosconfigs.machineconfiguration.openshift.io, uid: fa51a3a0-a331-484c-be1f-5cdd1b206d9f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.546257732+00:00 stderr F I1212 16:15:23.546237 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: pinnedimagesets.machineconfiguration.openshift.io, uid: 6613eb61-9f46-40bb-8543-ca39cc1b23ef]" virtual=false 2025-12-12T16:15:23.554036079+00:00 stderr F I1212 16:15:23.553976 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machines.machine.openshift.io, uid: ed1aeef5-befb-4c5b-98d7-719782d90e29]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.554058490+00:00 stderr F I1212 16:15:23.554040 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: podmonitors.monitoring.coreos.com, uid: 5f6782ac-5022-478d-bfd0-cb961eb26184]" virtual=false 2025-12-12T16:15:23.563631260+00:00 stderr F I1212 16:15:23.563579 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: machinesets.machine.openshift.io, uid: 9a18917c-99ea-43b3-bbe9-b849935b6584]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.563671431+00:00 stderr F I1212 16:15:23.563621 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: probes.monitoring.coreos.com, uid: 638cc330-50ef-4052-a4de-f5c1ff26d14c]" virtual=false 2025-12-12T16:15:23.566003857+00:00 stderr F I1212 16:15:23.565959 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: metal3remediations.infrastructure.cluster.x-k8s.io, uid: 26177de7-fafe-4901-aa87-33e349c0ae2b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.566035408+00:00 stderr F I1212 16:15:23.565995 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: projecthelmchartrepositories.helm.openshift.io, uid: 2105cc8a-c10c-401e-b0dc-7e2748bc7891]" virtual=false 2025-12-12T16:15:23.570994548+00:00 stderr F I1212 16:15:23.570898 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: metal3remediationtemplates.infrastructure.cluster.x-k8s.io, uid: 0a73937e-6485-4a6c-8ba9-e0f6ef27f034]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.570994548+00:00 stderr F I1212 16:15:23.570948 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: projects.config.openshift.io, uid: 5979d120-38d8-4b13-ab6d-5958992124d9]" virtual=false 2025-12-12T16:15:23.573946669+00:00 stderr F I1212 16:15:23.573896 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: network-attachment-definitions.k8s.cni.cncf.io, uid: 61eefa62-520e-4825-a906-cf9dfb17cf1e]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.573946669+00:00 stderr F I1212 16:15:23.573928 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheuses.monitoring.coreos.com, uid: df2df67d-b623-4e8f-aa23-7e9d2c82a279]" virtual=false 2025-12-12T16:15:23.576932721+00:00 stderr F I1212 16:15:23.576851 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: networks.config.openshift.io, uid: 43c3625c-a9e5-4ccd-9777-3499f86b855d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.576952961+00:00 stderr F I1212 16:15:23.576919 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheusrules.monitoring.coreos.com, uid: fc28e963-9d54-4a19-bcaa-c272ad385a82]" virtual=false 
2025-12-12T16:15:23.583106109+00:00 stderr F I1212 16:15:23.583044 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: networks.operator.openshift.io, uid: 255404a6-81ea-445e-a00e-d3555cb37a10]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.583106109+00:00 stderr F I1212 16:15:23.583089 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: proxies.config.openshift.io, uid: 56becff0-4867-4bd6-9803-9995d62edcee]" virtual=false 2025-12-12T16:15:23.587306061+00:00 stderr F I1212 16:15:23.587206 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: nodes.config.openshift.io, uid: e643d9d7-3873-4e06-b349-afc7c79c3045]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.587306061+00:00 stderr F I1212 16:15:23.587264 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: rangeallocations.security.internal.openshift.io, uid: 7096e501-baf8-4fcc-bded-c349016206f9]" virtual=false 2025-12-12T16:15:23.590060657+00:00 stderr F I1212 16:15:23.589984 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: nodeslicepools.whereabouts.cni.cncf.io, uid: 6d1c1342-2500-4d55-b87e-8dd2119f807e]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.590060657+00:00 stderr F I1212 16:15:23.590036 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: rolebindingrestrictions.authorization.openshift.io, uid: c012bd51-64d0-435b-a4fe-5c0b53406266]" virtual=false 2025-12-12T16:15:23.593475019+00:00 stderr F I1212 16:15:23.593389 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: oauths.config.openshift.io, uid: bd28ddcd-9224-4bc3-8240-1a49c1854541]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.593475019+00:00 stderr F I1212 16:15:23.593455 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: schedulers.config.openshift.io, uid: a179b980-f1da-4aa8-87c1-22d416087c9b]" virtual=false 2025-12-12T16:15:23.596230206+00:00 stderr F I1212 16:15:23.596143 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" 
item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: olmconfigs.operators.coreos.com, uid: e6039e18-27fc-4b20-947d-692bda721621]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.596230206+00:00 stderr F I1212 16:15:23.596211 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: securitycontextconstraints.security.openshift.io, uid: c2c1b51b-1f1e-41ac-8792-96123aa08c0d]" virtual=false 2025-12-12T16:15:23.600330794+00:00 stderr F I1212 16:15:23.600267 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: openshiftapiservers.operator.openshift.io, uid: 058a2559-956f-46e4-89d3-baa87bbc6990]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.600355015+00:00 stderr F I1212 16:15:23.600328 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: servicecas.operator.openshift.io, uid: 9cfb00ff-3ec8-4778-90de-869919f480a6]" virtual=false 2025-12-12T16:15:23.606737099+00:00 stderr F I1212 16:15:23.606667 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: openshiftcontrollermanagers.operator.openshift.io, uid: b2265863-b590-444e-8c9c-2cb375745d0d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.606764159+00:00 stderr F I1212 16:15:23.606727 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: servicemonitors.monitoring.coreos.com, uid: 888d72a2-1d4c-499e-87f2-270cdb7a307f]" virtual=false 2025-12-12T16:15:23.610670014+00:00 stderr F I1212 16:15:23.610567 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: operatorconditions.operators.coreos.com, uid: 52bd2691-91b7-46cf-b18a-f7f7d4ca4100]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.610691964+00:00 stderr F I1212 16:15:23.610670 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: storages.operator.openshift.io, uid: 7b391c15-9c8b-4a44-9c2e-96a6aee9ced7]" virtual=false 2025-12-12T16:15:23.616479464+00:00 stderr F I1212 16:15:23.616409 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: operatorgroups.operators.coreos.com, uid: 0e8b5955-f13f-4be3-ae9c-61bcd320b18e]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.616502874+00:00 stderr F I1212 16:15:23.616473 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: storagestates.migration.k8s.io, uid: 0228ec5e-2580-41dd-9480-5f6e00e945a0]" virtual=false 2025-12-12T16:15:23.623315438+00:00 stderr F I1212 16:15:23.623244 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: operatorhubs.config.openshift.io, uid: 70e98ca5-4adc-403a-a86f-6c0b42b1662c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.623315438+00:00 stderr F I1212 16:15:23.623284 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: storageversionmigrations.migration.k8s.io, uid: fe171e47-da4d-47a1-85d1-e7d765c45d94]" virtual=false 2025-12-12T16:15:23.627046638+00:00 stderr F I1212 16:15:23.626550 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: operatorpkis.network.operator.openshift.io, uid: 3164274b-6f8f-4898-b812-65d961410f7d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.627046638+00:00 stderr F I1212 16:15:23.626624 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-operator, name: iptables-alerter-script, uid: d6454361-d693-4bd5-970e-8dee8c867368]" virtual=false 2025-12-12T16:15:23.646366724+00:00 stderr F I1212 16:15:23.646278 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: operators.operators.coreos.com, uid: 076c41ab-437f-447c-93dd-26fea64a70fd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.646366724+00:00 stderr F I1212 16:15:23.646335 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-oauth-apiserver, name: revision-status-1, uid: 444fbf90-75af-4a73-9695-57f0d8acfffe]" virtual=false 2025-12-12T16:15:23.670688489+00:00 stderr F I1212 16:15:23.670595 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: overlappingrangeipreservations.whereabouts.cni.cncf.io, uid: 0ff79e69-ef96-42c6-b746-dbc7d0703c42]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.670688489+00:00 stderr F I1212 16:15:23.670656 1 garbagecollector.go:501] "Processing 
item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: subscriptions.operators.coreos.com, uid: be63efc0-c9a5-4e6a-b7cb-96f2e3e850df]" virtual=false 2025-12-12T16:15:23.679573374+00:00 stderr F I1212 16:15:23.679496 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: pinnedimagesets.machineconfiguration.openshift.io, uid: 6613eb61-9f46-40bb-8543-ca39cc1b23ef]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.679573374+00:00 stderr F I1212 16:15:23.679542 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: thanosrulers.monitoring.coreos.com, uid: 7a4b3665-d6b3-480a-b466-ee06c781d6d9]" virtual=false 2025-12-12T16:15:23.686865129+00:00 stderr F I1212 16:15:23.686765 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: podmonitors.monitoring.coreos.com, uid: 5f6782ac-5022-478d-bfd0-cb961eb26184]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.686865129+00:00 stderr F I1212 16:15:23.686794 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: userdefinednetworks.k8s.ovn.org, uid: 5ddfa170-ec4d-473f-abbf-efc6b244aede]" virtual=false 2025-12-12T16:15:23.696671895+00:00 stderr F I1212 16:15:23.696573 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: probes.monitoring.coreos.com, uid: 638cc330-50ef-4052-a4de-f5c1ff26d14c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.696671895+00:00 stderr F I1212 16:15:23.696631 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-operator-lifecycle-manager, name: collect-profiles-config, uid: ba620145-fbc2-4aba-b871-93a383d45a30]" virtual=false 2025-12-12T16:15:23.699448592+00:00 stderr F I1212 16:15:23.699386 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: projecthelmchartrepositories.helm.openshift.io, uid: 2105cc8a-c10c-401e-b0dc-7e2748bc7891]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.699448592+00:00 stderr F I1212 16:15:23.699425 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-ovn-kubernetes, name: ovnkube-config, uid: bb2cfc2c-89dd-4cea-b292-4d5129ae614c]" virtual=false 2025-12-12T16:15:23.702780543+00:00 stderr F I1212 16:15:23.702719 
1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: projects.config.openshift.io, uid: 5979d120-38d8-4b13-ab6d-5958992124d9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.702850844+00:00 stderr F I1212 16:15:23.702828 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-ovn-kubernetes, name: ovnkube-script-lib, uid: 72382e9c-006e-43aa-a927-ef5b09d76e4a]" virtual=false 2025-12-12T16:15:23.706752008+00:00 stderr F I1212 16:15:23.706633 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheusrules.monitoring.coreos.com, uid: fc28e963-9d54-4a19-bcaa-c272ad385a82]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.706752008+00:00 stderr F I1212 16:15:23.706676 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-service-ca-operator, name: service-ca-operator-config, uid: 22a18c28-634b-4406-aa6b-cc1e494c9cf1]" virtual=false 2025-12-12T16:15:23.708525991+00:00 stderr F I1212 16:15:23.708478 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-oauth-apiserver, name: revision-status-1, uid: 444fbf90-75af-4a73-9695-57f0d8acfffe]" 2025-12-12T16:15:23.708525991+00:00 stderr F I1212 16:15:23.708501 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-cluster-machine-approver, name: machineapprover-rules, uid: 03750884-15c0-4b90-b038-07d324e83865]" virtual=false 2025-12-12T16:15:23.713333487+00:00 stderr F I1212 16:15:23.713260 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: proxies.config.openshift.io, uid: 56becff0-4867-4bd6-9803-9995d62edcee]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.713349817+00:00 stderr F I1212 16:15:23.713325 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-cluster-samples-operator, name: samples-operator-alerts, uid: b6a0a11d-7bbc-4d70-aa23-0e557edf8696]" virtual=false 2025-12-12T16:15:23.715918899+00:00 stderr F I1212 16:15:23.715857 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: rangeallocations.security.internal.openshift.io, uid: 7096e501-baf8-4fcc-bded-c349016206f9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:15:23.715934889+00:00 stderr F I1212 16:15:23.715909 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-cluster-version, name: cluster-version-operator, uid: f57fe110-2989-4987-a61c-24caa6fc9bb2]" virtual=false 2025-12-12T16:15:23.720075459+00:00 stderr F I1212 16:15:23.720036 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: rolebindingrestrictions.authorization.openshift.io, uid: c012bd51-64d0-435b-a4fe-5c0b53406266]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.720092080+00:00 stderr F I1212 16:15:23.720067 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-console-operator, name: cluster-monitoring-prometheus-rules, uid: 91547e96-31b3-460d-80d1-83b02bd7d873]" virtual=false 2025-12-12T16:15:23.723306297+00:00 stderr F I1212 16:15:23.723211 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: prometheuses.monitoring.coreos.com, uid: df2df67d-b623-4e8f-aa23-7e9d2c82a279]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.723350808+00:00 stderr F I1212 16:15:23.723311 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-dns-operator, name: dns, uid: eb61aaa9-4e25-4e91-a620-88091d39e58f]" virtual=false 2025-12-12T16:15:23.726547275+00:00 stderr F I1212 16:15:23.726470 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: schedulers.config.openshift.io, uid: a179b980-f1da-4aa8-87c1-22d416087c9b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.726588746+00:00 stderr F I1212 16:15:23.726545 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-etcd-operator, name: etcd-prometheus-rules, uid: 97274c51-fc95-4eb6-ad00-7a3b4f31f2ca]" virtual=false 2025-12-12T16:15:23.729422074+00:00 stderr F I1212 16:15:23.729385 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: securitycontextconstraints.security.openshift.io, uid: c2c1b51b-1f1e-41ac-8792-96123aa08c0d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.729440165+00:00 stderr F I1212 16:15:23.729417 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: 
openshift-image-registry, name: image-registry-operator-alerts, uid: c76d6124-d19c-4231-b946-23f2c04f09c7]" virtual=false 2025-12-12T16:15:23.733239596+00:00 stderr F I1212 16:15:23.733143 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: servicecas.operator.openshift.io, uid: 9cfb00ff-3ec8-4778-90de-869919f480a6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.733239596+00:00 stderr F I1212 16:15:23.733204 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-image-registry, name: image-registry-rules, uid: a1a802b1-bbfe-4655-8366-9f94d997c9ee]" virtual=false 2025-12-12T16:15:23.740538522+00:00 stderr F I1212 16:15:23.740430 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: servicemonitors.monitoring.coreos.com, uid: 888d72a2-1d4c-499e-87f2-270cdb7a307f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.740538522+00:00 stderr F I1212 16:15:23.740519 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-image-registry, name: imagestreams-rules, uid: 7f8f7459-95b2-46d1-a5f5-c98861a56f22]" virtual=false 2025-12-12T16:15:23.743744239+00:00 stderr F I1212 16:15:23.743580 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: storages.operator.openshift.io, uid: 7b391c15-9c8b-4a44-9c2e-96a6aee9ced7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.743744239+00:00 stderr F I1212 16:15:23.743679 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-ingress-operator, name: ingress-operator, uid: 25ced67d-4442-487b-9828-7b52d14815c0]" virtual=false 2025-12-12T16:15:23.752292285+00:00 stderr F I1212 16:15:23.752150 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: storagestates.migration.k8s.io, uid: 0228ec5e-2580-41dd-9480-5f6e00e945a0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.752292285+00:00 stderr F I1212 16:15:23.752259 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 2877fbf1-ff5a-4aa9-b775-2605d4bccd96]" virtual=false 2025-12-12T16:15:23.758347641+00:00 stderr F I1212 16:15:23.758249 1 garbagecollector.go:567] "item has at least one existing 
owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: storageversionmigrations.migration.k8s.io, uid: fe171e47-da4d-47a1-85d1-e7d765c45d94]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.758466684+00:00 stderr F I1212 16:15:23.758439 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-apiserver, name: kube-apiserver-performance-recording-rules, uid: f911c2bf-6b61-41f5-9e5a-f111e13fea13]" virtual=false 2025-12-12T16:15:23.760852902+00:00 stderr F I1212 16:15:23.760745 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-network-operator, name: iptables-alerter-script, uid: d6454361-d693-4bd5-970e-8dee8c867368]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.760964844+00:00 stderr F I1212 16:15:23.760940 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 0b5fb497-c46f-4ec8-b3ac-f01c6ed66367]" virtual=false 2025-12-12T16:15:23.807912075+00:00 stderr F I1212 16:15:23.807841 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: subscriptions.operators.coreos.com, uid: be63efc0-c9a5-4e6a-b7cb-96f2e3e850df]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.807912075+00:00 stderr F I1212 16:15:23.807895 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-scheduler-operator, name: kube-scheduler-operator, uid: b334ae77-e6a0-41f9-b470-35a7cb6618a5]" virtual=false 2025-12-12T16:15:23.816020871+00:00 stderr F I1212 16:15:23.815957 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: userdefinednetworks.k8s.ovn.org, uid: 5ddfa170-ec4d-473f-abbf-efc6b244aede]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.816020871+00:00 stderr F I1212 16:15:23.815999 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-machine-config-operator, name: machine-config-controller, uid: be18a557-8d05-434c-9f9b-d928b26e652a]" virtual=false 2025-12-12T16:15:23.826153785+00:00 stderr F I1212 16:15:23.826089 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[apiextensions.k8s.io/v1/CustomResourceDefinition, namespace: , name: 
thanosrulers.monitoring.coreos.com, uid: 7a4b3665-d6b3-480a-b466-ee06c781d6d9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.826153785+00:00 stderr F I1212 16:15:23.826137 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-machine-config-operator, name: machine-config-daemon, uid: ace02251-92c6-4ead-9477-02801ce2fc3d]" virtual=false 2025-12-12T16:15:23.829777962+00:00 stderr F I1212 16:15:23.829723 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-operator-lifecycle-manager, name: collect-profiles-config, uid: ba620145-fbc2-4aba-b871-93a383d45a30]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:23.829796842+00:00 stderr F I1212 16:15:23.829777 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-marketplace, name: marketplace-alert-rules, uid: dc8ef252-29f9-421b-9166-5d8a6fb2cb84]" virtual=false 2025-12-12T16:15:23.832410216+00:00 stderr F I1212 16:15:23.832370 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-ovn-kubernetes, name: ovnkube-config, uid: bb2cfc2c-89dd-4cea-b292-4d5129ae614c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.832437636+00:00 stderr F I1212 16:15:23.832413 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-multus, name: prometheus-k8s-rules, uid: f66d3c2d-8031-4960-b90a-2518392083d6]" virtual=false 2025-12-12T16:15:23.835706325+00:00 stderr F I1212 16:15:23.835665 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-ovn-kubernetes, name: ovnkube-script-lib, uid: 72382e9c-006e-43aa-a927-ef5b09d76e4a]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.835722915+00:00 stderr F I1212 16:15:23.835699 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-network-operator, name: openshift-network-operator-ipsec-rules, uid: 4b9d0264-6145-4803-a9c5-e7715dde16c7]" virtual=false 2025-12-12T16:15:23.839045265+00:00 stderr F I1212 16:15:23.838922 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[v1/ConfigMap, namespace: openshift-service-ca-operator, name: service-ca-operator-config, uid: 22a18c28-634b-4406-aa6b-cc1e494c9cf1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.839045265+00:00 stderr F I1212 
16:15:23.838952 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-operator-lifecycle-manager, name: olm-alert-rules, uid: d90e2096-395c-40fd-9ade-393efa2e6c5b]" virtual=false 2025-12-12T16:15:23.844503097+00:00 stderr F I1212 16:15:23.844398 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-cluster-machine-approver, name: machineapprover-rules, uid: 03750884-15c0-4b90-b038-07d324e83865]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.844503097+00:00 stderr F I1212 16:15:23.844475 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-ovn-kubernetes, name: master-rules, uid: a8f235d8-6055-4051-84b4-f75387ba159c]" virtual=false 2025-12-12T16:15:23.846824163+00:00 stderr F I1212 16:15:23.846767 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-cluster-samples-operator, name: samples-operator-alerts, uid: b6a0a11d-7bbc-4d70-aa23-0e557edf8696]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.846855593+00:00 stderr F I1212 16:15:23.846831 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-ovn-kubernetes, name: networking-rules, uid: 31962b12-2774-4f24-9d5c-f55ad1ee66ac]" virtual=false 2025-12-12T16:15:23.850063601+00:00 stderr F I1212 16:15:23.850006 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-cluster-version, name: cluster-version-operator, uid: f57fe110-2989-4987-a61c-24caa6fc9bb2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.850123202+00:00 stderr F I1212 16:15:23.850104 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator, uid: 92c9f946-b2a0-4dc5-975c-25ebc3bb9c4e]" virtual=false 2025-12-12T16:15:23.853305009+00:00 stderr F I1212 16:15:23.853263 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-console-operator, name: cluster-monitoring-prometheus-rules, uid: 91547e96-31b3-460d-80d1-83b02bd7d873]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.853372901+00:00 stderr F I1212 16:15:23.853349 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-apiserver, name: 
openshift-apiserver, uid: 38fd95a7-b03c-4438-abb8-83e272fd6912]" virtual=false 2025-12-12T16:15:23.856362123+00:00 stderr F I1212 16:15:23.856290 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-dns-operator, name: dns, uid: eb61aaa9-4e25-4e91-a620-88091d39e58f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.856384243+00:00 stderr F I1212 16:15:23.856365 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-apiserver, name: openshift-apiserver-operator-check-endpoints, uid: bb13a4c2-4a26-4c4c-be0a-c92c6553ee6e]" virtual=false 2025-12-12T16:15:23.860942883+00:00 stderr F I1212 16:15:23.860820 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-etcd-operator, name: etcd-prometheus-rules, uid: 97274c51-fc95-4eb6-ad00-7a3b4f31f2ca]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.860942883+00:00 stderr F I1212 16:15:23.860853 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-authentication-operator, name: authentication-operator, uid: 01f7855a-a823-4705-8db6-27c45980a6cb]" virtual=false 2025-12-12T16:15:23.863787971+00:00 stderr F I1212 16:15:23.863735 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-image-registry, name: image-registry-operator-alerts, uid: c76d6124-d19c-4231-b946-23f2c04f09c7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.863787971+00:00 stderr F I1212 16:15:23.863775 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-authentication, name: oauth-openshift, uid: f570f814-28f0-43c6-a672-c5ff8b60e0a0]" virtual=false 2025-12-12T16:15:23.866733392+00:00 stderr F I1212 16:15:23.866624 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-image-registry, name: image-registry-rules, uid: a1a802b1-bbfe-4655-8366-9f94d997c9ee]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.866733392+00:00 stderr F I1212 16:15:23.866714 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-cluster-machine-approver, name: cluster-machine-approver, uid: 5a288427-478d-4f77-8ab7-5a6a841b42ec]" virtual=false 2025-12-12T16:15:23.869047358+00:00 stderr F I1212 16:15:23.868995 1 garbagecollector.go:567] "item has at least one existing owner, 
will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-image-registry, name: imagestreams-rules, uid: 7f8f7459-95b2-46d1-a5f5-c98861a56f22]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.869047358+00:00 stderr F I1212 16:15:23.869030 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: c54dfa10-f53a-4c08-8fe5-f78de034450b]" virtual=false 2025-12-12T16:15:23.876508198+00:00 stderr F I1212 16:15:23.876424 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-ingress-operator, name: ingress-operator, uid: 25ced67d-4442-487b-9828-7b52d14815c0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.876508198+00:00 stderr F I1212 16:15:23.876480 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-cluster-version, name: cluster-version-operator, uid: 3cf7eed4-951b-4ed5-b5fe-6175a10b9554]" virtual=false 2025-12-12T16:15:23.878767532+00:00 stderr F I1212 16:15:23.878713 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 2877fbf1-ff5a-4aa9-b775-2605d4bccd96]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.878789623+00:00 stderr F I1212 16:15:23.878756 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-config-operator, name: config-operator, uid: dd012f1c-40f4-428c-a842-46f94cbe2c6c]" virtual=false 2025-12-12T16:15:23.886763295+00:00 stderr F I1212 16:15:23.886710 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-apiserver, name: kube-apiserver-performance-recording-rules, uid: f911c2bf-6b61-41f5-9e5a-f111e13fea13]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.886786035+00:00 stderr F I1212 16:15:23.886753 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-console-operator, name: console-operator, uid: 67aaa41b-07c9-42c2-b24b-e21c702aaf38]" virtual=false 2025-12-12T16:15:23.889249895+00:00 stderr F I1212 16:15:23.889201 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-controller-manager-operator, name: 
kube-controller-manager-operator, uid: 0b5fb497-c46f-4ec8-b3ac-f01c6ed66367]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.889307016+00:00 stderr F I1212 16:15:23.889287 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-console, name: console, uid: ce6eb94a-67e7-4594-8332-d294f5f0ed28]" virtual=false 2025-12-12T16:15:23.940281314+00:00 stderr F I1212 16:15:23.940161 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-kube-scheduler-operator, name: kube-scheduler-operator, uid: b334ae77-e6a0-41f9-b470-35a7cb6618a5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.940281314+00:00 stderr F I1212 16:15:23.940242 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-controller-manager-operator, name: openshift-controller-manager-operator, uid: b6b1ba29-2359-42ad-a1f3-c61c995c528f]" virtual=false 2025-12-12T16:15:23.949708371+00:00 stderr F I1212 16:15:23.949639 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-machine-config-operator, name: machine-config-controller, uid: be18a557-8d05-434c-9f9b-d928b26e652a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.949708371+00:00 stderr F I1212 16:15:23.949687 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-controller-manager, name: openshift-controller-manager, uid: 3ead3b3c-6cb1-4941-8e45-fcfb01cfa6b6]" virtual=false 2025-12-12T16:15:23.959728043+00:00 stderr F I1212 16:15:23.959653 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-machine-config-operator, name: machine-config-daemon, uid: ace02251-92c6-4ead-9477-02801ce2fc3d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.959728043+00:00 stderr F I1212 16:15:23.959700 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-dns-operator, name: dns-operator, uid: 50a8933f-a9ac-4a80-b460-c36e9fb81474]" virtual=false 2025-12-12T16:15:23.964636641+00:00 stderr F I1212 16:15:23.964553 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-marketplace, name: marketplace-alert-rules, uid: dc8ef252-29f9-421b-9166-5d8a6fb2cb84]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.964697312+00:00 stderr F I1212 16:15:23.964649 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-etcd-operator, name: etcd-operator, uid: aef2648c-8377-4d11-a4a1-fb24f5095a8d]" virtual=false 2025-12-12T16:15:23.966431404+00:00 stderr F I1212 16:15:23.966391 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-multus, name: prometheus-k8s-rules, uid: f66d3c2d-8031-4960-b90a-2518392083d6]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.966449215+00:00 stderr F I1212 16:15:23.966429 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-image-registry, name: image-registry, uid: fc13ca00-4d30-4cf5-ba4e-0aeb5356211f]" virtual=false 2025-12-12T16:15:23.969756684+00:00 stderr F I1212 16:15:23.969664 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-network-operator, name: openshift-network-operator-ipsec-rules, uid: 4b9d0264-6145-4803-a9c5-e7715dde16c7]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.969756684+00:00 stderr F I1212 16:15:23.969691 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-image-registry, name: image-registry-operator, uid: 7693198b-e80b-4c67-a087-641d91ca8741]" virtual=false 2025-12-12T16:15:23.973609037+00:00 stderr F I1212 16:15:23.973544 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-operator-lifecycle-manager, name: olm-alert-rules, uid: d90e2096-395c-40fd-9ade-393efa2e6c5b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.973652288+00:00 stderr F I1212 16:15:23.973598 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-ingress-operator, name: ingress-operator, uid: e02df054-9826-42fd-bd6b-a68c996ebec0]" virtual=false 2025-12-12T16:15:23.975683157+00:00 stderr F I1212 16:15:23.975594 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-ovn-kubernetes, name: master-rules, uid: a8f235d8-6055-4051-84b4-f75387ba159c]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 
2025-12-12T16:15:23.975683157+00:00 stderr F I1212 16:15:23.975645 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 005ad9fa-457c-4a70-9bb0-2a624385cac9]" virtual=false 2025-12-12T16:15:23.978851743+00:00 stderr F I1212 16:15:23.978809 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/PrometheusRule, namespace: openshift-ovn-kubernetes, name: networking-rules, uid: 31962b12-2774-4f24-9d5c-f55ad1ee66ac]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:23.978851743+00:00 stderr F I1212 16:15:23.978838 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-apiserver, name: kube-apiserver, uid: 9de9deed-9721-49d9-9ffc-fe6fde17ec88]" virtual=false 2025-12-12T16:15:23.984252844+00:00 stderr F I1212 16:15:23.984141 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-apiserver-operator, name: openshift-apiserver-operator, uid: 92c9f946-b2a0-4dc5-975c-25ebc3bb9c4e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.984252844+00:00 stderr F I1212 16:15:23.984224 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 767bca57-71f8-40a0-8831-50d92f59808c]" virtual=false 2025-12-12T16:15:23.986166560+00:00 stderr F I1212 16:15:23.986101 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-apiserver, name: openshift-apiserver, uid: 38fd95a7-b03c-4438-abb8-83e272fd6912]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.986222311+00:00 stderr F I1212 16:15:23.986138 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-controller-manager, name: kube-controller-manager, uid: 43eede0a-d0aa-4d52-9cba-d673fe0fc344]" virtual=false 2025-12-12T16:15:23.990089074+00:00 stderr F I1212 16:15:23.990022 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-apiserver, name: openshift-apiserver-operator-check-endpoints, uid: bb13a4c2-4a26-4c4c-be0a-c92c6553ee6e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.990089074+00:00 stderr F I1212 16:15:23.990052 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" 
item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-scheduler-operator, name: kube-scheduler-operator, uid: 1e85867d-2566-494d-83b0-620b526f122f]" virtual=false 2025-12-12T16:15:23.992433021+00:00 stderr F I1212 16:15:23.992381 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-authentication-operator, name: authentication-operator, uid: 01f7855a-a823-4705-8db6-27c45980a6cb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.992433021+00:00 stderr F I1212 16:15:23.992408 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-scheduler, name: kube-scheduler, uid: 0ef34820-7d38-4563-bcab-20b7d718ade2]" virtual=false 2025-12-12T16:15:23.996312414+00:00 stderr F I1212 16:15:23.996265 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-authentication, name: oauth-openshift, uid: f570f814-28f0-43c6-a672-c5ff8b60e0a0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.996312414+00:00 stderr F I1212 16:15:23.996293 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-machine-config-operator, name: machine-config-controller, uid: 743c6934-a727-4576-9532-c45126dd50bd]" virtual=false 2025-12-12T16:15:23.999779408+00:00 stderr F I1212 16:15:23.999731 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-cluster-machine-approver, name: cluster-machine-approver, uid: 5a288427-478d-4f77-8ab7-5a6a841b42ec]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:23.999779408+00:00 stderr F I1212 16:15:23.999758 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-machine-config-operator, name: machine-config-daemon, uid: b8f2912e-1679-4233-b796-af02cbe7e18b]" virtual=false 2025-12-12T16:15:24.004165103+00:00 stderr F I1212 16:15:24.003936 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-cluster-samples-operator, name: cluster-samples-operator, uid: c54dfa10-f53a-4c08-8fe5-f78de034450b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.004165103+00:00 stderr F I1212 16:15:24.004146 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-machine-config-operator, name: machine-config-operator, uid: de7baa5b-d04a-4e5e-9669-bf620c8a04d1]" 
virtual=false 2025-12-12T16:15:24.010323492+00:00 stderr F I1212 16:15:24.010203 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-cluster-version, name: cluster-version-operator, uid: 3cf7eed4-951b-4ed5-b5fe-6175a10b9554]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.010323492+00:00 stderr F I1212 16:15:24.010234 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-marketplace, name: marketplace-operator, uid: dca395b3-cf6a-4fc7-92bc-15c290009884]" virtual=false 2025-12-12T16:15:24.013070618+00:00 stderr F I1212 16:15:24.013016 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-config-operator, name: config-operator, uid: dd012f1c-40f4-428c-a842-46f94cbe2c6c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.013070618+00:00 stderr F I1212 16:15:24.013044 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-multus, name: monitor-multus-admission-controller, uid: 01fa16c6-f969-42c1-b69a-43de0deff522]" virtual=false 2025-12-12T16:15:24.019969714+00:00 stderr F I1212 16:15:24.019902 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-console-operator, name: console-operator, uid: 67aaa41b-07c9-42c2-b24b-e21c702aaf38]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.019969714+00:00 stderr F I1212 16:15:24.019944 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-multus, name: monitor-network, uid: 9f83a4ef-cd08-40b7-a5a3-f1f39610e4bf]" virtual=false 2025-12-12T16:15:24.024228166+00:00 stderr F I1212 16:15:24.024151 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-console, name: console, uid: ce6eb94a-67e7-4594-8332-d294f5f0ed28]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.024228166+00:00 stderr F I1212 16:15:24.024208 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-network-diagnostics, name: network-check-source, uid: a77ddccd-3bdf-40da-b44d-39f9ccca28bb]" virtual=false 2025-12-12T16:15:24.074029446+00:00 stderr F I1212 16:15:24.073893 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: 
openshift-controller-manager-operator, name: openshift-controller-manager-operator, uid: b6b1ba29-2359-42ad-a1f3-c61c995c528f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.074029446+00:00 stderr F I1212 16:15:24.073981 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-network-operator, name: network-operator, uid: 738c1721-593f-4f60-a567-4597ff37ea6a]" virtual=false 2025-12-12T16:15:24.083717410+00:00 stderr F I1212 16:15:24.083634 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-controller-manager, name: openshift-controller-manager, uid: 3ead3b3c-6cb1-4941-8e45-fcfb01cfa6b6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.083717410+00:00 stderr F I1212 16:15:24.083671 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-oauth-apiserver, name: openshift-oauth-apiserver, uid: 668f8445-4e0b-4306-a0c6-9208bf76efb5]" virtual=false 2025-12-12T16:15:24.094820077+00:00 stderr F I1212 16:15:24.094598 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-dns-operator, name: dns-operator, uid: 50a8933f-a9ac-4a80-b460-c36e9fb81474]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.094820077+00:00 stderr F I1212 16:15:24.094641 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-operator-lifecycle-manager, name: catalog-operator, uid: 308b0b19-f7aa-40a2-b0b9-10a5d2b356f8]" virtual=false 2025-12-12T16:15:24.097122413+00:00 stderr F I1212 16:15:24.097068 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-etcd-operator, name: etcd-operator, uid: aef2648c-8377-4d11-a4a1-fb24f5095a8d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.097122413+00:00 stderr F I1212 16:15:24.097097 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-operator-lifecycle-manager, name: olm-operator, uid: cfa008c8-2c0c-470c-bc32-f95c0e394dd5]" virtual=false 2025-12-12T16:15:24.100000232+00:00 stderr F I1212 16:15:24.099937 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-image-registry, name: image-registry, uid: fc13ca00-4d30-4cf5-ba4e-0aeb5356211f]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.100000232+00:00 stderr F I1212 16:15:24.099965 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-operator-lifecycle-manager, name: package-server-manager-metrics, uid: 927e38ed-2fe0-4faa-8d67-18f898398255]" virtual=false 2025-12-12T16:15:24.103339932+00:00 stderr F I1212 16:15:24.103243 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-image-registry, name: image-registry-operator, uid: 7693198b-e80b-4c67-a087-641d91ca8741]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.103339932+00:00 stderr F I1212 16:15:24.103312 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-ovn-kubernetes, name: monitor-ovn-control-plane-metrics, uid: e8d49b63-d110-45e5-a2ec-20435c71bd60]" virtual=false 2025-12-12T16:15:24.106468158+00:00 stderr F I1212 16:15:24.106396 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-ingress-operator, name: ingress-operator, uid: e02df054-9826-42fd-bd6b-a68c996ebec0]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.106468158+00:00 stderr F I1212 16:15:24.106446 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-ovn-kubernetes, name: monitor-ovn-node, uid: 992d1159-1bbf-4ff7-adf1-580c362b690a]" virtual=false 2025-12-12T16:15:24.110930265+00:00 stderr F I1212 16:15:24.110786 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-apiserver-operator, name: kube-apiserver-operator, uid: 005ad9fa-457c-4a70-9bb0-2a624385cac9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.110930265+00:00 stderr F I1212 16:15:24.110830 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-route-controller-manager, name: openshift-route-controller-manager, uid: 6c85dcfe-44cb-4596-b7d4-b6e79a18159e]" virtual=false 2025-12-12T16:15:24.113847456+00:00 stderr F I1212 16:15:24.113769 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-apiserver, name: kube-apiserver, uid: 9de9deed-9721-49d9-9ffc-fe6fde17ec88]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:15:24.113847456+00:00 stderr F I1212 16:15:24.113830 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-service-ca-operator, name: service-ca-operator, uid: 4979bb5a-bf15-43a9-9eee-231d52574ca5]" virtual=false 2025-12-12T16:15:24.116977541+00:00 stderr F I1212 16:15:24.116909 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-controller-manager-operator, name: kube-controller-manager-operator, uid: 767bca57-71f8-40a0-8831-50d92f59808c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.117097554+00:00 stderr F I1212 16:15:24.117023 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[network.operator.openshift.io/v1/OperatorPKI, namespace: openshift-network-node-identity, name: network-node-identity, uid: 19863168-4684-4a75-87e9-a586be776b3a]" virtual=false 2025-12-12T16:15:24.119927722+00:00 stderr F I1212 16:15:24.119851 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-controller-manager, name: kube-controller-manager, uid: 43eede0a-d0aa-4d52-9cba-d673fe0fc344]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.119955803+00:00 stderr F I1212 16:15:24.119922 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[network.operator.openshift.io/v1/OperatorPKI, namespace: openshift-ovn-kubernetes, name: ovn, uid: f3ff9d5c-ea26-43b0-91ee-b403a4b4d4f6]" virtual=false 2025-12-12T16:15:24.123016446+00:00 stderr F I1212 16:15:24.122978 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-scheduler-operator, name: kube-scheduler-operator, uid: 1e85867d-2566-494d-83b0-620b526f122f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.123043647+00:00 stderr F I1212 16:15:24.123012 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[network.operator.openshift.io/v1/OperatorPKI, namespace: openshift-ovn-kubernetes, name: signer, uid: 491c5375-b7a4-4e86-8e7b-e538b36d6095]" virtual=false 2025-12-12T16:15:24.126802388+00:00 stderr F I1212 16:15:24.126704 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-kube-scheduler, name: kube-scheduler, uid: 0ef34820-7d38-4563-bcab-20b7d718ade2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.126802388+00:00 stderr F I1212 16:15:24.126736 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1/OperatorGroup, 
namespace: openshift-monitoring, name: openshift-cluster-monitoring, uid: 4ce70de4-7730-472f-aa41-55b320f6a48b]" virtual=false 2025-12-12T16:15:24.129676827+00:00 stderr F I1212 16:15:24.129608 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-machine-config-operator, name: machine-config-controller, uid: 743c6934-a727-4576-9532-c45126dd50bd]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.129676827+00:00 stderr F I1212 16:15:24.129649 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1/OperatorGroup, namespace: openshift-operator-lifecycle-manager, name: olm-operators, uid: 1332ecfd-3d6a-4222-b9b5-6e6e389f06df]" virtual=false 2025-12-12T16:15:24.133486529+00:00 stderr F I1212 16:15:24.133434 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-machine-config-operator, name: machine-config-daemon, uid: b8f2912e-1679-4233-b796-af02cbe7e18b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.133511539+00:00 stderr F I1212 16:15:24.133481 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operators.coreos.com/v1/OperatorGroup, namespace: openshift-operators, name: global-operators, uid: 83417554-904c-4254-8944-b91da7453b27]" virtual=false 2025-12-12T16:15:24.135970748+00:00 stderr F I1212 16:15:24.135917 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-machine-config-operator, name: machine-config-operator, uid: de7baa5b-d04a-4e5e-9669-bf620c8a04d1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.136020930+00:00 stderr F I1212 16:15:24.135971 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operator.openshift.io/v1/ServiceCA, namespace: , name: cluster, uid: 8c3e435e-8a77-447a-9c6f-30c03403a965]" virtual=false 2025-12-12T16:15:24.143653304+00:00 stderr F I1212 16:15:24.143605 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-marketplace, name: marketplace-operator, uid: dca395b3-cf6a-4fc7-92bc-15c290009884]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.143672654+00:00 stderr F I1212 16:15:24.143653 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/Build, namespace: , name: cluster, uid: cdc5477c-3d6d-4864-8f4a-03135b91dad6]" virtual=false 2025-12-12T16:15:24.146825660+00:00 stderr F I1212 16:15:24.146733 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" 
logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-multus, name: monitor-multus-admission-controller, uid: 01fa16c6-f969-42c1-b69a-43de0deff522]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:24.146849931+00:00 stderr F I1212 16:15:24.146810 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/Node, namespace: , name: cluster, uid: 928c64ea-9802-47a5-adcd-e612d697b9f2]" virtual=false 2025-12-12T16:15:24.153667655+00:00 stderr F I1212 16:15:24.153576 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-multus, name: monitor-network, uid: 9f83a4ef-cd08-40b7-a5a3-f1f39610e4bf]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:24.153667655+00:00 stderr F I1212 16:15:24.153642 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/Console, namespace: , name: cluster, uid: 959ffad2-ed2e-4182-a276-5e4480abd21d]" virtual=false 2025-12-12T16:15:24.156789080+00:00 stderr F I1212 16:15:24.156723 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-network-diagnostics, name: network-check-source, uid: a77ddccd-3bdf-40da-b44d-39f9ccca28bb]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:24.156789080+00:00 stderr F I1212 16:15:24.156758 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/APIServer, namespace: , name: cluster, uid: 882b2cd7-b850-48f6-bf8e-574ec4a78c00]" virtual=false 2025-12-12T16:15:24.208611258+00:00 stderr F I1212 16:15:24.208502 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-network-operator, name: network-operator, uid: 738c1721-593f-4f60-a567-4597ff37ea6a]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.208681860+00:00 stderr F I1212 16:15:24.208600 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: authentication, uid: 7dc16214-e534-4a1d-9a2f-f22d8c3895bc]" virtual=false 2025-12-12T16:15:24.217209836+00:00 stderr F I1212 16:15:24.217114 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-oauth-apiserver, name: openshift-oauth-apiserver, uid: 668f8445-4e0b-4306-a0c6-9208bf76efb5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:15:24.217237366+00:00 stderr F I1212 16:15:24.217213 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: config-operator, uid: 7f7c14e4-1cd2-431b-8476-7e6aa38d34fe]" virtual=false 2025-12-12T16:15:24.227223497+00:00 stderr F I1212 16:15:24.227131 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-operator-lifecycle-manager, name: catalog-operator, uid: 308b0b19-f7aa-40a2-b0b9-10a5d2b356f8]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.227245347+00:00 stderr F I1212 16:15:24.227220 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: console, uid: 08a05eba-8652-4940-9365-0912d8b13a45]" virtual=false 2025-12-12T16:15:24.230382743+00:00 stderr F I1212 16:15:24.230324 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-operator-lifecycle-manager, name: olm-operator, uid: cfa008c8-2c0c-470c-bc32-f95c0e394dd5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.230382743+00:00 stderr F I1212 16:15:24.230358 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: control-plane-machine-set, uid: 59e94623-ff61-4962-b425-6674d473cf2f]" virtual=false 2025-12-12T16:15:24.232587666+00:00 stderr F I1212 16:15:24.232545 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-operator-lifecycle-manager, name: package-server-manager-metrics, uid: 927e38ed-2fe0-4faa-8d67-18f898398255]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.232607057+00:00 stderr F I1212 16:15:24.232573 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: dns, uid: 255953b7-05dd-4705-b105-acf1c84c709c]" virtual=false 2025-12-12T16:15:24.236022659+00:00 stderr F I1212 16:15:24.235982 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-ovn-kubernetes, name: monitor-ovn-control-plane-metrics, uid: e8d49b63-d110-45e5-a2ec-20435c71bd60]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:24.236022659+00:00 stderr F I1212 16:15:24.236009 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: etcd, uid: e3a0e56a-d54b-4c58-957e-76808ef33cd7]" virtual=false 
2025-12-12T16:15:24.239222686+00:00 stderr F I1212 16:15:24.239165 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-ovn-kubernetes, name: monitor-ovn-node, uid: 992d1159-1bbf-4ff7-adf1-580c362b690a]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:24.239241966+00:00 stderr F I1212 16:15:24.239213 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: image-registry, uid: 8ba3c18e-0bb5-491c-b336-6bab8985398d]" virtual=false 2025-12-12T16:15:24.243134270+00:00 stderr F I1212 16:15:24.243016 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-route-controller-manager, name: openshift-route-controller-manager, uid: 6c85dcfe-44cb-4596-b7d4-b6e79a18159e]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.243134270+00:00 stderr F I1212 16:15:24.243081 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: ingress, uid: 5545d2ad-114a-499b-b9e4-10ee96f0e338]" virtual=false 2025-12-12T16:15:24.247156307+00:00 stderr F I1212 16:15:24.247100 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[monitoring.coreos.com/v1/ServiceMonitor, namespace: openshift-service-ca-operator, name: service-ca-operator, uid: 4979bb5a-bf15-43a9-9eee-231d52574ca5]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.247156307+00:00 stderr F I1212 16:15:24.247128 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: kube-apiserver, uid: 95450502-d35e-4f72-8795-7e6d9769b769]" virtual=false 2025-12-12T16:15:24.249456792+00:00 stderr F I1212 16:15:24.249394 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[network.operator.openshift.io/v1/OperatorPKI, namespace: openshift-network-node-identity, name: network-node-identity, uid: 19863168-4684-4a75-87e9-a586be776b3a]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:24.249456792+00:00 stderr F I1212 16:15:24.249424 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: kube-controller-manager, uid: 2f33262d-9747-4594-b7bb-9f107cbfff3c]" virtual=false 2025-12-12T16:15:24.253710485+00:00 stderr F I1212 16:15:24.253673 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[network.operator.openshift.io/v1/OperatorPKI, namespace: openshift-ovn-kubernetes, 
name: ovn, uid: f3ff9d5c-ea26-43b0-91ee-b403a4b4d4f6]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:24.253710485+00:00 stderr F I1212 16:15:24.253698 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: kube-scheduler, uid: 9a4bf865-e03d-4408-aab2-5483238a3711]" virtual=false 2025-12-12T16:15:24.256422320+00:00 stderr F I1212 16:15:24.256372 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[network.operator.openshift.io/v1/OperatorPKI, namespace: openshift-ovn-kubernetes, name: signer, uid: 491c5375-b7a4-4e86-8e7b-e538b36d6095]" owner=[{"apiVersion":"operator.openshift.io/v1","kind":"Network","name":"cluster","uid":"d56acc66-d25c-4e5c-aa52-5418dd270c94","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:24.256422320+00:00 stderr F I1212 16:15:24.256407 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: kube-storage-version-migrator, uid: 1b6db384-ac98-490b-9b8b-182f03d896c1]" virtual=false 2025-12-12T16:15:24.260990690+00:00 stderr F I1212 16:15:24.260916 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[operators.coreos.com/v1/OperatorGroup, namespace: openshift-monitoring, name: openshift-cluster-monitoring, uid: 4ce70de4-7730-472f-aa41-55b320f6a48b]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.261070192+00:00 stderr F I1212 16:15:24.261042 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: machine-api, uid: 494cf6d0-1fe8-4418-be09-e81ba6a1f11d]" virtual=false 2025-12-12T16:15:24.264153256+00:00 stderr F I1212 16:15:24.264092 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[operators.coreos.com/v1/OperatorGroup, namespace: openshift-operator-lifecycle-manager, name: olm-operators, uid: 1332ecfd-3d6a-4222-b9b5-6e6e389f06df]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.264248619+00:00 stderr F I1212 16:15:24.264227 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: machine-approver, uid: 5e71ff77-7325-49e4-9244-048aaf71abde]" virtual=false 2025-12-12T16:15:24.267352204+00:00 stderr F I1212 16:15:24.267275 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[operators.coreos.com/v1/OperatorGroup, namespace: openshift-operators, name: global-operators, uid: 83417554-904c-4254-8944-b91da7453b27]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.267372064+00:00 stderr F I1212 16:15:24.267343 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: machine-config, uid: 7f2fd96d-8d64-472c-934f-96c0625ce7a9]" virtual=false 2025-12-12T16:15:24.270105100+00:00 stderr F I1212 16:15:24.270046 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[operator.openshift.io/v1/ServiceCA, namespace: , name: cluster, uid: 8c3e435e-8a77-447a-9c6f-30c03403a965]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:24.270142901+00:00 stderr F I1212 16:15:24.270094 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: marketplace, uid: 42b7cfcf-3023-4592-8c82-a96a5a782521]" virtual=false 2025-12-12T16:15:24.276624547+00:00 stderr F I1212 16:15:24.276560 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/Build, namespace: , name: cluster, uid: cdc5477c-3d6d-4864-8f4a-03135b91dad6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:24.276705469+00:00 stderr F I1212 16:15:24.276682 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: network, uid: afe01b73-2eb4-4fed-9342-d17065d3d5d7]" virtual=false 2025-12-12T16:15:24.279624099+00:00 stderr F I1212 16:15:24.279597 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/Node, namespace: , name: cluster, uid: 928c64ea-9802-47a5-adcd-e612d697b9f2]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:24.279674360+00:00 stderr F I1212 16:15:24.279655 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: openshift-apiserver, uid: 0c19c0e3-e9c3-4dda-8285-b174ff57dd10]" virtual=false 2025-12-12T16:15:24.285919261+00:00 stderr F I1212 16:15:24.285894 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/Console, namespace: , name: cluster, uid: 959ffad2-ed2e-4182-a276-5e4480abd21d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:24.285970462+00:00 stderr F I1212 16:15:24.285953 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: openshift-controller-manager, uid: 30c5f67d-c735-459e-973b-a827d6b0ca6d]" virtual=false 2025-12-12T16:15:24.289308443+00:00 stderr F I1212 16:15:24.289285 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/APIServer, namespace: , name: cluster, uid: 882b2cd7-b850-48f6-bf8e-574ec4a78c00]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:24.289352574+00:00 stderr F I1212 16:15:24.289337 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: openshift-samples, uid: 524189a7-0a5d-4a7d-a076-3f3f73dd61a7]" virtual=false 2025-12-12T16:15:24.340781193+00:00 stderr F I1212 16:15:24.340641 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: authentication, uid: 7dc16214-e534-4a1d-9a2f-f22d8c3895bc]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.340781193+00:00 stderr F I1212 16:15:24.340748 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: operator-lifecycle-manager, uid: 416ead29-75c4-4df9-9621-258c47786f8d]" virtual=false 2025-12-12T16:15:24.349218636+00:00 stderr F I1212 16:15:24.349068 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: config-operator, uid: 7f7c14e4-1cd2-431b-8476-7e6aa38d34fe]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.349218636+00:00 stderr F I1212 16:15:24.349133 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: operator-lifecycle-manager-catalog, uid: f4c95062-09d7-4270-a8c5-197d4b5f5ea6]" virtual=false 2025-12-12T16:15:24.359529794+00:00 stderr F I1212 16:15:24.359457 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: console, uid: 08a05eba-8652-4940-9365-0912d8b13a45]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.359565955+00:00 stderr F I1212 16:15:24.359517 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: operator-lifecycle-manager-packageserver, uid: 984ebfe9-a604-4cf7-9d48-3ae35d282e2f]" virtual=false 2025-12-12T16:15:24.363595862+00:00 stderr F I1212 16:15:24.363537 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: control-plane-machine-set, uid: 59e94623-ff61-4962-b425-6674d473cf2f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.363620573+00:00 stderr F I1212 16:15:24.363589 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: service-ca, uid: f045881d-e488-4f98-822c-9a64abc33c75]" virtual=false 
2025-12-12T16:15:24.368357047+00:00 stderr F I1212 16:15:24.368201 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: dns, uid: 255953b7-05dd-4705-b105-acf1c84c709c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.368357047+00:00 stderr F I1212 16:15:24.368251 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[operator.openshift.io/v1/Etcd, namespace: , name: cluster, uid: 09d6dae5-8fed-46ea-a8ad-e7ea14c89fdb]" virtual=false 2025-12-12T16:15:24.370694673+00:00 stderr F I1212 16:15:24.370637 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: etcd, uid: e3a0e56a-d54b-4c58-957e-76808ef33cd7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.370716554+00:00 stderr F I1212 16:15:24.370692 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[machineconfiguration.openshift.io/v1/ControllerConfig, namespace: , name: machine-config-controller, uid: 0c31e9f3-9fc0-4142-8c45-241a8fea6e62]" virtual=false 2025-12-12T16:15:24.373712516+00:00 stderr F I1212 16:15:24.373668 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: image-registry, uid: 8ba3c18e-0bb5-491c-b336-6bab8985398d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.373766607+00:00 stderr F I1212 16:15:24.373707 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[config.openshift.io/v1/Image, namespace: , name: cluster, uid: 1f572312-825c-4dcf-8f7b-48c596dc9647]" virtual=false 2025-12-12T16:15:24.377275482+00:00 stderr F I1212 16:15:24.377201 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: ingress, uid: 5545d2ad-114a-499b-b9e4-10ee96f0e338]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.377296462+00:00 stderr F I1212 16:15:24.377263 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[machineconfiguration.openshift.io/v1/MachineConfig, namespace: , name: rendered-master-d582710c680b4cd4536e11249c7e09e9, uid: 5a77d6ee-756c-44f9-bee1-99f929fbc603]" virtual=false 2025-12-12T16:15:24.379851414+00:00 stderr F I1212 16:15:24.379802 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: kube-apiserver, uid: 95450502-d35e-4f72-8795-7e6d9769b769]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 
2025-12-12T16:15:24.379851414+00:00 stderr F I1212 16:15:24.379828 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[machineconfiguration.openshift.io/v1/MachineConfig, namespace: , name: rendered-worker-483f6cb3f24d009adc54907af4f9813a, uid: 2d775aa7-1e02-45b0-ab6d-7673428e7703]" virtual=false 2025-12-12T16:15:24.382617941+00:00 stderr F I1212 16:15:24.382558 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: kube-controller-manager, uid: 2f33262d-9747-4594-b7bb-9f107cbfff3c]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.386382931+00:00 stderr F I1212 16:15:24.386313 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: kube-scheduler, uid: 9a4bf865-e03d-4408-aab2-5483238a3711]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.390797038+00:00 stderr F I1212 16:15:24.390718 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: kube-storage-version-migrator, uid: 1b6db384-ac98-490b-9b8b-182f03d896c1]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.393496313+00:00 stderr F I1212 16:15:24.393433 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: machine-api, uid: 494cf6d0-1fe8-4418-be09-e81ba6a1f11d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.396561986+00:00 stderr F I1212 16:15:24.396492 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: machine-approver, uid: 5e71ff77-7325-49e4-9244-048aaf71abde]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.400481881+00:00 stderr F I1212 16:15:24.400414 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: machine-config, uid: 7f2fd96d-8d64-472c-934f-96c0625ce7a9]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.403811761+00:00 stderr F I1212 16:15:24.403730 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: marketplace, uid: 42b7cfcf-3023-4592-8c82-a96a5a782521]" 
owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.409666342+00:00 stderr F I1212 16:15:24.409582 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: network, uid: afe01b73-2eb4-4fed-9342-d17065d3d5d7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.414208342+00:00 stderr F I1212 16:15:24.414106 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: openshift-apiserver, uid: 0c19c0e3-e9c3-4dda-8285-b174ff57dd10]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.419632422+00:00 stderr F I1212 16:15:24.419574 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: openshift-controller-manager, uid: 30c5f67d-c735-459e-973b-a827d6b0ca6d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.424416718+00:00 stderr F I1212 16:15:24.424355 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: openshift-samples, uid: 524189a7-0a5d-4a7d-a076-3f3f73dd61a7]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.437670537+00:00 stderr F I1212 16:15:24.437594 1 garbagecollector.go:548] "item doesn't have an owner, continue on next item" logger="garbage-collector-controller" item="[machineconfiguration.openshift.io/v1/ControllerConfig, namespace: , name: machine-config-controller, uid: 0c31e9f3-9fc0-4142-8c45-241a8fea6e62]" 2025-12-12T16:15:24.450095966+00:00 stderr F I1212 16:15:24.449968 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: operator-lifecycle-manager, uid: 416ead29-75c4-4df9-9621-258c47786f8d]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.453195681+00:00 stderr F I1212 16:15:24.453126 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: operator-lifecycle-manager-catalog, uid: f4c95062-09d7-4270-a8c5-197d4b5f5ea6]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.457872113+00:00 stderr F I1212 16:15:24.457774 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" 
logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: operator-lifecycle-manager-packageserver, uid: 984ebfe9-a604-4cf7-9d48-3ae35d282e2f]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.459980984+00:00 stderr F I1212 16:15:24.459904 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/ClusterOperator, namespace: , name: service-ca, uid: f045881d-e488-4f98-822c-9a64abc33c75]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503","controller":true}] 2025-12-12T16:15:24.462304670+00:00 stderr F I1212 16:15:24.462220 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[operator.openshift.io/v1/Etcd, namespace: , name: cluster, uid: 09d6dae5-8fed-46ea-a8ad-e7ea14c89fdb]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:24.467076495+00:00 stderr F I1212 16:15:24.467017 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[config.openshift.io/v1/Image, namespace: , name: cluster, uid: 1f572312-825c-4dcf-8f7b-48c596dc9647]" owner=[{"apiVersion":"config.openshift.io/v1","kind":"ClusterVersion","name":"version","uid":"81a42ceb-dbf9-4bf3-9fcd-7b6d697f2503"}] 2025-12-12T16:15:24.469397061+00:00 stderr F I1212 16:15:24.469316 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[machineconfiguration.openshift.io/v1/MachineConfig, namespace: , name: rendered-master-d582710c680b4cd4536e11249c7e09e9, uid: 5a77d6ee-756c-44f9-bee1-99f929fbc603]" owner=[{"apiVersion":"machineconfiguration.openshift.io/v1","kind":"MachineConfigPool","name":"master","uid":"3b9df6d6-bacd-4862-b99f-10ec7fcf29ac","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:24.476922272+00:00 stderr F I1212 16:15:24.476837 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[machineconfiguration.openshift.io/v1/MachineConfig, namespace: , name: rendered-worker-483f6cb3f24d009adc54907af4f9813a, uid: 2d775aa7-1e02-45b0-ab6d-7673428e7703]" owner=[{"apiVersion":"machineconfiguration.openshift.io/v1","kind":"MachineConfigPool","name":"worker","uid":"633fcfae-03e0-4a3a-8d5c-de9a658e82f6","controller":true,"blockOwnerDeletion":true}] 2025-12-12T16:15:27.344430864+00:00 stderr F I1212 16:15:27.344345 1 node_lifecycle_controller.go:791] "Node is unresponsive. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:15:27.344324742 +0000 UTC m=+25.902045221" 2025-12-12T16:15:32.069307702+00:00 stderr F I1212 16:15:32.069225 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:15:32.344723237+00:00 stderr F I1212 16:15:32.344589 1 node_lifecycle_controller.go:791] "Node is unresponsive. 
Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:15:32.344572323 +0000 UTC m=+30.902292792" 2025-12-12T16:15:33.522496691+00:00 stderr F I1212 16:15:33.522365 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:15:37.345680905+00:00 stderr F I1212 16:15:37.345504 1 node_lifecycle_controller.go:791] "Node is unresponsive. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:15:37.34547149 +0000 UTC m=+35.903191959" 2025-12-12T16:15:42.346517868+00:00 stderr F I1212 16:15:42.346387 1 node_lifecycle_controller.go:791] "Node is unresponsive. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:15:42.346366235 +0000 UTC m=+40.904086704" 2025-12-12T16:15:47.347562657+00:00 stderr F I1212 16:15:47.347460 1 node_lifecycle_controller.go:791] "Node is unresponsive. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:15:47.347442444 +0000 UTC m=+45.905162913" 2025-12-12T16:15:48.259630010+00:00 stderr F E1212 16:15:48.259445 1 resource_quota_controller.go:446] "Unhandled Error" err="unable to retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, oauth.openshift.io/v1: stale GroupVersion discovery: oauth.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1, user.openshift.io/v1: stale GroupVersion discovery: user.openshift.io/v1" logger="UnhandledError" 2025-12-12T16:15:49.442436644+00:00 stderr F I1212 16:15:49.442270 1 garbagecollector.go:787] "failed to discover some groups" logger="garbage-collector-controller" groups="map[\"apps.openshift.io/v1\":\"stale GroupVersion discovery: apps.openshift.io/v1\" \"authorization.openshift.io/v1\":\"stale GroupVersion discovery: authorization.openshift.io/v1\" \"build.openshift.io/v1\":\"stale GroupVersion discovery: build.openshift.io/v1\" \"image.openshift.io/v1\":\"stale GroupVersion discovery: image.openshift.io/v1\" \"oauth.openshift.io/v1\":\"stale GroupVersion discovery: oauth.openshift.io/v1\" \"packages.operators.coreos.com/v1\":\"stale GroupVersion discovery: packages.operators.coreos.com/v1\" \"project.openshift.io/v1\":\"stale GroupVersion discovery: project.openshift.io/v1\" \"quota.openshift.io/v1\":\"stale GroupVersion discovery: quota.openshift.io/v1\" \"route.openshift.io/v1\":\"stale GroupVersion discovery: route.openshift.io/v1\" \"security.openshift.io/v1\":\"stale GroupVersion discovery: security.openshift.io/v1\" \"template.openshift.io/v1\":\"stale GroupVersion discovery: template.openshift.io/v1\" \"user.openshift.io/v1\":\"stale GroupVersion discovery: user.openshift.io/v1\"]" 2025-12-12T16:15:52.348607597+00:00 stderr F 
I1212 16:15:52.348491 1 node_lifecycle_controller.go:791] "Node is unresponsive. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:15:52.348472723 +0000 UTC m=+50.906193192" 2025-12-12T16:15:57.349139627+00:00 stderr F I1212 16:15:57.349025 1 node_lifecycle_controller.go:791] "Node is unresponsive. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:15:57.348996584 +0000 UTC m=+55.906717063" 2025-12-12T16:16:02.350590630+00:00 stderr F I1212 16:16:02.350443 1 node_lifecycle_controller.go:791] "Node is unresponsive. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:16:02.350413326 +0000 UTC m=+60.908133805" 2025-12-12T16:16:07.351875607+00:00 stderr F I1212 16:16:07.351686 1 node_lifecycle_controller.go:952] "Node hasn't been updated" logger="node-lifecycle-controller" node="crc" duration="50.008469368s" nodeConditionType="Ready" currentCondition="&NodeCondition{Type:Ready,Status:Unknown,LastHeartbeatTime:2025-11-03 09:39:35 +0000 UTC,LastTransitionTime:2025-11-03 09:40:44 +0000 UTC,Reason:NodeStatusUnknown,Message:Kubelet stopped posting node status.,}" 2025-12-12T16:16:07.351875607+00:00 stderr F I1212 16:16:07.351812 1 node_lifecycle_controller.go:952] "Node hasn't been updated" logger="node-lifecycle-controller" node="crc" duration="50.008608011s" nodeConditionType="MemoryPressure" currentCondition="&NodeCondition{Type:MemoryPressure,Status:Unknown,LastHeartbeatTime:2025-11-03 09:39:35 +0000 UTC,LastTransitionTime:2025-11-03 09:40:44 +0000 UTC,Reason:NodeStatusUnknown,Message:Kubelet stopped posting node status.,}" 2025-12-12T16:16:07.351875607+00:00 stderr F I1212 16:16:07.351842 1 node_lifecycle_controller.go:952] "Node hasn't been updated" logger="node-lifecycle-controller" node="crc" duration="50.008637852s" nodeConditionType="DiskPressure" currentCondition="&NodeCondition{Type:DiskPressure,Status:Unknown,LastHeartbeatTime:2025-11-03 09:39:35 +0000 UTC,LastTransitionTime:2025-11-03 09:40:44 +0000 UTC,Reason:NodeStatusUnknown,Message:Kubelet stopped posting node status.,}" 2025-12-12T16:16:07.351962839+00:00 stderr F I1212 16:16:07.351861 1 node_lifecycle_controller.go:952] "Node hasn't been updated" logger="node-lifecycle-controller" node="crc" duration="50.008661733s" nodeConditionType="PIDPressure" currentCondition="&NodeCondition{Type:PIDPressure,Status:Unknown,LastHeartbeatTime:2025-11-03 09:39:35 +0000 UTC,LastTransitionTime:2025-11-03 09:40:44 +0000 UTC,Reason:NodeStatusUnknown,Message:Kubelet stopped posting node status.,}" 2025-12-12T16:16:07.352030961+00:00 stderr F I1212 16:16:07.351993 1 node_lifecycle_controller.go:791] "Node is unresponsive. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:16:07.351979749 +0000 UTC m=+65.909700218" 2025-12-12T16:16:12.352743129+00:00 stderr F I1212 16:16:12.352661 1 node_lifecycle_controller.go:791] "Node is unresponsive. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:16:12.352641567 +0000 UTC m=+70.910362036" 2025-12-12T16:16:17.353925109+00:00 stderr F I1212 16:16:17.353805 1 node_lifecycle_controller.go:791] "Node is unresponsive. 
Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:16:17.353777825 +0000 UTC m=+75.911498294" 2025-12-12T16:16:18.269002850+00:00 stderr F E1212 16:16:18.268893 1 resource_quota_controller.go:446] "Unhandled Error" err="unable to retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, oauth.openshift.io/v1: stale GroupVersion discovery: oauth.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1, user.openshift.io/v1: stale GroupVersion discovery: user.openshift.io/v1" logger="UnhandledError" 2025-12-12T16:16:19.460243813+00:00 stderr F I1212 16:16:19.460116 1 garbagecollector.go:787] "failed to discover some groups" logger="garbage-collector-controller" groups="map[\"apps.openshift.io/v1\":\"stale GroupVersion discovery: apps.openshift.io/v1\" \"authorization.openshift.io/v1\":\"stale GroupVersion discovery: authorization.openshift.io/v1\" \"build.openshift.io/v1\":\"stale GroupVersion discovery: build.openshift.io/v1\" \"image.openshift.io/v1\":\"stale GroupVersion discovery: image.openshift.io/v1\" \"oauth.openshift.io/v1\":\"stale GroupVersion discovery: oauth.openshift.io/v1\" \"packages.operators.coreos.com/v1\":\"stale GroupVersion discovery: packages.operators.coreos.com/v1\" \"project.openshift.io/v1\":\"stale GroupVersion discovery: project.openshift.io/v1\" \"quota.openshift.io/v1\":\"stale GroupVersion discovery: quota.openshift.io/v1\" \"route.openshift.io/v1\":\"stale GroupVersion discovery: route.openshift.io/v1\" \"security.openshift.io/v1\":\"stale GroupVersion discovery: security.openshift.io/v1\" \"template.openshift.io/v1\":\"stale GroupVersion discovery: template.openshift.io/v1\" \"user.openshift.io/v1\":\"stale GroupVersion discovery: user.openshift.io/v1\"]" 2025-12-12T16:16:21.629083093+00:00 stderr F I1212 16:16:21.628729 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:21.645925435+00:00 stderr F I1212 16:16:21.645796 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:21.744011619+00:00 stderr F I1212 16:16:21.743510 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:21.844529103+00:00 stderr F I1212 16:16:21.844439 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:21.947223671+00:00 stderr F I1212 16:16:21.947040 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:22.354373901+00:00 stderr F I1212 16:16:22.354243 1 node_lifecycle_controller.go:791] "Node is unresponsive. 
Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:16:22.354221858 +0000 UTC m=+80.911942337" 2025-12-12T16:16:23.955213585+00:00 stderr F I1212 16:16:23.954734 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-ovn-kubernetes/ovnkube-control-plane" err="Operation cannot be fulfilled on replicasets.apps \"ovnkube-control-plane-57b78d8988\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:16:27.355343547+00:00 stderr F I1212 16:16:27.355196 1 node_lifecycle_controller.go:791] "Node is unresponsive. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:16:27.355159882 +0000 UTC m=+85.912880351" 2025-12-12T16:16:28.762030499+00:00 stderr F I1212 16:16:28.761938 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:32.356051584+00:00 stderr F I1212 16:16:32.355871 1 node_lifecycle_controller.go:781] "Node is NotReady. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:16:32.355852909 +0000 UTC m=+90.913573378" 2025-12-12T16:16:37.356474186+00:00 stderr F I1212 16:16:37.356339 1 node_lifecycle_controller.go:781] "Node is NotReady. Adding it to the Taint queue" logger="node-lifecycle-controller" node="crc" timeStamp="2025-12-12 16:16:37.356314542 +0000 UTC m=+95.914035011" 2025-12-12T16:16:39.328106711+00:00 stderr F I1212 16:16:39.328002 1 topologycache.go:253] "Insufficient node info for topology hints" logger="endpointslice-controller" totalZones=0 totalCPU="0" sufficientNodeInfo=true 2025-12-12T16:16:39.347930935+00:00 stderr F I1212 16:16:39.347843 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.349404041+00:00 stderr F I1212 16:16:39.349329 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.349423232+00:00 stderr F I1212 16:16:39.349401 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.349797621+00:00 stderr F I1212 16:16:39.349760 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.350117959+00:00 stderr F I1212 16:16:39.350066 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.350192771+00:00 stderr F I1212 16:16:39.350086 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.350308873+00:00 stderr F I1212 16:16:39.350272 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.357977021+00:00 stderr F I1212 16:16:39.357904 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.359893887+00:00 stderr F I1212 16:16:39.359843 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.359918148+00:00 stderr F I1212 16:16:39.359893 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 
2025-12-12T16:16:39.359990490+00:00 stderr F I1212 16:16:39.359942 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.367080053+00:00 stderr F I1212 16:16:39.366999 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.367431841+00:00 stderr F I1212 16:16:39.367367 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.374373131+00:00 stderr F I1212 16:16:39.374321 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.375026897+00:00 stderr F I1212 16:16:39.375007 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.375427697+00:00 stderr F I1212 16:16:39.375406 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:16:39.375496188+00:00 stderr F I1212 16:16:39.375484 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.375631522+00:00 stderr F I1212 16:16:39.375602 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.375675873+00:00 stderr F I1212 16:16:39.375665 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.375925329+00:00 stderr F I1212 16:16:39.375909 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.376106203+00:00 stderr F I1212 16:16:39.376091 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.376223466+00:00 stderr F I1212 16:16:39.376209 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.376668477+00:00 stderr F I1212 16:16:39.376651 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.376810470+00:00 stderr F I1212 16:16:39.376796 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.376957874+00:00 stderr F I1212 16:16:39.376943 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.377525688+00:00 stderr F I1212 16:16:39.377508 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.377752523+00:00 stderr F I1212 16:16:39.377737 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.377816045+00:00 stderr F I1212 16:16:39.377803 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.378059731+00:00 stderr F I1212 16:16:39.378044 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.378505532+00:00 stderr F I1212 16:16:39.378473 1 controller_utils.go:123] "Update ready status of pods on node" 
logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.380810608+00:00 stderr F I1212 16:16:39.380785 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.382644633+00:00 stderr F I1212 16:16:39.382553 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.382800977+00:00 stderr F I1212 16:16:39.382784 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.382983241+00:00 stderr F I1212 16:16:39.382947 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.383070113+00:00 stderr F I1212 16:16:39.383056 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.383103604+00:00 stderr F I1212 16:16:39.383060 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.383646617+00:00 stderr F I1212 16:16:39.383629 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.383907764+00:00 stderr F I1212 16:16:39.383891 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.384421976+00:00 stderr F I1212 16:16:39.384404 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.391622992+00:00 stderr F I1212 16:16:39.390338 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.391622992+00:00 stderr F I1212 16:16:39.390338 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.407937410+00:00 stderr F I1212 16:16:39.407885 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.436988310+00:00 stderr F I1212 16:16:39.434672 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:16:39.456216509+00:00 stderr F I1212 16:16:39.456126 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:39.457757497+00:00 stderr F I1212 16:16:39.457724 1 controller_utils.go:123] "Update ready status of pods on node" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:42.357829419+00:00 stderr F I1212 16:16:42.357729 1 node_lifecycle_controller.go:799] "Node is healthy again, removed all taints" logger="node-lifecycle-controller" node="crc" 2025-12-12T16:16:42.357865140+00:00 stderr F I1212 16:16:42.357805 1 node_lifecycle_controller.go:1044] "Controller detected that some Nodes are Ready. 
Exiting master disruption mode" logger="node-lifecycle-controller" 2025-12-12T16:16:43.610040431+00:00 stderr F I1212 16:16:43.609770 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:16:44.125561878+00:00 stderr F I1212 16:16:44.125478 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:16:46.157213718+00:00 stderr F I1212 16:16:46.155672 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-operator-lifecycle-manager/packageserver" err="Operation cannot be fulfilled on deployments.apps \"packageserver\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:16:46.471708847+00:00 stderr F I1212 16:16:46.468450 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-operator-lifecycle-manager/packageserver" err="Operation cannot be fulfilled on deployments.apps \"packageserver\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:16:46.680236698+00:00 stderr F I1212 16:16:46.675330 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:16:47.692262056+00:00 stderr F I1212 16:16:47.686721 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:16:48.284941506+00:00 stderr F E1212 16:16:48.282227 1 resource_quota_controller.go:446] "Unhandled Error" err="unable to retrieve the complete list of server APIs: apps.openshift.io/v1: stale GroupVersion discovery: apps.openshift.io/v1, authorization.openshift.io/v1: stale GroupVersion discovery: authorization.openshift.io/v1, build.openshift.io/v1: stale GroupVersion discovery: build.openshift.io/v1, image.openshift.io/v1: stale GroupVersion discovery: image.openshift.io/v1, packages.operators.coreos.com/v1: stale GroupVersion discovery: packages.operators.coreos.com/v1, project.openshift.io/v1: stale GroupVersion discovery: project.openshift.io/v1, quota.openshift.io/v1: stale GroupVersion discovery: quota.openshift.io/v1, route.openshift.io/v1: stale GroupVersion discovery: route.openshift.io/v1, security.openshift.io/v1: stale GroupVersion discovery: security.openshift.io/v1, template.openshift.io/v1: stale GroupVersion discovery: template.openshift.io/v1" logger="UnhandledError" 2025-12-12T16:16:48.882294660+00:00 stderr F I1212 16:16:48.881937 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[apps/v1/ControllerRevision, namespace: openshift-multus, name: cni-sysctl-allowlist-ds-5f86d75d74, uid: 75df8290-e715-427a-9ccf-3a2ab00987b5]" virtual=false 2025-12-12T16:16:48.882294660+00:00 stderr F I1212 16:16:48.882156 1 garbagecollector.go:501] "Processing item" logger="garbage-collector-controller" item="[v1/Pod, namespace: openshift-multus, name: cni-sysctl-allowlist-ds-q8kdt, uid: d943d968-b5e5-4d94-8fc7-8ba0013e5d76]" virtual=false 2025-12-12T16:16:48.910258503+00:00 stderr F I1212 16:16:48.909496 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[apps/v1/ControllerRevision, namespace: openshift-multus, name: 
cni-sysctl-allowlist-ds-5f86d75d74, uid: 75df8290-e715-427a-9ccf-3a2ab00987b5]" propagationPolicy="Background" 2025-12-12T16:16:48.910258503+00:00 stderr F I1212 16:16:48.909621 1 garbagecollector.go:640] "Deleting item" logger="garbage-collector-controller" item="[v1/Pod, namespace: openshift-multus, name: cni-sysctl-allowlist-ds-q8kdt, uid: d943d968-b5e5-4d94-8fc7-8ba0013e5d76]" propagationPolicy="Background" 2025-12-12T16:16:49.495489660+00:00 stderr F I1212 16:16:49.493940 1 garbagecollector.go:787] "failed to discover some groups" logger="garbage-collector-controller" groups="map[\"apps.openshift.io/v1\":\"stale GroupVersion discovery: apps.openshift.io/v1\" \"authorization.openshift.io/v1\":\"stale GroupVersion discovery: authorization.openshift.io/v1\" \"build.openshift.io/v1\":\"stale GroupVersion discovery: build.openshift.io/v1\" \"image.openshift.io/v1\":\"stale GroupVersion discovery: image.openshift.io/v1\" \"project.openshift.io/v1\":\"stale GroupVersion discovery: project.openshift.io/v1\" \"quota.openshift.io/v1\":\"stale GroupVersion discovery: quota.openshift.io/v1\" \"route.openshift.io/v1\":\"stale GroupVersion discovery: route.openshift.io/v1\" \"security.openshift.io/v1\":\"stale GroupVersion discovery: security.openshift.io/v1\" \"template.openshift.io/v1\":\"stale GroupVersion discovery: template.openshift.io/v1\"]" 2025-12-12T16:16:49.495489660+00:00 stderr F I1212 16:16:49.494222 1 garbagecollector.go:203] "syncing garbage collector with updated resources from discovery" logger="garbage-collector-controller" diff="added: [oauth.openshift.io/v1, Resource=oauthaccesstokens oauth.openshift.io/v1, Resource=oauthauthorizetokens oauth.openshift.io/v1, Resource=oauthclientauthorizations oauth.openshift.io/v1, Resource=oauthclients oauth.openshift.io/v1, Resource=useroauthaccesstokens user.openshift.io/v1, Resource=groups user.openshift.io/v1, Resource=identities user.openshift.io/v1, Resource=users], removed: []" 2025-12-12T16:16:49.511043009+00:00 stderr F I1212 16:16:49.510986 1 shared_informer.go:350] "Waiting for caches to sync" controller="garbage collector" 2025-12-12T16:16:49.521981166+00:00 stderr F I1212 16:16:49.521918 1 reflector.go:430] "Caches populated" type="*v1.UserOAuthAccessToken" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/oauth/informers/externalversions/factory.go:125" 2025-12-12T16:16:49.612244860+00:00 stderr F I1212 16:16:49.612151 1 shared_informer.go:357] "Caches are synced" controller="garbage collector" 2025-12-12T16:16:49.612244860+00:00 stderr F I1212 16:16:49.612216 1 garbagecollector.go:235] "synced garbage collector" logger="garbage-collector-controller" 2025-12-12T16:16:51.760436957+00:00 stderr F I1212 16:16:51.760348 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:16:52.777218281+00:00 stderr F I1212 16:16:52.774493 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:16:53.213006260+00:00 stderr F I1212 16:16:53.212929 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:16:53.787497666+00:00 stderr F I1212 16:16:53.787418 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 
2025-12-12T16:16:53.796736831+00:00 stderr F I1212 16:16:53.796666 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29425935" delay="1s" 2025-12-12T16:16:53.809891352+00:00 stderr F I1212 16:16:53.809831 1 job_controller.go:604] "enqueueing job" logger="job-controller" key="openshift-operator-lifecycle-manager/collect-profiles-29369340" delay="0s" 2025-12-12T16:17:00.751084026+00:00 stderr F I1212 16:17:00.751001 1 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:17:00.751298401+00:00 stderr F I1212 16:17:00.751241 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:00.75122675 +0000 UTC))" 2025-12-12T16:17:00.751298401+00:00 stderr F I1212 16:17:00.751275 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:00.751266551 +0000 UTC))" 2025-12-12T16:17:00.751311892+00:00 stderr F I1212 16:17:00.751294 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:00.751288391 +0000 UTC))" 2025-12-12T16:17:00.751354053+00:00 stderr F I1212 16:17:00.751316 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:00.751308342 +0000 UTC))" 2025-12-12T16:17:00.751354053+00:00 stderr F I1212 16:17:00.751339 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:00.751333012 +0000 UTC))" 2025-12-12T16:17:00.751384673+00:00 stderr F I1212 16:17:00.751359 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-control-plane-signer\" 
[] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:00.751351383 +0000 UTC))" 2025-12-12T16:17:00.751402074+00:00 stderr F I1212 16:17:00.751381 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:00.751375403 +0000 UTC))" 2025-12-12T16:17:00.751423844+00:00 stderr F I1212 16:17:00.751398 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:00.751393724 +0000 UTC))" 2025-12-12T16:17:00.751431275+00:00 stderr F I1212 16:17:00.751416 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:00.751410094 +0000 UTC))" 2025-12-12T16:17:00.751468676+00:00 stderr F I1212 16:17:00.751436 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:00.751429815 +0000 UTC))" 2025-12-12T16:17:00.751708881+00:00 stderr F I1212 16:17:00.751671 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"kube-controller-manager.openshift-kube-controller-manager.svc\" [serving] validServingFor=[kube-controller-manager.openshift-kube-controller-manager.svc,kube-controller-manager.openshift-kube-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:17:00.75166037 +0000 UTC))" 2025-12-12T16:17:00.751889866+00:00 stderr F I1212 16:17:00.751861 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556102\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556102\" (2025-12-12 15:15:01 +0000 UTC to 2028-12-12 15:15:01 +0000 UTC (now=2025-12-12 16:17:00.751851505 +0000 UTC))" 2025-12-12T16:17:03.388079587+00:00 stderr F I1212 16:17:03.387692 1 replica_set.go:479] "pod will be enqueued after a while for availability check" logger="replicaset-controller" duration=30 kind="ReplicaSet" pod="openshift-ingress/router-default-68cf44c8b8-bqttx" 
2025-12-12T16:17:06.418422960+00:00 stderr F I1212 16:17:06.418329 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-65b6cccf98" need=0 deleting=1 2025-12-12T16:17:06.418422960+00:00 stderr F I1212 16:17:06.418385 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-65b6cccf98" relatedReplicaSets=["openshift-controller-manager/controller-manager-6ff9c7475c","openshift-controller-manager/controller-manager-86d99bb5b9","openshift-controller-manager/controller-manager-5d4c96c665","openshift-controller-manager/controller-manager-65b6cccf98","openshift-controller-manager/controller-manager-74bfd85b68","openshift-controller-manager/controller-manager-7c9cdb8ff5","openshift-controller-manager/controller-manager-86f48fd68b","openshift-controller-manager/controller-manager-9fd5cc475","openshift-controller-manager/controller-manager-c7d4b49f6","openshift-controller-manager/controller-manager-c84474957","openshift-controller-manager/controller-manager-58897fffb5","openshift-controller-manager/controller-manager-5f76cf6594"] 2025-12-12T16:17:06.418534163+00:00 stderr F I1212 16:17:06.418515 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="controller-manager-65b6cccf98" pod="openshift-controller-manager/controller-manager-65b6cccf98-flnsl" 2025-12-12T16:17:06.429635434+00:00 stderr F I1212 16:17:06.429539 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-776cdc94d6" need=0 deleting=1 2025-12-12T16:17:06.429754967+00:00 stderr F I1212 16:17:06.429726 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-776cdc94d6" relatedReplicaSets=["openshift-route-controller-manager/route-controller-manager-58f5cf7b86","openshift-route-controller-manager/route-controller-manager-5fccdd79b9","openshift-route-controller-manager/route-controller-manager-66bd94d94f","openshift-route-controller-manager/route-controller-manager-684bc95d64","openshift-route-controller-manager/route-controller-manager-76558c69dc","openshift-route-controller-manager/route-controller-manager-776cdc94d6","openshift-route-controller-manager/route-controller-manager-7d86df95df","openshift-route-controller-manager/route-controller-manager-6bc8749ddd","openshift-route-controller-manager/route-controller-manager-6d7f4ff85d","openshift-route-controller-manager/route-controller-manager-7cc45857b6","openshift-route-controller-manager/route-controller-manager-c47fcf799","openshift-route-controller-manager/route-controller-manager-6b749d96f6"] 2025-12-12T16:17:06.429948202+00:00 stderr F I1212 16:17:06.429927 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="route-controller-manager-776cdc94d6" pod="openshift-route-controller-manager/route-controller-manager-776cdc94d6-zksq4" 2025-12-12T16:17:06.441359930+00:00 stderr F I1212 16:17:06.438167 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-controller-manager/controller-manager" err="Operation cannot be fulfilled on deployments.apps \"controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:17:06.449218882+00:00 stderr F I1212 16:17:06.449108 1 
deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:17:06.458477378+00:00 stderr F I1212 16:17:06.458381 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-86d99bb5b9" need=1 creating=1 2025-12-12T16:17:06.460464467+00:00 stderr F I1212 16:17:06.460398 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-6b749d96f6" need=1 creating=1 2025-12-12T16:17:06.489320841+00:00 stderr F I1212 16:17:06.489238 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:17:18.305322089+00:00 stderr F I1212 16:17:18.305143 1 resource_quota_controller.go:476] "syncing resource quota controller with updated resources from discovery" logger="resourcequota-controller" diff="added: [apps.openshift.io/v1, Resource=deploymentconfigs authorization.openshift.io/v1, Resource=rolebindingrestrictions build.openshift.io/v1, Resource=buildconfigs build.openshift.io/v1, Resource=builds image.openshift.io/v1, Resource=imagestreams route.openshift.io/v1, Resource=routes template.openshift.io/v1, Resource=templateinstances template.openshift.io/v1, Resource=templates], removed: []" 2025-12-12T16:17:18.305591276+00:00 stderr F I1212 16:17:18.305534 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="templates.template.openshift.io" 2025-12-12T16:17:18.305685759+00:00 stderr F I1212 16:17:18.305639 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="imagestreams.image.openshift.io" 2025-12-12T16:17:18.305770692+00:00 stderr F I1212 16:17:18.305732 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="templateinstances.template.openshift.io" 2025-12-12T16:17:18.305862244+00:00 stderr F I1212 16:17:18.305827 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="routes.route.openshift.io" 2025-12-12T16:17:18.305919216+00:00 stderr F I1212 16:17:18.305892 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="builds.build.openshift.io" 2025-12-12T16:17:18.305981408+00:00 stderr F I1212 16:17:18.305954 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="buildconfigs.build.openshift.io" 2025-12-12T16:17:18.306081321+00:00 stderr F I1212 16:17:18.306053 1 resource_quota_monitor.go:227] "QuotaMonitor created object count evaluator" logger="resourcequota-controller" resource="rolebindingrestrictions.authorization.openshift.io" 2025-12-12T16:17:18.306140642+00:00 stderr F I1212 16:17:18.306113 1 resource_quota_monitor.go:227] "QuotaMonitor created object count 
evaluator" logger="resourcequota-controller" resource="deploymentconfigs.apps.openshift.io" 2025-12-12T16:17:18.311431314+00:00 stderr F I1212 16:17:18.311374 1 reflector.go:430] "Caches populated" type="*v1.RoleBindingRestriction" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/authorization/informers/externalversions/factory.go:125" 2025-12-12T16:17:18.320676519+00:00 stderr F I1212 16:17:18.320546 1 reflector.go:430] "Caches populated" type="*v1.DeploymentConfig" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/apps/informers/externalversions/factory.go:125" 2025-12-12T16:17:18.321957786+00:00 stderr F I1212 16:17:18.321893 1 reflector.go:430] "Caches populated" type="*v1.Route" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/route/informers/externalversions/factory.go:125" 2025-12-12T16:17:19.749087312+00:00 stderr F I1212 16:17:19.748963 1 reflector.go:430] "Caches populated" type="*v1.BuildConfig" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/build/informers/externalversions/factory.go:125" 2025-12-12T16:17:19.749087312+00:00 stderr F I1212 16:17:19.748992 1 shared_informer.go:350] "Waiting for caches to sync" controller="resource quota" 2025-12-12T16:17:19.749456213+00:00 stderr F I1212 16:17:19.749312 1 reflector.go:430] "Caches populated" type="*v1.Build" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/build/informers/externalversions/factory.go:125" 2025-12-12T16:17:19.764071832+00:00 stderr F I1212 16:17:19.763410 1 garbagecollector.go:203] "syncing garbage collector with updated resources from discovery" logger="garbage-collector-controller" diff="added: [apps.openshift.io/v1, Resource=deploymentconfigs authorization.openshift.io/v1, Resource=rolebindingrestrictions build.openshift.io/v1, Resource=buildconfigs build.openshift.io/v1, Resource=builds image.openshift.io/v1, Resource=images image.openshift.io/v1, Resource=imagestreams project.openshift.io/v1, Resource=projects quota.openshift.io/v1, Resource=clusterresourcequotas route.openshift.io/v1, Resource=routes security.openshift.io/v1, Resource=rangeallocations security.openshift.io/v1, Resource=securitycontextconstraints template.openshift.io/v1, Resource=brokertemplateinstances template.openshift.io/v1, Resource=templateinstances template.openshift.io/v1, Resource=templates], removed: []" 2025-12-12T16:17:19.764071832+00:00 stderr F I1212 16:17:19.763876 1 reflector.go:430] "Caches populated" type="*v1.TemplateInstance" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/template/informers/externalversions/factory.go:125" 2025-12-12T16:17:19.771252148+00:00 stderr F I1212 16:17:19.771147 1 reflector.go:430] "Caches populated" type="*v1.ImageStream" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/image/informers/externalversions/factory.go:125" 2025-12-12T16:17:19.776527339+00:00 stderr F I1212 16:17:19.776403 1 graph_builder.go:456] "item references an owner with coordinates that do not match the observed identity" logger="garbage-collector-controller" item="[route.openshift.io/v1/Route, namespace: openshift-ingress-canary, name: canary, uid: c77f72a8-1cd5-4d3c-9619-806f41a86efa]" owner="[apps/v1/DaemonSet, namespace: openshift-ingress-canary, name: ingress-canary, uid: 77896bcd-d1f7-46a2-984f-9205a544fb94]" 2025-12-12T16:17:19.776649843+00:00 stderr F I1212 16:17:19.776526 1 garbagecollector.go:501] "Processing item" 
logger="garbage-collector-controller" item="[route.openshift.io/v1/Route, namespace: openshift-ingress-canary, name: canary, uid: c77f72a8-1cd5-4d3c-9619-806f41a86efa]" virtual=false 2025-12-12T16:17:19.778080084+00:00 stderr F I1212 16:17:19.777723 1 shared_informer.go:350] "Waiting for caches to sync" controller="garbage collector" 2025-12-12T16:17:19.780639767+00:00 stderr F I1212 16:17:19.779423 1 reflector.go:430] "Caches populated" type="*v1.Template" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/template/informers/externalversions/factory.go:125" 2025-12-12T16:17:19.780639767+00:00 stderr F I1212 16:17:19.780584 1 reflector.go:430] "Caches populated" type="*v1.ClusterResourceQuota" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/quota/informers/externalversions/factory.go:125" 2025-12-12T16:17:19.782483390+00:00 stderr F I1212 16:17:19.782438 1 reflector.go:430] "Caches populated" type="*v1.RangeAllocation" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/security/informers/externalversions/factory.go:125" 2025-12-12T16:17:19.783246352+00:00 stderr F I1212 16:17:19.783190 1 reflector.go:430] "Caches populated" type="*v1.SecurityContextConstraints" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/security/informers/externalversions/factory.go:125" 2025-12-12T16:17:19.784523749+00:00 stderr F I1212 16:17:19.784354 1 reflector.go:430] "Caches populated" type="*v1.BrokerTemplateInstance" reflector="github.com/openshift/client-go@v0.0.0-20250710075018-396b36f983ee/template/informers/externalversions/factory.go:125" 2025-12-12T16:17:19.786035952+00:00 stderr F I1212 16:17:19.785939 1 garbagecollector.go:567] "item has at least one existing owner, will not garbage collect" logger="garbage-collector-controller" item="[route.openshift.io/v1/Route, namespace: openshift-ingress-canary, name: canary, uid: c77f72a8-1cd5-4d3c-9619-806f41a86efa]" owner=[{"apiVersion":"apps/v1","kind":"daemonset","name":"ingress-canary","uid":"77896bcd-d1f7-46a2-984f-9205a544fb94","controller":true}] 2025-12-12T16:17:19.849572125+00:00 stderr F I1212 16:17:19.849480 1 shared_informer.go:357] "Caches are synced" controller="resource quota" 2025-12-12T16:17:19.849572125+00:00 stderr F I1212 16:17:19.849514 1 resource_quota_controller.go:502] "synced quota controller" logger="resourcequota-controller" 2025-12-12T16:17:19.878594478+00:00 stderr F I1212 16:17:19.878493 1 shared_informer.go:357] "Caches are synced" controller="garbage collector" 2025-12-12T16:17:19.878594478+00:00 stderr F I1212 16:17:19.878521 1 garbagecollector.go:235] "synced garbage collector" logger="garbage-collector-controller" 2025-12-12T16:17:26.398267671+00:00 stderr F I1212 16:17:26.396465 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-86d99bb5b9" need=0 deleting=1 2025-12-12T16:17:26.398267671+00:00 stderr F I1212 16:17:26.397242 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-86d99bb5b9" 
relatedReplicaSets=["openshift-controller-manager/controller-manager-86f48fd68b","openshift-controller-manager/controller-manager-9fd5cc475","openshift-controller-manager/controller-manager-c7d4b49f6","openshift-controller-manager/controller-manager-c84474957","openshift-controller-manager/controller-manager-58897fffb5","openshift-controller-manager/controller-manager-5f76cf6594","openshift-controller-manager/controller-manager-6ff9c7475c","openshift-controller-manager/controller-manager-86d99bb5b9","openshift-controller-manager/controller-manager-69f958c846","openshift-controller-manager/controller-manager-5d4c96c665","openshift-controller-manager/controller-manager-65b6cccf98","openshift-controller-manager/controller-manager-74bfd85b68","openshift-controller-manager/controller-manager-7c9cdb8ff5"] 2025-12-12T16:17:26.398267671+00:00 stderr F I1212 16:17:26.397382 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="controller-manager-86d99bb5b9" pod="openshift-controller-manager/controller-manager-86d99bb5b9-plxtj" 2025-12-12T16:17:26.403988835+00:00 stderr F I1212 16:17:26.403935 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-controller-manager/controller-manager" err="Operation cannot be fulfilled on deployments.apps \"controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:17:26.413873938+00:00 stderr F I1212 16:17:26.412944 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-6b749d96f6" need=0 deleting=1 2025-12-12T16:17:26.413873938+00:00 stderr F I1212 16:17:26.413015 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-6b749d96f6" relatedReplicaSets=["openshift-route-controller-manager/route-controller-manager-58f5cf7b86","openshift-route-controller-manager/route-controller-manager-5fccdd79b9","openshift-route-controller-manager/route-controller-manager-66bd94d94f","openshift-route-controller-manager/route-controller-manager-684bc95d64","openshift-route-controller-manager/route-controller-manager-76558c69dc","openshift-route-controller-manager/route-controller-manager-776cdc94d6","openshift-route-controller-manager/route-controller-manager-7d86df95df","openshift-route-controller-manager/route-controller-manager-f4599bd79","openshift-route-controller-manager/route-controller-manager-6bc8749ddd","openshift-route-controller-manager/route-controller-manager-6d7f4ff85d","openshift-route-controller-manager/route-controller-manager-7cc45857b6","openshift-route-controller-manager/route-controller-manager-c47fcf799","openshift-route-controller-manager/route-controller-manager-6b749d96f6"] 2025-12-12T16:17:26.413873938+00:00 stderr F I1212 16:17:26.413197 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="route-controller-manager-6b749d96f6" pod="openshift-route-controller-manager/route-controller-manager-6b749d96f6-tvtts" 2025-12-12T16:17:26.414025673+00:00 stderr F I1212 16:17:26.413939 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-69f958c846" need=1 creating=1 2025-12-12T16:17:26.423060892+00:00 stderr F I1212 16:17:26.422943 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" 
deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:17:26.440285156+00:00 stderr F I1212 16:17:26.440173 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-f4599bd79" need=1 creating=1 2025-12-12T16:17:26.456840221+00:00 stderr F I1212 16:17:26.456750 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:17:46.461045668+00:00 stderr F I1212 16:17:46.460932 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-69f958c846" need=0 deleting=1 2025-12-12T16:17:46.461045668+00:00 stderr F I1212 16:17:46.461004 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-69f958c846" relatedReplicaSets=["openshift-controller-manager/controller-manager-c7d4b49f6","openshift-controller-manager/controller-manager-c84474957","openshift-controller-manager/controller-manager-58897fffb5","openshift-controller-manager/controller-manager-5f76cf6594","openshift-controller-manager/controller-manager-6ff9c7475c","openshift-controller-manager/controller-manager-86d99bb5b9","openshift-controller-manager/controller-manager-69f958c846","openshift-controller-manager/controller-manager-5d4c96c665","openshift-controller-manager/controller-manager-65b6cccf98","openshift-controller-manager/controller-manager-74bfd85b68","openshift-controller-manager/controller-manager-6445bd5bb7","openshift-controller-manager/controller-manager-9fd5cc475"] 2025-12-12T16:17:46.461313735+00:00 stderr F I1212 16:17:46.461283 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="controller-manager-69f958c846" pod="openshift-controller-manager/controller-manager-69f958c846-qd8rg" 2025-12-12T16:17:46.467856186+00:00 stderr F I1212 16:17:46.467791 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-controller-manager/controller-manager" err="Operation cannot be fulfilled on deployments.apps \"controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:17:46.482258022+00:00 stderr F I1212 16:17:46.480460 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-f4599bd79" need=0 deleting=1 2025-12-12T16:17:46.482258022+00:00 stderr F I1212 16:17:46.480479 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-6445bd5bb7" need=1 creating=1 2025-12-12T16:17:46.482258022+00:00 stderr F I1212 16:17:46.480527 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-f4599bd79" 
relatedReplicaSets=["openshift-route-controller-manager/route-controller-manager-7d86df95df","openshift-route-controller-manager/route-controller-manager-f4599bd79","openshift-route-controller-manager/route-controller-manager-6bc8749ddd","openshift-route-controller-manager/route-controller-manager-6d7f4ff85d","openshift-route-controller-manager/route-controller-manager-c47fcf799","openshift-route-controller-manager/route-controller-manager-6b749d96f6","openshift-route-controller-manager/route-controller-manager-58f5cf7b86","openshift-route-controller-manager/route-controller-manager-5fccdd79b9","openshift-route-controller-manager/route-controller-manager-66bd94d94f","openshift-route-controller-manager/route-controller-manager-6b47f77689","openshift-route-controller-manager/route-controller-manager-76558c69dc","openshift-route-controller-manager/route-controller-manager-776cdc94d6"] 2025-12-12T16:17:46.482258022+00:00 stderr F I1212 16:17:46.480680 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="route-controller-manager-f4599bd79" pod="openshift-route-controller-manager/route-controller-manager-f4599bd79-7rg9b" 2025-12-12T16:17:46.505289642+00:00 stderr F I1212 16:17:46.500968 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:17:46.528322131+00:00 stderr F I1212 16:17:46.527831 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-6b47f77689" need=1 creating=1 2025-12-12T16:17:46.554097968+00:00 stderr F I1212 16:17:46.553893 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:17:49.515782291+00:00 stderr F I1212 16:17:49.515720 1 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:17:49.516207401+00:00 stderr F I1212 16:17:49.516139 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:49.516125949 +0000 UTC))" 2025-12-12T16:17:49.516207401+00:00 stderr F I1212 16:17:49.516173 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:49.51616448 +0000 UTC))" 2025-12-12T16:17:49.516227402+00:00 stderr F I1212 16:17:49.516215 1 
tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:49.516206131 +0000 UTC))" 2025-12-12T16:17:49.516255972+00:00 stderr F I1212 16:17:49.516237 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:49.516228892 +0000 UTC))" 2025-12-12T16:17:49.516273373+00:00 stderr F I1212 16:17:49.516255 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:49.516248362 +0000 UTC))" 2025-12-12T16:17:49.516284023+00:00 stderr F I1212 16:17:49.516274 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:49.516266443 +0000 UTC))" 2025-12-12T16:17:49.516311394+00:00 stderr F I1212 16:17:49.516291 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:49.516285353 +0000 UTC))" 2025-12-12T16:17:49.516320434+00:00 stderr F I1212 16:17:49.516309 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:49.516302114 +0000 UTC))" 2025-12-12T16:17:49.516344635+00:00 stderr F I1212 16:17:49.516326 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:49.516320904 +0000 UTC))" 2025-12-12T16:17:49.516370025+00:00 stderr F I1212 16:17:49.516349 1 tlsconfig.go:181] "Loaded client CA" index=9 
certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:49.516342215 +0000 UTC))" 2025-12-12T16:17:49.516381906+00:00 stderr F I1212 16:17:49.516366 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:49.516360865 +0000 UTC))" 2025-12-12T16:17:49.516625492+00:00 stderr F I1212 16:17:49.516570 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"kube-controller-manager.openshift-kube-controller-manager.svc\" [serving] validServingFor=[kube-controller-manager.openshift-kube-controller-manager.svc,kube-controller-manager.openshift-kube-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:17:49.51656251 +0000 UTC))" 2025-12-12T16:17:49.516799166+00:00 stderr F I1212 16:17:49.516762 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556102\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556102\" (2025-12-12 15:15:01 +0000 UTC to 2028-12-12 15:15:01 +0000 UTC (now=2025-12-12 16:17:49.516752855 +0000 UTC))" 2025-12-12T16:18:00.885499000+00:00 stderr F I1212 16:18:00.885389 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-authentication/oauth-openshift-66458b6674" need=0 deleting=1 2025-12-12T16:18:00.885499000+00:00 stderr F I1212 16:18:00.885459 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-authentication/oauth-openshift-66458b6674" relatedReplicaSets=["openshift-authentication/oauth-openshift-66458b6674","openshift-authentication/oauth-openshift-666c97c5cd","openshift-authentication/oauth-openshift-7676dcb6ff","openshift-authentication/oauth-openshift-7d6d58bcf6","openshift-authentication/oauth-openshift-dbcc6b948","openshift-authentication/oauth-openshift-6567f5ffdb"] 2025-12-12T16:18:00.885595222+00:00 stderr F I1212 16:18:00.885553 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="oauth-openshift-66458b6674" pod="openshift-authentication/oauth-openshift-66458b6674-brfdj" 2025-12-12T16:18:00.894317788+00:00 stderr F I1212 16:18:00.892151 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-authentication/oauth-openshift" err="Operation cannot be fulfilled on deployments.apps \"oauth-openshift\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:18:00.908988051+00:00 stderr F I1212 16:18:00.908902 1 replica_set.go:590] "Too few replicas" 
logger="replicaset-controller" replicaSet="openshift-authentication/oauth-openshift-6567f5ffdb" need=1 creating=1 2025-12-12T16:18:00.930830261+00:00 stderr F I1212 16:18:00.930564 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-authentication/oauth-openshift" err="Operation cannot be fulfilled on deployments.apps \"oauth-openshift\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:18:06.449150137+00:00 stderr F I1212 16:18:06.449054 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-6445bd5bb7" need=0 deleting=1 2025-12-12T16:18:06.449150137+00:00 stderr F I1212 16:18:06.449114 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-6445bd5bb7" relatedReplicaSets=["openshift-controller-manager/controller-manager-c7d4b49f6","openshift-controller-manager/controller-manager-c84474957","openshift-controller-manager/controller-manager-58897fffb5","openshift-controller-manager/controller-manager-5f76cf6594","openshift-controller-manager/controller-manager-6ff9c7475c","openshift-controller-manager/controller-manager-86d99bb5b9","openshift-controller-manager/controller-manager-69f958c846","openshift-controller-manager/controller-manager-7fffb5779","openshift-controller-manager/controller-manager-65b6cccf98","openshift-controller-manager/controller-manager-74bfd85b68","openshift-controller-manager/controller-manager-6445bd5bb7","openshift-controller-manager/controller-manager-9fd5cc475"] 2025-12-12T16:18:06.449357092+00:00 stderr F I1212 16:18:06.449259 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="controller-manager-6445bd5bb7" pod="openshift-controller-manager/controller-manager-6445bd5bb7-qhd4b" 2025-12-12T16:18:06.457615267+00:00 stderr F I1212 16:18:06.456645 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-controller-manager/controller-manager" err="Operation cannot be fulfilled on deployments.apps \"controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:18:06.464446225+00:00 stderr F I1212 16:18:06.464091 1 replica_set.go:626] "Too many replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-6b47f77689" need=0 deleting=1 2025-12-12T16:18:06.464446225+00:00 stderr F I1212 16:18:06.464150 1 replica_set.go:253] "Found related ReplicaSets" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-6b47f77689" 
relatedReplicaSets=["openshift-route-controller-manager/route-controller-manager-5fccdd79b9","openshift-route-controller-manager/route-controller-manager-66bd94d94f","openshift-route-controller-manager/route-controller-manager-6b47f77689","openshift-route-controller-manager/route-controller-manager-76558c69dc","openshift-route-controller-manager/route-controller-manager-776cdc94d6","openshift-route-controller-manager/route-controller-manager-7d86df95df","openshift-route-controller-manager/route-controller-manager-f4599bd79","openshift-route-controller-manager/route-controller-manager-6bc8749ddd","openshift-route-controller-manager/route-controller-manager-6d7f4ff85d","openshift-route-controller-manager/route-controller-manager-67bd47cff9","openshift-route-controller-manager/route-controller-manager-c47fcf799","openshift-route-controller-manager/route-controller-manager-6b749d96f6"] 2025-12-12T16:18:06.464446225+00:00 stderr F I1212 16:18:06.464326 1 controller_utils.go:618] "Deleting pod" logger="replicaset-controller" controller="route-controller-manager-6b47f77689" pod="openshift-route-controller-manager/route-controller-manager-6b47f77689-5r77s" 2025-12-12T16:18:06.477252482+00:00 stderr F I1212 16:18:06.474297 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-route-controller-manager/route-controller-manager" err="Operation cannot be fulfilled on deployments.apps \"route-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:18:06.484263465+00:00 stderr F I1212 16:18:06.483900 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-controller-manager/controller-manager-7fffb5779" need=1 creating=1 2025-12-12T16:18:06.493240337+00:00 stderr F I1212 16:18:06.489228 1 replica_set.go:590] "Too few replicas" logger="replicaset-controller" replicaSet="openshift-route-controller-manager/route-controller-manager-67bd47cff9" need=1 creating=1 2025-12-12T16:18:06.505824499+00:00 stderr F I1212 16:18:06.505714 1 deployment_controller.go:512] "Error syncing deployment" logger="deployment-controller" deployment="openshift-controller-manager/controller-manager" err="Operation cannot be fulfilled on deployments.apps \"controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:18:07.538224872+00:00 stderr F I1212 16:18:07.538120 1 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" 2025-12-12T16:18:07.538401667+00:00 stderr F I1212 16:18:07.538354 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:18:07.538334045 +0000 UTC))" 2025-12-12T16:18:07.538401667+00:00 stderr F I1212 16:18:07.538391 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-csr-signer_@1762071455\" [] 
issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:18:07.538384156 +0000 UTC))" 2025-12-12T16:18:07.538421787+00:00 stderr F I1212 16:18:07.538407 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:18:07.538402477 +0000 UTC))" 2025-12-12T16:18:07.538430878+00:00 stderr F I1212 16:18:07.538419 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:18:07.538415927 +0000 UTC))" 2025-12-12T16:18:07.538456848+00:00 stderr F I1212 16:18:07.538434 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:18:07.538428117 +0000 UTC))" 2025-12-12T16:18:07.538466078+00:00 stderr F I1212 16:18:07.538451 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:18:07.538447178 +0000 UTC))" 2025-12-12T16:18:07.538475059+00:00 stderr F I1212 16:18:07.538463 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:18:07.538459658 +0000 UTC))" 2025-12-12T16:18:07.538501249+00:00 stderr F I1212 16:18:07.538478 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:18:07.538474489 +0000 UTC))" 2025-12-12T16:18:07.538510550+00:00 stderr F I1212 16:18:07.538498 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" 
certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:18:07.538491749 +0000 UTC))" 2025-12-12T16:18:07.538525460+00:00 stderr F I1212 16:18:07.538514 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca-bundle::/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt,request-header::/etc/kubernetes/static-pod-certs/configmaps/aggregator-client-ca/ca-bundle.crt" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:18:07.538508929 +0000 UTC))" 2025-12-12T16:18:07.538775036+00:00 stderr F I1212 16:18:07.538737 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"kube-controller-manager.openshift-kube-controller-manager.svc\" [serving] validServingFor=[kube-controller-manager.openshift-kube-controller-manager.svc,kube-controller-manager.openshift-kube-controller-manager.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:18:07.538729845 +0000 UTC))" 2025-12-12T16:18:07.538936620+00:00 stderr F I1212 16:18:07.538902 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556102\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556102\" (2025-12-12 15:15:01 +0000 UTC to 2028-12-12 15:15:01 +0000 UTC (now=2025-12-12 16:18:07.538895739 +0000 UTC))" 2025-12-12T16:18:14.406287379+00:00 stderr F E1212 16:18:14.406197 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-system/leases/kube-controller-manager?timeout=6s": dial tcp 38.102.83.180:6443: connect: connection refused, falling back to slow path 2025-12-12T16:18:14.406443043+00:00 stderr F E1212 16:18:14.406414 1 leaderelection.go:436] error retrieving resource lock kube-system/kube-controller-manager: Get "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-system/leases/kube-controller-manager?timeout=6s": dial tcp 38.102.83.180:6443: connect: connection refused 2025-12-12T16:18:17.407551399+00:00 stderr F E1212 16:18:17.407088 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-system/leases/kube-controller-manager?timeout=6s": dial tcp 38.102.83.180:6443: connect: connection refused, falling back to slow path 2025-12-12T16:18:17.407807635+00:00 stderr F E1212 16:18:17.407769 1 leaderelection.go:436] error retrieving resource lock kube-system/kube-controller-manager: Get "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-system/leases/kube-controller-manager?timeout=6s": dial tcp 38.102.83.180:6443: connect: connection refused 2025-12-12T16:18:19.859339003+00:00 stderr F E1212 16:18:19.859233 1 resource_quota_controller.go:446] "Unhandled Error" err="failed to discover resources: Get \"https://api-int.crc.testing:6443/api\": dial tcp 38.102.83.180:6443: connect: connection refused" logger="UnhandledError" 2025-12-12T16:18:19.890910334+00:00 stderr 
F I1212 16:18:19.890819 1 garbagecollector.go:789] "failed to discover preferred resources" logger="garbage-collector-controller" error="Get \"https://api-int.crc.testing:6443/api\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:19.890910334+00:00 stderr F I1212 16:18:19.890879 1 garbagecollector.go:184] "no resources reported by discovery, skipping garbage collector sync" logger="garbage-collector-controller" 2025-12-12T16:18:20.407378172+00:00 stderr F E1212 16:18:20.407264 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-system/leases/kube-controller-manager?timeout=6s": dial tcp 38.102.83.180:6443: connect: connection refused, falling back to slow path 2025-12-12T16:18:20.407600928+00:00 stderr F E1212 16:18:20.407558 1 leaderelection.go:436] error retrieving resource lock kube-system/kube-controller-manager: Get "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-system/leases/kube-controller-manager?timeout=6s": dial tcp 38.102.83.180:6443: connect: connection refused 2025-12-12T16:18:23.406866378+00:00 stderr F E1212 16:18:23.406778 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-system/leases/kube-controller-manager?timeout=6s": dial tcp 38.102.83.180:6443: connect: connection refused, falling back to slow path 2025-12-12T16:18:23.407008081+00:00 stderr F E1212 16:18:23.406982 1 leaderelection.go:436] error retrieving resource lock kube-system/kube-controller-manager: Get "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-system/leases/kube-controller-manager?timeout=6s": dial tcp 38.102.83.180:6443: connect: connection refused 2025-12-12T16:18:26.406110966+00:00 stderr F E1212 16:18:26.406047 1 leaderelection.go:436] error retrieving resource lock kube-system/kube-controller-manager: client rate limiter Wait returned an error: context deadline exceeded 2025-12-12T16:18:26.406273620+00:00 stderr F I1212 16:18:26.406257 1 leaderelection.go:297] failed to renew lease kube-system/kube-controller-manager: context deadline exceeded 2025-12-12T16:18:26.406538787+00:00 stderr F E1212 16:18:26.406521 1 controllermanager.go:373] "leaderelection lost" ././@LongLink0000644000000000000000000000032200000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-cert-syncer/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-control0000755000175000017500000000000015117043063033054 5ustar zuulzuul././@LongLink0000644000000000000000000000032700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-cert-syncer/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-control0000644000175000017500000003542615117043063033070 0ustar zuulzuul2025-12-12T16:15:02.415841285+00:00 stderr F I1212 16:15:02.415424 1 base_controller.go:76] Waiting for caches to sync for CertSyncController 2025-12-12T16:15:02.416724615+00:00 stderr F I1212 16:15:02.416135 1 observer_polling.go:159] Starting file observer 
2025-12-12T16:15:02.420245181+00:00 stderr F E1212 16:15:02.420170 1 reflector.go:200] "Failed to watch" err="failed to list *v1.Secret: Get \"https://localhost:6443/api/v1/namespaces/openshift-kube-controller-manager/secrets?limit=500&resourceVersion=0\": dial tcp [::1]:6443: connect: connection refused" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.Secret" 2025-12-12T16:15:02.420245181+00:00 stderr F E1212 16:15:02.420169 1 reflector.go:200] "Failed to watch" err="failed to list *v1.ConfigMap: Get \"https://localhost:6443/api/v1/namespaces/openshift-kube-controller-manager/configmaps?limit=500&resourceVersion=0\": dial tcp [::1]:6443: connect: connection refused" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.ConfigMap" 2025-12-12T16:15:03.365883036+00:00 stderr F E1212 16:15:03.365770 1 reflector.go:200] "Failed to watch" err="failed to list *v1.ConfigMap: Get \"https://localhost:6443/api/v1/namespaces/openshift-kube-controller-manager/configmaps?limit=500&resourceVersion=0\": dial tcp [::1]:6443: connect: connection refused" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.ConfigMap" 2025-12-12T16:15:04.011608512+00:00 stderr F E1212 16:15:04.010563 1 reflector.go:200] "Failed to watch" err="failed to list *v1.Secret: Get \"https://localhost:6443/api/v1/namespaces/openshift-kube-controller-manager/secrets?limit=500&resourceVersion=0\": dial tcp [::1]:6443: connect: connection refused" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.Secret" 2025-12-12T16:15:20.015911012+00:00 stderr F I1212 16:15:20.015828 1 base_controller.go:82] Caches are synced for CertSyncController 2025-12-12T16:15:20.015911012+00:00 stderr F I1212 16:15:20.015867 1 base_controller.go:119] Starting #1 worker of CertSyncController controller ... 
2025-12-12T16:15:20.016128207+00:00 stderr F I1212 16:15:20.016034 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:15:20.017908860+00:00 stderr F I1212 16:15:20.017844 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:16:56.110092431+00:00 stderr F I1212 16:16:56.103454 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:16:56.110092431+00:00 stderr F I1212 16:16:56.103935 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:16:56.118779823+00:00 stderr F I1212 16:16:56.118685 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:16:56.120778491+00:00 stderr F I1212 16:16:56.119057 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:16:56.122436872+00:00 stderr F I1212 16:16:56.122394 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:16:56.123304503+00:00 stderr F I1212 16:16:56.123076 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:16:56.134554458+00:00 stderr F I1212 16:16:56.134308 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:16:56.137777006+00:00 stderr F I1212 16:16:56.136647 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:16:56.192008290+00:00 stderr F I1212 16:16:56.191256 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:16:56.192331508+00:00 stderr F I1212 16:16:56.192303 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:16:56.206023523+00:00 stderr F I1212 16:16:56.202414 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:16:56.206403682+00:00 stderr F I1212 16:16:56.206315 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:16:58.928288214+00:00 stderr F I1212 16:16:58.928192 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:16:58.929430662+00:00 stderr F I1212 16:16:58.928602 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:16:58.931743609+00:00 stderr F I1212 16:16:58.931702 1 certsync_controller.go:260] Creating directory "/etc/kubernetes/static-pod-certs/secrets/kube-controller-manager-client-cert-key" ... 2025-12-12T16:16:58.931789990+00:00 stderr F I1212 16:16:58.931770 1 certsync_controller.go:274] Writing secret manifest "/etc/kubernetes/static-pod-certs/secrets/kube-controller-manager-client-cert-key/tls.crt" ... 
2025-12-12T16:16:58.932171379+00:00 stderr F I1212 16:16:58.932118 1 certsync_controller.go:274] Writing secret manifest "/etc/kubernetes/static-pod-certs/secrets/kube-controller-manager-client-cert-key/tls.key" ... 2025-12-12T16:16:58.932487547+00:00 stderr F I1212 16:16:58.932430 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-controller-manager", Name:"kube-controller-manager-crc", UID:"", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CertificateUpdated' Wrote updated secret: op**********ey 2025-12-12T16:17:00.747514999+00:00 stderr F I1212 16:17:00.747306 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:17:00.750388499+00:00 stderr F I1212 16:17:00.750322 1 certsync_controller.go:155] Creating directory "/etc/kubernetes/static-pod-certs/configmaps/client-ca" ... 2025-12-12T16:17:00.750388499+00:00 stderr F I1212 16:17:00.750356 1 certsync_controller.go:168] Writing configmap manifest "/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" ... 2025-12-12T16:17:00.750905582+00:00 stderr F I1212 16:17:00.750810 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:17:00.750905582+00:00 stderr F I1212 16:17:00.750851 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-controller-manager", Name:"kube-controller-manager-crc", UID:"", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CertificateUpdated' Wrote updated configmap: openshift-kube-controller-manager/client-ca 2025-12-12T16:17:49.510591072+00:00 stderr F I1212 16:17:49.510489 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:17:49.514696274+00:00 stderr F I1212 16:17:49.514647 1 certsync_controller.go:155] Creating directory "/etc/kubernetes/static-pod-certs/configmaps/client-ca" ... 2025-12-12T16:17:49.514717334+00:00 stderr F I1212 16:17:49.514702 1 certsync_controller.go:168] Writing configmap manifest "/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" ... 2025-12-12T16:17:49.515926754+00:00 stderr F I1212 16:17:49.515748 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-controller-manager", Name:"kube-controller-manager-crc", UID:"", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CertificateUpdated' Wrote updated configmap: openshift-kube-controller-manager/client-ca 2025-12-12T16:17:49.515960185+00:00 stderr F I1212 16:17:49.515919 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:18:07.533377343+00:00 stderr F I1212 16:18:07.532073 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:18:07.537242558+00:00 stderr F I1212 16:18:07.537120 1 certsync_controller.go:155] Creating directory "/etc/kubernetes/static-pod-certs/configmaps/client-ca" ... 2025-12-12T16:18:07.537242558+00:00 stderr F I1212 16:18:07.537172 1 certsync_controller.go:168] Writing configmap manifest "/etc/kubernetes/static-pod-certs/configmaps/client-ca/ca-bundle.crt" ... 
2025-12-12T16:18:07.538945530+00:00 stderr F I1212 16:18:07.537786 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:18:07.538945530+00:00 stderr F I1212 16:18:07.537931 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-controller-manager", Name:"kube-controller-manager-crc", UID:"", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CertificateUpdated' Wrote updated configmap: openshift-kube-controller-manager/client-ca 2025-12-12T16:18:45.823006186+00:00 stderr F I1212 16:18:45.822917 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:18:45.823952290+00:00 stderr F I1212 16:18:45.823399 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:18:45.824054682+00:00 stderr F I1212 16:18:45.824029 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:18:45.824223346+00:00 stderr F I1212 16:18:45.824198 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:18:57.611131360+00:00 stderr F I1212 16:18:57.611081 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:18:57.611447518+00:00 stderr F I1212 16:18:57.611411 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:18:57.611719675+00:00 stderr F I1212 16:18:57.611704 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:18:57.611905289+00:00 stderr F I1212 16:18:57.611889 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:18:57.612148235+00:00 stderr F I1212 16:18:57.612036 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:18:57.612805902+00:00 stderr F I1212 16:18:57.612785 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:28:45.824320970+00:00 stderr F I1212 16:28:45.823390 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:28:45.824634268+00:00 stderr F I1212 16:28:45.824598 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:28:45.824815142+00:00 stderr F I1212 16:28:45.824781 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:28:45.825006507+00:00 stderr F I1212 16:28:45.824976 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:28:57.612503242+00:00 stderr F I1212 16:28:57.612387 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:28:57.612695437+00:00 stderr F I1212 16:28:57.612650 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 
2025-12-12T16:38:45.824512036+00:00 stderr F I1212 16:38:45.824006 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:38:45.824721882+00:00 stderr F I1212 16:38:45.824692 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:38:45.824838585+00:00 stderr F I1212 16:38:45.824797 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:38:45.825026989+00:00 stderr F I1212 16:38:45.824941 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:38:57.613212062+00:00 stderr F I1212 16:38:57.613091 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:38:57.613444118+00:00 stderr F I1212 16:38:57.613407 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:38:57.613578741+00:00 stderr F I1212 16:38:57.613546 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:38:57.613734385+00:00 stderr F I1212 16:38:57.613693 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:38:57.613794417+00:00 stderr F I1212 16:38:57.613767 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:38:57.613942590+00:00 stderr F I1212 16:38:57.613902 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] 2025-12-12T16:38:57.613995902+00:00 stderr F I1212 16:38:57.613973 1 certsync_controller.go:74] Syncing configmaps: [{aggregator-client-ca false} {client-ca false} {trusted-ca-bundle true}] 2025-12-12T16:38:57.614118685+00:00 stderr F I1212 16:38:57.614087 1 certsync_controller.go:178] Syncing secrets: [{kube-controller-manager-client-cert-key false} {csr-signer false}] ././@LongLink0000644000000000000000000000024200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_c0000755000175000017500000000000015117043043033162 5ustar zuulzuul././@LongLink0000644000000000000000000000026200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/csi-provisioner/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_c0000755000175000017500000000000015117043062033163 5ustar zuulzuul././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/csi-provisioner/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_c0000644000175000017500000032411715117043042033173 0ustar zuulzuul2025-12-12T16:16:55.768712816+00:00 stderr F W1212 16:16:55.768253 
1 feature_gate.go:241] Setting GA feature gate Topology=true. It will be removed in a future release. 2025-12-12T16:16:55.768933591+00:00 stderr F I1212 16:16:55.768912 1 feature_gate.go:249] feature gates: &{map[Topology:true]} 2025-12-12T16:16:55.769186487+00:00 stderr F I1212 16:16:55.769164 1 csi-provisioner.go:154] Version: 66d31bec20f1f15d0916c3b833d3aec6422942ca 2025-12-12T16:16:55.769216498+00:00 stderr F I1212 16:16:55.769207 1 csi-provisioner.go:177] Building kube configs for running in cluster... 2025-12-12T16:16:55.824399345+00:00 stderr F I1212 16:16:55.824327 1 connection.go:215] Connecting to unix:///csi/csi.sock 2025-12-12T16:16:55.855314410+00:00 stderr F I1212 16:16:55.848858 1 common.go:138] Probing CSI driver for readiness 2025-12-12T16:16:55.855314410+00:00 stderr F I1212 16:16:55.848891 1 connection.go:244] GRPC call: /csi.v1.Identity/Probe 2025-12-12T16:16:55.869513817+00:00 stderr F I1212 16:16:55.848897 1 connection.go:245] GRPC request: {} 2025-12-12T16:16:55.909290078+00:00 stderr F I1212 16:16:55.900583 1 connection.go:251] GRPC response: {} 2025-12-12T16:16:55.909290078+00:00 stderr F I1212 16:16:55.900605 1 connection.go:252] GRPC error: 2025-12-12T16:16:55.909290078+00:00 stderr F I1212 16:16:55.900619 1 connection.go:244] GRPC call: /csi.v1.Identity/GetPluginInfo 2025-12-12T16:16:55.909290078+00:00 stderr F I1212 16:16:55.900623 1 connection.go:245] GRPC request: {} 2025-12-12T16:16:55.909290078+00:00 stderr F I1212 16:16:55.901089 1 connection.go:251] GRPC response: {"name":"kubevirt.io.hostpath-provisioner","vendor_version":"latest"} 2025-12-12T16:16:55.909290078+00:00 stderr F I1212 16:16:55.901095 1 connection.go:252] GRPC error: 2025-12-12T16:16:55.909290078+00:00 stderr F I1212 16:16:55.901103 1 csi-provisioner.go:230] Detected CSI driver kubevirt.io.hostpath-provisioner 2025-12-12T16:16:55.909290078+00:00 stderr F I1212 16:16:55.901110 1 connection.go:244] GRPC call: /csi.v1.Identity/GetPluginCapabilities 2025-12-12T16:16:55.909290078+00:00 stderr F I1212 16:16:55.901114 1 connection.go:245] GRPC request: {} 2025-12-12T16:16:55.924295874+00:00 stderr F I1212 16:16:55.915591 1 connection.go:251] GRPC response: {"capabilities":[{"Type":{"Service":{"type":1}}},{"Type":{"Service":{"type":2}}}]} 2025-12-12T16:16:55.924295874+00:00 stderr F I1212 16:16:55.915616 1 connection.go:252] GRPC error: 2025-12-12T16:16:55.924295874+00:00 stderr F I1212 16:16:55.915630 1 connection.go:244] GRPC call: /csi.v1.Controller/ControllerGetCapabilities 2025-12-12T16:16:55.924295874+00:00 stderr F I1212 16:16:55.915635 1 connection.go:245] GRPC request: {} 2025-12-12T16:16:55.924295874+00:00 stderr F I1212 16:16:55.916214 1 connection.go:251] GRPC response: {"capabilities":[{"Type":{"Rpc":{"type":1}}},{"Type":{"Rpc":{"type":12}}},{"Type":{"Rpc":{"type":4}}},{"Type":{"Rpc":{"type":3}}},{"Type":{"Rpc":{"type":11}}}]} 2025-12-12T16:16:55.924295874+00:00 stderr F I1212 16:16:55.916223 1 connection.go:252] GRPC error: 2025-12-12T16:16:55.959285809+00:00 stderr F I1212 16:16:55.943952 1 csi-provisioner.go:302] CSI driver does not support PUBLISH_UNPUBLISH_VOLUME, not watching VolumeAttachments 2025-12-12T16:16:55.959285809+00:00 stderr F I1212 16:16:55.943988 1 connection.go:244] GRPC call: /csi.v1.Node/NodeGetInfo 2025-12-12T16:16:55.959285809+00:00 stderr F I1212 16:16:55.943993 1 connection.go:245] GRPC request: {} 2025-12-12T16:16:55.986008421+00:00 stderr F I1212 16:16:55.985226 1 connection.go:251] GRPC response: 
{"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"node_id":"crc"} 2025-12-12T16:16:55.986008421+00:00 stderr F I1212 16:16:55.985253 1 connection.go:252] GRPC error: 2025-12-12T16:16:56.001086699+00:00 stderr F I1212 16:16:55.985299 1 csi-provisioner.go:351] using local topology with Node = &Node{ObjectMeta:{crc 0 0001-01-01 00:00:00 +0000 UTC map[topology.hostpath.csi/node:crc] map[] [] [] []},Spec:NodeSpec{PodCIDR:,DoNotUseExternalID:,ProviderID:,Unschedulable:false,Taints:[]Taint{},ConfigSource:nil,PodCIDRs:[],},Status:NodeStatus{Capacity:ResourceList{},Allocatable:ResourceList{},Phase:,Conditions:[]NodeCondition{},Addresses:[]NodeAddress{},DaemonEndpoints:NodeDaemonEndpoints{KubeletEndpoint:DaemonEndpoint{Port:0,},},NodeInfo:NodeSystemInfo{MachineID:,SystemUUID:,BootID:,KernelVersion:,OSImage:,ContainerRuntimeVersion:,KubeletVersion:,KubeProxyVersion:,OperatingSystem:,Architecture:,},Images:[]ContainerImage{},VolumesInUse:[],VolumesAttached:[]AttachedVolume{},Config:nil,},} and CSINode = &CSINode{ObjectMeta:{crc 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},Spec:CSINodeSpec{Drivers:[]CSINodeDriver{CSINodeDriver{Name:kubevirt.io.hostpath-provisioner,NodeID:crc,TopologyKeys:[topology.hostpath.csi/node],Allocatable:nil,},},},} 2025-12-12T16:16:56.045770550+00:00 stderr F I1212 16:16:56.044466 1 csi-provisioner.go:464] using apps/v1/DaemonSet csi-hostpathplugin as owner of CSIStorageCapacity objects 2025-12-12T16:16:56.045770550+00:00 stderr F I1212 16:16:56.044518 1 csi-provisioner.go:483] producing CSIStorageCapacity objects with fixed topology segment [topology.hostpath.csi/node: crc] 2025-12-12T16:16:56.048512297+00:00 stderr F I1212 16:16:56.048479 1 csi-provisioner.go:529] using the CSIStorageCapacity v1 API 2025-12-12T16:16:56.048889106+00:00 stderr F I1212 16:16:56.048592 1 capacity.go:339] Capacity Controller: topology changed: added [0xc000840270 = topology.hostpath.csi/node: crc], removed [] 2025-12-12T16:16:56.068570047+00:00 stderr F I1212 16:16:56.055783 1 controller.go:732] Using saving PVs to API server in background 2025-12-12T16:16:56.068570047+00:00 stderr F I1212 16:16:56.058081 1 reflector.go:289] Starting reflector *v1.StorageClass (1h0m0s) from k8s.io/client-go/informers/factory.go:150 2025-12-12T16:16:56.068570047+00:00 stderr F I1212 16:16:56.058094 1 reflector.go:325] Listing and watching *v1.StorageClass from k8s.io/client-go/informers/factory.go:150 2025-12-12T16:16:56.068570047+00:00 stderr F I1212 16:16:56.068354 1 reflector.go:289] Starting reflector *v1.CSIStorageCapacity (1h0m0s) from k8s.io/client-go/informers/factory.go:150 2025-12-12T16:16:56.068570047+00:00 stderr F I1212 16:16:56.068384 1 reflector.go:325] Listing and watching *v1.CSIStorageCapacity from k8s.io/client-go/informers/factory.go:150 2025-12-12T16:16:56.068900175+00:00 stderr F I1212 16:16:56.068835 1 capacity.go:373] Capacity Controller: storage class crc-csi-hostpath-provisioner was updated or added 2025-12-12T16:16:56.068900175+00:00 stderr F I1212 16:16:56.068852 1 capacity.go:480] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:16:56.068941976+00:00 stderr F I1212 16:16:56.068912 1 reflector.go:289] Starting reflector *v1.PersistentVolumeClaim (15m0s) from k8s.io/client-go/informers/factory.go:150 2025-12-12T16:16:56.068941976+00:00 stderr F I1212 16:16:56.068921 1 reflector.go:325] Listing and watching *v1.PersistentVolumeClaim from k8s.io/client-go/informers/factory.go:150 
2025-12-12T16:16:56.156725679+00:00 stderr F I1212 16:16:56.156650 1 shared_informer.go:341] caches populated 2025-12-12T16:16:56.156725679+00:00 stderr F I1212 16:16:56.156694 1 shared_informer.go:341] caches populated 2025-12-12T16:16:56.156767240+00:00 stderr F I1212 16:16:56.156743 1 controller.go:811] Starting provisioner controller kubevirt.io.hostpath-provisioner_csi-hostpathplugin-59hhc_8e9ab58d-5bf0-4e92-94e4-c6b10040e871! 2025-12-12T16:16:56.156879073+00:00 stderr F I1212 16:16:56.156785 1 capacity.go:243] Starting Capacity Controller 2025-12-12T16:16:56.156879073+00:00 stderr F I1212 16:16:56.156802 1 shared_informer.go:341] caches populated 2025-12-12T16:16:56.157088378+00:00 stderr F I1212 16:16:56.156807 1 capacity.go:339] Capacity Controller: topology changed: added [0xc000840270 = topology.hostpath.csi/node: crc], removed [] 2025-12-12T16:16:56.157117309+00:00 stderr F I1212 16:16:56.157083 1 capacity.go:480] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:16:56.157117309+00:00 stderr F I1212 16:16:56.157100 1 capacity.go:279] Initial number of topology segments 1, storage classes 1, potential CSIStorageCapacity objects 1 2025-12-12T16:16:56.157117309+00:00 stderr F I1212 16:16:56.157105 1 capacity.go:290] Checking for existing CSIStorageCapacity objects 2025-12-12T16:16:56.157713163+00:00 stderr F I1212 16:16:56.157148 1 capacity.go:725] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 35673 matches {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:16:56.157713163+00:00 stderr F I1212 16:16:56.157161 1 capacity.go:255] Started Capacity Controller 2025-12-12T16:16:56.157713163+00:00 stderr F I1212 16:16:56.157193 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:16:56.157713163+00:00 stderr F I1212 16:16:56.157211 1 volume_store.go:97] Starting save volume queue 2025-12-12T16:16:56.157713163+00:00 stderr F I1212 16:16:56.157394 1 reflector.go:289] Starting reflector *v1.PersistentVolume (15m0s) from sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845 2025-12-12T16:16:56.157713163+00:00 stderr F I1212 16:16:56.157403 1 reflector.go:325] Listing and watching *v1.PersistentVolume from sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845 2025-12-12T16:16:56.157734504+00:00 stderr F I1212 16:16:56.157717 1 reflector.go:289] Starting reflector *v1.StorageClass (15m0s) from sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848 2025-12-12T16:16:56.157764424+00:00 stderr F I1212 16:16:56.157741 1 reflector.go:325] Listing and watching *v1.StorageClass from sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848 2025-12-12T16:16:56.157842226+00:00 stderr F I1212 16:16:56.157784 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 35673 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:16:56.157932309+00:00 stderr F I1212 16:16:56.157865 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:16:56.157977350+00:00 stderr F I1212 16:16:56.157948 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:16:56.158353089+00:00 stderr F I1212 16:16:56.157956 1 
connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:16:56.160160433+00:00 stderr F I1212 16:16:56.160103 1 connection.go:251] GRPC response: {"available_capacity":61910159360,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:16:56.160160433+00:00 stderr F I1212 16:16:56.160121 1 connection.go:252] GRPC error: 2025-12-12T16:16:56.160213444+00:00 stderr F I1212 16:16:56.160143 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 60459140Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:16:56.165050632+00:00 stderr F I1212 16:16:56.164821 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 38056 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 60459140Ki 2025-12-12T16:16:56.166114388+00:00 stderr F I1212 16:16:56.165227 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 38056 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:16:56.265884584+00:00 stderr F I1212 16:16:56.264518 1 shared_informer.go:341] caches populated 2025-12-12T16:16:56.265884584+00:00 stderr F I1212 16:16:56.264727 1 controller.go:860] Started provisioner controller kubevirt.io.hostpath-provisioner_csi-hostpathplugin-59hhc_8e9ab58d-5bf0-4e92-94e4-c6b10040e871! 2025-12-12T16:16:56.276794180+00:00 stderr F I1212 16:16:56.264783 1 controller.go:1152] handleProtectionFinalizer Volume : &PersistentVolume{ObjectMeta:{pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 fc1bbaba-db8e-4b91-8f7b-815ce1e79968 24587 0 2025-11-03 08:50:26 +0000 UTC map[] map[pv.kubernetes.io/provisioned-by:kubevirt.io.hostpath-provisioner volume.kubernetes.io/provisioner-deletion-secret-name: volume.kubernetes.io/provisioner-deletion-secret-namespace:] [] [kubernetes.io/pv-protection] [{csi-provisioner Update v1 2025-11-03 08:50:26 +0000 UTC FieldsV1 {"f:metadata":{"f:annotations":{".":{},"f:pv.kubernetes.io/provisioned-by":{},"f:volume.kubernetes.io/provisioner-deletion-secret-name":{},"f:volume.kubernetes.io/provisioner-deletion-secret-namespace":{}}},"f:spec":{"f:accessModes":{},"f:capacity":{".":{},"f:storage":{}},"f:claimRef":{".":{},"f:apiVersion":{},"f:kind":{},"f:name":{},"f:namespace":{},"f:resourceVersion":{},"f:uid":{}},"f:csi":{".":{},"f:driver":{},"f:volumeAttributes":{".":{},"f:csi.storage.k8s.io/pv/name":{},"f:csi.storage.k8s.io/pvc/name":{},"f:csi.storage.k8s.io/pvc/namespace":{},"f:storage.kubernetes.io/csiProvisionerIdentity":{},"f:storagePool":{}},"f:volumeHandle":{}},"f:nodeAffinity":{".":{},"f:required":{}},"f:persistentVolumeReclaimPolicy":{},"f:storageClassName":{},"f:volumeMode":{}}} } {kube-controller-manager Update v1 2025-11-03 08:50:26 +0000 UTC FieldsV1 {"f:status":{"f:phase":{}}} status}]},Spec:PersistentVolumeSpec{Capacity:ResourceList{storage: {{32212254720 0} {} 30Gi 
BinarySI},},PersistentVolumeSource:PersistentVolumeSource{GCEPersistentDisk:nil,AWSElasticBlockStore:nil,HostPath:nil,Glusterfs:nil,NFS:nil,RBD:nil,ISCSI:nil,Cinder:nil,CephFS:nil,FC:nil,Flocker:nil,FlexVolume:nil,AzureFile:nil,VsphereVolume:nil,Quobyte:nil,AzureDisk:nil,PhotonPersistentDisk:nil,PortworxVolume:nil,ScaleIO:nil,Local:nil,StorageOS:nil,CSI:&CSIPersistentVolumeSource{Driver:kubevirt.io.hostpath-provisioner,VolumeHandle:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2,ReadOnly:false,FSType:,VolumeAttributes:map[string]string{csi.storage.k8s.io/pv/name: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2,csi.storage.k8s.io/pvc/name: crc-image-registry-storage,csi.storage.k8s.io/pvc/namespace: openshift-image-registry,storage.kubernetes.io/csiProvisionerIdentity: 1762159825768-6575-kubevirt.io.hostpath-provisioner-crc,storagePool: local,},ControllerPublishSecretRef:nil,NodeStageSecretRef:nil,NodePublishSecretRef:nil,ControllerExpandSecretRef:nil,NodeExpandSecretRef:nil,},},AccessModes:[ReadWriteMany],ClaimRef:&ObjectReference{Kind:PersistentVolumeClaim,Namespace:openshift-image-registry,Name:crc-image-registry-storage,UID:b21f41aa-58d4-44b1-aeaa-280a8e32ddf2,APIVersion:v1,ResourceVersion:22386,FieldPath:,},PersistentVolumeReclaimPolicy:Retain,StorageClassName:crc-csi-hostpath-provisioner,MountOptions:[],VolumeMode:*Filesystem,NodeAffinity:&VolumeNodeAffinity{Required:&NodeSelector{NodeSelectorTerms:[]NodeSelectorTerm{NodeSelectorTerm{MatchExpressions:[]NodeSelectorRequirement{NodeSelectorRequirement{Key:topology.hostpath.csi/node,Operator:In,Values:[crc],},},MatchFields:[]NodeSelectorRequirement{},},},},},},Status:PersistentVolumeStatus{Phase:Bound,Message:,Reason:,LastPhaseTransitionTime:2025-11-03 08:50:26 +0000 UTC,},} 2025-12-12T16:16:56.276794180+00:00 stderr F I1212 16:16:56.276772 1 controller.go:1239] shouldDelete volume "pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2" 2025-12-12T16:16:56.276794180+00:00 stderr F I1212 16:16:56.276785 1 controller.go:1260] shouldDelete volume "pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2" is false: PersistentVolumePhase is not Released 2025-12-12T16:17:56.158222439+00:00 stderr F I1212 16:17:56.158116 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:17:56.158222439+00:00 stderr F I1212 16:17:56.158193 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:17:56.158324991+00:00 stderr F I1212 16:17:56.158226 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:17:56.158452324+00:00 stderr F I1212 16:17:56.158231 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:17:56.159531331+00:00 stderr F I1212 16:17:56.159493 1 connection.go:251] GRPC response: {"available_capacity":59762335744,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:17:56.159531331+00:00 stderr F I1212 16:17:56.159505 1 connection.go:252] GRPC error: 2025-12-12T16:17:56.159550851+00:00 stderr F I1212 16:17:56.159523 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58361656Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:17:56.165659262+00:00 stderr F I1212 16:17:56.165579 1 capacity.go:672] 
Capacity Controller: updated csisc-k4gvk with new resource version 38944 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 58361656Ki 2025-12-12T16:17:56.165710804+00:00 stderr F I1212 16:17:56.165653 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 38944 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:18:15.967287921+00:00 stderr F I1212 16:18:15.965859 1 reflector.go:790] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845: Watch close - *v1.PersistentVolume total 1 items received 2025-12-12T16:18:15.992172276+00:00 stderr F I1212 16:18:15.985028 1 reflector.go:790] k8s.io/client-go/informers/factory.go:150: Watch close - *v1.PersistentVolumeClaim total 1 items received 2025-12-12T16:18:15.992172276+00:00 stderr F I1212 16:18:15.989355 1 reflector.go:421] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845: watch of *v1.PersistentVolume returned Get "https://10.217.4.1:443/api/v1/persistentvolumes?allowWatchBookmarks=true&resourceVersion=38942&timeout=6m49s&timeoutSeconds=409&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:16.013535044+00:00 stderr F I1212 16:18:16.010037 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.PersistentVolumeClaim returned Get "https://10.217.4.1:443/api/v1/persistentvolumeclaims?allowWatchBookmarks=true&resourceVersion=38941&timeout=8m24s&timeoutSeconds=504&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:16.025882700+00:00 stderr F I1212 16:18:16.025813 1 reflector.go:790] k8s.io/client-go/informers/factory.go:150: Watch close - *v1.CSIStorageCapacity total 3 items received 2025-12-12T16:18:16.026045684+00:00 stderr F I1212 16:18:16.026022 1 reflector.go:790] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848: Watch close - *v1.StorageClass total 1 items received 2025-12-12T16:18:16.026143856+00:00 stderr F I1212 16:18:16.026121 1 reflector.go:790] k8s.io/client-go/informers/factory.go:150: Watch close - *v1.StorageClass total 1 items received 2025-12-12T16:18:16.038335978+00:00 stderr F I1212 16:18:16.038239 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.StorageClass returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/storageclasses?allowWatchBookmarks=true&resourceVersion=38941&timeout=6m59s&timeoutSeconds=419&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:16.038413239+00:00 stderr F I1212 16:18:16.038351 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.CSIStorageCapacity returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/namespaces/hostpath-provisioner/csistoragecapacities?allowWatchBookmarks=true&labelSelector=csi.storage.k8s.io%2Fdrivername%3Dkubevirt.io.hostpath-provisioner%2Ccsi.storage.k8s.io%2Fmanaged-by%3Dexternal-provisioner-crc&resourceVersion=38944&timeout=6m57s&timeoutSeconds=417&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:16.038578784+00:00 stderr F I1212 16:18:16.038514 1 reflector.go:421] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848: watch of *v1.StorageClass returned Get 
"https://10.217.4.1:443/apis/storage.k8s.io/v1/storageclasses?allowWatchBookmarks=true&resourceVersion=38941&timeout=5m25s&timeoutSeconds=325&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:17.039043808+00:00 stderr F I1212 16:18:17.038974 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.PersistentVolumeClaim returned Get "https://10.217.4.1:443/api/v1/persistentvolumeclaims?allowWatchBookmarks=true&resourceVersion=38941&timeout=9m1s&timeoutSeconds=541&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:17.141387428+00:00 stderr F I1212 16:18:17.141245 1 reflector.go:421] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848: watch of *v1.StorageClass returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/storageclasses?allowWatchBookmarks=true&resourceVersion=38941&timeout=9m55s&timeoutSeconds=595&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:17.253594412+00:00 stderr F I1212 16:18:17.253513 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.StorageClass returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/storageclasses?allowWatchBookmarks=true&resourceVersion=38941&timeout=7m2s&timeoutSeconds=422&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:17.485740632+00:00 stderr F I1212 16:18:17.485649 1 reflector.go:421] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845: watch of *v1.PersistentVolume returned Get "https://10.217.4.1:443/api/v1/persistentvolumes?allowWatchBookmarks=true&resourceVersion=38942&timeout=5m18s&timeoutSeconds=318&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:17.636616232+00:00 stderr F I1212 16:18:17.636537 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.CSIStorageCapacity returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/namespaces/hostpath-provisioner/csistoragecapacities?allowWatchBookmarks=true&labelSelector=csi.storage.k8s.io%2Fdrivername%3Dkubevirt.io.hostpath-provisioner%2Ccsi.storage.k8s.io%2Fmanaged-by%3Dexternal-provisioner-crc&resourceVersion=38944&timeout=5m48s&timeoutSeconds=348&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:19.398630743+00:00 stderr F I1212 16:18:19.398571 1 reflector.go:421] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848: watch of *v1.StorageClass returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/storageclasses?allowWatchBookmarks=true&resourceVersion=38941&timeout=6m40s&timeoutSeconds=400&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:19.603587250+00:00 stderr F I1212 16:18:19.603487 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.PersistentVolumeClaim returned Get "https://10.217.4.1:443/api/v1/persistentvolumeclaims?allowWatchBookmarks=true&resourceVersion=38941&timeout=8m40s&timeoutSeconds=520&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:19.966294958+00:00 stderr F I1212 16:18:19.966211 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.CSIStorageCapacity returned Get 
"https://10.217.4.1:443/apis/storage.k8s.io/v1/namespaces/hostpath-provisioner/csistoragecapacities?allowWatchBookmarks=true&labelSelector=csi.storage.k8s.io%2Fdrivername%3Dkubevirt.io.hostpath-provisioner%2Ccsi.storage.k8s.io%2Fmanaged-by%3Dexternal-provisioner-crc&resourceVersion=38944&timeout=6m20s&timeoutSeconds=380&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:20.308160649+00:00 stderr F I1212 16:18:20.307936 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.StorageClass returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/storageclasses?allowWatchBookmarks=true&resourceVersion=38941&timeout=8m58s&timeoutSeconds=538&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:20.603626144+00:00 stderr F I1212 16:18:20.603538 1 reflector.go:421] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845: watch of *v1.PersistentVolume returned Get "https://10.217.4.1:443/api/v1/persistentvolumes?allowWatchBookmarks=true&resourceVersion=38942&timeout=5m29s&timeoutSeconds=329&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:23.656047458+00:00 stderr F I1212 16:18:23.655985 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.PersistentVolumeClaim returned Get "https://10.217.4.1:443/api/v1/persistentvolumeclaims?allowWatchBookmarks=true&resourceVersion=38941&timeout=7m0s&timeoutSeconds=420&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:24.349224405+00:00 stderr F I1212 16:18:24.349091 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.CSIStorageCapacity returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/namespaces/hostpath-provisioner/csistoragecapacities?allowWatchBookmarks=true&labelSelector=csi.storage.k8s.io%2Fdrivername%3Dkubevirt.io.hostpath-provisioner%2Ccsi.storage.k8s.io%2Fmanaged-by%3Dexternal-provisioner-crc&resourceVersion=38944&timeout=9m16s&timeoutSeconds=556&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:24.865620852+00:00 stderr F I1212 16:18:24.865529 1 reflector.go:421] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845: watch of *v1.PersistentVolume returned Get "https://10.217.4.1:443/api/v1/persistentvolumes?allowWatchBookmarks=true&resourceVersion=38942&timeout=8m26s&timeoutSeconds=506&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:25.046268318+00:00 stderr F I1212 16:18:25.046145 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.StorageClass returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/storageclasses?allowWatchBookmarks=true&resourceVersion=38941&timeout=6m40s&timeoutSeconds=400&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:25.146529367+00:00 stderr F I1212 16:18:25.146461 1 reflector.go:421] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848: watch of *v1.StorageClass returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/storageclasses?allowWatchBookmarks=true&resourceVersion=38941&timeout=5m59s&timeoutSeconds=359&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:34.094464653+00:00 stderr F I1212 16:18:34.094403 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: 
watch of *v1.StorageClass returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/storageclasses?allowWatchBookmarks=true&resourceVersion=38941&timeout=6m15s&timeoutSeconds=375&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:34.317836965+00:00 stderr F I1212 16:18:34.317702 1 reflector.go:421] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845: watch of *v1.PersistentVolume returned Get "https://10.217.4.1:443/api/v1/persistentvolumes?allowWatchBookmarks=true&resourceVersion=38942&timeout=7m42s&timeoutSeconds=462&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:34.617022042+00:00 stderr F I1212 16:18:34.616959 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.CSIStorageCapacity returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/namespaces/hostpath-provisioner/csistoragecapacities?allowWatchBookmarks=true&labelSelector=csi.storage.k8s.io%2Fdrivername%3Dkubevirt.io.hostpath-provisioner%2Ccsi.storage.k8s.io%2Fmanaged-by%3Dexternal-provisioner-crc&resourceVersion=38944&timeout=5m9s&timeoutSeconds=309&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:35.474347558+00:00 stderr F I1212 16:18:35.474209 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.PersistentVolumeClaim returned Get "https://10.217.4.1:443/api/v1/persistentvolumeclaims?allowWatchBookmarks=true&resourceVersion=38941&timeout=9m44s&timeoutSeconds=584&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:36.418317686+00:00 stderr F I1212 16:18:36.418257 1 reflector.go:421] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848: watch of *v1.StorageClass returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/storageclasses?allowWatchBookmarks=true&resourceVersion=38941&timeout=7m7s&timeoutSeconds=427&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:50.427326608+00:00 stderr F I1212 16:18:50.427249 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.PersistentVolumeClaim returned Get "https://10.217.4.1:443/api/v1/persistentvolumeclaims?allowWatchBookmarks=true&resourceVersion=38941&timeout=7m31s&timeoutSeconds=451&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:50.933097772+00:00 stderr F I1212 16:18:50.933021 1 reflector.go:421] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848: watch of *v1.StorageClass returned Get "https://10.217.4.1:443/apis/storage.k8s.io/v1/storageclasses?allowWatchBookmarks=true&resourceVersion=38941&timeout=5m23s&timeoutSeconds=323&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:54.682703871+00:00 stderr F I1212 16:18:54.682608 1 reflector.go:421] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845: watch of *v1.PersistentVolume returned Get "https://10.217.4.1:443/api/v1/persistentvolumes?allowWatchBookmarks=true&resourceVersion=38942&timeout=5m29s&timeoutSeconds=329&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:55.168686106+00:00 stderr F I1212 16:18:55.168597 1 reflector.go:421] k8s.io/client-go/informers/factory.go:150: watch of *v1.CSIStorageCapacity returned Get 
"https://10.217.4.1:443/apis/storage.k8s.io/v1/namespaces/hostpath-provisioner/csistoragecapacities?allowWatchBookmarks=true&labelSelector=csi.storage.k8s.io%2Fdrivername%3Dkubevirt.io.hostpath-provisioner%2Ccsi.storage.k8s.io%2Fmanaged-by%3Dexternal-provisioner-crc&resourceVersion=38944&timeout=9m46s&timeoutSeconds=586&watch=true": dial tcp 10.217.4.1:443: connect: connection refused - backing off 2025-12-12T16:18:56.158717452+00:00 stderr F I1212 16:18:56.158636 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:18:56.158717452+00:00 stderr F I1212 16:18:56.158699 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:18:56.158795914+00:00 stderr F I1212 16:18:56.158728 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:18:56.159062571+00:00 stderr F I1212 16:18:56.158734 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:18:56.160012634+00:00 stderr F I1212 16:18:56.159984 1 connection.go:251] GRPC response: {"available_capacity":59743653888,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:18:56.160012634+00:00 stderr F I1212 16:18:56.160001 1 connection.go:252] GRPC error: 2025-12-12T16:18:56.160094716+00:00 stderr F I1212 16:18:56.160047 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58343412Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:18:56.161965913+00:00 stderr F E1212 16:18:56.161919 1 capacity.go:551] update CSIStorageCapacity for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}: Put "https://10.217.4.1:443/apis/storage.k8s.io/v1/namespaces/hostpath-provisioner/csistoragecapacities/csisc-k4gvk": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:56.161965913+00:00 stderr F W1212 16:18:56.161939 1 capacity.go:552] Retrying capacity.workItem{segment:(*topology.Segment)(0xc000840270), storageClassName:"crc-csi-hostpath-provisioner"} after 0 failures 2025-12-12T16:18:57.162911029+00:00 stderr F I1212 16:18:57.162821 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:18:57.162911029+00:00 stderr F I1212 16:18:57.162895 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:18:57.163023482+00:00 stderr F I1212 16:18:57.162904 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:18:57.163837972+00:00 stderr F I1212 16:18:57.163807 1 connection.go:251] GRPC response: {"available_capacity":59743678464,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:18:57.163837972+00:00 stderr F I1212 16:18:57.163823 1 connection.go:252] GRPC error: 2025-12-12T16:18:57.163879113+00:00 stderr F I1212 16:18:57.163844 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58343436Ki, new maximumVolumeSize 83293888Ki 
2025-12-12T16:18:57.176379752+00:00 stderr F I1212 16:18:57.176323 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 39224 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 58343436Ki 2025-12-12T16:18:57.639342838+00:00 stderr F I1212 16:18:57.639251 1 reflector.go:445] k8s.io/client-go/informers/factory.go:150: watch of *v1.StorageClass closed with: too old resource version: 38941 (39133) 2025-12-12T16:19:19.350859843+00:00 stderr F I1212 16:19:19.350804 1 reflector.go:445] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848: watch of *v1.StorageClass closed with: too old resource version: 38941 (39133) 2025-12-12T16:19:25.253518592+00:00 stderr F I1212 16:19:25.253468 1 reflector.go:445] k8s.io/client-go/informers/factory.go:150: watch of *v1.PersistentVolumeClaim closed with: too old resource version: 38941 (39133) 2025-12-12T16:19:25.452061071+00:00 stderr F I1212 16:19:25.451999 1 reflector.go:445] k8s.io/client-go/informers/factory.go:150: watch of *v1.CSIStorageCapacity closed with: too old resource version: 38944 (39133) 2025-12-12T16:19:28.537077360+00:00 stderr F I1212 16:19:28.536992 1 reflector.go:445] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845: watch of *v1.PersistentVolume closed with: too old resource version: 38942 (39133) 2025-12-12T16:19:30.912440530+00:00 stderr F I1212 16:19:30.912363 1 reflector.go:325] Listing and watching *v1.StorageClass from k8s.io/client-go/informers/factory.go:150 2025-12-12T16:19:30.915398284+00:00 stderr F I1212 16:19:30.915335 1 capacity.go:373] Capacity Controller: storage class crc-csi-hostpath-provisioner was updated or added 2025-12-12T16:19:30.915398284+00:00 stderr F I1212 16:19:30.915361 1 capacity.go:480] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:19:30.915398284+00:00 stderr F I1212 16:19:30.915388 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:19:30.915451136+00:00 stderr F I1212 16:19:30.915421 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:19:30.915547238+00:00 stderr F I1212 16:19:30.915430 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:19:30.916833060+00:00 stderr F I1212 16:19:30.916790 1 connection.go:251] GRPC response: {"available_capacity":59740950528,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:19:30.916833060+00:00 stderr F I1212 16:19:30.916812 1 connection.go:252] GRPC error: 2025-12-12T16:19:30.916888952+00:00 stderr F I1212 16:19:30.916837 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58340772Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:19:30.921867107+00:00 stderr F E1212 16:19:30.921812 1 capacity.go:551] update CSIStorageCapacity for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}: Operation cannot be fulfilled on csistoragecapacities.storage.k8s.io "csisc-k4gvk": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:30.921867107+00:00 stderr F W1212 16:19:30.921837 1 capacity.go:552] Retrying 
capacity.workItem{segment:(*topology.Segment)(0xc000840270), storageClassName:"crc-csi-hostpath-provisioner"} after 0 failures 2025-12-12T16:19:31.922335967+00:00 stderr F I1212 16:19:31.922262 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:19:31.922335967+00:00 stderr F I1212 16:19:31.922308 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:19:31.922407239+00:00 stderr F I1212 16:19:31.922313 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:19:31.923157977+00:00 stderr F I1212 16:19:31.923130 1 connection.go:251] GRPC response: {"available_capacity":59741212672,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:19:31.923157977+00:00 stderr F I1212 16:19:31.923140 1 connection.go:252] GRPC error: 2025-12-12T16:19:31.923171518+00:00 stderr F I1212 16:19:31.923159 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58341028Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:19:31.926696376+00:00 stderr F E1212 16:19:31.926651 1 capacity.go:551] update CSIStorageCapacity for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}: Operation cannot be fulfilled on csistoragecapacities.storage.k8s.io "csisc-k4gvk": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:31.926696376+00:00 stderr F W1212 16:19:31.926676 1 capacity.go:552] Retrying capacity.workItem{segment:(*topology.Segment)(0xc000840270), storageClassName:"crc-csi-hostpath-provisioner"} after 1 failures 2025-12-12T16:19:33.927006911+00:00 stderr F I1212 16:19:33.926898 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:19:33.927006911+00:00 stderr F I1212 16:19:33.926953 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:19:33.927067982+00:00 stderr F I1212 16:19:33.926958 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:19:33.928314414+00:00 stderr F I1212 16:19:33.928266 1 connection.go:251] GRPC response: {"available_capacity":59741319168,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:19:33.928314414+00:00 stderr F I1212 16:19:33.928299 1 connection.go:252] GRPC error: 2025-12-12T16:19:33.928380165+00:00 stderr F I1212 16:19:33.928334 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58341132Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:19:33.932393546+00:00 stderr F E1212 16:19:33.932335 1 capacity.go:551] update CSIStorageCapacity for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}: Operation cannot be fulfilled on csistoragecapacities.storage.k8s.io "csisc-k4gvk": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:33.932393546+00:00 stderr F W1212 16:19:33.932359 1 capacity.go:552] Retrying 
capacity.workItem{segment:(*topology.Segment)(0xc000840270), storageClassName:"crc-csi-hostpath-provisioner"} after 2 failures 2025-12-12T16:19:37.933600917+00:00 stderr F I1212 16:19:37.933508 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:19:37.933600917+00:00 stderr F I1212 16:19:37.933567 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:19:37.933707750+00:00 stderr F I1212 16:19:37.933573 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:19:37.934604573+00:00 stderr F I1212 16:19:37.934563 1 connection.go:251] GRPC response: {"available_capacity":59741302784,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:19:37.935437254+00:00 stderr F I1212 16:19:37.935370 1 connection.go:252] GRPC error: 2025-12-12T16:19:37.935455714+00:00 stderr F I1212 16:19:37.935422 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58341116Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:19:37.939257909+00:00 stderr F E1212 16:19:37.939212 1 capacity.go:551] update CSIStorageCapacity for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}: Operation cannot be fulfilled on csistoragecapacities.storage.k8s.io "csisc-k4gvk": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:37.939257909+00:00 stderr F W1212 16:19:37.939231 1 capacity.go:552] Retrying capacity.workItem{segment:(*topology.Segment)(0xc000840270), storageClassName:"crc-csi-hostpath-provisioner"} after 3 failures 2025-12-12T16:19:45.939993701+00:00 stderr F I1212 16:19:45.939906 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:19:45.939993701+00:00 stderr F I1212 16:19:45.939959 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:19:45.940074243+00:00 stderr F I1212 16:19:45.939965 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:19:45.941306374+00:00 stderr F I1212 16:19:45.941269 1 connection.go:251] GRPC response: {"available_capacity":59741282304,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:19:45.941306374+00:00 stderr F I1212 16:19:45.941286 1 connection.go:252] GRPC error: 2025-12-12T16:19:45.941328064+00:00 stderr F I1212 16:19:45.941306 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58341096Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:19:45.945881909+00:00 stderr F E1212 16:19:45.945825 1 capacity.go:551] update CSIStorageCapacity for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}: Operation cannot be fulfilled on csistoragecapacities.storage.k8s.io "csisc-k4gvk": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:45.945881909+00:00 stderr F W1212 16:19:45.945850 1 capacity.go:552] Retrying 
capacity.workItem{segment:(*topology.Segment)(0xc000840270), storageClassName:"crc-csi-hostpath-provisioner"} after 4 failures 2025-12-12T16:19:56.160264580+00:00 stderr F I1212 16:19:56.158842 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:19:56.160264580+00:00 stderr F I1212 16:19:56.158911 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:19:56.160264580+00:00 stderr F I1212 16:19:56.158943 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:19:56.160264580+00:00 stderr F I1212 16:19:56.158948 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:19:56.160326631+00:00 stderr F I1212 16:19:56.160288 1 connection.go:251] GRPC response: {"available_capacity":59739852800,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:19:56.160326631+00:00 stderr F I1212 16:19:56.160295 1 connection.go:252] GRPC error: 2025-12-12T16:19:56.160326631+00:00 stderr F I1212 16:19:56.160314 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58339700Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:19:56.168229500+00:00 stderr F E1212 16:19:56.167365 1 capacity.go:551] update CSIStorageCapacity for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}: Operation cannot be fulfilled on csistoragecapacities.storage.k8s.io "csisc-k4gvk": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:56.168229500+00:00 stderr F W1212 16:19:56.167386 1 capacity.go:552] Retrying capacity.workItem{segment:(*topology.Segment)(0xc000840270), storageClassName:"crc-csi-hostpath-provisioner"} after 5 failures 2025-12-12T16:20:01.946218633+00:00 stderr F I1212 16:20:01.946105 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:20:01.946218633+00:00 stderr F I1212 16:20:01.946169 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:20:01.946276385+00:00 stderr F I1212 16:20:01.946190 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:20:01.947033094+00:00 stderr F I1212 16:20:01.946981 1 connection.go:251] GRPC response: {"available_capacity":59740016640,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:20:01.947033094+00:00 stderr F I1212 16:20:01.947000 1 connection.go:252] GRPC error: 2025-12-12T16:20:01.947048004+00:00 stderr F I1212 16:20:01.947024 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58339860Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:20:01.950657895+00:00 stderr F E1212 16:20:01.950584 1 capacity.go:551] update CSIStorageCapacity for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}: Operation cannot be fulfilled on csistoragecapacities.storage.k8s.io "csisc-k4gvk": the object has been modified; 
please apply your changes to the latest version and try again 2025-12-12T16:20:01.950657895+00:00 stderr F W1212 16:20:01.950625 1 capacity.go:552] Retrying capacity.workItem{segment:(*topology.Segment)(0xc000840270), storageClassName:"crc-csi-hostpath-provisioner"} after 6 failures 2025-12-12T16:20:07.016464156+00:00 stderr F I1212 16:20:07.016390 1 reflector.go:325] Listing and watching *v1.CSIStorageCapacity from k8s.io/client-go/informers/factory.go:150 2025-12-12T16:20:07.018649671+00:00 stderr F I1212 16:20:07.018590 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 39224 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:20:09.927585708+00:00 stderr F I1212 16:20:09.927484 1 reflector.go:325] Listing and watching *v1.PersistentVolumeClaim from k8s.io/client-go/informers/factory.go:150 2025-12-12T16:20:12.160664197+00:00 stderr F I1212 16:20:12.160596 1 reflector.go:325] Listing and watching *v1.StorageClass from sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848 2025-12-12T16:20:24.844527572+00:00 stderr F I1212 16:20:24.844458 1 reflector.go:325] Listing and watching *v1.PersistentVolume from sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845 2025-12-12T16:20:25.093963435+00:00 stderr F I1212 16:20:25.093680 1 controller.go:1152] handleProtectionFinalizer Volume : &PersistentVolume{ObjectMeta:{pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 fc1bbaba-db8e-4b91-8f7b-815ce1e79968 24587 0 2025-11-03 08:50:26 +0000 UTC map[] map[pv.kubernetes.io/provisioned-by:kubevirt.io.hostpath-provisioner volume.kubernetes.io/provisioner-deletion-secret-name: volume.kubernetes.io/provisioner-deletion-secret-namespace:] [] [kubernetes.io/pv-protection] [{csi-provisioner Update v1 2025-11-03 08:50:26 +0000 UTC FieldsV1 {"f:metadata":{"f:annotations":{".":{},"f:pv.kubernetes.io/provisioned-by":{},"f:volume.kubernetes.io/provisioner-deletion-secret-name":{},"f:volume.kubernetes.io/provisioner-deletion-secret-namespace":{}}},"f:spec":{"f:accessModes":{},"f:capacity":{".":{},"f:storage":{}},"f:claimRef":{".":{},"f:apiVersion":{},"f:kind":{},"f:name":{},"f:namespace":{},"f:resourceVersion":{},"f:uid":{}},"f:csi":{".":{},"f:driver":{},"f:volumeAttributes":{".":{},"f:csi.storage.k8s.io/pv/name":{},"f:csi.storage.k8s.io/pvc/name":{},"f:csi.storage.k8s.io/pvc/namespace":{},"f:storage.kubernetes.io/csiProvisionerIdentity":{},"f:storagePool":{}},"f:volumeHandle":{}},"f:nodeAffinity":{".":{},"f:required":{}},"f:persistentVolumeReclaimPolicy":{},"f:storageClassName":{},"f:volumeMode":{}}} } {kube-controller-manager Update v1 2025-11-03 08:50:26 +0000 UTC FieldsV1 {"f:status":{"f:phase":{}}} status}]},Spec:PersistentVolumeSpec{Capacity:ResourceList{storage: {{32212254720 0} {} 30Gi BinarySI},},PersistentVolumeSource:PersistentVolumeSource{GCEPersistentDisk:nil,AWSElasticBlockStore:nil,HostPath:nil,Glusterfs:nil,NFS:nil,RBD:nil,ISCSI:nil,Cinder:nil,CephFS:nil,FC:nil,Flocker:nil,FlexVolume:nil,AzureFile:nil,VsphereVolume:nil,Quobyte:nil,AzureDisk:nil,PhotonPersistentDisk:nil,PortworxVolume:nil,ScaleIO:nil,Local:nil,StorageOS:nil,CSI:&CSIPersistentVolumeSource{Driver:kubevirt.io.hostpath-provisioner,VolumeHandle:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2,ReadOnly:false,FSType:,VolumeAttributes:map[string]string{csi.storage.k8s.io/pv/name: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2,csi.storage.k8s.io/pvc/name: crc-image-registry-storage,csi.storage.k8s.io/pvc/namespace: 
openshift-image-registry,storage.kubernetes.io/csiProvisionerIdentity: 1762159825768-6575-kubevirt.io.hostpath-provisioner-crc,storagePool: local,},ControllerPublishSecretRef:nil,NodeStageSecretRef:nil,NodePublishSecretRef:nil,ControllerExpandSecretRef:nil,NodeExpandSecretRef:nil,},},AccessModes:[ReadWriteMany],ClaimRef:&ObjectReference{Kind:PersistentVolumeClaim,Namespace:openshift-image-registry,Name:crc-image-registry-storage,UID:b21f41aa-58d4-44b1-aeaa-280a8e32ddf2,APIVersion:v1,ResourceVersion:22386,FieldPath:,},PersistentVolumeReclaimPolicy:Retain,StorageClassName:crc-csi-hostpath-provisioner,MountOptions:[],VolumeMode:*Filesystem,NodeAffinity:&VolumeNodeAffinity{Required:&NodeSelector{NodeSelectorTerms:[]NodeSelectorTerm{NodeSelectorTerm{MatchExpressions:[]NodeSelectorRequirement{NodeSelectorRequirement{Key:topology.hostpath.csi/node,Operator:In,Values:[crc],},},MatchFields:[]NodeSelectorRequirement{},},},},},},Status:PersistentVolumeStatus{Phase:Bound,Message:,Reason:,LastPhaseTransitionTime:2025-11-03 08:50:26 +0000 UTC,},} 2025-12-12T16:20:25.093963435+00:00 stderr F I1212 16:20:25.093920 1 controller.go:1239] shouldDelete volume "pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2" 2025-12-12T16:20:25.093963435+00:00 stderr F I1212 16:20:25.093930 1 controller.go:1260] shouldDelete volume "pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2" is false: PersistentVolumePhase is not Released 2025-12-12T16:20:56.159732152+00:00 stderr F I1212 16:20:56.159648 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:20:56.159732152+00:00 stderr F I1212 16:20:56.159720 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:20:56.159834574+00:00 stderr F I1212 16:20:56.159748 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:20:56.159919247+00:00 stderr F I1212 16:20:56.159772 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:20:56.161221171+00:00 stderr F I1212 16:20:56.161170 1 connection.go:251] GRPC response: {"available_capacity":59707482112,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:20:56.161221171+00:00 stderr F I1212 16:20:56.161196 1 connection.go:252] GRPC error: 2025-12-12T16:20:56.161241701+00:00 stderr F I1212 16:20:56.161217 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58308088Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:20:56.169492318+00:00 stderr F I1212 16:20:56.169402 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 40069 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 58308088Ki 2025-12-12T16:20:56.170973766+00:00 stderr F I1212 16:20:56.170908 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 40069 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:21:05.951720412+00:00 stderr F I1212 16:21:05.951604 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:21:05.951720412+00:00 stderr F I1212 16:21:05.951664 1 
connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:21:05.951810084+00:00 stderr F I1212 16:21:05.951668 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:21:05.952970725+00:00 stderr F I1212 16:21:05.952886 1 connection.go:251] GRPC response: {"available_capacity":59707846656,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:21:05.952970725+00:00 stderr F I1212 16:21:05.952906 1 connection.go:252] GRPC error: 2025-12-12T16:21:05.953020486+00:00 stderr F I1212 16:21:05.952962 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58308444Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:21:05.958674235+00:00 stderr F I1212 16:21:05.958603 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 40082 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:21:05.958859319+00:00 stderr F I1212 16:21:05.958802 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 40082 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 58308444Ki 2025-12-12T16:21:56.160644579+00:00 stderr F I1212 16:21:56.160503 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:21:56.160644579+00:00 stderr F I1212 16:21:56.160594 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:21:56.160644579+00:00 stderr F I1212 16:21:56.160623 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:21:56.160767983+00:00 stderr F I1212 16:21:56.160627 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:21:56.162948610+00:00 stderr F I1212 16:21:56.162903 1 connection.go:251] GRPC response: {"available_capacity":59710320640,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:21:56.162948610+00:00 stderr F I1212 16:21:56.162919 1 connection.go:252] GRPC error: 2025-12-12T16:21:56.162973661+00:00 stderr F I1212 16:21:56.162939 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58310860Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:21:56.170560600+00:00 stderr F I1212 16:21:56.170467 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 40164 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:21:56.170560600+00:00 stderr F I1212 16:21:56.170485 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 40164 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 58310860Ki 2025-12-12T16:22:56.161149424+00:00 stderr F I1212 16:22:56.161058 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 
2025-12-12T16:22:56.161149424+00:00 stderr F I1212 16:22:56.161122 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:22:56.161245806+00:00 stderr F I1212 16:22:56.161155 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:22:56.161319558+00:00 stderr F I1212 16:22:56.161161 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:22:56.162727354+00:00 stderr F I1212 16:22:56.162686 1 connection.go:251] GRPC response: {"available_capacity":59697639424,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:22:56.162727354+00:00 stderr F I1212 16:22:56.162698 1 connection.go:252] GRPC error: 2025-12-12T16:22:56.162747294+00:00 stderr F I1212 16:22:56.162718 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58298476Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:22:56.169917005+00:00 stderr F I1212 16:22:56.169809 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 40262 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:22:56.170344576+00:00 stderr F I1212 16:22:56.170301 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 40262 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 58298476Ki 2025-12-12T16:23:56.162532454+00:00 stderr F I1212 16:23:56.161660 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:23:56.162714528+00:00 stderr F I1212 16:23:56.162694 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:23:56.162797050+00:00 stderr F I1212 16:23:56.162782 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:23:56.162989385+00:00 stderr F I1212 16:23:56.162815 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:23:56.163929868+00:00 stderr F I1212 16:23:56.163909 1 connection.go:251] GRPC response: {"available_capacity":59696758784,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:23:56.163974880+00:00 stderr F I1212 16:23:56.163962 1 connection.go:252] GRPC error: 2025-12-12T16:23:56.164036631+00:00 stderr F I1212 16:23:56.164014 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58297616Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:23:56.172425030+00:00 stderr F I1212 16:23:56.172361 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 40351 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:23:56.172549863+00:00 stderr F I1212 16:23:56.172521 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 40351 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 
with capacity 58297616Ki 2025-12-12T16:24:56.164837034+00:00 stderr F I1212 16:24:56.164613 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:24:56.164837034+00:00 stderr F I1212 16:24:56.164681 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:24:56.164926907+00:00 stderr F I1212 16:24:56.164843 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:24:56.165081381+00:00 stderr F I1212 16:24:56.164852 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:24:56.165843371+00:00 stderr F I1212 16:24:56.165810 1 connection.go:251] GRPC response: {"available_capacity":59696693248,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:24:56.165843371+00:00 stderr F I1212 16:24:56.165823 1 connection.go:252] GRPC error: 2025-12-12T16:24:56.165868202+00:00 stderr F I1212 16:24:56.165845 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58297552Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:24:56.172488685+00:00 stderr F I1212 16:24:56.172431 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 40438 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 58297552Ki 2025-12-12T16:24:56.172645059+00:00 stderr F I1212 16:24:56.172594 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 40438 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:25:56.165109890+00:00 stderr F I1212 16:25:56.165015 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:25:56.165109890+00:00 stderr F I1212 16:25:56.165078 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:25:56.165241503+00:00 stderr F I1212 16:25:56.165108 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:25:56.165279504+00:00 stderr F I1212 16:25:56.165113 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:25:56.166860684+00:00 stderr F I1212 16:25:56.166787 1 connection.go:251] GRPC response: {"available_capacity":59698139136,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:25:56.166860684+00:00 stderr F I1212 16:25:56.166798 1 connection.go:252] GRPC error: 2025-12-12T16:25:56.166860684+00:00 stderr F I1212 16:25:56.166815 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58298964Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:25:56.176833076+00:00 stderr F I1212 16:25:56.176768 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 40870 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 
2025-12-12T16:25:56.177157114+00:00 stderr F I1212 16:25:56.177072 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 40870 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 58298964Ki 2025-12-12T16:26:26.096058717+00:00 stderr F I1212 16:26:26.095975 1 reflector.go:790] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845: Watch close - *v1.PersistentVolume total 6 items received 2025-12-12T16:26:56.165364628+00:00 stderr F I1212 16:26:56.165257 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:26:56.165364628+00:00 stderr F I1212 16:26:56.165346 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:26:56.165448240+00:00 stderr F I1212 16:26:56.165379 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:26:56.165504241+00:00 stderr F I1212 16:26:56.165384 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:26:56.168968429+00:00 stderr F I1212 16:26:56.168352 1 connection.go:251] GRPC response: {"available_capacity":59609325568,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:26:56.168968429+00:00 stderr F I1212 16:26:56.168371 1 connection.go:252] GRPC error: 2025-12-12T16:26:56.168968429+00:00 stderr F I1212 16:26:56.168397 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 58212232Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:26:56.177073324+00:00 stderr F I1212 16:26:56.177017 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 41374 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:26:56.177297379+00:00 stderr F I1212 16:26:56.177269 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 41374 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 58212232Ki 2025-12-12T16:27:56.166443335+00:00 stderr F I1212 16:27:56.166362 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:27:56.166561138+00:00 stderr F I1212 16:27:56.166549 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:27:56.166617230+00:00 stderr F I1212 16:27:56.166608 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:27:56.166924037+00:00 stderr F I1212 16:27:56.166668 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:27:56.168289072+00:00 stderr F I1212 16:27:56.168251 1 connection.go:251] GRPC response: {"available_capacity":57889792000,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:27:56.168289072+00:00 stderr F I1212 16:27:56.168274 1 connection.go:252] GRPC error: 2025-12-12T16:27:56.168333733+00:00 stderr F I1212 
16:27:56.168301 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 56533000Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:27:56.176672484+00:00 stderr F I1212 16:27:56.176594 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 43525 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 56533000Ki 2025-12-12T16:27:56.176672484+00:00 stderr F I1212 16:27:56.176622 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 43525 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:28:14.931979594+00:00 stderr F I1212 16:28:14.931880 1 reflector.go:790] k8s.io/client-go/informers/factory.go:150: Watch close - *v1.PersistentVolumeClaim total 10 items received 2025-12-12T16:28:47.020954646+00:00 stderr F I1212 16:28:47.020814 1 reflector.go:790] k8s.io/client-go/informers/factory.go:150: Watch close - *v1.CSIStorageCapacity total 18 items received 2025-12-12T16:28:56.167452609+00:00 stderr F I1212 16:28:56.167308 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:28:56.167452609+00:00 stderr F I1212 16:28:56.167396 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:28:56.167452609+00:00 stderr F I1212 16:28:56.167434 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:28:56.167626114+00:00 stderr F I1212 16:28:56.167441 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:28:56.168802153+00:00 stderr F I1212 16:28:56.168759 1 connection.go:251] GRPC response: {"available_capacity":57105453056,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:28:56.168802153+00:00 stderr F I1212 16:28:56.168783 1 connection.go:252] GRPC error: 2025-12-12T16:28:56.168843894+00:00 stderr F I1212 16:28:56.168812 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 55767044Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:28:56.175645226+00:00 stderr F I1212 16:28:56.175541 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 43828 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:28:56.175851802+00:00 stderr F I1212 16:28:56.175792 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 43828 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55767044Ki 2025-12-12T16:29:11.917243185+00:00 stderr F I1212 16:29:11.917085 1 reflector.go:790] k8s.io/client-go/informers/factory.go:150: Watch close - *v1.StorageClass total 10 items received 2025-12-12T16:29:56.165049547+00:00 stderr F I1212 16:29:56.164983 1 reflector.go:790] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848: Watch close - *v1.StorageClass total 10 items received 2025-12-12T16:29:56.168353331+00:00 stderr F I1212 16:29:56.168276 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 
storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:29:56.168377991+00:00 stderr F I1212 16:29:56.168350 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:29:56.168414452+00:00 stderr F I1212 16:29:56.168384 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:29:56.168695469+00:00 stderr F I1212 16:29:56.168395 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:29:56.169839238+00:00 stderr F I1212 16:29:56.169816 1 connection.go:251] GRPC response: {"available_capacity":57036025856,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:29:56.169839238+00:00 stderr F I1212 16:29:56.169826 1 connection.go:252] GRPC error: 2025-12-12T16:29:56.169866869+00:00 stderr F I1212 16:29:56.169843 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 55699244Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:29:56.174987578+00:00 stderr F I1212 16:29:56.174912 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 44135 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:29:56.175368868+00:00 stderr F I1212 16:29:56.175329 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 44135 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55699244Ki 2025-12-12T16:30:56.169540763+00:00 stderr F I1212 16:30:56.169282 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:30:56.169540763+00:00 stderr F I1212 16:30:56.169370 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:30:56.169540763+00:00 stderr F I1212 16:30:56.169403 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:30:56.169540763+00:00 stderr F I1212 16:30:56.169408 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:30:56.170658541+00:00 stderr F I1212 16:30:56.170612 1 connection.go:251] GRPC response: {"available_capacity":57035747328,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:30:56.170658541+00:00 stderr F I1212 16:30:56.170632 1 connection.go:252] GRPC error: 2025-12-12T16:30:56.170679942+00:00 stderr F I1212 16:30:56.170660 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 55698972Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:30:56.176992389+00:00 stderr F I1212 16:30:56.176883 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 44310 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55698972Ki 2025-12-12T16:30:56.177582604+00:00 stderr F I1212 16:30:56.177527 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 44310 is 
already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:31:56.170337864+00:00 stderr F I1212 16:31:56.170229 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:31:56.170337864+00:00 stderr F I1212 16:31:56.170307 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:31:56.170404155+00:00 stderr F I1212 16:31:56.170339 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:31:56.170521158+00:00 stderr F I1212 16:31:56.170354 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:31:56.172206130+00:00 stderr F I1212 16:31:56.172166 1 connection.go:251] GRPC response: {"available_capacity":57037406208,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:31:56.172206130+00:00 stderr F I1212 16:31:56.172192 1 connection.go:252] GRPC error: 2025-12-12T16:31:56.172233440+00:00 stderr F I1212 16:31:56.172212 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 55700592Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:31:56.177480409+00:00 stderr F I1212 16:31:56.177401 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 44448 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55700592Ki 2025-12-12T16:31:56.177579672+00:00 stderr F I1212 16:31:56.177538 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 44448 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:32:10.100281392+00:00 stderr F I1212 16:32:10.100163 1 reflector.go:790] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845: Watch close - *v1.PersistentVolume total 6 items received 2025-12-12T16:32:56.171199760+00:00 stderr F I1212 16:32:56.171075 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:32:56.171199760+00:00 stderr F I1212 16:32:56.171165 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:32:56.171297852+00:00 stderr F I1212 16:32:56.171234 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:32:56.171400315+00:00 stderr F I1212 16:32:56.171244 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:32:56.172594355+00:00 stderr F I1212 16:32:56.172536 1 connection.go:251] GRPC response: {"available_capacity":57038860288,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:32:56.172594355+00:00 stderr F I1212 16:32:56.172564 1 connection.go:252] GRPC error: 2025-12-12T16:32:56.172635776+00:00 stderr F I1212 16:32:56.172594 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 55702012Ki, 
new maximumVolumeSize 83293888Ki 2025-12-12T16:32:56.180656686+00:00 stderr F I1212 16:32:56.180546 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 44594 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:32:56.180888442+00:00 stderr F I1212 16:32:56.180772 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 44594 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55702012Ki 2025-12-12T16:33:56.172385168+00:00 stderr F I1212 16:33:56.172272 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:33:56.172385168+00:00 stderr F I1212 16:33:56.172354 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:33:56.172560912+00:00 stderr F I1212 16:33:56.172397 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:33:56.172734927+00:00 stderr F I1212 16:33:56.172544 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:33:56.173876255+00:00 stderr F I1212 16:33:56.173832 1 connection.go:251] GRPC response: {"available_capacity":57038041088,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:33:56.173876255+00:00 stderr F I1212 16:33:56.173851 1 connection.go:252] GRPC error: 2025-12-12T16:33:56.173913866+00:00 stderr F I1212 16:33:56.173885 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 55701212Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:33:56.181151147+00:00 stderr F I1212 16:33:56.181059 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 44731 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55701212Ki 2025-12-12T16:33:56.181224039+00:00 stderr F I1212 16:33:56.181144 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 44731 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:34:34.923120276+00:00 stderr F I1212 16:34:34.921654 1 reflector.go:790] k8s.io/client-go/informers/factory.go:150: Watch close - *v1.StorageClass total 6 items received 2025-12-12T16:34:50.937432060+00:00 stderr F I1212 16:34:50.937348 1 reflector.go:790] k8s.io/client-go/informers/factory.go:150: Watch close - *v1.PersistentVolumeClaim total 7 items received 2025-12-12T16:34:56.173819517+00:00 stderr F I1212 16:34:56.172766 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:34:56.173932990+00:00 stderr F I1212 16:34:56.173812 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:34:56.173932990+00:00 stderr F I1212 16:34:56.173878 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:34:56.174073473+00:00 stderr F I1212 16:34:56.173891 1 connection.go:245] GRPC request: 
{"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:34:56.175343105+00:00 stderr F I1212 16:34:56.175300 1 connection.go:251] GRPC response: {"available_capacity":57055371264,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:34:56.175343105+00:00 stderr F I1212 16:34:56.175329 1 connection.go:252] GRPC error: 2025-12-12T16:34:56.175420337+00:00 stderr F I1212 16:34:56.175370 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 55718136Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:34:56.182693000+00:00 stderr F I1212 16:34:56.182566 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 44882 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55718136Ki 2025-12-12T16:34:56.182974437+00:00 stderr F I1212 16:34:56.182886 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 44882 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:35:09.930985489+00:00 stderr F I1212 16:35:09.930912 1 reflector.go:378] k8s.io/client-go/informers/factory.go:150: forcing resync 2025-12-12T16:35:25.094435284+00:00 stderr F I1212 16:35:25.094381 1 reflector.go:378] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845: forcing resync 2025-12-12T16:35:25.094904376+00:00 stderr F I1212 16:35:25.094583 1 controller.go:1152] handleProtectionFinalizer Volume : &PersistentVolume{ObjectMeta:{pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 fc1bbaba-db8e-4b91-8f7b-815ce1e79968 24587 0 2025-11-03 08:50:26 +0000 UTC map[] map[pv.kubernetes.io/provisioned-by:kubevirt.io.hostpath-provisioner volume.kubernetes.io/provisioner-deletion-secret-name: volume.kubernetes.io/provisioner-deletion-secret-namespace:] [] [kubernetes.io/pv-protection] [{csi-provisioner Update v1 2025-11-03 08:50:26 +0000 UTC FieldsV1 {"f:metadata":{"f:annotations":{".":{},"f:pv.kubernetes.io/provisioned-by":{},"f:volume.kubernetes.io/provisioner-deletion-secret-name":{},"f:volume.kubernetes.io/provisioner-deletion-secret-namespace":{}}},"f:spec":{"f:accessModes":{},"f:capacity":{".":{},"f:storage":{}},"f:claimRef":{".":{},"f:apiVersion":{},"f:kind":{},"f:name":{},"f:namespace":{},"f:resourceVersion":{},"f:uid":{}},"f:csi":{".":{},"f:driver":{},"f:volumeAttributes":{".":{},"f:csi.storage.k8s.io/pv/name":{},"f:csi.storage.k8s.io/pvc/name":{},"f:csi.storage.k8s.io/pvc/namespace":{},"f:storage.kubernetes.io/csiProvisionerIdentity":{},"f:storagePool":{}},"f:volumeHandle":{}},"f:nodeAffinity":{".":{},"f:required":{}},"f:persistentVolumeReclaimPolicy":{},"f:storageClassName":{},"f:volumeMode":{}}} } {kube-controller-manager Update v1 2025-11-03 08:50:26 +0000 UTC FieldsV1 {"f:status":{"f:phase":{}}} status}]},Spec:PersistentVolumeSpec{Capacity:ResourceList{storage: {{32212254720 0} {} 30Gi 
BinarySI},},PersistentVolumeSource:PersistentVolumeSource{GCEPersistentDisk:nil,AWSElasticBlockStore:nil,HostPath:nil,Glusterfs:nil,NFS:nil,RBD:nil,ISCSI:nil,Cinder:nil,CephFS:nil,FC:nil,Flocker:nil,FlexVolume:nil,AzureFile:nil,VsphereVolume:nil,Quobyte:nil,AzureDisk:nil,PhotonPersistentDisk:nil,PortworxVolume:nil,ScaleIO:nil,Local:nil,StorageOS:nil,CSI:&CSIPersistentVolumeSource{Driver:kubevirt.io.hostpath-provisioner,VolumeHandle:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2,ReadOnly:false,FSType:,VolumeAttributes:map[string]string{csi.storage.k8s.io/pv/name: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2,csi.storage.k8s.io/pvc/name: crc-image-registry-storage,csi.storage.k8s.io/pvc/namespace: openshift-image-registry,storage.kubernetes.io/csiProvisionerIdentity: 1762159825768-6575-kubevirt.io.hostpath-provisioner-crc,storagePool: local,},ControllerPublishSecretRef:nil,NodeStageSecretRef:nil,NodePublishSecretRef:nil,ControllerExpandSecretRef:nil,NodeExpandSecretRef:nil,},},AccessModes:[ReadWriteMany],ClaimRef:&ObjectReference{Kind:PersistentVolumeClaim,Namespace:openshift-image-registry,Name:crc-image-registry-storage,UID:b21f41aa-58d4-44b1-aeaa-280a8e32ddf2,APIVersion:v1,ResourceVersion:22386,FieldPath:,},PersistentVolumeReclaimPolicy:Retain,StorageClassName:crc-csi-hostpath-provisioner,MountOptions:[],VolumeMode:*Filesystem,NodeAffinity:&VolumeNodeAffinity{Required:&NodeSelector{NodeSelectorTerms:[]NodeSelectorTerm{NodeSelectorTerm{MatchExpressions:[]NodeSelectorRequirement{NodeSelectorRequirement{Key:topology.hostpath.csi/node,Operator:In,Values:[crc],},},MatchFields:[]NodeSelectorRequirement{},},},},},},Status:PersistentVolumeStatus{Phase:Bound,Message:,Reason:,LastPhaseTransitionTime:2025-11-03 08:50:26 +0000 UTC,},} 2025-12-12T16:35:25.094971847+00:00 stderr F I1212 16:35:25.094955 1 controller.go:1239] shouldDelete volume "pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2" 2025-12-12T16:35:25.095003748+00:00 stderr F I1212 16:35:25.094991 1 controller.go:1260] shouldDelete volume "pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2" is false: PersistentVolumePhase is not Released 2025-12-12T16:35:34.026843115+00:00 stderr F I1212 16:35:34.026787 1 reflector.go:790] k8s.io/client-go/informers/factory.go:150: Watch close - *v1.CSIStorageCapacity total 14 items received 2025-12-12T16:35:56.174737504+00:00 stderr F I1212 16:35:56.174617 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:35:56.174737504+00:00 stderr F I1212 16:35:56.174686 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:35:56.174737504+00:00 stderr F I1212 16:35:56.174715 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:35:56.174923759+00:00 stderr F I1212 16:35:56.174721 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:35:56.176158040+00:00 stderr F I1212 16:35:56.176023 1 connection.go:251] GRPC response: {"available_capacity":57052672000,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:35:56.176158040+00:00 stderr F I1212 16:35:56.176044 1 connection.go:252] GRPC error: 2025-12-12T16:35:56.176158040+00:00 stderr F I1212 16:35:56.176067 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 
storageClassName:crc-csi-hostpath-provisioner}, new capacity 55715500Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:35:56.182384096+00:00 stderr F I1212 16:35:56.182275 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 45029 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55715500Ki 2025-12-12T16:35:56.182765056+00:00 stderr F I1212 16:35:56.182705 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 45029 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:36:56.175259981+00:00 stderr F I1212 16:36:56.175137 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:36:56.175259981+00:00 stderr F I1212 16:36:56.175246 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:36:56.175369654+00:00 stderr F I1212 16:36:56.175294 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:36:56.175494687+00:00 stderr F I1212 16:36:56.175302 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:36:56.176962134+00:00 stderr F I1212 16:36:56.176915 1 connection.go:251] GRPC response: {"available_capacity":57051860992,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:36:56.176962134+00:00 stderr F I1212 16:36:56.176940 1 connection.go:252] GRPC error: 2025-12-12T16:36:56.177014025+00:00 stderr F I1212 16:36:56.176975 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 55714708Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:36:56.186311949+00:00 stderr F I1212 16:36:56.186235 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 45173 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55714708Ki 2025-12-12T16:36:56.186350430+00:00 stderr F I1212 16:36:56.186309 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 45173 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:37:56.176258883+00:00 stderr F I1212 16:37:56.176158 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:37:56.176258883+00:00 stderr F I1212 16:37:56.176244 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:37:56.176389086+00:00 stderr F I1212 16:37:56.176273 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:37:56.176402506+00:00 stderr F I1212 16:37:56.176278 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:37:56.177359961+00:00 stderr F I1212 16:37:56.177326 1 connection.go:251] GRPC response: {"available_capacity":57051299840,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 
2025-12-12T16:37:56.177359961+00:00 stderr F I1212 16:37:56.177344 1 connection.go:252] GRPC error: 2025-12-12T16:37:56.177405962+00:00 stderr F I1212 16:37:56.177369 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 55714160Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:37:56.186563882+00:00 stderr F I1212 16:37:56.186530 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 45366 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55714160Ki 2025-12-12T16:37:56.186873710+00:00 stderr F I1212 16:37:56.186827 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 45366 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:38:00.168838876+00:00 stderr F I1212 16:38:00.168743 1 reflector.go:790] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:848: Watch close - *v1.StorageClass total 9 items received 2025-12-12T16:38:56.177330615+00:00 stderr F I1212 16:38:56.177219 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:38:56.177422948+00:00 stderr F I1212 16:38:56.177358 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:38:56.177422948+00:00 stderr F I1212 16:38:56.177411 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:38:56.177606722+00:00 stderr F I1212 16:38:56.177424 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:38:56.179465719+00:00 stderr F I1212 16:38:56.179407 1 connection.go:251] GRPC response: {"available_capacity":57043419136,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:38:56.179465719+00:00 stderr F I1212 16:38:56.179424 1 connection.go:252] GRPC error: 2025-12-12T16:38:56.179533271+00:00 stderr F I1212 16:38:56.179496 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 55706464Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:38:56.186972468+00:00 stderr F I1212 16:38:56.186929 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 45539 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:38:56.187336377+00:00 stderr F I1212 16:38:56.187270 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 45539 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55706464Ki 2025-12-12T16:39:01.104698596+00:00 stderr F I1212 16:39:01.104614 1 reflector.go:790] sigs.k8s.io/sig-storage-lib-external-provisioner/v9/controller/controller.go:845: Watch close - *v1.PersistentVolume total 8 items received 2025-12-12T16:39:56.180258052+00:00 stderr F I1212 16:39:56.180124 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:39:56.180429297+00:00 stderr F I1212 16:39:56.180404 1 capacity.go:574] Capacity Controller: refreshing 
{segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:39:56.180512659+00:00 stderr F I1212 16:39:56.180494 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:39:56.180787546+00:00 stderr F I1212 16:39:56.180540 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:39:56.182328134+00:00 stderr F I1212 16:39:56.182271 1 connection.go:251] GRPC response: {"available_capacity":57048080384,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:39:56.182328134+00:00 stderr F I1212 16:39:56.182299 1 connection.go:252] GRPC error: 2025-12-12T16:39:56.182366275+00:00 stderr F I1212 16:39:56.182331 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 55711016Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:39:56.191412153+00:00 stderr F I1212 16:39:56.191356 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 45679 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:39:56.191640018+00:00 stderr F I1212 16:39:56.191557 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 45679 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55711016Ki 2025-12-12T16:40:56.182358279+00:00 stderr F I1212 16:40:56.182294 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:40:56.182468842+00:00 stderr F I1212 16:40:56.182456 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:40:56.182511823+00:00 stderr F I1212 16:40:56.182503 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:40:56.182680077+00:00 stderr F I1212 16:40:56.182554 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:40:56.183725704+00:00 stderr F I1212 16:40:56.183668 1 connection.go:251] GRPC response: {"available_capacity":56606535680,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:40:56.183725704+00:00 stderr F I1212 16:40:56.183691 1 connection.go:252] GRPC error: 2025-12-12T16:40:56.183744214+00:00 stderr F I1212 16:40:56.183717 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 55279820Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:40:56.189575521+00:00 stderr F I1212 16:40:56.189533 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 45853 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 55279820Ki 2025-12-12T16:40:56.189637772+00:00 stderr F I1212 16:40:56.189615 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 45853 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:41:56.183865650+00:00 stderr F I1212 16:41:56.183495 1 capacity.go:518] Capacity 
Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:41:56.183865650+00:00 stderr F I1212 16:41:56.183587 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:41:56.183865650+00:00 stderr F I1212 16:41:56.183632 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:41:56.184071685+00:00 stderr F I1212 16:41:56.183639 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:41:56.186226019+00:00 stderr F I1212 16:41:56.186159 1 connection.go:251] GRPC response: {"available_capacity":56276037632,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:41:56.186226019+00:00 stderr F I1212 16:41:56.186172 1 connection.go:252] GRPC error: 2025-12-12T16:41:56.186246910+00:00 stderr F I1212 16:41:56.186218 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 54957068Ki, new maximumVolumeSize 83293888Ki 2025-12-12T16:41:56.192038395+00:00 stderr F I1212 16:41:56.191956 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 45980 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 54957068Ki 2025-12-12T16:41:56.192235470+00:00 stderr F I1212 16:41:56.192161 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 45980 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:41:59.927392827+00:00 stderr F I1212 16:41:59.927300 1 reflector.go:790] k8s.io/client-go/informers/factory.go:150: Watch close - *v1.StorageClass total 8 items received 2025-12-12T16:42:51.940306085+00:00 stderr F I1212 16:42:51.940235 1 reflector.go:790] k8s.io/client-go/informers/factory.go:150: Watch close - *v1.PersistentVolumeClaim total 8 items received 2025-12-12T16:42:56.183868997+00:00 stderr F I1212 16:42:56.183754 1 capacity.go:518] Capacity Controller: enqueuing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} for periodic update 2025-12-12T16:42:56.183868997+00:00 stderr F I1212 16:42:56.183849 1 capacity.go:574] Capacity Controller: refreshing {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:42:56.183918928+00:00 stderr F I1212 16:42:56.183884 1 connection.go:244] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:42:56.184008950+00:00 stderr F I1212 16:42:56.183889 1 connection.go:245] GRPC request: {"accessible_topology":{"segments":{"topology.hostpath.csi/node":"crc"}},"parameters":{"storagePool":"local"},"volume_capabilities":[{"AccessType":{"Mount":null},"access_mode":{}}]} 2025-12-12T16:42:56.185126978+00:00 stderr F I1212 16:42:56.185088 1 connection.go:251] GRPC response: {"available_capacity":56275296256,"maximum_volume_size":{"value":85292941312},"minimum_volume_size":{}} 2025-12-12T16:42:56.185126978+00:00 stderr F I1212 16:42:56.185099 1 connection.go:252] GRPC error: 2025-12-12T16:42:56.185145109+00:00 stderr F I1212 16:42:56.185122 1 capacity.go:667] Capacity Controller: updating csisc-k4gvk for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner}, new capacity 54956344Ki, new maximumVolumeSize 83293888Ki 
2025-12-12T16:42:56.190914284+00:00 stderr F I1212 16:42:56.190843 1 capacity.go:715] Capacity Controller: CSIStorageCapacity csisc-k4gvk with resource version 46104 is already known to match {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} 2025-12-12T16:42:56.190914284+00:00 stderr F I1212 16:42:56.190851 1 capacity.go:672] Capacity Controller: updated csisc-k4gvk with new resource version 46104 for {segment:0xc000840270 storageClassName:crc-csi-hostpath-provisioner} with capacity 54956344Ki ././@LongLink0000644000000000000000000000027000000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/node-driver-registrar/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_c0000755000175000017500000000000015117043062033163 5ustar zuulzuul././@LongLink0000644000000000000000000000027500000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/node-driver-registrar/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_c0000644000175000017500000000274015117043042033166 0ustar zuulzuul2025-12-12T16:16:53.367876841+00:00 stderr F I1212 16:16:53.367591 1 main.go:135] Version: 902b844d6eef9c046509e5b4ce435e3240c54389 2025-12-12T16:16:53.367876841+00:00 stderr F I1212 16:16:53.367782 1 main.go:136] Running node-driver-registrar in mode= 2025-12-12T16:16:53.367876841+00:00 stderr F I1212 16:16:53.367788 1 main.go:157] Attempting to open a gRPC connection with: "/csi/csi.sock" 2025-12-12T16:16:53.371311795+00:00 stderr F I1212 16:16:53.371253 1 main.go:164] Calling CSI driver to discover driver name 2025-12-12T16:16:53.395792542+00:00 stderr F I1212 16:16:53.395704 1 main.go:173] CSI driver name: "kubevirt.io.hostpath-provisioner" 2025-12-12T16:16:53.395792542+00:00 stderr F I1212 16:16:53.395771 1 node_register.go:55] Starting Registration Server at: /registration/kubevirt.io.hostpath-provisioner-reg.sock 2025-12-12T16:16:53.398470598+00:00 stderr F I1212 16:16:53.397513 1 node_register.go:64] Registration Server started at: /registration/kubevirt.io.hostpath-provisioner-reg.sock 2025-12-12T16:16:53.398744034+00:00 stderr F I1212 16:16:53.398719 1 node_register.go:88] Skipping HTTP server because endpoint is set to: "" 2025-12-12T16:16:53.536681992+00:00 stderr F I1212 16:16:53.536632 1 main.go:90] Received GetInfo call: &InfoRequest{} 2025-12-12T16:16:53.591390798+00:00 stderr F I1212 16:16:53.591322 1 main.go:101] Received NotifyRegistrationStatus call: &RegistrationStatus{PluginRegistered:true,Error:,} ././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/hostpath-provisioner/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_c0000755000175000017500000000000015117043062033163 5ustar zuulzuul././@LongLink0000644000000000000000000000027400000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/hostpath-provisioner/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_c0000644000175000017500000012775615117043043033206 0ustar zuulzuul2025-12-12T16:16:51.146290673+00:00 stderr F I1212 16:16:51.145681 1 plugin.go:44] Starting Prometheus metrics endpoint server 2025-12-12T16:16:51.146290673+00:00 stderr F I1212 16:16:51.146257 1 plugin.go:47] Starting new HostPathDriver, config: {kubevirt.io.hostpath-provisioner unix:///csi/csi.sock crc map[] latest } 2025-12-12T16:16:51.338308051+00:00 stderr F I1212 16:16:51.338224 1 mount_linux.go:174] Cannot run systemd-run, assuming non-systemd OS 2025-12-12T16:16:51.338358972+00:00 stderr F I1212 16:16:51.338332 1 hostpath.go:88] name: local, dataDir: /csi-data-dir 2025-12-12T16:16:51.338446785+00:00 stderr F I1212 16:16:51.338413 1 hostpath.go:107] Driver: kubevirt.io.hostpath-provisioner, version: latest 2025-12-12T16:16:51.338900396+00:00 stderr F I1212 16:16:51.338865 1 server.go:194] Starting domain socket: unix///csi/csi.sock 2025-12-12T16:16:51.339197303+00:00 stderr F I1212 16:16:51.339125 1 server.go:89] Listening for connections on address: &net.UnixAddr{Name:"//csi/csi.sock", Net:"unix"} 2025-12-12T16:16:51.359519909+00:00 stderr F I1212 16:16:51.359433 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:16:53.388872543+00:00 stderr F I1212 16:16:53.388745 1 server.go:104] GRPC call: /csi.v1.Identity/GetPluginInfo 2025-12-12T16:16:53.551801921+00:00 stderr F I1212 16:16:53.551748 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetInfo 2025-12-12T16:16:53.617822113+00:00 stderr F I1212 16:16:53.616734 1 server.go:104] GRPC call: /csi.v1.Node/NodeUnpublishVolume 2025-12-12T16:16:53.617822113+00:00 stderr F I1212 16:16:53.616760 1 nodeserver.go:199] Node Unpublish Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 TargetPath:/var/lib/kubelet/pods/9e9b5059-1b3e-4067-a63d-2952cbe863af/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:16:53.617822113+00:00 stderr F I1212 16:16:53.616791 1 nodeserver.go:206] Unmounting path: /var/lib/kubelet/pods/9e9b5059-1b3e-4067-a63d-2952cbe863af/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount 2025-12-12T16:16:53.721481464+00:00 stderr F I1212 16:16:53.720247 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:16:53.725494052+00:00 stderr F I1212 16:16:53.725164 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:16:53.735443835+00:00 stderr F I1212 16:16:53.734818 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:16:53.740268273+00:00 stderr F I1212 16:16:53.738054 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:16:53.743935422+00:00 stderr F I1212 16:16:53.742490 1 server.go:104] GRPC call: /csi.v1.Node/NodePublishVolume 2025-12-12T16:16:53.743935422+00:00 stderr F I1212 16:16:53.742512 1 nodeserver.go:82] Node Publish Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 PublishContext:map[] StagingTargetPath: TargetPath:/var/lib/kubelet/pods/162da780-4bd3-4acf-b114-06ae104fc8ad/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount VolumeCapability:mount:<> access_mode: 
Readonly:false Secrets:map[] VolumeContext:map[csi.storage.k8s.io/ephemeral:false csi.storage.k8s.io/pod.name:image-registry-66587d64c8-jqtjf csi.storage.k8s.io/pod.namespace:openshift-image-registry csi.storage.k8s.io/pod.uid:162da780-4bd3-4acf-b114-06ae104fc8ad csi.storage.k8s.io/pv/name:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 csi.storage.k8s.io/pvc/name:crc-image-registry-storage csi.storage.k8s.io/pvc/namespace:openshift-image-registry csi.storage.k8s.io/serviceAccount.name:registry storage.kubernetes.io/csiProvisionerIdentity:1762159825768-6575-kubevirt.io.hostpath-provisioner-crc storagePool:local] XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:16:54.806359171+00:00 stderr F I1212 16:16:54.797397 1 server.go:104] GRPC call: /csi.v1.Identity/GetPluginInfo 2025-12-12T16:16:55.901232061+00:00 stderr F I1212 16:16:55.900883 1 server.go:104] GRPC call: /csi.v1.Identity/GetPluginInfo 2025-12-12T16:16:55.921249770+00:00 stderr F I1212 16:16:55.914862 1 server.go:104] GRPC call: /csi.v1.Identity/GetPluginCapabilities 2025-12-12T16:16:55.921249770+00:00 stderr F I1212 16:16:55.915832 1 server.go:104] GRPC call: /csi.v1.Controller/ControllerGetCapabilities 2025-12-12T16:16:55.953608590+00:00 stderr F I1212 16:16:55.944333 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetInfo 2025-12-12T16:16:56.159053166+00:00 stderr F I1212 16:16:56.158983 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:16:56.159053166+00:00 stderr F I1212 16:16:56.159011 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:17:00.334897325+00:00 stderr F I1212 16:17:00.334786 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:17:00.341388144+00:00 stderr F I1212 16:17:00.341330 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:17:00.341436275+00:00 stderr F I1212 16:17:00.341362 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/162da780-4bd3-4acf-b114-06ae104fc8ad/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:17:00.350524817+00:00 stderr F I1212 16:17:00.350447 1 healthcheck.go:84] fs available: 61358632960, total capacity: 85292941312, percentage available: 71.94, number of free inodes: 41550478 2025-12-12T16:17:00.350524817+00:00 stderr F I1212 16:17:00.350497 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:17:00.350571318+00:00 stderr F I1212 16:17:00.350540 1 nodeserver.go:330] Capacity: 85292941312 Used: 23934308352 Available: 61358632960 Inodes: 41679680 Free inodes: 41550488 Used inodes: 129192 2025-12-12T16:17:51.370324469+00:00 stderr F I1212 16:17:51.370260 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:17:56.159213453+00:00 stderr F I1212 16:17:56.159094 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:17:56.159213453+00:00 stderr F I1212 16:17:56.159150 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:18:24.402911133+00:00 stderr F I1212 16:18:24.402842 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:18:24.403830095+00:00 stderr F I1212 16:18:24.403768 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:18:24.403830095+00:00 stderr F I1212 16:18:24.403784 1 
nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/162da780-4bd3-4acf-b114-06ae104fc8ad/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:18:24.408490031+00:00 stderr F I1212 16:18:24.408429 1 healthcheck.go:84] fs available: 59761364992, total capacity: 85292941312, percentage available: 70.07, number of free inodes: 41544763 2025-12-12T16:18:24.408490031+00:00 stderr F I1212 16:18:24.408447 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:18:24.408490031+00:00 stderr F I1212 16:18:24.408459 1 nodeserver.go:330] Capacity: 85292941312 Used: 25531576320 Available: 59761364992 Inodes: 41679680 Free inodes: 41544763 Used inodes: 134917 2025-12-12T16:18:51.380115183+00:00 stderr F I1212 16:18:51.379668 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:18:56.159613634+00:00 stderr F I1212 16:18:56.159534 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:18:56.159613634+00:00 stderr F I1212 16:18:56.159582 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:18:57.163503804+00:00 stderr F I1212 16:18:57.163400 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:18:57.163503804+00:00 stderr F I1212 16:18:57.163421 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:19:30.916030420+00:00 stderr F I1212 16:19:30.915938 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:19:30.916030420+00:00 stderr F I1212 16:19:30.915985 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:19:31.922970693+00:00 stderr F I1212 16:19:31.922875 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:19:31.922970693+00:00 stderr F I1212 16:19:31.922922 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:19:33.927672648+00:00 stderr F I1212 16:19:33.927610 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:19:33.927672648+00:00 stderr F I1212 16:19:33.927633 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:19:37.934157321+00:00 stderr F I1212 16:19:37.934077 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:19:37.934157321+00:00 stderr F I1212 16:19:37.934107 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:19:39.432227425+00:00 stderr F I1212 16:19:39.432145 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:19:39.433052676+00:00 stderr F I1212 16:19:39.433026 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:19:39.433068436+00:00 stderr F I1212 16:19:39.433036 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/162da780-4bd3-4acf-b114-06ae104fc8ad/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:19:39.438915853+00:00 stderr F I1212 16:19:39.438842 1 healthcheck.go:84] fs available: 59741323264, total capacity: 85292941312, percentage available: 70.04, number of free inodes: 41544534 2025-12-12T16:19:39.438915853+00:00 stderr F I1212 16:19:39.438877 1 
nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:19:39.438915853+00:00 stderr F I1212 16:19:39.438893 1 nodeserver.go:330] Capacity: 85292941312 Used: 25551618048 Available: 59741323264 Inodes: 41679680 Free inodes: 41544534 Used inodes: 135146 2025-12-12T16:19:45.940549565+00:00 stderr F I1212 16:19:45.940506 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:19:45.940709539+00:00 stderr F I1212 16:19:45.940697 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:19:51.390409599+00:00 stderr F I1212 16:19:51.390343 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:19:56.159658845+00:00 stderr F I1212 16:19:56.159492 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:19:56.159658845+00:00 stderr F I1212 16:19:56.159517 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:20:01.946717886+00:00 stderr F I1212 16:20:01.946648 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:20:01.946717886+00:00 stderr F I1212 16:20:01.946671 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:20:47.362016190+00:00 stderr F I1212 16:20:47.360813 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:20:47.369971448+00:00 stderr F I1212 16:20:47.367310 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:20:47.369971448+00:00 stderr F I1212 16:20:47.367339 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/162da780-4bd3-4acf-b114-06ae104fc8ad/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:20:47.375434992+00:00 stderr F I1212 16:20:47.375379 1 healthcheck.go:84] fs available: 59695087616, total capacity: 85292941312, percentage available: 69.99, number of free inodes: 41544546 2025-12-12T16:20:47.375434992+00:00 stderr F I1212 16:20:47.375415 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:20:47.375498683+00:00 stderr F I1212 16:20:47.375431 1 nodeserver.go:330] Capacity: 85292941312 Used: 25597853696 Available: 59695087616 Inodes: 41679680 Free inodes: 41544546 Used inodes: 135134 2025-12-12T16:20:51.401063703+00:00 stderr F I1212 16:20:51.400614 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:20:56.160503082+00:00 stderr F I1212 16:20:56.160396 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:20:56.160594104+00:00 stderr F I1212 16:20:56.160581 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:21:05.952424881+00:00 stderr F I1212 16:21:05.952328 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:21:05.952424881+00:00 stderr F I1212 16:21:05.952351 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:21:51.412876749+00:00 stderr F I1212 16:21:51.412584 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:21:56.161524393+00:00 stderr F I1212 16:21:56.161300 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:21:56.161600985+00:00 stderr F I1212 16:21:56.161568 1 controllerserver.go:230] 
Checking capacity for storage pool local 2025-12-12T16:22:42.259640154+00:00 stderr F I1212 16:22:42.258919 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:22:42.260815103+00:00 stderr F I1212 16:22:42.260759 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:22:42.260815103+00:00 stderr F I1212 16:22:42.260775 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/162da780-4bd3-4acf-b114-06ae104fc8ad/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:22:42.265623405+00:00 stderr F I1212 16:22:42.265559 1 healthcheck.go:84] fs available: 59697717248, total capacity: 85292941312, percentage available: 69.99, number of free inodes: 41544739 2025-12-12T16:22:42.265623405+00:00 stderr F I1212 16:22:42.265578 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:22:42.265623405+00:00 stderr F I1212 16:22:42.265588 1 nodeserver.go:330] Capacity: 85292941312 Used: 25595224064 Available: 59697717248 Inodes: 41679680 Free inodes: 41544739 Used inodes: 134941 2025-12-12T16:22:51.429035705+00:00 stderr F I1212 16:22:51.428952 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:22:56.162409406+00:00 stderr F I1212 16:22:56.161815 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:22:56.162409406+00:00 stderr F I1212 16:22:56.161868 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:23:51.448698319+00:00 stderr F I1212 16:23:51.448550 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:23:56.163647921+00:00 stderr F I1212 16:23:56.163545 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:23:56.163647921+00:00 stderr F I1212 16:23:56.163584 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:24:25.449400333+00:00 stderr F I1212 16:24:25.449329 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:24:25.451093795+00:00 stderr F I1212 16:24:25.450915 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:24:25.451093795+00:00 stderr F I1212 16:24:25.450933 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/162da780-4bd3-4acf-b114-06ae104fc8ad/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:24:25.456890659+00:00 stderr F I1212 16:24:25.456845 1 healthcheck.go:84] fs available: 59696750592, total capacity: 85292941312, percentage available: 69.99, number of free inodes: 41544773 2025-12-12T16:24:25.456890659+00:00 stderr F I1212 16:24:25.456872 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:24:25.456890659+00:00 stderr F I1212 16:24:25.456884 1 nodeserver.go:330] Capacity: 85292941312 Used: 25596190720 Available: 59696750592 Inodes: 41679680 Free inodes: 41544773 Used inodes: 134907 2025-12-12T16:24:51.466240302+00:00 stderr F I1212 16:24:51.465847 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:24:56.165496672+00:00 stderr F 
I1212 16:24:56.165441 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:24:56.165496672+00:00 stderr F I1212 16:24:56.165470 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:25:51.478391748+00:00 stderr F I1212 16:25:51.478277 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:25:56.166027763+00:00 stderr F I1212 16:25:56.165929 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:25:56.166027763+00:00 stderr F I1212 16:25:56.165974 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:26:18.419610724+00:00 stderr F I1212 16:26:18.419503 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:26:18.420797094+00:00 stderr F I1212 16:26:18.420747 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:26:18.420797094+00:00 stderr F I1212 16:26:18.420772 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/162da780-4bd3-4acf-b114-06ae104fc8ad/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:26:18.428331684+00:00 stderr F I1212 16:26:18.428209 1 healthcheck.go:84] fs available: 59698962432, total capacity: 85292941312, percentage available: 69.99, number of free inodes: 41544724 2025-12-12T16:26:18.428331684+00:00 stderr F I1212 16:26:18.428247 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:26:18.428331684+00:00 stderr F I1212 16:26:18.428260 1 nodeserver.go:330] Capacity: 85292941312 Used: 25593978880 Available: 59698962432 Inodes: 41679680 Free inodes: 41544724 Used inodes: 134956 2025-12-12T16:26:40.071751468+00:00 stderr F I1212 16:26:40.071649 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:26:40.073525092+00:00 stderr F I1212 16:26:40.073488 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:26:40.074972609+00:00 stderr F I1212 16:26:40.074841 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:26:40.076147469+00:00 stderr F I1212 16:26:40.076094 1 server.go:104] GRPC call: /csi.v1.Node/NodePublishVolume 2025-12-12T16:26:40.076251091+00:00 stderr F I1212 16:26:40.076115 1 nodeserver.go:82] Node Publish Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 PublishContext:map[] StagingTargetPath: TargetPath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount VolumeCapability:mount:<> access_mode: Readonly:false Secrets:map[] VolumeContext:map[csi.storage.k8s.io/ephemeral:false csi.storage.k8s.io/pod.name:image-registry-5d9d95bf5b-6md9w csi.storage.k8s.io/pod.namespace:openshift-image-registry csi.storage.k8s.io/pod.uid:b75bc011-274b-4fb1-8311-15ffa1b33366 csi.storage.k8s.io/pv/name:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 csi.storage.k8s.io/pvc/name:crc-image-registry-storage csi.storage.k8s.io/pvc/namespace:openshift-image-registry csi.storage.k8s.io/serviceAccount.name:registry storage.kubernetes.io/csiProvisionerIdentity:1762159825768-6575-kubevirt.io.hostpath-provisioner-crc storagePool:local] XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:26:51.490113785+00:00 stderr F I1212 16:26:51.490004 1 utils.go:221] pool (local, /csi-data-dir), shares 
path with OS which can lead to node disk pressure 2025-12-12T16:26:56.166624050+00:00 stderr F I1212 16:26:56.166546 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:26:56.166624050+00:00 stderr F I1212 16:26:56.166578 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:27:00.358498982+00:00 stderr F I1212 16:27:00.353540 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:27:00.362202005+00:00 stderr F I1212 16:27:00.361739 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:27:00.362202005+00:00 stderr F I1212 16:27:00.361761 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:27:00.377089812+00:00 stderr F I1212 16:27:00.376448 1 healthcheck.go:84] fs available: 59494211584, total capacity: 85292941312, percentage available: 69.75, number of free inodes: 41542827 2025-12-12T16:27:00.377154064+00:00 stderr F I1212 16:27:00.377139 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:27:00.377219135+00:00 stderr F I1212 16:27:00.377204 1 nodeserver.go:330] Capacity: 85292941312 Used: 25798795264 Available: 59494146048 Inodes: 41679680 Free inodes: 41542827 Used inodes: 136853 2025-12-12T16:27:30.018787981+00:00 stderr F I1212 16:27:30.011607 1 server.go:104] GRPC call: /csi.v1.Node/NodeUnpublishVolume 2025-12-12T16:27:30.018787981+00:00 stderr F I1212 16:27:30.011629 1 nodeserver.go:199] Node Unpublish Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 TargetPath:/var/lib/kubelet/pods/162da780-4bd3-4acf-b114-06ae104fc8ad/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:27:30.018787981+00:00 stderr F I1212 16:27:30.011643 1 nodeserver.go:206] Unmounting path: /var/lib/kubelet/pods/162da780-4bd3-4acf-b114-06ae104fc8ad/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount 2025-12-12T16:27:51.503139813+00:00 stderr F I1212 16:27:51.503000 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:27:56.167526253+00:00 stderr F I1212 16:27:56.167437 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:27:56.167526253+00:00 stderr F I1212 16:27:56.167466 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:28:04.559844821+00:00 stderr F I1212 16:28:04.559779 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:28:04.562586290+00:00 stderr F I1212 16:28:04.562493 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:28:04.562586290+00:00 stderr F I1212 16:28:04.562516 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:28:04.569079705+00:00 stderr F I1212 16:28:04.568998 1 healthcheck.go:84] fs available: 57318883328, total capacity: 85292941312, percentage available: 67.20, number of free inodes: 41522124 
2025-12-12T16:28:04.569079705+00:00 stderr F I1212 16:28:04.569037 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:28:04.569079705+00:00 stderr F I1212 16:28:04.569051 1 nodeserver.go:330] Capacity: 85292941312 Used: 27974057984 Available: 57318883328 Inodes: 41679680 Free inodes: 41522124 Used inodes: 157556 2025-12-12T16:28:51.521644692+00:00 stderr F I1212 16:28:51.521551 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:28:56.168099816+00:00 stderr F I1212 16:28:56.167990 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:28:56.168099816+00:00 stderr F I1212 16:28:56.168030 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:29:51.540973623+00:00 stderr F I1212 16:29:51.539051 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:29:56.169148731+00:00 stderr F I1212 16:29:56.169106 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:29:56.169217843+00:00 stderr F I1212 16:29:56.169207 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:30:01.745789312+00:00 stderr F I1212 16:30:01.744974 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:30:01.748463829+00:00 stderr F I1212 16:30:01.748419 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:30:01.748486400+00:00 stderr F I1212 16:30:01.748446 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:30:01.754189094+00:00 stderr F I1212 16:30:01.754118 1 healthcheck.go:84] fs available: 57035522048, total capacity: 85292941312, percentage available: 66.87, number of free inodes: 41510713 2025-12-12T16:30:01.754189094+00:00 stderr F I1212 16:30:01.754142 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:30:01.754189094+00:00 stderr F I1212 16:30:01.754157 1 nodeserver.go:330] Capacity: 85292941312 Used: 28257419264 Available: 57035522048 Inodes: 41679680 Free inodes: 41510713 Used inodes: 168967 2025-12-12T16:30:51.555171451+00:00 stderr F I1212 16:30:51.555093 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:30:56.170022465+00:00 stderr F I1212 16:30:56.169948 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:30:56.170022465+00:00 stderr F I1212 16:30:56.169977 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:31:10.087173524+00:00 stderr F I1212 16:31:10.085745 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:31:10.088862076+00:00 stderr F I1212 16:31:10.088822 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:31:10.088862076+00:00 stderr F I1212 16:31:10.088836 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 
2025-12-12T16:31:10.095230585+00:00 stderr F I1212 16:31:10.095110 1 healthcheck.go:84] fs available: 57037697024, total capacity: 85292941312, percentage available: 66.87, number of free inodes: 41510776 2025-12-12T16:31:10.095230585+00:00 stderr F I1212 16:31:10.095131 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:31:10.095230585+00:00 stderr F I1212 16:31:10.095147 1 nodeserver.go:330] Capacity: 85292941312 Used: 28255244288 Available: 57037697024 Inodes: 41679680 Free inodes: 41510776 Used inodes: 168904 2025-12-12T16:31:51.567600158+00:00 stderr F I1212 16:31:51.567528 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:31:56.171514553+00:00 stderr F I1212 16:31:56.171468 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:31:56.171577864+00:00 stderr F I1212 16:31:56.171567 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:32:13.694225191+00:00 stderr F I1212 16:32:13.694085 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:32:13.695000920+00:00 stderr F I1212 16:32:13.694931 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:32:13.695000920+00:00 stderr F I1212 16:32:13.694944 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:32:13.703234893+00:00 stderr F I1212 16:32:13.703143 1 healthcheck.go:84] fs available: 57038761984, total capacity: 85292941312, percentage available: 66.87, number of free inodes: 41510776 2025-12-12T16:32:13.703234893+00:00 stderr F I1212 16:32:13.703167 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:32:13.703234893+00:00 stderr F I1212 16:32:13.703191 1 nodeserver.go:330] Capacity: 85292941312 Used: 28254179328 Available: 57038761984 Inodes: 41679680 Free inodes: 41510776 Used inodes: 168904 2025-12-12T16:32:51.587377808+00:00 stderr F I1212 16:32:51.587270 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:32:56.171915808+00:00 stderr F I1212 16:32:56.171797 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:32:56.171915808+00:00 stderr F I1212 16:32:56.171822 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:33:19.671210098+00:00 stderr F I1212 16:33:19.671069 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:33:19.672822299+00:00 stderr F I1212 16:33:19.672734 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:33:19.672822299+00:00 stderr F I1212 16:33:19.672757 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:33:19.683913476+00:00 stderr F I1212 16:33:19.683843 1 healthcheck.go:84] fs available: 57038745600, total capacity: 85292941312, percentage available: 66.87, number of free inodes: 41510848 2025-12-12T16:33:19.683913476+00:00 stderr F 
I1212 16:33:19.683881 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:33:19.683913476+00:00 stderr F I1212 16:33:19.683905 1 nodeserver.go:330] Capacity: 85292941312 Used: 28254195712 Available: 57038745600 Inodes: 41679680 Free inodes: 41510848 Used inodes: 168832 2025-12-12T16:33:51.599330627+00:00 stderr F I1212 16:33:51.599243 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:33:56.173497216+00:00 stderr F I1212 16:33:56.173366 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:33:56.173497216+00:00 stderr F I1212 16:33:56.173402 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:34:51.612690236+00:00 stderr F I1212 16:34:51.612590 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:34:56.174783671+00:00 stderr F I1212 16:34:56.174705 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:34:56.174783671+00:00 stderr F I1212 16:34:56.174746 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:34:58.275097993+00:00 stderr F I1212 16:34:58.275029 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:34:58.276641882+00:00 stderr F I1212 16:34:58.276608 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:34:58.276694473+00:00 stderr F I1212 16:34:58.276626 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:34:58.282590371+00:00 stderr F I1212 16:34:58.282519 1 healthcheck.go:84] fs available: 57055371264, total capacity: 85292941312, percentage available: 66.89, number of free inodes: 41510851 2025-12-12T16:34:58.282590371+00:00 stderr F I1212 16:34:58.282553 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:34:58.282590371+00:00 stderr F I1212 16:34:58.282572 1 nodeserver.go:330] Capacity: 85292941312 Used: 28237570048 Available: 57055371264 Inodes: 41679680 Free inodes: 41510851 Used inodes: 168829 2025-12-12T16:35:51.625594347+00:00 stderr F I1212 16:35:51.625498 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:35:56.175718319+00:00 stderr F I1212 16:35:56.175486 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:35:56.175718319+00:00 stderr F I1212 16:35:56.175659 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:36:44.535233811+00:00 stderr F I1212 16:36:44.535092 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:36:44.536713878+00:00 stderr F I1212 16:36:44.536642 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:36:44.536713878+00:00 stderr F I1212 16:36:44.536689 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:36:44.543891139+00:00 stderr F I1212 16:36:44.543825 1 
healthcheck.go:84] fs available: 57051983872, total capacity: 85292941312, percentage available: 66.89, number of free inodes: 41510851 2025-12-12T16:36:44.543891139+00:00 stderr F I1212 16:36:44.543849 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:36:44.543891139+00:00 stderr F I1212 16:36:44.543865 1 nodeserver.go:330] Capacity: 85292941312 Used: 28240957440 Available: 57051983872 Inodes: 41679680 Free inodes: 41510851 Used inodes: 168829 2025-12-12T16:36:51.636947404+00:00 stderr F I1212 16:36:51.636868 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:36:56.176070431+00:00 stderr F I1212 16:36:56.175956 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:36:56.176070431+00:00 stderr F I1212 16:36:56.175997 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:37:45.307392862+00:00 stderr F I1212 16:37:45.306615 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:37:45.308976682+00:00 stderr F I1212 16:37:45.308932 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:37:45.309032323+00:00 stderr F I1212 16:37:45.308955 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:37:45.319662140+00:00 stderr F I1212 16:37:45.319613 1 healthcheck.go:84] fs available: 57051303936, total capacity: 85292941312, percentage available: 66.89, number of free inodes: 41510814 2025-12-12T16:37:45.319662140+00:00 stderr F I1212 16:37:45.319647 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:37:45.319686751+00:00 stderr F I1212 16:37:45.319671 1 nodeserver.go:330] Capacity: 85292941312 Used: 28241637376 Available: 57051303936 Inodes: 41679680 Free inodes: 41510814 Used inodes: 168866 2025-12-12T16:37:51.650041221+00:00 stderr F I1212 16:37:51.649954 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:37:56.176914609+00:00 stderr F I1212 16:37:56.176838 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:37:56.176914609+00:00 stderr F I1212 16:37:56.176871 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:38:51.662906208+00:00 stderr F I1212 16:38:51.662819 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:38:56.178425963+00:00 stderr F I1212 16:38:56.178279 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:38:56.178425963+00:00 stderr F I1212 16:38:56.178342 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:39:09.109386196+00:00 stderr F I1212 16:39:09.109286 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:39:09.110640877+00:00 stderr F I1212 16:39:09.110585 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:39:09.110640877+00:00 stderr F I1212 16:39:09.110615 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 
VolumePath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:39:09.116708710+00:00 stderr F I1212 16:39:09.116612 1 healthcheck.go:84] fs available: 57042251776, total capacity: 85292941312, percentage available: 66.88, number of free inodes: 41510850 2025-12-12T16:39:09.116708710+00:00 stderr F I1212 16:39:09.116645 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:39:09.116708710+00:00 stderr F I1212 16:39:09.116657 1 nodeserver.go:330] Capacity: 85292941312 Used: 28250689536 Available: 57042251776 Inodes: 41679680 Free inodes: 41510850 Used inodes: 168830 2025-12-12T16:39:51.674846053+00:00 stderr F I1212 16:39:51.674745 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:39:56.181576445+00:00 stderr F I1212 16:39:56.181432 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:39:56.181576445+00:00 stderr F I1212 16:39:56.181477 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:40:49.465695030+00:00 stderr F I1212 16:40:49.465599 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:40:49.473670451+00:00 stderr F I1212 16:40:49.473541 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:40:49.473670451+00:00 stderr F I1212 16:40:49.473561 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:40:49.484422261+00:00 stderr F I1212 16:40:49.482490 1 healthcheck.go:84] fs available: 56604909568, total capacity: 85292941312, percentage available: 66.37, number of free inodes: 41500706 2025-12-12T16:40:49.484422261+00:00 stderr F I1212 16:40:49.482524 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:40:49.484422261+00:00 stderr F I1212 16:40:49.482536 1 nodeserver.go:330] Capacity: 85292941312 Used: 28688031744 Available: 56604909568 Inodes: 41679680 Free inodes: 41500706 Used inodes: 178974 2025-12-12T16:40:51.691123506+00:00 stderr F I1212 16:40:51.691029 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:40:56.183191750+00:00 stderr F I1212 16:40:56.183114 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:40:56.183191750+00:00 stderr F I1212 16:40:56.183141 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:41:51.706481524+00:00 stderr F I1212 16:41:51.706406 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:41:56.185914592+00:00 stderr F I1212 16:41:56.184619 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:41:56.185914592+00:00 stderr F I1212 16:41:56.184646 1 controllerserver.go:230] Checking capacity for storage pool local 2025-12-12T16:42:09.098435534+00:00 stderr F I1212 16:42:09.097939 1 server.go:104] GRPC call: /csi.v1.Node/NodeGetCapabilities 2025-12-12T16:42:09.099119171+00:00 stderr F I1212 16:42:09.099072 1 server.go:104] GRPC call: 
/csi.v1.Node/NodeGetVolumeStats 2025-12-12T16:42:09.099119171+00:00 stderr F I1212 16:42:09.099095 1 nodeserver.go:314] Node Get Volume Stats Request: {VolumeId:pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 VolumePath:/var/lib/kubelet/pods/b75bc011-274b-4fb1-8311-15ffa1b33366/volumes/kubernetes.io~csi/pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2/mount StagingTargetPath: XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0} 2025-12-12T16:42:09.106391904+00:00 stderr F I1212 16:42:09.104247 1 healthcheck.go:84] fs available: 56276582400, total capacity: 85292941312, percentage available: 65.98, number of free inodes: 41500431 2025-12-12T16:42:09.106391904+00:00 stderr F I1212 16:42:09.104272 1 nodeserver.go:321] Healthy state: pvc-b21f41aa-58d4-44b1-aeaa-280a8e32ddf2 Volume: true 2025-12-12T16:42:09.106391904+00:00 stderr F I1212 16:42:09.104284 1 nodeserver.go:330] Capacity: 85292941312 Used: 29016358912 Available: 56276582400 Inodes: 41679680 Free inodes: 41500431 Used inodes: 179249 2025-12-12T16:42:51.720292607+00:00 stderr F I1212 16:42:51.720201 1 utils.go:221] pool (local, /csi-data-dir), shares path with OS which can lead to node disk pressure 2025-12-12T16:42:56.184536594+00:00 stderr F I1212 16:42:56.184461 1 server.go:104] GRPC call: /csi.v1.Controller/GetCapacity 2025-12-12T16:42:56.184536594+00:00 stderr F I1212 16:42:56.184493 1 controllerserver.go:230] Checking capacity for storage pool local
--- home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/liveness-probe/0.log ---
2025-12-12T16:16:54.790608406+00:00 stderr F I1212 16:16:54.790530 1 main.go:149] calling CSI driver to discover driver name 2025-12-12T16:16:54.809467077+00:00 stderr F I1212 16:16:54.809273 1 main.go:155] CSI driver name: "kubevirt.io.hostpath-provisioner" 2025-12-12T16:16:54.809467077+00:00 stderr F I1212 16:16:54.809315 1 main.go:183] ServeMux listening at "0.0.0.0:9898"
--- home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-7d4fc7d867-lfwgk_6e354e82-d648-4680-b0c8-e901bfcfbd5f/packageserver/0.log ---
2025-12-12T16:16:48.663070548+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="Using in-cluster kube client config" 2025-12-12T16:16:48.670686324+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="Defaulting Interval to '12h0m0s'" 2025-12-12T16:16:48.713978421+00:00 stderr F I1212 16:16:48.713562 1 handler.go:288] Adding GroupVersion packages.operators.coreos.com v1 to ResourceManager 2025-12-12T16:16:48.717329312+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="connection established. cluster-version: v1.33.5" 2025-12-12T16:16:48.717329312+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="operator ready" 2025-12-12T16:16:48.717329312+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="starting informers..." 2025-12-12T16:16:48.717329312+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="informers started" 2025-12-12T16:16:48.717329312+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="waiting for caches to sync..." 2025-12-12T16:16:48.818244386+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="starting workers..." 2025-12-12T16:16:48.819129618+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="connecting to source" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:16:48.819750933+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="connecting to source" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:16:48.820201054+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="connecting to source" action="sync catalogsource" address="redhat-marketplace.openshift-marketplace.svc:50051" name=redhat-marketplace namespace=openshift-marketplace 2025-12-12T16:16:48.820939942+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="connecting to source" action="sync catalogsource" address="redhat-operators.openshift-marketplace.svc:50051" name=redhat-operators namespace=openshift-marketplace 2025-12-12T16:16:48.867709054+00:00 stderr F I1212 16:16:48.867484 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:48.867709054+00:00 stderr F I1212 16:16:48.867671 1 secure_serving.go:211] Serving securely on [::]:5443 2025-12-12T16:16:48.867889878+00:00 stderr F I1212 16:16:48.867766 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::apiserver.local.config/certificates/apiserver.crt::apiserver.local.config/certificates/apiserver.key" 2025-12-12T16:16:48.867889878+00:00 stderr F I1212 16:16:48.867750 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:48.867907549+00:00 stderr F I1212 16:16:48.867879 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:48.868421131+00:00 stderr F I1212 16:16:48.867911 1
shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:48.868421131+00:00 stderr F I1212 16:16:48.867998 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:48.868421131+00:00 stderr F I1212 16:16:48.868028 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:48.868421131+00:00 stderr F I1212 16:16:48.868034 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:48.877309028+00:00 stderr F I1212 16:16:48.875847 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:48.877309028+00:00 stderr F I1212 16:16:48.875879 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:48.877309028+00:00 stderr F I1212 16:16:48.876016 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:48.877309028+00:00 stderr F I1212 16:16:48.876099 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:48.877767279+00:00 stderr F I1212 16:16:48.877728 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:48.877828991+00:00 stderr F I1212 16:16:48.877812 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:48.878649221+00:00 stderr F W1212 16:16:48.878601 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup redhat-marketplace.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:60460->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:48.880257900+00:00 stderr F W1212 16:16:48.880195 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: "certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup certified-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:49173->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:48.880759452+00:00 stderr F W1212 16:16:48.880730 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup community-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:44298->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:48.880828294+00:00 stderr F W1212 16:16:48.880805 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup redhat-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:47644->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:48.971154769+00:00 stderr F I1212 16:16:48.971081 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:48.971259822+00:00 stderr F I1212 16:16:48.971243 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:48.971468497+00:00 stderr F I1212 16:16:48.971324 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:48.986365251+00:00 stderr F I1212 16:16:48.981640 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:48.986365251+00:00 stderr F I1212 16:16:48.981682 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:48.986365251+00:00 stderr F I1212 16:16:48.981756 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:49.931200997+00:00 stderr F W1212 16:16:49.930520 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup redhat-marketplace.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:59955->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:49.931200997+00:00 stderr F W1212 16:16:49.930557 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup community-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:42261->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:49.931200997+00:00 stderr F W1212 16:16:49.930619 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: "certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup certified-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:59965->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:49.931200997+00:00 stderr F W1212 16:16:49.930621 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup redhat-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:51315->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:50.288315476+00:00 stderr F time="2025-12-12T16:16:50Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:16:50.288315476+00:00 stderr F time="2025-12-12T16:16:50Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp: lookup certified-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:59965->10.217.4.10:53: read: connection refused\"" source="{certified-operators openshift-marketplace}" 2025-12-12T16:16:50.677883007+00:00 stderr F time="2025-12-12T16:16:50Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:16:50.677951119+00:00 stderr F time="2025-12-12T16:16:50Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp: lookup community-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:42261->10.217.4.10:53: read: connection refused\"" source="{community-operators openshift-marketplace}" 2025-12-12T16:16:51.497307033+00:00 stderr F W1212 16:16:51.496133 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup redhat-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:35203->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:51.703269451+00:00 stderr F W1212 16:16:51.703102 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: "certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup certified-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:45070->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:51.746818975+00:00 stderr F W1212 16:16:51.746746 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup community-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:50609->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:51.820165586+00:00 stderr F W1212 16:16:51.820084 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup redhat-marketplace.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:52066->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:52.480241611+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-marketplace openshift-marketplace}" action="sync catalogsource" address="redhat-marketplace.openshift-marketplace.svc:50051" name=redhat-marketplace namespace=openshift-marketplace 2025-12-12T16:16:52.480241611+00:00 stderr F time="2025-12-12T16:16:52Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp: lookup redhat-marketplace.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:52066->10.217.4.10:53: read: connection refused\"" source="{redhat-marketplace openshift-marketplace}" 2025-12-12T16:16:53.479200629+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-operators openshift-marketplace}" action="sync catalogsource" address="redhat-operators.openshift-marketplace.svc:50051" name=redhat-operators namespace=openshift-marketplace 2025-12-12T16:16:53.479200629+00:00 stderr F time="2025-12-12T16:16:53Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp: lookup redhat-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:35203->10.217.4.10:53: read: connection refused\"" source="{redhat-operators openshift-marketplace}" 2025-12-12T16:16:54.190431413+00:00 stderr F W1212 16:16:54.186904 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: "certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup certified-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:49860->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:54.216325595+00:00 stderr F W1212 16:16:54.215989 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup redhat-marketplace.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:45228->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:54.225652583+00:00 stderr F W1212 16:16:54.222757 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup redhat-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:33381->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:54.289924722+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:16:54.289924722+00:00 stderr F time="2025-12-12T16:16:54Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp: lookup certified-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:49860->10.217.4.10:53: read: connection refused\"" source="{certified-operators openshift-marketplace}" 2025-12-12T16:16:54.611242757+00:00 stderr F W1212 16:16:54.611070 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp: lookup community-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:50574->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:55.690110677+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:16:55.690110677+00:00 stderr F time="2025-12-12T16:16:55Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp: lookup community-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:50574->10.217.4.10:53: read: connection refused\"" source="{community-operators openshift-marketplace}" 2025-12-12T16:16:56.288354373+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-marketplace openshift-marketplace}" action="sync catalogsource" address="redhat-marketplace.openshift-marketplace.svc:50051" name=redhat-marketplace namespace=openshift-marketplace 2025-12-12T16:16:56.288354373+00:00 stderr F time="2025-12-12T16:16:56Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp: lookup redhat-marketplace.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:45228->10.217.4.10:53: read: connection refused\"" source="{redhat-marketplace openshift-marketplace}" 2025-12-12T16:16:57.674006382+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-operators openshift-marketplace}" action="sync catalogsource" address="redhat-operators.openshift-marketplace.svc:50051" name=redhat-operators namespace=openshift-marketplace 2025-12-12T16:16:57.674059173+00:00 stderr F time="2025-12-12T16:16:57Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp: lookup redhat-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:33381->10.217.4.10:53: read: connection refused\"" source="{redhat-operators openshift-marketplace}" 2025-12-12T16:16:58.512206646+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:16:58.512206646+00:00 stderr F time="2025-12-12T16:16:58Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp: lookup certified-operators.openshift-marketplace.svc on 10.217.4.10:53: read udp 10.217.0.37:49860->10.217.4.10:53: read: connection refused\"" source="{certified-operators openshift-marketplace}" 2025-12-12T16:16:58.525520271+00:00 stderr F W1212 16:16:58.524283 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: 
"certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused" 2025-12-12T16:16:58.565073767+00:00 stderr F W1212 16:16:58.564466 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.4.50:50051: connect: connection refused" 2025-12-12T16:16:58.584343467+00:00 stderr F W1212 16:16:58.584267 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused" 2025-12-12T16:16:58.704854949+00:00 stderr F W1212 16:16:58.704563 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.43:50051: connect: connection refused" 2025-12-12T16:16:59.482240969+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:16:59.482240969+00:00 stderr F time="2025-12-12T16:16:59Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused\"" source="{community-operators openshift-marketplace}" 2025-12-12T16:17:00.274112761+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-marketplace openshift-marketplace}" action="sync catalogsource" address="redhat-marketplace.openshift-marketplace.svc:50051" name=redhat-marketplace namespace=openshift-marketplace 2025-12-12T16:17:00.274161612+00:00 stderr F time="2025-12-12T16:17:00Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.43:50051: connect: connection refused\"" source="{redhat-marketplace openshift-marketplace}" 2025-12-12T16:17:01.674348977+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-operators openshift-marketplace}" action="sync catalogsource" address="redhat-operators.openshift-marketplace.svc:50051" name=redhat-operators namespace=openshift-marketplace 2025-12-12T16:17:01.674497421+00:00 stderr F time="2025-12-12T16:17:01Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.4.50:50051: connect: connection refused\"" source="{redhat-operators 
openshift-marketplace}" 2025-12-12T16:17:03.857909467+00:00 stderr F W1212 16:17:03.857804 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.4.50:50051: connect: connection refused" 2025-12-12T16:17:04.207417760+00:00 stderr F W1212 16:17:04.207333 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.43:50051: connect: connection refused" 2025-12-12T16:17:04.300440841+00:00 stderr F W1212 16:17:04.300377 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused" 2025-12-12T16:17:05.697521890+00:00 stderr F W1212 16:17:05.696829 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: "certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused" 2025-12-12T16:17:13.141655101+00:00 stderr F W1212 16:17:13.141058 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused" 2025-12-12T16:17:15.275327673+00:00 stderr F W1212 16:17:15.275249 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: "certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused" 2025-12-12T16:17:15.386875736+00:00 stderr F W1212 16:17:15.386781 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.43:50051: connect: connection refused" 2025-12-12T16:17:15.780551177+00:00 stderr F W1212 16:17:15.780454 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.4.50:50051: connect: connection refused" 2025-12-12T16:17:27.589144318+00:00 stderr F W1212 16:17:27.588486 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused" 2025-12-12T16:17:31.734907844+00:00 stderr F W1212 16:17:31.734804 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.43:50051: connect: connection refused" 2025-12-12T16:17:32.683275893+00:00 stderr F W1212 16:17:32.683119 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.4.50:50051: connect: connection refused" 2025-12-12T16:17:33.706242813+00:00 stderr F W1212 16:17:33.704055 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: "certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused" 2025-12-12T16:18:00.221221337+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-marketplace openshift-marketplace}" action="sync catalogsource" address="redhat-marketplace.openshift-marketplace.svc:50051" name=redhat-marketplace namespace=openshift-marketplace 2025-12-12T16:18:01.214500853+00:00 stderr F time="2025-12-12T16:18:01Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:18:03.435277497+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:18:04.809441559+00:00 stderr F time="2025-12-12T16:18:04Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:18:05.413452722+00:00 stderr F time="2025-12-12T16:18:05Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-operators openshift-marketplace}" action="sync catalogsource" address="redhat-operators.openshift-marketplace.svc:50051" name=redhat-operators namespace=openshift-marketplace 2025-12-12T16:18:30.628068225+00:00 stderr F E1212 16:18:30.627453 1 
webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.628212448+00:00 stderr F E1212 16:18:30.628193 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.566403253+00:00 stderr F E1212 16:18:31.566058 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.566527336+00:00 stderr F E1212 16:18:31.566509 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.593006511+00:00 stderr F E1212 16:18:31.592955 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.593123023+00:00 stderr F E1212 16:18:31.593107 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:19:54.146885718+00:00 stderr F time="2025-12-12T16:19:54Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:19:54.146948280+00:00 stderr F time="2025-12-12T16:19:54Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:19:54.911895346+00:00 stderr F time="2025-12-12T16:19:54Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-marketplace openshift-marketplace}" action="sync catalogsource" address="redhat-marketplace.openshift-marketplace.svc:50051" name=redhat-marketplace namespace=openshift-marketplace 2025-12-12T16:19:55.143547213+00:00 stderr F time="2025-12-12T16:19:55Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-operators openshift-marketplace}" action="sync catalogsource" address="redhat-operators.openshift-marketplace.svc:50051" name=redhat-operators namespace=openshift-marketplace 2025-12-12T16:20:35.807507058+00:00 stderr F W1212 16:20:35.806874 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: "certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused" 2025-12-12T16:20:35.811217846+00:00 stderr F W1212 16:20:35.809420 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused" 2025-12-12T16:20:35.827692158+00:00 stderr F W1212 16:20:35.827591 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.43:50051: connect: connection refused" 2025-12-12T16:20:35.841574762+00:00 stderr F W1212 16:20:35.838533 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.4.50:50051: connect: connection refused" 2025-12-12T16:20:36.235973237+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:20:36.236080310+00:00 stderr F time="2025-12-12T16:20:36Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused\"" source="{certified-operators openshift-marketplace}" 2025-12-12T16:20:36.435618074+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:20:36.435665535+00:00 stderr F time="2025-12-12T16:20:36Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused\"" source="{community-operators openshift-marketplace}" 2025-12-12T16:20:36.862642535+00:00 stderr F W1212 16:20:36.862588 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.4.50:50051: connect: connection refused" 2025-12-12T16:20:37.439155326+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-marketplace openshift-marketplace}" action="sync catalogsource" address="redhat-marketplace.openshift-marketplace.svc:50051" name=redhat-marketplace namespace=openshift-marketplace 2025-12-12T16:20:37.439155326+00:00 stderr F time="2025-12-12T16:20:37Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.43:50051: connect: connection refused\"" source="{redhat-marketplace openshift-marketplace}" 2025-12-12T16:20:37.829211988+00:00 stderr F W1212 16:20:37.829138 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused" 2025-12-12T16:20:37.829211988+00:00 stderr F W1212 16:20:37.829138 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: "certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused" 2025-12-12T16:20:37.893281598+00:00 stderr F W1212 16:20:37.893209 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.43:50051: connect: connection refused" 2025-12-12T16:20:38.733332982+00:00 stderr F W1212 16:20:38.732705 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.4.50:50051: connect: connection refused" 2025-12-12T16:20:38.831988699+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-operators openshift-marketplace}" action="sync catalogsource" address="redhat-operators.openshift-marketplace.svc:50051" name=redhat-operators namespace=openshift-marketplace 2025-12-12T16:20:38.831988699+00:00 stderr F time="2025-12-12T16:20:38Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.4.50:50051: connect: connection refused\"" source="{redhat-operators openshift-marketplace}" 2025-12-12T16:20:39.259325129+00:00 stderr F W1212 16:20:39.259013 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: "certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused" 2025-12-12T16:20:39.361657043+00:00 stderr F W1212 16:20:39.361428 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.43:50051: connect: connection refused" 2025-12-12T16:20:39.388746673+00:00 stderr F W1212 16:20:39.388687 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused" 2025-12-12T16:20:39.830598813+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:20:39.830746387+00:00 stderr F time="2025-12-12T16:20:39Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused\"" source="{certified-operators openshift-marketplace}" 2025-12-12T16:20:40.969946988+00:00 stderr F W1212 16:20:40.969899 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.4.50:50051: connect: connection refused" 2025-12-12T16:20:41.235425482+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:20:41.235425482+00:00 stderr F time="2025-12-12T16:20:41Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused\"" source="{community-operators openshift-marketplace}" 2025-12-12T16:20:41.388739953+00:00 stderr F W1212 16:20:41.388684 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: "certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused" 2025-12-12T16:20:41.869441762+00:00 stderr F W1212 16:20:41.869398 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused" 2025-12-12T16:20:41.977434185+00:00 stderr F W1212 16:20:41.976754 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.43:50051: connect: connection refused" 2025-12-12T16:20:42.234538767+00:00 stderr F time="2025-12-12T16:20:42Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-marketplace openshift-marketplace}" action="sync catalogsource" address="redhat-marketplace.openshift-marketplace.svc:50051" name=redhat-marketplace namespace=openshift-marketplace 2025-12-12T16:20:42.235843922+00:00 stderr F time="2025-12-12T16:20:42Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.43:50051: connect: connection refused\"" source="{redhat-marketplace openshift-marketplace}" 2025-12-12T16:20:44.186097797+00:00 stderr F time="2025-12-12T16:20:44Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:20:44.186161938+00:00 stderr F time="2025-12-12T16:20:44Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused\"" source="{certified-operators openshift-marketplace}" 2025-12-12T16:20:44.782820799+00:00 stderr F time="2025-12-12T16:20:44Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:20:44.782820799+00:00 stderr F time="2025-12-12T16:20:44Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused\"" source="{community-operators openshift-marketplace}" 2025-12-12T16:20:45.471138263+00:00 stderr F W1212 16:20:45.470569 1 logging.go:55] [core] [Channel #6 SubChannel #7]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-operators.openshift-marketplace.svc:50051", ServerName: "redhat-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.4.50:50051: connect: connection refused" 2025-12-12T16:20:45.804167138+00:00 stderr F W1212 16:20:45.804125 1 logging.go:55] [core] [Channel #1 SubChannel #8]grpc: addrConn.createTransport failed to connect to {Addr: "certified-operators.openshift-marketplace.svc:50051", ServerName: "certified-operators.openshift-marketplace.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused" 2025-12-12T16:20:45.987756313+00:00 stderr F W1212 16:20:45.986609 1 logging.go:55] [core] [Channel #2 SubChannel #3]grpc: addrConn.createTransport failed to connect to {Addr: "community-operators.openshift-marketplace.svc:50051", ServerName: "community-operators.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.29:50051: connect: connection refused" 2025-12-12T16:20:46.473253008+00:00 stderr F W1212 16:20:46.473187 1 logging.go:55] [core] [Channel #4 SubChannel #5]grpc: addrConn.createTransport failed to connect to {Addr: "redhat-marketplace.openshift-marketplace.svc:50051", ServerName: "redhat-marketplace.openshift-marketplace.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.43:50051: connect: connection refused" 2025-12-12T16:20:52.184436111+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:20:52.184436111+00:00 stderr F time="2025-12-12T16:20:52Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.115:50051: connect: connection refused\"" source="{certified-operators openshift-marketplace}" 2025-12-12T16:20:52.798850426+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-operators openshift-marketplace}" action="sync catalogsource" address="redhat-operators.openshift-marketplace.svc:50051" name=redhat-operators namespace=openshift-marketplace 2025-12-12T16:20:54.186857503+00:00 stderr F time="2025-12-12T16:20:54Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:20:54.788432342+00:00 stderr F time="2025-12-12T16:20:54Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:20:56.184858101+00:00 stderr F time="2025-12-12T16:20:56Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-marketplace openshift-marketplace}" action="sync catalogsource" address="redhat-marketplace.openshift-marketplace.svc:50051" name=redhat-marketplace namespace=openshift-marketplace 2025-12-12T16:20:56.785243748+00:00 stderr F time="2025-12-12T16:20:56Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-operators openshift-marketplace}" action="sync catalogsource" address="redhat-operators.openshift-marketplace.svc:50051" name=redhat-operators namespace=openshift-marketplace 2025-12-12T16:26:53.609221421+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" 
address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:26:58.199253011+00:00 stderr F time="2025-12-12T16:26:58Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-operators openshift-marketplace}" action="sync catalogsource" address="redhat-operators.openshift-marketplace.svc:50051" name=redhat-operators namespace=openshift-marketplace 2025-12-12T16:27:25.363667767+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:29:23.648633202+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="connecting to source" action="sync catalogsource" address="infrawatch-operators.service-telemetry.svc:50051" name=infrawatch-operators namespace=service-telemetry 2025-12-12T16:29:23.665110177+00:00 stderr F W1212 16:29:23.664933 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:24.669001715+00:00 stderr F W1212 16:29:24.668946 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:26.459443074+00:00 stderr F W1212 16:29:26.459356 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:28.566069673+00:00 stderr F W1212 16:29:28.565779 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:28.643188407+00:00 stderr F time="2025-12-12T16:29:28Z" level=info msg="updating PackageManifest based on CatalogSource changes: {infrawatch-operators service-telemetry}" action="sync catalogsource" address="infrawatch-operators.service-telemetry.svc:50051" name=infrawatch-operators namespace=service-telemetry 2025-12-12T16:29:28.643302630+00:00 stderr F time="2025-12-12T16:29:28Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" source="{infrawatch-operators service-telemetry}" 2025-12-12T16:29:33.409834597+00:00 stderr F W1212 16:29:33.409781 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:35.443500237+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="updating PackageManifest based on CatalogSource changes: {infrawatch-operators service-telemetry}" action="sync catalogsource" address="infrawatch-operators.service-telemetry.svc:50051" name=infrawatch-operators namespace=service-telemetry 2025-12-12T16:29:35.443500237+00:00 stderr F time="2025-12-12T16:29:35Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" source="{infrawatch-operators service-telemetry}" 2025-12-12T16:29:39.594903117+00:00 stderr F W1212 16:29:39.594849 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:48.109645468+00:00 stderr F W1212 16:29:48.109572 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:30:06.802202638+00:00 stderr F W1212 16:30:06.801197 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:30:30.438847173+00:00 stderr F W1212 16:30:30.438196 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:31:18.687388472+00:00 stderr F W1212 16:31:18.687284 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:32:15.355057619+00:00 stderr F W1212 16:32:15.353984 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:33:51.034069041+00:00 stderr F W1212 16:33:51.033235 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:25.693813995+00:00 stderr F time="2025-12-12T16:34:25Z" level=info msg="updating PackageManifest based on CatalogSource changes: {infrawatch-operators service-telemetry}" action="sync catalogsource" address="infrawatch-operators.service-telemetry.svc:50051" name=infrawatch-operators namespace=service-telemetry 2025-12-12T16:34:25.693813995+00:00 stderr F time="2025-12-12T16:34:25Z" level=warning msg="error getting bundle stream" action="refresh cache" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" source="{infrawatch-operators service-telemetry}" 2025-12-12T16:35:29.928302637+00:00 stderr F time="2025-12-12T16:35:29Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:35:29.930170334+00:00 stderr F time="2025-12-12T16:35:29Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:35:31.920729768+00:00 stderr F time="2025-12-12T16:35:31Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-operators openshift-marketplace}" action="sync catalogsource" address="redhat-operators.openshift-marketplace.svc:50051" name=redhat-operators namespace=openshift-marketplace 2025-12-12T16:35:49.147294479+00:00 stderr F W1212 16:35:49.146611 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:13.696832783+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="updating PackageManifest based on CatalogSource changes: {certified-operators openshift-marketplace}" action="sync catalogsource" address="certified-operators.openshift-marketplace.svc:50051" name=certified-operators namespace=openshift-marketplace 2025-12-12T16:37:22.544680559+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="updating PackageManifest based on CatalogSource changes: {redhat-operators openshift-marketplace}" action="sync catalogsource" address="redhat-operators.openshift-marketplace.svc:50051" name=redhat-operators namespace=openshift-marketplace 2025-12-12T16:37:53.401227759+00:00 stderr F W1212 16:37:53.401101 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:38:09.918342238+00:00 stderr F time="2025-12-12T16:38:09Z" level=info msg="updating PackageManifest based on CatalogSource changes: {community-operators openshift-marketplace}" action="sync catalogsource" address="community-operators.openshift-marketplace.svc:50051" name=community-operators namespace=openshift-marketplace 2025-12-12T16:40:05.278043457+00:00 stderr F W1212 16:40:05.276172 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:42:24.604396618+00:00 stderr F W1212 16:42:24.603441 1 logging.go:55] [core] [Channel #17 SubChannel #18]grpc: addrConn.createTransport failed to connect to {Addr: "infrawatch-operators.service-telemetry.svc:50051", ServerName: "infrawatch-operators.service-telemetry.svc:50051", }. 
Err: connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" ././@LongLink0000644000000000000000000000026200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-nwxp2_62e07220-a49a-4989-8f0a-7eb7daf6fc61/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015117043043032775 5ustar zuulzuul././@LongLink0000644000000000000000000000031000000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-nwxp2_62e07220-a49a-4989-8f0a-7eb7daf6fc61/machine-config-server/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015117043062032776 5ustar zuulzuul././@LongLink0000644000000000000000000000031500000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-nwxp2_62e07220-a49a-4989-8f0a-7eb7daf6fc61/machine-config-server/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000644000175000017500000000202015117043043032771 0ustar zuulzuul2025-12-12T16:16:44.988268540+00:00 stderr F I1212 16:16:44.985393 1 start.go:40] Version: 89b561f0 (f587a1bfbaba518cc1d49ad6300e29eeb9c38cec) 2025-12-12T16:16:45.306566751+00:00 stderr F I1212 16:16:45.304622 1 start.go:51] Launching server with tls min version: VersionTLS12 & cipher suites [TLS_AES_128_GCM_SHA256 TLS_AES_256_GCM_SHA384 TLS_CHACHA20_POLY1305_SHA256 TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256] 2025-12-12T16:16:45.306566751+00:00 stderr F I1212 16:16:45.304776 1 api.go:68] Launching server on :22624 2025-12-12T16:16:45.306566751+00:00 stderr F I1212 16:16:45.305162 1 api.go:68] Launching server on :22623 2025-12-12T16:16:45.306566751+00:00 stderr F I1212 16:16:45.306287 1 certwatcher.go:133] "Starting certificate poll+watcher" logger="controller-runtime.certwatcher" interval="10s" ././@LongLink0000644000000000000000000000024400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-11-crc_0ad9be1e-b38d-4280-8a67-505c4461c55d/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000755000175000017500000000000015117043043033043 5ustar zuulzuul././@LongLink0000644000000000000000000000025300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-11-crc_0ad9be1e-b38d-4280-8a67-505c4461c55d/pruner/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000755000175000017500000000000015117043062033044 5ustar zuulzuul././@LongLink0000644000000000000000000000026000000000000011601 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-11-crc_0ad9be1e-b38d-4280-8a67-505c4461c55d/pruner/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000644000175000017500000000355615117043043033056 0ustar zuulzuul2025-12-12T16:16:56.017125211+00:00 stderr F I1212 16:16:56.016463 1 cmd.go:39] &{ true {false} prune true map[cert-dir:0xc000ace780 max-eligible-revision:0xc000ace500 protected-revisions:0xc000ace5a0 resource-dir:0xc000ace640 static-pod-name:0xc000ace6e0 v:0xc000acf860] [0xc000acf860 0xc000ace500 0xc000ace5a0 0xc000ace640 0xc000ace780 0xc000ace6e0] [] map[cert-dir:0xc000ace780 help:0xc000acfc20 log-flush-frequency:0xc000acf7c0 max-eligible-revision:0xc000ace500 protected-revisions:0xc000ace5a0 resource-dir:0xc000ace640 static-pod-name:0xc000ace6e0 v:0xc000acf860 vmodule:0xc000acf900] [0xc000ace500 0xc000ace5a0 0xc000ace640 0xc000ace6e0 0xc000ace780 0xc000acf7c0 0xc000acf860 0xc000acf900 0xc000acfc20] [0xc000ace780 0xc000acfc20 0xc000acf7c0 0xc000ace500 0xc000ace5a0 0xc000ace640 0xc000ace6e0 0xc000acf860 0xc000acf900] map[104:0xc000acfc20 118:0xc000acf860] [] -1 0 0xc000a94e40 true 0xae3c00 []} 2025-12-12T16:16:56.017125211+00:00 stderr F I1212 16:16:56.017109 1 cmd.go:40] (*prune.PruneOptions)(0xc000ab61e0)({ 2025-12-12T16:16:56.017125211+00:00 stderr F MaxEligibleRevision: (int) 11, 2025-12-12T16:16:56.017125211+00:00 stderr F ProtectedRevisions: ([]int) (len=5 cap=5) { 2025-12-12T16:16:56.017125211+00:00 stderr F (int) 7, 2025-12-12T16:16:56.017125211+00:00 stderr F (int) 8, 2025-12-12T16:16:56.017125211+00:00 stderr F (int) 9, 2025-12-12T16:16:56.017125211+00:00 stderr F (int) 10, 2025-12-12T16:16:56.017125211+00:00 stderr F (int) 11 2025-12-12T16:16:56.017125211+00:00 stderr F }, 2025-12-12T16:16:56.017125211+00:00 stderr F ResourceDir: (string) (len=36) "/etc/kubernetes/static-pod-resources", 2025-12-12T16:16:56.017125211+00:00 stderr F CertDir: (string) (len=20) "kube-apiserver-certs", 2025-12-12T16:16:56.017125211+00:00 stderr F StaticPodName: (string) (len=18) "kube-apiserver-pod" 2025-12-12T16:16:56.017125211+00:00 stderr F }) ././@LongLink0000644000000000000000000000027600000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-75ffdb6fcd-m8gw7_9c49153e-af72-4d2f-8184-fa7ba43a5a3e/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_0000755000175000017500000000000015117043043032760 5ustar zuulzuul././@LongLink0000644000000000000000000000034100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-75ffdb6fcd-m8gw7_9c49153e-af72-4d2f-8184-fa7ba43a5a3e/control-plane-machine-set-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_0000755000175000017500000000000015117043062032761 5ustar zuulzuul././@LongLink0000644000000000000000000000034600000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-75ffdb6fcd-m8gw7_9c49153e-af72-4d2f-8184-fa7ba43a5a3e/control-plane-machine-set-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_0000644000175000017500000004207615117043043032773 0ustar zuulzuul2025-12-12T16:16:46.256257516+00:00 stderr F I1212 16:16:46.251037 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 2025-12-12T16:16:46.266309092+00:00 stderr F I1212 16:16:46.263233 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:46.328960371+00:00 stderr F I1212 16:16:46.328884 1 recorder_logging.go:49] &Event{ObjectMeta:{dummy.18808403ed313bf6.d3b4e2d2 dummy 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:dummy,Name:dummy,UID:,APIVersion:v1,ResourceVersion:,FieldPath:,},Reason:FeatureGatesInitialized,Message:FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", 
"OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}},Source:EventSource{Component:,Host:,},FirstTimestamp:2025-12-12 16:16:46.328429558 +0000 UTC m=+0.519596076,LastTimestamp:2025-12-12 16:16:46.328429558 +0000 UTC m=+0.519596076,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:,ReportingInstance:,} 2025-12-12T16:16:46.328990042+00:00 stderr F I1212 16:16:46.328947 1 main.go:176] FeatureGates initialized: [AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:16:46.330610422+00:00 stderr F I1212 16:16:46.329584 1 webhook.go:257] "Registering a validating webhook" logger="controller-runtime.builder" GVK="machine.openshift.io/v1, Kind=ControlPlaneMachineSet" path="/validate-machine-openshift-io-v1-controlplanemachineset" 2025-12-12T16:16:46.330610422+00:00 stderr F I1212 16:16:46.329689 1 server.go:183] "Registering webhook" logger="controller-runtime.webhook" 
path="/validate-machine-openshift-io-v1-controlplanemachineset" 2025-12-12T16:16:46.330610422+00:00 stderr F I1212 16:16:46.329722 1 main.go:232] "starting manager" logger="setup" 2025-12-12T16:16:46.338577906+00:00 stderr F I1212 16:16:46.338499 1 server.go:208] "Starting metrics server" logger="controller-runtime.metrics" 2025-12-12T16:16:46.338812092+00:00 stderr F I1212 16:16:46.338761 1 server.go:247] "Serving metrics server" logger="controller-runtime.metrics" bindAddress=":8080" secure=false 2025-12-12T16:16:46.338921695+00:00 stderr F I1212 16:16:46.338895 1 server.go:83] "starting server" name="health probe" addr="[::]:8081" 2025-12-12T16:16:46.338949445+00:00 stderr F I1212 16:16:46.338930 1 server.go:191] "Starting webhook server" logger="controller-runtime.webhook" 2025-12-12T16:16:46.342058111+00:00 stderr F I1212 16:16:46.341985 1 certwatcher.go:211] "Updated current TLS certificate" logger="controller-runtime.certwatcher" 2025-12-12T16:16:46.344262735+00:00 stderr F I1212 16:16:46.344138 1 server.go:242] "Serving webhook server" logger="controller-runtime.webhook" host="" port=9443 2025-12-12T16:16:46.346693854+00:00 stderr F I1212 16:16:46.346647 1 certwatcher.go:133] "Starting certificate poll+watcher" logger="controller-runtime.certwatcher" interval="10s" 2025-12-12T16:16:46.346759056+00:00 stderr F I1212 16:16:46.346730 1 leaderelection.go:257] attempting to acquire leader lease openshift-machine-api/control-plane-machine-set-leader... 2025-12-12T16:16:46.400004296+00:00 stderr F I1212 16:16:46.399953 1 leaderelection.go:271] successfully acquired lease openshift-machine-api/control-plane-machine-set-leader 2025-12-12T16:16:46.400361815+00:00 stderr F I1212 16:16:46.400333 1 controller.go:246] "Starting EventSource" controller="controlplanemachineset" source="kind source: *v1.Infrastructure" 2025-12-12T16:16:46.400611441+00:00 stderr F I1212 16:16:46.400577 1 controller.go:246] "Starting EventSource" controller="controlplanemachinesetgenerator" source="kind source: *v1beta1.Machine" 2025-12-12T16:16:46.400636051+00:00 stderr F I1212 16:16:46.400621 1 controller.go:246] "Starting EventSource" controller="controlplanemachineset" source="kind source: *v1.ControlPlaneMachineSet" 2025-12-12T16:16:46.400679902+00:00 stderr F I1212 16:16:46.400653 1 controller.go:246] "Starting EventSource" controller="controlplanemachineset" source="kind source: *v1beta1.Machine" 2025-12-12T16:16:46.400688383+00:00 stderr F I1212 16:16:46.400680 1 controller.go:246] "Starting EventSource" controller="controlplanemachineset" source="kind source: *v1.Node" 2025-12-12T16:16:46.400716373+00:00 stderr F I1212 16:16:46.400701 1 controller.go:246] "Starting EventSource" controller="controlplanemachineset" source="kind source: *v1.ClusterOperator" 2025-12-12T16:16:46.400752494+00:00 stderr F I1212 16:16:46.400738 1 controller.go:246] "Starting EventSource" controller="controlplanemachinesetgenerator" source="kind source: *v1.ControlPlaneMachineSet" 2025-12-12T16:16:46.400845326+00:00 stderr F I1212 16:16:46.400769 1 recorder.go:104] "control-plane-machine-set-operator-75ffdb6fcd-m8gw7_2d96a494-7912-4f85-8b53-c3db16b92b18 became leader" logger="events" type="Normal" object={"kind":"Lease","namespace":"openshift-machine-api","name":"control-plane-machine-set-leader","uid":"df92df93-9a6d-4b91-a715-7e7b15349653","apiVersion":"coordination.k8s.io/v1","resourceVersion":"37225"} reason="LeaderElection" 2025-12-12T16:16:46.435899982+00:00 stderr F I1212 16:16:46.435352 1 reflector.go:430] "Caches populated" 
logger="controller-runtime.cache" type="*v1.ControlPlaneMachineSet" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:16:46.435899982+00:00 stderr F I1212 16:16:46.435740 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1beta1.Machine" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:16:46.487458121+00:00 stderr F I1212 16:16:46.487333 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.Infrastructure" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:16:46.507777167+00:00 stderr F I1212 16:16:46.507577 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.Node" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:16:46.541544332+00:00 stderr F I1212 16:16:46.541469 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.ClusterOperator" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:16:46.635013484+00:00 stderr F I1212 16:16:46.634949 1 controller.go:186] "Starting Controller" controller="controlplanemachinesetgenerator" 2025-12-12T16:16:46.635013484+00:00 stderr F I1212 16:16:46.634975 1 controller.go:195] "Starting workers" controller="controlplanemachinesetgenerator" worker count=1 2025-12-12T16:16:46.635075395+00:00 stderr F I1212 16:16:46.635052 1 controller.go:186] "Starting Controller" controller="controlplanemachineset" 2025-12-12T16:16:46.635117026+00:00 stderr F I1212 16:16:46.635069 1 controller.go:195] "Starting workers" controller="controlplanemachineset" worker count=1 2025-12-12T16:16:46.635210388+00:00 stderr F I1212 16:16:46.635142 1 controller.go:175] "Reconciling control plane machine set" controller="controlplanemachineset" reconcileID="70b05e9f-c196-4ddf-8131-25256a0730bd" namespace="openshift-machine-api" name="cluster" 2025-12-12T16:16:46.635339812+00:00 stderr F I1212 16:16:46.635300 1 controller.go:183] "No control plane machine set found, setting operator status available" controller="controlplanemachineset" reconcileID="70b05e9f-c196-4ddf-8131-25256a0730bd" namespace="openshift-machine-api" name="cluster" 2025-12-12T16:16:46.635555027+00:00 stderr F I1212 16:16:46.635488 1 controller.go:189] "Finished reconciling control plane machine set" controller="controlplanemachineset" reconcileID="70b05e9f-c196-4ddf-8131-25256a0730bd" namespace="openshift-machine-api" name="cluster" 2025-12-12T16:18:30.442113747+00:00 stderr F E1212 16:18:30.441566 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-machine-api/leases/control-plane-machine-set-leader?timeout=53.5s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:30.443546243+00:00 stderr F E1212 16:18:30.443351 1 leaderelection.go:436] error retrieving resource lock openshift-machine-api/control-plane-machine-set-leader: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-machine-api/leases/control-plane-machine-set-leader?timeout=53.5s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:56.436661584+00:00 stderr F E1212 16:18:56.436276 1 leaderelection.go:429] Failed to update lock optimistically: Put 
"https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-machine-api/leases/control-plane-machine-set-leader?timeout=53.5s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:56.437581057+00:00 stderr F E1212 16:18:56.437541 1 leaderelection.go:436] error retrieving resource lock openshift-machine-api/control-plane-machine-set-leader: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-machine-api/leases/control-plane-machine-set-leader?timeout=53.5s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:19:25.462277514+00:00 stderr F I1212 16:19:25.460028 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.ClusterOperator" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:19:25.462277514+00:00 stderr F I1212 16:19:25.461273 1 controller.go:175] "Reconciling control plane machine set" controller="controlplanemachineset" reconcileID="adc5c6ad-d2bd-4776-bb2a-97f414037542" namespace="openshift-machine-api" name="cluster" 2025-12-12T16:19:25.462277514+00:00 stderr F I1212 16:19:25.461419 1 controller.go:183] "No control plane machine set found, setting operator status available" controller="controlplanemachineset" reconcileID="adc5c6ad-d2bd-4776-bb2a-97f414037542" namespace="openshift-machine-api" name="cluster" 2025-12-12T16:19:25.462277514+00:00 stderr F I1212 16:19:25.461485 1 controller.go:189] "Finished reconciling control plane machine set" controller="controlplanemachineset" reconcileID="adc5c6ad-d2bd-4776-bb2a-97f414037542" namespace="openshift-machine-api" name="cluster" 2025-12-12T16:19:42.237121820+00:00 stderr F I1212 16:19:42.236515 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.Infrastructure" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:19:42.237211802+00:00 stderr F I1212 16:19:42.236667 1 controller.go:175] "Reconciling control plane machine set" controller="controlplanemachineset" reconcileID="ee281024-18c9-4377-beae-8054402b275a" namespace="openshift-machine-api" name="cluster" 2025-12-12T16:19:42.237299874+00:00 stderr F I1212 16:19:42.237271 1 controller.go:183] "No control plane machine set found, setting operator status available" controller="controlplanemachineset" reconcileID="ee281024-18c9-4377-beae-8054402b275a" namespace="openshift-machine-api" name="cluster" 2025-12-12T16:19:42.237357015+00:00 stderr F I1212 16:19:42.237338 1 controller.go:189] "Finished reconciling control plane machine set" controller="controlplanemachineset" reconcileID="ee281024-18c9-4377-beae-8054402b275a" namespace="openshift-machine-api" name="cluster" 2025-12-12T16:19:57.918619549+00:00 stderr F I1212 16:19:57.918547 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.ControlPlaneMachineSet" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:20:00.787006188+00:00 stderr F I1212 16:20:00.786493 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1beta1.Machine" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" 2025-12-12T16:20:24.905592635+00:00 stderr F I1212 16:20:24.905519 1 reflector.go:430] "Caches populated" logger="controller-runtime.cache" type="*v1.Node" reflector="sigs.k8s.io/controller-runtime/pkg/cache/internal/informers.go:114" ././@LongLink0000644000000000000000000000024000000000000011577 
Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015117043043033065 5ustar zuulzuul././@LongLink0000644000000000000000000000027400000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-ovn-metrics/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015117043062033066 5ustar zuulzuul././@LongLink0000644000000000000000000000030100000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-ovn-metrics/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000644000175000017500000001104015117043043033063 0ustar zuulzuul2025-12-12T16:25:29.036214265+00:00 stderr F ++ K8S_NODE= 2025-12-12T16:25:29.036214265+00:00 stderr F ++ [[ -n '' ]] 2025-12-12T16:25:29.036214265+00:00 stderr F ++ northd_pidfile=/var/run/ovn/ovn-northd.pid 2025-12-12T16:25:29.036214265+00:00 stderr F ++ controller_pidfile=/var/run/ovn/ovn-controller.pid 2025-12-12T16:25:29.036214265+00:00 stderr F ++ controller_logfile=/var/log/ovn/acl-audit-log.log 2025-12-12T16:25:29.036214265+00:00 stderr F ++ vswitch_dbsock=/var/run/openvswitch/db.sock 2025-12-12T16:25:29.036214265+00:00 stderr F ++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid 2025-12-12T16:25:29.036214265+00:00 stderr F ++ nbdb_sock=/var/run/ovn/ovnnb_db.sock 2025-12-12T16:25:29.036214265+00:00 stderr F ++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl 2025-12-12T16:25:29.036214265+00:00 stderr F ++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid 2025-12-12T16:25:29.036214265+00:00 stderr F ++ sbdb_sock=/var/run/ovn/ovnsb_db.sock 2025-12-12T16:25:29.036214265+00:00 stderr F ++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl 2025-12-12T16:25:29.037385964+00:00 stderr F + start-rbac-proxy-node ovn-metrics 9105 29105 /etc/pki/tls/metrics-cert/tls.key /etc/pki/tls/metrics-cert/tls.crt 2025-12-12T16:25:29.037385964+00:00 stderr F + local detail=ovn-metrics 2025-12-12T16:25:29.037400204+00:00 stderr F + local listen_port=9105 2025-12-12T16:25:29.037400204+00:00 stderr F + local upstream_port=29105 2025-12-12T16:25:29.037400204+00:00 stderr F + local privkey=/etc/pki/tls/metrics-cert/tls.key 2025-12-12T16:25:29.037410095+00:00 stderr F + local clientcert=/etc/pki/tls/metrics-cert/tls.crt 2025-12-12T16:25:29.037410095+00:00 stderr F + [[ 5 -ne 5 ]] 2025-12-12T16:25:29.037860936+00:00 stderr F ++ date -Iseconds 2025-12-12T16:25:29.041543969+00:00 stdout F 2025-12-12T16:25:29+00:00 INFO: waiting for ovn-metrics certs to be mounted 2025-12-12T16:25:29.041576689+00:00 stderr F + echo '2025-12-12T16:25:29+00:00 INFO: waiting for ovn-metrics certs to be mounted' 2025-12-12T16:25:29.041576689+00:00 stderr F + wait-for-certs ovn-metrics /etc/pki/tls/metrics-cert/tls.key /etc/pki/tls/metrics-cert/tls.crt 2025-12-12T16:25:29.041576689+00:00 stderr F + local detail=ovn-metrics 2025-12-12T16:25:29.041576689+00:00 stderr F + local privkey=/etc/pki/tls/metrics-cert/tls.key 2025-12-12T16:25:29.041576689+00:00 stderr F + local 
clientcert=/etc/pki/tls/metrics-cert/tls.crt 2025-12-12T16:25:29.041576689+00:00 stderr F + [[ 3 -ne 3 ]] 2025-12-12T16:25:29.041576689+00:00 stderr F + retries=0 2025-12-12T16:25:29.042066092+00:00 stderr F ++ date +%s 2025-12-12T16:25:29.045931259+00:00 stderr F + TS=1765556729 2025-12-12T16:25:29.045931259+00:00 stderr F + WARN_TS=1765557929 2025-12-12T16:25:29.045953110+00:00 stderr F + HAS_LOGGED_INFO=0 2025-12-12T16:25:29.045953110+00:00 stderr F + [[ ! -f /etc/pki/tls/metrics-cert/tls.key ]] 2025-12-12T16:25:29.046000491+00:00 stderr F + [[ ! -f /etc/pki/tls/metrics-cert/tls.crt ]] 2025-12-12T16:25:29.046714139+00:00 stderr F ++ date -Iseconds 2025-12-12T16:25:29.049192761+00:00 stdout F 2025-12-12T16:25:29+00:00 INFO: ovn-metrics certs mounted, starting kube-rbac-proxy 2025-12-12T16:25:29.049214692+00:00 stderr F + echo '2025-12-12T16:25:29+00:00 INFO: ovn-metrics certs mounted, starting kube-rbac-proxy' 2025-12-12T16:25:29.049214692+00:00 stderr F + exec /usr/bin/kube-rbac-proxy --logtostderr --secure-listen-address=:9105 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 --upstream=http://127.0.0.1:29105/ --tls-private-key-file=/etc/pki/tls/metrics-cert/tls.key --tls-cert-file=/etc/pki/tls/metrics-cert/tls.crt 2025-12-12T16:25:29.105806556+00:00 stderr F W1212 16:25:29.105611 23596 deprecated.go:66] 2025-12-12T16:25:29.105806556+00:00 stderr F ==== Removed Flag Warning ====================== 2025-12-12T16:25:29.105806556+00:00 stderr F 2025-12-12T16:25:29.105806556+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more. 2025-12-12T16:25:29.105806556+00:00 stderr F 2025-12-12T16:25:29.105806556+00:00 stderr F =============================================== 2025-12-12T16:25:29.105806556+00:00 stderr F 2025-12-12T16:25:29.106363030+00:00 stderr F I1212 16:25:29.106337 23596 kube-rbac-proxy.go:235] Valid token audiences: 2025-12-12T16:25:29.107420806+00:00 stderr F I1212 16:25:29.107388 23596 kube-rbac-proxy.go:349] Reading certificate files 2025-12-12T16:25:29.107812116+00:00 stderr F I1212 16:25:29.107747 23596 kube-rbac-proxy.go:397] Starting TCP socket on :9105 2025-12-12T16:25:29.108193176+00:00 stderr F I1212 16:25:29.108165 23596 kube-rbac-proxy.go:404] Listening securely on :9105 ././@LongLink0000644000000000000000000000026500000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-node/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015117043062033066 5ustar zuulzuul././@LongLink0000644000000000000000000000027200000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-node/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000644000175000017500000001111015117043043033061 0ustar zuulzuul2025-12-12T16:25:28.910331837+00:00 stderr F ++ K8S_NODE= 2025-12-12T16:25:28.910331837+00:00 stderr F ++ [[ -n '' ]] 2025-12-12T16:25:28.910331837+00:00 stderr F ++ northd_pidfile=/var/run/ovn/ovn-northd.pid 2025-12-12T16:25:28.910473681+00:00 stderr F ++ controller_pidfile=/var/run/ovn/ovn-controller.pid 2025-12-12T16:25:28.910473681+00:00 stderr F ++ 
controller_logfile=/var/log/ovn/acl-audit-log.log 2025-12-12T16:25:28.910473681+00:00 stderr F ++ vswitch_dbsock=/var/run/openvswitch/db.sock 2025-12-12T16:25:28.910473681+00:00 stderr F ++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid 2025-12-12T16:25:28.910473681+00:00 stderr F ++ nbdb_sock=/var/run/ovn/ovnnb_db.sock 2025-12-12T16:25:28.910473681+00:00 stderr F ++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl 2025-12-12T16:25:28.910473681+00:00 stderr F ++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid 2025-12-12T16:25:28.910473681+00:00 stderr F ++ sbdb_sock=/var/run/ovn/ovnsb_db.sock 2025-12-12T16:25:28.910473681+00:00 stderr F ++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl 2025-12-12T16:25:28.911786814+00:00 stderr F + start-rbac-proxy-node ovn-node-metrics 9103 29103 /etc/pki/tls/metrics-cert/tls.key /etc/pki/tls/metrics-cert/tls.crt 2025-12-12T16:25:28.911786814+00:00 stderr F + local detail=ovn-node-metrics 2025-12-12T16:25:28.911802324+00:00 stderr F + local listen_port=9103 2025-12-12T16:25:28.911802324+00:00 stderr F + local upstream_port=29103 2025-12-12T16:25:28.911802324+00:00 stderr F + local privkey=/etc/pki/tls/metrics-cert/tls.key 2025-12-12T16:25:28.911810694+00:00 stderr F + local clientcert=/etc/pki/tls/metrics-cert/tls.crt 2025-12-12T16:25:28.911810694+00:00 stderr F + [[ 5 -ne 5 ]] 2025-12-12T16:25:28.912489931+00:00 stderr F ++ date -Iseconds 2025-12-12T16:25:28.916045971+00:00 stderr F + echo '2025-12-12T16:25:28+00:00 INFO: waiting for ovn-node-metrics certs to be mounted' 2025-12-12T16:25:28.916123593+00:00 stdout F 2025-12-12T16:25:28+00:00 INFO: waiting for ovn-node-metrics certs to be mounted 2025-12-12T16:25:28.916131753+00:00 stderr F + wait-for-certs ovn-node-metrics /etc/pki/tls/metrics-cert/tls.key /etc/pki/tls/metrics-cert/tls.crt 2025-12-12T16:25:28.916204095+00:00 stderr F + local detail=ovn-node-metrics 2025-12-12T16:25:28.916204095+00:00 stderr F + local privkey=/etc/pki/tls/metrics-cert/tls.key 2025-12-12T16:25:28.916204095+00:00 stderr F + local clientcert=/etc/pki/tls/metrics-cert/tls.crt 2025-12-12T16:25:28.916215715+00:00 stderr F + [[ 3 -ne 3 ]] 2025-12-12T16:25:28.916215715+00:00 stderr F + retries=0 2025-12-12T16:25:28.916847761+00:00 stderr F ++ date +%s 2025-12-12T16:25:28.919254392+00:00 stderr F + TS=1765556728 2025-12-12T16:25:28.919274722+00:00 stderr F + WARN_TS=1765557928 2025-12-12T16:25:28.919274722+00:00 stderr F + HAS_LOGGED_INFO=0 2025-12-12T16:25:28.919284602+00:00 stderr F + [[ ! -f /etc/pki/tls/metrics-cert/tls.key ]] 2025-12-12T16:25:28.919344544+00:00 stderr F + [[ ! 
-f /etc/pki/tls/metrics-cert/tls.crt ]] 2025-12-12T16:25:28.920032941+00:00 stderr F ++ date -Iseconds 2025-12-12T16:25:28.922706598+00:00 stdout F 2025-12-12T16:25:28+00:00 INFO: ovn-node-metrics certs mounted, starting kube-rbac-proxy 2025-12-12T16:25:28.922721009+00:00 stderr F + echo '2025-12-12T16:25:28+00:00 INFO: ovn-node-metrics certs mounted, starting kube-rbac-proxy' 2025-12-12T16:25:28.922729319+00:00 stderr F + exec /usr/bin/kube-rbac-proxy --logtostderr --secure-listen-address=:9103 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 --upstream=http://127.0.0.1:29103/ --tls-private-key-file=/etc/pki/tls/metrics-cert/tls.key --tls-cert-file=/etc/pki/tls/metrics-cert/tls.crt 2025-12-12T16:25:28.961890314+00:00 stderr F W1212 16:25:28.961668 23574 deprecated.go:66] 2025-12-12T16:25:28.961890314+00:00 stderr F ==== Removed Flag Warning ====================== 2025-12-12T16:25:28.961890314+00:00 stderr F 2025-12-12T16:25:28.961890314+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more. 2025-12-12T16:25:28.961890314+00:00 stderr F 2025-12-12T16:25:28.961890314+00:00 stderr F =============================================== 2025-12-12T16:25:28.961890314+00:00 stderr F 2025-12-12T16:25:28.962814398+00:00 stderr F I1212 16:25:28.962668 23574 kube-rbac-proxy.go:235] Valid token audiences: 2025-12-12T16:25:28.965249389+00:00 stderr F I1212 16:25:28.965091 23574 kube-rbac-proxy.go:349] Reading certificate files 2025-12-12T16:25:28.965574847+00:00 stderr F I1212 16:25:28.965545 23574 kube-rbac-proxy.go:397] Starting TCP socket on :9103 2025-12-12T16:25:28.965972047+00:00 stderr F I1212 16:25:28.965942 23574 kube-rbac-proxy.go:404] Listening securely on :9103 ././@LongLink0000644000000000000000000000026000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-acl-logging/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015117043062033066 5ustar zuulzuul././@LongLink0000644000000000000000000000026500000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-acl-logging/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000644000175000017500000007370615117043043033104 0ustar zuulzuul2025-12-12T16:25:28.756034855+00:00 stderr F ++ K8S_NODE= 2025-12-12T16:25:28.756034855+00:00 stderr F ++ [[ -n '' ]] 2025-12-12T16:25:28.756034855+00:00 stderr F ++ northd_pidfile=/var/run/ovn/ovn-northd.pid 2025-12-12T16:25:28.756034855+00:00 stderr F ++ controller_pidfile=/var/run/ovn/ovn-controller.pid 2025-12-12T16:25:28.756034855+00:00 stderr F ++ controller_logfile=/var/log/ovn/acl-audit-log.log 2025-12-12T16:25:28.756034855+00:00 stderr F ++ vswitch_dbsock=/var/run/openvswitch/db.sock 2025-12-12T16:25:28.756034855+00:00 stderr F ++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid 2025-12-12T16:25:28.756034855+00:00 stderr F ++ nbdb_sock=/var/run/ovn/ovnnb_db.sock 2025-12-12T16:25:28.756034855+00:00 stderr F ++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl 2025-12-12T16:25:28.756034855+00:00 stderr F ++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid 2025-12-12T16:25:28.756034855+00:00 stderr F ++ sbdb_sock=/var/run/ovn/ovnsb_db.sock 
2025-12-12T16:25:28.756034855+00:00 stderr F ++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl 2025-12-12T16:25:28.757209724+00:00 stderr F + start-audit-log-rotation 2025-12-12T16:25:28.757270086+00:00 stderr F + MAXFILESIZE=50000000 2025-12-12T16:25:28.757270086+00:00 stderr F + MAXLOGFILES=5 2025-12-12T16:25:28.758146858+00:00 stderr F ++ dirname /var/log/ovn/acl-audit-log.log 2025-12-12T16:25:28.763512463+00:00 stderr F + LOGDIR=/var/log/ovn 2025-12-12T16:25:28.763512463+00:00 stderr F + local retries=0 2025-12-12T16:25:28.763541273+00:00 stderr F + [[ 30 -gt 0 ]] 2025-12-12T16:25:28.763541273+00:00 stderr F + (( retries += 1 )) 2025-12-12T16:25:28.764273992+00:00 stderr F ++ cat /var/run/ovn/ovn-controller.pid 2025-12-12T16:25:28.766746274+00:00 stderr F + CONTROLLERPID=23527 2025-12-12T16:25:28.766769975+00:00 stderr F + [[ -n 23527 ]] 2025-12-12T16:25:28.766769975+00:00 stderr F + break 2025-12-12T16:25:28.766769975+00:00 stderr F + [[ -z 23527 ]] 2025-12-12T16:25:28.767089363+00:00 stderr F + true 2025-12-12T16:25:28.767135834+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:25:28.767256827+00:00 stderr F + tail -F /var/log/ovn/acl-audit-log.log 2025-12-12T16:25:28.769422541+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:25:28.769422541+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:25:28.769422541+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:25:28.783559557+00:00 stderr F + file_size=0 2025-12-12T16:25:28.783559557+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:25:28.784502911+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:25:28.784622344+00:00 stderr F ++ wc -l 2025-12-12T16:25:28.787897836+00:00 stderr F + num_files=1 2025-12-12T16:25:28.787914897+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:25:28.787914897+00:00 stderr F + sleep 30 2025-12-12T16:25:58.790561807+00:00 stderr F + true 2025-12-12T16:25:58.790561807+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:25:58.791900511+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:25:58.792065055+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:25:58.792257160+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:25:58.795651886+00:00 stderr F + file_size=0 2025-12-12T16:25:58.795680977+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:25:58.796944978+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:25:58.797110213+00:00 stderr F ++ wc -l 2025-12-12T16:25:58.800687483+00:00 stderr F + num_files=1 2025-12-12T16:25:58.800687483+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:25:58.800687483+00:00 stderr F + sleep 30 2025-12-12T16:26:28.804447151+00:00 stderr F + true 2025-12-12T16:26:28.804447151+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:26:28.805742013+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:26:28.806001190+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:26:28.806529963+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:26:28.811128449+00:00 stderr F + file_size=0 2025-12-12T16:26:28.811128449+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:26:28.812039642+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:26:28.812161575+00:00 stderr F ++ wc -l 2025-12-12T16:26:28.814828213+00:00 stderr F + num_files=1 2025-12-12T16:26:28.814828213+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:26:28.814828213+00:00 stderr F + sleep 30 2025-12-12T16:26:58.817677485+00:00 stderr F + true 2025-12-12T16:26:58.817677485+00:00 stderr F + '[' -f 
/var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:26:58.820131597+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:26:58.820131597+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:26:58.820131597+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:26:58.828725895+00:00 stderr F + file_size=0 2025-12-12T16:26:58.828725895+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:26:58.828725895+00:00 stderr F ++ wc -l 2025-12-12T16:26:58.828725895+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:26:58.834253265+00:00 stderr F + num_files=1 2025-12-12T16:26:58.835296581+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:26:58.835296581+00:00 stderr F + sleep 30 2025-12-12T16:27:28.839567547+00:00 stderr F + true 2025-12-12T16:27:28.839567547+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:27:28.841255009+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:27:28.841255009+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:27:28.841255009+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:27:28.846172764+00:00 stderr F + file_size=0 2025-12-12T16:27:28.846172764+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:27:28.847660321+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:27:28.847660321+00:00 stderr F ++ wc -l 2025-12-12T16:27:28.850766380+00:00 stderr F + num_files=1 2025-12-12T16:27:28.850766380+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:27:28.850766380+00:00 stderr F + sleep 30 2025-12-12T16:27:58.852842364+00:00 stderr F + true 2025-12-12T16:27:58.852842364+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:27:58.854270790+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:27:58.858717203+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:27:58.858717203+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:27:58.861160894+00:00 stderr F + file_size=0 2025-12-12T16:27:58.861195955+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:27:58.862299223+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:27:58.862358685+00:00 stderr F ++ wc -l 2025-12-12T16:27:58.865005202+00:00 stderr F + num_files=1 2025-12-12T16:27:58.865021892+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:27:58.865029162+00:00 stderr F + sleep 30 2025-12-12T16:28:28.867476293+00:00 stderr F + true 2025-12-12T16:28:28.867476293+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:28:28.868619522+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:28:28.868872448+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:28:28.869087644+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:28:28.871630448+00:00 stderr F + file_size=0 2025-12-12T16:28:28.871630448+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:28:28.872501900+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:28:28.872562822+00:00 stderr F ++ wc -l 2025-12-12T16:28:28.875863295+00:00 stderr F + num_files=1 2025-12-12T16:28:28.875883096+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:28:28.875883096+00:00 stderr F + sleep 30 2025-12-12T16:28:58.878987375+00:00 stderr F + true 2025-12-12T16:28:58.878987375+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:28:58.881311164+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:28:58.881800996+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:28:58.881885858+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:28:58.887033918+00:00 stderr F + file_size=0 2025-12-12T16:28:58.887052829+00:00 stderr F + '[' 0 -gt 
50000000 ']' 2025-12-12T16:28:58.888664790+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:28:58.888983058+00:00 stderr F ++ wc -l 2025-12-12T16:28:58.893823250+00:00 stderr F + num_files=1 2025-12-12T16:28:58.893886422+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:28:58.893886422+00:00 stderr F + sleep 30 2025-12-12T16:29:28.896759400+00:00 stderr F + true 2025-12-12T16:29:28.896759400+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:29:28.905591863+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:29:28.905591863+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:29:28.905591863+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:29:28.905591863+00:00 stderr F + file_size=0 2025-12-12T16:29:28.905591863+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:29:28.905591863+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:29:28.905591863+00:00 stderr F ++ wc -l 2025-12-12T16:29:28.908094336+00:00 stderr F + num_files=1 2025-12-12T16:29:28.908094336+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:29:28.908127367+00:00 stderr F + sleep 30 2025-12-12T16:29:58.912567464+00:00 stderr F + true 2025-12-12T16:29:58.912620975+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:29:58.913979529+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:29:58.914283807+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:29:58.914555114+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:29:58.918202206+00:00 stderr F + file_size=0 2025-12-12T16:29:58.918289048+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:29:58.919274823+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:29:58.919530669+00:00 stderr F ++ wc -l 2025-12-12T16:29:58.923110920+00:00 stderr F + num_files=1 2025-12-12T16:29:58.923127700+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:29:58.923127700+00:00 stderr F + sleep 30 2025-12-12T16:30:28.925892251+00:00 stderr F + true 2025-12-12T16:30:28.925892251+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:30:28.927643675+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:30:28.927789218+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:30:28.927952552+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:30:28.931671255+00:00 stderr F + file_size=0 2025-12-12T16:30:28.931702676+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:30:28.932971618+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:30:28.933244755+00:00 stderr F ++ wc -l 2025-12-12T16:30:28.936914326+00:00 stderr F + num_files=1 2025-12-12T16:30:28.936914326+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:30:28.936947947+00:00 stderr F + sleep 30 2025-12-12T16:30:58.942229126+00:00 stderr F + true 2025-12-12T16:30:58.942295957+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:30:58.944129263+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:30:58.944198785+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:30:58.944343948+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:30:58.948350089+00:00 stderr F + file_size=0 2025-12-12T16:30:58.948370509+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:30:58.949382364+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:30:58.949631811+00:00 stderr F ++ wc -l 2025-12-12T16:30:58.953548188+00:00 stderr F + num_files=1 2025-12-12T16:30:58.953548188+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:30:58.953548188+00:00 stderr F + sleep 30 2025-12-12T16:31:28.956719508+00:00 
stderr F + true 2025-12-12T16:31:28.956719508+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:31:28.958497831+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:31:28.958577823+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:31:28.958879291+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:31:28.962275104+00:00 stderr F + file_size=0 2025-12-12T16:31:28.962305015+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:31:28.963580306+00:00 stderr F ++ wc -l 2025-12-12T16:31:28.963752371+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:31:28.967771150+00:00 stderr F + num_files=1 2025-12-12T16:31:28.967792510+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:31:28.967800310+00:00 stderr F + sleep 30 2025-12-12T16:31:58.971882230+00:00 stderr F + true 2025-12-12T16:31:58.971882230+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:31:58.973887119+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:31:58.974069243+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:31:58.974278439+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:31:58.979537538+00:00 stderr F + file_size=0 2025-12-12T16:31:58.979537538+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:31:58.980818979+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:31:58.981010984+00:00 stderr F ++ wc -l 2025-12-12T16:31:58.984776727+00:00 stderr F + num_files=1 2025-12-12T16:31:58.984810278+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:31:58.984810278+00:00 stderr F + sleep 30 2025-12-12T16:32:28.987893220+00:00 stderr F + true 2025-12-12T16:32:28.987893220+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:32:28.989596473+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:32:28.990012003+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:32:28.990233779+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:32:28.993475690+00:00 stderr F + file_size=0 2025-12-12T16:32:28.993501330+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:32:28.994662859+00:00 stderr F ++ wc -l 2025-12-12T16:32:28.994852834+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:32:28.999704975+00:00 stderr F + num_files=1 2025-12-12T16:32:28.999727226+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:32:28.999736896+00:00 stderr F + sleep 30 2025-12-12T16:32:59.002578919+00:00 stderr F + true 2025-12-12T16:32:59.002578919+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:32:59.004337613+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:32:59.004587059+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:32:59.004783094+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:32:59.009005689+00:00 stderr F + file_size=0 2025-12-12T16:32:59.009005689+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:32:59.010409384+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:32:59.010409384+00:00 stderr F ++ wc -l 2025-12-12T16:32:59.013952573+00:00 stderr F + num_files=1 2025-12-12T16:32:59.013977294+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:32:59.013977294+00:00 stderr F + sleep 30 2025-12-12T16:33:29.017785213+00:00 stderr F + true 2025-12-12T16:33:29.017785213+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:33:29.019215639+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:33:29.019596648+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:33:29.019613919+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:33:29.024241094+00:00 stderr F + 
file_size=0 2025-12-12T16:33:29.024241094+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:33:29.025825434+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:33:29.026020419+00:00 stderr F ++ wc -l 2025-12-12T16:33:29.029992148+00:00 stderr F + num_files=1 2025-12-12T16:33:29.029992148+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:33:29.030029349+00:00 stderr F + sleep 30 2025-12-12T16:33:59.033906777+00:00 stderr F + true 2025-12-12T16:33:59.033906777+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:33:59.034910602+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:33:59.035073766+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:33:59.035305282+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:33:59.037531987+00:00 stderr F + file_size=0 2025-12-12T16:33:59.037548238+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:33:59.038456620+00:00 stderr F ++ wc -l 2025-12-12T16:33:59.038544783+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:33:59.041802634+00:00 stderr F + num_files=1 2025-12-12T16:33:59.041819654+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:33:59.041819654+00:00 stderr F + sleep 30 2025-12-12T16:34:29.044316947+00:00 stderr F + true 2025-12-12T16:34:29.044316947+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:34:29.045688662+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:34:29.046399949+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:34:29.046625215+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:34:29.050744678+00:00 stderr F + file_size=0 2025-12-12T16:34:29.050744678+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:34:29.051902337+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:34:29.052222265+00:00 stderr F ++ wc -l 2025-12-12T16:34:29.053938268+00:00 stderr F + num_files=1 2025-12-12T16:34:29.053938268+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:34:29.053938268+00:00 stderr F + sleep 30 2025-12-12T16:34:59.056953858+00:00 stderr F + true 2025-12-12T16:34:59.056953858+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:34:59.058388604+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:34:59.058660571+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:34:59.058869726+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:34:59.062662751+00:00 stderr F + file_size=0 2025-12-12T16:34:59.062706392+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:34:59.064069176+00:00 stderr F ++ wc -l 2025-12-12T16:34:59.064069176+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:34:59.068052457+00:00 stderr F + num_files=1 2025-12-12T16:34:59.068052457+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:34:59.068071617+00:00 stderr F + sleep 30 2025-12-12T16:35:29.071965692+00:00 stderr F + true 2025-12-12T16:35:29.072067894+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:35:29.073365587+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:35:29.073551062+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:35:29.073685885+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:35:29.076785343+00:00 stderr F + file_size=0 2025-12-12T16:35:29.076861625+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:35:29.077976003+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:35:29.078163187+00:00 stderr F ++ wc -l 2025-12-12T16:35:29.080948067+00:00 stderr F + num_files=1 2025-12-12T16:35:29.081005239+00:00 stderr F + '[' 1 -gt 5 ']' 
2025-12-12T16:35:29.081032850+00:00 stderr F + sleep 30 2025-12-12T16:35:59.085257951+00:00 stderr F + true 2025-12-12T16:35:59.085257951+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:35:59.086557734+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:35:59.086603795+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:35:59.086742799+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:35:59.092112584+00:00 stderr F + file_size=0 2025-12-12T16:35:59.092112584+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:35:59.093646622+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:35:59.093646622+00:00 stderr F ++ wc -l 2025-12-12T16:35:59.097393046+00:00 stderr F + num_files=1 2025-12-12T16:35:59.097426307+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:35:59.097434457+00:00 stderr F + sleep 30 2025-12-12T16:36:29.100439698+00:00 stderr F + true 2025-12-12T16:36:29.100503509+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:36:29.102380256+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:36:29.102772746+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:36:29.103082484+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:36:29.107069284+00:00 stderr F + file_size=0 2025-12-12T16:36:29.107069284+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:36:29.108280055+00:00 stderr F ++ wc -l 2025-12-12T16:36:29.108344176+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:36:29.112658735+00:00 stderr F + num_files=1 2025-12-12T16:36:29.112680795+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:36:29.112693995+00:00 stderr F + sleep 30 2025-12-12T16:36:59.116829070+00:00 stderr F + true 2025-12-12T16:36:59.116829070+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:36:59.118587784+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:36:59.118587784+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:36:59.118795010+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:36:59.121655832+00:00 stderr F + file_size=0 2025-12-12T16:36:59.121678942+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:36:59.122914733+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:36:59.122914733+00:00 stderr F ++ wc -l 2025-12-12T16:36:59.126794231+00:00 stderr F + num_files=1 2025-12-12T16:36:59.126811901+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:36:59.126811901+00:00 stderr F + sleep 30 2025-12-12T16:37:29.129447274+00:00 stderr F + true 2025-12-12T16:37:29.129447274+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:37:29.130819899+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:37:29.130943152+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:37:29.131245669+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:37:29.136452740+00:00 stderr F + file_size=0 2025-12-12T16:37:29.136452740+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:37:29.137104077+00:00 stderr F ++ wc -l 2025-12-12T16:37:29.137104077+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:37:29.139839485+00:00 stderr F + num_files=1 2025-12-12T16:37:29.139839485+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:37:29.139839485+00:00 stderr F + sleep 30 2025-12-12T16:37:59.142807258+00:00 stderr F + true 2025-12-12T16:37:59.142807258+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:37:59.144859519+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:37:59.144886350+00:00 stderr F ++ tr -s '\t' ' ' 
2025-12-12T16:37:59.145171217+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:37:59.149275130+00:00 stderr F + file_size=0 2025-12-12T16:37:59.149300431+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:37:59.150871140+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:37:59.150871140+00:00 stderr F ++ wc -l 2025-12-12T16:37:59.154695406+00:00 stderr F + num_files=1 2025-12-12T16:37:59.154695406+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:37:59.154720007+00:00 stderr F + sleep 30 2025-12-12T16:38:29.157408511+00:00 stderr F + true 2025-12-12T16:38:29.157408511+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:38:29.159171395+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:38:29.159818462+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:38:29.159818462+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:38:29.163512784+00:00 stderr F + file_size=0 2025-12-12T16:38:29.163512784+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:38:29.164768596+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:38:29.165144945+00:00 stderr F ++ wc -l 2025-12-12T16:38:29.170369057+00:00 stderr F + num_files=1 2025-12-12T16:38:29.170413168+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:38:29.170413168+00:00 stderr F + sleep 30 2025-12-12T16:38:59.174855629+00:00 stderr F + true 2025-12-12T16:38:59.175049664+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:38:59.177520736+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:38:59.177954097+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:38:59.178115281+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:38:59.183801874+00:00 stderr F + file_size=0 2025-12-12T16:38:59.183919267+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:38:59.185729972+00:00 stderr F ++ wc -l 2025-12-12T16:38:59.185729972+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:38:59.190707257+00:00 stderr F + num_files=1 2025-12-12T16:38:59.190873021+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:38:59.190941563+00:00 stderr F + sleep 30 2025-12-12T16:39:29.194076480+00:00 stderr F + true 2025-12-12T16:39:29.194256985+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:39:29.195928917+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:39:29.196068790+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:39:29.196068790+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:39:29.202763318+00:00 stderr F + file_size=0 2025-12-12T16:39:29.202867201+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:39:29.204250956+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:39:29.204565454+00:00 stderr F ++ wc -l 2025-12-12T16:39:29.207948319+00:00 stderr F + num_files=1 2025-12-12T16:39:29.208043241+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:39:29.208079302+00:00 stderr F + sleep 30 2025-12-12T16:39:59.211911674+00:00 stderr F + true 2025-12-12T16:39:59.212012616+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:39:59.213384491+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:39:59.213852552+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:39:59.213852552+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:39:59.219837353+00:00 stderr F + file_size=0 2025-12-12T16:39:59.219896334+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:39:59.221116075+00:00 stderr F ++ wc -l 2025-12-12T16:39:59.221237428+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:39:59.225710790+00:00 
stderr F + num_files=1 2025-12-12T16:39:59.225767772+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:39:59.225794582+00:00 stderr F + sleep 30 2025-12-12T16:40:29.229998921+00:00 stderr F + true 2025-12-12T16:40:29.230150204+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:40:29.231811006+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:40:29.231811006+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:40:29.232146414+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:40:29.237345345+00:00 stderr F + file_size=0 2025-12-12T16:40:29.237466698+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:40:29.238879834+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:40:29.238879834+00:00 stderr F ++ wc -l 2025-12-12T16:40:29.243471739+00:00 stderr F + num_files=1 2025-12-12T16:40:29.243624193+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:40:29.243664904+00:00 stderr F + sleep 30 2025-12-12T16:40:59.247649746+00:00 stderr F + true 2025-12-12T16:40:59.247742548+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:40:59.249248766+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:40:59.249349359+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:40:59.249584655+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:40:59.252487688+00:00 stderr F + file_size=0 2025-12-12T16:40:59.252538939+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:40:59.253543044+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:40:59.253732429+00:00 stderr F ++ wc -l 2025-12-12T16:40:59.256018836+00:00 stderr F + num_files=1 2025-12-12T16:40:59.256067378+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:40:59.256093428+00:00 stderr F + sleep 30 2025-12-12T16:41:29.259013302+00:00 stderr F + true 2025-12-12T16:41:29.259195546+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:41:29.260973531+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:41:29.261234197+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:41:29.261386031+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:41:29.265105635+00:00 stderr F + file_size=0 2025-12-12T16:41:29.265221758+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:41:29.266910670+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:41:29.267132646+00:00 stderr F ++ wc -l 2025-12-12T16:41:29.270530171+00:00 stderr F + num_files=1 2025-12-12T16:41:29.270530171+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:41:29.270530171+00:00 stderr F + sleep 30 2025-12-12T16:41:59.273639922+00:00 stderr F + true 2025-12-12T16:41:59.273639922+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:41:59.274860612+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:41:59.274979285+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:41:59.275096968+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:41:59.278739130+00:00 stderr F + file_size=0 2025-12-12T16:41:59.278739130+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:41:59.279696524+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:41:59.279821207+00:00 stderr F ++ wc -l 2025-12-12T16:41:59.283879049+00:00 stderr F + num_files=1 2025-12-12T16:41:59.283951791+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:41:59.283951791+00:00 stderr F + sleep 30 2025-12-12T16:42:29.288634911+00:00 stderr F + true 2025-12-12T16:42:29.288634911+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:42:29.289950854+00:00 stderr F ++ du -b 
/var/log/ovn/acl-audit-log.log 2025-12-12T16:42:29.290094248+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:42:29.290419716+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:42:29.295396241+00:00 stderr F + file_size=0 2025-12-12T16:42:29.295421621+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:42:29.296778416+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:42:29.296892518+00:00 stderr F ++ wc -l 2025-12-12T16:42:29.300899099+00:00 stderr F + num_files=1 2025-12-12T16:42:29.300925490+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:42:29.300925490+00:00 stderr F + sleep 30 2025-12-12T16:42:59.304116575+00:00 stderr F + true 2025-12-12T16:42:59.304116575+00:00 stderr F + '[' -f /var/log/ovn/acl-audit-log.log ']' 2025-12-12T16:42:59.306101034+00:00 stderr F ++ du -b /var/log/ovn/acl-audit-log.log 2025-12-12T16:42:59.306312710+00:00 stderr F ++ tr -s '\t' ' ' 2025-12-12T16:42:59.306545296+00:00 stderr F ++ cut '-d ' -f1 2025-12-12T16:42:59.310159166+00:00 stderr F + file_size=0 2025-12-12T16:42:59.310292700+00:00 stderr F + '[' 0 -gt 50000000 ']' 2025-12-12T16:42:59.311322646+00:00 stderr F ++ wc -l 2025-12-12T16:42:59.311322646+00:00 stderr F ++ ls -1 /var/log/ovn/acl-audit-log.log 2025-12-12T16:42:59.313471280+00:00 stderr F + num_files=1 2025-12-12T16:42:59.313528981+00:00 stderr F + '[' 1 -gt 5 ']' 2025-12-12T16:42:59.313555152+00:00 stderr F + sleep 30
[tar entry home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovnkube-controller/0.log.gz: gzip-compressed ovnkube-controller container log; binary contents not rendered as text and omitted here.]
vjA\¢"Mô¬>”õðõîP1ŽEmQ,Úîµ…œ~wèfÍ—lÍè)†f- úÄÆcÛæhþŒxùœ¶È¤BÚ2‹XP¾œ]Óè‚É65ôeþ;¿¸(ûÊO×–Š}3p|~C«'._ œÐ BJüѧGñi¾]Lg¢76g«Åsþìâ *tŠ4«„siŸ]ïçäÆª½LÉ‹žÎ‹1kjÇ$åº …˜mMºµè2•4*w @ùÞˆoÌ‘|Õ%Ý…`h2ì–…­Á&¿ÁÊÀ ç±b\gþgíÓÊd 7ø˜ŒM@±±µ•ÇCˆÀš$t¸Jïk}¬L%œ@ŸdÚ(¸D2âc¾¢8Â*­hwάg9µ•®e*Tr ×:VÂ* ­pšŒ}^ÄÁÀU0P‡ZQGÝÈä’€óÀ­¿-ÑÝè3‰¦‰–›E¢Ç¿«4»le§éÇ`Ïœˆ-õnX¯Šù°=ÍnWÏAËÙ½xoŒgŒ¬×p¤¨ñ b$R>T¢5áí§…Q­º^Í,$u,:-Œ—‹Ãg¡Jy2–9sÅÔÀh­GØå­zÀMyÃh Ž7|ëÏD¬ÝÖêÁ†ÆôŠz]_Ìï…°fN†'còûýݤº¼žÈŸÕM›¦W'õ￉¸\ ?<øf= S²]g sÀêrbì®–¯xáŽ=jÂk²gìU“L›ðDÕêèØ›6Øv{‘s9÷9.õ}WwaŸ¹¿ì²Õî5ýåÀ Úgñd#L:‘èÎhð0 ¦ìŒü³œ?Ó»O–ï5EdçŒÍYÔ™?&Q&꟭©dßtâí »ç¨—N/•rš†°9ïe6Gêê²mÒÇtÕ¨ßâuü¥×“1š4 Ÿ‘ë°,ͰA±Wc$ê°8¼Wf®}Xn0_Tå“qý*¤ÙЩ¹ÅülzÙUñ¾¢Õ›ÞiýÏržò™]tÁû½?èB¤Ýf…Cõ»×îÖ·mª}k½ª5Em²*¶ìŜثò¶§õKÙÌwV}ûœ¾J&#ç5xÖCk-ê42rDCë=¹1ŠõX@&NÅÑ£älSr%ânËJy£°äá꾬æ>œ χ+¸å¸:g´±õÚK}I‡jF¼tÏÞˆƒFg¡ FU°"¨r*©Ò.‰Š „°½ðA"PWÂvŒ£*lÎiªGÿ2œé•¬‰rpà >%Ã<Ù³æj¬Ú“g´Þqöã=;díÈ`ÎÞ&×ôî/½¦Â•è±€0Nº£þÙ­þ16P ÏcY¼–Ù¹8Z#Fq9xøíÝM«ã—µÕž9â kîÏ4.ÇÇo} ØÑŽÎõ¤:=8wñÞHd`k}9LmÝ(²T¥­È¦C/âÑÅí!,æ9Ö£²^Vš3Ï iX%†½ Z€— ÷d,›N¢eoÇæ*¢5$de\Úï)M±1]k{˜¤SÄBŒ+WXûrñ½khtKÞ ìpãå¨D;ZѦ`áÍ¢+ô†š¬Î¶ ‚ ÆÚ^X¬·œIÙ)ëy¦™íȤr@hsÚë·x÷k3½½êWÖH„¸ ”5Äë¼²XgYÒ.xS¼ub‹Ù‘ëé+8 Ðzîڟà¦qm«Œ£ZƒQœêJéPAl¨U¨·§ÙJ¹`xK¡„’ÉÆxb9—, bBp¢ˆ„’åÒú†úIwÁ°- wY©÷(l+l.{óÛÔ6tê´àjnò9Àgù’ýÙÚª ´UòªJ ç#}AU`…½\S·Ö%QRöÍ™ÊA'¨wS¸€’Éëf;×Îrk#Qc® EÆ[]ƒ8me-ú(o"¾0·¸ùõlµ—PkÓ['½¡†QÝîú,÷líí!Zã À´°ù™)^Ç»£¡9Z«˜áã[‹#ºb¤ßºS›åçÂ>´ÛžºÆ6…u;wìÒó¾õ¬#*›òÙ&gU±ø™­¯)Æ)¤øöJh8¨+¡d²^!8´Ds¢3Q>ÛŠkγîèp…I¾/7¦ÒñâÔlBÇS§´ÞöCÍ£8jÌtãÎÊ<8£O^„³€ð"¬†ßeøpB~M¹Ÿ¼A=¼ææB'ûá—æšÓ h¹k÷Fþ ë:¢óJ×^BŸ£ ®IJÌE‹© U¬«íÉ1›Ù5Û|¯ìM8Û¹²@&ôŸ•+Gשuß磔Œód$wæV®´ƒàŒAB^Àì%²* ÓÃ&z™w­äl—΢8z«f¢âÁ”W8ªž­ªoBà‚‚ä޸†7ÌfJ'·6ÝËêå¸xÔéºy–™É!i^•ÝTοù†0ì¸_@éöÔò û“àbš*61©*U¢„Z1FPmí¢Cl¨Òf{KÚj¦aÞ£‚.%”L6Á 9áÞùòÞ;BXÞ†~²)·f. [ë]nÅëÀÄKÌ’-([áÙÐ[± ;*ó¾Ð0h¨ÐûÂåû³ûeÉTMªenùJ«ØR­bÓV\7äø3à;§×l SPêÀÛ¿(º8Þ¸.æÉxÍ›ØüX¥%¾u€nÙâˆõÍbKJˆöf#mq–Ë;8qCÊi 6¸„n—ElŽzépô£†¥€5j¾IS?—Í_·7ì%šŸ òI!7yçCkU¡I j¼ F×:}ìUºû7ÜÞðÉè9¾d hvÐè• òdKе#Êg—ˆÅ–‹å‚ Ýàærý¨ÃåKª÷¹j®önÕ‚çŒj×EۇɖaòþÖ`Êo‹Ê¯˜•¯“ U¼½É.ï 87¼ï4ÔË𩵔%×Lª¿Öë^`ÕY3ÊÀZ÷/ù²£KæÊ¿Á"Ä×—+Ó½¨tNÄFåÁ–Õž½°œ‡|°<ÜŽ^®{ûÃQng%*º-¹†‹St¯ÞŸeº‘*¥£õŠÈZ¡A•’H W-m¯,3 ¡€ëÂp=ýJ&£y ïJ[mœu!hòŒóa¹ ¥0i/ÍÎ9Ã4³ÞåÑé£TŒTÊ¥†ãÐÝ[:Àt§ùüµÎÙíOQäÆR£S£"åÑ[ÚäUðU[“w!&þ † ‡•P2ËY{;C`\ô¤ P °× ³¼~ÑêvŸ37Û{ßïgò:ødÖJ*ÑQ¬·.Ö>GwLåüÈÒDsÆ}±¥í/r¯ à±5o½·«Ÿóýuè´öößwäxõWrjvÔ{ãÞˆsªR-òc²"¨AU.YUçS&:¬Úíåæ•ó+‡×œˆì7œhO¤0œ\2Ä“M²l^+g4­Í½;ÁðrV(½#„hÐP»¹£¢OŠÐ Ñ°jþ0HìY€7Ø`6wµÊÊÁAØå®öQ#5rÈ×s oš²óË‹2Qºìjœ@\ ’ë°Ð6¬Ó°¥k'¸·ÿÈ iT÷ ¶~ÖˆŒíGjÁ9¯ >ðêî¢7‹ÖÖöFc4À^G6J¢ì¨ÈSP¶¤’GˆM¬m¢­å“ ci"ÀaƲ0¼P]BÉäµ3žÝ"ƒhò2³fr"ÚC§¦·Ë×Чé€J…d“B¢Á®c/וöüèë 53\&'Vö®¬È™ñ OFŠ@KH²kcJPyÚh‰áOL¹º…\†Œ+ÇÅE†Þ\ìiíÄ¢s–`ºÍ—4TµA.Ù5•k£ªàSRTµN¶Ò©F±CIG¤í©)$]°+Ì燋š—P2yÝlç^^Ákc=€¨:ÄAK®Å™74ìË÷ÃéåuMŠÇ ôß/¸‚¡³\ùÔ„Üt›ËÍ™ä“/¾ùñû.þþÕ·ÿ#¶êöêãÉ_þ¸¬Ïáz÷þõóëe#_eZ öô¤þýQIŸ[ÒgüýéÉ4“x§'×2â¿6ÓûsBÏù{õqÚÈ· OO.ë«æBŒÌ¹>=¹½»¼¹»œ~<·N/oO/åæ›»éùWß~ù·¯N¯ÿ÷âþ.k„:ƒ3˜hŸ­ï§çŸƒäç Šô§‡yƒû >x'Aù?l€Ü¹£—ÉrŸ^zîLë­“a úg2rWiï-”ò¤`çUÑèÇWÚg´˜uÐÖ9Lj®ë Õ ¿«/¨Ø1'ÏÜÚFèAÏ[5ܾ×Ú.±q?þúîævû³ «gy‘8)·â§à(iÉÎGGZ8„¬Öû¹ñZŸìŸ,KY˜þ¹Ù6sˆwv­Q ÔÂi¼ë®¯¶íûùóeâw öÃO7·çÏ*à›o¿ûz^¼>°¼O`þ€3À{8›Ú=<ï0¬Μ_/¬Ä»`Qçov¸xáZ‰k¶åy¿×]c4Ã%`~oš÷1½où½±§ÓÛ‡?8`½[/Å"Íc| Ê2íË£KôžOv[— r£ü›eo¢½‹¿œlî— 2<dz·uhò¼Û_§¹!V–«vÁ³V˞ū½àM>¡†'/r~1à´É+;W7±~hõ/ßüøõÅ7?^|ùÃÅ×ûî¿~úùÝ»Ó͈øVEt.¸V/;~;€kV9|¨pm¥4–àÜÚ‹@¥ÕÜ3Ì&åŠd÷4òŸ€Æpà4vüÍ?™îÉ|6B{Kä£ ³%‹{I'ÿIè :_­n÷˜T÷yŠÌjY®cÛ!é5÷í -_J‚Ü„®ÞÒèïmè Yž[•õs㳑ài'Q’FÎÏ( Ak:„Q?]ó>vj6AÝe^ ì!ãq¿oíd9× í˜Xœ:™èü ñÜPœí6l€½]gË!ë¢ ó¨Ö`êݨßð ©7ð…Î g¤{Ö•,ÙБsç¬ =Ë•0ŠR=7-uç@{2V¯R0izq?Óæü¯Íýô¯Ó»_ó/ŸVoòÎä«6¥zàùœçö OtwpÚ"¬Ð«àa^ñ²d¶BÝEt×\myÀ6ê †É¼¼xk<Ó²Ú mGír¨ûŽam¸“'ò8ŸuWŠ´[×—uöizq3üyýú·›úâq€Þ/{œ¯cç9¬ØAŠèºöA^0 v†Ô¶óÛçп}.¸M`fN…w,„|šfaŸ±ü³yŸ}¥oÿ~ñï?üçÅ¿}ÿÃO??zOŸ~ùþ»/úùt%æWÈ­—}ÔÑN‹©$f!g!•D¿[Ó‡²ë&­À†Ëy_¬¨¼ìr +-¤°ÊÍïO¾èW@D~—éž1™½ 
fï"y×ä÷û»Iuy=¹ù絺¯ÒôêD©écÞð¹¶òåúF]5±ÎEƯ¯>žüÒLO¾ÿ׋ÿ¸º©âÕÉÙÉÍícºqÎ}îƒËœî`[xF'gV÷ŸÐ±šœýgaŠáð¨:éß³ó96—é…$¿šÇé7./¢qg'åîo®š5жÍ]¾?‡Ò›Ú!‡¨œÅ¤¨­Œª¨6Šœ(+¬´F [Ë»G ‡5óÜ]P£€’Éx.˜Dír³õÀ޽EcC.õ¼T„Cø´—hGÆé¢Ýa•ˆ£`’`Yö\`¡Ñ¾yi[ÏUH¼é|†Îgê|Ö»áÓÙ6ÓÛ«üëjí\¹?9"¸ä#+$ž$nCB• ¸DŽ[íù3àN?\C±„’ɨ¹o¬ñÎ2äÆ ¢÷_(»ô F?>¶øPùœ]Q|Q3õ×ϲ¹Ið%ÐØìÐx•ÈŸO‰8­!”pbà‘…æYHåɇuy t(±ØmʼRˆ;,Ò/¿ÎúÐß ` TÝ“®ëZ—gæX‚PÚ4Çoì³ÞmŸ°¯ãÝÇÂ.aïñ ù„¢Dt ¨(V Øê¤ªuòšš6Ä­)‘Rž3ÃíK(™Œå€ÜŽ<‰0FZiÉ!ÝO²e²®€d½Ë"…G‘:‘²Ö™‚ú5Æj^¯/Úã,¬¨ŒD¹GE¿})Fiö‰ižÊøyïWTH*G‡|ÀÝ W·éÚ ¬,3¶‰ÕºQ䨈U« ٠שݢz2 XɵJ&kÏñCGëœ(½%6؆0ØôO³6¡„0ç¤ÝQ6 Á“ölLY¯¸~D`ýŠâ|åH¹¶Û'Yæ}í “£õPëèU¬=)ª0(n™•­*O-VÞú ˜ÜˆîƒÏ%> ¿Nïì¹÷Ù ’¼š'1hËÞz-f‰¢4E¹Ô•Ðç7X‡þ·˜þqyÝäÆë£q9(¥ðŽÚd Ú¤”¸¬Z|‡s‹+¡QΖAv­H²Ï~»3°¿*|9"¤ „Ÿ”­;ÓH¶¯€•[3ê’:èðO-`‡dÍ®Ûe`RÕé¤È·QQ#z r6)¦ +hêèÞš2 òÆA {Ù0hËK(™Œ˜w‰,Ñ‘6à¢wH šsʙٓ2S þ(-Û‘–@¹Øù0+q'÷ëw/ô‡*V¼pùþ,¶St2]Êí'Tîí¤þŸ½gÛmãÈòW~$À–T÷ ëM™&“ ÙÝ·€¨«Ä1E*lJ¶²ðgíì—í)’’(KT—Ín‘­ôƒMª»š}êÜêÔ©s±žsä±»„ Y<<³Zp~U0“Óý¹ Ÿ`iδ%­S,ëêFßÌÔQ2Ó—ˆšDèÕˆÀDJ" KfÜ-˜»÷´?Ýí=)‡ÉìuTu¿´ßî4¨Î¾Ý€1‚…l¶“ÝJx>•¢±Øgâ„Âd9k´=Ó*>—²5êÄ ã þ |Ô+€Ï¨‡·Hób¡àŸuIiÿìÞè¸%K$(Dw`˜Úˆ4ìéQb\ˆQ¢».E‚å®A¼Çhó®d&§mpBnÂ¥Ê çDs °n·õÚæÖÓ–`hð‚ioµÆë>Ôe¯×&^JJU¯-3ÅñÚý£Öë"?‘ÜòŒ-Q œ{ÆÉ~Î<ë• „÷³g ÔXs*J %–9» ‡\2zhëâ:Ùy¥MçœiªÖø±]÷Ÿ»Æ/hL Òöz¥-Šæd ©0-ôÙ—®B›]7Ï™Òäÿþ·ÞBºWvê.6Úl½•9á”>¦ÎeÇ ÀÜ ÓL ¶ó5„Q¹28`‰|âÚ9·³¹“ìx¿ÂŒÕlM(ŒåWZ<¹ ’_ø(˜¿*@lʰ5h•ÕÛ]½=“^¸² ò÷éÜÛiµªÀô^þfÎFj«71ÞU^K1¥©löØ(†[ÿÇül íïÏïÔl®‹;vvjg>.Æg‹ùÕå…ϕܥ>"A˜@œrÐI&93ÄÃ=ð§êvt 8Qã ŠçðFî‘ D ,EpBy"¬ù |¾ý3^w›ßVàñ£Ÿ^…8ž\Ö£­>†h3ŽËó\ðu„íHèü0£ŒM²¹?¹ä«û÷Åo67~¿Š‹ (Úåâ*n.Õ³ùür}¡¾r@Ãу¾*­çÔûk°f MSBÜ4iÊ’IÁX–pgzU1#DÁ¹£bb¯•|à²?3—iª‰áÍ:WS*ºÕoãŸç‹çlIà@Pô™?6U·X#÷U»\Ì?ÞlèoÍ(E ~¦ÿmùH$OQãшTû­åÚµ£År^#X%à=ùoØø«\ôs;ÚdiÏ`ÍÆ©—.goe»æp°1…bÐÀ Ú{d£"(2ls@†p¸»b 7Üh6žô³  1›ù71ÈÅzгª© »ÿ³¾<ʬ×7` 0™]¶¬þÛN¯â#MV†âOŸú$ÙL %qá(moý¤ø R\ 2Ôª qM•jŽcÒœ?Q\—m×…‡ó”W[Ý•ƒË_¸÷¸r0/©¬%V7˜Yܽkm–­^§(]¿ïAåà]×*×qYýûäñõ† O:ƒrÁáE¼˜/ãÛ7W³ÉÇÑéµ]œ.®V¥ˆó¿Úƒ;©‚;g£¼91B«­bJ-­SYçÈ&rÿäË'! 
ð/R‘À¯:øOÕŒÓhŒáº;ßQ1Ñô¯QÚ a˜ã‚˜vC©i{{ð|ý`oÆßÛºžÔw”óë¿Ç+'.œºÄÒàçÒÃÊA!é ¨."íÊaxg ¢Gn‹ýH»VçFGLR2W¢eÃË•EDKm“tTÐÐ59²à¼Ð0­ºÚìý²2­š‰gJo(ûH°Êö©º°O96®¾§Ê,¯vóO™¢·],¯F„cÜzB1$PAƒAñÈ$c“.Qƒµ–Z5† A ü¬†³Î¶tk6èR—¡øËÍQû“9§OÄ ë]p4¤Æ%Ïd‡„3 ]@8IöÙÒ šw¯ý× ÷«÷ªAë´¨u$Ø Ð&­#1V­Û€ï¾ûǽ3iÕ˜gd§ÓùØ-NW…É"®¯/çhš õL˜Áö±ÁÄÄåå|šÏ.G~û>)€û­ ܽ$v:?%;­#ˆ‰0ý;¾´~Šà©3Ðöðü È-g× »Ìî405RÛ‹Ëiæ&Û\Å®ÆëøÈÏ–OWh˾4±c1'•û€¸i2ÂGJLÅ):”E¨Íò@ıyÄJ÷²Eî“3Oâ\ʺíYŽiÐYƒÎ:ΪѦÆ¢›5†d¼cɯ™2~ý×g 6ÎçÛ*¢]–2 v'l8,EÜ1t‚oŽè£HŒ2Öò–\” ßÐ#óRÔkb­N ¥§ ÕýÚ7¨\WÇ4PaÕÆô«—”jàÓ.ø”0£o¶ç #ü°!PŸûx¶-ÑÝ–Ñ×@´œùÆ”ÔYäCtˆK+±– /œÕ60’¢9òn5Èí]SŠ{e}•NÉ— `dðHÃ^£(^ÒàÆaÖϘ–nˆf\Š6r‹(fåàÈY 1QyT‰Ѩ:þ˜–WH ¸*Us ]·•çT¯ô×*Ͷ»q‹I˜_ÇÅÔÞ¬´{æ`BÏæÙ•7©—ë£gÓà÷“´Ž­æ_÷pHü[un¯ã?W«T…üýo?UkWÛ«ª/O™M6Éš[+Õ)e›Iü´žÄfÈOïî1´ t’ãœxèÝ’|¶¢ìÊÒCö’¸â„&Du„4µ ¢Â>V3Ý옡²ƒªÈGÂb~¹u&’ó‹†S‘ûÁ}ÜGâùƒ‘ÿúþŸ¯çÓ´£ïãìæ×wCn?øÄo­ÎŒjû¨dž§Kf+DËü»ÕŠ+)–˜¦15X´|p‚Û98Á-œ°äÃ1ê â8QduHhf<£&Hn/+‹— ~8Œ°¼Ë¯þÛ¼^¾ûåçf¡yrô‘ Ï_þR}óMe—ù|œ,/m½u0ùí·Çy6Ù²ˆÑd¼ †¸q`ã|€·jÙmU"*ˆsk<£¿ú÷YöÔˆƒ}¡ŒÕV„K•BV3¸6Y#ÊU÷ƒXiï/c¦ý [³É÷r"öÐâÛ)Z‡4ùEêÏ`ó)c!¹Ž‘q'ìGÆO‚rRcwpaaXñcxmaú’U騗¤'‚Ðþ«Pò„q) ÂÙçÀ#ãȺDÓš'±&"^°È–çõðç_W²U½+ˆSduß.²îF)ÐûÒGF5¸þänΩP´± ŸäÛAè¤ÞWa™_>.ÅidB²„‚"¹³âȉˆ‘òÆ1c¬`”|z¦ÃåùM½zÙäÀ=!˜žhvB4®¶îÔo­tVÛU¢ƒçÀô‘ šF™m#Æ!™àxtn5‘§ê‡N?Ø›zŽë™Í¡f›š¡ð-—c»É,¢Û³8^žÃ s>Ÿ†Q®‘›Ç#¿DÌgqÔzëqž!ˆ[’“¨h˜µ"rNL‡Â¿N „…Ú^Æß ƒ`»`hXŒ œÑ\3u¼UÖ?«°ü/ôÃ/ã#+²¬$•–ƒB‰æÀ½#KE L‘Å Yì®{=PÏ©J¨¬L3 ËPüÅUN1è@s^1 ˆ ’i´F„“l˜"|wç’ÜÀ~ óÂr¥ÒQ<|Âá+0†0.Èë´ýRoÏJ¼/p(ùˆãMâíñ‰ä«ü&"Sà0±”`OjvÌ ªÒA%c>„V’O‚ùÝœ1-cý-ìZ†â¯»Ãه儤ÅR)ƒŒgÑ×ÈáŠâ’ŒÑÜÝð%W–]Ûþ £ÛM>‚ìM¬7Åv“Ì,Žòw=bⱜeFTËß/Öí2¯IÉeJ‘)Ä™5¹õtŽ‚Ç“òG€}NMÿÕ|ª{&>’U Èù‹©ù^‹J50j'Œª‰Æ²ÀšWZRÏod¶\Xa”r(Ú\ä[¬V6Ëa?êt‡Øg”“ìkò ô|ª{&>‚ˆ=¯y)=ßoQ©Fí‚Q5áÆ˜æó} 0…צå&uÁp›¤1H&[4®sð†M(ª fD*›:D¾PL” Ÿ‹þ«ù2T÷ªMž¦˜ë‚u:úŽ6}””jàÓNø” öH3Ÿ2soÍϯëÌ¢$7ÕùëÇè¿!âÛQuzU/NÝdv ·Ñuí—Ó ¡åo‰¨ê¸¬~ºŒ³ñõæDóäañüz†âÌÛËU½·YM]Ç]C&—o‰¡'D‚éBå ,A.âÅ|ÑåbÈœÌàîµ¾…ÍK®=óhtîîsw<„«½UÜÕ,Lávþïv‚Oüä¤^ÿ<5{»:4äb>›Û!;î1O~¹˜¢v²D.¦ù""ŸS“ŸzaœeÖ@Ó”+ÇxëÏãŽR U8CÆã›<æÍ°Å‹éä^»õ{è½{K@³ %>p>¯—9Èâí¿ðov0ÈH"if6#[wþóÝ6FÏ>Hlÿ<ýîN>¡Û ;¸»¾ŠW ~GŒ*©‘Ä`@U·ù«Á­çn 'ÛŠùáñáÖèu‘›§‚.²‹ÓXo2Ê·ËÑàuLDNo9^WyÅ…E¨¸F\x…\P —OÉkÛBwößöä"ƒ.pQ/\„à^í' ÖL`S@¶ýÂæAï› Wƒ˜µ&f Ã;ˆ‘Mbãž ØæXÿV}ÌÖ¨ÃzÐŽŒ•éÄßSïËlUŠ‘’„œ$9 Íbd‚ ìs‚׆{wHæúèr·€(“­φ‚O H–û´ãš^àA‡qòKK¿na¨Vl™ Âf5Bè3…Žz «9Dtì¥GFB('RZ ÚÔ±‘¬„(÷\H¤×ºº ËýÎ8iŽ}Ë4V}ÑÕ/" ÕÀŠ-³bî½.4kfE£{mW{ËC)­Lóä 2BÙ)î”!ÔGãŽäüt¯uu–{& B` hÇt_tõ‹C5°bÛ¬¨¦Í•uURë³®f{’¤EÃZ dŽH¹LA ¨Æ]"Ùa ¬ž«ÒÚ]]†åž H)í„ì‹®~a¨Vl™óOlñ“¤×þj"0KÂ"íb>j Y0…ñØ9‘¼äŒÉÓ~û«‹°Ü3ÉIa¦€v¤7ºúE„¡X±eVäÔpF³íaœ¼ÏQr¶ÎmÖuXîÛÍ·µéªM«¨ƒ‹W³P)\]ÎC]-çù©ü±ˆËÅÍÏpmo°uØŒüV½[C  de8«Ï'é¾›U˜Ø³Ù¼^N|}z×áê<ú÷¨ž_-r çt>…>†ë?ăi<5û]Sc” Bš§ÆðžS»°þ|2‹h‡Ö À|qºã:’Ê› ´w¹3Åy÷3$jÏWõéú-þ8?K_4ÃR•Mé~@øz>§›O$yàܳӃ¿4 ë4—N¢‹Åb¹‡˜0`Á’Ù°=é1¹°«(Ò³I `žf…†¼Eôã¥ÿýë¡Ï-nŠ —û ùâ}\^N­§[ßï "¸ .Îâ×ç ×ý”8nSªm¸˜Ô5,ë[ Ë‚3\jãÐÇ÷µùøõ“XÉ e¼5Çd|R'…faöqùõS”DxÖøvéõ½Yqù}p ýëýÙïáf!÷VÏ·¸°3P ‹{bBRK`M‚æœ}{ÌYKüÿì]ënÛÈ’~B?çÌš6ûÞ-`€Ídfvd3Æ$sÎYÃàÕ¢­[DÙNbø±ööɶºy%KbS$½{p‚™$I±¿ê®®ª®îª²¡™{}Ó|±‡@¨Dpé®&«oÉ+ÐźÑuwvvc¢%üC¼2bd–ÞäãT «Ýc.ã,ò%çîÝêæKúcÌŠ~Õþ ú4MâðkÖF5¾þÚŸ.jÖŸ`I¢Úô„ o‡§N~Dª¿L/jŸë$Ñ®•Òf·Éãé$)$ˆ I’ô³©X3]®õrÔ üVg]d·óH¼!9Ï¿_Ã"VSfE~±ÐßÝÍm]!aº„{»Z&«IÚ\‘L?Gz±Ú7#³{A/Ÿ¤L‚ÏÖŸLõ¨bV4‰Äã1}×ô€¶ÃX")üqçYts78ÍÔó‰K?¼ƒY¼zÐÚ.¢I("É…;M»†°çY‘ÖÑÄÔ`zÄVðÃ8»€¯füÌ‚†.ï¾àW Av#A÷}êõ×Åæ£«Ën0&H‹›lÆ×è\ÙÐH¯kÊpzŸiá½}yË’‡š@(7Kfêát1ÈŠ@ñ*Ë™Èg`K}~¼‘Ëá‰Â¨íkfLiýµþz±÷ªÝ<Üu`DB±#âî«™­ñ9xÇeBèü?ºÞUßf·ÃÓF:Šòhžm«q…3]$I¹øQMæ§Ó£¹n¦¡¹»K-÷¤i_úðp;šwpEçá<÷A¾muןÃϧ¯Ž(#ž´!‚¹:¼¨Ö°ÞK ”T¹|6Én‡'Œy¯"œ CgþøeÙÁ4``[Eúó.…‹Ùì~®ÅpI[æòÛ›ö dˆ¡÷›Öþê&^»Éäîvz:=œKlCï¨3KÓ&ógK½NݬÜpyÀ´Œ†ÒM¾Mï–¯@#í‡ÆBF\ì|¯éP*L"îF’-³Ó)„YIŠ®"КFsxÜÕ”Ò›ÓaëDÌvWÏÏ‘EÜb:«Ù4a”P)°iîÂG…_2ÑbÝ8€v.ÀôQa@£ 4n†U‡µ(6iC“ì_`#¶?G<E,ÊÊjX/{R¥¤žXÜåúæv2 ç¯@„êÙ»¸{ÁA&4aAäŠäöKæR Ví4uõËpz?wëÀàQZ0Ã[ºì1›¼U}xæüeZ˜hÛ¹—×añÆ”¢ŠòÈ}Lyú¸1SdA!ó²Q …«ÀuB§/_a9”.:­‰ì)¢ýn[G©v}ûS7œ§À‡÷7)(ÚÙç$:]Ü1D Oa×£`ö»îƒu6й•p 
½¸¹ÂIÝOãúmßx%bN2auªÂ¥ l‚‡T¯¹õ †,u˹zu™C¼Šx6DÐ~w‡vv…8gRD\ûGn?¯oOw¤2Æ6ôÈ>9lŸt8ð„Ëh¶2Ü(ÌT4<­¬§=¢òoñ¬Ž”̘$BšDÊOo'§cœ(bE›ìï@Íöª¼£³nŠ®wYfòæñôS)LW2³!ã}Èñj&iÕœËlþˆ_ >ô†Ò·_݉’`·k“ööÛtÙFáy64 ¯'c£¶öPQ”*aÌÍnäêó+Aúqo&–V`ñêb÷ÂKÉ8s9^=žî9f’p+ìê¨,å_´xœO~”¹°ª(Ú‘Í(¿žÙ}9?îKŒÅTÃ:Š&Ö{Ê#VJ¨kÐÒöéºÂ=pÝ 9S¤._+ÜÁDR’4—×ÏmÔì"¸½6iš0áWλµ ?Í!èûq¸Îœ¢>t9Ñ㡃®œÐ9`üë!<q«ãû\Ò¾³S¿øY–f›Äµ:Éyö2¦ ?!qƒXºŒ+sUˆ(HdŠ}êqN¥ÿ|õÜs±®Ð÷hD®ò94F|"ÊURQñ1†‡ 9„)n1(õžiswPlãDë¹âÇŠ¦Y‘pþêùÙ9úóÝ4¶eRàê eRàç¨ÁëÅæt· jÞýÓÏ.ç;o¿6o3ƱàmVwI[Ìãë4¼žøºÀ„ÑÛHQ“U  ^Õ“%甊-Wº¦žƒyÃv5ô¸¥BÄÔ³ØÄeœzW|º\C”¼¹©œÎ©þ[ãÃŽ®6¡ ¬¯3ãl*HÜÌjk]Jkýñ«ç}Õg¡Ÿ­¯“ébå5ןûN&£PäÒI`@»’êÒëÀ|XQÅ~4 0|-0êÿÿòž®Ö–%yìºø*ËœÕlù3y¥£¾O¹ÿËÌ"ÿÌ Ï±PR4ççà‹¡ôN^’j{øQ¤· ã ƒç‡û9»ZqºëùÔ½‹¿Ž±Sl4šռх;t‘,ïMõŽZõµs]¼hë²®îQ4Þët XDcúÀM1si &oàcê*c—¼H WEDCã.+äS”"pÂÚuqë#4¡ÄG‘‹H„a â*0Kļ%DyÞ€‡9"GjéÄ:h¨ï“s¸Éé|Ÿ½N ]˜7,q!Ô~§&£²¶àÓ>Ëårn½XܤÎèdÄgÙAR2‹£]ºúc"¹¹È! ˜ßÆY â³?@Ç8ÅÊÔÁ‡šVˆ#‹U—jCqº(ÛqY#…¯œkezbsÏÑÇ4§Aí EÝëüãñE.¬q1¶ð«sU36ÄžwåüÝO >Awºð0•]µ^8Ù×yØ #çœÙ`¤5ŒG×éÚ™åø|£:@SÄ1+!júãÈl^àk}{ ˜£Ð¤iÎK(¤óʳ1*zô Zìl‘Y^ Z—î¢E‚]9Õl, þk 1œè’pÑ™éZMLÉ5ÃàTìÉtî¯÷Óiù@·›­{Ž”ë¤~°$aêe™XR/ ?֒ܺ¶E=ò²-ºÓüøh[ŠÙ8Id=ìçeù[nQþÖuS¸ÕÉ+ÍÂ?º:¬î]^õÐíìØÍt™¤_öS¦ˆ¢œ7‡o* è؈q‹kÑÖ¾"Ât§­£#fß–ªEßGóë4[L­bZõPMµüñós*Ì©žqг{A"0CÍÑb„pÌ@ ß~5I-õ‘¦i¼vÂ&KGŸBƒ¾ø ³tvžMaê^OœSy?0N 豢ᄋe¾£u ©6ŠkЛrnEBydÛ2Ⱥ"ƒ ÙŸN;ŒzÔ§àÇq Àù6Çw ¸8êO€„å„4™vðœ¨qÞ©ðZYЦbˆpÙŒMÕ*€4­À †Å‚e˜Ûi„Ò'—šIÔëEkï&P9E¤ÞE±ÐزëZj_Tý ] ¦,;,ðbÕEmT¸[aSÔ£*Mv5Ú^ä5:¥#mÁJÒ?źVC*bž° BuÓ-Ëÿ´¢ncl(Ô‘mzñd(VýÒB©Ô÷†qÄ•Û;ä쉠ÅÞËŒJµ¯Û‰•ÁÏ:‚m¶¾aÀó¾Ð¯Ãùí[ÊÓZ^öé²Ù-V0ä® †B}@ިݿ7Yð0ˆeŸé×èeÐö^Oª²åJ „¬Áü"UìÝŠ: ¶­$ãƒíȳÛEùNAJ³påEl+ióŽv¸9ò¬pSÞŸWiOöV˜%ÂTX`æ¢?Ì{ËžÀ(ˆB7oK(Ôuîmå£)„èæEúu±<v:ðž|+Ð þ( Ð]TUÚ ÃZô,Ðq:˜ßñE5³ØÚšE†\`™£²m`S&e³{I“×ÑŒ;–µlÄ´ÃöéüZÑ 0&64°•esN®AHà=îÆì©ÜÐ ³ƒ[`¬÷¤Ú¥vuدdÙq~¶ÎsÛŠ Å©´¡Bõä½iL™Õ=ñˆ°%;¯êõZa»Ä†µëeáû”ŠÖ9”X€Gýì>w‹u¦>°¢ÇG™¼ØdÑ ÁOáB´ÁÜu{ß¶´Æ)ýnMƒêË’»ËóMíÐR €-Ð> ]u’f'Tq+§øU¶˜ò*ÖP$ˆ”¬±Ö<¦¶a£òÎå*6³¢3®Ú!·—ÑuÂ"ºNG»ë…è97Þ§“#.Óëò©µ¢/£âDsT\›¶j©–_FʼnzT\Eý8Üm’òD9Ñÿ5ï‰Ðãu,Hü%]Ãküõ}æ mrOQÔ©§Ÿ«•Á›ÄSû×Ót~ÿÅ2bâƒR‹^°lž-ÑK§‘¡{t\©Ãè|kÅ_Æ"‡Pná0ÐÏÕ‚”vQ!}º<Ü+ͯ—Ø 4‡€Èòð¹$çû‰KÌ•jFÄk|òbörÓQñÒ_Õ¦îÁ•€ùeÑb-IAÙbuFv˜® XÔòÌãÇòÉΫ—ÂÊ$¹M•§¨Œ‰²°=¨‘ìó=Ø æêê~Z{},òã5`Ó¬×Mú¹šÿ«l<+­'o0×ÇN’N‹øðt>v~ýí÷¿¿ùýçô‰†d=uÃrómÞÔ¨hk¤#Ð 0£ÍÈñ§¹YS9[ggNv—.—Ƽ1ðð{PBs}åO#}3-~«žv×%]&É~º¾Ú‘ã à8G¶Óöùþ¾à îÁðiì ŽX-—ÄV_àMÎcÍ µnȱŸí Jn ù«@:r&ÑØîlé9î­óæíÛ_.? 
K-W‹õ"\€ öœø9Fo<ÙÏa†‡ÀŸvÄ/¥"²qcI?W+Ór6¼ûpùǧýí 0u&“¦ö˜„¢y.˜–Ÿ ¦•~çí-xc<š~®V³ü4>Êá㢙æ9r€ÈòS²ZÌt'aĬ ¦oÍpúd¨ÎŒÓH¨”žèÜlªÑU-“‚ÖÄi¾èõjKÇü²þ4],–[aÌZW”3Ò¼B„çjΚMëH§}ÿÙäçý{_'W)ÖÞïêÞj-Z¿ÞªxÙj™røS<[N}“/xu°}°DU³è‡çð¾ö¹N¬¦Q6öôùØáÊƒå¤øŸÿ>ˆDç&T¨‰àÎ2Âó¼g¥÷ÀD9‹ÄaÄÑQÀNàO}àÔU¶Y¤iZ@C´Žwo¾y¿3Zcä­Ñ‹b K‹!Uͱ÷D¢­´AUxáÐømÈ ¤_2N §:$æIãCzžôz„ÜWô¸&@Y°±bNvÊ”ö@5÷¯NÈ–Æzþiõ6MõNÒ>Ðcç_Š÷=ýfªOüg¼öÇO›7½lÆq|.½(d¾¹TÂÕIV]šxRDBE òX Ÿy§ ×ÃŽ'Æ ‘t / +þøôÖÉóÓåçyúôßeoŸWmŸ§‹‹l‡.ˆáɘ¢ˆÇ4!Õ?›z“õzyvYŸÞ^žé{cé}2‡{Í·'Ï‘š+ŸÏ´3Ó\òÎÞ,—Õï€ì³gøï#(¿Î¤*ÌÖZÜ_åÿ<ùËåØ):þ L–åb®Sü9ÕÁÏßæ…ï.Ç… ”#röIgv¬nýR¸íÞ]jâ I*ü&I´°þ:þï={/ü©âð¶­ïM@üï:K˜yAù>ÓÕ·O+Þ^.¦iøu|ö±?]OÞê­‡Z\ÞÀ“‹õïÚ€|S%Ã4™òw¡å©6M_½»üÕŸ¥Ó¯Åëø=4ÕN¼ðnÓ ™Nå\uÃUù£TÃ~wù@¯ÎÞLµ·Žëä•ð2ÓPýÎÛ)hþ¼ùù>"(Z>+.ÿ¬w+ÒàÞØÞù46tÅeæÛS½ñVOç÷ßå[fÐ×[Ã’_}Òl=“WÖÕU_Ì­çC¤?§ÄF‘Ó…‘…Îê* ö¼ò(ÇQ~݈# ¢Š‘Е1 ]’ƒLá¶tt\îK¡…Æì˜Ð‚Ũ…I C| ‚l H ¸ Ê$Ži{8N¸ï«Ä‹µKÁ“‘§b1’‘ ð)D3aí!•± åhûÓåÄßiEßÒQöñjíféÍ<ŽÜàëøeœ_ù yfõoHpìq¥ðæíŒÂ±—ǰü\ç»õÖ¿?\?b=Û#×)%»‚.uì/ùcìèœòûå9&ø»@Ïûá»D?E¢Ãs5“~\×ö=]‰¬ ÙÙ>!ÐÒ†×þбs0u.ôl?èÏjsVo>À½:DHb¯1¦&G5.ú(‡¶~ºO§k«5ÉûŸœòÈÕcª7`Meû•ÒŸWe£çÓ çè§—WÌöo9åG5»dô|¶q* ·Fg°l^Fd ä—r0ƵwWŸ ›Sÿào4—;MéþÖMý×oÿ˜ø¯pUï3oš¨Ë¨}­Áâm©ð^»ËŠIî¤óÃ÷@³UâÂ\z~î:þ ›Ç_µÐÍù©ã£c¿oðM—¤‰lEnæiᵤi]z¬Úòóq’j®oH—Í$ñ6$mÍQ ¯‰œ÷?=Õ?õ6*÷õ‘†®aR^Áä49ÔGðárgÖVzøçlšÕOzÜ¥óhT5¿ý_ö®u¹m#Y¿ *¿â]<÷ «ò#Éæ¤v7·M²Ù:'åbájÑ¡D…”lg]~¬óçÉNÀ HD“(R W%’ ˜ôôåëžo¶~ †'>Ù3&¿”¿¿+›ƒo¾ð?¿ÿ1õ¡q~( Œèæîþo¾øÊ³†­|p°ò¾žŒþû﵃Ÿ~›ÜýôÝç?¯þd…m®?RøñÂEƒìðð}Ur´5»þÒûÝŸ‹àb3L«÷l™@˜Ë•½[?úÓ?­?³ºöle®Šlëñù}kŸ’[¿õSÏ›¿+ÍôêÒ·á]þ‡0yKLZ2»~†òÇík_{^ܲu^ªÕ7_ä¿ùäÃsˆ]gV6¯3sÈ:[ÛBÜ«•@R¥)F„C79ôµå«“ s¤lb‡ö~v- îòÊ&îò¾p_Ü…M®ù5!QÊ*Q TØ–‘ ù®cö×vÛs˜O"ÿtùùgðgŸÂgóHçYÎðV|¶S·¨˜f˜I³û>îœÊÖKbµ¶ù%3„×®Fšpó¶¸Ïê>Í ‚غPÇ™%Ë‘BÄ$t6%.‰0â‰ÕTJsñâ#Ù$¯G|<*[óØåIß&éÕ<¦á"ÝF:ÖÝï|Í9-•ƒ@zÏgé4[š‚„\O^]“ðM8™†ÑÄ÷7bn§Øùb" ¼ˆ©xñ94rTwjfT‚> I )½_šñ­;¿a+kþKÁ¥,9UËBÉã4"I”9"£L—Å 1‘N´3Wôí]L©QþE…Z}¸ä*/IÔ¼÷šj“zUÌÀ^m{$å ¼¬Ù#IFÛÂ=ˆØê„|ä—íA PþZH»Ë”ß’‹)™ ÙaÄÕ†í}FãÆÐ\¸ÖÈN•{B³°rjB×€fáäìáòtîF`^籕Ò*}kû{Ô?"BrøO‘˜:J¤Ó‰B§‰IQ.‚/: àÍñÊÖ Ö×>Wût±‘œ›ÄdN'†‹ÄYC¼dD¨RÇ2jceý6Ce‰ $­RºˆEZYB¿©Äæ7ޝÓUhôѯæ!Ä#G‡½O'¼½¤¸s©Lû ¥Œª!Ô,æa53èZ¨¶% „ßꯌÑüðó(m4ó •;|¸¿¬Dä¡z_ˆôWìsΫà^¥Í\œMÄ*íª¢uQÖˆ`Ò´Ns;+ŒjŒ@ˆeš)=ë‹GÖDΰ`¸f£´UéˆÞuL‰Óý$£)mÞõâg¡yå:»V†EÌ`Úïatž€ bðÏ‹-hù&™hqZ¯)Ã41ŒÂd°LÈ^bÆÖ‘ø•t&‹ø–g½ï.À³.]â#¸ÖF_äÿ¢KGô<ˆrczrÕ®!à:ƒ€ËïYÃt˜cZ!ðM^é”í—Lž{çô²°wî£3Íú`›iº÷ÂØ}è À¶Jj‘PÊV6BÂ~ kG•Rˆ-“ˆðj ktsJë~}7¯PˆÄ&VK˜—8¡D¦qJ³!±ajS¦hĵ |屪ñcïþDÏ$àî}Ã*°î8¼[6elƒ¶9|ümññÏøkð´;ENµÇhßû8ÉÆ£þºDx–Åщ»DüÂXÍ^a½Z÷£•C…Q‰Ñ¢ÌWlÀòói ü§”i&@^:þææf Iز–¢1@¼ þy—xï[˜ŒœËñUzë¿O=xÞ÷ßߊ?ú9'ñbŒWr@,þvTôû•Hß=ƒ~´Ë é3¶Õì%k£Sµ—rò¼Q)ˆ_Ìiàù‘ã…*Cð8™ßMnÇ‹Ûð~<¹1í®¸’Ûª s5)ðæàa¢Ù| ïõ^"t ‹üÿ:qÌ©‰ò›÷ Ö8<ù§ô~ôë}|—¹-!³I:-f4ÿ5ü.éK-'|ÞŽJ™Yš¿£ ¯ÉkWðålúp“+ZðíÃ’½ÓÿðsÁÅ8*ý¿®Óy:úõ­ÿ6Ê$?»&yºê°Ã dä‘vÇt‹_oSBS†HL$¢æyâpá˜ðœq%9,qFÆDŠ4#‘Õ)Ñ‚³Tg1#zÚà¡e?„C67dsO6›SÊX0ÏT¶Ûç†tOˆ'*Ö!œø×ßò%^„¨¬t¦!¦!Í>ñUè WÔœ:f¢ s™uêíüqoµ®>ý}•9& •RÃ<'ìº.‡ú«ËiíϵjÖg-ØùWŸÝýÒ]Ã%£a‹ÅË}]¿‡¸^ÚDZÁ©ÃÊGwSÍBKÚSY ûü3Ȉ°C}B©D½eFç•ËXI5E`rLµÛ?PSïÇhXÃ)bß5³š¶B‚ÑF£+HØQ Ñ´b[n½F Ö6ì„Eœ0 ‡€÷DÍ9aóR6džÌns¨~qV/R©£€_ .Œµ7‹Ö’hsnÙÖCG.Iuª5Ñ©³DZ'‰SV¥ceNÓ$O›áŇh܆ÀíÒ7!5SÍà%„ ºCŠÿ¼3Íó‚'ð6ç/ÖßÀóÏ¥àÑÔÁ©”í<^Ñe½ëÃPbz/È ÌU²Þ Jå$âˆ(K]`Ë j©ï8WzMŠå;Ï»î À€µ½upˆÔ˜uu—¦Í‰hU&§-5ÖæNµO ÍM†ÚØjEdƉŒM ¥Š¦i"YlE ŒvUç=Pƒ;õ~û¡¸³2fµ&2šQé\$×"fF[F©g@b¡¢±uÔSÒYzö\øûL'Fp£†sîOrjp­ŽmXëEý± òJˆé)Ÿ†éiÛ>+®™nfzâRº G*R¡úÈ£!›{"ˆTíðÛôBoƒBú :”‘Ñ ÊoμvPàA÷)°Ò­2@fë•ôFEôò+s÷j{Ÿ¾bl/ÇV4fØ%R 6ìÌ«_»l:|"ë˜ÉíuÌi/ ³‘·Ç…옅Œ >j$¯ZÉm·áíëiåšKèÅ€YÐí©Ìk¤;~gz‡¡Ï:Üa˜Í Ʋ-#zQ¬ehEÐÌ´g|¯Q„^Ú¶¹Ê9”ŠŸ{(9¸¦Á5mi¶ÖÓP¢¥nÏÉWµf覫5@†r£½ZËö<;UÂØ«„桨xV¾àü³Æó¼døçE^NÇóø1ù(*FúdÊÎyÐgÕyÛ÷/Ÿ´ z.=âõ¦ªþüÉ×?Ž—¿Û©C/Z»·[X´¢=ITÕbì¬éÎH¦l¬Ü 8ÍaÕ«ºÎ;ºÓyÇë¼£¥Î;ÖAçqR8˜¡Ò©óŒ†Òõfé:ðÊvㇸR¶ï¾=꯭I¬÷ÅåÑm4޳WÅofóÑŸ? 
~ §éˆ}8LpšØ(S†“„º "C‘(K‰ÂÌÄYF™íž[°Þ˜š½l®ÙÐ:t3?óFàÅÊÂm.­×Ð!DÖq…`$yÛ6íoMGÑæOk:‰¤ WÖ°ÿûßZÓë¸3 Qºs͇ðš–#%É$|u;[ÜCøbu-ö…]²Èƒ©C^”Ž3Œ$ܵ{Q•­yC‚Ö5}¹+ãö¾)¤|q Þcp'™„Ȉ¥,`¾%µ`Å¢„Xß* šÔ –¹ä8HT.®%úˆ þ}G̉CÆ:d¬—–±:ipHtÙ,íËõ A“8#QÌÀ5ªµr0Ï!À¼ê6˦Á¶>ˆtyŸæGK˜²†s»‹2’¸Ò£Ã,.b’ª@I®+Ïå{š.ý±‰5-Ú.÷žè9‰nH<=$«sA/åѦ.ËÇí=Ã#CS™ÓJƒü,©vç„Þ‚í®ne\}£Å< £;ÇRà³å]:ÖÇS6TI‹}ŸöçãÌ¢Xº‘`ç‡&"ÊÞ`1Á¨TT"4Þ´jÊ;"*Ä=h÷¶å Ɔ?–ýEô9 KwXºGh7,ñVmxMk÷d¤øyÈrºC6íEµ« îwÄ„:Õªo¡ýãã%t¬UCBÂ^úòWŠ#˜Ò…ÿ¹M÷R“€=Q‚5<ö X&Fø„€ToIOÁ8a/›q¿$U;†ã¦%Ùm˜ðÛ¡ FÝ®©Iž®º™„G•CdŽå3>&w9?:1!„“‚"&ÊSÕ1ŽƒCF¢E¢‰‹dBd*J’þ9niK}Ú:mKç20X qÆg<Õ8CZÅ,‡~Èsƒ¾×£X,å§^ÁzÙÞ¦c{Ù¥ƒñŽý!WJPåÈ•,õ0ž'.;(ö ØeÅvN"Xþ„2›žÏe”<^FÍ;¡6”Ƴyâ5=L’ ÷¾Áón–ÔÉýbrëÉ~=¹ 3žÇuCÖ‚[‰r©Fò˜,ÕD µÂia-"ÛÕ¢e®§•®i¹ÝpM»­¸ÒÎ&j9…¶–D¢«K•ºYôzì{˜þð( éíçkš§q:É[›`@zî~èBŒðõ´æQj:Z׳Åý]S ÖêÍÄ7Gøo²úÅÝôáÕä–(w}]?l©8Ç »DDþ˜« ñ°ìZAAÇCZ³z²Pó´x]ðân› ;ðpÕú8V§j¬FUUú9ý (Rìui–w:¸`–÷Ü¥ká&·D7ˆ}¼rB˜ôizõê |($<^iŸIO­ßá(x;™Na®nfoÒ`RÛ©i©ÄnËεjµ¥a½v/%q&3%Äd¯ß½©•BøC•›¥°Lv¡b\[i[²rc½|Åt‡×æ]œ¿Ì/‘üÒ§uº6®Öµg£üÃ9˜ÍnseËï€èátî[ÙÀÎ[ÎF®Qá´µ@¥Í¶]éÓ Üäa³ýgiQIÂoÉkp"·m‡mt÷Ë`R´¾m´i÷Ñ‘‹!(ŽâŒ¨û·ózm2Ž!hü9‚›ežÊ«‘Ô/ƒ¯n…ù·Ï0|3á4çmµßëTž×[^G¥b˜±+Ù³{ÛVs(ãÓ'uBÞf™­}þôAŽÈ–cýÈTq `Íõ}N/†ê~M{Ës7…ȼerÞ¯_¼ý=‰lí¨!0”˜Q—2ˆ¶aGGó­$˜‘KÓù|ƒ9ÈE™ÃÒÅ ø±,q›gbéXZòV½]\×Ê!‡êÄ+w:tk•Á ÓîcÖÎ ¤rÏfˆ@RÚ“Ú´1ÖýbÏB8`ø½yåE¯%YÞ–h žMÆ6²$úýþþ]ë¡—¨:Ú*PËɆ ¸€d¤imçWZ&0£¦E¨sÀ´"bü KÑþáƒFÎ,jÀÊ0”NÈ:-–öeð%$yA+½ îg³ßF³WVïÙÝ.©Ës‰¬v¿Ñ0Ô”Æâ!ö»)û´{8„iºt¾Ij4Õ1Ùº{Ø|r™§¯üƤ?^lÿH´VÖ$ž¼þýþuV+ÕTa$0„‰ ÚIÁ1ƒ.)È£ƒž•ûÛ[Kx&ÅÆ¾:A{ŠÔÉ6>á µÃ¸ô¸õy¼ ü©ÛµK®]s¥A²föµý%©}ë%ðÙ|¦ŽÁçŠ9ƪ­SálÞ‡°_À ­\4;ÇU Ä®ŒaŠïH1‰¨oEá"­pMcð=‹u«é¯ËQ¼8v…½„¥µ Ц³W¿hs¶î~ã¦ë§(w·A±-lïÚÅÍË\ÞÀðqx™ÝZåt”X©rûúU¤jG¯…q ñVU§|`MöcuqµPñtHÔI Òþp&‚`[2§Z8-ü81‹ƒ©`Í¿ä¥SXŽ5ø‘7‚{÷ÌÂAFÀ*T‡(§:’£åà%Óš´ó§„ÂÅŠË„$aäø„˜„4VDÇ–“QÊfi hG‹Ë¾£ó 0!žì3¼‚Ö%ï–Œ+Ût!yKÚ·Å'>ÿá¯AúnýËrw"ÙìS»Ë¡9ÕI~˜÷r’cþú¤ˆÉ¢8:-E ƒ…²3‹íèb&£Õ¾øþùjU•ÎÍ­bQWФ1ù4 œ1Çy@Z[Ÿj@7±Hllrzåö1¬¢Ç+¨íÙl Ù£§»Œ´!ƒÆƒÀLL´ˆžk)ûêc)¾ù¿¯_e­‡ix™C#“¥Þ‡ÃÂíƒÞy]œ]u“q­âô`KÁZ$I¯¤CýF±–`Ör¯í>¤çã w}¯µUÚð›Ö6R:Õ8PÄo%ô·úú!ØZ +ºß^–/”q±‹nìC‚õ^œp¨¬¶Ðp¤ìþŸ¸‘­6gb¾üPµf’.IBàÍX”…Y¬ ‘†Y"à ˜ÕŽ( ΄q!d^óô÷‡Ô§0$¾†Hd²Šûí.¾%BzX/¿áFÔíî•8uJ¥qH¥œHk2âÒ$ƒdRFÂÄ‚*ÞE>z5;ÙI»ÊÑMYhm.…Û‡Ù©ãr­Cš•£¢«£íÔêl^ƒÏ;'ÙVÆúMôaeÔ>úDç– ¨ÉÊlo±me>ãÂ/ýÅúˆ‹ÉíÂçéÅ1+;_ó•„[£<Cʼn„ɃÜ;É2"cK•&–‹Ìe‰ EF}åKXà-Q<¬oë ÄÛ½oó‚@›Hs ³d2­ˆäœ“(Œs–ÄiÊ"(Áp†ÇÁðvý”Ù˜OÃÅ3…¿ûƒÜO— µ™(a¶#¨méÅ*À60al‡Ù_jEȼFÔj”¼†”™é_+æaØŽsNºt$Ðpc4Ú%ࣔ=Ú‘î×Ú:Ϊ{V?>2™ò4Ë$Y¡±!ä]6Q јFJF;ú‘£T ÞÍG9›Þ\m98am‘ÁòÈÜ®G濾ÿòÄ׿|ûv÷öÿ[xý¯›¯¨{øÛ_~|§ÃȾ{õß_nÕ›ëµHWñlžÎü—°×¹ó«Þ‡ÓÙ«ÂnUA!ˆµKIèR0àÒD'1O"“¤4MmÉ€s™”HÛÚñWó»¸Âˆ+JÕ®ϯµ2ã~Âãò„ŒªfäyùE9/Qeõ„15˜÷bó~¤yç'~zÇ95Óå%Œ"Ë´Ž`:3…DfT’ÐIRÆÁÞX‡{ü„QÛö=qÍž¹¹ý¥tqwoÊÿP%¿mhóÀfÖ´ùµC·B~ÜlŸ“ì{3z_øŽ ɾá{YöknX1¥†ç€-s±Ar…€˚'¡±2·DƉ“„ê0eTf± “vàòîºE—T¡à†ZOW6ÈICªD¦•VG=š ¤šN[ÑÕ7­™“ÑÕçÞ~]}·BX¿üD-c½áEYU ¡1gÙ¢oøàb6å(¾pcuXý?{×¶ÜÈ‘\¥¥°ŠS÷ªD„|‰ðÛøÅë}p(uéÑ‘49šõ®¾Ë?à/Û¬@4A\èîô öA+5ÀFÖ-ódVæI9ß7MMÌ,ÀMÜiÞòCÊ—ˆJ¥ßÕ—Ô…O#ñ‘¥?änÿœŠvR¬ƒxüÞúÂ#‰Og ¾Ì‚òÎÜH‡Ò ˆs︃£i›?í ²3iÞiB =´ÊyûeËÔ,TÂ1ö0b¢ÒT@h©…R„Ú9ÛÙQu¢[¶™5#]MÑOUsãá­zú÷»ÝaQ)Ý5,ºœ‡kXt['{o(ÅqzÌÄ­äì—âŸup‰î+n9Zâ£0¼W|nˆp(ö8@Üó¦[2º9%¼h„pšÀÝex¯”sV1ñi|Ìš¥Ó†‚ ”bZûÂj¢/‹ ­®Õ†É|L#¤S†B¸“SëníÊC;ÍJÁS”áàõ%Ä\(ý^…8ŠÞ3š?ïÐ9ôk Y1Èû‡%i^ê…»ûòÐ!÷ÂÍ1½´':pÓü鹯y6êaùcÝöîº>Õöso:Œ~'ÕÞÉJ+í K Ãúƒn¯í °"nørý@…˜Utµª#ˆ8¾þRÉA Aw¬ÿ4 @qDž[ʈ´ÔtLj&iøi$8Càôצ×ðó¬kÄá¬õ¼Wá(T7‚–Ç™AíK·—g¢v¥[?smö$úfÚ„-ù—h†:|Ö}¨gLC_óLª÷íõÈ$ïFiüA](~YyÚm.é ä $/sm­GKè–díØ(XÊ`…JIɤ†å l‹4nÞ@lpï,“ ,ÓÑ 3+VxÉU06–éÜ*…fß Â6Q~PÞm«2RÇ]£¬4’"¿1ƒ:înË?V‡]Sûb8Ê?Žt;0Žº'LŒŸo6íðÏ*ÀèJÏ¥(©Hj Ø= žûuPdu¿·ø£+ÒÙÄ”:!Ž×àf¼´ÖÏnwYsÅB(’i! 
Å;–šWÛj•yù²­žOê×ÖÎWŸìê“ÍÖ'3’kK¸­ÑVÌl=Íž|±@lW¶x Û•™ž‰]þÁÞH¬RóŠÄ7È+šœñZíÞ4³ÿCZUK§§»‡CE]¬Ò0“ð—•å6®±–o½å!Gç=kÑggšÔ–ùP|ð¡ÅÀ,ÿ-‘ÆõÖµAp!3¨¼ÎZÙ‚¸¨–TN"q©}ÒÓAk£ç]h¤tép†.œèRѵp”+7èRâŒOsiaÀQš¡Û ƒr©‰-‡Éòëæë†ÓNõ¼ÜpË-ç'.Ü9÷E§žœ‰î“Nã|›%þf|Ÿ%ž]9%K,:•óƒPöÔß¹,çÇ:„;xØÅø¸0äÛu¾å‹ò]ãùÛ»ü äm„u–góæ ,öz÷ˆðº,è“»ç²~£ÛÉèHÜ==ÞÝß>߇O·w aáF]ó{š’-,ÕÏ݇_âCí¿üüøp³¼¸¯…O›“Õ<ãé^²|oE£kU±¶&ŸÒ#>~î˜Q¤Ûr×~Ì茟u^OÐ~4 †=·½ó×dž¯”`Þ„&‰º›•gâå4á|K,‚[ÑÖNÇ@᪖Ëd¥‰JL˜xi=—аh¶VRü…œFó3áŒ1ŽoÇ^Vöd>':¢„sìÅ(nè)[b"?”Nå”Ã:MÅ+<«ŠP*Àõˆ]’7Üöf#¤¨sL…™OyúÜæ%&VÀ€ÉÚ¶n( ú¶Hãj£ M]ðLëʲb˼—ŠY¥5dá‚00™6²Êà8^c•˜qVmŠgN´ÚU~êã §ïKöݦY´è! K,ÙJmœbÞdÃ|B‡©UYf¦[4£œçœ°hÞŽ­”ßÿÓů¨7=â/Ý.o¼ºžo>AÜЇs/Ï;]úTK±J:ë™åÐ|@þþÚ}Üÿ»UXG7,`/·¿Þe]-vYƒŽ?äc­IÞ€Ò»‡N¡óŽudQë¸J[´&©äZ–EÎL—V³€ˆ Ͼ“eÚO¨´-¯4íÇ·jö©”ö2®zòùÇ… ~ÚŸzúS…Þ1¡ÊÆÿÃÓ/P§¸ ,"Ü–Á´Z‹ X6Ýëö5Û¿IöÚ;°Ú>í8N¶vh—ïuÊôž%‡2ª¼éV{[’Xëœöš0ãÇlIòªUÈ¿ÝuLlóˆ¬SVjCƒ—ߥ‰qb&&¦Y·Fx&3ר«¸g±”È@‚€p¥¸é(o­«ùYÇo«­^~)C›ày™o¥2¯Àºs¹­Újp7›ï~#IÓˆ¤DáˆáUIéS ˆ„f€¾mᄆÆr o°øq¬Â,Y±È¤Uý¶-û_ºúÞ…paÛ)f7UØgѼ_î€õýL“ )Y˲4‰áyL,#™ÑÂç,O 7ml¥1Ûü‡Úc`ú$ÑeY]Ö Íhü®ÖÏÈ`J®»kw;¸òr-§áJËuž9q½‹Å³nÙ®ÊV Ü\‹0Ç™¶k¢ eÊ®~ä¼ýÈæêÅèÅ¡ì×ý ós*¶Ï <í)ÒÞâ®S¾ÃåMýÎ e/þ°;¿³4WŠ)ѧ±?—SóôKøã,šÛ¯¸=~± o¦ãVIE¹â¿äËXµiÊ@$8üc•XÂÓÌ´+ŠyŽ…d•õ¤ÉÓÖšMŠÐ𯩯½‚¦èÔ\»œÌ RϘ´·¹ÞþÏÿö¿¹Þ½hvµæ†pE]»Çœ×¾ç5˜~úÛ/ʪ;ÄCdÄþæ<(x‚lN÷8ß&8îú„ Ï¿§z½—4á­è{E5N8MÕŒÏú½nÆñÑ·0ë’g|îà´“{Ga½7”Qô‹ É# îœ% Ð:]^BB²Ó`€˜àt¯›Ñ¹ÞÜé#8îÍm¿âöø¬œäÍáÉÒ„L\Ã{-ºÎn2{¼“¡Ël÷’Ûƒí˜Nšë…Rƒ89´sWï¾uÙÏ{Eóöy×¾Œ°½‰Â;Ûã<ÉÀœ#÷Ò©ǽ’zoâ]ø^ÚÀ˜茱í HÁIë /ªî§ƒ+(Ì=Öà´R3k%zF'qs±¡w­·ÒÂ0Ÿ{[¤qA´-|F½uG‡NJÎDâ± \“ÅNW…åú6ж‰÷£™ é˦i ä\"º–†=ðOÿ3TlÇÕÈe¤€ð‰ é 6Œ)ÊÛÉjW)Ó ó­‚¡¸YUÁ8§Œ'Sâ•„½v4mç÷oØ“@“ç´$Ì€“ó¼˜fÛæ9f£™ãm¨u@šÅl=CŽ¿&4de§Û¶^H0вh—E/ÈûQxFÑ­1ÊŒ[DÅ(‚™%ó Bô"gT1Êk‰Æ…(2ñ`TЬµ22Ý*Í|”‚¹“KYé$Ü„›ŸKBõ±Ð£D)Îmªæ¼A ¯fÜ&ž¶æeÛ&-ù ÏʯùŠæ‡Õßÿˆ_ûÿ¶c›ù± ÷¹>XÿáhUBn⪆èyïÄ¡+ìÉA@ j±vÆ1à+Ød-À…4öMÔŸßo×+Ò>Và.k‘æû‡&áú£×Ø¡ßÝ—‡Nâb•ZGþ¶~‹R íÒ§ÐÑí£èèÜ4zî¸oªÐ—?ÓY—nÁ«SóгŽ“êçÓ¦aÇZ{?Ï\Ùi\ç¨5·€6\hëðX_â«ÑÙ-J½ošÌ†—ÞRª÷À7Mà_4Ûs“Ps|Z‚Ì]Çì'„¹wÏK¦…ø|ÊI*s'?Šdò‘ùÖ ¦m¨qNcXeÍkÞXýŠCåSzœšCå¤QãTY‘¡|R•£$õcòüÔÄÎ:|Åíô¥évFÝ*³¡ßѸíš8Ÿ}G©“ç¢Èw9…ÞÔøï´ÊÎ¥(Sùh”á,ÈLg—ÚdÍ¢S)´> ÓUÔ>yœQÒ8/2«-5‚×w½ß°)­®ZOGµvšd ¨"Cdlæ…"­’¸ ³—¥½‰ç®&kŸU¬0—Lm…Jº¦üAkkaŠQ úÜiíŠlsÔiBõm½%ô¨¯Ûm¶%Z$û8«ø:à©ð\–­V °#“ƒrƈã¡Dœ5>~îUÈ·k¾¤£ZqÛŠ¯Yšš7|q¶/‡J«ã§ÕÝ¥,;¢l~ wOw÷·U=vj¸vÅ0º®cS²…Å øºûðKÄ÷âr>>Üã*-î+úÓU4Ϻ—ôð '­õ§ôˆŸ;Î3é¶Üµós_¿¾{|%hß7융^zÕ=;Á;šN9h.ŒVåàù×+$éKZBÊ‹ˆ%”dÓ8·U‰ ¨¸™‘QI¡90åeK¢‘óÂ[0¦M¡æéHT®® ¹Ï…eÔµÊ%Å”³Îx¡ ðšñ2ù”©R¬¨ЉéÂ5 Ni´ï’;çm ñ»Íx¹Ft."áå[äZ ›«œfs¯pó 7/ ng¹&l}£fÍòêæÅò 5½Æ/líOUI×5 ®­ÙÆœ¥õ~”ô>òíÈHù}`Ñ¢Xðp"Ä(ù}䎕àWÓÔ´'„Ú,Ø ~ä±Í#ÃÐ,Pè¿ð€|ŸWj.EíNéed¥8δ‹ÕMj™É\Ûè‹eº–)àœ¢pÿT³­˜¤]ÍË!òÒ BÊ'* y©Ùƒƒ®øeà%·Ê2eÍÅ„s ž¡l¿NúàXi×üÁkÎÈ5gdN9#Þ  {JÇgãά÷ >¶¯Ü[3[*c^œœ- O–Uý:¶×-s4‚˜Yw|Û$g[ñ&Ý~À\Ì©èbbf®ü÷ÿ~n>¡Þ]4†ßhÿÿ{Ý0° ±ññzsêWXjGR‹ã7M›žrÌóï)Õ e3 ‡ûeË#Û;'5¡žùõˆ‰¯'hìÜ\;2Íéj{Æ­#škºëüÓ]›k(gŒI{ÿÏ·ÿöñ!†§ÏW¼MåÃË”ýÃ?®¦Kœ8pž}¥œ”,s@-©pô±dƒ€½¸TbˆÅOÖâ8÷ŠS.7ÀZ"r²Võ‘Sxi¡ø5ýž~m?¡“˜Úwµ›ï]¹C뿆LÏìñùþ/r¯à`ìñk'üžñ3Nˆ¥Æ&ާ(œ˜ùrj°£F9cƒBJôsÂsKÚþxn…øz °ªGrIptëcŒÁOQ‰Ô 2KæA„èEΈ%&À¾–hä ‘x0*hÖÚÊÜ*ÍüßÙ{¶Ý¶‘,…è‡ÁîŒË®ûÅÀÛ“é °Ót÷öK`u!#uûÖ’tÚÈgíÌ—Í)R’iY$‹"åX‰À¶(’uNÕ©s«sq¢p^ùÀ¸'j¯­„4VÂΧL …)ë¨SØ(†`‡rF°$;­Ô×j%Nœ_Fõ/ÎAj0‘LKÚ­r&Çé)>@=N†•~•ؾô£&ŽÑÂÀÎÉC |Õ¨ïyö˜}AX.vøj•Fâ+‹²Çz´ibyC.Ï²ï®æ•~ùGÔù/®cÅ­Hí[Pz6½Êª8¯™ níô¯«YÑööi±$ ÏÚÅyȲàXYelãkΛqsVÏWµ‚eÃfAÃ?³“Ó €2Av/Ëç'ð±L—]¼®`n哞kôA|˜O†®(c|tÿÄâ(yûp°³áÎx£¤â ™Îÿ>IÆŸ üZ°u^°-Ôc²^‹ÖÜ–=l‹G7ÇØð–ó¤Ú1XJõ²š}˜ÚI Hð:h‡q(\ÔT¹A¼p¹Bƒ€µA±‰ª.¦Ý®ƒ4®‚0hÄIÄ+¢×Ä"Çrt^€ÖMÁÀÄæó+ ïq׈´)Þ§®=T%Aö$›ïà+ÿº}å‡Ã½­™³ûY“x7Œ9ǼÐÒY¤8-›S¢1êÍsF O|AòϽhZKüùÚù^ÓhR:ªQj¥XYJ³ÀX çQa„Ë\ ­m²Ò¸ Ìr‘;exð[0g5Èi$ç&e‰0{}¶“†àálgçSæ´±kƒ¼#„[Å 1´Üä,Ð0VðÐF€í‹ÓV§<‘!@ÍE¤¸½±¹J:IÂ>·óICo¯wÒu%N^”¿ƒë¹§R:ëÑ@° Ø–qª²Ä*$%¶6Öá@†‰âuÆÝ[E ¦p9PÁt>Ì‘a’ /hQXš;À賓 !Rﯿ#mŠ¿H¡T~ńžˆË m¸¶QLd— A)`õa·;2b06¥ d¤ö6·-m‚÷Kº§.#l?=!»aÙÄ® Ø"—Ãzñ’fˆå˜zI…cľ€½¦Íçó„Ž›vpÜ”{öɲÇnØì ­í¿¶–t¥1Å®V<¡¶º&µ ºí 
úµÐT¿Ï›Lñn5‚>°~_;>#•ë@áT$t­T¬»­¬ÔVv½ø^7bËž²÷O¯”§ÒE_ŠÌbËßܬ•š^•¬Oã£eMÀï–=´Ok/_]¼³¼ýgþúÍ£‘u³Žcý,ëw,KàDLƨ—#Ú4ܧ£Ëÿif‹bêÿ'Ƽ- ü,§sÓw ô}c•——à]cÍWYñ§Ç„•÷Õ3[ž÷š²ê=œ³f–c'‘ÀCݺBh;÷Ü‹‚ †H†)5Ý%yy5[W`cÅ6âç4`$9ˆÓx®OFT[Ìrð¤ê¹K¶bq¨ø7^Å¿þäòÜþ‘ÂÕó#D, ú ŒÅ[+Ú —T!Ð¥¥,A”éóÒž×¥R§™³TdÊéjBPs¦i‚½£)Ô®¶ Á-ºÓ6.š6š˜ýCs9È&í©§MšÂG;†,ùhò¼ºá/UêÆ¹ŸùÏÄV;þ’­ƒY°àvÑl›§>¶ú ô©nßlƒÐ𯩥Ž4¥Êìzœ’‡"û¾‰{|“üæï?œ/¾[„‹A›Ø¨¡B`™ÀFͰV]lt4W˜‘Qžw#dè¶+ºPi²Kñš]ÊúÙ¥¸f—’áv)ÅRÝþB1ÙÏþ±­š~[ÆO²óe$ø€¥Piî§5•¾º2öIqŠo`g§”Hitwð%Œ¾øü¶qˆûquk‘`Ú„#ºgö2Ž`îäÖl™EÖޏ×6F% „­ô^2ÉmÐÏëthíÓq Íô~¸Çñ`),僥|°”ŸÁR‘I$ÖÝê##|í¥ÆÔ¬RnÏ‚¿~Çe|-“§‰½¼–=º+–QX» .=¾Gãõ[ôè™Ë)ÊÒîUf$æ)Û³¯2ëFѽ)WŠ5¬eŒ¾¥qE4ÒD9Œ²žxžÃÚ펌8–Zv’‘–)n>ÙÂ<ìn~RýB6\Nçs˜-ôðŠè±|t°Õ÷è®GÜ›Q#†qÉÍq\«z˜zZ§K÷/½—óõiØŠfù w‡t—ƒîò,º 瘛îP>Ê f&t¶´n ¡\1Æq ’r{5¦¨.{•£Ì…Q–pºÁÄ”F›;‰c¢B0’„£ÆŸ-ýóP}cÕ7²Cí‹}JÿÜãÜóìà6Ø·AvP-FT-$!\wŸ-ƒxVƒÂ‡T‹-"ˆ“2:G~ É @~ALJFíô°­æ´Y #w{Ôö€~&„t:B/(#Ù=,×¼›ÙŒ”nN%#Üt§_P‰õ°tón”Æ ³Mljlf›€MS¤-}Q T*B±H˜+ª^\èv‚±0^‡X´”¦žcPUrŠà¨ÀKeŸ; t°ð<ä´©ƒ6uЦöK›ÒŒÈ„Â0L©AÝúª&(• }ènU6y›9ëÑÍìú÷ÈϦ×(žžuwæ¢  &„f½·Ø£ÖhDå«eç¬MÍ4{}{}ýëiÆ™Ô\6J{Å1XüÝ€+,wÒk¯/^x(d‘€GrÏÀô>{ðÜüú"?YüF’Î Bþ˜¼»4ƒa–lŒ~z[ƒ©)¥ Gﲬ_ú=¬má4°7ãT^ù·' =ô[¿€¤òÚ(¡HqûAÑ?+8–Måþt_ž•yaÝV§²ÀF)–*žÔ*…\Î1"»‚sL Y,]–'<-9Cdtøw±5Úb‰Ï²è5}Kp¦ã1FŒtpBÏ@·½™ÎâAÌ{qqZºYOc;_ù·>}Ê൓ zövÅÑ =¡@…Q“ÿÇ·¯N³õvð~èš©Aœ´‹¢R8§&Ì„Hµ®QWÕ» ~–½m^ý“ÎÕ?‹Ô´ð¶–'g‘mfâ˜aÆ%¿+ëbê®ßσËnA;~Š%Óš63Ôd|Ìð]óßÝ‹qñäyã öé¹H5L'DÀm'û/ðBŠ29oY·TØk §Ç„ýQkÄDb’*«9]æ·>œÄm²:ÀZDRHK4Ð%ÖÔÏQ°ù%XK¿L&þªC‚$¼¡Ôc+˜å(—Ô!ž3Ž´£©Âyåãx›È“¬Ad°-%k¬qM€´HÊšHÙɆÆjÛnæ»êXKF~Êw鱆±kÞ¿1›’ ÛRïÆjÁlÔ±ÁR3XCÄn´în\AgD˜”yÖ¢÷æ­µŸåabok}Ä?üœîÚ»/¨¶®Ë½ÁZIØHÄè}ÆÐ€ I4ÅÌ éÚµ=É”•qóÖUd˽«HëæåªyM"I%S­h_ãæ­%]ožÃÍ{WƒÎ¤(6O÷®<ÆF*Ú"{ —J$Á?~“ô¥ïý¡_*œïóÙÉú$¸—@Õ@,’Î> G¦&~{ººVhÁ‡`I`Ù¢¬!tJˆš©5Ñ×úïB¥òØ¥. r>Ší<:•(Ìeüãû+Êøðšë°º²ï™¦/.‚ÎI‰.ÿ(~›4#¥K8 ­§õX”€b=oKS f5%x ZniŠm }Åœˆ9æšüóÿçƒ10»Ñ‘ÆÞò -H÷!4 ®GßòuF[ûûÁ¸\çñ÷“™kD‚ -{"Ï—ÇÎe¼=gÙßò‹<ÆÖÇ·Ø‹xÄ_…]^Cµ_Kôo>5b–IÒò‘ÞŠïô¸šå呂òÇóÇ‘”B« ¹×è—ßn):”à~/«bÃ…eTQd¬Öˆ³P ÃG¦¬‚1-]«B¬‹ָɖÛ³¢U!¦¬q­81‘HkŠœRh..DMî—?.n±Š™_)X5žËöÒHw‡@,ˆL@¤v¬Ð_%ŠnJXìRÚ !æEéëgÔ ×›UÂ4Á ¾ZŒrà›âŒg„ bTÂb05ŽxY›­.‰²~{“t‘œi¬š¹4L‰<¹îÆ“œÅà¯È”K Ó®§A®K#¶LŒ×:YûÜŠV#&‹$Ljò¦?+xD<)»šR¬™I«f¯õß!ZX‘üXñ#2(Jv#›“¶ eŒ%$jƒèe#’æÒù¿Þ<Æyª@ìs‚†–! 
ž'ØÛÝÁ÷bc z2GŠûrš—Òev} fKYgucþŸÃÜÿ¯‹±¦ÿÈoíé}ã‹›É2f9M`Ž"Î5GΉ,ÁÚù}wxº5¿ÄâY´¼±¼göŸDIŠ¥=}õvôÓöòv-ð¯èöhZMT"ùÅÛ@ ¬é¾ÒQ¿ù=É^UÀý¼X­åªib9õ¹CÁqW0d ÊÉ *¨À,+½*й1ª¢âO0RüÿéèÇ›Ü/ëÃĿ˂?1âvq-~¼¯ÿ]†¾/hähµþÓ«7Ge´°ÁÆUñÆåç{»kqû§£U…|ôíÍÍêiéèüû±,år=Ûñ¾ÜrYÓ*Áã‹9zýæt.-äÑO1›wõÕÑ*°¾,VC–Áï˰õÓïA9Š•{–…{àm>W!Ú?Ä”åòË÷•ó²ú´¨Pôæúbê?žýwn/n'¯&¹ÿµ6oîpÂÉ÷×·?ä6|üvU`¡ Œ_­*ßPÎÕë7U°üâõþfè"ÿñ¤ÈÃ$ÌOßÖ¦álùÐ4‚ýúÍ{~vôíETØnó:zKðæå@õo^]ØyuõõÕ&$ÿ¼ùhqùoÑg0uw%Ó©V ¼½›¯È­üt_ãôÑLWß¿®JÁ\?Z–êê}$˜™0]¦d¯>”_5šOœ3–â ãøÁT¹pç‹ÝÕ_Ì@ÿ]àœåËV¥PGgÏì,?-% 쮓Å:½;,{û¸ÞP¹—ÏãšP ¥­b\2²CëÆµ•Ja\sØŠñ«•(ŸBØC­bʧ`ìUÁCñ@ 2Š$µLœÉB +Ÿ²Ò¸õ4 ÓV#žq?`-$ÀŸ»à¸&Tó¥ö§“ VѪϦx–ÂB¶”,ÅwŸÒ€Õ_P{×tÂPbP9©>„±“¢RLPfR|§ÓAÕ-’1±K^2r¸æ‹l=bæÙ½Ä$¢÷Ó8î~>EË/*f2ñÏU{½¡:Lå ¿Kg2ñÐ%²tdmngÁ)מ·c…sÒp˜Ê·Í%áí¹$Ô6® S)=ª¢kKŸDº+j¨S¢ùÍÍàd™1AâbR™ª # "1)¼Š^ Ìð¯„À§ ²4{%À<:~löûëË 2“Ͱ>}jéYÛu¹!—X¬vêÓ§J ï3Ì…­u]4ŽQ9QÙÁÜ·“<{5[NóbÔ/ºª6¢Uø8û)z"ÊÏÓŠ“.%GveE¤ê«¥óãnsï{wggîËà t]4Îûóø,–KöÄkñ}×’±gAHë½”(PáAïƒÖ Š':ª°7|á®P.(›×?C½“ÛÛ›>‹˜á¼î³(oâ±hÛŽYサîǬi³5lÈ^-wdó(|,=up²Tópð²l'î5 ±S›t¦“&isÀ”9n%Ù”I•”D7Ÿô .KB€ ªÉ”¬"ï érêØ/¯§T*ä_P=°Þu9*ËûYú%õ¢ú1!8Kâ>œ ª>–¼yǪAÆ„&„éÌĶ5È9®7õü%ŸµçoúŒIÝ'¢¤—xZÄ™˜c.µÖær8°b/wÖŸjiãÚ«À֗ѫ¨ª ÅCi¦²ÈTV¿A¯ß/Œ^ï<¡¶Ýîö¶DJ8дo΄½™V‘7ç«¿ ¡p¦ÍßéÙo‰ oX¤ ;“×i¢·…BF ‰\î5–ñèFéöâ0¼P¶É¡G·uèÑv‡‘CWEâîtáÕÔ4NbS]¶(ܤásI•jN[e’RPoÈJ½¸•c*îÄ:É8læPÐq@Hóœ!Å¥—Ò O‰Þº•c)÷‡†Â+ð`~½V`*ƒç†íª«Óî;µ¤ÅR¼ÈN-Í Ÿ`$¡,0é$[ù5˜Úú$ñŽçËY³÷]‰²½_X©ƒšc®£xá/´G†€²#TàcîláÛÕAÅâ!oƒ:h¶UM»:¯ÝɲkSxÒ9§• ˆ¢T>Õ轪ô 7â#|Ä–• ú#¹* ¥¥ÂºÅw-á,v¹£\ÒÞ¨5bbf$%GÏ']ˈš°D¿ñ¿5Á«0S.^3JnÞv ©@TxHY’6ÈRòÀbQ[“%Á£¤q?p鮣¢%+,DÜÛ-QÖ-~0…W²¥á: a 7\ô¡(t—ÌúúJ‚Y.r§¢æÁóÏÏjÐP¬©dQ–ÓáÐŒØ& †·•`¸]‚ñÆõäŠ%ƒbêQAÉͬa&ORg¸©„R܈ ç™âBY‹gY !M v¬wá‘k{w;ÙèÓÂH4]ýòΉºLQE>7Bä°k Æ ûjU “‡ËS>ÖJ í(òœò ”[ l/ž)¯ß²úûP¢\±EE m«e¡$§:Ñ­ ®ÑÄI3M4ÅÀ´¾IÔ1¯W€áÏQ’#Mº+.DBê§’[æO§¯ÇŠ\àº-ª¶ÒTâ˜Õnº¤£ÔˆÁ$Y n}™}åGt‡#º>=F¿ÿëùß/®ؽۋºs_¼[9¹þò ééªÂA»B(Š6âŒ8äŠ ³…ò…³®Ðtw®ªÔ¯´ÜYÇ›8ÆB̨Žs%öü’ËwMPkü/ö®u·‘ä:¿ ±¿¼ð–¦.§nòÃ#ˆ³’ÀC¨K׌2I–4ÞÙ,üXyÎ"µ•3äAªó~A]§Èx<E¿|”Õ +,PÀ2ýYÕ’–¢¼ã„¹Ñ.ÁPú‰þ‡O' Ø±h8Ë4åTÛ¶áPËfý–~„’á·aþµi‹cçÏëfíkcxJ~:;G=ÎðÁ7Âà+nÿ…Ñ÷Mä[ô ûÄs‰ÛDyzðÆãƒÓÝqº;¼;}MëóxøÌ±Pª‹:©¹Wn\ŸD$y ¸8JN]¿®^Àþë¹KÁB{&‡\Ç  çJH×8.IFèg|¤«¶Áôs©Â‚è0i™#¬¥Wt£)$& :c#Ç›&ü1áMz3h‹9köåãõOfà*•é\G BZ|Ž(ÔN¸žÿR§_;u‹:U¿Lˆ¾›®œ7¯#;X6ޝS¼±YsÇ¢q5•V€ùØp¦‚-Q‡lŒáu6D×Ú«$#˜P¥ÎÀ „È‚KÕø£¥Rd5µ—B9 5QöM© ħ=±š &^Ë(R1,øª&‘sS0LBô:›XжÃÔdS¤‘Q˜Ð+Ë3L¥F SØ=*Œ¨U¿&ƒ ®y}5‘œ‚&£_›C±ÉÈHLíÀø.(¦–wý(¦ö/ ˜ž.öYá0TÊTCòA]©Æñ]»˜@â9e­ûÓwbN´ÖI@§@j<òš²­v¾íØ4Û;«vlÀãè„ ½„F×Ñ#àÂ(“0dºuìWš}!ËúÍ ¿,Íæz$¥5ycO¿t,æ¡¿3dôeÛÔË–Q¯ïV÷Çß·ùîïý&TZP‘U´µ]è%ô#{ |IÿÊ4 KYØAÐô•…íÊ€•.m$Yuý¢ÕªùGnÑðDíX“„Çp][æU Ìé²sYp‘§3Ž`µ$ÐÐthˆä–ø©ˆb‘ƒ†È!:ˆ|8"õ‘U¦I2šCû`«Âägk%É—áýõÍýÃez"fOÝ·æ•é]AÒ[ö%ÿítïbœ–œ²;J‹Ðèòk!9a:ÀóÅ›£Å¦q´œ– …\Ä9z!û‹9[¾ú‚öŒ*Ꟗ²ÖN@ŒKâvëTŠO9W,»Î×_z¥”·„œ;Þ)o £bX0… Ü5‚yïÐUʲEÆX'à@)¾.*Åh×ì`)ŽO"SœâñS<þmÄã$'ÜãªkÊßBQŒw*PBŠb`´PRFæC“ƒ÷v Š ÃeÈZXSšaE±M‘F®Z‘‚6ŠI+ƒ ê?‰(r ¼iѨ' &‰j‚n§Sj*·Åh¯Ië8xØt KžÂS§à–×ЙôÉ[cÁ2ÿSL_öŒžûµËAÔÌELY†+ªýuXTc®)6‚LÂòÝ£Ð^)hlß(´>vz7ó½ð½û‡‘,a>º˜™}œ[žâ;Ê“Ý>Î guÊ‚o™çg•€Õö@ƒáÞ+B­­‹#xPÛ×±K]õ‚ÁFÜÚšVÐ jÇÌœ»Àá;òV8¼ŸZ+qÂe¹D#¶^ö}/g7y¡†ß —¯ï—³ÃxÜ\ÕÌÖCÎ>ôŠ*%a‚¨½Í‘Ù C¥3RxBÙ¨!=¹=BQ’6dáHÜ >Ù©¬ÅкxZMrƒL³ÝJ:~¶s¯èVCyÒdƒ»÷Ô]“?„g/±ÿþøþ¯¹_î GKÛò±»õÕ—$QýH ú}RnôZ„2…˜1!-~šWÿ u㺘ûǨÙÀ`Œ¤ƒ)Pb¶\½­æ\Óé„唿\Ô4©laMlPcdä,do˜—¦>[î˰æÜ ‘ÆÍC乆¦až» ëéQ÷QÍY)Ù¡Åp÷6ç’Õ¤ãã÷îhÏšÝÞ_ÿ$÷ë¤ïXFæ% _b®12çÀê\KZ–d¥»!¢÷9÷DæöXÔu»u\ïÎTV"ÂΘNÄ4Æ`WŸá;l–«×|Té£> °7;±'¢[ªR_*¼µ ^¦"ä™”àùŽ@¾BKQZ*m`!^^µí–_O?¾^=-t ñò[Ô¸¶¥»¿­ «ÿTŸÀ»½~øY-fÿòH·xR—õ¢µS¢a\/ð‚¼¹Æ§ºøñ¦~jñ© hçšzõœõ®bíëÖâMü+àÒåäù[7¸³Õaéø­Eß”²Ðcq¸HŠû˜XÒÞp§wè£ÓÎPÄUb„ke £C¹s¶þÄÝ—:ìB¢>½N2fPMÒ…>á:0b/´rx2ÿéòúòþÍûŸ¯S+üª£m‹du)×Ôömmj;¯®×^zè×iêÂTta~ðï¤!®yÅ[FÊÆ¡% ê+M ;c¥'ôÔ9 5ñ—ÚÞP¯…öáak×÷²“ùQÌ«ËÒ¤ŸÓUó¨‚ÇîcŘ\u<®ö©òc§­c?}KóÚqx©—óì]ž†gÁ£•N‚áW—Ê ˜+àY#¸ˆEF+J˜ì i®„T¿³Çr²¯{C9Ú÷|=IÆÅB`˜­›¤/õzEƒÃ\l4k0p”2:e3, ÷úg;tTwp¶/IÎS¸ ñòêòáçç 
ÚmÀû§•`\Ëõ¯+±.¯ÓÕçܜݡŒNØó^ÆO,]Ý|ΫUä%–ÍÎÏÜ7Weývöáòýþ.¯V¢­Àp¨ÔçvòžMè¾ÿPªK€¾Bqؤ?_9üy‰i´Xa-œ SYŽÅ3ˆ-lI™Ùh²ñ¶H<’kâsŽQn\*ÌPúó•Òo!@wÛÐWoH~¾Ø¾û=Tâ OTâËçp¢?ÒJ†:²HÈÇk¼H€PlÓ\¸7‘¡3®õ­Xp+…!¬ØëáfoqO̹ñþ¾¸óÌì"ZÐÂHGàžÖBúÁNþ:¹òñ)aG÷Ù5 i§Ò{pѨñÇs £ÕÏqÏ¡ŒÄn(#-¥·Š°rîÄ|S´Ò¬RZ*”“„iË]¨1Çëì×D‘¹EÆŠ"ãv£Èˆy¡Èh‰a™àUéT‰ŽA‘¤*Ó ÊÐ×Þé¿~վʽÌßíØgŒÂ)kÕÃtíó$ˆ:ZZï ÇÕ˦}dÿ˜ëÞ÷mGúä‡ÔuÈÏW?ÌWÝ¡ø½%’ö#=^ª;3‚KÓÏm¤¥³’@TˆKSóÌ×NãýˆXb<°Øˆ†A® îŠ+¦.“‘:*1a¾VI¥Iv-½±~°4ü¡M zWâø H©ã/í‰ ¦Žè Œ¸-û·4òŠ>÷jÕM:ò*üÁ¿ó¦F^u¥fUûgÈ´ê„jóB3š¤ÕùUÑŒ4 ½w@ šìAÁ-‰ª¡¾ÎW­¿~±pÚ'ÛxÎ|1móI9f]‰¹bL•JËå:åϹÞQ®K7Ÿno®ëÃ}Z´Ó!µßîI”¿¬j{ÿ5°bÓ"to©×l–kø¢e›RªÙRŽ3q*ÆÔ§p*Ål\c"B¹BI}dÃÙÙz}lýÒÝ‚àŠœåEÈÂD†™¹ì4SÅ4ø‹÷6JÔÒéÔ¨ÂGOP£ùVix^¾4zÓ£eçik§¹²_¸Ä'¼7„MñzyGÙIò‘š=¼±Ys‡gªnT.À|l8;¶DJ°1Ì;nˆ4r*É&T©30(!²à’f¹ð:)²€šuÞ‘¶ÀSÞqòGöU0-'omþÞÚâä+½‚Ù5¸†Ñ ýø´B‹Ê×Sâ·ÅÖΙ¹zæ´yi›§!¡b:m%_ÛÉ;ñˆNÁ#º8¡\ÏÉÉ›14æâÔ˜8ÿÆÄÅÉìŽhvk››$$Äÿ•¦ÃÅLÎ^“Y(î™Ëmê‹Þ%ë}’<x}5ÒÜlô…/Ö@O{jÁv /µx^{þrX?¡<7WÍzP¿åÿZ¾Än¯>¿¿¼Æn™Ë^"÷’¿¢‹Ô»M·Ú¾Ý¥^->„Ë»[üHÕ¦Vk?“jx´(Ùøós½¸n.߈7wèÚÞÞ\WÀÉëÚH{÷ž»¸ÿxyÛ~IG_Wð%k}H·øò} ¶†"]”Ëæ*ßwC ˆÒô ôÖØs ž•öí ã·BªÀ]#Ðq€÷³²èm‹Œ^˜NÀµVNNXrÎj)7©ñ£·–o2 ,1ÍÂK×—+½ºgŸþŠZŸôžsåIÒìí=ÖÓa}ýÃê•°œÐSç¬ûUº=j.ý¬|T&&&MEÐÌ9Ï™,¥Ö;ÒE½¾ynÆÇášÇ˜Â)‚ÀÂÍ59Js„祷 ßEÀš„¸ì£ªˆ d¼ó…ÉÍ” w 7Î0•*)´n êg&´÷ÐdPÁ‘?K¡öXêÌRQŽAÌxŠ¢YjœŽ<ÆŒÿ0¡~7Í y@Üv®ãàÈhŽp&v`9Êg¶3ä=:› #uÛÁz3yÈó’ûŠŸ£ã|d_>Þû/½ËðN(‚+Ùî€ Uºñ&̧ZB†ù-}ƒÀ€và׳¤C|ÅÃÍÍÇó…g åÿýïý`‘ù1ЊÛ/‰I é+‘Ô)rËmWïï?§J­5Y¸_Aü½¤¬&8¹!}¸¼n‹ýïVßB‡ëæñ/÷MDZº”M%ºÿäÞÿd{—d¬Ñ”%u&¶‡øâÓ¬ãE8t¼ °BYM Ý¶^ñ- ›[ûšýäp/¿»MJÕᄌkC­ß.'û/К|Uê·—’}3LnÕ1\ñ«MBãöÜ'\ý’ùLnßU-ÙFçöÝþýbõ· Z·Õ=ZBò½+Â]@q³$·’ÀnE§™ìX#–° dùm=—C@A¬âBZAÙÞ×á®A¡A£…’2²:sÌÀËB Š ÃeÈZXSša}[›"܈A"œssþ$ì‰ßvsþ| '§Þ‘ù÷Ž,N1Ú¿¿øÃÕMDKð󊩼|d¿ý‡Õã‡ùìbÁ5²\,AáêcÉšÅPl*1Äâäd©f«´t”Rˆ‘j÷7™ c"ªoêï¿foª¨ß‘weyZlóéh¼áéGˆ?ó¦X¼­rèFï?¬x¨í .òa‰º„¼0nÆ`;$<÷YÁÐ5ÒÚA•t²FŽU8·à¥q„äk¥Wš'ŒÐ$êÈ]t.d¦á–A ‹Ü$&›\0z±r:u¤nš~HrlX­‚’(Ó\ h_¤îèDÙ°å’fO?pqÌó:(¦ñ@ZB~vÍ«~˰ìz&Qtp¥1¦ËY%»Ç3³F‡$AûéZm%Ò V[­í\;ûhxV}ôm3„Î>ܽÿ8ÞÙFÐø!Ü/ij÷‡ÑzU_Ûì´øOgí˜Çÿfõùïñm¿Á϶ÆäûE¸Îõ…õ¿Çc2…åp@oI „‚ƒîÐX|õAñNvßjÊ ¸–Q¤bXð (rÎb †Iˆ^gKÑy×7E¹›ŸÔÔ8gòIº6¿mòù6º.N®Òü]¥ÅÉQÑQ¡š]#Ý›B!Èo©(¦8aŒªFÅJY¼`°ŠÈðÒÒA ¬ýoŠ42f‹EËQŠdÉIàÔŒ†·5„R0ayÆTxŠš8>ßÄ(íÏ+1j­œµsMN³i*ºÚ³™n7­æ}1À´üöž´·äÊ¿Òȇ`7PÉuú´'Á0“™ÙùF'´¤5eÏ!è¿çUóP‹f³‹ì¦Ä¶6É&»ëÕ{õîÃ)8°åÀ‡4uAzšÄÝÖ£Ñ+ëqV…k0ÒnçZÃ&Ã0——Lfsš›=y|+쉲•c¯‘ãÁ¡”¼ ÞH°~½õ7ãˆ.‚—ÏíŒæo 4<«\ùuô2|áÁ=Âk.1´½U¢‹LÔ Èº ºBS!ŽMÜ›3m”èN`ªc]Hƒ¸N^˜µ*…˜óãÓª„{ÃwÒ‡Ò‡%±§UF>#Ò"Á$_)x)´{˜ß¹ósŽK‘d’(ÕGÖ¬Qz{ÜýÛNÏIãÙDéœJ âJ—âEæKQ6’ ‘ ´WÆQ°yZŒe# "<†¨=N$¸# £mÖlÜ[yh窃èR_I“LNk=¡9ÿ»þ`hN[bÔðˆ(, :ÚÂQmJaU¤^ J• ]¹T§O‘iñ¶ú˜³K@Õì·+_w'Z0 y{´l&†–NÂ뫚‡Ì€-4=‘ðÓl)lô+Ö“}$åŒé³ž€ݲ#×qOìùÄžO%À{38FÙ‘¼¨æÍcé;ൖÀ6 Øâœ<Ñò.G©‘ToDîÍ ÈH>ãXÙSöڲתSîØ¨&Ž7s«:ùÆïCªNæéb×hź»-ƒJ-ÆêU(Ók¾R´5»[½ ämޤæǵ?á·+_{vHàí­&õð•-yKm@I†‰êîÌe£j}¨6ÈõÀ㙿ŒáSî¸ãUç7à©.ä?3¡:º`7Ê`†hÇû CX @œöè–0JaR„É=’lè<:”6ä/Å£“¶b"GÜ9ެr ”uˆ4%‰ñýDñú’†=[Š*Çꥇ<%%YM KLþ8~Àõ¬Z­ ro%î®ë»†·†Vrd`ëÀÇëÕtR' nô"ÿqñÄ»¸lC|oíÅ]ë‹ÖUU^bm- ¯€+pæ-²8Hû˜»Èà3_¿'®æK¦Vœ\PSÁ~Âvþ߯«9:æÿÖôss±¾º·õoo¬›LÏW‹<Ÿ\¿ªó0ÿ±øêw˵~¿XêäÊO?…xþD‚ÅÇ?½ið±~7O‡Üúå‰û€üôúSXlC(øÍ,NÓòëèròþÙÏv2]À±ÈÁœãçÜNo.íúÏáÒÕ{ä5#8…€vTƒûþšß,¿ãäý¬ËýÖȈ]üvuÃú;ÿ‡(I±4†òÕÝPÒÐ7 ÜrîÌ~|çϤz=whÿ.R}^ü¯‰åÔG‡‚K¸0“@‡ Ò¨DfþÕc¬"¨9­ÃøWþ{öÃMôË4Þüºž™Ôâ³üö®ùºf—··7³å©9[¹ ~|ýæì¡zqnÐÖïïp•?ªîÏV3ñÙ777«_¢ÎîáÏu¢-ä/] ùhT똇Ÿ,v ã‡Ì[LÎ~̪ÎêÚÙÊ}QÏ´†çÔóç–nƒ‹¿__ųœq½L¸†Û=z?7ùÿ™õ¹úËûÕÛ±z·dýæz:ñ¿]œýo´ÓÛË×¹Ö¯÷›O8Öå߯oÿmøí›•È›;&Ö–6·më úöÍÜY±¸ýŸ~€m™Æn­ÿ÷Ã.Àêšûðvù«I^÷·o>ó·gßLsÌñ66á[®oV?©yåõ$âüùW› üÓâÑg‹ÿ2ÉsŸ²´[à–xûi¶"³úÝ]ó¶z~ýÛyž>lö#¼Ì?½ËÄ[39œ¿´zS_ºï+Ðôáb9‡·nËt†Q5†Ó›¢lkÅž¯ß2mä¤2S œ‚)vi#l©‚Í¥˜ fÞç±¶§rº¶¤ÁóÞ5. 
Ä¨Fœ0¬“Æ;kµ³2Ð$Gí'*ðä':ø–Ť¤Ó hnA妖 A&Riˆbpv^l¿efFâ'*Ë2­ŸèkL¢0šQoiøqõ[¦²!÷t(v‘`éET?Ä oál*áŒ'är_Q¼¶¤ýD^k „‡Œ° øEÊ‘1 S¸RÌ¢sæÁÈÄ`¢”) ¥õh³©‹²PFuº‹ÑfÔCnËÔ½[ø(r>+fòíÒ9QÅ«ps=¹ºgö ž>]YØÈÚöãë7¯ù .îjÿ¿Jmeíßè¹á#¶‹x¨ì@ƒ•´;=Õ`ÚŽ%·LN܇f¿ûóã1¼‹ÄÔΟS7·Lï¾ü¤®.Ȳhºvr®ÅÍZ–Îäêæ¡{Øb-]žÊ‹ÆíWÞ5ÚýÄçî±µ§åÓ”ö“l^^æ­äT¢‡G4ýv›žv¶R}þ¢ïrþû2)grÕ~ $îÊ‹VÔšTb`wSDBij9 ˜­©˜JÒ’¥” ¢Z‚ØÉ‚”ȵþVK`Spˆûi(ëKØYP48sÌ΂ñª­NÃT_ˆ³à”õ}$I%_¡9A11¤{ˆ!´{ªµ4ƒëfëEDÝugK5¬ÖuzCÍ8GÖ&œäqчܹSxÄ5®ÏµFQ`K¬äÚ1y@R¥Œ(\€4ý€´šûzë/cF™ 3‚êOþ«„:ßµ´ÀR*-]}øù÷éÍ_äŠ]Àz]øÇ»:éaÈT š…Ä@{ ™ ,å8“H肌PÍ—Âb®=ee²ª-’:§ÀZ“4Ã9AÅç°s¥5(š W(:þz3ù˜Uà_ìtzQ 8øwzíç躿¯à¶—Õ·o.š¦»yEAÿÎÅzßóú¢ZÏ|f½qª †ØâÁÙÏz½b±Y¸û0ÊIД°!ÏË\©È)'}š·­ÖÛL¸ Z›C·××ÿ®ä96JiöavVM'îúó,¸êxEÅθxk·=C9Ö%HÜ·Ts°ò_/‹m¿¬ït ä@I5\lEÁJ`á²°Î_aÕ¬óŸ}ò¹›A[­P[!3 3R™ »²Sxv=ïÂõ/W¹^w†WŽsÅe@³\¾ï`—Ý7˜³Cæ©ãÒrd\îr’¬CVƒ-Æ”NgìÐkÁÛØ¡Þ—êíìöÆ ×#ÕKâÞTBaŠ Žwe;Á’ì´RÔKJ‘¦»Ùù‚ð_µþfv-ιÔFë/Ù5XªÚÎá׆8ÞM;ë/ ×ÛŸîTdù547åçä¢îlú¨±.AÚkXR—|¸ãXJÏÍy;iÝgt¡]ˆyðCo9{”RMK4¥në[ÉaÕßGa‘¶Ž/ƒÖi[ÊqÔi[õ×4L“'¦#Ç=L§ü@¶«R]K%ôpèW„¶ H§àƒsÙø=ýÿe‡ÎÝûþ W:D-ó‘ .€ù;ªÆð8³:nWÉ ´M%WbO•\‰­*9w½1*ôàÞì§ÛK¸[•åð«ëü=\–Bª$R ýüñ&}lQ#¦[ʨa¥ÌÊÁ[&aÚÎ `¤¢S o#øW¥'¡MG˜¼AG'çBH£ÌE¡z÷~Ä}é­  ±áŒ¨‚.ĉîT¼ñªˆ8Úm¦ ¥b“3\0‰Õ–Øe18 oxySå~¶ÒPý—‹ä”ï§×ìŒÈ)ÅœË-ø‘”²‚\rÎôN­Ž‡¯|È]‘ÉZWd¼[WdÒ芌ûwE6\Ãîl!æE¶×U|$‰–Z ¦ÃH˜Ž=C´—¼»ÌøâÚµf]«º ¸þ`è¸v‰~8±¦4Hä‚cÁ î­§õõäw+n-wzW+ØI“+Ý;¥xqÎ|Sn+pÊã+åÆ ×ÙæiwBƒ(U%aCeX¿ãV93w—ÃÑ¢ Ù©þ+Õfà‡È½h¤uÝ”€\u×Ðô³|é¯Eo«¿Ëâ%ßÅNsèdî­Ø•²Œ­/á]¼¯{K/@¿™~z?¹úÃ};ÈšÌ\Í[3¸r¹!/cCF†r&)eœÔèãåÇßS+$ Tù"HD÷À_Àáß&W“Ù% 1û­‰n!Ö[w9C˜%ôìÆ>Ší¬¡icÜ&gÓ¨íAÂb‡;0v¸‚ %‹×ŽcÞ9ÿg‡aÌF fJ@jJ“càabß_]ÏnA |µ"ÍÜzÝÖnÆ-Ìœ‚ ºFIÓùUfÆ•ø´ –˜twÒÒìž¡2WPìÍdIžÀìU\]™Å†b®jï“Ì÷?è÷¿¨Î|•^w_d¯(“BJyM%â‰ä‚ñöÊÚ`} œn7ߤ-'>u{„”m/SmM3„3Z‚M÷1²;»è;`¤õÆ«ª* H)&¤¹VˆZOc(z­°ÑSu’Àþ¬6}Ëx0ÌÀP¼ŒŸfmŸOYÒàÚ„‹›èÑ¥]^X®14Z‰= >PÇ´Â.aZ0TÇdœ ÖZ–¢tÖ’º# –è”’eG2¤£1Sdã‘ ùbúƆ©ºÑF%UœÛ0}ƒŸaž¦oÌ÷á4}c×KÜa4ÓPæŸS·Ú½aÆ*´Ý{ 8îÓrwyCÃÝ/íæ"ºmv‹á¦Ÿý¶!mFÜ–1_›í9çÚ¨mÕ 'cVr5˜$±®4ÆW{hŒ›½Uôœ­8ÙT;ƨd[ZØäyè¦8ÁŽª<œ¦ÉÊ’¹ÂaËMHP—½‡œð%ª(ó’( íÕmu}Iú³-U@ V&ss/ ¶‚2û´¹6^³äÜÛ«•’‰np¹ÝMÊý|&¦õ™’¢®Z¦ÑÏ~Ǥæ^{᳥烠hó.íˆÒA³ Q†ö4ç^@äï‘CÅü{!«,!eg¯Ÿ|Ó;æ³ßQéŽôl(ñ/9‚åñ]ö©‘26º^ýeüzD½ú3B´P¢ ™•Öª7—¹» úáÇï¶¹)w"`*#£%..5H ã‘ß·+|Ñ𷨼ü\Ôh—2„SÑ™Ts2¾§¯D¢ôõÚm˜¼å±UÅ­rA*8M»Ü)Ž#GÔxƒÓÎvT#7&¬c pÑ"æÞºøë¶Á½ ¿A#qòò:ë¬ ðÝ~ƒÚIR)¡¨™æ…ï8ø÷©<€EŽžÒOx¸aÀ4Å@݀ÀŸÆYÏ.ÂSË``ÃN®Éù>œ\“Eõ‘—ä‰6‚ŠO=2†7注"'Ã*OÜD æaÄ£ H{åP$˜DÊ÷tb¬-ià˜©¨sªy–kYk®¶N~ZM„õȘ2O#c¾eÄ%—¶ÈÅ<»3p…r“,Ä"¦^Rá±/udÌ©’ù8FÆ|uåÉ©L¶XÙKH¡­®_é-·ôæÞ½ç»??—·ë¸è»/?© ñ‡n͵Üóf­Mèäêf™À³\Ê_—uF»¯>¼kt¤ü‰ÏSiÖ–+…ò³~’ÍËËN˜9AìáÍÄŸMO»?[1ºEnÉw¹B`ÙésrÕ~ Dà*ã¦þèþ¾ïÁ椻{;ÈÎa)d½õww÷îiâ–<˜£æFß}F¨ïó¨(¬ØÎ†‚ägí÷ðåD«?´n®î§ Ìû¥?,æq’ñ2u¸­‚Ž)[Ò‰'œ¤‹n*Ü¡hÅÞLµƒ}jUJ×Î%>Ÿm8ØJòÈ$F'•1‘#îCÃÉpŠ4%‰ñ=}~kKØçG•cõÒCn˜,41¤qLþ8àgãöùxòù|Ë<Ž"ú@ÇÂÃù `œs­QØ+ùØ»Òå6Ž$ý*øµaM°¨ºþózvc1;ë{æÏ†ƒQG—D‹¢¸eËËðcí ì“MVã`D‰>(4Ùa FweUVåý¥ Bµ>¿“Ay>¿—hÎ)í@«@¨ Bu€ùm«2¸ÄÕŒh«ÁF ))òùÝÍoz?0ðÀ/\"G ÏmÉð:Z"µ ÄÙHÌ,y8·xVa”p•ŒoAŽ]¡„Ån(a–[×Ü*î0kÞÈÚûÙZ_ÖmÎ[ûö‚vî¶öíu‚:aw¨ÅHzK•Í^ãLiœ=òWàš/c:¡<#èƒrð˜_L”…;iÙdÔ&(FM¹ Ê!'—špE;.ýlZíÊIÅ Ç9®ÜGû`^Kбøƒ÷<E‚ëEš”ˆM–“| !$aTÏÐâæ†Uáñ–3…ñ»Š¸’1›˜ÍLú¤YÌ㱉æ’ïÅ­+†ïí±³léå2Iæh’Õ†\çßÞ¼ëKŒff€âáÇ/ —¨ñllÁP¸|ŒÁ0¯…&Q!Ø G—3äsúõÕ  ósŠ>˾ªÊy›XªˆdÅ(SQσwRHé÷õ ±%ôÞ¢è+ÛQÑWv§¢/|ßTFv†ÈXû_÷ 5å¤ÜáëÖFáÎRÖÀ ú“ÞJ©’šj ¥vðC±D*ÈF[—x äýÕ›¥›ùbµÎ1Ēت߿ÿÒJŠ–B!HQ’uÄ;ÙŒæäéÛf3mÀL Ža'2›@¬×–TÒÊ ”WE3ƒµÝÄ ÑMž¾{j²Wäíïpoý BrÞ“xýáSZA™Lùd+8ÉÓùÚŽIòeqUŽñDÃNñäéâ·°ÐÉrN@'²Ë*0bXÒodhÉ‹½é,ÚqÐdQmA)Їé{mNëý­?ZÌ„¶.V¬äÄÊm¾ie™ä;´X§¸3…Ⱥ¯2—–›‰„>‹!S™IP¶ ö´$hˆI© Çó›¡,&%Û<‚ÎF™êÕèë0Ú£OD»`••:hòþãÇ»}Öù!Zä:3îB"Yp ¹[\Uƒ÷6„¦¬÷DÞ4Í®-òæºFÞÜNƒœ›ÞëÄlûÃ2²0hõ‚î@«/¶F3‡ aØíÊ ÞAÙ³¥ÿÖÚÑó¥ÿ :­ô_4+ðý0þΈ׻OŒí²YžË¥Ú&šaœNšvÙŒ"¤n-jz¨L+½cMåÕí]‘ sâaCnV¿Ü$ƒQ‘Ð3o:vÄ©gq¼]ê@*9Ä¢é®=€:»ï–ê‘>g µëê‘3Ü1Dº’S|Tõè` {Ô@éZa*]€oöý2œ*åéý/ù·V",gC„1CªKýÇ 
ÂŒ»«Ñ]YB!®åœëýYŒ6ì„®âê`ö‹©fãhÜœ"ž‚­Åˆ';Mñ4ŽI+f‹–ÐÄk|'GlNav®ÐRg­GOŒ·šý¶:cÂU**m@Ê(D&*õÁçTz¨‚Ìžµ¥½òD)c“`"°Íze¢nŒhØ€Š¨œ,õ5fE$sªè0‚T2S…­œ«FdÍ÷wà*÷ñ 'šã¦xR&"vá(|Àøt3ïSüθuÅnfºì†C­š» 'Ž#Ίijãœ"»Sˆª­¢†¸¬%QÜ3­¤W`¸hBƒõ‚@žjÀdtˆÂvlæŠ Ç²áX& cz¿Å31QÅtœ³$ð൞X—Ae¢:g˜åIÉaNóˆÀßLÇQ„­%´íCÇðåsÓÝsÓÙ¤‚é¥ä™* O8Ù K Éã g¡©”û—v¢g!©/‹I.@z"Tݵ×ëV-t„ª‡æ{f3CA PŽ‘ #Y^º¹*ëÀ¸¯bª §ÂU¥›«ÜZï wÔ;ÔécÍ—! &[³ð\ÝZ—ëIî®ç“.J¨ü@E Ë ÙR–à„{\—À€…—·÷©M¨×aÖdìmÅò\ÊS-B= §R„ Qâ@8„¼ä¦  F^Ža‚xõ€– Fùb`JšúsÀ”€’>i˜üþlTu´îOÓo®UvÜÞl¥IQg †&·ß:5=­Óµ‚ÞFš(ff¡Æðت“wu8c×3P>6í¯rmÓZ9%ù]ñW³Ù7Ëß¿‚Û¾ßÖ&n.¬~øª†å­pyç­“å¤RˆÀ™RºÛ ëKì'[Þxq_”¶†§¬VDwxÊDQÓ¨‹ëb`ï.\ɮ֬Dæ+æúÙÕ»‰ÇœÖJÚôWY9'éT*ç*J…ª,É¢ô%Y[ NtÈ,ÇJóÇk5‡f#Mm/UzÇIƒnã(Îíƒ{Iú2{&}™N\_Ö†ƒ¤BTt-\êRH°Èåüœ©ÿÿ¿V¥H[Æ0=´5½ä:V×ïO“ÛùàËö²ýƒ’å´uÌpŒÀ½Â[[õÇgì #åŽpVíR<$žµøAk<‹ËiųLé †`8FI¬®Š¥(Ç4Û0¦—‘8ªðP&¿ÑF DF˜ü»l£¶;’U„&K‹^ ·Cål…ŒÒXU+ÕT{¨á€5&º]68bL/_ÚáGÇ`ž5[R,Ñnc\GÏÚáÄMÄÏ,hø~¤#˜âýöœ]OÝ|–>ÀÀîæÿ¶É(ÀÕ|¡(†ù—ž7Î3 ŒMB¤ŠH•`ó;_=‡+)™™«wñvbàôKÍé ˜±{í¾rÇFßÙ,ÔÇÓe©S/ƒS/ƒ#sz`yÅî˜ïœgL N¹LdБ,áΙè Ôûç=ØàŸŽòSNÇ)§ãÅçtX­5æ¸6¯=Ü2NÕé†*˾X×Uf P1)wfÛ»ŽÀ4Ù6¬ä:’re#,¢ÞcÙ"üð÷—-‚Ýr®sÚòâGEAϳ½¢¼[¥¡â»E"¸ë%'¶1œ„ð† a:²+:güÕ †Á£3æÐÞ ïäOóËÅÿˆOï¯ê¢¼¦‹K»œÔÖòùÝÜ}ÞÓ¡ÃL´Ör͈S^‚Ç%X`œÁÞÁ倩Ý‚â9·urèÚZQìn­ÈÛšòq*¸vˆÔz×H­oC¹_ÌæëC&µ ì¦ÎŒ­hsÚµB^rZ°]öç`á´cæáT®1䣚‰ƒWx ±öGÑ^¸ &0®-] %+ËIÒÒ˜*J–T޲cIåží…7†4°k«6å ¤ '’%!;IlU9¬ŽŒ—ìÏ)°Çð¸Ôãd¾ÚHaTYÉ12U[ôðÀ„È}(½M‰àÌäp™³+ƒ&*mœ¤Ï4ì^¼pŠžªWNÕ+/«z{Skåóög~àNÜmDiŒ1¬É»o>çVŠ™ávê;Ü7~+ÌñˆäÆ¡ˆ¤n²¹Õ¨ÒËI¹ÖñËÆåXN{³$™— Y#tJ1náYoj{Ö§=t”ì:Û‡”±áçÚ¯²î\M$j 6…r¥ÕаÈRR8 tÄD&•MN鬯;¥8w–+i:áT(cR€Ïø…køýŠx宎ªŸg†/‹Tžâ¯‹“}á±ØTÙÑ*zxñ‡‡"H²¶aúϙٮsðï`ðÌßÂ$hõÚ²ôì· ­ÑÅ#=¿õüÿíTn\ÌĹVˆiw)¢i¥#¸}º\aÅí=ñ7¹c…P7{ ßÚçßÕ$ßv¾×ZþâlŸ½õWo¯n.ˉ^ íι’µäÈI»‹ ëuõæm‰Öí퇘û‹›¢}|0¢gs°ë‡4dÆÒ°:‡îâ-\ž×@Œ0¤Ë|U]§yS$Õ5Ü6ßbõ^4¬ÔÚ<ÚŠqnx˜iu³ŸùW©Ëh9e¦Ò -÷ÄÐ:ãURBJD¨l˜q:U2ŒÉFÎq 5L‘NIħcñt,>DZèêø~¢M=ÆQÌmJEF„õ ˜gˈÍÒ‘Š•Ø$†åc{‚—°bѬœª»g²NË]‡]6A'»l8á<±e³Ž!Þs!ùQ%\0öÝÀ0 )1nT† H˜%>ÂâÁÉ–-¬l•²éYK´1¤a%©Ö'P&ˆs¥‘ K–8n$Ñ&s¦x2J‰c`9A#Q)Ú®6X ûBôG·©?²QôGL¡ùx\/ˆl4}æ‹Užº‰ßMtvêå9¥’Ê ·6œbƒ“ ÎNvä2WsI12·œ>È“VyÒ*·q¸âÌ!ºÖ‚êI¿˜V)é¬&¸­÷ç<±ŸK™J)«³~¿‰u~ÒÓ‚ì«E‡HA‰ûøûëÇ·å4µ _3¡8ÌœZ:xql?ùêý‡ò?¿½±·}ÉÐT Qq0äȹP ³ŠZÞ½Èw†kéÓuCÑ¿Ü{Q2Kg”R‰¤8wiOÕ÷p/Zƒs„«$‘I2"³·$ð,@¨2ƒ‚älw1xåsÛŠÁe×bp¹»œ·®± “+¨%ü‡×t`JL!F3¼µùΣq/ ¿ae˜ho¾ƒº_²›ê˜ÐJ:)f/ž@Ëf|}øvÝ3 ÎÑÂnE0JìB@“Ù|M‡u¶ø4Y«UÌÑ%ÂÁf—<{âÁ!<›Š;«*¦ñ”2m¬Òˆ ­f —ƒÎ¬á¶ÐÃD¼alÇÖÀÒdÆ9̆#¹•B«¹EP¨ô~Äsµh·Ú\Vÿªªµ¾ÞÃÙU»@·BýËòÙ÷ÿŠó?«;q¿ñ¸Ö÷ÎfÔZë“Ò¤Ò¢C(;â|P$¤\1^UÌVy†<Õ ¯,f‹Êáwð¯Ð3˜$˜£¿ÿôÝl±3ÿ­Ýg‹W¯_~~õáõ⽋ÿ5¦êçE9É‚–óT<þ]ýÍÍÁ¦#ó* ‘Z÷[<‰4¿sïooç¯e³…D›Ww³ ʱN‡ŒÀ²!R{Nœ•™8Ðo½’2'gµ gx.У Êþ(MzKŸÞ³o«¸òk—¿×]ÐÊé´¼V>Þ7ÿ^ŸÅÛKîâíÙà§ï~8{(*_”¥×ŸïAY„K³?ÎÖ=ËèÙ···ë_ÂxÎ`O^Ú^6_ö½¢¼¤x»Áª~Zô³œ×Å´ž¯¶ÚãÕ[¯yt÷Åì À»–m—Aã}ð› ~öSëïÎÖ  u?9` P²‚&¸ø+è§g%² Àã}^À ü­ÈÀú«çÕS¸þ´l"÷Ã‡ë«øûÅÙTþúîíwo«ø®1a?| °CßþõÃÝß*Ÿ~ÿví{^€l máÙ®göû€ËÇÿéG˜ëêÇ;ß=ÌÂüâ¿›óðóêWWeÜßÿð«üùìÛ뢉ßUMúVã›×oj~óݵŸ/®~³Ê?-_}¶¼ü竲ÒáS‘¾+žYüi¾æþúÓ}ó¦zñý÷‹¥‡É~´.‹«÷…Ë`jÒÕJȯ?Ô_µVzi'¤Ä#«wøOwoá P‚Ê8QËub²ŠZ¹`%ÑwŽ·:¦ä#’ÑÌ0>„/M†Ôvº¥£¸´Åi•Ž¢ùOð^‚¾l‡qôÀP"¬¸ëp¹0‡kz¨‚–'â¬Z5!­ÉéáyXüZåÙüz¥\Ü×ÿÁÏ#Y­kÌê›–bn*"ûÉMí«i©æ˜9Ъ7þBß5ݰÐò¤K4ׄ¡`™ðDÎ~›çesû7–ͼç©(ùXJš’$ž,#&ùBH¨ž]'6‡404Ÿñ–‹3…ñ»Š¸oLÌf&}Ò,æiw@x e>eì_Á¢%4qO$—ŽØœ*²3Fh©³Ö_k([Ò©t@aGM·ëÄ „ƲÒjLŽd³±ìQmùF·%Å\RœDÛ Ê4+⥔ÄTÜ0_yÇ"ëY´µ1¤a÷–pF Ì‘˜$ÒRIBNTÉ+B`ÒæñØ,.ª‰†Öˆé˜¸)ž–‰]8LÏm½£[Àn»ì/ÿú¸+Ë¡ýêîŸ^©3 9›Ž»‚yz»üzusû'ºË¿­lÌ‹Æã×ïø£ÿ ççÆÛŠEX^öÝüz…{Z,æ‡W4ݹÛÞöÇÙZgXzÿR¬ù¬ëÕMûwpT­}¤õ%xÖ`VüØMXýƒ©MX«ÓÓq«1ûDZýñ)ízìŸÍ~ûÛo`š=r'´búx¿–{饗%wÚG'Ø4a;ÆqŠ2.È O «r Ù¥R T’ñUښÈâ”R‹Y´á½Ùϳhã(]ÏîЙ”çN0.(¢Y)§Š[7/t«äàS¬Ê„¤X&ض„UŒÊŒ ˜ ÏÛÇ«K;åS¯SíSíé÷МZ«÷ç LeGsÒOšâ¡ W‚†Ð•ü:‘ÿÝT%”’¼ª,¡•QKÈf(±®€|q3•#²‘ŽrŒ Žˆ(™®Žàu¸ çn –1‹ †¶ŸšÎñóup¬<9ÖH9tš¡•q,ALtÎR8“#Wk‰ ©S@ÀaÇê¼¥Ác—óÆ5NÊ·¡Æ<y­‰Œ4 XŠf ”ÕÆ"^ƒ¬2‘sm¥óÎv¬Ú^'iÏ2ÇÚK‰˜Á¦D¡a6¸€”ÜpìYNŽ»±Mƒ§ªíƒ‹ÌQÐX[¤Mˆ˜f2GFÅ:„À‡½š«?[òÓqÑÓ †1Sn j— v„1Ó­>{³;Šx&n-]ðNDÕâ&[ýü¬~~öÞò$!üw¸Õ Ã,a5˜§Þkþ4óO3ÿ!3\JeôxiŸeÊŽ> sJÁ 
(%<ë|Om‹€,cÚm‰ñ‡÷`†b•iδ’Ç{Iðq&Vb§¨=þx±ZkdÎXBì…ÀÏC.‘#^K^ E‚–ÄQ-­îˆ˜·FÒž{«[å . 2œ ßcdc[Mþu4̶}yąšÄà)örp‘åÁF°tiñÜÚØª8ć`-tŽ-É?×ØË©eÆÓˆ½<Ã#޹ ˆÿÇë¹zª9ßÍyvªo=&¥{ĉàÙÉÙ>zg;;¹º{Ô¹DlÚ3œ8!ø³Œsu$k+˜>¤%æB¦‘ãùÜÚzGU~8Œ2X‚$$ÁT>Ú$£4×ê‡U¡Îwâ¾÷h7©E•ˆJ1¹ B•ÀlÁ4 ÑÈæ±?DàA{™û"¨ŽxÔk$í9ЕסpÎw=\'iÏ9–˜HíM@Ræ°?€¢G:Çy.„òJEŽÚZJcðd-\dŽs, l>„K"  “(…[cìãùcŽ]øl¥‘Vš^:Zkéª]ø#n-Eô¸eÿÓ‘-æ°&¥µ„Åeö L¶hÅ»ØA„ü©zaì¹0ÚŽû±lÕßl@>läÑ0¡Tºµ ÓÀêÿô‡ýÉð:ù0Ì£­1ÇEšQ;‡Æ”×ÉØ®ÀÕܧ}#FM/ãçBHIÅuhŒ àA’öØ’ T·³D.ci{»Gf€©’®ûhO×ö柎ÀÖÅ/®ìtÁÉÆÆ'MÔS!an' ^BÞ”ûhnó+‰6ò²¼‚Ê+_ÔI̯úÃßLÁe÷×µë5ø?XÑE!uPhX ÿ˜~Ù‹ ½ap˶²¹+s—÷xÿ,.üžÉ©—†3¤ &g# Xí,Ç"¬ä|Ó¯¬ hfe/$üGޱ„ `¾Ì¢ÞùàLÇ“ê ¬@ª.Aû7Ñ”ýݽRQõ¢É«âãÇ n{•½|Õ«u ¦ê‚‚‘Œ~øúE/[76]×¥XÕF´jL¿Ì~kñE²ˆ/ãΔBiàOG£wÑLæ†)̯'gÙ ïF·“ÂeSØ©3vN1¬7Ò¼Þ’g,ŸåI•ÇrŠªi!¤sˆ9ÚÖP‰,‡…gepV{éq!§m“—IíH`äÞ¾¾Åò!Î!Á5ÈaÞû«[r¾Ú2½óÚ)×J/.©ˆ$ºû’ lg™é… Ã#s þõ“ÏG7E©ˆÞçQ'ee¥GÔ©ãý²Ú™1A“’Ճݩ‰©wï_Ú  doû“Ø­||óv DjîE.¨â˜&nf6—ïeFÝÏ}ÿ6¦¨ÚïϵOC«T›iåQ¦ß€É™Ç­Þ‚_J\6 å>³`®?Ì`O¼î»Sö˜ç°\Kóöˬ¨ÂK‹1ì @V×£[ŸõgUlðÂYÚ5Ø<„Ò£g•Ù×›™æ4i"W½–Ü–wÊ`xí6•âCVšxÕˆ—,ÙAg† ¿§òIl•Uz Q–pµ·¯yÅ )¢|zlE!I*ì¸ÜP&UÒ°1®êØ÷Ó+ †¦,ØÅ×hù¶8ˆûؽ½‡›ûÿw«¬}K\RÊeˆÉªqÄ„4H®M®Ypn»µo]ÁsÓ`íK¾£µ/ùVkŸãΣ%i«µ¿*Ћ6n¶íÅ¹ÒÆ(yß´§ÀelÀÕlÇ0܈fj;ƃl²r8³ÀÄy o%\éI„˃`ä«‘ C¨Na£¦÷enÍ¾Š¾}3M€—ÉÅüì"ù;4-ÙP¸z÷¶YM“Bë}Ø]û!šƒ³ªRˆ65¢o‡¥öæ°×~;œT–П±Šzû—ôv¡5Z$s³‚çŒ2™°äµœJg'~Ã\y=ß“Jxéßf“êbWê/ìÙ&0ÛË+²õ~¯·H#гœ¯ÃlR„íA.LÈÇ;?µY-bp~J9fÎ(†¤v,%LΑò`¿Yo ÉIÇóÓ5’ömHë˜{Ä秇i ü¬ÏO¸‹rv Np2;9®{t\ŽGq qR¶Â¥‹M.¶ºE²ïK[ñft ¾O —³±×Ê_f·¾û{+ûÁOmïîÞ 7?8Ëpf…@’„ñÒ{u^"0Õ¹ `ê™’˜Çã§RTašaÕ¢² €þñË‹¬põoœf£ÁõyÕ‚«¨Pxâ…™ˆÐdìsZýªgÔàêÃüýwW&~÷7òwþb Þ\M¿Õÿþý¯ºß½þmú¯Ë¬š¹¿Ý-X:?Àâ×·ÄÆW–d/ìÔžPÁ9g÷¤[Å9ØôˆYvxHç¬@Ž`Ouðsà-zæJ¹Bà¢þ"‚TF.?žý äÏâïwQÅÄu4»_ÞÕ/gÛ››q~¶ÿåÅ«³æY`˜FgRtyágåµìãÙ`5—×ðÙ×ãñâ» ä³ðçç¯ÖçýÓø(í¼.Þ=qœÕ‡(+ÑÅ?ž½¨ ™^¾ê- >û%nŒ‹·Îé¯"Ó@Æ$Fòæ ä½Áu8‹øTsx*¸ÛÊ늢Ÿâî_Þ`~¿RL‹W¿ÜX¸cþj4èçzgÿëí`zõ"ÚÓ5±¼zï`u\ý8šþ_/lÕ æ|´Ê.å÷òU}>»ýW?ƒÔþç©Íß-…0éýVÃåüKýHöËW·üòìëAô‘¦¾ÎÞœ¼Iù ú;/`ÉVnbò«Ù“Ïf—¿éÇátïãÖ<u pú~²˜}å«»ú3z+’®ÞYap¬W†¥ºz§H¦èϵÛâEùVc†PÂèö…>¸L2q¯«œÒãÄ \ÎÏ™ãQ8­rvOOÈ,¨•rÀ5vW]ï®ZPË™"åJnÜÿ%XV‚$°¦žX{w¾ô{x Àr1C—"§ŒŽV«EÚ…4—²FÇc¾n¥ö«íײbÆ9Js F‚ñ±ÄÃ#Ç„AÌ®'B¸ ’3ER&‰i7ä–nl[À÷]é{5ïkÕÚfnàªÍðîþ•ên ý_Ûc—ˆñZ»Œþp¼TcsZ¾¯å^íö‹‹w5Å—ýÊ+5²ö¸¸Fãã~•õ·çb8iùŒºúÛô¸g Çc¶±~[óVýaó{0áÚ¦¼ÔØø@HiK8û”µsÚÆé`vŸëmÏÚ{ôÍG¾”n#{†3™p0(uûlWxwöÖ;¸%Ïô­Ü)Ã`ËkçNфΊrGîVrZ«È”®8Ûžùšâl#ïõ½†ÝsÖØe¶ñªƒŒ"Гj!#45”$½5‘š¶n]Œ{êÙ*´Ä„'¤)ëZBÕc¦b?¸Ãvs†¶VTáÖÛ5âö í&ÒšR¶?¿9q[žcžm>´ÕÊH“©b{LÜöÓ¼¸—0ÿŒmmŒa a.Íu§~Â[Ú¾Ú PጛvnL-ÿü1WÚ¦ŽQÿÕ»”Ú€åuªÖ‹8‡å@è–$ÛIa&eâ=<¾ÚÔC˜<©ÂÂLMB€E·W¸4D¡7ï ]ÃÏóû4=(ËZ¥cHa±¼@&÷9¢9ɵ4l2Šá^Ü™“íqg;÷VVÅŽûC™ÂŸßÀÒ¶¾ ì¼?º˜øA˜©€]õß\!{kûëúƒÙ(Ã×ÛoPˉ‚ Ü >|>_É缌P¯?Þ¾A0¦hâs;*óÒVèGõ¥ß®ÿf̸µ$·Eô}öÁò37ÿM”¤XCùâî†qß7ÑûYèoåηdü–BvÛÙÿšXNsïPႉ0 ™’QN m˜Í"ôqÂHUMƒ®qú«ét<Ù¨ &WÃô%Ê—(}œ¬ÙÊho Á‹sÂù)_Éá„_Ñà\HÂR’‡Ý-Tèë™ ­]Boß½ùwÑZW×~‹*™6ÇLyÏ{ƒfˆkYæ…XÄsâ@€¯'ÔödZCŠ@’iծɴª%™ÖvøM(»gËÜ“bCu9gTj.î§ÐŠs¦ LŒF#MÆ <‰Öå|'Ù‹Y=Ø)·éÞ]¹/Ç‹™­;s©“†…±]kTí¸ÿºöûR‘+!œ3‚kŠë·á÷ÄšÔäûU ‰yÃ),Š=ßÁ´2""±ÇÔ€Iδ7Æo_HA“‚4,$±ã:[—ã%4•4a¼¸L®@!^$ ´)+ ‚•Ú”•®$¦ÄlYR©ìP²k¥`ûV1/„½mäȼ[acò,Òè±ÂЍ-¤K)x é’´Ž1™³FF‘Â?#í3ª‰vP$áÌHb-÷^P§;]_¿Æç2€äÛ7·¢3áÆìðõÊׯwPð£%—à™?¯ß6rU'pCj¶H‡J†Çd@îƒnS‡qXÕ ´RÙ¡cw'TcHÂÓ)<0ݹcwNÚ+3Öá< ¥‡Tiì$¯‡yÒ´L^ ËI$ìnµ@yS\ÑèE\q’£²¹L¿ÞdÁÇ< ð¦Ës“Or–M©ÒŽ9»‡hÄEȸB#‹‹"X|nå¼{šŸîy÷ì ú¼[χã'öy8|–¹ò4éSM”SÒÃ)éá“$åD%´u™]un’&iW¯õè^³Bz&eÌj¤ Å“J¼¥µ4W–RéĉiB,â^ïR$ ¥SÖžwl-¹FÒžSoöºK0âàB"?HZïð¶øp½3$ÓÊö“QÉÄÁzg¾ 3MÄO² ³qà8!\%Ä_ÿ<ר±@è+¬¹ ‚ AiˆP‡09Tˆbпœ©à9´Á&! 
ÀåÞ×ÿ§)'>ÌÚ—!h+rðƒ¤ó tCD[A¡(¢X«|¸AŒ0•à%sEŸ”nçf©HUJÛh옥”9•¹A¼`±ªÆ2d 9.<ÇEG|øu’ö»¶sVÃÌåÀ6á6€•âòœáŒÂ‡œ&†¦SÇŠõ•¶yUÉ´~RV·Ôû_ݶx=O¨hÕñënû<#»ç±÷ÒüuX´åÑíîAˆM;JCX*ôìÊöoÆýáë¨ÆKsAšs*xxÁ2 3Ä<¦¹¤Â1b?õ ýó>œ*Sɇ¬4Ö6ý8ª@÷gÉÜýa•ŸE ?»›ŠžýcR?…]ž¢V+e[æVG·pRϺf1íºQ\>–Ò@K=‰zÙûõÚéå³ÉÌJfŽøÄ0 鸬#¹Hȧº–ñ˜³´_t¡¦è͇‹Õ—Íó<-Ÿvubj‰;•w¯ÔTå½ö©¦†vŠÃÞÝ\ï pDÅm•©zƒ­î¸sðíÐ +)‰J \⃖ۯóÒ0LœEþï?#AˆÖ¦ÝW“=ŽD©ß&Q/¾lØ®2 ”R¬IÊ€È#=ò=ÈæµÓ:çIâžÃ]±ÌõE3NI_øƒmÞŠqPºí›·¢žéÇ‚as¥uAr&4LN8rVi”;ãɇü€ÓH–°®ˆ`;a´mT€Ähxd–™kê Œ¤‰ Ì¶ȹܢÂc-©³˜›"r38Ö-à­T€þ-xsÄÄZ_˜«Q<JÁÊx `EǨ!y4ˆx3¦Ç1¢nÀ|Úqœâ[ÐŽã®ô¬c]~¡ê¸zÖÑ@+£- ÓApÞ ê8a:íX mÇ ÖŒgI9¯'¾ô[$¡—ÙT¬`º@žÄÚÒ\0œcŸMô)ÌŌ΅³¿QïÇyL ×5îÝò Ë#’J‘Œ%ì1š³/—ÁçEºÐJlø²²ego‚ø}ïX.|o¡x'ìç„ñ>ü³’Q~í‡9`V™ƒF{¶„¸?ê\2‚I36©’à<áÞÝcä§ ÿbyÃE¡4!á¼pÜ Ÿ»m¢»å±O `µ…ÂgS³Z*þY)i©´U±º|ŒN}Γ*XUŠ)•ð­ÕPºÛ–䞀ºÁMBIÕÚÙì†ÜÆÏ¾ðU,‘¤) %ÀǪ¢fdz«4—$½[)¼{··vmØ9ÄØpßÍddc˜ç&¶]ÍEì†&,2(d<ó"€ËGež CÙ¦0#H 9̚Š˒Š*¦˜Ûñ,Ì·Œ*Ce?T´}ýêeö¼cµð`áé`rBúmŒ9¿§˜ã\ü×Ó›Mù@/¯6æ#°êßi B6=­-¤MœíÁO½Ê·<ŽßÜìó]B¬ó [ÎêÍÍ¥8ÅSK1œâ©k:YÇ¿í:YõÔ0ªvqæ¤áXÈ‚ +Eœ@œW¡9ÓÒا…©êèðªN¾ÿÉ÷®¾?x–àÖ'ìΚwkýºSqyâÕx eøÿ³w¥»‘ÉùU úaHÞÍfÞÙ€hwí…]Ùðùc1 *³ªÈÖ4³É‘FÂ<–_ÀOæÈên²HÖ‘uqº©^@+ªêŒ<"ãøâ ¦EDÃ26²wg·oØÂS«–¤ViïöÉjÑ—)­Ý<¸DÖï“馈Ÿ¦¨kŠäûœ¢/£•‰`#;¿F)“™R¢ñRÂȰQRΓ´:‚XÎØ¤a{Øn!gkÛùÓAÚ9ÊwÄ“ŠõÉ K“ú¥ªÌ‡ ý!ÈÑC8(ƒ9V¯ÙÒ¶[¯L•0³ÖH%ÓØî¡Ý2M–4‹ÊŠÁ)‡95!q&*qTƒâÝШvS:"ùa· -¬–çĦ<'R1 ¸,‘EÆ Þg6{k>ùÑ7å)Xw2žNÆÓÉx:Lã©Ç5#“U„òHŠ¥‘Z¤©'™ÃE–9·2ψ•…ȹJyªÕlE„À„e4‚HÀNŠƒ-s'¯a½g ¯÷ÀÄ“Úðn€18V¢Ü¸sTm2—ÜãOø"HÙÈ \+%bd¨paÞ¸ÏCEãç ˆ’èÿqßMöï?±ÅóÎæq‚•‚,QE£mã58›Qã}šóÅ[Cô0Ô &ÊýÍ]VÆ{|ý&¥Ã¸nˆÓ4zA5×1ú T¤}ôZ‹êèq{4ü‘ l½*rÿÙ¯óÇ€µó÷Ý¿b…wáVre…"æòãÝÕhi¬lØ;½›ï‡|—û|UÉ¢»òz'Í Ã»>BAE½2ÌþŸò{ümöMY±;Ɉ“›¢4§auàÝyµº$áåÝú-j÷^WW·%÷ñwI¶õª·À•Ýz³zuó)OVÍ›’‡+,b˜i¸bç wLó œwÌÞ;å°"ÅÍÃu9åãÜéú.”í%eÿ…í†)åL×R q ³ñJY³÷üS…î}9ï¶Ts´“ÉD…wN&ûá§·¼…Þð7´Š -Òkz,Ž•õ¸ŒY©¥Œ + aÃvæ-ôDϼ!JsÀÉÉ(qÚ+’e2MEª¹+\_æ­æ]^²p1 uZKZ ¾£L —àÑ,\ (úª"bf*dêoÞwðEÓùÎØZî„bNyŒ¥èKð]NTfÀ“æ…ô#û¾ÒÄÁµ¨FÍÇ\;Þ.ôÉ©y÷o¥ïà‰Gý ¢kïÑ QL€Ž¸vyȽ­î­1S7K¥(ËlDÃPb£b½X3Õ¶ÕþزkÇõŽxÀåMˆìq'@`è…ˆ´¿‚q¤‰õ§n¢Ê/”Â(!J;ªò«^Š©j½@s+bGèO¬õªà8h£§‡E|·t]éŽK‘ØôÜî!%‰r ,÷’ä2C%@ Àys2Î\–ù¬HÐѯèøŽfºÄ[”%¿¿Ì6ˆ_ßÒ'î÷Ñ ¾¾ÛÜÜ=—Kç¨J•fŠùÜ¢Æ<(%A Ê¥ÖæTâË>E÷ ·R(ÏñOîÑ%‘ž±ÃkŸò–mRÞ¬I e5£˜4ì’eRZ µ<{Lž—ìæáÄ´÷B̓à,"Á¥%=´"Þxs?Ë’ÞH"´@Rˆ‚@*‘ÖÂ+”yc~½A.Á©P÷ä ž<Áwá š)Ý­v ß0j\ýam©­ó7fù‚iX0®Œµ6‘#¸Ž’˜ëñM¤zˆ<¶¨5Ìf‹[f1A«¤~B¯èÁ88méS¹ëjjŸžíÂã*2–IC#²ºÆL SØ]çÿžß÷N <%³÷‰2Ï¿H TÝ}Ãþ1 üùÂþ±ëb$a™n­Ò\œÖiM½‡r´Òx *©’W´¥y15h Æ9€mìønný®¤yR]¡>˲Áx…ééØþvmô.¶}++OFAôà& šoCØCuH§)£Ñ•nXGh*Ñ{ïÅôroïY*™Ù{1MXÍt{¯gR9º%FüÊ- áü®ŒÑün !?÷wþkv̈à‘ñÍÿ{îéï~i˜9øMØOuÀoþüoç»÷^w?Úx¤³Q×I…löÀ¯ôãS« ¤ˆ`Q³Æ·ýì¡V'C¹Xn%‹Á˜6 #cÌÿB_à_X?ü ­à_Øøk É*a`dYõ¶£ž&œ²ë‚H®VÛâ’½Çv÷14…•3ÖÖäãÝÅÏEG9ö¼?¾-ãÖÒ,gtY ðK±='m ÎÏŒR¢½Œ»H3‘7”q33°Œ›™Ö2nÆFï…ÃK’Ï`LÑ\(•錣R"S—‘•h NJµËiNÁ…nuÓ\G—g`›ü:Q뺓~²Óß»«ýµí$ñˆ»VÏÞ¾õäz#¢À_“°…¦PÒ‚4ʎ϶1²W²½È?æi΀ÒRð9­Ð#'èYHC憩-¥Ÿ#Bê =ÇTd#ûšë²›:üèßÝ>«}•dŸ>Þ7 æd·AØ)8F¦·àbÆÍ*Ô"x—ðD×â¯7[¿ÿ—o Öu9ä‘à $[§ëÎzËÓK+ôõö sºÿ÷¿›Ñã®T½MyʧÛ;Zn"©8$‘Îiáöú%bœ- ª äîòî—.§sØC·Î¤’BŽªL Øèn 3éÀÄä…D‡+žåíÎd.„” Îd@U r&µmu&¥j\; 1”B–ÑiNÔ¯óZo&ØEà+ÓPǦ$޹™sˤ‘ ø0…0LÐV° .ñà‹–ñ[)eÔøç¹þ‡‰×$ Ã+ÃÄH3õŽ7TiÌÜáƒòÍþgi³”ºMÞ~ü™4LB6xœœÅpa=;¾Çƒ ëé7ƒO¦ ¼®óG”Ê3×xëOÎm ›-¥bCqaÌaLõµMÒ‡ûË`ùr^ž‚Õ ¯S(¯:B’è{Ë}‡•2öñ[{…9àÒ«œ8- ‘Â9â2íHšÒ1'm¡lGð[x]4Ø+0Ð\Vk…ÂèÕäªÓZi˜Ç³Øùm oàB·)[„˜Œ6Í÷>SšI!0Ãì–±"?0$Å;˜M ‰b³X0cm”KK!¢ä²“Û2ÛhK`C|ÀïžÝåŸVeÀåöîoM¢I‹‹Ì 3n­¦0gÞd¨üÐ[°œ¬—“õ2ÒzáTQÃ"ƒáƒcêRûÆËkuojß粞×Ê:1‰oyl?ß1–-"HÄ,“Gë3>ÎL…Ž/¾¿Rñ²RAÎR© ­¤JgŒ¤Êqô@pCºÜ LÿtÎÐØOƒ \w#Î,·êÌ‰ÓÆ>mìÝÆÖRòî±B÷ lì[þmAO‘‡†×‰6Þf |ˆ-\);c¿Cõ¥&Ϥ²ÄJV™å”¤F„y–gúà6…Žæ,Ì8ÚØb`dCˆöæ,lôzªîDLÃLžÅÎpSJÆ*U›‘;Þ-óÙôÀ íXS3Ö­’X 1’˜ycm” •Y„\ Þ´ÑKzŸâ®jTQØBK“ùÉúå±JD´´ÄÏѹ[½ A2€$‰ÙWÁ·t9¹MýGü#€ÜBŽ/“…7hCÖÅO;4ûàçîúmå²¢pè\ ¼P)à…ZXI Ï-s =ãi»J÷‚º´I¥j‹v 6oÌ­£I`#z¡…•î{¸]í¦òñúl{“€ÔÞ^* ¿à?uåF>~»ž9er[­}1±<-HzOåÆ8—w¬§Ô6åÄÐþi¢½Ë×SˆZO!'-Â8ÄÔÖ…w¤%úêróã˜êŠæ§nW¨1r“¡Ó¬ÄÓ˜9’rá“ÐÑýÎ{ÜÙ «ÇõÀÕãº}õÒÑ«WA)…âœ}mNö 
j“ÉCÏU{(ŠÚrCLŒhÂýÍ—FaµÖ,FX%Ç¡„ÏwQSï?c¤b5ù1ÍaAA·æ¢…Ò…/p!ÿiu½Ú„îR›Ï×¾4¾vnxÃtÓåz߉'©]©Úz²eÈý1¦¥j6,¥e"‡%+†åtÐææU|lth¬¦’M!€ê^ö!T^C¸\\žrb_W2¶^7ë«§·[è®G êíAØÍ Ö,Ž1 2Fí€o')Ы¸4­ï¬¢0ì,tr»ë…µ -I AcðŠÉ¹Sî>æ÷·kÔ_gwyv™> –ËDR1YoÕM¾Þ®w_©Yá>ŠÑ påÝYÙ“ˆJËYŽ÷`È_ôÀ Lîªg×›3ü‡ì[ÕÝ­¥¼h¨µš±jrö÷Õþc9ûk5ž$Q…à8%Î9‹Î-þe[’yÐE>—%šjöг_²%·-œýéíí²:²-C¿OoS·Z¯î??'¨/]„Ù}ô/ûqþu7ÌÕµ_?dùâõQºÉŸõò3~­ü¯m¿õÃ+wEüúæ!ÛMAñM¾.ö'—«‹K’~JWë»ÔAlc€ wÿ³u«%òÿºm¶ øuGÀûìÉŸØž‡÷¿¶6C²³`©ä>w$sî@Wb Ÿãt¦­)¸¢")=7c¨ÒèÝmݰ)ºìOKMWxÝ– ¼4ª+A8IuÕë:¨…¦§Få4œú<¿ºðÞâ4âêRv¼\¯k5vl¢ZšÐßÑŽ±ñÑ‘·"B =ʩبáqm fxœF·´.kþÖ¯]u×T@!vnξgu ŒëhÞ¹aFK ðºÒ »qÎDÈÇÌ™=gµ Û=ø•ó×ûa&)Å®ü 5\ùû/-¯ØJTµ/ߢ‹@mŒÔÂŽâËï+öX¾ühöøpK¶ÇC_öøí^‘Ç?­ÇQ1ÇÇï5®µÀ€ý1¼µ@‘³Õ&`k"$Ötsõ‰ga®¶(M<ò߯íÑê?懛-Äv—ÅZ]7刳 ö5y}Ä@`Énª×Á“zßþV‰å*sI[pSQˆªL6̓j¬{ñamµÛúy™YÙã  ûÏ»§þóW‡÷[©•ÿ!æ¼´™§:K3¢%h"³L+þE}nµL7Z·ÿó£âˈG•±YmB"0`úBîýð‡²uä&†ïk :×’d<ÄÕu$kg™`Ƥóñ9Z°¸!b¶I%Æxۄѧ5a:b›¨Ôʬ€ ç•"Yʈc…%œ€U™Ïô¸mòrHÓnÇ(ç¶P„R¸Í À¸!¸ËÍSË@èù¶‰•šADÁ²ÉÛ®<ß&½YÃÒmâÚ±ÇMq_¾Vå3)±YQé­'@Q?èÉd6qá U6"_eb<º…‹S¡Ç´pŒRÆXDÿk+u9;­‰šª\7Íø‡ä[+™3œT*tšS£BLÕk\‘I£mêálïmî©už ¡åûìV#ÐùIž¥L¡Û‰~£¥ÜHVÒŸØïž ¤'bäªóa›åؽ‰¦å}ãt ›­:­wü§h‘{µØ:fBJ[‡:Nu3"oªi„…nT%?ÑíûÃ÷ÿÑY‚7ßÓ[·øK Ô  ô逪Ô<<¾^^šw!μÜh :ìÖäÇðSú¹|»ú½\Ölìwˆyd½}õñ’vYwí—ëq}ªÜXÝ”77-Ë—¡¬cÚÛ9õE!„f9#RrG\Î,²ÈDæ ×Ȩ̂+“¢»‚(l·ÃÀu”û4¥ùQNber$¼jçª6d÷hÈô‹e‚KÝ¢Ðu鯶ùí„Ȥ*|o¢Jîo6xÞ:¤-ˆŠ‹ª»ë'$ÞTÍ84e(a6‹iKÅÚÛbB­g/Åi«¾ö’&‹óh/á¿ÐÐ`9“Yâ@žª\JfgÔZJ³lJw󮼂Ÿõ–×Û œ/ÐÕnêˆWdÈ6µìJÎdóh0ÅÔ»2Hp^2Ÿ ï$Ï<“†(—["sôÿA¤Á†W¹7À ܦóíI®¥Õ¦{Ѹ”ÙrFnÔ= (îL¥L¨œ-Šà‡cbô¡*oÜh¦ Bp5Ævì7ÊJ©ˆÀŸ:¡ÉIÛÜ +Oi¡ „òÅšKc!|mÏn€ô'©±y·,±YDº Hê #&§ø/ÃeªL¢Œ¨‡î‰è3°J-£Ú¢ùòŸÛ v»yÿåM´N¹k@‰.ÚâV3\˜uxO¨Þ¬àº °ešŸÃ°Ç‹¤›°WÔ†Ó{õøf$ e–?û…p vƒÜŸ²¤eýë‚h;œ‚Ûy8A_\‰VHqÅ£Ú¯öÍÿh¿Ó^§ç0ª¤2_©ŸNÏ¿›~:%Ri×äfé§S)õý1½têÚèÔ´’úf·íþò‡òo¾|i<Ãè31ã~€ñlê>ØlVj„àŠÛfÿ2ZÎ ­Þv½}uUÏv½ÍëÈPÛnùpFõÍì+ßD}3qàÂ! ¥G š…VByF¤‘0…´ŸÉ\ÁÑy+ÒùTÂP04ÂiÖ#™lG }¤M*ùÀ¤u"åiÎ ×}’L¤òT´à…§Y.iã>/‡4m.Ñ‹ÌZar"K‰”¸/€åŠä’gŒZå¬;H¾"\#NÀƒ„k$_IùÌ3e¨a\Áh²?øèLbDä”{Í•,fÊNЃã†$§\å„×®T`ÉUÎÓVmp½Q f„èHÎÐt`ur wùEˆÅ|~ŒÌ?¹’Á½¤”ÑÎ’Mqeó#ŠYÍdŒã%fkõˆßßܬó}û2¸»}‰Ü®.V×D…1¦ä-»^ÿx©¥‘`MDv\q>eÏÇ Pá¤G¸•¾ª¹»@È9ª¥Þjî ö¸Wv%"2¡ìªÈ¾ª,ß9Ÿ*Ãea6fci6ASÎþû©»’–Q-pš"rÛš±ÑU¡Ã%é.}ñìó³Ô£¾”Q#´…W_kèé5žÎ]§ÝõÃÎÿ¼¾q¸ú½M.wî‹‹Ç ùÝ?ì&ƒõÝgWà¹'µE wuQ—Æ.uðùŽ‘ÚȈ…`zLqóläSQs³`Pov ñÿì]éŽÛH’~b~,¦•r‘—~m½‹m`ÆkL#y¹ÔV(ÉÆº ~¬}}²$%¥"ŤH–¥²ÝÕUEfäñÅ­+\ó\åP*ž1G™ìIRLw”W¦ù£KK¾ˆ†š¢Âý"çèkïãbX¯ÔíÇe””m¤À**‚(µ½R¶;P:Fª6£¾–»â!lÆŠx/‚ÈÝd›OÙò ÀwÔhÀFÔT³e^·Zú‰.àÝò6ZÅ1ExBרlXúÆÏK²$È=òÙ<[nK+$ÑWï¯/ŠêÇækÁ›(²Þc 9E…?qÎø$Už1Ÿ+¿;x)Ï¥q·©£WÍ–IÚ!Æ¢:%´.+Ĩy0[ÃnðýÊ žt©÷Ķ[¥9„P£èóÔåÝ6‚­¿Î ðP0ÁÚ½Ù¬š­x Yõ¢Ré@Ú_›rÖe2W^ëA%H¹Ä«Ž$ŽÅ’;¬Ÿ¯mwH7Hâ‰Õ)²ž˜£¹ô-zP«É—ø¤Ó4–0š.ÃÀ-=0f¹c¼ußÐåš‚£a#ˆ¢LÆ ,õy @DÅšs‚|\£¹r¨ÐI¥ë#˜YKñѰQ”Ì/³À蹦hPˆ²Žõ1†(·— Ûáu1Ä0a”cˆËy8Ç&¢l¥½ï`²ëqX_!UÿÔÀÙ.ã¹Ì™""1Ž@ŠXÁ:.Mt2ᆦi„Ü꩘’rŠsÖ,¦nçדª˜(…‘çw›bó“äö>»õÿ»F†W0ï Û[Qºæ~[DFÔÆ"…˜“<M ó…`Di'¬¤<Éý€×u•užVÿÈú²BéyÞSN(Ÿ²B9+Ü¢½>k‚™sÖÄjÎï0Žg´"‘°Ý Õ;Ÿ°ýMi… U\$‚° ¡8hä±UˆÙ€æš9T’bIMkA9“!i…­ƒ èðÏòk?¾û)ú¦Yˆ‡àòö ËùâŒØ»å##v»+§ün/n&ã¯uÔƒv?¨3f÷9ÏlK€qjêÖ3oE>Ý’a†»“*ù‡À€·ûįXmè_ƒ¡¦lû“ríLï+×Θ Ú†¸—øVA¿ N¥»]Û?!Rã?FB1cÿ¼nkVzøƒË~¥BÒ̤¸U™c·ŠM_¿‘(—eV"£TTìïWškrµ¡Ûì¡ýJÅþ~¥×Àÿ1Áx¯0–Ž;öÙ¢Y ‹Ñ€Ñ,-÷™>¥Šû]¶‡©E°#o›£y3ÄôAn34ÑÆÐX¡D¯ •î‡aœX&4×*„`IO3ãsŒ!¸È`)Q&Ô©Gœ×n²<óR eùx%ɘ°š€1¤,|ò @›ZŒawZˆýG(X@¬°í-¦w$ü«pÙ__ PN¸ÔL‹ºb€šÑ0Áµk@P1V«›zÎúŒq¤Ìì ’”]E¯Ü/zʼn‰ÞàÂìÕ}øqXÕÄýo€©}»œk–»œ$ÝΫž]‚ØÜu‹Û–~NâĶxèöàt¬~NLp$ÄB Dò8 ¡?ô½)£µ¨j\ÄQŸ€T4 ÙÌPÝ«(Wh„÷HŸ_E¸Gú‚Šoùü’UU¬þÅ·¾VS[B™ÏÒs GT²‹¢Rÿô+ezÂ*aÅ@s¡BèR¢W «Pº†*]Å$UJÚ]MÑë”Ütæ>ÜÜ.–¾îà&M×ûÌȲØÓ$¿úøÇ¼ÅüÜía+“³cÎNð˜eXš‘XHKD–ƒÑÀ¤Œm›É™[Ú`r†-ΰ×àL¡÷J™öžºW‘ç:‹‚׈o Ô œ£],è¯íB‹ œ]Ÿ§±è¯«ïÿ€·ý¿[` ë/þPd«Dët•Eï ³¬¾ên”]…ÓÛޢ渽j>nõV5¡Öׯ«kų šõ_ɹÕAlÉ~—Í¡˜:‘ M1‡„Qf‰¥,'`òËI”Œ©I¤*ƳøJLµ'Ìãvƒ^ GG0’ÞÑmG ~tð‹ÑBJ×é`ZÈžÀLŸv `æ+…`(ãœýdÉÌ%µe, €I*ÙKêȱ†Ò£Šâo!äT3ÐÃEM ¦N)¦A-Ÿ9Õjaý¤Š†/›=T· ðÌ«Xì¸T,ßËDíIÖnî±f3o‹(½Å-K,[7ÞË2[”ÁâÅsi“”á÷¼É øVê¨Íä„9MmžsãØBRËäî$ ¹+ìó 0T+èðw ‰8.¢¸ß`ûœmügÿ 
ªxÑÓÙ1Xö¯_ŠÏ)J¥E8Ç*Ýaµ§7ºbÕL®A€ç2ó0<˜Q‰Éƒ*FÈËyçÐ!ÆkVL$¿U¹›¹×(6¥Æyv絫¹a‡ÊÀ]0Q6¤8ùv—Çl‹Íu½ R %A¼fEqAŸqŽûJìгGJµ{… â0¨âVLŒ{rb¸1ˆ³ŸàÃ&ƒ+¡À„CèE1áI, {åAR éEI(J/ K^ä:¡Ëö†ØXš]IN1+ÉÎ’Å#¸y‚X åˆc¡Jª)3R“’¦…Aœ4RФ·GA €mXxª»„Ž™t'mt.­P¥•Æèp°ýÓY¡‡±øaËÓÛ„ÒÖ„¦±<ym¬Ò[ûÇÕ_ÂÚœ»Ö© ÅNjažâÇN¢”q¯«xõ³`2q5S ‹á^wuðñÙ°ùY!–C±â i J:¥öú&0õÿìYo#9’€ÿŠžÝ Hæ}föe0 ÌËΠ{óÐ(d)%»J—•òÑì_¦¤T¦’‘<Ò~˜‡>”v•¾`Ɉ`DðÓ̱ö¹©’›njèq¦i1Ò.C±¸ê¥£1t ƒƒ³l=į¡£aë»®#ƒ úóL·„¶ÍÔ /B»u1 E#†SLuæ·r¼Ü5H¾5 Â’Ÿ¬ØX£Î0Z%àÎ5âÓU†QkêçÙ<ۤф¼Ç]?-øçÙê…H·^ÝRÃÓ­í#%’,["—A]ì&Óô³Hæhß~†¨{lÚž˜üóx¨¥kÎsSë‰$GR¦˜H±ž›ãÓð:6G±EžÁ¦8SZbÏräϰéÀ"³g"¶G÷h€ËD;ÙÙ¥–ÙÚüMŠíîqöXL½þ:€Y†7HÂ{åÁaUÒë¾?Hþy&]ÂÖÍ=­'idÂe»]®-Hëÿ[yÌi¬I…‚²¤ÔW‘¤æG óy|7EóôÞtñ) ¥IŠI©Xu3u˜gü¼[—“qʸ½b"&íûQfœJz`òÁ²ÈÏýU2OqŒk„O(D%™ê£ Ùläš (À¨{¥*\‹P[(­Æ,s¡Z/BdO,…³$ìÍäSÕzäÓÎ.3F5ïªE#Ò HfõÕ®` ‹S ‚ÅSV‚i…€k*©òÏ(‹ÀI#–{-kç×¼‚£è¿ød¼/Ös°÷Fe…Úp’uÜ6^¥€hR뤵‚ŸE.…pö fÙG'qëàÏ»ùH ÇÆ‡)°^­§E<»ê©®º¢­s|¹=ÓNNsúã²êÇ»-æå®Jž<ûhÅæHî\5ØAü9„59†*@­ŽÍï¥ +'•@r‰@ˆü‹žb‚JKÂ"M§†6!e¥Ë\¡œé½×§w–çVA4e0AxèÍ€7hœ©³px̤ šÂI]éO"Éî)ÀY(N› ×ëÆQ®psQdáævŸ”ÓOÊbç‹uÁå+ªöƒ…6 *RaÓÞeø D’Ùï"2úOC)íèÃdåÔŠKBP/ëåòyõ¸ûÛ!Ñ<ìLèš$üDSF(ÁXB“NÇ‘ö0ÿÀ,"O†Ä‘=EÛ¸kºqŠœõxï(ä價UÄÍ£-EUUš€5iSƒA¡üsM£ÄÂsÞ>ÚØSöŸ1½\“ Jží’ßíúywÓĶýÀ& 5»L” d\ŽM…;ã+…¤?,¯ÒöÐþ4RõÞ³Èt¹ñµû8q%Qä8’ê ŒÈ\7×ÝFêê¿ËÑ"(œ¼€Î ]¿äˆo3„ Ù²€ç«¤»«Ê˜Ž· Eƒê 7½Eà¨G³Â­?)m‘HfCÁ Ì=úÕuY€ – ‹LY:×Ö»:ìÅFšº¬ª3$ØÏ²ÝÖa‰ÛA éàp’«E÷N˜Šéêjµä=7¦È)ŒÊžì¥ª € „EÓÜ!i{-øˆ¸*ðÁ2ðlá-#L¹^àIÈ1G0b™§¯Ù­•’IŒ±qžôží¾á©Î?ɸÂÂxÊÎf7U²ñ«µtN*(s¶[nޢޯÖ|4ÿ^>S‘K¦×pìE_ÓxZ„Ya•w´»ì$ò™ÓÉ %T~Ì’°ªÎ£§M „p5\R‚*9ë­„µ¯=JÄ^YÍ›þÏ<ú­ÎÞϾ‡ñjZÝJǨõë•@€V›óÆõí¨“¥_¿¯Š×_«übt¯¨Úÿ÷zZüóq÷ðqõÄŒPùu0½Žw“‡_Büù?þ…Þ&¡ñX‡dJ§¼ýaÒúPÐÖ‡n>ŒÍ¿Z=.þ«õHÖ?z:í_.ZðäüO*YTØý>›M'Eýó‹¿Ÿ˜åxó;z#-$1Yýˆ©Y!÷Aõx†Z5:>ž‰âøËÙ˜‘ú±VÇÇdŒQÁêÇŤýw‡ÇÆC­ÇŠÕYû+Õ¸~|_´IDý¸P­Çã#·˜jÔ¤¨~<·ÿQÉûåø.‹M%BõW¢BŸÆD"EÚIýÛXŒÛ¿]˜ÅíðIË1™_ú?›&+*¸¶g—ütÞ¹ŠÕt³6sý·…¿›Ô¡5vý'ÊáÝvþ‹¯ˆ÷ÌcÌOØéëû£Ï[H¡Ã”ÒXÚx+%t§e7zër<âNk,€bYuÆÒvø°˜ÿ6xˆÐxÍ#4æ %®í°®Òj!Û´Æ"±¾8¸>Þ¥#íˆ\Ãé)Ê­,7MüáÃûË„ÅÓ«ÚR¡DúrpäŽ7þÆq­/h„E }yÛ-ãCþ¢ß_£é%M /{”ÈëäáÈ;‰Çq­/xDU }Ñ*«¾x’a†÷äI¾E a Ë(3Ɔž—'ïx1c¢o‘`“Ò(êÕn2]gy §r2ßÆË #tÈFÕ‡ q‘Q…¬‚ï ÑfÂ)T(꺿L‡ëNΚóç“'=‰—1ñ ÃZ@yÎM«Ybe±l½Æ‘)4£¼–°£ðÒ¼“æŽ×RãxC¢<üˆ– £õq€%«È…‹BR;æa¯¤±pÅ£y—¦SþPªûrËH„F5-àŒ%V•‹A¬·'5b:‰dÞžœÃÉn²ÛÄŠ@PŒs‹(ªæÎMRÛ7€Á®c~|Äq ý!QÅÝ~‘ éC¹|ñxiR„uö(‰óbà"ðÔ†P·WP/PbÄx˼@ùš+ Õr.^£¥1޼+¼šŽ®Ó{ò!o¬NS(E¤Eº ¢Ÿ?oM¤§ÝvJ¢Åв³=p>õJ{¢¡iÇ[A³½”Z÷øH¥Ñ=öÑ.ÞðèPÔ‹÷)T¯¦ñ¾e*Þ×s¦sú’X#€LBôîô¹‡àÚtë$X@™÷@¥v]'ãkKáYÜï¢P,±¶G±©× b©J}¬âèf'UÎ&ÃJª£Û»éúuµX§åðI²úæŒH&!Ø8f•º€ñœÚZY# X©ÎsX{9¨µn°‘tšë`pN{ʪ,Ærúð.¬È’qFÈþ ž@÷#4?HWYM©P¶ü FŠÓ.C‰S´D0h™Wœ—¿É|{o ²liëÃí“~Š"êdÞ…V­ ¹Ù źšKœ'7È=൱s*T …>6´X?|-îŒdÃ…œÍ^⥒©C79#BFEë¾C·E=é³JE‹}i°_.ØgEž„~'¯¯ñB¥ Dž£% C F!u×+3½à– –ŠÒþ"¯ïO’i9›Ç ³eZ±"²Ù„dK9Ïši•ì¤DÔ•… –"ói\ÝÀÅø¤eå`\|nfÈ”¨wkŠ›PŒ¤‰ÊÊv³YÍöó?eAb*¨ˆ S™t{-gÏUC–G"žÛÛ»ìÕtãh{ù¶Û.¬¢†D,ã|À›TwÖ´KK.œêHš¡`:øÍMíÄE ÃtV½Z»ª’÷äü?›:•ÉlÏóÇÕp)VjiGR$ âˆcËgS± m2(D„Äã¥LmÝw| MEqUœ€åѸ›>Žç«u¹{œ”w'‰÷õ»ñv^ì†êm¥­¥' #‘_…p¼†Ýàò)WK:«Œh@+V­ɤ`°WÐ:[a.ý‚ŠCÙG%Ø.èîf3mÝ('$XTb¯›-]â€2긲R+AúKð2ΤsƒJ–9c s7ÍáfõþfÍÍS›% –¢±aX+[¢F«ZLjRf_JsŽˆ\kX.Ló§—f Ÿ>QîNÿ7”/åR[é)¦Ó Mb¸ gbñéw­È‚ À}ŒZ3’#Ø:ÄMQ•³úŒŸ9 ’ý}ËJk£ã D¤(ߣÀ=GO$•W=}(àê7­ÍîBZ¾Q(e_€:H¢óî}U»Õë„z­žVrl¦‚—¼’§¨i¨P­tý*C5÷·V­ÄJíÚ‡¶©õu\/Ù=sK“SºÎµ8÷³·×¥•Ÿ3.@ü:¦ìQ¼ÉP~}\)tò¶&¾anV$]Z#9ò_vW ÚOc“‰Y.«¢°³Ã÷·ï[-‚ŒYbnùšœ~ׯ«!ÜJfjqâìzÑ‘#D\«X#²yÌ»¸kî…µrbE±òsb“~Ûpœ«†³ß©ùRÆ1‡À%jWr=lÍÔóuCI{(Ú¾^øÞ§åÖj‰`Î×ô¨8ÏŠ«†Û»µ»‰úC †7y$Ç1¸Ma·v#s@A%$Îï1;¶Ê—%x§°]÷(N·n–ΨÄrš¥ ×7àMáN-‚ŠÁøG”wÛ.`²íö^X…ŒQˆPœFîDV´$÷òBM5%I$é§ØÛýBš`Ÿt¯]P±27=E]O%5¿Ê&5n†¡S¬]{XÈÏ»vQJÌ?~rŠtÖØŸeÀ[·\J–‚|Lvo3IÊ!Ûl^D¼ ©z¯Ð‚sx •2VÐ3ÚSïµtM+ âR(¨$2o)ÓÒ8ÏåÝá?ÃñtùXî®Z«®Ú,õ“UŽª½? HãÄ}xVÚÿA<#‚31+]Z]Îäú0çåmj=ýàX ‘)®~¥â¶×}bYaRBÂlíÇRšïÝ^G“{Äœ>!X*ÖŸ9_§¼›¹µ.'ã¶Ë2ü¾~± åРaT‚Â+^„YÏ«Ò ˆ"gW0¿„§CíŒsÅÉmÛ·…;–ÃÍb¼*N?)‹–…ð>U;«-Ƶ±ÆÁS®x|ý—2B×! 
(§ ´9ÈÁ.•K…ÙÇV×%®ßŸf÷«4ÔL!@LUD¥¸ùùbÔŒ ‹äõ.Ý^As„ÂsŽ!ò0Þ÷åMñádõú}f•@0z#œÄ_]pÁ\êip4!ȯH•÷u«;œÓÜË ²ç»µ¯ÎÔnÅlÐa97Q!÷+œÎ7ÇR" Äteò0ÖÍmÌ$ኀ‚©±ìñLù®9ì¼/·S댤f¥ƒÐ“È -Ú0°ãd+² ’BÎ’å=E¾â–ÓæR(=ÿm.¦ãbiüU6cÊ®C’( )ªWœ,I‹‹*|ƒˆŸeò^r/£±¨µp¨X&z³wÚ×]nï§ mõÞFƒ¤‰;õóñEØAJ0@³ŽJÔœ-/"67j—~AÅi-gŽ»¥ÿÚ–âò†i«‰£´ÄúVÿû†éß0ýio˜6k-r† ¯ó+ýeZ~üZ˜íbò¸¨–‡ãϯF}Ï›éØž‹Œ«J}@ZF툒Y]f‹õë·ãaÜae5Sº^~þn~áó ƒòÕd0^˜ukú‡ÙEžž‹rWXÚK¬™ þ&4+¦o'ÍÚ~ìðÜ!²`–¾Á~i8Þt_FSjÕòtwQr}}1¦' Ž:=8¦à/«a“Q~wþq¸2k{΀÷í¼(=âuÈÑØ1¿1´Œ›íþpuzfŠC˜×µf€qׂBIµ-úK`*YXwÆÎøé±yÀD«ðÝÝl×UÈáòAÜ€)v>K%p²ÅêêÄ=-xã–ëà3ÐXLžÃé!%Oj¡ÜJË0²M4ñÐé‰H§hb£¤ó¦Ù5b£ÑÒ#k4ëý=ýਘ1ª!¨2½ÓTǦ¦%mw¾\ß?L¯óÍM2é±pèfi¯ƒCR%’BÞh‚“.$ª°6Fúzk~xÊÀj,•PV`û[ñ•ëçm#Là{²tÓCÒ“Ý_ çdsáÔá¯ÞÛæ3=.£I:÷½p|.¥áƒU÷f# M5–@#iøæsüoz(…“„:::òð)¼ð`D°‚Ði”c†'HýGÓS³Xƒ£I¥È@§B- hÎ>œ#)Ì…o@µ²žJXº¾u¬81l>ŽÛ6ÓbÖu½>ÂaˆÄþk+u‰*(ÁÙÜíûW÷İE6ìÎö::&J¡ ŒÒ@+9X†àÀÅÍP¨Uê“Á³³¢ôÀ,âXÀw»œ–+£å)‚[Ú Õ9¼áCcúô´œÆz×·ˆÂ… þæÑcX”ë|qíúº¡p!CÎ6ƒpDI)!FGºÂÇ‘I(@JstÑ%º E"…/åx¹Y–‘J0%:.íL²°œ£4ef­€ ©t&F«qMzPE"s7®o–ƒCjª9’ÇAºrÓÓj™Âc¯nÃRf\Z ¡ÈRÔH¨ø:øÛ®úÚâð•ÕϋɮlR?™måE?%iEkÒ³¶" ¦ˆ 2Ê«ÞïqûÇeJ5·~¿b”A¾_Þ\¦÷_ÿÛfüºªÞè_Ö+3IÇ“ƒ_Í~6øKeS þç¡Êµ}?“ i@T–KKFñ×QŒÇõÔ˜J‹Åûœî_¯¤ü2ØLÌP¬ŠÅ`¾6¦Óöqn¬ÔbúåðN+ôÙøqaư~»ëÙ`÷ÇÆõFmq¬8„-˜­˜ž¡*­v]AÕzîÀœ”¯,vÏ›}¹† µ_¿RO Y ð#Dp¤ªÃvîÙS»ù"×Q [ɸ F]Ïûíûàe„«î³Âp'»WA1“€}°ûfÍì ÎTítöˆ¼Ç„ö‚*1yšî\'p€ó°“ u&»™¯!ùŸ :~7½Sò|hVÈ‘tÄ©W‡ò¸¢p‚´=p`sÃá~HxH< éJ€ HKç†ÀΞà™÷ðœXà 7<è^‡Ñ9Ÿ3 6š eã«ÍvˆÄo;óV`áø¾UkÔ·#ñ·flöNÒÞ« fã @ËÊEÚ¬·Uµóظ¤æ-ï«aZŒÏ^–ѨÚÒæ„UeÉÿ?î«{+«Ýø.£ãn[¿2$†¾ñ ŒÞsyW]3x<8©çΞrIðÂvÍÑúëѱ:x¨/.øŠc•‡K­±²³æE/å^7ú:øë[1ù‰‘Ÿ¿ îžËíÝý£±^Ìb<›ìƒáßO؆Ãûçʧ6ïâ`AVU‘åà~;,ÞÃh@Nr ¸ÛʱB ¡!\!,—hPAOÁžUñz$Ïˬ´†Dü„ôÞn °¼U]؇ ¾¨6Öj~Q@tJh‘E)Úö„•‘Q£»~F‰qFÇ^n%æH+1Ͳ¶9ý+³ ”,Ëò` Eãr™àbYÉ#Bn»n.jIëÜÛ3Uq³ãýoÕ" ¢®Ô7næ|;^.‹é`kÌÒro’¤]î "H²4ÛœíЖˆ€Ûm‚nöI`¶> ¾þDjƈæ™jNßÒF­¥©yvêkßÎJ‘‚P+[_ïÈÅ!l¨ÁÐ47t‡‘66…ú(ÌC–‚[þ|h#§°ÁÞÙ䇕(‹Mi‹¸F-Í‚ë9ÊŠ¦fY¨ÁAãh~Ž4+9ϰîõø dîLDIA!GðJØö=Eku.Ÿ'ãïÌžç“ æ`<3¿7@õaýx·+–›ÝOåϱ¢IÛ%¦QZà½*Å4¤èÁ–ÛrºŒ`G#[Ç8N}|‘üîJ£>@R$³I~Pbžƒ8pÕˆ"RuäOS,[̹iÅHB2z4Â}†;.Ï{Sâ§çN7W°Æ›ÁÊ4ð¡Ò_—¬u q\ö‡Ç94á RѤ¯»%D˜ƒ ç'¥B@ø[ed¹l)ÍYŒ,4-å:üî¼B…QJ‡¨óÕy¸7 ™Â È@¿•_c¥å‚#­Êá»t„ q@ý‹æ4ÃöêèÞR$\ÂR*ð‡6ö¤µ2\2ù. ÄÑhx¥sý°kšÇƒ¥ìvÝ(BB2ñN"ÓT¼9‘‘àe ÊGpc&`Ü,gôØ›ÉoV ìQ5Š¡2–8GöLüP‚ñEþ¨Îmªûb(Șµç2[6Ô‹7dŦTIˆJÓ”†AO* mwË6?\˜S±ˆŒ97Mz´TLQž|&w W44Ëq:ÈÌBÇΙ8fK3·R ãðnëtöàM· Ei¶ˆ‰¡/­"Óé.®ì¾?Bå‘Aab[õ„EƒŽ/Å «¸’Ë?&ý¼()^ •3‘ÀVvkÅÖ”#¶ÊyöžZç=æ¶9wž²U. Ñ8˜ŠÇVpÆc¥ÆÿOݵ5·u[Ý¿ÂÑsdá²qóLÒ4Íçiãxl§}èx<¬D'ü"‰Qv’vúß Jò¡D ç`Êmsu\® àû²–ÐÈ»*y²€»úÞÇCm–†q)ÒK¡åHùÕ+ˆt`zo¿SÕGšEH„žèŒŠo‹cj„V-ƒùMi| Ä’Ó¹åÐ ‚³'Sµ´Ö`1žéŠ^¢üÈ"w:¯›þiüÓ\;{Îw<Kx’:¹¾ø±pOýó=}Ìx|懲¦ÒÁ!h%G]YËU÷hØ•d«3±àð9YŠ‹Ì9’Ÿ˜¬¤„€š¦ÙÉwÐ1e±'Ýi;OµÆè.ºÖºò˜"_ÆBN±@²áLA–h0òð_YÒÓˆ´ò†!ƒh¶²€ Ê.í¶Ø ǘ‹¼¦BêÄÐû¦í:ì‡BJ× Ä.žðÕν,JM(‚ŒÖXží¿)Ȧ%¼S½¾¨u<ñæVÁ~xŽ}P˜H2kDšVÈ;«FTQl3N (zÛêXÔ"búØõªƒÿ:ñO!ˉÅÓh´š@¤°Ü Œ= ŸYôVj f7YÉÙGñ–Çìu€0æÜË ‡À«¾Œ8WlR;GHÇR³0hî=9AäÌïãÈž Ð!). =wé£zÿúM¶Æ°oxEâá¼AXB<âp˜+ú†ý0Ÿ"x¤TóÓˆL³¨eÄ &ar…-Œ¹Šð!ôJr´Ôüä$Ÿù»ópC€à*Ž>ßÌgÁjGkØ;Ö¡…Ñ Îm¦iÏÚAÆ(ƒä ß4K|<5iNŸuüà$b±¥&ñ!± ËßHCÖIdúûW–ÆüÅ‹îX씥Ðç½–´ZªXrª&Ër&T9Ïó)(KÔ¼7)_%§¦«(ª=õy‹ØÐ¿þpm+åµgt¨Ñ ÙmR3K- û¸šÚŒTPÝ{¶°kä»Y’t¨~9AƒâCŸ_ÎîË·Vµ•Ѩôéì‡Í§ÏgóNÙ­{töŸ–—g‹ßŸÏâ…ãÏë¸Ô¤x¦¤{FÏĉҳ7×§ñ#ž)?y¦fßÿÖÿ™f9Ÿÿœ$ÁÞÍÞÎÿu¾x>‹¿ˆ£8?¿x>ÿÍÚ½Aç¼E‘~ âÒ,wdRP¡W©_è¤2: °Ñƒ6’ZLnû.]| Yå*¡=VÐ…>­;a…ã›åÅ".í¯¥‰?,?/~_®o’@Ûùñ¿®7š Ù¬¸‰‹R%–†ÌÄoƒ½Ec­DªFÙ>*ò“…ï´uÈŒXÙ> Ͷ€F‘4,G,•TfE‡Ymf²«„¥€¤&q>PâË•ôCYªäi=Ä„²à´¨®³ÊNµj…]y!ù q`‡Ýš 2ÒÄsýÀªw Ø.£xèÀ¶f!‹Ò:!rYf©£6DßO"‰‘¦¼_,©ZÔëýt¼bPðM›ÎÆøæ.Þk$€Ø Á~~;iAXTã+̃wm4@É_:âÅ¿rFvŽï}RÈYøZ>Ïz4ú¤éƒ ×fXoôméÁÔŠÚ…xkÒHâÖcmå±ðœoçVâè¼ÐFÙ[7¬fµ'=½5Áþ¯”@rÊ^Šip'õ)±d€dí—bIÿFç'`óÆ:$W扚6ã³ì@>¤ª/ÀÇŸº±ž¬ê.!úê‹TRkÈb/lÜ»fXæ= k}-ÁdTe<À0Rd\—Ëõ X‚ðÍÜâ. 
”tÑ![±32÷>êÕ$!µAÚÞM_%”gžI(l qµ®Ô5z kå im¦e}÷£•<¸QÒA²€‚W½y“X\ž]­¢»ó‚).F ´Ÿ‘’5ŠÂU€Ö hÖ0‰ÈmCƒ0qéj¥u›ÆËÝö)zUnåÀ)›–iy ©¦:`Ž4'·àÀ(>âüJ¤œ €ƒ±ðÂZ—ëwØx28ÄÍWÁòfÁtbLïÔÃu–}‰¶ŸÏ¡ÖÊèàÔ¡iv¡ÑÞš:¦{i%r#­§6†ýþL©IH X´âÇbD–ÀÊÊZÚñë·/Ð2­|ãÓ·5'Eh¨!mù7b‹s\§Z™iÀb )ýŠfyæóÃݻڔèNA¤q|Y%>2ÂB‰ÞxÞ³/f# "[1ó2w‘ßÖO8h…–šÿh3qñA#Ê&[³Ÿ]zÀèbi¡ÕA,º—…©·L+!yIELdÅŸwîõðº—hG@XL’½ü럜2–iפë f“š ªÔCÆÅ³Ec?¬QñþûFVL  Ë gê7_UÀKgnÙ¾óØ*€<8YѲjlì`¨ÿ;o±˜A°xÖ@Ïç1­~Ã-IDÅHÅ7Œ‘ù4ÎKª•Ýú1 Ù§Q§a%ë[ ,ô¨ND^Yß²Uæ ~§ÝI+‘HŽÄÙ¼¸{µÖ¿z 9I¤‹5…VÖ“£à<2þ*p&²Úã%ŽŠyX…¼4¸·Çé¶$99›/.òQVÐraBÙæS@žE7xÒè—5ríÔÞ „zÈ“å ª5²À¶H9FÓikŽKwâ %=¥Èp' B0Üój‘äxÁXoµ´kÔ¯{Ð<-M˜P¹ã$óyÖŠD߇L’f#ëma„—1‚ÄÐ:úc<}UÞ¡- =M¡)‰þ±)°»!¥HA#fâ«SÄ×xõF ôé"‘V0é?ž¤tÍþ®#:ý`”äS„@jo€&dÌ @ég{”ð²àŒwH Wð-¥FOV’”Fxv‡èŸ: DŒ”<™ªûÔoýË-WÒÃh\Ë ô¨…ªdpʳ[†¸ÂC)¹,J†r™´Qz¦ðÆà£ƒê§sÛÐÀ>ôÆqgîAŸC­ã„P{=1¯Ÿ1ª­×Y¦D.¹8ö@Œ¶ZNZ<¸™Ò4þ T (xc ”a41†cSœ6ö:ÿŠñåIÿ¬wÓàa'î­ý2N[‚Y ¬µ™,ý¤Ì7b€¥Him™0`}aÆå¥AÚ©# Ug1$Èx  ½¡ùºØðJ:CIÓŒ9érÄ‹ØÇõÉæÇó³‹åºë(rûw4©HDƒH¶¯õgš‹WY Ï¡ý=|*Œ °é#†Éäe»û#Y¼ä„DF›ñ-c •Æ0é[í¬U¾ûð¶f9k†s3h ÙÆ#Ú{Ý,õÇ+‚ÐJò nÀ:¯ó¡-) q4É&`Y«Ec¬—âkä‘'ïˆàw¥9r¿ÈÙáâçHZ•Œgjܑ҇c9É‘üõfu~òÎg£Ã…0¸yÏ/½trÿo9¸^ÉÍ;zÊøúã• ˆkÉÕj•ü` …Pœmµ81Åâ9é+ãÃbxáëG;/-BfÅZ\ß`݄Číj^+B hµïÏÁÉMB-|JrÀ™æœäË·ŽmáVy6! Kž ànDcµœì‚ÒL·ËFä2Øþ}üºâ~[ K€÷ašî»Ì3…Ùdœ1 ßV(2 Ñy)a71¶D=rËd/Fñqvƒ”0ï¿ó r(­ ˆ²´5MY§o¼³¬qÐ;Òˆ¼Äpu0—~70 ‹@1[5èŒ} d¸„µñËP¨µ¶e!ßALŽI½®²€Ø:oµÀE«O|1EqñŠbÙ¢/é…îõ©\®Îï—Wï™_žÅïz¾¼üø{²$¾1ïfo~]^]uoêêòøãz‘­g/^%ÅìTÛßÙñ˪SÉVÿ•'~v¾Ê!tJšrÄ9ž“=Œf7:ÞñïIÇ»«ióëã‹+‘+­HLíE°Òô80šý¬0~"ÝF¥y4NdP;Eé—ñ· ×½c½Žëöò2"Ï`p´+—IxçÕ kÝ×°Ž8úëÏgGG£¿«_Þy÷]ôà»ânò].ìÑæ¦ÚÜW«ë›4ïqg‰[D&‡¼QN•+È‚'Ú7ð |ÅwõùS <•>îÆÚKíKg¶Jü3ž‹µ³ßeYé(©DHãIø²ùÊ›I(rfÖ˜åQiCÑ42i[c ·ÚƒÆh)®Äúöâc7Ä~¹«rEpµ}FÆÅØÇ]9Ó+?=ÙY¾ZiYðÞÛðXö,‹Ïõ¸ŠݛܓÅüüæ—@2ÒÙw³ÿë>Úè$Wêã:ý}y¹^œ~¼îJç/ã}2'¿(hÒEFÜô\/5z{ ¼ïܦÛqyÿùÝœp*D/ÓÍÆÃ;[ôxâ7?§ºŸ ¬ ¼ÅÃ5ÁêqåÆIŠ>”ﺧ’?w;vo6™´ÕeÏ'~¤p;±iá¤õΔ¼ D^ç4ƒ~T¯º¥nõU —ÚO¯Ïh³S–'0Zh™˜–Š=õ6)-©˜ãI69Éç_lÉõÝæÒù,a*E,”󤘜XÐKŸbõ)&¥ÐÊD)§<‹²ºÊ`€‰¤¬ELdº¡ìì%`4Cû ¶Ï©²–œ,!Ç.,¼#x_oOª"Bì1S)þŒ^n’´¶X8ŸSªÒÓwò6FºËÁŸ­.Ïfo—‹ÙÍü×Ååó™4ÏT¼¿*s±Îa%œÑe¬Ô+ïyýÞœ¢w³£íkÈÕõê÷?nŸþ÷lsöÝ¿>]à^Ä[ƒ2ö(Ð:áã-¥0uœ=¸ŸÜÎû=ÄWÊ›ÅM‡ïãÕlñiqy3» Dobà·7–7›?˜3ëãÝ¥Ø_• ;Q€$Óê}xaºûúÙ·ûûa+ô+î3GÑâ‹ÍÊÒ){tôù²ƒÑuG? 
"¸YuK먷à¾>Ú2e8¯ãÄj[¯%a'6±}»ÁÛZœ „”„ÙH=¦ÎÆÃõ¾ ’|°ÈÞ‚o3n6ã-4§F± à½zÙù~¨ÎꢈG5´ÆÖ{Ê{™m$¸t^Í—×;ö‘Ýßo”´ª\«Ÿ½ŠêîxÚ>–¨¿…}/®—³W«hù³?Í×ñ¼|¼ÑͱUÚÐ*0ùÞÖú{<Ÿ/ççp)ç`ELÑ÷0TÌsÆç„u"d›ÙÑ;£båoN2E唸\Ÿ²düë¶¼ºZÅDJ¯A¡¶Ãÿ7“Q-†§ Ûvxî«JòÈB ¤Èz©ƒTß]O² Èö„j—W7©ŽbÝo» ö]|³ßvŸßU1<Ÿ­~»œþ2_F¼û#³ËùÍW›Of?þýåñ_úÓwÇß}ÿú»7oŽ_¼:þá§¿½}qüòÅ·_%úfuº:ÿ§Ÿh7x+dtÏË{¦¢×Œx~s…JÃøzSh‘ï""?ûúèî™ãõüÓ"úí×?¼ˆ.óúëß}u­8z—¥¬-ƒR}‚]#Ú%ði¼¿¥¬Kªjš?ÕW?¾yûúÇŸÞ¾xùýƒAüªû3Ïgÿùçñÿïüwÿ͘FÆ *6TÅçtpSw|ÓCpEö4mY?(åuÄT\Nõ/J#AüåÏç‹ûuðú»â2¸˜¥Æì¸ dÄk~\ßþøòåß¼þkü0Ö7«ëE÷ËÜ’HmDR•]9gúŠqÌ£ïY¨Ê Ýk³æ•ÚóÈ•A%¦§¦ 6ë"ƒ‹¤Ž;{(’ÅçTONæÀKUÆýÁjMÆï[ª$µ©³»h¨é%ó® RÒDÉ!HÏõ.æÜ ¢‡¢T1Œ—žë³ò/U—FÊy9âs^L6ƒ^á\1’¤Tíùc‡£"‚.Ê¢¦çzÿóåå¯÷؇”*‹¿DËSk¾¿w*#‘à.êýçÛÁ.Özûàl«Û1ºìóóå¿SÐúòlÖÕ„?ËAõ&þUž@ê“r¬>­¯–Ýô ŽÕ»èW½™}ûê§ÙüÇâÙíS³³å:MæY„K—èb¿kz®§püéúÃÖ4©xíÿûë¿à³äBô@4ð­ý˜>E]Jâbõ©»0¯~ëîJg׫«Ù÷ß¼~5†zÔ+× ü&1.ÏÓ7ßþnöÛòæ—ÙÇÔ‰¥vÊà½.Í~zNh6«JõÅÔZ²Ê[žlJÕîã~£ñq5ì12/s?ãØûw¬Ñ¹> Þdèy\ïUWÆãsLTƒ{á™@¢ÈÈÝM~`Òu‚_3ªQ,PsTd£ájâÑ¢b@ÊÄÅ3®ÆNP³7¶¢Æ §¹j&µÂLÌSº“ q´þi©xUAwÜìÓùû#8,i‰6Æ>M ‚*ÜSÔ騰 ÏˆÆUD·Ï)¬I¼R¦x…ìF@=A’ <ÓÔð)T÷¤éf+,‰[6ïÅ;Ç{X3Ú’åÈ>=ÿ¸¾I'òüâ*…/QÐëø76‹È+6²0DŽSlðÀY-Ôš§¦„{À[[DS r|ŽŠæç€ÈIMìNçè“Ç[b'¤ím9FLËbX=Ö—¥F:CxÚp¶¶Ì/±Kú”#î5\óPu± R9Ľל‡íçwàêz•¦äá£ñ5iƒkïE®_Z¸QîKp¨²Ð3z"ì«.ä'Qá=›/.2´ [âP€ ’§ÁÛÊ7‘ÐÁëÑMC‡XFLª,J•žãqVw´É‚̼ “³A±Ç`Ô$¢ÐíAé 6/è€4(°Þ–±%„ÓEå­d–§©Úwï_ÜêÁÇ­ æà²g n¢ ,Ãf©‰zOF÷èãf傳°Ã´Ìç£'G²\ Ê‚'弉ÙÇÇy܆:Žë‹åÏ›‰@Ђ\@Æ;ð ޶AKiËùÐdëôeô©»y4pež,'p‚Gäd†Ÿ¼T~ï𣸵8L'ÒñmQÏ]¸°ô¸èYhÀ£âVb¾>` O)´ [aY6£ÛߟÎñíÓk b{7®´jPô,pÎáv¬ò*®ò ŒBŒðtÀwõt?-hÄGB‡¤up‡› À ©µÌ`âÙŽ_ž¬ÒÏÇ÷¿ Z·­C‰té(RDÐðJŽ‹îÀÑ…1³8eƒkU@2˜:xÎr•¢Ðöhü,Çgs'5‡‰£fD!aZ#ËÅPÒ—Í£ Ö"(ɦ:k£˜ƒo’È?HNøèš0ÒZ$Ýj„gLÄÀhUˆŽ€V²\H÷*ÝÇl’ê*,g½â¸= píƒrHØ‘ù¬¢REkôäGª§@‰:ò2æá» ØXö¬Ó]n-z‰«õé¼/8½R¬ÙÜñáRØç¹™kê§!ùŠ2aA|ˆÅ=Ì)†gᯢ´,=åÕh ‰QYKüÙùµQ¿äaÃ\xʪ–4½d<–QÎÁK ™@3£r¢]áQ{tšI<²œ*ª_NK$‘Ó;[Ù°2À@BŽÊîµxwÑ1›}‰‰Ó-RÉ ±/Xdþò‚ãí‚—ðI茇£/SÔ9A÷"¯‘ê©ÀéIÆ}!Ý=·~‹ÙyNwª r„ !Ëß~ÚÙ5ZÊ…xô\ŸlþqšœGÒklŒ=€ÕÅÅǤXd’@Qk¡Ùé/*@ï‚8XÝØº»ÞGYÐÖzƒŒ´ Ópk"˜ƒHGÁ—ÝÄ®ãñï—óóBY!ŠZ ¯ù"³µ ›°¢üÄÒ 1¼ÔTÇ£/(¸b,zZ²¬¥‰· ŒOž¥téb·¼ÕuÄ{¶þ¨x¼ÿ½kë‘×Ñ¥§ÝEÛÑ…Ô¥p΃dœ 08û2 Ë–“>Û·éê\fóß—ªKÇÝe—¤²««r¦ƒAÒS¥¶ù‘ER¤”H¼<Ì~ÖHÚ•±&‰ñx˜«þ*”S‰$è8–$g;å2œ)œÒ™‡ú Ü&!•Ñ ‰{ÅsÊÒŽÞ¶|æ'ƒAy¤ÃXV…Â{ Å ¦”ÜèÒŒíA{àAà˜‚GFR=­C¹ @P8eX4“ Ã{t£6 ¾åS MüPw ömØ òÃf–G›/¢Q•‚EÊcÝÿ1Ðæu@¨žÀž=¥ìÌ’Œ{À±‚sžÇÂ1K£öõÓÒ;ŒÝØYftH8æ›%; ]r1ÍÑÅd”=PWÏ£HfVBü² §ð÷‡nöºóp&éO« à~y1Þ¡ ö€¿Ø>ÃȤLA&à ÚF/¡D('HâáüÝΙïH Æ0¨§º=¦óQ>‘Ô–pÃ!×Ì¡ðè½ÖoBû‹„Pâ@äÄñФµR'¬G ê´í/$5çX2Ð ÑîÚ³ÚŒ“$WÕ¥_P æ«ÞÏ~Ý$Ïîü…¤üþ~V­3‡›±ý  ›#vµX® °VÀK£`v:ŸE‰V(­.Y+@ìª^ÜŒ3×CŠ`€¢Ï7 …yq®ÞawõYmÆu½…éh¦4h^î¼w3Ž‹ï«†«þÃz¢}ønU>Ü\_œ×KÍu—ïg¯‰ «±³Õ€ä5£ø|2…û|Õ\øÅ E¢äqÝúêR˜÷³W¾º >߆£­uEwÿm-ùEA ]iÓaþ¦„,y¥jõ¯û*kb”4(w¥ÀïÇ¡yzíùx{Sý¥ùÙôëã¨În¶d2ÜmÈVã°Ó+S_¯Ö'Aæ#™˜üÏÇßš™q¾Ò¢æ š…„ê†Ø#+Ь¶1ŠcÕ8Ãy2ç7ªV¾aŠ3Ï‹õóO¯7?“Ðÿão¢º] þÕÏof·þ·Ï~qwZdöÊ‚[ƵZGdAãX§Îb- ÝOQíùÕù‚‚®Ó•Å‘µ¾ø¿?^œÍËÿru~ñï½2 =pCîHDFᬇ¡-[¦¯ý¯ïˆ/ëZLŽ\·³»ßoüìß¾ðòíu3#÷”\ºËsŠ/‚úßùÙ¿Ð_Î(0¼!+è›5ŸoWƤ _û/þên>ûz~qAø/¯¿øÙùÝXȪ³ÿrsÝ,–¶L«Ž)¦Oç™Xˆ é†%JÊŽ¶!Š‚Ž÷³ŸVno ë@ævágoÞ.– Ø”žÍÎoóg¥àºd¥†!ÚéØZ½¢}踓IuhKøe¸ðæíÁJ d&ކÈ:þ‹ëºº ð¿*„ON… šÜ¸ï@¾ìûÙÛÛë-Ê~^ ~»ËqX›Â¢ñW¿ÑNó¡ SÖÏølÉ•ÕÌ)Ë^,”aFẢ0X"¢g[É$ç¨bVʆ¢b™ÀÏ•¡M†â×­f·H‘<6¶±èZt…ºlì×÷³úÖ¯P\\<_ñóönp?ìCäy5»¾š-¾žßÕŸèõ_(þCžÀÕ¹ #dK—ž ±ü}#‚å+÷äA?uv,Í ;]€DÇRÕzŸ¹2 hÖóvóS¬¢(#û‚þLb;¾rLŬ˜FŪ¦ùÛJ¹Þݪ®®®ï–Ûų»óË DU2ÜàDReõîLËz\'¾_VFIä„U&†—‘ }¹8›]œ»ë/‹Æ­ !jy`áKc‰»;Á¬Æé¡¦ @} _m¹€8Z<>^]߆ ݂֑Ù'ú§º KÒüN_ú«ÙÍÊÌûf¿fR«]u¡÷ã†Îq™ÿž~Æd̰`˜1Tá{fìãtLÄ p‚ˆrÄhØ!Ñ’œû7а<¬]†¥´\¢HÌ«Ûܶ1 ãdB3ú`кzþ´¨ÌЙö#P­­×@¤BF¥0 R‘ûM:å-23AQµX·®ò¦µsέUklõ“™w^Ñ«ýª—¬BÃì2é¢H[v¦<~zý:š~:%‚ûåcPÑ/ê¨|BRð»­üä/nȯ¹8¿úüme.Éw|µ¬|ú|ëÿë¿™÷áçƒ~äYð&~¦qóY„3g³O׋»74Ûüjü#NmÏ7pϺØÏf¿TW»þöæõ|öb,—_œÍÞ¾zCÏ^¦ÊÃ#Ïf_>Ùit5ã¼\­ã…¡P¥àضwRJ¥è·ÿó§Wó«æhæôwcç÷ú,ä#æ³_¿ç^ Ù/5 Sfp‘Z×1éÓ›U±Dâ)ÜI)¾{±Ü÷Jºx÷bþ+}pYÕôÓ»^ÌœƒójÎý¼iß½8{÷"L‘å×(|÷â³Î/oiÊÃ_öwŸØò“ÅJÇ—¾üRݾ¼ý|ö¸¯/ÛÖZ Ô¤˜Œ“é¶pNêšÆ°ÐÐ[ßÓSÎo6Ä“‹µåó(åòmW{пå˯îÙ@_ò?ÞGR­ 
JºóÃ¥ì;õ9Œ3Ð9Ö{{Û9܆½U°²õRu4[£DÈ y=[ωT¬‚Y‚D 1O?U A£4qÕàh·óaq¡‹PÃu ½}ÊÄ<6™¡¾³c²éÉ*3y%]© çí¹o¾7º>jÉ©]ÌBGàì³&¤òJê®YÍÂC§Ë>Ë}›WRXîcg»%¸Œ#¼ÏáO$õd^u,ãìþöcí¾%HCòOï¤ó ñÀš°®4›D ,ùÒáŽFÙIãï”:M±ck|Cfµ’ÌZ‰ V¼×û5­¶ÎÊ–â^Pž Û˜ô¿ªijg½nÑߘßòvlG¾,yÇ6‹£R±½wlGê'](!9‹ýa °;¶£E½Õª¥±ÆÚÞó 2‘X>é–íh¬I} P’IÒÜá—Œ,ç:†ŸÆ=ì+‰îÙNoÕäÀJ4d>ûzÐj6P dËp[ D1 4n𢖌²ö#ä©&¨uÏcÓе*Ç`Óñ:&òxfåéñìHYŒ<ßð™qÆqû̸ ±’ƒI`Ü`0ó̸݌ƒg÷q µÑʨØT¥qô'g¯{l¤R¡ ·)LÖÞ .E5Q+Zcèq•Uœ£iêÊK¦ƒVíPÀ<³ŽO[¢uÒhú%kœ0,*YMìJ´F²m«Dë³J´ÆŠ`¸D«môPÔÜØªª-¬iUA^7r°3¨Ý*Ñâ8P£eK­4[VÈGdª•²ZåÔ/œþlÍ©_8i4i5ZÜÏAÍ›ë&tUêa™Õ# #5Zm~úy lˆ=øŠJ1_Ô®-é³­êĦ¢¥>O[¤…¢”Ú†vM…Õ8¶»ufû"‚EZ…sxº }3LF©0”> ²d’Hvš…å¸ÊÉOÞ¢mIíkS1ïx¥k…ª '×À¹¦ñÚ@äw me9gšŠÂL^Ý\|»ÌlWù²ÔägG»G„ä&?Gê%s°´E&Ærœ´:#ù9ZÔ“Ÿ K¼”Þ6†<$Ý;8§H~ŽÆš’ü$ü¡„~¾å¡ÝŽEák…"+÷95¼] MÃvòS”ŒR>€T„3öˆØl£q\Lk¿úêPÌÍ…jò,Óàc2l¡“Šhâ\Ýù\›3Hj?Q4 „» 2¬ÉÆ÷Ø  5\é!õÉ ì¤vbŠD ж!dhB*AÆ, c2ϤS¾¤”|+Pcõxr›•L `@­3Á€¤úŒådƺ·䚷Þ7«B«ª-£_Ü ÙzÙXðüê¶©ëÕ´†“FÓ+Y%´fH–Æ¡QÊ0ŒdÛöÙ#Ù˜wöÈH gŒj„õŽÚ…¼‚®Yá[µ®< Ö dx½•a¦?Ã@² wïpr;0"SͬƬÙ:¼D ú•èªp™:1­ñÈ› ™j°vÈZßHZßDÝJÇ$ Ð0¯š–pWÕrÚyx$:{¥zgÉ)P<" à ˜Cåð²=w2(oîŒdîðÜ‘n9 S€©V¦•¼ =¯eŠs.æŽ`[sGŠÁ¹ch™#Wdw¡ÝjtÊ«²s§¿ÒådçNMZvŽù9Ú9¸¹Åy«æ¦~täÏC wgçxŸCVs[_ÔÈ«8« §Y8QW58z}Ȥ¥dç–kÁÄÙ9Šk*û^‘©ò¨BçL…c.#9J~$:“ËbÎ ¨ÈyEJ(©ïC w«¯`{4£f53M¡¤#“ܶPTÞ@ἫuÝÖôz‘¦¾Ks<±úª’£!Û#ØnK¾'ó¼ ÓÞgÉíþÁvEIb€F."õrœÊ[£O_²ÿŒ;hñ¹L³‘“•¸$ÿcèxÜŒ±Œ{ªà<Θý;€'/ñ^Ò¤¨8í(aê{CFW5<ìæ8¢ÁÓQܧlÎcLÐ 69›ž¬(“W¨àkÜCÆin'0NÉgÆÝ3D+ ~¥ŽMUöD§­^Ýr \¡É+]íFrÝê—–EÊ´Wã:!enÝj?ýCëµ01OÆu/ŒŒ­ñûqŪep°÷X‹<”“V¬AH)W °ŒF3„Š,šÃ‘:Ü0¬Í»`!‘ê•–¢¦…Ø.UÅÍÕ~á@gŠ´Ù}ÚÈjœTê)ZŸUÉE¤`vuô§ÀœÍç¡©î™iŒôªj}ŸfhlkÕ¢&«[;fmCCËx[ÕÖzDDw’³iëïŽAd¯¤ä–Ø}]ÊjœétUM[|×Ï­Ê»GÜɪ¼ËÙáÊ;lQÑÀºh•´ SAU«_9 B…fCU«b»j` òΔ€du”bë6&ìêmÊälÖƒÈÄ«ñÔ\»yåæ:ô%Ï›G¥n(ŒíQ'×:!µ4¦€º–¹Tªp5g¯¤jéµÂZ–Vh´Ô¾) 4a*L¸DØuüÂfGí;ÞkÈËkw][{ILôŸ÷_F‘Ú‚Q®øêþü‰˜/yÿÌ©ÏLó-7¸¥Q’ñ8¤RY¾e>/ú)$H‘tDŒB^øšûÈgË÷ä¥ §¼nû8:ŒÃ1N¯Ñl´ë¹¢$W”—Z¾†`"Sš\±]ÉàÍ8*ê‹Ncã›’•Loû¦ª4d A RRIäÒB ÂçO`)ªæò|± ©_½ûDà ¬匩-ú‹ûßÛ)lGü-Ö„xC1¸0<ÎC%ÍÄÖ$ޤŸf ¸wæ³Ö㔲ìØrGq!>\îË·äÈ=‡¶“…~¹/‘ôÓŒV ¥UÌ€Ñ8ìxû®+)³¹/ËŠ3>d²2 Lpxé4 “Y²pSÚ ­V\†³—bk,cÆVvëÙsð¬ŠO€€sqlÑ­1&8+Âeǃ:«„¥õ™é(pÁ'ðöÐÉ•bi,gˆ½·ƒrŤ„¨B‚‰E= 'Ø¡!öËns˜.#=T¦/‘ÇÔª½"9¸A}wm;®Ç=¸hŠEsáo¿Tîü¢[ßRh‚ªé]-¿}ûíò*o…LzdÆr˜Áúoïå0‰ì~•ÐÆ ŠYŸ0Žë û™(œci™E1¨nô¢šÔZ&"J=‹îÛ´XQOŽ©Žˆ¢´!‰”e7öC±9uÆ€àÛFB† ëò~[ÏÉ„€¢õ7#.Ç19¥kM?,|Ç@*ã¶vmqõÛÝÉ1±g%Û…,~ÈNˆ™gbôöSf„ J1Ê(„–6Ù Äå°e TÈ÷Ò¨ :­žÐÄ‘$YBG†NŠAx’¬…Gúa3,@6ù+rmi„å}}¢$·ÔXÙ K®¹E)ÅnM_Ž3Æî9óƒQ;¯}qç/üe¨xé/ªÅÝyÝÁY[ µBpˆ»Ë¯CëQ¢Jž÷¥È%ŒrCLI²î¼Ÿ0Ë¥”¤p1 ‘ßøÔ^BØšõXJ&„ÕCê”N'ò} Ú IÒ¬'t(¸±ƒèT¸47®V);4ë§ ~­÷Hó¾ï *É5>€#´q‰JEpÐ8%yl_}аgF1¤¯*aœ)*Á‘.tÍï}S×Ò×m%Ý"èF°Z‡¼¡¯ÿŸ½'AräÆñ+þ@V ‚úÁ>bc‚Ì$gì#Ü3ÞÝç/(U·URf2)eIªYÛa»Ý…–p qD>ÝÿòþúdÌËÒ×ÂR‡¡%K¯vëï~›ÿÁ¥³—ùÛùu=7ç#ÿúææÜÉûå×û‘Å(i°eÀâqÐ?!CÑÁG0ì¡,½Þ_OµF?÷zƒEªë—¬;ì°Ë0·eΔ­fÄSVš(>D1ke2Ñd3Ú8%¨³\L´.r¡èp¤\LÿåÆ}­ðpž—’LÞ‹÷-)‘ƒŸdr›˜se_Ì겯{½l_ÒdË0z‡æÑ !CÄ$˜ô»²7aÁ¾Üõ\*”eû Áx·:óâ®î"Ûn_ÍÔ“[vênœLì5w(…!DœRÇ““•xJ…I]MiJ“þÄKÉö4¬ç"»$— ®¯ñ„Æy¹ÜWýĶLŒÅŒc%G5JTº8¨“=®Š .«.;ˆ99(J·ŒF*Ùe¼U,/‡ë¬T¨=yÓ’€æT¦/’šµ*<_ŸŒ5 ñÜAèðì!ÓŰ¹6j@oØdb]V¢‡näYO|‰fbö#y™¦³9ÌÎ5 Uù ™ÄµîWŒg?¨hžƒÅOµ*¡Lv£òó“•ÿ¼P÷få­÷ýªþX¤·)ö¨á WÅw°þê¦ôÆbOýŠÉHÈN5¹d Ob¢¡®ÓÔn DoG†á Òg(6ê jë8׆b+ø ËÉzn¼Èr²#—Mí$ Ԇ‘¸¯°ã§wIÆówü´mÆ«0Œ°iÙŒÂsÿú‹e–.£p°„¼S¦Zj#ž¡F[3ñƒ/´àÀ?’ÝÍÄ—] 8ú7ãŒiy‰#œ¥p÷ÀË]=㽃+{†Á€'ÍxîæÆ¢ 7ž1ÙôT§GÓd‰c°ó,Ñìé?úŠE5ëwó<:œã‘¥rþÇo߯¬çËïïM ¿ÔÞø£ÄkïsaŽplùßgOrozùUö$·’Ððæõ/nx¥#Üù«[ƒç{÷°TõH±0pmÎáí©ê,™Ç]êÊÎÐÂ]jƒìÎ í]¦½ w’ælÒÓ÷¡Éq›ðÙô¸ºW/º­—ÜáJäÿ‹ÔÑáÚÀÖIÂ?4a+¯¾Ê¶^Ò0üq lç•|¡U¼½ÔþЄ­¼~á;u ¦ÅbðÖ¯Ïk?ÁÉkËžèݱÚǦàÌë°éi÷ó<Ãðz<{Î-~'ãvXìöïÄ82Žaã<ýÁ¸›w6ÿúÆõ0.ø?wbÀ›×X'XÄUw„¹y;F’•Îß~Êó¿;üþa?~Æ8úÚ3<â0ý2ýí_KÃg?ðO7ÍÖA]\tvÓ¶96tJ(ÐBZ@ÌülØÝ{5@R£wkx~nl' g'MßÉÝ©Ü4\R)Ÿ¶_ÉQeã×ÛNpÎ/-Œül²NS'å˜Èºë©“§°2;¨îb‚ŠýêÓ× Î"îºvîš/üϯòKß‚ØÅÙìYŽÔ!9 Üæ¢½yGì"ªóHydG¦Íu¬‰]aü•g€·zæ[YRŸí8²±»nŠ]¡b“åÛbˆ&,Q&F€m‹21>`×¶Øí˜Ÿ0•7öõ|valæi8n¼U·íÖÿžàŒ/¿ªµ·ôäVµù€‚ÑäÕ5ä 
pÎÐËïÿì-yòþÏFýÔJoůÇ7ÎÒÒ¤ÃßIÿùozrßòŸþgí<ÿø¿3Iì4Êð˯ÿú¥ü„“(4¥,4z#%Œ%rŒQƒ²b±N‚ä­‹zRGÄdbœrNk·þÝ6óJÏˇ… ñzY÷ ÍÂÊéã°Ÿ˜SÀ4ÈS Ž)[›N\„b‹ue ÈÖ9v (Ù°ò(+§Ð¦éæá¡_ŠŽyiŠ1¬¹\hISáìý½ ÞçQET'BÔkùõ jïpÎÝ^D5Oé8ÁÑÒÝÞ§|¸×¡õÑÇ?öÃ$ êÙ@Ò IÈÝ_»¼ßSÆ. ÛCýÝŸtÿhV° cÅs ^+DZ´;!Ÿ=šgëÃYaÎÒçò|MKí>Ÿ5”g3V#PñbÜúÑojYœ+e\Ü,ðÏ¿ò?‡ïoBf©Žñê3þ´ü[‹tÃõ ¹M¿ø-EŒ=´Îc$ÁÔ¿›)œãÙ Æ>~_–/ªÒÖʤ¹¹Ó±ÐFÌ­…Í}$l©]T²¼6~‰¬€ÁÕQÌ-²¸™ÌöT»xÚ'4ÝqX¨™)\„7AæËJ1M¹=)NÜ @á¥sõ윕ዌÇëò2Sr†0%Ÿ¬3\‚¦nLðiôV’¤!©Ûc)n÷³GrVÁP`‹ë£°NpróRí ‹äf¸q½tì#wú–ŽÝÉÙå4Q˜d#˜×08ãRqé®ÐÜüšƒ7c­ ÊúEË Žw®øyŽÉtîöy8’ŸPòa¤ƒ•ùƒ÷s¹â#†;hºálÉeôÃdc™†¨)ö@ õEÐcΰñÚí~íê²b+²þ‚x‚Cû»¯ÿá@Ž._<#¤êm€Í7ˆ!¢Öw×%Í~þ»­1Í/?‡¬§6òü™Ço®~¥÷ÐâƒÂ™³Nœ¸à5.¿}éÖY¡‘½h0°¾ã¸ÂA°gû|½ ©ö˜­ç '8 ;tH~|—ý¾OÆKÀD)¨¯†ÿúÛ·MMÚôI›ù#Ç”¦Í ¡³ÏqÝ3xÉ[-Û Ö¯áTަÙʸUW=Iš‰‚0á¬!u¡IwèVÜJȦ¶$%ÎÛ`y‘8!Ëa}Ë  6ô$Þˆü Y÷ȇ*‰™­ú³92о‘7ìÖßÞNp窴—¹ñç¿ÿr¼ü”НK/døëÃ_þÚoñ+¶ÕèO”j$Ö—ÇœàXì]F¿‚ðVÔ,"Oõq¤¼5ò5zd!Ý;a\{60€ø‘ xXä‰:¨ã7qü+·AV½ Ô À-^ÌYÉÌÝãÕãÀ¶ Hýì ×(·}‡³÷—ríêï/Ùéd™G0ài]Ü÷Hn<§¦«’z%ä±u–+8~úºµ®ˆ ‡6±Ï\·öYd9^·ÖAIx R/þ#`Np(سž.ÿ„Ñ—Qlm(šÂZѰ›ÆÉÅD1b¶nòF HP2Â؇1ÆÚw™îòLçe¼©Ã¡%ƒÀÌÓ –†\òâºä#oújAîäër-ˆO.Ž0Á݇» &8ëOR±L‹ƒäº„çjAôÏ(€ñ(ƬU#¿Ã±\o°kËÚ{ǘ…Á”d£L‰Lö4²KäLäQcvçGgcä0M„ÙNB£C7"Ùä²Ä4îo7OÃv^2€ŽÄ"·$jfò‰&´Â–+kº`S—5ÝËâk*ŽƒIj>jÊ8—Ê5;Ì=Ç–¬É_[-Z“ƒ–4˜kÈLáœë¶¦ÅzòÅ»(õÚð”’²ƒMv…ÆdLf žôOeB†œëNÇ1M¢MiÚß”žƒê‚L<å¦LÄžæ];ðôp =Qà™(n«tãxHîƃ³_ÓEÃô†J7é¯t+yâ% Ä”†8Å2¸'kÌ8q(+ÝxçJ·“2:ã¬gÛt$‹ŸMÞ¨´Ï=–{õ÷iØnSåB‡4ÆrÕûaC•}¿*»&4†IýÜ€E£ÄZ–:xãTÇÅÀdýFU¦OQeõ½ ˜–*«ïçÏL–Τ« æâŒê bî=ß–ƒ˜Ñ &‹RërUÎi¢¡î Õ˜†ƒcã¼Éy)ˆáë ƒÖøEôÛ˜ÂñõƒmË÷<1ˆéu<ÏAu›×±ppñPb¸ÃrûÑq\`Øð:ÜïuHm:E75°<AsÒÐÛ² X¶ ¸»×±òVwTèº6Ïü®>tïÖáwõ;Cù¦l­7üü÷)ÿï`‡ôÛ϶7ümþÈmeE½ÜAãoêÿÛŒö<‚@ÚXãZòÂ*”»„3³öòÜnŒn|‘wëì hãŠ#^©d‘:{¯I% mo¼ŠÓFûÆž¡®¤™)M"u‚aæ`¹–M5èQ8ãýÖÞÁík“Iz2ðd¸hBb)ZʦˆõMLÆ%Ž!«OÅã³9FMÂ߯•ðEpž—R°ÅŠmI)s¶Jl¿ÎÂÍ̹Š$/˜ÕIÞËèåHÅkŠ;jò[F§‘¤ñC,އbJMš&ýŒ¥H2\G’~>’T‰øà‚²}­¤ç;9ÙÚhøJöµµïðEpÞ[–|À¬QaMg­þâ"¶¼À°[†þØ’HÈV9RŒê§†Ìú«$CD9i i7Æ–~÷ØXÔSEܪC:Á¡ðW;6–«[¿Ð±¡Ü÷á8p3˜†”|])‹_Íù,W½~çÓ¶1 Þi¬L-S8Àža€÷°ç1Õ°Ýô‹l©†Ý5EÓo¦æÎÆpÕ…e½•±w }U%ë—QGëwíï{z´ª=®bVê^1ôä•̵;ßá\W1ÛƒíîÞ2¶^fœw;=ˆOªyhb¹ý54±ÏãpQ_…É ŽWu½²å*Ïö&ܼå0‚‹ÓqQFä\k`¡)!ä2Éde&Pe(nK]dy1mÖæ¼¸¨ 35+28xÙoèÚËäÚ˜½Ðµaš!väæ6+ óÑŸyqÑ¥Ÿøc1†©ëïÎñ™Á*\&¿µx.\´7Dk¾Ù{ Œ&Ê¡÷ ÅE±µªW +#™Ô„b–¥¶.îk9 ²¬“ÚÑ)Mg†ñr¸èdeX\ZÂE·“±!àŠj†Ú]âß.Ús¾Mñ|# æ• Ó¢®5(U ›u"Dœi=?.ª§à¢Sµã ΄‹ê!ó1*¹Oc 2éV7ÇSqQÝ‹‹š” xî@ 'óëÜ‚¸¨ž„‹šTN1G¶WJ‡J¹¡™‘;:P8#.ªqQN L,Êd½qâèÔ)¯E;|ò5t²aæ‹owû1Ó%—y™A–×®’€„Ù”ué*¨ ve\ai\îIè²Ìl­ ÈIü³<³¼$,ºÏÝVB’E=Ŭ„εš£/ ‹êXtOY“`ÑS=PxT¨åwBPÚ„\á“$¾v*èuoáQy‹B,ÚX„C³2ï1f9¡#ëæC&–Û_ó!‹ð<uvE´ªaUÊ/»â½©{F`Ñr:,ê2ïr]ê¤p%É"uY’žSc©*cs5{ð·ÈÄ©•›ÞZÃ×F ïíG»6æ…E/umpê¬ æ#V:¥ÌG;|æ…E—>|â{Œ­D(@#Ö:ÄscUzQXtªüV-]´?BãÔ‡´M&Æa ƒeaQÝ‹BŸ ‘A¤oöòù–Ú¢°¨m¢-ÛÁIå;:ãÎ!©/ ‹NR†ZZ—ƒEƒÄŽB_þ¸fœVúÝ¢ýçÛ4~O°h¯TÀ)&P¨A¤C‡Þ/‹Òü°(MƒE7R“G×N;`¥!óYçLÈÀŽ1híÇs’qÊèèÂ*0]Û†&ÔŽãü2ò‚°(M*£w©öL]eôî"e4`LJ§´Ó3¢4‹ªTð¥s@e­;åñ©‹ZÃP’ïŽhƼŸÝ†­ m.k˜BŸ2]8‹JNã,×Ì(ß äjÎ „‹6+$1™ÉkŸ—Ô’¸è"?I ­žpo5œ,³SäQáÆÿñhĦ²µ^¯žªÍŸ‚…åƒ^ÖWÿ¹[”0O΢˜(¤ÀQPËmÿÇà³W3˜f­¿G¬Ø9œß'*Kdp/QÔýÉ¢Z»ì*üö ü•í¡¼¯Öãâ‘ξ÷d8j-z­xUÙJí̲kqO¸–#89A€"Ò†¡ÅF}4_zÞÇ¢ ùÒ¢}$M: ™ˆX ½7³Á"k°ø§ÃÇAú?c,ð1I~ö¸|°~>»92@44¾dK§œ‹t é{Úp)ùøwÅÈùÓf6dcH‚1P†“pPYꊔR¤tT(ííøü® LoðH•MÜ5[Ü ‡®Ñª c¡¾_9gc7¥Ðið-²š÷ÑwéÈ*@ˆ-Àð³‹ÿpîÆ-´ð£ï4ù¡U“0ó£o/þÜ|³ÇMiåÐp+©b‰G_êyôÅ>AÔÐÒïþ™=¼ƒ$¶Oš"b!±Ð¡ÕgF|˧—õ©ˆoøŒÑˆo#…ž¯>.¿m3"¾ÏnŽŒFfQÇ82È­(ôÄw£ïƒd6z„Úttƒõ™!ߣ²×ÂH&ÚôŠe<GÅ"ïO};ÙÞ°‰©³!¿+]M« AžÅýTècˆG'aàIXÎÀѶN8M`oà '=ׄ¯bÝpe‰5Î A{H îá!Ç™q¥SXW#!x3/¯ÃBMÁXCHÐâŽN‚øc“Ie]<¬¿ÜÕ/¯+$¹¿««â÷â¾ú>ý<|Bñ’ˆ?TßÝWëDâ\4Þ¨äÖà?¿ôAŽc>û‘ÏENÖ´Âû¡È9tÓÍ©sÍwsŒSë­u幬òsL.¡#èé<5‡XWÎÚJ#éÚ€a§5c°…­åèµexÇQ`4WU ek” ÌE‘9k*:Ö»|ÿÌwÛ YË‚U*f7d4ÇN¼£›–#ÑÒ7º§µIÞè©ñ’kU›J%Fy—Nò\WI]q‘ÉçjÇuŸ7Š‡Þ¨íðF7¦ Ê1Î#FL(tí8ô7úA¶^·—úþ™ç½fՊ󕯂÷ªÍŠöªx÷8Œx¯xD‰Ñ®(ó<1¬$~B ò0¬=Ç5—s;Ëë±C_ìüÞ«NѳgvtÎ+ú°wLbɇ¼ctJ⃷ÓÃ^é¨U0õѨþD”u@Å÷aÈ ò ¬cöô!xšð27‹žyíž®ˆŽî:¯ÝgŠ â ctz†Ž~Ÿƒÿƒ×qÛ+‘ù!q‰ÚiRK¯ÂÅ^Í_¥Õè¼Sq­hÀ‹ke™×ôï"kV£Tƒ~¶×ô£7úA}Ãr@ëBé DD 
././@LongLink0000644000000000000000000000025700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-controller/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015117043063033067 5ustar zuulzuul././@LongLink0000644000000000000000000000026400000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-controller/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000644000175000017500000011347215117043043033077 0ustar zuulzuul2025-12-12T16:25:28.596778887+00:00 stderr F ++ K8S_NODE=crc 2025-12-12T16:25:28.596778887+00:00 stderr F ++ [[ -n crc ]] 2025-12-12T16:25:28.596778887+00:00 stderr F ++ [[ -f /env/crc ]] 2025-12-12T16:25:28.596778887+00:00 stderr F ++ northd_pidfile=/var/run/ovn/ovn-northd.pid 2025-12-12T16:25:28.596778887+00:00 stderr F ++ controller_pidfile=/var/run/ovn/ovn-controller.pid 2025-12-12T16:25:28.596778887+00:00 stderr F ++ 
controller_logfile=/var/log/ovn/acl-audit-log.log 2025-12-12T16:25:28.596778887+00:00 stderr F ++ vswitch_dbsock=/var/run/openvswitch/db.sock 2025-12-12T16:25:28.596778887+00:00 stderr F ++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid 2025-12-12T16:25:28.596953522+00:00 stderr F ++ nbdb_sock=/var/run/ovn/ovnnb_db.sock 2025-12-12T16:25:28.596953522+00:00 stderr F ++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl 2025-12-12T16:25:28.596953522+00:00 stderr F ++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid 2025-12-12T16:25:28.596953522+00:00 stderr F ++ sbdb_sock=/var/run/ovn/ovnsb_db.sock 2025-12-12T16:25:28.596953522+00:00 stderr F ++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl 2025-12-12T16:25:28.598215654+00:00 stderr F + start-ovn-controller info 2025-12-12T16:25:28.598239274+00:00 stderr F + local log_level=info 2025-12-12T16:25:28.598239274+00:00 stderr F + [[ 1 -ne 1 ]] 2025-12-12T16:25:28.598860480+00:00 stderr F ++ date -Iseconds 2025-12-12T16:25:28.601655930+00:00 stderr F + echo '2025-12-12T16:25:28+00:00 - starting ovn-controller' 2025-12-12T16:25:28.601724482+00:00 stdout F 2025-12-12T16:25:28+00:00 - starting ovn-controller 2025-12-12T16:25:28.601760583+00:00 stderr F + exec ovn-controller unix:/var/run/openvswitch/db.sock -vfile:off --no-chdir --pidfile=/var/run/ovn/ovn-controller.pid --syslog-method=null --log-file=/var/log/ovn/acl-audit-log.log -vFACILITY:local0 -vconsole:info -vconsole:acl_log:off '-vPATTERN:console:%D{%Y-%m-%dT%H:%M:%S.###Z}|%05N|%c%T|%p|%m' -vsyslog:acl_log:info -vfile:acl_log:info 2025-12-12T16:25:28.608359759+00:00 stderr F 2025-12-12T16:25:28Z|00001|vlog|INFO|opened log file /var/log/ovn/acl-audit-log.log 2025-12-12T16:25:28.612490223+00:00 stderr F 2025-12-12T16:25:28.612Z|00002|reconnect|INFO|unix:/var/run/openvswitch/db.sock: connecting... 2025-12-12T16:25:28.612490223+00:00 stderr F 2025-12-12T16:25:28.612Z|00003|reconnect|INFO|unix:/var/run/openvswitch/db.sock: connected 2025-12-12T16:25:28.618657078+00:00 stderr F 2025-12-12T16:25:28.618Z|00004|main|INFO|OVN internal version is : [25.03.1-20.41.0-78.8] 2025-12-12T16:25:28.618657078+00:00 stderr F 2025-12-12T16:25:28.618Z|00005|main|INFO|OVS IDL reconnected, force recompute. 2025-12-12T16:25:28.618732550+00:00 stderr F 2025-12-12T16:25:28.618Z|00006|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connecting... 2025-12-12T16:25:28.618732550+00:00 stderr F 2025-12-12T16:25:28.618Z|00007|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connection attempt failed (No such file or directory) 2025-12-12T16:25:28.618732550+00:00 stderr F 2025-12-12T16:25:28.618Z|00008|main|INFO|OVNSB IDL reconnected, force recompute. 2025-12-12T16:25:28.618732550+00:00 stderr F 2025-12-12T16:25:28.618Z|00009|ovn_util|INFO|statctrl: connecting to switch: "unix:/var/run/openvswitch/br-int.mgmt" 2025-12-12T16:25:28.618746050+00:00 stderr F 2025-12-12T16:25:28.618Z|00010|rconn|INFO|unix:/var/run/openvswitch/br-int.mgmt: connecting... 2025-12-12T16:25:28.618818122+00:00 stderr F 2025-12-12T16:25:28.618Z|00011|ovn_util|INFO|pinctrl: connecting to switch: "unix:/var/run/openvswitch/br-int.mgmt" 2025-12-12T16:25:28.618818122+00:00 stderr F 2025-12-12T16:25:28.618Z|00012|rconn|INFO|unix:/var/run/openvswitch/br-int.mgmt: connecting... 
2025-12-12T16:25:28.619654373+00:00 stderr F 2025-12-12T16:25:28.619Z|00001|rconn(ovn_statctrl3)|INFO|unix:/var/run/openvswitch/br-int.mgmt: connected 2025-12-12T16:25:28.619729265+00:00 stderr F 2025-12-12T16:25:28.619Z|00001|rconn(ovn_pinctrl0)|INFO|unix:/var/run/openvswitch/br-int.mgmt: connected 2025-12-12T16:25:29.620260440+00:00 stderr F 2025-12-12T16:25:29.620Z|00013|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connecting... 2025-12-12T16:25:29.620260440+00:00 stderr F 2025-12-12T16:25:29.620Z|00014|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connection attempt failed (No such file or directory) 2025-12-12T16:25:29.620260440+00:00 stderr F 2025-12-12T16:25:29.620Z|00015|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: waiting 2 seconds before reconnect 2025-12-12T16:25:31.621443395+00:00 stderr F 2025-12-12T16:25:31.621Z|00016|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connecting... 2025-12-12T16:25:31.621443395+00:00 stderr F 2025-12-12T16:25:31.621Z|00017|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connection attempt failed (No such file or directory) 2025-12-12T16:25:31.621443395+00:00 stderr F 2025-12-12T16:25:31.621Z|00018|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: waiting 4 seconds before reconnect 2025-12-12T16:25:35.625523415+00:00 stderr F 2025-12-12T16:25:35.625Z|00019|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connecting... 2025-12-12T16:25:35.625617258+00:00 stderr F 2025-12-12T16:25:35.625Z|00020|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connected 2025-12-12T16:25:35.669072921+00:00 stderr F 2025-12-12T16:25:35.668Z|00021|ovn_util|INFO|features: connecting to switch: "unix:/var/run/openvswitch/br-int.mgmt" 2025-12-12T16:25:35.669072921+00:00 stderr F 2025-12-12T16:25:35.669Z|00022|rconn|INFO|unix:/var/run/openvswitch/br-int.mgmt: connecting... 2025-12-12T16:25:35.670031645+00:00 stderr F 2025-12-12T16:25:35.669Z|00023|rconn|INFO|unix:/var/run/openvswitch/br-int.mgmt: connected 2025-12-12T16:25:35.670114527+00:00 stderr F 2025-12-12T16:25:35.670Z|00024|features|INFO|OVS Feature: ct_zero_snat, state: supported 2025-12-12T16:25:35.670114527+00:00 stderr F 2025-12-12T16:25:35.670Z|00025|features|INFO|OVS Feature: ct_flush, state: supported 2025-12-12T16:25:35.670114527+00:00 stderr F 2025-12-12T16:25:35.670Z|00026|features|INFO|OVS Feature: dp_hash_l4_sym_support, state: supported 2025-12-12T16:25:35.670297702+00:00 stderr F 2025-12-12T16:25:35.670Z|00027|reconnect|INFO|unix:/var/run/openvswitch/db.sock: connecting... 2025-12-12T16:25:35.670297702+00:00 stderr F 2025-12-12T16:25:35.670Z|00028|main|INFO|OVS feature set changed, force recompute. 2025-12-12T16:25:35.670297702+00:00 stderr F 2025-12-12T16:25:35.670Z|00029|ovn_util|INFO|ofctrl: connecting to switch: "unix:/var/run/openvswitch/br-int.mgmt" 2025-12-12T16:25:35.670297702+00:00 stderr F 2025-12-12T16:25:35.670Z|00030|rconn|INFO|unix:/var/run/openvswitch/br-int.mgmt: connecting... 
2025-12-12T16:25:35.671363169+00:00 stderr F 2025-12-12T16:25:35.671Z|00031|features|INFO|OVS Feature: meter_support, state: supported 2025-12-12T16:25:35.671442711+00:00 stderr F 2025-12-12T16:25:35.671Z|00032|features|INFO|OVS Feature: group_support, state: supported 2025-12-12T16:25:35.671452241+00:00 stderr F 2025-12-12T16:25:35.671Z|00033|features|INFO|OVS Feature: sample_action_with_registers, state: supported 2025-12-12T16:25:35.671460321+00:00 stderr F 2025-12-12T16:25:35.671Z|00034|reconnect|INFO|unix:/var/run/openvswitch/db.sock: connected 2025-12-12T16:25:35.671490012+00:00 stderr F 2025-12-12T16:25:35.671Z|00035|main|INFO|OVS feature set changed, force recompute. 2025-12-12T16:25:35.672695692+00:00 stderr F 2025-12-12T16:25:35.672Z|00036|features|INFO|OVS DB schema supports 4 flow table prefixes, our IDL supports: 4 2025-12-12T16:25:35.692670175+00:00 stderr F 2025-12-12T16:25:35.692Z|00037|features|INFO|OVS Feature: ct_label_flush, state: supported 2025-12-12T16:25:35.692670175+00:00 stderr F 2025-12-12T16:25:35.692Z|00038|main|INFO|OVS feature set changed, force recompute. 2025-12-12T16:25:35.692893951+00:00 stderr F 2025-12-12T16:25:35.692Z|00039|rconn|INFO|unix:/var/run/openvswitch/br-int.mgmt: connected 2025-12-12T16:25:35.692902921+00:00 stderr F 2025-12-12T16:25:35.692Z|00040|main|INFO|OVS OpenFlow connection reconnected,force recompute. 2025-12-12T16:26:06.160301732+00:00 stderr F 2025-12-12T16:26:06.160Z|00041|memory|INFO|20992 kB peak resident set size after 37.6 seconds 2025-12-12T16:26:06.160301732+00:00 stderr F 2025-12-12T16:26:06.160Z|00042|memory|INFO|idl-cells-OVN_Southbound:15715 idl-cells-Open_vSwitch:3036 lflow-cache-entries-cache-expr:290 lflow-cache-entries-cache-matches:625 lflow-cache-size-KB:758 local_datapath_usage-KB:1 ofctrl_desired_flow_usage-KB:800 ofctrl_installed_flow_usage-KB:590 ofctrl_sb_flow_ref_usage-KB:311 2025-12-12T16:26:06.301367476+00:00 stderr F 2025-12-12T16:26:06.301Z|00043|memory_trim|INFO|Detected inactivity (last active 30001 ms ago): trimming memory 2025-12-12T16:26:39.105540479+00:00 stderr F 2025-12-12T16:26:39.105Z|00044|binding|INFO|Releasing lport openshift-marketplace_redhat-marketplace-jkgqd from this chassis (sb_readonly=0) 2025-12-12T16:26:39.105540479+00:00 stderr F 2025-12-12T16:26:39.105Z|00045|if_status|WARN|Trying to release unknown interface openshift-marketplace_redhat-marketplace-jkgqd 2025-12-12T16:26:39.105540479+00:00 stderr F 2025-12-12T16:26:39.105Z|00046|binding|INFO|Setting lport openshift-marketplace_redhat-marketplace-jkgqd down in Southbound 2025-12-12T16:26:40.434578214+00:00 stderr F 2025-12-12T16:26:40.434Z|00047|binding|INFO|Claiming lport openshift-image-registry_image-registry-5d9d95bf5b-6md9w for this chassis. 2025-12-12T16:26:40.434578214+00:00 stderr F 2025-12-12T16:26:40.434Z|00048|binding|INFO|openshift-image-registry_image-registry-5d9d95bf5b-6md9w: Claiming 0a:58:0a:d9:00:07 10.217.0.7 2025-12-12T16:26:40.459470723+00:00 stderr F 2025-12-12T16:26:40.459Z|00049|binding|INFO|Setting lport openshift-image-registry_image-registry-5d9d95bf5b-6md9w ovn-installed in OVS 2025-12-12T16:26:40.459470723+00:00 stderr F 2025-12-12T16:26:40.459Z|00050|binding|INFO|Setting lport openshift-image-registry_image-registry-5d9d95bf5b-6md9w up in Southbound 2025-12-12T16:26:43.307841063+00:00 stderr F 2025-12-12T16:26:43.307Z|00051|binding|INFO|Claiming lport openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85 for this chassis. 
2025-12-12T16:26:43.307906565+00:00 stderr F 2025-12-12T16:26:43.307Z|00052|binding|INFO|openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85: Claiming 0a:58:0a:d9:00:09 10.217.0.9 2025-12-12T16:26:43.335650876+00:00 stderr F 2025-12-12T16:26:43.335Z|00053|binding|INFO|Setting lport openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85 ovn-installed in OVS 2025-12-12T16:26:43.335742558+00:00 stderr F 2025-12-12T16:26:43.335Z|00054|binding|INFO|Setting lport openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85 up in Southbound 2025-12-12T16:26:48.081300607+00:00 stderr F 2025-12-12T16:26:48.081Z|00055|binding|INFO|Releasing lport openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85 from this chassis (sb_readonly=0) 2025-12-12T16:26:48.081389459+00:00 stderr F 2025-12-12T16:26:48.081Z|00056|binding|INFO|Setting lport openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85 down in Southbound 2025-12-12T16:26:49.723800682+00:00 stderr F 2025-12-12T16:26:49.723Z|00057|binding|INFO|Claiming lport openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx for this chassis. 2025-12-12T16:26:49.723884274+00:00 stderr F 2025-12-12T16:26:49.723Z|00058|binding|INFO|openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx: Claiming 0a:58:0a:d9:00:0f 10.217.0.15 2025-12-12T16:26:49.750604129+00:00 stderr F 2025-12-12T16:26:49.750Z|00059|binding|INFO|Setting lport openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx ovn-installed in OVS 2025-12-12T16:26:49.750713802+00:00 stderr F 2025-12-12T16:26:49.750Z|00060|binding|INFO|Setting lport openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx up in Southbound 2025-12-12T16:26:53.739650586+00:00 stderr F 2025-12-12T16:26:53.738Z|00061|binding|INFO|Claiming lport openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5 for this chassis. 2025-12-12T16:26:53.739765539+00:00 stderr F 2025-12-12T16:26:53.739Z|00062|binding|INFO|openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5: Claiming 0a:58:0a:d9:00:1c 10.217.0.28 2025-12-12T16:26:53.789255430+00:00 stderr F 2025-12-12T16:26:53.789Z|00063|binding|INFO|Setting lport openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5 ovn-installed in OVS 2025-12-12T16:26:53.789353742+00:00 stderr F 2025-12-12T16:26:53.789Z|00064|binding|INFO|Setting lport openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5 up in Southbound 2025-12-12T16:26:53.860614582+00:00 stderr F 2025-12-12T16:26:53.860Z|00065|binding|INFO|Claiming lport openshift-marketplace_certified-operators-8pl6d for this chassis. 
2025-12-12T16:26:53.860716405+00:00 stderr F 2025-12-12T16:26:53.860Z|00066|binding|INFO|openshift-marketplace_certified-operators-8pl6d: Claiming 0a:58:0a:d9:00:20 10.217.0.32 2025-12-12T16:26:53.914796521+00:00 stderr F 2025-12-12T16:26:53.914Z|00067|binding|INFO|Setting lport openshift-marketplace_certified-operators-8pl6d up in Southbound 2025-12-12T16:26:53.915444788+00:00 stderr F 2025-12-12T16:26:53.915Z|00068|binding|INFO|Setting lport openshift-marketplace_certified-operators-8pl6d ovn-installed in OVS 2025-12-12T16:26:56.184827600+00:00 stderr F 2025-12-12T16:26:56.184Z|00069|binding|INFO|Releasing lport openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx from this chassis (sb_readonly=0) 2025-12-12T16:26:56.184952383+00:00 stderr F 2025-12-12T16:26:56.184Z|00070|binding|INFO|Setting lport openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx down in Southbound 2025-12-12T16:26:58.386524263+00:00 stderr F 2025-12-12T16:26:58.385Z|00071|binding|INFO|Claiming lport openshift-marketplace_redhat-operators-b4n58 for this chassis. 2025-12-12T16:26:58.386524263+00:00 stderr F 2025-12-12T16:26:58.385Z|00072|binding|INFO|openshift-marketplace_redhat-operators-b4n58: Claiming 0a:58:0a:d9:00:2c 10.217.0.44 2025-12-12T16:26:58.445073705+00:00 stderr F 2025-12-12T16:26:58.443Z|00073|binding|INFO|Setting lport openshift-marketplace_redhat-operators-b4n58 up in Southbound 2025-12-12T16:26:58.446513972+00:00 stderr F 2025-12-12T16:26:58.446Z|00074|binding|INFO|Setting lport openshift-marketplace_redhat-operators-b4n58 ovn-installed in OVS 2025-12-12T16:27:05.672355948+00:00 stderr F 2025-12-12T16:27:05.671Z|00075|binding|INFO|Claiming lport openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g for this chassis. 2025-12-12T16:27:05.672355948+00:00 stderr F 2025-12-12T16:27:05.672Z|00076|binding|INFO|openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g: Claiming 0a:58:0a:d9:00:2f 10.217.0.47 2025-12-12T16:27:05.747660014+00:00 stderr F 2025-12-12T16:27:05.744Z|00077|binding|INFO|Setting lport openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g ovn-installed in OVS 2025-12-12T16:27:05.747660014+00:00 stderr F 2025-12-12T16:27:05.744Z|00078|binding|INFO|Setting lport openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g up in Southbound 2025-12-12T16:27:05.793766710+00:00 stderr F 2025-12-12T16:27:05.793Z|00079|binding|INFO|Claiming lport openshift-operators_observability-operator-78c97476f4-qxqmn for this chassis. 2025-12-12T16:27:05.793766710+00:00 stderr F 2025-12-12T16:27:05.793Z|00080|binding|INFO|openshift-operators_observability-operator-78c97476f4-qxqmn: Claiming 0a:58:0a:d9:00:30 10.217.0.48 2025-12-12T16:27:05.881773798+00:00 stderr F 2025-12-12T16:27:05.880Z|00081|binding|INFO|Setting lport openshift-operators_observability-operator-78c97476f4-qxqmn ovn-installed in OVS 2025-12-12T16:27:05.881773798+00:00 stderr F 2025-12-12T16:27:05.880Z|00082|binding|INFO|Setting lport openshift-operators_observability-operator-78c97476f4-qxqmn up in Southbound 2025-12-12T16:27:05.911305555+00:00 stderr F 2025-12-12T16:27:05.910Z|00083|binding|INFO|Claiming lport openshift-operators_perses-operator-68bdb49cbf-nqtp8 for this chassis. 
2025-12-12T16:27:05.911305555+00:00 stderr F 2025-12-12T16:27:05.910Z|00084|binding|INFO|openshift-operators_perses-operator-68bdb49cbf-nqtp8: Claiming 0a:58:0a:d9:00:31 10.217.0.49 2025-12-12T16:27:05.911305555+00:00 stderr F 2025-12-12T16:27:05.910Z|00085|binding|INFO|Claiming lport service-telemetry_elastic-operator-6c994c654b-42tmw for this chassis. 2025-12-12T16:27:05.911305555+00:00 stderr F 2025-12-12T16:27:05.910Z|00086|binding|INFO|service-telemetry_elastic-operator-6c994c654b-42tmw: Claiming 0a:58:0a:d9:00:32 10.217.0.50 2025-12-12T16:27:06.029908637+00:00 stderr F 2025-12-12T16:27:06.029Z|00087|binding|INFO|Setting lport service-telemetry_elastic-operator-6c994c654b-42tmw up in Southbound 2025-12-12T16:27:06.029908637+00:00 stderr F 2025-12-12T16:27:06.029Z|00088|binding|INFO|Setting lport openshift-operators_perses-operator-68bdb49cbf-nqtp8 up in Southbound 2025-12-12T16:27:06.030771749+00:00 stderr F 2025-12-12T16:27:06.030Z|00089|binding|INFO|Setting lport service-telemetry_elastic-operator-6c994c654b-42tmw ovn-installed in OVS 2025-12-12T16:27:06.030771749+00:00 stderr F 2025-12-12T16:27:06.030Z|00090|binding|INFO|Setting lport openshift-operators_perses-operator-68bdb49cbf-nqtp8 ovn-installed in OVS 2025-12-12T16:27:06.033984590+00:00 stderr F 2025-12-12T16:27:06.033Z|00091|if_status|INFO|Not updating pb chassis for openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr now as sb is readonly 2025-12-12T16:27:06.034888783+00:00 stderr F 2025-12-12T16:27:06.034Z|00092|binding|INFO|Claiming lport openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr for this chassis. 2025-12-12T16:27:06.034888783+00:00 stderr F 2025-12-12T16:27:06.034Z|00093|binding|INFO|openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr: Claiming 0a:58:0a:d9:00:2e 10.217.0.46 2025-12-12T16:27:06.034915724+00:00 stderr F 2025-12-12T16:27:06.034Z|00094|binding|INFO|Claiming lport openshift-operators_obo-prometheus-operator-86648f486b-wbj29 for this chassis. 
2025-12-12T16:27:06.034915724+00:00 stderr F 2025-12-12T16:27:06.034Z|00095|binding|INFO|openshift-operators_obo-prometheus-operator-86648f486b-wbj29: Claiming 0a:58:0a:d9:00:2d 10.217.0.45 2025-12-12T16:27:06.122821968+00:00 stderr F 2025-12-12T16:27:06.122Z|00096|binding|INFO|Setting lport openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr ovn-installed in OVS 2025-12-12T16:27:06.122821968+00:00 stderr F 2025-12-12T16:27:06.122Z|00097|binding|INFO|Setting lport openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr up in Southbound 2025-12-12T16:27:06.122821968+00:00 stderr F 2025-12-12T16:27:06.122Z|00098|binding|INFO|Setting lport openshift-operators_obo-prometheus-operator-86648f486b-wbj29 ovn-installed in OVS 2025-12-12T16:27:06.122821968+00:00 stderr F 2025-12-12T16:27:06.122Z|00099|binding|INFO|Setting lport openshift-operators_obo-prometheus-operator-86648f486b-wbj29 up in Southbound 2025-12-12T16:27:06.464338662+00:00 stderr F 2025-12-12T16:27:06.464Z|00100|binding|INFO|Releasing lport openshift-marketplace_certified-operators-8pl6d from this chassis (sb_readonly=0) 2025-12-12T16:27:06.464338662+00:00 stderr F 2025-12-12T16:27:06.464Z|00101|binding|INFO|Setting lport openshift-marketplace_certified-operators-8pl6d down in Southbound 2025-12-12T16:27:09.489243038+00:00 stderr F 2025-12-12T16:27:09.486Z|00102|binding|INFO|Releasing lport openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5 from this chassis (sb_readonly=0) 2025-12-12T16:27:09.489243038+00:00 stderr F 2025-12-12T16:27:09.486Z|00103|binding|INFO|Setting lport openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5 down in Southbound 2025-12-12T16:27:27.822857365+00:00 stderr F 2025-12-12T16:27:27.822Z|00104|binding|INFO|Releasing lport openshift-marketplace_redhat-operators-b4n58 from this chassis (sb_readonly=0) 2025-12-12T16:27:27.822857365+00:00 stderr F 2025-12-12T16:27:27.822Z|00105|binding|INFO|Setting lport openshift-marketplace_redhat-operators-b4n58 down in Southbound 2025-12-12T16:27:29.671677036+00:00 stderr F 2025-12-12T16:27:29.671Z|00106|binding|INFO|Releasing lport openshift-image-registry_image-registry-66587d64c8-jqtjf from this chassis (sb_readonly=0) 2025-12-12T16:27:29.671677036+00:00 stderr F 2025-12-12T16:27:29.671Z|00107|if_status|WARN|Trying to release unknown interface openshift-image-registry_image-registry-66587d64c8-jqtjf 2025-12-12T16:27:29.671677036+00:00 stderr F 2025-12-12T16:27:29.671Z|00108|binding|INFO|Setting lport openshift-image-registry_image-registry-66587d64c8-jqtjf down in Southbound 2025-12-12T16:27:29.780767157+00:00 stderr F 2025-12-12T16:27:29.780Z|00109|binding|INFO|Claiming lport cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt for this chassis. 
2025-12-12T16:27:29.780767157+00:00 stderr F 2025-12-12T16:27:29.780Z|00110|binding|INFO|cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt: Claiming 0a:58:0a:d9:00:33 10.217.0.51 2025-12-12T16:27:29.816095231+00:00 stderr F 2025-12-12T16:27:29.816Z|00111|binding|INFO|Setting lport cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt ovn-installed in OVS 2025-12-12T16:27:29.816095231+00:00 stderr F 2025-12-12T16:27:29.816Z|00112|binding|INFO|Setting lport cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt up in Southbound 2025-12-12T16:27:29.829882190+00:00 stderr F 2025-12-12T16:27:29.829Z|00113|binding|INFO|Claiming lport openshift-marketplace_community-operators-9wq8j for this chassis. 2025-12-12T16:27:29.829882190+00:00 stderr F 2025-12-12T16:27:29.829Z|00114|binding|INFO|openshift-marketplace_community-operators-9wq8j: Claiming 0a:58:0a:d9:00:34 10.217.0.52 2025-12-12T16:27:29.876127621+00:00 stderr F 2025-12-12T16:27:29.876Z|00115|binding|INFO|Setting lport openshift-marketplace_community-operators-9wq8j ovn-installed in OVS 2025-12-12T16:27:29.876127621+00:00 stderr F 2025-12-12T16:27:29.876Z|00116|binding|INFO|Setting lport openshift-marketplace_community-operators-9wq8j up in Southbound 2025-12-12T16:27:31.953583768+00:00 stderr F 2025-12-12T16:27:31.953Z|00117|binding|INFO|Claiming lport service-telemetry_elasticsearch-es-default-0 for this chassis. 2025-12-12T16:27:31.953583768+00:00 stderr F 2025-12-12T16:27:31.953Z|00118|binding|INFO|service-telemetry_elasticsearch-es-default-0: Claiming 0a:58:0a:d9:00:35 10.217.0.53 2025-12-12T16:27:31.991111808+00:00 stderr F 2025-12-12T16:27:31.991Z|00119|binding|INFO|Setting lport service-telemetry_elasticsearch-es-default-0 ovn-installed in OVS 2025-12-12T16:27:31.991111808+00:00 stderr F 2025-12-12T16:27:31.991Z|00120|binding|INFO|Setting lport service-telemetry_elasticsearch-es-default-0 up in Southbound 2025-12-12T16:27:55.732828031+00:00 stderr F 2025-12-12T16:27:55.732Z|00121|binding|INFO|Releasing lport openshift-marketplace_community-operators-9wq8j from this chassis (sb_readonly=0) 2025-12-12T16:27:55.732828031+00:00 stderr F 2025-12-12T16:27:55.732Z|00122|binding|INFO|Setting lport openshift-marketplace_community-operators-9wq8j down in Southbound 2025-12-12T16:28:00.064018778+00:00 stderr F 2025-12-12T16:28:00.063Z|00123|binding|INFO|Claiming lport cert-manager_cert-manager-858d87f86b-r7f8q for this chassis. 2025-12-12T16:28:00.064018778+00:00 stderr F 2025-12-12T16:28:00.063Z|00124|binding|INFO|cert-manager_cert-manager-858d87f86b-r7f8q: Claiming 0a:58:0a:d9:00:38 10.217.0.56 2025-12-12T16:28:00.121259386+00:00 stderr F 2025-12-12T16:28:00.121Z|00125|binding|INFO|Setting lport cert-manager_cert-manager-858d87f86b-r7f8q ovn-installed in OVS 2025-12-12T16:28:00.121259386+00:00 stderr F 2025-12-12T16:28:00.121Z|00126|binding|INFO|Setting lport cert-manager_cert-manager-858d87f86b-r7f8q up in Southbound 2025-12-12T16:28:00.283455361+00:00 stderr F 2025-12-12T16:28:00.283Z|00127|binding|INFO|Claiming lport cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl for this chassis. 
2025-12-12T16:28:00.283455361+00:00 stderr F 2025-12-12T16:28:00.283Z|00128|binding|INFO|cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl: Claiming 0a:58:0a:d9:00:37 10.217.0.55 2025-12-12T16:28:00.321313759+00:00 stderr F 2025-12-12T16:28:00.321Z|00129|binding|INFO|Setting lport cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl ovn-installed in OVS 2025-12-12T16:28:00.321313759+00:00 stderr F 2025-12-12T16:28:00.321Z|00130|binding|INFO|Setting lport cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl up in Southbound 2025-12-12T16:28:00.396895312+00:00 stderr F 2025-12-12T16:28:00.396Z|00131|binding|INFO|Claiming lport cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt for this chassis. 2025-12-12T16:28:00.396895312+00:00 stderr F 2025-12-12T16:28:00.396Z|00132|binding|INFO|cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt: Claiming 0a:58:0a:d9:00:36 10.217.0.54 2025-12-12T16:28:00.442009874+00:00 stderr F 2025-12-12T16:28:00.441Z|00133|binding|INFO|Setting lport cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt ovn-installed in OVS 2025-12-12T16:28:00.442009874+00:00 stderr F 2025-12-12T16:28:00.441Z|00134|binding|INFO|Setting lport cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt up in Southbound 2025-12-12T16:28:18.740750749+00:00 stderr F 2025-12-12T16:28:18.740Z|00135|binding|INFO|Claiming lport service-telemetry_service-telemetry-framework-index-1-build for this chassis. 2025-12-12T16:28:18.740750749+00:00 stderr F 2025-12-12T16:28:18.740Z|00136|binding|INFO|service-telemetry_service-telemetry-framework-index-1-build: Claiming 0a:58:0a:d9:00:39 10.217.0.57 2025-12-12T16:28:18.776103754+00:00 stderr F 2025-12-12T16:28:18.774Z|00137|binding|INFO|Setting lport service-telemetry_service-telemetry-framework-index-1-build ovn-installed in OVS 2025-12-12T16:28:18.776103754+00:00 stderr F 2025-12-12T16:28:18.774Z|00138|binding|INFO|Setting lport service-telemetry_service-telemetry-framework-index-1-build up in Southbound 2025-12-12T16:28:26.689120912+00:00 stderr F 2025-12-12T16:28:26.688Z|00139|binding|INFO|Releasing lport service-telemetry_service-telemetry-framework-index-1-build from this chassis (sb_readonly=0) 2025-12-12T16:28:26.689120912+00:00 stderr F 2025-12-12T16:28:26.689Z|00140|binding|INFO|Setting lport service-telemetry_service-telemetry-framework-index-1-build down in Southbound 2025-12-12T16:28:40.808043635+00:00 stderr F 2025-12-12T16:28:40.807Z|00141|binding|INFO|Claiming lport service-telemetry_service-telemetry-framework-index-2-build for this chassis. 
2025-12-12T16:28:40.808043635+00:00 stderr F 2025-12-12T16:28:40.807Z|00142|binding|INFO|service-telemetry_service-telemetry-framework-index-2-build: Claiming 0a:58:0a:d9:00:3c 10.217.0.60 2025-12-12T16:28:40.853511436+00:00 stderr F 2025-12-12T16:28:40.853Z|00143|binding|INFO|Setting lport service-telemetry_service-telemetry-framework-index-2-build ovn-installed in OVS 2025-12-12T16:28:40.853511436+00:00 stderr F 2025-12-12T16:28:40.853Z|00144|binding|INFO|Setting lport service-telemetry_service-telemetry-framework-index-2-build up in Southbound 2025-12-12T16:28:59.668839794+00:00 stderr F 2025-12-12T16:28:59.668Z|00145|binding|INFO|Releasing lport service-telemetry_service-telemetry-framework-index-2-build from this chassis (sb_readonly=0) 2025-12-12T16:28:59.668839794+00:00 stderr F 2025-12-12T16:28:59.668Z|00146|binding|INFO|Setting lport service-telemetry_service-telemetry-framework-index-2-build down in Southbound 2025-12-12T16:29:03.778797984+00:00 stderr F 2025-12-12T16:29:03.778Z|00147|binding|INFO|Claiming lport service-telemetry_service-telemetry-framework-index-3-build for this chassis. 2025-12-12T16:29:03.778797984+00:00 stderr F 2025-12-12T16:29:03.778Z|00148|binding|INFO|service-telemetry_service-telemetry-framework-index-3-build: Claiming 0a:58:0a:d9:00:3d 10.217.0.61 2025-12-12T16:29:03.802721710+00:00 stderr F 2025-12-12T16:29:03.802Z|00149|binding|INFO|Setting lport service-telemetry_service-telemetry-framework-index-3-build ovn-installed in OVS 2025-12-12T16:29:03.802721710+00:00 stderr F 2025-12-12T16:29:03.802Z|00150|binding|INFO|Setting lport service-telemetry_service-telemetry-framework-index-3-build up in Southbound 2025-12-12T16:29:08.666221008+00:00 stderr F 2025-12-12T16:29:08.666Z|00151|binding|INFO|Releasing lport service-telemetry_service-telemetry-framework-index-3-build from this chassis (sb_readonly=0) 2025-12-12T16:29:08.666221008+00:00 stderr F 2025-12-12T16:29:08.666Z|00152|binding|INFO|Setting lport service-telemetry_service-telemetry-framework-index-3-build down in Southbound 2025-12-12T16:29:19.582010240+00:00 stderr F 2025-12-12T16:29:19.581Z|00153|binding|INFO|Claiming lport service-telemetry_service-telemetry-framework-index-4-build for this chassis. 2025-12-12T16:29:19.582010240+00:00 stderr F 2025-12-12T16:29:19.581Z|00154|binding|INFO|service-telemetry_service-telemetry-framework-index-4-build: Claiming 0a:58:0a:d9:00:3e 10.217.0.62 2025-12-12T16:29:19.615652738+00:00 stderr F 2025-12-12T16:29:19.615Z|00155|binding|INFO|Setting lport service-telemetry_service-telemetry-framework-index-4-build ovn-installed in OVS 2025-12-12T16:29:19.615652738+00:00 stderr F 2025-12-12T16:29:19.615Z|00156|binding|INFO|Setting lport service-telemetry_service-telemetry-framework-index-4-build up in Southbound 2025-12-12T16:29:23.199961750+00:00 stderr F 2025-12-12T16:29:23.199Z|00157|binding|INFO|Releasing lport service-telemetry_service-telemetry-framework-index-4-build from this chassis (sb_readonly=0) 2025-12-12T16:29:23.199961750+00:00 stderr F 2025-12-12T16:29:23.199Z|00158|binding|INFO|Setting lport service-telemetry_service-telemetry-framework-index-4-build down in Southbound 2025-12-12T16:29:23.745094244+00:00 stderr F 2025-12-12T16:29:23.745Z|00159|binding|INFO|Claiming lport service-telemetry_infrawatch-operators-cj72z for this chassis. 
2025-12-12T16:29:23.745094244+00:00 stderr F 2025-12-12T16:29:23.745Z|00160|binding|INFO|service-telemetry_infrawatch-operators-cj72z: Claiming 0a:58:0a:d9:00:3f 10.217.0.63 2025-12-12T16:29:23.779850860+00:00 stderr F 2025-12-12T16:29:23.779Z|00161|binding|INFO|Setting lport service-telemetry_infrawatch-operators-cj72z ovn-installed in OVS 2025-12-12T16:29:23.779850860+00:00 stderr F 2025-12-12T16:29:23.779Z|00162|binding|INFO|Setting lport service-telemetry_infrawatch-operators-cj72z up in Southbound 2025-12-12T16:29:27.926668154+00:00 stderr F 2025-12-12T16:29:27.926Z|00163|binding|INFO|Releasing lport service-telemetry_infrawatch-operators-cj72z from this chassis (sb_readonly=0) 2025-12-12T16:29:27.926668154+00:00 stderr F 2025-12-12T16:29:27.926Z|00164|binding|INFO|Setting lport service-telemetry_infrawatch-operators-cj72z down in Southbound 2025-12-12T16:29:29.161614397+00:00 stderr F 2025-12-12T16:29:29.161Z|00165|binding|INFO|Claiming lport service-telemetry_infrawatch-operators-cdpts for this chassis. 2025-12-12T16:29:29.161614397+00:00 stderr F 2025-12-12T16:29:29.161Z|00166|binding|INFO|service-telemetry_infrawatch-operators-cdpts: Claiming 0a:58:0a:d9:00:41 10.217.0.65 2025-12-12T16:29:29.192325402+00:00 stderr F 2025-12-12T16:29:29.192Z|00167|binding|INFO|Setting lport service-telemetry_infrawatch-operators-cdpts up in Southbound 2025-12-12T16:29:29.192919737+00:00 stderr F 2025-12-12T16:29:29.192Z|00168|binding|INFO|Setting lport service-telemetry_infrawatch-operators-cdpts ovn-installed in OVS 2025-12-12T16:29:59.174448817+00:00 stderr F 2025-12-12T16:29:59.174Z|00169|memory_trim|INFO|Detected inactivity (last active 30012 ms ago): trimming memory 2025-12-12T16:30:00.678881614+00:00 stderr F 2025-12-12T16:30:00.678Z|00170|binding|INFO|Claiming lport openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh for this chassis. 2025-12-12T16:30:00.678881614+00:00 stderr F 2025-12-12T16:30:00.678Z|00171|binding|INFO|openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh: Claiming 0a:58:0a:d9:00:42 10.217.0.66 2025-12-12T16:30:00.732763692+00:00 stderr F 2025-12-12T16:30:00.732Z|00172|binding|INFO|Setting lport openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh ovn-installed in OVS 2025-12-12T16:30:00.732763692+00:00 stderr F 2025-12-12T16:30:00.732Z|00173|binding|INFO|Setting lport openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh up in Southbound 2025-12-12T16:30:03.322370449+00:00 stderr F 2025-12-12T16:30:03.321Z|00174|binding|INFO|Releasing lport openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh from this chassis (sb_readonly=0) 2025-12-12T16:30:03.322370449+00:00 stderr F 2025-12-12T16:30:03.321Z|00175|binding|INFO|Setting lport openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh down in Southbound 2025-12-12T16:30:33.517739441+00:00 stderr F 2025-12-12T16:30:33.517Z|00176|memory_trim|INFO|Detected inactivity (last active 30004 ms ago): trimming memory 2025-12-12T16:31:05.830044180+00:00 stderr F 2025-12-12T16:31:05.829Z|00177|memory_trim|INFO|Detected inactivity (last active 30029 ms ago): trimming memory 2025-12-12T16:34:26.205552065+00:00 stderr F 2025-12-12T16:34:26.205Z|00178|binding|INFO|Claiming lport service-telemetry_infrawatch-operators-6bs58 for this chassis. 
2025-12-12T16:34:26.205552065+00:00 stderr F 2025-12-12T16:34:26.205Z|00179|binding|INFO|service-telemetry_infrawatch-operators-6bs58: Claiming 0a:58:0a:d9:00:43 10.217.0.67 2025-12-12T16:34:26.232528219+00:00 stderr F 2025-12-12T16:34:26.232Z|00180|binding|INFO|Setting lport service-telemetry_infrawatch-operators-6bs58 ovn-installed in OVS 2025-12-12T16:34:26.232528219+00:00 stderr F 2025-12-12T16:34:26.232Z|00181|binding|INFO|Setting lport service-telemetry_infrawatch-operators-6bs58 up in Southbound 2025-12-12T16:34:56.214452178+00:00 stderr F 2025-12-12T16:34:56.214Z|00182|memory_trim|INFO|Detected inactivity (last active 30009 ms ago): trimming memory 2025-12-12T16:37:14.207422812+00:00 stderr F 2025-12-12T16:37:14.207Z|00183|binding|INFO|Claiming lport openshift-marketplace_certified-operators-h46w2 for this chassis. 2025-12-12T16:37:14.207422812+00:00 stderr F 2025-12-12T16:37:14.207Z|00184|binding|INFO|openshift-marketplace_certified-operators-h46w2: Claiming 0a:58:0a:d9:00:44 10.217.0.68 2025-12-12T16:37:14.234563394+00:00 stderr F 2025-12-12T16:37:14.234Z|00185|binding|INFO|Setting lport openshift-marketplace_certified-operators-h46w2 ovn-installed in OVS 2025-12-12T16:37:14.234563394+00:00 stderr F 2025-12-12T16:37:14.234Z|00186|binding|INFO|Setting lport openshift-marketplace_certified-operators-h46w2 up in Southbound 2025-12-12T16:37:23.600328353+00:00 stderr F 2025-12-12T16:37:23.600Z|00187|binding|INFO|Claiming lport openshift-marketplace_redhat-operators-k5p4x for this chassis. 2025-12-12T16:37:23.600328353+00:00 stderr F 2025-12-12T16:37:23.600Z|00188|binding|INFO|openshift-marketplace_redhat-operators-k5p4x: Claiming 0a:58:0a:d9:00:4a 10.217.0.74 2025-12-12T16:37:23.636972334+00:00 stderr F 2025-12-12T16:37:23.636Z|00189|binding|INFO|Setting lport openshift-marketplace_redhat-operators-k5p4x ovn-installed in OVS 2025-12-12T16:37:23.636972334+00:00 stderr F 2025-12-12T16:37:23.636Z|00190|binding|INFO|Setting lport openshift-marketplace_redhat-operators-k5p4x up in Southbound 2025-12-12T16:37:27.051352792+00:00 stderr F 2025-12-12T16:37:27.049Z|00191|binding|INFO|Releasing lport openshift-marketplace_certified-operators-h46w2 from this chassis (sb_readonly=0) 2025-12-12T16:37:27.051352792+00:00 stderr F 2025-12-12T16:37:27.049Z|00192|binding|INFO|Setting lport openshift-marketplace_certified-operators-h46w2 down in Southbound 2025-12-12T16:37:39.781227536+00:00 stderr F 2025-12-12T16:37:39.781Z|00193|binding|INFO|Releasing lport openshift-marketplace_redhat-operators-k5p4x from this chassis (sb_readonly=0) 2025-12-12T16:37:39.781227536+00:00 stderr F 2025-12-12T16:37:39.781Z|00194|binding|INFO|Setting lport openshift-marketplace_redhat-operators-k5p4x down in Southbound 2025-12-12T16:38:10.888549664+00:00 stderr F 2025-12-12T16:38:10.888Z|00195|binding|INFO|Claiming lport openshift-marketplace_community-operators-4sccg for this chassis. 
2025-12-12T16:38:10.888549664+00:00 stderr F 2025-12-12T16:38:10.888Z|00196|binding|INFO|openshift-marketplace_community-operators-4sccg: Claiming 0a:58:0a:d9:00:4c 10.217.0.76 2025-12-12T16:38:10.923258206+00:00 stderr F 2025-12-12T16:38:10.923Z|00197|binding|INFO|Setting lport openshift-marketplace_community-operators-4sccg up in Southbound 2025-12-12T16:38:10.923868881+00:00 stderr F 2025-12-12T16:38:10.923Z|00198|binding|INFO|Setting lport openshift-marketplace_community-operators-4sccg ovn-installed in OVS 2025-12-12T16:38:27.211923541+00:00 stderr F 2025-12-12T16:38:27.211Z|00199|binding|INFO|Releasing lport openshift-marketplace_community-operators-4sccg from this chassis (sb_readonly=0) 2025-12-12T16:38:27.211923541+00:00 stderr F 2025-12-12T16:38:27.211Z|00200|binding|INFO|Setting lport openshift-marketplace_community-operators-4sccg down in Southbound 2025-12-12T16:38:57.380496825+00:00 stderr F 2025-12-12T16:38:57.380Z|00201|memory_trim|INFO|Detected inactivity (last active 30005 ms ago): trimming memory 2025-12-12T16:40:38.743851939+00:00 stderr F 2025-12-12T16:40:38.743Z|00202|binding|INFO|Claiming lport openshift-must-gather-2sjxj_must-gather-v4h5l for this chassis. 2025-12-12T16:40:38.743851939+00:00 stderr F 2025-12-12T16:40:38.743Z|00203|binding|INFO|openshift-must-gather-2sjxj_must-gather-v4h5l: Claiming 0a:58:0a:d9:00:4d 10.217.0.77 2025-12-12T16:40:38.785523706+00:00 stderr F 2025-12-12T16:40:38.785Z|00204|binding|INFO|Setting lport openshift-must-gather-2sjxj_must-gather-v4h5l ovn-installed in OVS 2025-12-12T16:40:38.785523706+00:00 stderr F 2025-12-12T16:40:38.785Z|00205|binding|INFO|Setting lport openshift-must-gather-2sjxj_must-gather-v4h5l up in Southbound 2025-12-12T16:41:08.746793623+00:00 stderr F 2025-12-12T16:41:08.746Z|00206|memory_trim|INFO|Detected inactivity (last active 30003 ms ago): trimming memory 2025-12-12T16:43:03.857228591+00:00 stderr F 2025-12-12T16:43:03.857Z|00207|binding|INFO|Removing iface e61ff18407c4da6 ovn-installed in OVS 2025-12-12T16:43:03.857972830+00:00 stderr F 2025-12-12T16:43:03.857Z|00208|binding|INFO|Removing lport openshift-must-gather-2sjxj_must-gather-v4h5l ovn-installed in OVS ././@LongLink0000644000000000000000000000024500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/sbdb/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015117043062033066 5ustar zuulzuul././@LongLink0000644000000000000000000000025200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/sbdb/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000644000175000017500000000446515117043043033100 0ustar zuulzuul2025-12-12T16:25:31.558701296+00:00 stderr F + [[ -f /env/_master ]] 2025-12-12T16:25:31.558701296+00:00 stderr F + . 
/ovnkube-lib/ovnkube-lib.sh 2025-12-12T16:25:31.558701296+00:00 stderr F ++ set -x 2025-12-12T16:25:31.558701296+00:00 stderr F ++ K8S_NODE= 2025-12-12T16:25:31.558998024+00:00 stderr F ++ [[ -n '' ]] 2025-12-12T16:25:31.558998024+00:00 stderr F ++ northd_pidfile=/var/run/ovn/ovn-northd.pid 2025-12-12T16:25:31.558998024+00:00 stderr F ++ controller_pidfile=/var/run/ovn/ovn-controller.pid 2025-12-12T16:25:31.558998024+00:00 stderr F ++ controller_logfile=/var/log/ovn/acl-audit-log.log 2025-12-12T16:25:31.558998024+00:00 stderr F ++ vswitch_dbsock=/var/run/openvswitch/db.sock 2025-12-12T16:25:31.558998024+00:00 stderr F ++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid 2025-12-12T16:25:31.558998024+00:00 stderr F ++ nbdb_sock=/var/run/ovn/ovnnb_db.sock 2025-12-12T16:25:31.558998024+00:00 stderr F ++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl 2025-12-12T16:25:31.558998024+00:00 stderr F ++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid 2025-12-12T16:25:31.558998024+00:00 stderr F ++ sbdb_sock=/var/run/ovn/ovnsb_db.sock 2025-12-12T16:25:31.558998024+00:00 stderr F ++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl 2025-12-12T16:25:31.560255755+00:00 stderr F + trap quit-sbdb TERM INT 2025-12-12T16:25:31.560277366+00:00 stderr F + start-sbdb info 2025-12-12T16:25:31.560277366+00:00 stderr F + local log_level=info 2025-12-12T16:25:31.560277366+00:00 stderr F + [[ 1 -ne 1 ]] 2025-12-12T16:25:31.560551083+00:00 stderr F + wait 23749 2025-12-12T16:25:31.560889111+00:00 stderr F + exec /usr/share/ovn/scripts/ovn-ctl --no-monitor --db-sb-sock=/var/run/ovn/ovnsb_db.sock '--ovn-sb-log=-vconsole:info -vfile:off -vPATTERN:console:%D{%Y-%m-%dT%H:%M:%S.###Z}|%05N|%c%T|%p|%m' run_sb_ovsdb 2025-12-12T16:25:31.688568604+00:00 stderr F 2025-12-12T16:25:31.688Z|00001|vlog|INFO|opened log file /var/log/ovn/ovsdb-server-sb.log 2025-12-12T16:25:31.800377567+00:00 stderr F 2025-12-12T16:25:31.800Z|00002|ovsdb_server|INFO|ovsdb-server (Open vSwitch) 3.5.2-33.el9fdp 2025-12-12T16:25:41.805905027+00:00 stderr F 2025-12-12T16:25:41.805Z|00003|memory|INFO|17736 kB peak resident set size after 10.1 seconds 2025-12-12T16:25:41.806782549+00:00 stderr F 2025-12-12T16:25:41.806Z|00004|memory|INFO|atoms:18856 cells:17964 json-caches:2 monitors:5 n-weak-refs:263 sessions:3 ././@LongLink0000644000000000000000000000025600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kubecfg-setup/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015117043062033066 5ustar zuulzuul././@LongLink0000644000000000000000000000026300000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kubecfg-setup/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000644000175000017500000000000015117043043033055 0ustar zuulzuul././@LongLink0000644000000000000000000000024500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/nbdb/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015117043062033066 5ustar zuulzuul././@LongLink0000644000000000000000000000025200000000000011602 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/nbdb/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000644000175000017500000000457115117043043033076 0ustar zuulzuul2025-12-12T16:25:29.294990806+00:00 stderr F + [[ -f /env/_master ]] 2025-12-12T16:25:29.294990806+00:00 stderr F + . /ovnkube-lib/ovnkube-lib.sh 2025-12-12T16:25:29.295095019+00:00 stderr F ++ set -x 2025-12-12T16:25:29.295095019+00:00 stderr F ++ K8S_NODE=crc 2025-12-12T16:25:29.295095019+00:00 stderr F ++ [[ -n crc ]] 2025-12-12T16:25:29.295095019+00:00 stderr F ++ [[ -f /env/crc ]] 2025-12-12T16:25:29.295095019+00:00 stderr F ++ northd_pidfile=/var/run/ovn/ovn-northd.pid 2025-12-12T16:25:29.295095019+00:00 stderr F ++ controller_pidfile=/var/run/ovn/ovn-controller.pid 2025-12-12T16:25:29.295105419+00:00 stderr F ++ controller_logfile=/var/log/ovn/acl-audit-log.log 2025-12-12T16:25:29.295105419+00:00 stderr F ++ vswitch_dbsock=/var/run/openvswitch/db.sock 2025-12-12T16:25:29.295105419+00:00 stderr F ++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid 2025-12-12T16:25:29.295113789+00:00 stderr F ++ nbdb_sock=/var/run/ovn/ovnnb_db.sock 2025-12-12T16:25:29.295113789+00:00 stderr F ++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl 2025-12-12T16:25:29.295121019+00:00 stderr F ++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid 2025-12-12T16:25:29.295127979+00:00 stderr F ++ sbdb_sock=/var/run/ovn/ovnsb_db.sock 2025-12-12T16:25:29.295127979+00:00 stderr F ++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl 2025-12-12T16:25:29.296286569+00:00 stderr F + trap quit-nbdb TERM INT 2025-12-12T16:25:29.296286569+00:00 stderr F + start-nbdb info 2025-12-12T16:25:29.296303579+00:00 stderr F + local log_level=info 2025-12-12T16:25:29.296303579+00:00 stderr F + [[ 1 -ne 1 ]] 2025-12-12T16:25:29.296648388+00:00 stderr F + wait 23630 2025-12-12T16:25:29.296861733+00:00 stderr F + exec /usr/share/ovn/scripts/ovn-ctl --no-monitor --db-nb-sock=/var/run/ovn/ovnnb_db.sock '--ovn-nb-log=-vconsole:info -vfile:off -vPATTERN:console:%D{%Y-%m-%dT%H:%M:%S.###Z}|%05N|%c%T|%p|%m' run_nb_ovsdb 2025-12-12T16:25:29.413983190+00:00 stderr F 2025-12-12T16:25:29.413Z|00001|vlog|INFO|opened log file /var/log/ovn/ovsdb-server-nb.log 2025-12-12T16:25:29.472525893+00:00 stderr F 2025-12-12T16:25:29.472Z|00002|ovsdb_server|INFO|ovsdb-server (Open vSwitch) 3.5.2-33.el9fdp 2025-12-12T16:25:39.478683769+00:00 stderr F 2025-12-12T16:25:39.478Z|00003|memory|INFO|14336 kB peak resident set size after 10.1 seconds 2025-12-12T16:25:39.478824543+00:00 stderr F 2025-12-12T16:25:39.478Z|00004|memory|INFO|atoms:7148 cells:4847 json-caches:2 monitors:4 n-weak-refs:165 sessions:2 ././@LongLink0000644000000000000000000000024700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/northd/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015117043062033066 5ustar zuulzuul././@LongLink0000644000000000000000000000025400000000000011604 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/northd/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000644000175000017500000001064715117043043033077 0ustar zuulzuul2025-12-12T16:25:29.190034625+00:00 stderr F + [[ -f /env/_master ]] 2025-12-12T16:25:29.190034625+00:00 stderr F + . /ovnkube-lib/ovnkube-lib.sh 2025-12-12T16:25:29.190034625+00:00 stderr F ++ set -x 2025-12-12T16:25:29.190034625+00:00 stderr F ++ K8S_NODE= 2025-12-12T16:25:29.190301492+00:00 stderr F ++ [[ -n '' ]] 2025-12-12T16:25:29.190301492+00:00 stderr F ++ northd_pidfile=/var/run/ovn/ovn-northd.pid 2025-12-12T16:25:29.190301492+00:00 stderr F ++ controller_pidfile=/var/run/ovn/ovn-controller.pid 2025-12-12T16:25:29.190301492+00:00 stderr F ++ controller_logfile=/var/log/ovn/acl-audit-log.log 2025-12-12T16:25:29.190301492+00:00 stderr F ++ vswitch_dbsock=/var/run/openvswitch/db.sock 2025-12-12T16:25:29.190301492+00:00 stderr F ++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid 2025-12-12T16:25:29.190301492+00:00 stderr F ++ nbdb_sock=/var/run/ovn/ovnnb_db.sock 2025-12-12T16:25:29.190301492+00:00 stderr F ++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl 2025-12-12T16:25:29.190301492+00:00 stderr F ++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid 2025-12-12T16:25:29.190301492+00:00 stderr F ++ sbdb_sock=/var/run/ovn/ovnsb_db.sock 2025-12-12T16:25:29.190301492+00:00 stderr F ++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl 2025-12-12T16:25:29.191202934+00:00 stderr F + trap quit-ovn-northd TERM INT 2025-12-12T16:25:29.191216135+00:00 stderr F + start-ovn-northd info 2025-12-12T16:25:29.191224445+00:00 stderr F + local log_level=info 2025-12-12T16:25:29.191235145+00:00 stderr F + [[ 1 -ne 1 ]] 2025-12-12T16:25:29.191892032+00:00 stderr F ++ date -Iseconds 2025-12-12T16:25:29.195085282+00:00 stderr F + echo '2025-12-12T16:25:29+00:00 - starting ovn-northd' 2025-12-12T16:25:29.195164924+00:00 stdout F 2025-12-12T16:25:29+00:00 - starting ovn-northd 2025-12-12T16:25:29.195464942+00:00 stderr F + wait 23620 2025-12-12T16:25:29.195733158+00:00 stderr F + exec ovn-northd --no-chdir -vconsole:info -vfile:off '-vPATTERN:console:%D{%Y-%m-%dT%H:%M:%S.###Z}|%05N|%c%T|%p|%m' --pidfile /var/run/ovn/ovn-northd.pid --n-threads=1 2025-12-12T16:25:29.200581730+00:00 stderr F 2025-12-12T16:25:29.200Z|00001|ovn_northd|INFO|OVN internal version is : [25.03.1-20.41.0-78.8] 2025-12-12T16:25:29.200886418+00:00 stderr F 2025-12-12T16:25:29.200Z|00002|reconnect|INFO|unix:/var/run/ovn/ovnnb_db.sock: connecting... 2025-12-12T16:25:29.200886418+00:00 stderr F 2025-12-12T16:25:29.200Z|00003|reconnect|INFO|unix:/var/run/ovn/ovnnb_db.sock: connection attempt failed (No such file or directory) 2025-12-12T16:25:29.200916709+00:00 stderr F 2025-12-12T16:25:29.200Z|00004|ovn_northd|INFO|OVN NB IDL reconnected, force recompute. 2025-12-12T16:25:29.200964880+00:00 stderr F 2025-12-12T16:25:29.200Z|00005|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connecting... 2025-12-12T16:25:29.200964880+00:00 stderr F 2025-12-12T16:25:29.200Z|00006|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connection attempt failed (No such file or directory) 2025-12-12T16:25:29.200975620+00:00 stderr F 2025-12-12T16:25:29.200Z|00007|ovn_northd|INFO|OVN SB IDL reconnected, force recompute. 2025-12-12T16:25:30.202600734+00:00 stderr F 2025-12-12T16:25:30.202Z|00008|reconnect|INFO|unix:/var/run/ovn/ovnnb_db.sock: connecting... 
2025-12-12T16:25:30.202678356+00:00 stderr F 2025-12-12T16:25:30.202Z|00009|reconnect|INFO|unix:/var/run/ovn/ovnnb_db.sock: connected 2025-12-12T16:25:30.202760098+00:00 stderr F 2025-12-12T16:25:30.202Z|00010|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connecting... 2025-12-12T16:25:30.202863960+00:00 stderr F 2025-12-12T16:25:30.202Z|00011|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connection attempt failed (No such file or directory) 2025-12-12T16:25:30.202874370+00:00 stderr F 2025-12-12T16:25:30.202Z|00012|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: waiting 2 seconds before reconnect 2025-12-12T16:25:32.203419558+00:00 stderr F 2025-12-12T16:25:32.203Z|00013|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connecting... 2025-12-12T16:25:32.204505615+00:00 stderr F 2025-12-12T16:25:32.204Z|00014|reconnect|INFO|unix:/var/run/ovn/ovnsb_db.sock: connected 2025-12-12T16:25:32.204760102+00:00 stderr F 2025-12-12T16:25:32.204Z|00015|ovn_northd|INFO|ovn-northd lock acquired. This ovn-northd instance is now active. 2025-12-12T16:25:46.803421524+00:00 stderr F 2025-12-12T16:25:46.803Z|00016|memory|INFO|15360 kB peak resident set size after 17.6 seconds 2025-12-12T16:25:46.803636680+00:00 stderr F 2025-12-12T16:25:46.803Z|00017|memory|INFO|idl-cells-OVN_Northbound:3933 idl-cells-OVN_Southbound:15715 ././@LongLink0000644000000000000000000000027100000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh_ab30f5e0-5097-4413-bb3e-fe8ca350378f/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015117043043033051 5ustar zuulzuul././@LongLink0000644000000000000000000000031200000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh_ab30f5e0-5097-4413-bb3e-fe8ca350378f/collect-profiles/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015117043062033052 5ustar zuulzuul././@LongLink0000644000000000000000000000031700000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh_ab30f5e0-5097-4413-bb3e-fe8ca350378f/collect-profiles/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000644000175000017500000000134015117043043033051 0ustar zuulzuul2025-12-12T16:30:01.230661105+00:00 stderr F time="2025-12-12T16:30:01Z" level=info msg="Successfully created configMap openshift-operator-lifecycle-manager/olm-operator-heap-2kvt8" 2025-12-12T16:30:01.288516433+00:00 stderr F time="2025-12-12T16:30:01Z" level=info msg="Successfully created configMap openshift-operator-lifecycle-manager/catalog-operator-heap-qzs5n" 2025-12-12T16:30:01.292305839+00:00 stderr F time="2025-12-12T16:30:01Z" level=info msg="Successfully deleted configMap openshift-operator-lifecycle-manager/catalog-operator-heap-9rg4c" 2025-12-12T16:30:01.297319255+00:00 stderr F time="2025-12-12T16:30:01Z" level=info msg="Successfully deleted configMap openshift-operator-lifecycle-manager/olm-operator-heap-mn8zt" ././@LongLink0000644000000000000000000000025600000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-6567f5ffdb-jrpfr_5b0a332f-52bd-409b-b5c0-f2723c617bed/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authenticati0000755000175000017500000000000015117043043033130 5ustar zuulzuul././@LongLink0000644000000000000000000000027600000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-6567f5ffdb-jrpfr_5b0a332f-52bd-409b-b5c0-f2723c617bed/oauth-openshift/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authenticati0000755000175000017500000000000015117043062033131 5ustar zuulzuul././@LongLink0000644000000000000000000000030300000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-6567f5ffdb-jrpfr_5b0a332f-52bd-409b-b5c0-f2723c617bed/oauth-openshift/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authenticati0000644000175000017500000003747015117043043033145 0ustar zuulzuul2025-12-12T16:18:59.351328542+00:00 stdout F Copying system trust bundle 2025-12-12T16:18:59.399568414+00:00 stderr F I1212 16:18:59.399468 1 dynamic_serving_content.go:113] "Loaded a new cert/key pair" name="serving-cert::/var/config/system/secrets/v4-0-config-system-serving-cert/tls.crt::/var/config/system/secrets/v4-0-config-system-serving-cert/tls.key" 2025-12-12T16:18:59.400088947+00:00 stderr F I1212 16:18:59.400039 1 dynamic_serving_content.go:113] "Loaded a new cert/key pair" name="sni-serving-cert::/var/config/system/secrets/v4-0-config-system-router-certs/apps-crc.testing::/var/config/system/secrets/v4-0-config-system-router-certs/apps-crc.testing" 2025-12-12T16:18:59.784390328+00:00 stderr F I1212 16:18:59.784329 1 audit.go:340] Using audit backend: ignoreErrors 2025-12-12T16:18:59.798330803+00:00 stderr F I1212 16:18:59.798238 1 requestheader_controller.go:244] Loaded a new request header values for RequestHeaderAuthRequestController 2025-12-12T16:18:59.815500297+00:00 stderr F I1212 16:18:59.815418 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400 2025-12-12T16:18:59.815500297+00:00 stderr F I1212 16:18:59.815446 1 maxinflight.go:145] "Initialized mutatingChan" len=200 2025-12-12T16:18:59.815500297+00:00 stderr F I1212 16:18:59.815466 1 maxinflight.go:116] "Set denominator for readonly requests" limit=400 2025-12-12T16:18:59.815500297+00:00 stderr F I1212 16:18:59.815471 1 maxinflight.go:120] "Set denominator for mutating requests" limit=200 2025-12-12T16:18:59.819949397+00:00 stderr F I1212 16:18:59.819881 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:18:59.820190073+00:00 stderr F I1212 16:18:59.820141 1 genericapiserver.go:528] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:18:59.824464379+00:00 stderr F I1212 16:18:59.824399 1 configmap_cafile_content.go:202] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:18:59.824464379+00:00 stderr F I1212 16:18:59.824441 1 shared_informer.go:311] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 2025-12-12T16:18:59.824581922+00:00 stderr F I1212 16:18:59.824519 1 configmap_cafile_content.go:202] "Starting controller" 
name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:18:59.824611393+00:00 stderr F I1212 16:18:59.824388 1 requestheader_controller.go:169] Starting RequestHeaderAuthRequestController 2025-12-12T16:18:59.824659994+00:00 stderr F I1212 16:18:59.824640 1 shared_informer.go:311] Waiting for caches to sync for RequestHeaderAuthRequestController 2025-12-12T16:18:59.824876559+00:00 stderr F I1212 16:18:59.824595 1 shared_informer.go:311] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file 2025-12-12T16:18:59.824959421+00:00 stderr F I1212 16:18:59.824894 1 dynamic_serving_content.go:132] "Starting controller" name="serving-cert::/var/config/system/secrets/v4-0-config-system-serving-cert/tls.crt::/var/config/system/secrets/v4-0-config-system-serving-cert/tls.key" 2025-12-12T16:18:59.825035223+00:00 stderr F I1212 16:18:59.824989 1 dynamic_serving_content.go:132] "Starting controller" name="sni-serving-cert::/var/config/system/secrets/v4-0-config-system-router-certs/apps-crc.testing::/var/config/system/secrets/v4-0-config-system-router-certs/apps-crc.testing" 2025-12-12T16:18:59.825117485+00:00 stderr F I1212 16:18:59.825070 1 tlsconfig.go:200] "Loaded serving cert" certName="serving-cert::/var/config/system/secrets/v4-0-config-system-serving-cert/tls.crt::/var/config/system/secrets/v4-0-config-system-serving-cert/tls.key" certDetail="\"oauth-openshift.openshift-authentication.svc\" [serving] validServingFor=[oauth-openshift.openshift-authentication.svc,oauth-openshift.openshift-authentication.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:18:59.825007153 +0000 UTC))" 2025-12-12T16:18:59.827952345+00:00 stderr F I1212 16:18:59.827886 1 named_certificates.go:53] "Loaded SNI cert" index=1 certName="sni-serving-cert::/var/config/system/secrets/v4-0-config-system-router-certs/apps-crc.testing::/var/config/system/secrets/v4-0-config-system-router-certs/apps-crc.testing" certDetail="\"*.apps-crc.testing\" [serving] validServingFor=[*.apps-crc.testing] issuer=\"ingress-operator@1762070846\" (2025-11-02 08:07:26 +0000 UTC to 2027-11-02 08:07:27 +0000 UTC (now=2025-12-12 16:18:59.827841543 +0000 UTC))" 2025-12-12T16:18:59.828263653+00:00 stderr F I1212 16:18:59.828163 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556339\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556339\" (2025-12-12 15:18:59 +0000 UTC to 2026-12-12 15:18:59 +0000 UTC (now=2025-12-12 16:18:59.82813871 +0000 UTC))" 2025-12-12T16:18:59.828308974+00:00 stderr F I1212 16:18:59.828282 1 secure_serving.go:213] Serving securely on [::]:6443 2025-12-12T16:18:59.828354415+00:00 stderr F I1212 16:18:59.828332 1 genericapiserver.go:681] [graceful-termination] waiting for shutdown to be initiated 2025-12-12T16:18:59.828385776+00:00 stderr F I1212 16:18:59.828364 1 tlsconfig.go:240] "Starting DynamicServingCertificateController" 2025-12-12T16:18:59.829785691+00:00 stderr F I1212 16:18:59.829735 1 reflector.go:351] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.29.2/tools/cache/reflector.go:229 2025-12-12T16:18:59.832210591+00:00 stderr F I1212 16:18:59.832158 1 reflector.go:351] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.29.2/tools/cache/reflector.go:229 
2025-12-12T16:18:59.833685987+00:00 stderr F I1212 16:18:59.833646 1 reflector.go:351] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.29.2/tools/cache/reflector.go:229 2025-12-12T16:18:59.838494896+00:00 stderr F I1212 16:18:59.838442 1 reflector.go:351] Caches populated for *v1.Group from k8s.io/client-go@v0.29.2/tools/cache/reflector.go:229 2025-12-12T16:18:59.925357383+00:00 stderr F I1212 16:18:59.925269 1 shared_informer.go:318] Caches are synced for RequestHeaderAuthRequestController 2025-12-12T16:18:59.925435155+00:00 stderr F I1212 16:18:59.925401 1 shared_informer.go:318] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 2025-12-12T16:18:59.925495157+00:00 stderr F I1212 16:18:59.925467 1 shared_informer.go:318] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file 2025-12-12T16:18:59.925727613+00:00 stderr F I1212 16:18:59.925690 1 tlsconfig.go:178] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:18:59.925647821 +0000 UTC))" 2025-12-12T16:18:59.926008190+00:00 stderr F I1212 16:18:59.925969 1 tlsconfig.go:200] "Loaded serving cert" certName="serving-cert::/var/config/system/secrets/v4-0-config-system-serving-cert/tls.crt::/var/config/system/secrets/v4-0-config-system-serving-cert/tls.key" certDetail="\"oauth-openshift.openshift-authentication.svc\" [serving] validServingFor=[oauth-openshift.openshift-authentication.svc,oauth-openshift.openshift-authentication.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:18:59.925949428 +0000 UTC))" 2025-12-12T16:18:59.926169464+00:00 stderr F I1212 16:18:59.926135 1 named_certificates.go:53] "Loaded SNI cert" index=1 certName="sni-serving-cert::/var/config/system/secrets/v4-0-config-system-router-certs/apps-crc.testing::/var/config/system/secrets/v4-0-config-system-router-certs/apps-crc.testing" certDetail="\"*.apps-crc.testing\" [serving] validServingFor=[*.apps-crc.testing] issuer=\"ingress-operator@1762070846\" (2025-11-02 08:07:26 +0000 UTC to 2027-11-02 08:07:27 +0000 UTC (now=2025-12-12 16:18:59.926121892 +0000 UTC))" 2025-12-12T16:18:59.926416990+00:00 stderr F I1212 16:18:59.926339 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556339\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556339\" (2025-12-12 15:18:59 +0000 UTC to 2026-12-12 15:18:59 +0000 UTC (now=2025-12-12 16:18:59.926324827 +0000 UTC))" 2025-12-12T16:18:59.926560163+00:00 stderr F I1212 16:18:59.926526 1 tlsconfig.go:178] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:18:59.926514622 +0000 UTC))" 2025-12-12T16:18:59.926560163+00:00 stderr F I1212 16:18:59.926551 1 tlsconfig.go:178] "Loaded client CA" 
index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:18:59.926541623 +0000 UTC))" 2025-12-12T16:18:59.926574324+00:00 stderr F I1212 16:18:59.926566 1 tlsconfig.go:178] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:18:59.926557393 +0000 UTC))" 2025-12-12T16:18:59.926601274+00:00 stderr F I1212 16:18:59.926580 1 tlsconfig.go:178] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:18:59.926571663 +0000 UTC))" 2025-12-12T16:18:59.926609224+00:00 stderr F I1212 16:18:59.926603 1 tlsconfig.go:178] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:18:59.926589514 +0000 UTC))" 2025-12-12T16:18:59.926693867+00:00 stderr F I1212 16:18:59.926632 1 tlsconfig.go:178] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:18:59.926609144 +0000 UTC))" 2025-12-12T16:18:59.926693867+00:00 stderr F I1212 16:18:59.926656 1 tlsconfig.go:178] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:18:59.926639185 +0000 UTC))" 2025-12-12T16:18:59.926693867+00:00 stderr F I1212 16:18:59.926671 1 tlsconfig.go:178] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:18:59.926661616 +0000 UTC))" 2025-12-12T16:18:59.926693867+00:00 stderr F I1212 16:18:59.926689 1 tlsconfig.go:178] "Loaded client CA" index=8 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:18:59.926675526 +0000 UTC))" 2025-12-12T16:18:59.926713127+00:00 stderr F I1212 16:18:59.926706 1 tlsconfig.go:178] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:18:59.926698057 +0000 UTC))" 2025-12-12T16:18:59.926740378+00:00 stderr F I1212 16:18:59.926723 1 tlsconfig.go:178] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:18:59.926712067 +0000 UTC))" 2025-12-12T16:18:59.926963523+00:00 stderr F I1212 16:18:59.926925 1 tlsconfig.go:200] "Loaded serving cert" certName="serving-cert::/var/config/system/secrets/v4-0-config-system-serving-cert/tls.crt::/var/config/system/secrets/v4-0-config-system-serving-cert/tls.key" certDetail="\"oauth-openshift.openshift-authentication.svc\" [serving] validServingFor=[oauth-openshift.openshift-authentication.svc,oauth-openshift.openshift-authentication.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:18:59.926907322 +0000 UTC))" 2025-12-12T16:18:59.927167198+00:00 stderr F I1212 16:18:59.927130 1 named_certificates.go:53] "Loaded SNI cert" index=1 certName="sni-serving-cert::/var/config/system/secrets/v4-0-config-system-router-certs/apps-crc.testing::/var/config/system/secrets/v4-0-config-system-router-certs/apps-crc.testing" certDetail="\"*.apps-crc.testing\" [serving] validServingFor=[*.apps-crc.testing] issuer=\"ingress-operator@1762070846\" (2025-11-02 08:07:26 +0000 UTC to 2027-11-02 08:07:27 +0000 UTC (now=2025-12-12 16:18:59.927116857 +0000 UTC))" 2025-12-12T16:18:59.927365903+00:00 stderr F I1212 16:18:59.927343 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556339\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556339\" (2025-12-12 15:18:59 +0000 UTC to 2026-12-12 15:18:59 +0000 UTC (now=2025-12-12 16:18:59.927325212 +0000 UTC))" ././@LongLink0000644000000000000000000000025500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-fhkjl_17b87002-b798-480a-8e17-83053d698239/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diag0000755000175000017500000000000015117043043033041 5ustar zuulzuul././@LongLink0000644000000000000000000000031400000000000011601 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-fhkjl_17b87002-b798-480a-8e17-83053d698239/network-check-target-container/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diag0000755000175000017500000000000015117043062033042 5ustar zuulzuul././@LongLink0000644000000000000000000000032100000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-fhkjl_17b87002-b798-480a-8e17-83053d698239/network-check-target-container/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diag0000644000175000017500000000007515117043043033045 0ustar zuulzuul2025-12-12T16:16:55.800585554+00:00 stdout F serving on 8080 ././@LongLink0000644000000000000000000000027100000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425935-7hkrm_19e81fea-065e-43b5-8e56-49bfcfa342f7/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015117043043033051 5ustar zuulzuul././@LongLink0000644000000000000000000000031200000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425935-7hkrm_19e81fea-065e-43b5-8e56-49bfcfa342f7/collect-profiles/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015117043063033053 5ustar zuulzuul././@LongLink0000644000000000000000000000031700000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425935-7hkrm_19e81fea-065e-43b5-8e56-49bfcfa342f7/collect-profiles/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000644000175000017500000000042115117043043033050 0ustar zuulzuul2025-12-12T16:16:45.137632677+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="error verifying provided cert and key: certificate has expired" 2025-12-12T16:16:45.137632677+00:00 stderr F time="2025-12-12T16:16:45Z" level=info msg="generating a new cert and key" ././@LongLink0000644000000000000000000000026000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_observability-operator-78c97476f4-qxqmn_9425bd1f-c734-4ec0-9e2e-80b2d5ece709/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_ob0000755000175000017500000000000015117043043033144 5ustar zuulzuul././@LongLink0000644000000000000000000000027100000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_observability-operator-78c97476f4-qxqmn_9425bd1f-c734-4ec0-9e2e-80b2d5ece709/operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_ob0000755000175000017500000000000015117043062033145 5ustar zuulzuul././@LongLink0000644000000000000000000000027600000000000011610 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_observability-operator-78c97476f4-qxqmn_9425bd1f-c734-4ec0-9e2e-80b2d5ece709/operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_ob0000644000175000017500000005373615117043043033164 0ustar zuulzuul2025-12-12T16:27:30.130292392+00:00 stderr F 2025-12-12T16:27:30Z INFO setup running with arguments {"namespace": "openshift-operators", "metrics-bind-address": ":8080", "images": "alertmanager=registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01,health-analyzer=registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91596d5356a2414a2bd103baaef9fa7838c672eb2,korrel8r=registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4,perses=registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade,prometheus=registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b,thanos=registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4,ui-dashboards=registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb,ui-distributed-tracing-pf4=registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9,ui-distributed-tracing-pf5=registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8,ui-distributed-tracing=registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f,ui-logging-pf4=registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9,ui-logging=registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b,ui-monitoring-pf5=registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c,ui-monitoring=registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9,ui-troubleshooting-panel=registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3", "openshift.enabled": true} 2025-12-12T16:27:30.197017511+00:00 stderr F 2025-12-12T16:27:30Z INFO events Event(v1.ObjectReference{Kind:"", Namespace:"", Name:"serving-cert::/etc/tls/private/tls.crt::/etc/tls/private/tls.key", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'TLSConfigChanged' loaded serving cert ["serving-cert::/etc/tls/private/tls.crt::/etc/tls/private/tls.key"]: "observability-operator.openshift-operators.svc" [serving] 
validServingFor=[observability-operator.openshift-operators.svc,observability-operator.openshift-operators.svc.cluster.local] issuer="openshift-service-serving-signer@1762069924" (2025-12-12 16:26:57 +0000 UTC to 2027-12-12 16:26:58 +0000 UTC (now=2025-12-12 16:27:30.135112924 +0000 UTC)) 2025-12-12T16:27:30.234625233+00:00 stderr F 2025-12-12T16:27:30Z INFO setup starting manager 2025-12-12T16:27:30.234842818+00:00 stderr F 2025-12-12T16:27:30Z INFO controller-runtime.metrics Starting metrics server 2025-12-12T16:27:30.237445214+00:00 stderr F I1212 16:27:30.236318 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:27:30.237445214+00:00 stderr F I1212 16:27:30.236360 1 shared_informer.go:349] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:27:30.237445214+00:00 stderr F I1212 16:27:30.236417 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:27:30.237445214+00:00 stderr F 2025-12-12T16:27:30Z INFO starting server {"name": "pprof", "addr": "127.0.0.1:8083"} 2025-12-12T16:27:30.237445214+00:00 stderr F 2025-12-12T16:27:30Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-12-12T16:27:30.337542388+00:00 stderr F I1212 16:27:30.337461 1 shared_informer.go:356] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:27:30.396286604+00:00 stderr F 2025-12-12T16:27:30Z INFO events Event(v1.ObjectReference{Kind:"", Namespace:"", Name:"client-ca::kube-system::extension-apiserver-authentication::client-ca-file", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'TLSConfigChanged' loaded client CA [0/"client-ca::kube-system::extension-apiserver-authentication::client-ca-file"]: "admin-kubeconfig-signer" [] issuer="" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:27:30.338128252 +0000 UTC)) 2025-12-12T16:27:30.396426748+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "cluster-observability", "controllerGroup": "observability.openshift.io", "controllerKind": "ObservabilityInstaller", "source": "kind source: *v1alpha1.UIPlugin"} 2025-12-12T16:27:30.396472979+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "thanosquerier", "controllerGroup": "monitoring.rhobs", "controllerKind": "ThanosQuerier", "source": "kind source: *v1.Secret"} 2025-12-12T16:27:30.396519100+00:00 stderr F 2025-12-12T16:27:30Z INFO events Event(v1.ObjectReference{Kind:"", Namespace:"", Name:"client-ca::kube-system::extension-apiserver-authentication::client-ca-file", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'TLSConfigChanged' loaded client CA [1/"client-ca::kube-system::extension-apiserver-authentication::client-ca-file"]: "kube-control-plane-signer" [] issuer="" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:27:30.338195694 +0000 UTC)) 2025-12-12T16:27:30.396558061+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "monitoringstack", "controllerGroup": "monitoring.rhobs", "controllerKind": "MonitoringStack", "source": "kind source: *v1.PodDisruptionBudget"} 2025-12-12T16:27:30.396587282+00:00 stderr F 2025-12-12T16:27:30Z INFO events Event(v1.ObjectReference{Kind:"", Namespace:"", 
Name:"client-ca::kube-system::extension-apiserver-authentication::client-ca-file", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'TLSConfigChanged' loaded client CA [2/"client-ca::kube-system::extension-apiserver-authentication::client-ca-file"]: "kube-apiserver-to-kubelet-signer" [] issuer="" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:27:30.338207644 +0000 UTC)) 2025-12-12T16:27:30.396622883+00:00 stderr F 2025-12-12T16:27:30Z INFO events Event(v1.ObjectReference{Kind:"", Namespace:"", Name:"client-ca::kube-system::extension-apiserver-authentication::client-ca-file", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'TLSConfigChanged' loaded client CA [3/"client-ca::kube-system::extension-apiserver-authentication::client-ca-file"]: "kubelet-bootstrap-kubeconfig-signer" [] issuer="" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:27:30.338218235 +0000 UTC)) 2025-12-12T16:27:30.396665684+00:00 stderr F 2025-12-12T16:27:30Z INFO events Event(v1.ObjectReference{Kind:"", Namespace:"", Name:"client-ca::kube-system::extension-apiserver-authentication::client-ca-file", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'TLSConfigChanged' loaded client CA [4/"client-ca::kube-system::extension-apiserver-authentication::client-ca-file"]: "openshift-kube-apiserver-operator_node-system-admin-signer@1762069887" [] issuer="" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:27:30.338229895 +0000 UTC)) 2025-12-12T16:27:30.396722805+00:00 stderr F 2025-12-12T16:27:30Z INFO events Event(v1.ObjectReference{Kind:"", Namespace:"", Name:"client-ca::kube-system::extension-apiserver-authentication::client-ca-file", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'TLSConfigChanged' loaded client CA [5/"client-ca::kube-system::extension-apiserver-authentication::client-ca-file"]: "openshift-kube-controller-manager-operator_csr-signer-signer@1762071455" [] issuer="" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:27:30.338297127 +0000 UTC)) 2025-12-12T16:27:30.396759326+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "monitoringstack", "controllerGroup": "monitoring.rhobs", "controllerKind": "MonitoringStack", "source": "kind source: *v1.Service"} 2025-12-12T16:27:30.396798117+00:00 stderr F 2025-12-12T16:27:30Z INFO events Event(v1.ObjectReference{Kind:"", Namespace:"", Name:"client-ca::kube-system::extension-apiserver-authentication::client-ca-file", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'TLSConfigChanged' loaded client CA [6/"client-ca::kube-system::extension-apiserver-authentication::client-ca-file"]: "kube-csr-signer_@1762071455" [] issuer="openshift-kube-controller-manager-operator_csr-signer-signer@1762071455" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:27:30.338311197 +0000 UTC)) 2025-12-12T16:27:30.396833998+00:00 stderr F 2025-12-12T16:27:30Z INFO events Event(v1.ObjectReference{Kind:"", Namespace:"", Name:"client-ca::kube-system::extension-apiserver-authentication::client-ca-file", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'TLSConfigChanged' loaded client CA [7/"client-ca::kube-system::extension-apiserver-authentication::client-ca-file"]: 
"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209" [] issuer="" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:27:30.338333678 +0000 UTC)) 2025-12-12T16:27:30.396879729+00:00 stderr F 2025-12-12T16:27:30Z INFO events Event(v1.ObjectReference{Kind:"", Namespace:"", Name:"client-ca::kube-system::extension-apiserver-authentication::client-ca-file", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'TLSConfigChanged' loaded client CA [8/"client-ca::kube-system::extension-apiserver-authentication::client-ca-file"]: "openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209" [] issuer="" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:27:30.338372049 +0000 UTC)) 2025-12-12T16:27:30.396919760+00:00 stderr F 2025-12-12T16:27:30Z INFO events Event(v1.ObjectReference{Kind:"", Namespace:"", Name:"client-ca::kube-system::extension-apiserver-authentication::client-ca-file", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'TLSConfigChanged' loaded client CA [9/"client-ca::kube-system::extension-apiserver-authentication::client-ca-file"]: "admin-kubeconfig-signer-custom" [] issuer="" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:27:30.33843272 +0000 UTC)) 2025-12-12T16:27:30.396961491+00:00 stderr F 2025-12-12T16:27:30Z INFO events Event(v1.ObjectReference{Kind:"", Namespace:"", Name:"serving-cert::/etc/tls/private/tls.crt::/etc/tls/private/tls.key", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'TLSConfigChanged' loaded serving cert ["serving-cert::/etc/tls/private/tls.crt::/etc/tls/private/tls.key"]: "observability-operator.openshift-operators.svc" [serving] validServingFor=[observability-operator.openshift-operators.svc,observability-operator.openshift-operators.svc.cluster.local] issuer="openshift-service-serving-signer@1762069924" (2025-12-12 16:26:57 +0000 UTC to 2027-12-12 16:26:58 +0000 UTC (now=2025-12-12 16:27:30.338664196 +0000 UTC)) 2025-12-12T16:27:30.396995842+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "cluster-observability", "controllerGroup": "observability.openshift.io", "controllerKind": "ObservabilityInstaller", "source": "kind source: *v1alpha1.ObservabilityInstaller"} 2025-12-12T16:27:30.397109895+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "thanosquerier", "controllerGroup": "monitoring.rhobs", "controllerKind": "ThanosQuerier", "source": "kind source: *v1alpha1.ThanosQuerier"} 2025-12-12T16:27:30.397255399+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "monitoringstack", "controllerGroup": "monitoring.rhobs", "controllerKind": "MonitoringStack", "source": "kind source: *v1.Alertmanager"} 2025-12-12T16:27:30.397307390+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "observability-operator", "controllerGroup": "", "controllerKind": "Service", "source": "kind source: *v1.ServiceMonitor"} 2025-12-12T16:27:30.397416883+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "monitoringstack", "controllerGroup": "monitoring.rhobs", "controllerKind": "MonitoringStack", "source": "kind source: *v1.ServiceAccount"} 2025-12-12T16:27:30.397428043+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "monitoringstack", "controllerGroup": "monitoring.rhobs", "controllerKind": 
"MonitoringStack", "source": "kind source: *v1.ServiceMonitor"} 2025-12-12T16:27:30.397428043+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "thanosquerier", "controllerGroup": "monitoring.rhobs", "controllerKind": "ThanosQuerier", "source": "kind source: *v1.Deployment"} 2025-12-12T16:27:30.397485425+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "monitoringstack", "controllerGroup": "monitoring.rhobs", "controllerKind": "MonitoringStack", "source": "kind source: *v1.Role"} 2025-12-12T16:27:30.397485425+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "thanosquerier", "controllerGroup": "monitoring.rhobs", "controllerKind": "ThanosQuerier", "source": "kind source: *v1.ServiceAccount"} 2025-12-12T16:27:30.397519816+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "monitoringstack", "controllerGroup": "monitoring.rhobs", "controllerKind": "MonitoringStack", "source": "kind source: *v1.RoleBinding"} 2025-12-12T16:27:30.397550676+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "thanosquerier", "controllerGroup": "monitoring.rhobs", "controllerKind": "ThanosQuerier", "source": "kind source: *v1.Service"} 2025-12-12T16:27:30.397550676+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "monitoringstack", "controllerGroup": "monitoring.rhobs", "controllerKind": "MonitoringStack", "source": "kind source: *v1.Prometheus"} 2025-12-12T16:27:30.397583967+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "cluster-observability", "controllerGroup": "observability.openshift.io", "controllerKind": "ObservabilityInstaller", "source": "kind source: *v1alpha1.Subscription"} 2025-12-12T16:27:30.397625098+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "thanosquerier", "controllerGroup": "monitoring.rhobs", "controllerKind": "ThanosQuerier", "source": "kind source: *v1.ConfigMap"} 2025-12-12T16:27:30.397675420+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "thanosquerier", "controllerGroup": "monitoring.rhobs", "controllerKind": "ThanosQuerier", "source": "kind source: *v1alpha1.MonitoringStack"} 2025-12-12T16:27:30.397763202+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "cluster-observability", "controllerGroup": "observability.openshift.io", "controllerKind": "ObservabilityInstaller", "source": "kind source: *v1.Namespace"} 2025-12-12T16:27:30.397775892+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "cluster-observability", "controllerGroup": "observability.openshift.io", "controllerKind": "ObservabilityInstaller", "source": "kind source: *v1.ClusterRoleBinding"} 2025-12-12T16:27:30.398009278+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "observability-operator", "controllerGroup": "", "controllerKind": "Service", "source": "kind source: *v1.Service"} 2025-12-12T16:27:30.398079990+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "cluster-observability", "controllerGroup": "observability.openshift.io", "controllerKind": "ObservabilityInstaller", "source": "kind source: *v1.Secret"} 2025-12-12T16:27:30.398128271+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "monitoringstack", "controllerGroup": "monitoring.rhobs", "controllerKind": "MonitoringStack", "source": "kind source: *v1alpha1.MonitoringStack"} 
2025-12-12T16:27:30.398256754+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "cluster-observability", "controllerGroup": "observability.openshift.io", "controllerKind": "ObservabilityInstaller", "source": "kind source: *v1.ClusterRole"} 2025-12-12T16:27:30.398533531+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "uiplugin", "controllerGroup": "observability.openshift.io", "controllerKind": "UIPlugin", "source": "kind source: *v1.ConsolePlugin"} 2025-12-12T16:27:30.398533531+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "uiplugin", "controllerGroup": "observability.openshift.io", "controllerKind": "UIPlugin", "source": "kind source: *v1.Service"} 2025-12-12T16:27:30.398606683+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "uiplugin", "controllerGroup": "observability.openshift.io", "controllerKind": "UIPlugin", "source": "kind source: *v1alpha1.UIPlugin"} 2025-12-12T16:27:30.398606683+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "uiplugin", "controllerGroup": "observability.openshift.io", "controllerKind": "UIPlugin", "source": "kind source: *v1.RoleBinding"} 2025-12-12T16:27:30.398620253+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "uiplugin", "controllerGroup": "observability.openshift.io", "controllerKind": "UIPlugin", "source": "kind source: *v1.ServiceAccount"} 2025-12-12T16:27:30.398620253+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "uiplugin", "controllerGroup": "observability.openshift.io", "controllerKind": "UIPlugin", "source": "kind source: *v1.Deployment"} 2025-12-12T16:27:30.398670675+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "uiplugin", "controllerGroup": "observability.openshift.io", "controllerKind": "UIPlugin", "source": "kind source: *v1.Role"} 2025-12-12T16:27:30.398681565+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "uiplugin", "controllerGroup": "observability.openshift.io", "controllerKind": "UIPlugin", "source": "kind source: *v1alpha1.PersesDatasource"} 2025-12-12T16:27:30.398754407+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "uiplugin", "controllerGroup": "observability.openshift.io", "controllerKind": "UIPlugin", "source": "kind source: *v1alpha1.PersesDashboard"} 2025-12-12T16:27:31.101502833+00:00 stderr F 2025-12-12T16:27:31Z INFO Starting Controller {"controller": "observability-operator", "controllerGroup": "", "controllerKind": "Service"} 2025-12-12T16:27:31.101612805+00:00 stderr F 2025-12-12T16:27:31Z INFO Starting workers {"controller": "observability-operator", "controllerGroup": "", "controllerKind": "Service", "worker count": 1} 2025-12-12T16:27:31.101868182+00:00 stderr F 2025-12-12T16:27:31Z INFO observability-operator Reconciling operator resources {"operator": {"name":"observability-operator","namespace":"openshift-operators"}} 2025-12-12T16:27:31.103103423+00:00 stderr F 2025-12-12T16:27:31Z INFO Starting Controller {"controller": "thanosquerier", "controllerGroup": "monitoring.rhobs", "controllerKind": "ThanosQuerier"} 2025-12-12T16:27:31.103154914+00:00 stderr F 2025-12-12T16:27:31Z INFO Starting workers {"controller": "thanosquerier", "controllerGroup": "monitoring.rhobs", "controllerKind": "ThanosQuerier", "worker count": 1} 2025-12-12T16:27:31.103330539+00:00 stderr F 2025-12-12T16:27:31Z INFO Starting Controller {"controller": 
"monitoringstack", "controllerGroup": "monitoring.rhobs", "controllerKind": "MonitoringStack"} 2025-12-12T16:27:31.103363740+00:00 stderr F 2025-12-12T16:27:31Z INFO Starting workers {"controller": "monitoringstack", "controllerGroup": "monitoring.rhobs", "controllerKind": "MonitoringStack", "worker count": 1} 2025-12-12T16:27:31.103519394+00:00 stderr F 2025-12-12T16:27:31Z INFO Starting Controller {"controller": "uiplugin", "controllerGroup": "observability.openshift.io", "controllerKind": "UIPlugin"} 2025-12-12T16:27:31.103545344+00:00 stderr F 2025-12-12T16:27:31Z INFO Starting workers {"controller": "uiplugin", "controllerGroup": "observability.openshift.io", "controllerKind": "UIPlugin", "worker count": 1} 2025-12-12T16:27:31.110311405+00:00 stderr F 2025-12-12T16:27:31Z INFO Starting Controller {"controller": "cluster-observability", "controllerGroup": "observability.openshift.io", "controllerKind": "ObservabilityInstaller"} 2025-12-12T16:27:31.110366487+00:00 stderr F 2025-12-12T16:27:31Z INFO Starting workers {"controller": "cluster-observability", "controllerGroup": "observability.openshift.io", "controllerKind": "ObservabilityInstaller", "worker count": 1} 2025-12-12T16:27:31.294414735+00:00 stderr F 2025-12-12T16:27:31Z INFO observability-operator Reconciling operator resources {"operator": {"name":"observability-operator","namespace":"openshift-operators"}} 2025-12-12T16:27:31.315476308+00:00 stderr F 2025-12-12T16:27:31Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": true} ././@LongLink0000644000000000000000000000026200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015117043043032775 5ustar zuulzuul././@LongLink0000644000000000000000000000031000000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/machine-config-daemon/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015117043062032776 5ustar zuulzuul././@LongLink0000644000000000000000000000031500000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/machine-config-daemon/6.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000644000175000017500000022327015117043043033005 0ustar zuulzuul2025-12-12T16:36:53.061665720+00:00 stderr F I1212 16:36:53.061392 36245 start.go:70] Version: 89b561f0 (f587a1bfbaba518cc1d49ad6300e29eeb9c38cec) 2025-12-12T16:36:53.061931206+00:00 stderr F I1212 16:36:53.061708 36245 update.go:2651] Running: mount --rbind /run/secrets /rootfs/run/secrets 2025-12-12T16:36:53.065652310+00:00 stderr F I1212 16:36:53.065615 36245 update.go:2651] Running: mount --rbind /usr/bin /rootfs/run/machine-config-daemon-bin 2025-12-12T16:36:53.068241345+00:00 stderr F I1212 16:36:53.068202 36245 daemon.go:555] using appropriate binary for source=rhel-9 target=rhel-9 2025-12-12T16:36:53.147405964+00:00 stderr F I1212 16:36:53.147290 36245 daemon.go:608] 
Invoking re-exec /run/bin/machine-config-daemon 2025-12-12T16:36:53.190253100+00:00 stderr F I1212 16:36:53.190148 36245 start.go:70] Version: 89b561f0 (f587a1bfbaba518cc1d49ad6300e29eeb9c38cec) 2025-12-12T16:36:53.191588044+00:00 stderr F I1212 16:36:53.191456 36245 image_manager_helper.go:194] Linking rpm-ostree authfile to /etc/mco/internal-registry-pull-secret.json 2025-12-12T16:36:53.244239427+00:00 stderr F I1212 16:36:53.244132 36245 daemon.go:345] Booted osImageURL: image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest (9.6.20251021-0) 765a8d9fdcb7d177cbf4fd31343316543b668c78028d2ab915d810e45d5d583b 2025-12-12T16:36:53.245310614+00:00 stderr F I1212 16:36:53.245270 36245 start.go:136] overriding kubernetes api to https://api-int.crc.testing:6443 2025-12-12T16:36:53.246310589+00:00 stderr F I1212 16:36:53.246275 36245 metrics.go:92] Registering Prometheus metrics 2025-12-12T16:36:53.246356370+00:00 stderr F I1212 16:36:53.246339 36245 metrics.go:99] Starting metrics listener on 127.0.0.1:8797 2025-12-12T16:36:53.258091005+00:00 stderr F I1212 16:36:53.257994 36245 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:36:53.262372403+00:00 stderr F I1212 16:36:53.262253 36245 featuregates.go:112] FeatureGates initialized: enabled=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks], disabled=[AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests 
VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:36:53.263213054+00:00 stderr F I1212 16:36:53.263136 36245 event.go:377] Event(v1.ObjectReference{Kind:"Node", Namespace:"openshift-machine-config-operator", Name:"crc", UID:"23216ff3-032e-49af-af7e-1d23d5907b59", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:36:53.265783728+00:00 stderr F I1212 16:36:53.265733 36245 writer.go:87] NodeWriter initialized with credentials from /var/lib/kubelet/kubeconfig 2025-12-12T16:36:53.266577408+00:00 stderr F I1212 16:36:53.266524 36245 start.go:221] Feature enabled: PinnedImages 2025-12-12T16:36:53.266983888+00:00 stderr F I1212 16:36:53.266886 36245 update.go:2696] 
"Starting to manage node: crc" 2025-12-12T16:36:53.271681226+00:00 stderr F I1212 16:36:53.271605 36245 image_manager_helper.go:92] Running captured: rpm-ostree status 2025-12-12T16:36:53.367941655+00:00 stderr F I1212 16:36:53.367754 36245 pinned_image_set.go:819] Starting PinnedImageSet Manager 2025-12-12T16:36:53.403025307+00:00 stderr F I1212 16:36:53.402939 36245 daemon.go:1827] State: idle 2025-12-12T16:36:53.403025307+00:00 stderr F Deployments: 2025-12-12T16:36:53.403025307+00:00 stderr F * ostree-unverified-registry:image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest 2025-12-12T16:36:53.403025307+00:00 stderr F Digest: sha256:97576b6e5dcea61323cc5edae1d4c603ef3869df0ea140c0ada45fa333ff09d8 2025-12-12T16:36:53.403025307+00:00 stderr F Version: 9.6.20251021-0 (2025-11-03T09:00:13Z) 2025-12-12T16:36:53.403025307+00:00 stderr F LayeredPackages: cloud-init gvisor-tap-vsock-gvforwarder hyperv-daemons 2025-12-12T16:36:53.403533149+00:00 stderr F I1212 16:36:53.403457 36245 coreos.go:53] CoreOS aleph version: mtime=2022-08-01 23:42:11 +0000 UTC 2025-12-12T16:36:53.403533149+00:00 stderr F { 2025-12-12T16:36:53.403533149+00:00 stderr F "container-image": { 2025-12-12T16:36:53.403533149+00:00 stderr F "image-digest": "sha256:346eadc1d679be03d2b6a0dc447edded7077483224443f2a27652056e5e51ed8", 2025-12-12T16:36:53.403533149+00:00 stderr F "image-labels": { 2025-12-12T16:36:53.403533149+00:00 stderr F "com.coreos.osname": "rhcos", 2025-12-12T16:36:53.403533149+00:00 stderr F "containers.bootc": "1", 2025-12-12T16:36:53.403533149+00:00 stderr F "coreos-assembler.image-config-checksum": "b444a32e2801642f1e41777fd51fa53304496c58a3a6b15e5964a1f86f866507", 2025-12-12T16:36:53.403533149+00:00 stderr F "io.openshift.build.version-display-names": "machine-os=Red Hat Enterprise Linux CoreOS", 2025-12-12T16:36:53.403533149+00:00 stderr F "io.openshift.build.versions": "machine-os=9.6.20251015-1", 2025-12-12T16:36:53.403533149+00:00 stderr F "org.opencontainers.image.revision": "7b9eaa1ba9269e6287cb00f7044614b0e9da747e", 2025-12-12T16:36:53.403533149+00:00 stderr F "org.opencontainers.image.source": "https://github.com/coreos/rhel-coreos-config", 2025-12-12T16:36:53.403533149+00:00 stderr F "org.opencontainers.image.version": "9.6.20251015-1", 2025-12-12T16:36:53.403533149+00:00 stderr F "ostree.bootable": "true", 2025-12-12T16:36:53.403533149+00:00 stderr F "ostree.commit": "8df94c06f4995c7f493360f258ee92a068ab3280ea64919ec2bf9945a8648a4d", 2025-12-12T16:36:53.403533149+00:00 stderr F "ostree.final-diffid": "sha256:12787d84fa137cd5649a9005efe98ec9d05ea46245fdc50aecb7dd007f2035b1", 2025-12-12T16:36:53.403533149+00:00 stderr F "ostree.linux": "5.14.0-570.55.1.el9_6.x86_64", 2025-12-12T16:36:53.403533149+00:00 stderr F "rpmostree.inputhash": "b2542ee90d9bfa3873e873c3ad0e6550db088c732dbef4033568bbbd6dc58a81" 2025-12-12T16:36:53.403533149+00:00 stderr F }, 2025-12-12T16:36:53.403533149+00:00 stderr F "image-name": "oci-archive:/rhcos-9.6.20251015-1-ostree.x86_64.ociarchive" 2025-12-12T16:36:53.403533149+00:00 stderr F }, 2025-12-12T16:36:53.403533149+00:00 stderr F "osbuild-version": "161", 2025-12-12T16:36:53.403533149+00:00 stderr F "ostree-commit": "8df94c06f4995c7f493360f258ee92a068ab3280ea64919ec2bf9945a8648a4d", 2025-12-12T16:36:53.403533149+00:00 stderr F "ref": "docker://ostree-image-signed:oci-archive:/rhcos-9.6.20251015-1-ostree.x86_64.ociarchive", 2025-12-12T16:36:53.403533149+00:00 stderr F "version": "9.6.20251015-1" 2025-12-12T16:36:53.403533149+00:00 
stderr F } 2025-12-12T16:36:53.403564320+00:00 stderr F I1212 16:36:53.403550 36245 coreos.go:70] Ignition provisioning: time=2025-11-02T07:44:17Z 2025-12-12T16:36:53.403564320+00:00 stderr F I1212 16:36:53.403559 36245 image_manager_helper.go:92] Running captured: journalctl --list-boots 2025-12-12T16:36:53.414396122+00:00 stderr F I1212 16:36:53.414322 36245 daemon.go:1836] journalctl --list-boots: 2025-12-12T16:36:53.414396122+00:00 stderr F IDX BOOT ID FIRST ENTRY LAST ENTRY 2025-12-12T16:36:53.414396122+00:00 stderr F -3 5cc629ac7367418d888178e530691988 Mon 2025-11-03 09:44:05 UTC Mon 2025-11-03 09:44:09 UTC 2025-12-12T16:36:53.414396122+00:00 stderr F -2 9ce94f2d4be449f9a71ac96c59658a3d Mon 2025-11-03 09:44:31 UTC Mon 2025-11-03 09:45:03 UTC 2025-12-12T16:36:53.414396122+00:00 stderr F -1 c31a9c1303104477a1ad38c2c89b35bb Fri 2025-12-12 16:09:15 UTC Fri 2025-12-12 16:13:25 UTC 2025-12-12T16:36:53.414396122+00:00 stderr F 0 e5f274e50ab6408eb0cf1af5b029b864 Fri 2025-12-12 16:13:30 UTC Fri 2025-12-12 16:36:53 UTC 2025-12-12T16:36:53.414396122+00:00 stderr F I1212 16:36:53.414352 36245 image_manager_helper.go:92] Running captured: systemctl list-units --state=failed --no-legend 2025-12-12T16:36:53.428231550+00:00 stderr F I1212 16:36:53.428165 36245 daemon.go:1852] systemd service state: OK 2025-12-12T16:36:53.428231550+00:00 stderr F I1212 16:36:53.428203 36245 daemon.go:1405] Starting MachineConfigDaemon 2025-12-12T16:36:53.428342273+00:00 stderr F I1212 16:36:53.428294 36245 daemon.go:1412] Enabling Kubelet Healthz Monitor 2025-12-12T16:36:53.482477833+00:00 stderr F I1212 16:36:53.482376 36245 daemon.go:3034] Found 3 requested local packages in the booted deployment 2025-12-12T16:36:53.482477833+00:00 stderr F I1212 16:36:53.482413 36245 daemon.go:3043] Unsupported package cloud-init 2025-12-12T16:36:53.482477833+00:00 stderr F I1212 16:36:53.482419 36245 daemon.go:3043] Unsupported package gvisor-tap-vsock-gvforwarder 2025-12-12T16:36:53.482477833+00:00 stderr F I1212 16:36:53.482424 36245 daemon.go:3043] Unsupported package hyperv-daemons 2025-12-12T16:36:54.278207666+00:00 stderr F I1212 16:36:54.278053 36245 daemon.go:689] Node crc is part of the control plane 2025-12-12T16:36:54.328343916+00:00 stderr F I1212 16:36:54.328244 36245 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs275292089 --cleanup 2025-12-12T16:36:54.331240849+00:00 stderr F [2025-12-12T16:36:54Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:36:54.331340321+00:00 stderr F [2025-12-12T16:36:54Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:36:54.331367062+00:00 stdout F 2025-12-12T16:36:54.340127352+00:00 stderr F I1212 16:36:54.340069 36245 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:36:54.340146803+00:00 stderr F I1212 16:36:54.340128 36245 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:36:54.340154553+00:00 stderr F I1212 16:36:54.340146 36245 daemon.go:1795] state: Degraded 2025-12-12T16:36:54.340257495+00:00 stderr F I1212 16:36:54.340231 36245 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:36:54.400322915+00:00 stdout F Deployments unchanged. 
2025-12-12T16:36:54.410800708+00:00 stderr F I1212 16:36:54.410730 36245 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:36:54.411631789+00:00 stderr F I1212 16:36:54.411503 36245 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 2025-12-12T16:36:54.411631789+00:00 stderr F W1212 16:36:54.411522 36245 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:36:54.411631789+00:00 stderr F I1212 16:36:54.411531 36245 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:36:54.473366890+00:00 stderr F E1212 16:36:54.473300 36245 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:36:54.473366890+00:00 stderr F bytes.Join({ 2025-12-12T16:36:54.473366890+00:00 stderr F "{", 2025-12-12T16:36:54.473366890+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:36:54.473366890+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:36:54.473366890+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:36:54.473366890+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:36:54.473366890+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:36:54.473366890+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:36:54.473366890+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:36:54.473366890+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:36:54.473366890+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:36:54.473366890+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:36:54.473366890+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:36:54.473366890+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:36:54.473366890+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:36:54.473366890+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:36:54.473366890+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:36:54.473366890+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:36:54.473366890+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:36:54.473366890+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:36:54.473366890+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:36:54.473366890+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:36:54.473366890+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:36:54.473366890+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 
2025-12-12T16:36:54.473366890+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:36:54.473366890+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:36:54.473366890+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 2025-12-12T16:36:54.473366890+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:36:54.473366890+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:36:54.473366890+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:36:54.473366890+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:36:54.473366890+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:36:54.473366890+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:36:54.473366890+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:36:54.473366890+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:36:54.473366890+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:36:54.473366890+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:36:54.473366890+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:36:54.473366890+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:36:54.473366890+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:36:54.473366890+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:36:54.473366890+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:36:54.473366890+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:36:54.473366890+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:36:54.473366890+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:36:54.473366890+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:36:54.473366890+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:36:54.473366890+00:00 stderr F }, "") 2025-12-12T16:36:54.473462512+00:00 stderr F E1212 16:36:54.473389 36245 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:36:56.479964237+00:00 stderr F I1212 16:36:56.479265 36245 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs750066003 --cleanup 2025-12-12T16:36:56.482904811+00:00 stderr F [2025-12-12T16:36:56Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:36:56.483053214+00:00 stdout F 2025-12-12T16:36:56.483059475+00:00 stderr F [2025-12-12T16:36:56Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:36:56.496448231+00:00 stderr F I1212 16:36:56.496368 36245 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:36:56.496448231+00:00 stderr F I1212 16:36:56.496424 36245 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:36:56.496448231+00:00 stderr F I1212 16:36:56.496433 36245 daemon.go:1795] state: Degraded 2025-12-12T16:36:56.496507412+00:00 stderr F I1212 16:36:56.496493 36245 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:36:56.561503935+00:00 stdout F Deployments unchanged. 2025-12-12T16:36:56.569589989+00:00 stderr F I1212 16:36:56.569534 36245 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:36:56.569965348+00:00 stderr F I1212 16:36:56.569929 36245 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:36:56.569965348+00:00 stderr F W1212 16:36:56.569944 36245 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:36:56.569965348+00:00 stderr F I1212 16:36:56.569955 36245 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:36:56.634495630+00:00 stderr F E1212 16:36:56.634427 36245 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:36:56.634495630+00:00 stderr F bytes.Join({ 2025-12-12T16:36:56.634495630+00:00 stderr F "{", 2025-12-12T16:36:56.634495630+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:36:56.634495630+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:36:56.634495630+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:36:56.634495630+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:36:56.634495630+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:36:56.634495630+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:36:56.634495630+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:36:56.634495630+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:36:56.634495630+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:36:56.634495630+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:36:56.634495630+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:36:56.634495630+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:36:56.634495630+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:36:56.634495630+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:36:56.634495630+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:36:56.634495630+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:36:56.634495630+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:36:56.634495630+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:36:56.634495630+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:36:56.634495630+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:36:56.634495630+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:36:56.634495630+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:36:56.634495630+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:36:56.634495630+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:36:56.634495630+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:36:56.634495630+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:36:56.634495630+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:36:56.634495630+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:36:56.634495630+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:36:56.634495630+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:36:56.634495630+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:36:56.634495630+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:36:56.634495630+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:36:56.634495630+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:36:56.634495630+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:36:56.634495630+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:36:56.634495630+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:36:56.634495630+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:36:56.634495630+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:36:56.634495630+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:36:56.634495630+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:36:56.634495630+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:36:56.634495630+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:36:56.634495630+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:36:56.634495630+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:36:56.634495630+00:00 stderr F }, "") 2025-12-12T16:36:56.634685264+00:00 stderr F E1212 16:36:56.634666 36245 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:37:00.661736696+00:00 stderr F I1212 16:37:00.661613 36245 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs1055734372 --cleanup 2025-12-12T16:37:00.665433689+00:00 stderr F [2025-12-12T16:37:00Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:37:00.665542192+00:00 stdout F 2025-12-12T16:37:00.665550082+00:00 stderr F [2025-12-12T16:37:00Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:37:00.675127182+00:00 stderr F I1212 16:37:00.675029 36245 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:37:00.675127182+00:00 stderr F I1212 16:37:00.675070 36245 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:37:00.675127182+00:00 stderr F I1212 16:37:00.675080 36245 daemon.go:1795] state: Degraded 2025-12-12T16:37:00.675127182+00:00 stderr F I1212 16:37:00.675111 36245 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:37:00.737587002+00:00 stdout F Deployments unchanged. 2025-12-12T16:37:00.747537002+00:00 stderr F I1212 16:37:00.747432 36245 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:37:00.748529827+00:00 stderr F I1212 16:37:00.748489 36245 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:37:00.748529827+00:00 stderr F W1212 16:37:00.748508 36245 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:37:00.748529827+00:00 stderr F I1212 16:37:00.748518 36245 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:37:00.818093405+00:00 stderr F E1212 16:37:00.817998 36245 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:37:00.818093405+00:00 stderr F bytes.Join({ 2025-12-12T16:37:00.818093405+00:00 stderr F "{", 2025-12-12T16:37:00.818093405+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:37:00.818093405+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:37:00.818093405+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:37:00.818093405+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:37:00.818093405+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:37:00.818093405+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:37:00.818093405+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:37:00.818093405+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:37:00.818093405+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:37:00.818093405+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:37:00.818093405+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:37:00.818093405+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:37:00.818093405+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:37:00.818093405+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:37:00.818093405+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:37:00.818093405+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:37:00.818093405+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:37:00.818093405+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:37:00.818093405+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:37:00.818093405+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:37:00.818093405+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:37:00.818093405+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:37:00.818093405+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:37:00.818093405+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:37:00.818093405+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:37:00.818093405+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:37:00.818093405+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:37:00.818093405+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:37:00.818093405+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:37:00.818093405+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:37:00.818093405+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:37:00.818093405+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:37:00.818093405+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:37:00.818093405+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:37:00.818093405+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:37:00.818093405+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:37:00.818093405+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:37:00.818093405+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:37:00.818093405+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:37:00.818093405+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:37:00.818093405+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:37:00.818093405+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:37:00.818093405+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:37:00.818093405+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:37:00.818093405+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:37:00.818093405+00:00 stderr F }, "") 2025-12-12T16:37:00.818093405+00:00 stderr F E1212 16:37:00.818069 36245 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:37:08.832945467+00:00 stderr F I1212 16:37:08.832864 36245 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs3140637192 --cleanup 2025-12-12T16:37:08.836615500+00:00 stderr F [2025-12-12T16:37:08Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:37:08.836778924+00:00 stdout F 2025-12-12T16:37:08.836785954+00:00 stderr F [2025-12-12T16:37:08Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:37:08.845255347+00:00 stderr F I1212 16:37:08.845205 36245 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:37:08.845255347+00:00 stderr F I1212 16:37:08.845223 36245 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:37:08.845255347+00:00 stderr F I1212 16:37:08.845230 36245 daemon.go:1795] state: Degraded 2025-12-12T16:37:08.845285028+00:00 stderr F I1212 16:37:08.845255 36245 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:37:08.902971447+00:00 stdout F Deployments unchanged. 2025-12-12T16:37:08.912364123+00:00 stderr F I1212 16:37:08.912302 36245 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:37:08.913177443+00:00 stderr F I1212 16:37:08.913135 36245 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:37:08.913177443+00:00 stderr F W1212 16:37:08.913161 36245 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:37:08.913264585+00:00 stderr F I1212 16:37:08.913172 36245 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:37:08.985079210+00:00 stderr F E1212 16:37:08.985010 36245 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:37:08.985079210+00:00 stderr F bytes.Join({ 2025-12-12T16:37:08.985079210+00:00 stderr F "{", 2025-12-12T16:37:08.985079210+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:37:08.985079210+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:37:08.985079210+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:37:08.985079210+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:37:08.985079210+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:37:08.985079210+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:37:08.985079210+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:37:08.985079210+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:37:08.985079210+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:37:08.985079210+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:37:08.985079210+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:37:08.985079210+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:37:08.985079210+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:37:08.985079210+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:37:08.985079210+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:37:08.985079210+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:37:08.985079210+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:37:08.985079210+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:37:08.985079210+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:37:08.985079210+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:37:08.985079210+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:37:08.985079210+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:37:08.985079210+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:37:08.985079210+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:37:08.985079210+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:37:08.985079210+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:37:08.985079210+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:37:08.985079210+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:37:08.985079210+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:37:08.985079210+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:37:08.985079210+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:37:08.985079210+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:37:08.985079210+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:37:08.985079210+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:37:08.985079210+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:37:08.985079210+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:37:08.985079210+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:37:08.985079210+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:37:08.985079210+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:37:08.985079210+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:37:08.985079210+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:37:08.985079210+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:37:08.985079210+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:37:08.985079210+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:37:08.985079210+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:37:08.985079210+00:00 stderr F }, "") 2025-12-12T16:37:08.985154152+00:00 stderr F E1212 16:37:08.985084 36245 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:37:24.994266466+00:00 stderr F I1212 16:37:24.994099 36245 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs2428200007 --cleanup 2025-12-12T16:37:24.998917433+00:00 stderr F [2025-12-12T16:37:24Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:37:24.999000795+00:00 stdout F 2025-12-12T16:37:24.999009755+00:00 stderr F [2025-12-12T16:37:24Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:37:25.011045797+00:00 stderr F I1212 16:37:25.010971 36245 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:37:25.011045797+00:00 stderr F I1212 16:37:25.011016 36245 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:37:25.011045797+00:00 stderr F I1212 16:37:25.011030 36245 daemon.go:1795] state: Degraded 2025-12-12T16:37:25.011111279+00:00 stderr F I1212 16:37:25.011064 36245 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:37:25.084229856+00:00 stdout F Deployments unchanged. 2025-12-12T16:37:25.095978401+00:00 stderr F I1212 16:37:25.095879 36245 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:37:25.096895275+00:00 stderr F I1212 16:37:25.096810 36245 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:37:25.096895275+00:00 stderr F W1212 16:37:25.096838 36245 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:37:25.096895275+00:00 stderr F I1212 16:37:25.096854 36245 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:37:25.186237969+00:00 stderr F E1212 16:37:25.186113 36245 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:37:25.186237969+00:00 stderr F bytes.Join({ 2025-12-12T16:37:25.186237969+00:00 stderr F "{", 2025-12-12T16:37:25.186237969+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:37:25.186237969+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:37:25.186237969+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:37:25.186237969+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:37:25.186237969+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:37:25.186237969+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:37:25.186237969+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:37:25.186237969+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:37:25.186237969+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:37:25.186237969+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:37:25.186237969+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:37:25.186237969+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:37:25.186237969+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:37:25.186237969+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:37:25.186237969+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:37:25.186237969+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:37:25.186237969+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:37:25.186237969+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:37:25.186237969+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:37:25.186237969+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:37:25.186237969+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:37:25.186237969+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:37:25.186237969+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:37:25.186237969+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:37:25.186237969+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:37:25.186237969+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:37:25.186237969+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:37:25.186237969+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:37:25.186237969+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:37:25.186237969+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:37:25.186237969+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:37:25.186237969+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:37:25.186237969+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:37:25.186237969+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:37:25.186237969+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:37:25.186237969+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:37:25.186237969+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:37:25.186237969+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:37:25.186237969+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:37:25.186237969+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:37:25.186237969+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:37:25.186237969+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:37:25.186237969+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:37:25.186237969+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:37:25.186237969+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:37:25.186237969+00:00 stderr F }, "") 2025-12-12T16:37:25.186548897+00:00 stderr F E1212 16:37:25.186502 36245 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:37:53.484060120+00:00 stderr F I1212 16:37:53.483957 36245 certificate_writer.go:294] Certificate was synced from controllerconfig resourceVersion 39750 2025-12-12T16:37:57.197778938+00:00 stderr F I1212 16:37:57.197644 36245 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs3981117247 --cleanup 2025-12-12T16:37:57.202002074+00:00 stderr F [2025-12-12T16:37:57Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:37:57.202091096+00:00 stdout F 2025-12-12T16:37:57.202105877+00:00 stderr F [2025-12-12T16:37:57Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:37:57.219030412+00:00 stderr F I1212 16:37:57.218618 36245 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:37:57.219030412+00:00 stderr F I1212 16:37:57.218965 36245 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:37:57.219030412+00:00 stderr F I1212 16:37:57.218978 36245 daemon.go:1795] state: Degraded 2025-12-12T16:37:57.219030412+00:00 stderr F I1212 16:37:57.219009 36245 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:37:57.283549633+00:00 stdout F Deployments unchanged. 2025-12-12T16:37:57.297214116+00:00 stderr F I1212 16:37:57.296363 36245 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:37:57.297731639+00:00 stderr F I1212 16:37:57.297682 36245 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:37:57.297731639+00:00 stderr F W1212 16:37:57.297699 36245 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:37:57.297731639+00:00 stderr F I1212 16:37:57.297711 36245 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:37:57.376593061+00:00 stderr F E1212 16:37:57.376493 36245 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:37:57.376593061+00:00 stderr F bytes.Join({ 2025-12-12T16:37:57.376593061+00:00 stderr F "{", 2025-12-12T16:37:57.376593061+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:37:57.376593061+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:37:57.376593061+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:37:57.376593061+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:37:57.376593061+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:37:57.376593061+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:37:57.376593061+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:37:57.376593061+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:37:57.376593061+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:37:57.376593061+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:37:57.376593061+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:37:57.376593061+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:37:57.376593061+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:37:57.376593061+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:37:57.376593061+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:37:57.376593061+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:37:57.376593061+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:37:57.376593061+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:37:57.376593061+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:37:57.376593061+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:37:57.376593061+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:37:57.376593061+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:37:57.376593061+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:37:57.376593061+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:37:57.376593061+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:37:57.376593061+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:37:57.376593061+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:37:57.376593061+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:37:57.376593061+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:37:57.376593061+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:37:57.376593061+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:37:57.376593061+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:37:57.376593061+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:37:57.376593061+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:37:57.376593061+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:37:57.376593061+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:37:57.376593061+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:37:57.376593061+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:37:57.376593061+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:37:57.376593061+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:37:57.376593061+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:37:57.376593061+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:37:57.376593061+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:37:57.376593061+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:37:57.376593061+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:37:57.376593061+00:00 stderr F }, "") 2025-12-12T16:37:57.376718404+00:00 stderr F E1212 16:37:57.376596 36245 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:38:57.383120601+00:00 stderr F I1212 16:38:57.383016 36245 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs370238536 --cleanup 2025-12-12T16:38:57.386405093+00:00 stderr F [2025-12-12T16:38:57Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:38:57.386447545+00:00 stdout F 2025-12-12T16:38:57.386460095+00:00 stderr F [2025-12-12T16:38:57Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:38:57.395260236+00:00 stderr F I1212 16:38:57.395170 36245 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:38:57.395260236+00:00 stderr F I1212 16:38:57.395237 36245 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:38:57.395260236+00:00 stderr F I1212 16:38:57.395249 36245 daemon.go:1795] state: Degraded 2025-12-12T16:38:57.395298847+00:00 stderr F I1212 16:38:57.395283 36245 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:38:57.456731850+00:00 stdout F Deployments unchanged. 2025-12-12T16:38:57.463059789+00:00 stderr F I1212 16:38:57.462933 36245 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:38:57.464110446+00:00 stderr F I1212 16:38:57.464070 36245 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:38:57.464110446+00:00 stderr F W1212 16:38:57.464093 36245 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:38:57.464110446+00:00 stderr F I1212 16:38:57.464101 36245 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:38:57.538080684+00:00 stderr F E1212 16:38:57.537986 36245 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:38:57.538080684+00:00 stderr F bytes.Join({ 2025-12-12T16:38:57.538080684+00:00 stderr F "{", 2025-12-12T16:38:57.538080684+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:38:57.538080684+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:38:57.538080684+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:38:57.538080684+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:38:57.538080684+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:38:57.538080684+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:38:57.538080684+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:38:57.538080684+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:38:57.538080684+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:38:57.538080684+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:38:57.538080684+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:38:57.538080684+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:38:57.538080684+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:38:57.538080684+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:38:57.538080684+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:38:57.538080684+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:38:57.538080684+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:38:57.538080684+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:38:57.538080684+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:38:57.538080684+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:38:57.538080684+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:38:57.538080684+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:38:57.538080684+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:38:57.538080684+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:38:57.538080684+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:38:57.538080684+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:38:57.538080684+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:38:57.538080684+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:38:57.538080684+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:38:57.538080684+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:38:57.538080684+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:38:57.538080684+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:38:57.538080684+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:38:57.538080684+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:38:57.538080684+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:38:57.538080684+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:38:57.538080684+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:38:57.538080684+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:38:57.538080684+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:38:57.538080684+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:38:57.538080684+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:38:57.538080684+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:38:57.538080684+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:38:57.538080684+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:38:57.538080684+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:38:57.538080684+00:00 stderr F }, "") 2025-12-12T16:38:57.538169807+00:00 stderr F E1212 16:38:57.538072 36245 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:39:57.543996067+00:00 stderr F I1212 16:39:57.543920 36245 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs763547478 --cleanup 2025-12-12T16:39:57.547335101+00:00 stderr F [2025-12-12T16:39:57Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:39:57.547441964+00:00 stdout F 2025-12-12T16:39:57.547449544+00:00 stderr F [2025-12-12T16:39:57Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:39:57.557584229+00:00 stderr F I1212 16:39:57.557159 36245 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:39:57.557584229+00:00 stderr F I1212 16:39:57.557204 36245 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:39:57.557584229+00:00 stderr F I1212 16:39:57.557217 36245 daemon.go:1795] state: Degraded 2025-12-12T16:39:57.557584229+00:00 stderr F I1212 16:39:57.557246 36245 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:39:57.609122504+00:00 stdout F Deployments unchanged. 2025-12-12T16:39:57.617923845+00:00 stderr F I1212 16:39:57.617802 36245 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:39:57.618446368+00:00 stderr F I1212 16:39:57.618372 36245 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:39:57.618446368+00:00 stderr F W1212 16:39:57.618396 36245 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:39:57.618446368+00:00 stderr F I1212 16:39:57.618410 36245 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:39:57.687483692+00:00 stderr F E1212 16:39:57.687419 36245 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:39:57.687483692+00:00 stderr F bytes.Join({ 2025-12-12T16:39:57.687483692+00:00 stderr F "{", 2025-12-12T16:39:57.687483692+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:39:57.687483692+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:39:57.687483692+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:39:57.687483692+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:39:57.687483692+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:39:57.687483692+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:39:57.687483692+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:39:57.687483692+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:39:57.687483692+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:39:57.687483692+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:39:57.687483692+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:39:57.687483692+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:39:57.687483692+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:39:57.687483692+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:39:57.687483692+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:39:57.687483692+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:39:57.687483692+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:39:57.687483692+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:39:57.687483692+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:39:57.687483692+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:39:57.687483692+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:39:57.687483692+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:39:57.687483692+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:39:57.687483692+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:39:57.687483692+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:39:57.687483692+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:39:57.687483692+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:39:57.687483692+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:39:57.687483692+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:39:57.687483692+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:39:57.687483692+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:39:57.687483692+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:39:57.687483692+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:39:57.687483692+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:39:57.687483692+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:39:57.687483692+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:39:57.687483692+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:39:57.687483692+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:39:57.687483692+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:39:57.687483692+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:39:57.687483692+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:39:57.687483692+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:39:57.687483692+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:39:57.687483692+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:39:57.687483692+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:39:57.687483692+00:00 stderr F }, "") 2025-12-12T16:39:57.687654567+00:00 stderr F E1212 16:39:57.687638 36245 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:40:22.756223196+00:00 stderr F I1212 16:40:22.756106 36245 daemon.go:3092] Daemon logs from /var/log/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e preserved at /etc/machine-config-daemon/previous-logs/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e 2025-12-12T16:40:22.756419831+00:00 stderr F I1212 16:40:22.756380 36245 daemon.go:1445] Shutting down MachineConfigDaemon ././@LongLink0000644000000000000000000000031500000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/machine-config-daemon/7.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000644000175000017500000020340315117043043033001 0ustar zuulzuul2025-12-12T16:40:23.097978163+00:00 stderr F I1212 16:40:23.097874 39670 start.go:70] Version: 89b561f0 (f587a1bfbaba518cc1d49ad6300e29eeb9c38cec) 2025-12-12T16:40:23.098578918+00:00 stderr F I1212 16:40:23.098114 39670 update.go:2651] Running: mount --rbind /run/secrets /rootfs/run/secrets 2025-12-12T16:40:23.101361368+00:00 stderr F I1212 16:40:23.101318 39670 update.go:2651] Running: mount --rbind /usr/bin /rootfs/run/machine-config-daemon-bin 2025-12-12T16:40:23.104722572+00:00 stderr F I1212 16:40:23.104687 39670 daemon.go:555] using appropriate binary for source=rhel-9 target=rhel-9 2025-12-12T16:40:23.216711605+00:00 stderr F I1212 16:40:23.216633 39670 daemon.go:608] Invoking re-exec /run/bin/machine-config-daemon 2025-12-12T16:40:23.258814243+00:00 stderr F I1212 16:40:23.258726 39670 start.go:70] Version: 89b561f0 (f587a1bfbaba518cc1d49ad6300e29eeb9c38cec) 2025-12-12T16:40:23.259577202+00:00 stderr F I1212 16:40:23.259533 39670 image_manager_helper.go:194] Linking rpm-ostree authfile to /etc/mco/internal-registry-pull-secret.json 2025-12-12T16:40:23.340255919+00:00 stderr F I1212 16:40:23.340080 39670 daemon.go:345] Booted osImageURL: image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest (9.6.20251021-0) 765a8d9fdcb7d177cbf4fd31343316543b668c78028d2ab915d810e45d5d583b 2025-12-12T16:40:23.341291975+00:00 stderr F I1212 16:40:23.341210 39670 start.go:136] overriding kubernetes api to https://api-int.crc.testing:6443 2025-12-12T16:40:23.342825714+00:00 stderr F I1212 16:40:23.342722 39670 metrics.go:92] Registering Prometheus metrics 2025-12-12T16:40:23.343046249+00:00 stderr F I1212 16:40:23.343006 39670 metrics.go:99] Starting metrics listener on 127.0.0.1:8797 2025-12-12T16:40:23.357304098+00:00 stderr F I1212 16:40:23.355996 39670 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:40:23.362534339+00:00 stderr F I1212 16:40:23.362290 39670 featuregates.go:112] FeatureGates initialized: enabled=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 
MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks], disabled=[AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:40:23.363041562+00:00 stderr F I1212 16:40:23.362941 39670 event.go:377] Event(v1.ObjectReference{Kind:"Node", Namespace:"openshift-machine-config-operator", Name:"crc", UID:"23216ff3-032e-49af-af7e-1d23d5907b59", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", 
"BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:40:23.367947675+00:00 stderr F I1212 16:40:23.367906 39670 writer.go:87] NodeWriter initialized with credentials from /var/lib/kubelet/kubeconfig 2025-12-12T16:40:23.368163781+00:00 stderr F I1212 16:40:23.368136 39670 start.go:221] Feature enabled: PinnedImages 2025-12-12T16:40:23.368888549+00:00 stderr F I1212 16:40:23.368858 39670 update.go:2696] "Starting to manage node: crc" 2025-12-12T16:40:23.379066024+00:00 stderr F I1212 16:40:23.378975 39670 image_manager_helper.go:92] Running captured: rpm-ostree status 2025-12-12T16:40:23.469407514+00:00 stderr F I1212 16:40:23.469336 39670 pinned_image_set.go:819] Starting PinnedImageSet Manager 2025-12-12T16:40:23.504952187+00:00 stderr F I1212 16:40:23.504890 39670 daemon.go:1827] State: idle 2025-12-12T16:40:23.504952187+00:00 stderr F Deployments: 2025-12-12T16:40:23.504952187+00:00 stderr F * ostree-unverified-registry:image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest 2025-12-12T16:40:23.504952187+00:00 stderr F Digest: sha256:97576b6e5dcea61323cc5edae1d4c603ef3869df0ea140c0ada45fa333ff09d8 2025-12-12T16:40:23.504952187+00:00 stderr F Version: 9.6.20251021-0 (2025-11-03T09:00:13Z) 2025-12-12T16:40:23.504952187+00:00 stderr F LayeredPackages: cloud-init gvisor-tap-vsock-gvforwarder hyperv-daemons 2025-12-12T16:40:23.505480771+00:00 stderr F I1212 16:40:23.505437 39670 coreos.go:53] CoreOS aleph version: mtime=2022-08-01 23:42:11 +0000 UTC 2025-12-12T16:40:23.505480771+00:00 stderr F { 2025-12-12T16:40:23.505480771+00:00 stderr F "container-image": { 2025-12-12T16:40:23.505480771+00:00 stderr F "image-digest": "sha256:346eadc1d679be03d2b6a0dc447edded7077483224443f2a27652056e5e51ed8", 2025-12-12T16:40:23.505480771+00:00 stderr F "image-labels": { 2025-12-12T16:40:23.505480771+00:00 stderr F "com.coreos.osname": "rhcos", 2025-12-12T16:40:23.505480771+00:00 stderr F "containers.bootc": "1", 2025-12-12T16:40:23.505480771+00:00 stderr F 
"coreos-assembler.image-config-checksum": "b444a32e2801642f1e41777fd51fa53304496c58a3a6b15e5964a1f86f866507", 2025-12-12T16:40:23.505480771+00:00 stderr F "io.openshift.build.version-display-names": "machine-os=Red Hat Enterprise Linux CoreOS", 2025-12-12T16:40:23.505480771+00:00 stderr F "io.openshift.build.versions": "machine-os=9.6.20251015-1", 2025-12-12T16:40:23.505480771+00:00 stderr F "org.opencontainers.image.revision": "7b9eaa1ba9269e6287cb00f7044614b0e9da747e", 2025-12-12T16:40:23.505480771+00:00 stderr F "org.opencontainers.image.source": "https://github.com/coreos/rhel-coreos-config", 2025-12-12T16:40:23.505480771+00:00 stderr F "org.opencontainers.image.version": "9.6.20251015-1", 2025-12-12T16:40:23.505480771+00:00 stderr F "ostree.bootable": "true", 2025-12-12T16:40:23.505480771+00:00 stderr F "ostree.commit": "8df94c06f4995c7f493360f258ee92a068ab3280ea64919ec2bf9945a8648a4d", 2025-12-12T16:40:23.505480771+00:00 stderr F "ostree.final-diffid": "sha256:12787d84fa137cd5649a9005efe98ec9d05ea46245fdc50aecb7dd007f2035b1", 2025-12-12T16:40:23.505480771+00:00 stderr F "ostree.linux": "5.14.0-570.55.1.el9_6.x86_64", 2025-12-12T16:40:23.505480771+00:00 stderr F "rpmostree.inputhash": "b2542ee90d9bfa3873e873c3ad0e6550db088c732dbef4033568bbbd6dc58a81" 2025-12-12T16:40:23.505480771+00:00 stderr F }, 2025-12-12T16:40:23.505480771+00:00 stderr F "image-name": "oci-archive:/rhcos-9.6.20251015-1-ostree.x86_64.ociarchive" 2025-12-12T16:40:23.505480771+00:00 stderr F }, 2025-12-12T16:40:23.505480771+00:00 stderr F "osbuild-version": "161", 2025-12-12T16:40:23.505480771+00:00 stderr F "ostree-commit": "8df94c06f4995c7f493360f258ee92a068ab3280ea64919ec2bf9945a8648a4d", 2025-12-12T16:40:23.505480771+00:00 stderr F "ref": "docker://ostree-image-signed:oci-archive:/rhcos-9.6.20251015-1-ostree.x86_64.ociarchive", 2025-12-12T16:40:23.505480771+00:00 stderr F "version": "9.6.20251015-1" 2025-12-12T16:40:23.505480771+00:00 stderr F } 2025-12-12T16:40:23.505592744+00:00 stderr F I1212 16:40:23.505580 39670 coreos.go:70] Ignition provisioning: time=2025-11-02T07:44:17Z 2025-12-12T16:40:23.505619014+00:00 stderr F I1212 16:40:23.505610 39670 image_manager_helper.go:92] Running captured: journalctl --list-boots 2025-12-12T16:40:23.515893122+00:00 stderr F I1212 16:40:23.515819 39670 daemon.go:1836] journalctl --list-boots: 2025-12-12T16:40:23.515893122+00:00 stderr F IDX BOOT ID FIRST ENTRY LAST ENTRY 2025-12-12T16:40:23.515893122+00:00 stderr F -3 5cc629ac7367418d888178e530691988 Mon 2025-11-03 09:44:05 UTC Mon 2025-11-03 09:44:09 UTC 2025-12-12T16:40:23.515893122+00:00 stderr F -2 9ce94f2d4be449f9a71ac96c59658a3d Mon 2025-11-03 09:44:31 UTC Mon 2025-11-03 09:45:03 UTC 2025-12-12T16:40:23.515893122+00:00 stderr F -1 c31a9c1303104477a1ad38c2c89b35bb Fri 2025-12-12 16:09:15 UTC Fri 2025-12-12 16:13:25 UTC 2025-12-12T16:40:23.515893122+00:00 stderr F 0 e5f274e50ab6408eb0cf1af5b029b864 Fri 2025-12-12 16:13:30 UTC Fri 2025-12-12 16:40:23 UTC 2025-12-12T16:40:23.515893122+00:00 stderr F I1212 16:40:23.515875 39670 image_manager_helper.go:92] Running captured: systemctl list-units --state=failed --no-legend 2025-12-12T16:40:23.530610592+00:00 stderr F I1212 16:40:23.529671 39670 daemon.go:1852] systemd service state: OK 2025-12-12T16:40:23.530610592+00:00 stderr F I1212 16:40:23.529699 39670 daemon.go:1405] Starting MachineConfigDaemon 2025-12-12T16:40:23.530610592+00:00 stderr F I1212 16:40:23.529777 39670 daemon.go:1412] Enabling Kubelet Healthz Monitor 2025-12-12T16:40:23.576934196+00:00 stderr F I1212 
16:40:23.576835 39670 daemon.go:3034] Found 3 requested local packages in the booted deployment 2025-12-12T16:40:23.576934196+00:00 stderr F I1212 16:40:23.576871 39670 daemon.go:3043] Unsupported package cloud-init 2025-12-12T16:40:23.576934196+00:00 stderr F I1212 16:40:23.576877 39670 daemon.go:3043] Unsupported package gvisor-tap-vsock-gvforwarder 2025-12-12T16:40:23.576934196+00:00 stderr F I1212 16:40:23.576881 39670 daemon.go:3043] Unsupported package hyperv-daemons 2025-12-12T16:40:24.379725776+00:00 stderr F I1212 16:40:24.378588 39670 daemon.go:689] Node crc is part of the control plane 2025-12-12T16:40:24.429324443+00:00 stderr F I1212 16:40:24.429230 39670 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs4139436607 --cleanup 2025-12-12T16:40:24.432956694+00:00 stderr F [2025-12-12T16:40:24Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:40:24.433049016+00:00 stdout F 2025-12-12T16:40:24.433060897+00:00 stderr F [2025-12-12T16:40:24Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:40:24.444727750+00:00 stderr F I1212 16:40:24.444657 39670 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:40:24.444727750+00:00 stderr F I1212 16:40:24.444693 39670 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:40:24.444727750+00:00 stderr F I1212 16:40:24.444704 39670 daemon.go:1795] state: Degraded 2025-12-12T16:40:24.444762291+00:00 stderr F I1212 16:40:24.444731 39670 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:40:24.499711361+00:00 stdout F Deployments unchanged. 2025-12-12T16:40:24.512106143+00:00 stderr F I1212 16:40:24.511983 39670 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:40:24.513381415+00:00 stderr F I1212 16:40:24.513320 39670 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:40:24.513381415+00:00 stderr F W1212 16:40:24.513360 39670 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:40:24.513403535+00:00 stderr F I1212 16:40:24.513379 39670 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:40:24.583871096+00:00 stderr F E1212 16:40:24.583771 39670 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:40:24.583871096+00:00 stderr F bytes.Join({ 2025-12-12T16:40:24.583871096+00:00 stderr F "{", 2025-12-12T16:40:24.583871096+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:40:24.583871096+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:40:24.583871096+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:40:24.583871096+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:40:24.583871096+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:40:24.583871096+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:40:24.583871096+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:40:24.583871096+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:40:24.583871096+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:40:24.583871096+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:40:24.583871096+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:40:24.583871096+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:40:24.583871096+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:40:24.583871096+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:40:24.583871096+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:40:24.583871096+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:40:24.583871096+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:40:24.583871096+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:40:24.583871096+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:40:24.583871096+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:40:24.583871096+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:40:24.583871096+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:40:24.583871096+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:40:24.583871096+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:40:24.583871096+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:40:24.583871096+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:40:24.583871096+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:40:24.583871096+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:40:24.583871096+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:40:24.583871096+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:40:24.583871096+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:40:24.583871096+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:40:24.583871096+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:40:24.583871096+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:40:24.583871096+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:40:24.583871096+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:40:24.583871096+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:40:24.583871096+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:40:24.583871096+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:40:24.583871096+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:40:24.583871096+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:40:24.583871096+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:40:24.583871096+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:40:24.583871096+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:40:24.583871096+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:40:24.583871096+00:00 stderr F }, "") 2025-12-12T16:40:24.583966788+00:00 stderr F E1212 16:40:24.583879 39670 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:40:26.594472703+00:00 stderr F I1212 16:40:26.593688 39670 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs2785707451 --cleanup 2025-12-12T16:40:26.599491819+00:00 stderr F [2025-12-12T16:40:26Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:40:26.599586591+00:00 stdout F 2025-12-12T16:40:26.599597482+00:00 stderr F [2025-12-12T16:40:26Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:40:26.614451765+00:00 stderr F I1212 16:40:26.614380 39670 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:40:26.614451765+00:00 stderr F I1212 16:40:26.614409 39670 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:40:26.614451765+00:00 stderr F I1212 16:40:26.614417 39670 daemon.go:1795] state: Degraded 2025-12-12T16:40:26.614451765+00:00 stderr F I1212 16:40:26.614442 39670 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:40:26.676570856+00:00 stdout F Deployments unchanged. 2025-12-12T16:40:26.685405398+00:00 stderr F I1212 16:40:26.685329 39670 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:40:26.686712991+00:00 stderr F I1212 16:40:26.685923 39670 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:40:26.686712991+00:00 stderr F W1212 16:40:26.685947 39670 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:40:26.686712991+00:00 stderr F I1212 16:40:26.685959 39670 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:40:26.774048034+00:00 stderr F E1212 16:40:26.773949 39670 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:40:26.774048034+00:00 stderr F bytes.Join({ 2025-12-12T16:40:26.774048034+00:00 stderr F "{", 2025-12-12T16:40:26.774048034+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:40:26.774048034+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:40:26.774048034+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:40:26.774048034+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:40:26.774048034+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:40:26.774048034+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:40:26.774048034+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:40:26.774048034+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:40:26.774048034+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:40:26.774048034+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:40:26.774048034+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:40:26.774048034+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:40:26.774048034+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:40:26.774048034+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:40:26.774048034+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:40:26.774048034+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:40:26.774048034+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:40:26.774048034+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:40:26.774048034+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:40:26.774048034+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:40:26.774048034+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:40:26.774048034+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:40:26.774048034+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:40:26.774048034+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:40:26.774048034+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:40:26.774048034+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:40:26.774048034+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:40:26.774048034+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:40:26.774048034+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:40:26.774048034+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:40:26.774048034+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:40:26.774048034+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:40:26.774048034+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:40:26.774048034+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:40:26.774048034+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:40:26.774048034+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:40:26.774048034+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:40:26.774048034+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:40:26.774048034+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:40:26.774048034+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:40:26.774048034+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:40:26.774048034+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:40:26.774048034+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:40:26.774048034+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:40:26.774048034+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:40:26.774048034+00:00 stderr F }, "") 2025-12-12T16:40:26.774164547+00:00 stderr F E1212 16:40:26.774078 39670 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:40:30.789791950+00:00 stderr F I1212 16:40:30.789641 39670 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs2490096329 --cleanup 2025-12-12T16:40:30.794416207+00:00 stdout F 2025-12-12T16:40:30.794451507+00:00 stderr F [2025-12-12T16:40:30Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:40:30.794451507+00:00 stderr F [2025-12-12T16:40:30Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:40:30.811346982+00:00 stderr F I1212 16:40:30.811240 39670 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:40:30.811346982+00:00 stderr F I1212 16:40:30.811284 39670 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:40:30.811346982+00:00 stderr F I1212 16:40:30.811299 39670 daemon.go:1795] state: Degraded 2025-12-12T16:40:30.811346982+00:00 stderr F I1212 16:40:30.811337 39670 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:40:30.907144649+00:00 stdout F Deployments unchanged. 2025-12-12T16:40:30.916172236+00:00 stderr F I1212 16:40:30.916120 39670 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:40:30.917443328+00:00 stderr F I1212 16:40:30.917400 39670 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:40:30.917443328+00:00 stderr F W1212 16:40:30.917426 39670 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:40:30.917461318+00:00 stderr F I1212 16:40:30.917442 39670 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:40:30.986296658+00:00 stderr F E1212 16:40:30.986168 39670 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:40:30.986296658+00:00 stderr F bytes.Join({ 2025-12-12T16:40:30.986296658+00:00 stderr F "{", 2025-12-12T16:40:30.986296658+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:40:30.986296658+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:40:30.986296658+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:40:30.986296658+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:40:30.986296658+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:40:30.986296658+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:40:30.986296658+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:40:30.986296658+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:40:30.986296658+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:40:30.986296658+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:40:30.986296658+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:40:30.986296658+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:40:30.986296658+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:40:30.986296658+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:40:30.986296658+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:40:30.986296658+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:40:30.986296658+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:40:30.986296658+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:40:30.986296658+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:40:30.986296658+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:40:30.986296658+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:40:30.986296658+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:40:30.986296658+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:40:30.986296658+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:40:30.986296658+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:40:30.986296658+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:40:30.986296658+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:40:30.986296658+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:40:30.986296658+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:40:30.986296658+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:40:30.986296658+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:40:30.986296658+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:40:30.986296658+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:40:30.986296658+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:40:30.986296658+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:40:30.986296658+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:40:30.986296658+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:40:30.986296658+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:40:30.986296658+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:40:30.986296658+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:40:30.986296658+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:40:30.986296658+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:40:30.986296658+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:40:30.986296658+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:40:30.986296658+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:40:30.986296658+00:00 stderr F }, "") 2025-12-12T16:40:30.986390160+00:00 stderr F E1212 16:40:30.986294 39670 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:40:38.991487711+00:00 stderr F I1212 16:40:38.991214 39670 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs3713469368 --cleanup 2025-12-12T16:40:38.996284992+00:00 stderr F [2025-12-12T16:40:38Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:40:38.996481877+00:00 stdout F 2025-12-12T16:40:38.996491767+00:00 stderr F [2025-12-12T16:40:38Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:40:39.018112010+00:00 stderr F I1212 16:40:39.017157 39670 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:40:39.018112010+00:00 stderr F I1212 16:40:39.017205 39670 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:40:39.018112010+00:00 stderr F I1212 16:40:39.017215 39670 daemon.go:1795] state: Degraded 2025-12-12T16:40:39.018112010+00:00 stderr F I1212 16:40:39.017246 39670 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:40:39.096420678+00:00 stdout F Deployments unchanged. 2025-12-12T16:40:39.111114567+00:00 stderr F I1212 16:40:39.111027 39670 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:40:39.111584199+00:00 stderr F I1212 16:40:39.111524 39670 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:40:39.111584199+00:00 stderr F W1212 16:40:39.111544 39670 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:40:39.111584199+00:00 stderr F I1212 16:40:39.111554 39670 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:40:39.199158999+00:00 stderr F E1212 16:40:39.199082 39670 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:40:39.199158999+00:00 stderr F bytes.Join({ 2025-12-12T16:40:39.199158999+00:00 stderr F "{", 2025-12-12T16:40:39.199158999+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:40:39.199158999+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:40:39.199158999+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:40:39.199158999+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:40:39.199158999+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:40:39.199158999+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:40:39.199158999+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:40:39.199158999+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:40:39.199158999+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:40:39.199158999+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:40:39.199158999+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:40:39.199158999+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:40:39.199158999+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:40:39.199158999+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:40:39.199158999+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:40:39.199158999+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:40:39.199158999+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:40:39.199158999+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:40:39.199158999+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:40:39.199158999+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:40:39.199158999+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:40:39.199158999+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:40:39.199158999+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:40:39.199158999+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:40:39.199158999+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:40:39.199158999+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:40:39.199158999+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:40:39.199158999+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:40:39.199158999+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:40:39.199158999+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:40:39.199158999+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:40:39.199158999+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:40:39.199158999+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:40:39.199158999+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:40:39.199158999+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:40:39.199158999+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:40:39.199158999+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:40:39.199158999+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:40:39.199158999+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:40:39.199158999+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:40:39.199158999+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:40:39.199158999+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:40:39.199158999+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:40:39.199158999+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:40:39.199158999+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:40:39.199158999+00:00 stderr F }, "") 2025-12-12T16:40:39.199256822+00:00 stderr F E1212 16:40:39.199157 39670 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:40:56.097532118+00:00 stderr F I1212 16:40:56.097441 39670 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs55872738 --cleanup 2025-12-12T16:40:56.101116708+00:00 stderr F [2025-12-12T16:40:56Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:40:56.101218271+00:00 stdout F 2025-12-12T16:40:56.101229501+00:00 stderr F [2025-12-12T16:40:56Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:40:56.109768465+00:00 stderr F I1212 16:40:56.109722 39670 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:40:56.109768465+00:00 stderr F I1212 16:40:56.109756 39670 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:40:56.109819447+00:00 stderr F I1212 16:40:56.109766 39670 daemon.go:1795] state: Degraded 2025-12-12T16:40:56.109819447+00:00 stderr F I1212 16:40:56.109801 39670 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:40:56.173713532+00:00 stdout F Deployments unchanged. 2025-12-12T16:40:56.182996475+00:00 stderr F I1212 16:40:56.182935 39670 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:40:56.183434716+00:00 stderr F I1212 16:40:56.183406 39670 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:40:56.183434716+00:00 stderr F W1212 16:40:56.183423 39670 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:40:56.183449547+00:00 stderr F I1212 16:40:56.183432 39670 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:40:56.250339407+00:00 stderr F E1212 16:40:56.249868 39670 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:40:56.250339407+00:00 stderr F bytes.Join({ 2025-12-12T16:40:56.250339407+00:00 stderr F "{", 2025-12-12T16:40:56.250339407+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:40:56.250339407+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:40:56.250339407+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:40:56.250339407+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:40:56.250339407+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:40:56.250339407+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:40:56.250339407+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:40:56.250339407+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:40:56.250339407+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:40:56.250339407+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:40:56.250339407+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:40:56.250339407+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:40:56.250339407+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:40:56.250339407+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:40:56.250339407+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:40:56.250339407+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:40:56.250339407+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:40:56.250339407+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:40:56.250339407+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:40:56.250339407+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:40:56.250339407+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:40:56.250339407+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:40:56.250339407+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:40:56.250339407+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:40:56.250339407+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:40:56.250339407+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:40:56.250339407+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:40:56.250339407+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:40:56.250339407+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:40:56.250339407+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:40:56.250339407+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:40:56.250339407+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:40:56.250339407+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:40:56.250339407+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:40:56.250339407+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:40:56.250339407+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:40:56.250339407+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:40:56.250339407+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:40:56.250339407+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:40:56.250339407+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:40:56.250339407+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:40:56.250339407+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:40:56.250339407+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:40:56.250339407+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:40:56.250339407+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:40:56.250339407+00:00 stderr F }, "") 2025-12-12T16:40:56.250339407+00:00 stderr F E1212 16:40:56.249936 39670 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:41:23.578380334+00:00 stderr F I1212 16:41:23.578313 39670 certificate_writer.go:294] Certificate was synced from controllerconfig resourceVersion 39750 2025-12-12T16:41:28.254131133+00:00 stderr F I1212 16:41:28.254031 39670 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs764323482 --cleanup 2025-12-12T16:41:28.256815441+00:00 stderr F [2025-12-12T16:41:28Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:41:28.256856052+00:00 stdout F 2025-12-12T16:41:28.256863752+00:00 stderr F [2025-12-12T16:41:28Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:41:28.265750325+00:00 stderr F I1212 16:41:28.265693 39670 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:41:28.265780276+00:00 stderr F I1212 16:41:28.265744 39670 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:41:28.265780276+00:00 stderr F I1212 16:41:28.265757 39670 daemon.go:1795] state: Degraded 2025-12-12T16:41:28.265807827+00:00 stderr F I1212 16:41:28.265789 39670 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:41:28.329454206+00:00 stdout F Deployments unchanged. 2025-12-12T16:41:28.340742009+00:00 stderr F I1212 16:41:28.340624 39670 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:41:28.341407566+00:00 stderr F I1212 16:41:28.341363 39670 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:41:28.341407566+00:00 stderr F W1212 16:41:28.341383 39670 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:41:28.341407566+00:00 stderr F I1212 16:41:28.341393 39670 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:41:28.408752448+00:00 stderr F E1212 16:41:28.408658 39670 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:41:28.408752448+00:00 stderr F bytes.Join({ 2025-12-12T16:41:28.408752448+00:00 stderr F "{", 2025-12-12T16:41:28.408752448+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:41:28.408752448+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:41:28.408752448+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:41:28.408752448+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:41:28.408752448+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:41:28.408752448+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:41:28.408752448+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:41:28.408752448+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:41:28.408752448+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:41:28.408752448+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:41:28.408752448+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:41:28.408752448+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:41:28.408752448+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:41:28.408752448+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:41:28.408752448+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:41:28.408752448+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:41:28.408752448+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:41:28.408752448+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:41:28.408752448+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:41:28.408752448+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:41:28.408752448+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:41:28.408752448+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:41:28.408752448+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:41:28.408752448+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:41:28.408752448+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:41:28.408752448+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:41:28.408752448+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:41:28.408752448+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:41:28.408752448+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:41:28.408752448+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:41:28.408752448+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:41:28.408752448+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:41:28.408752448+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:41:28.408752448+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:41:28.408752448+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:41:28.408752448+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:41:28.408752448+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:41:28.408752448+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:41:28.408752448+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:41:28.408752448+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:41:28.408752448+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:41:28.408752448+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:41:28.408752448+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:41:28.408752448+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:41:28.408752448+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:41:28.408752448+00:00 stderr F }, "") 2025-12-12T16:41:28.408752448+00:00 stderr F E1212 16:41:28.408731 39670 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" 2025-12-12T16:42:28.417842243+00:00 stderr F I1212 16:42:28.417728 39670 daemon.go:2057] Running: /run/machine-config-daemon-bin/nmstatectl persist-nic-names --root / --kargs-out /tmp/nmstate-kargs1423270202 --cleanup 2025-12-12T16:42:28.421168535+00:00 stderr F [2025-12-12T16:42:28Z INFO nmstatectl] Nmstate version: 2.2.54 2025-12-12T16:42:28.421343050+00:00 stdout F 2025-12-12T16:42:28.421352510+00:00 stderr F [2025-12-12T16:42:28Z INFO nmstatectl::persist_nic] /etc/systemd/network does not exist, no need to clean up 2025-12-12T16:42:28.432260324+00:00 stderr F I1212 16:42:28.432161 39670 daemon.go:1645] Previous boot ostree-finalize-staged.service appears successful 2025-12-12T16:42:28.432260324+00:00 stderr F I1212 16:42:28.432232 39670 daemon.go:1780] Current+desired config: rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:42:28.432260324+00:00 stderr F I1212 16:42:28.432247 39670 daemon.go:1795] state: Degraded 2025-12-12T16:42:28.432297545+00:00 stderr F I1212 16:42:28.432284 39670 update.go:2651] Running: rpm-ostree cleanup -r 2025-12-12T16:42:28.494438236+00:00 stdout F Deployments unchanged. 2025-12-12T16:42:28.501632477+00:00 stderr F I1212 16:42:28.501573 39670 daemon.go:2255] Validating against current config rendered-master-d582710c680b4cd4536e11249c7e09e9 2025-12-12T16:42:28.502049257+00:00 stderr F I1212 16:42:28.502017 39670 daemon.go:2167] SSH key location ("/home/core/.ssh/authorized_keys.d/ignition") up-to-date! 
2025-12-12T16:42:28.502049257+00:00 stderr F W1212 16:42:28.502033 39670 daemon.go:2791] osImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" is not a digest; using a digest is recommended 2025-12-12T16:42:28.502049257+00:00 stderr F I1212 16:42:28.502041 39670 image_manager_helper.go:92] Running captured: rpm-ostree kargs 2025-12-12T16:42:28.602481201+00:00 stderr F E1212 16:42:28.602371 39670 on_disk_validation.go:251] content mismatch for file "/var/lib/kubelet/config.json" (-want +got): 2025-12-12T16:42:28.602481201+00:00 stderr F bytes.Join({ 2025-12-12T16:42:28.602481201+00:00 stderr F "{", 2025-12-12T16:42:28.602481201+00:00 stderr F + `"auths":{"cloud.openshift.com":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2Ut`, 2025-12-12T16:42:28.602481201+00:00 stderr F + "ZGV2K29jbV9hY2Nlc3NfMWI4OTIxNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6", 2025-12-12T16:42:28.602481201+00:00 stderr F + "Rk03WUxFT1hIWDQ0VVpEM1lZME9QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdT", 2025-12-12T16:42:28.602481201+00:00 stderr F + `RE1KWTZEMFBBMTEzUzU4Vg==","email":"pablintino@gmail.com"},"quay.`, 2025-12-12T16:42:28.602481201+00:00 stderr F + `io":{"auth":"b3BlbnNoaWZ0LXJlbGVhc2UtZGV2K29jbV9hY2Nlc3NfMWI4OTI`, 2025-12-12T16:42:28.602481201+00:00 stderr F + "xNzU1MmJjNDJkMWJlM2ZiMDZhMWFlZDAwMWE6Rk03WUxFT1hIWDQ0VVpEM1lZME9", 2025-12-12T16:42:28.602481201+00:00 stderr F + `QTDlXVVNMMVFKTzUxOUYzQUEzNVhGT1BWRUdTRE1KWTZEMFBBMTEzUzU4Vg==","`, 2025-12-12T16:42:28.602481201+00:00 stderr F + `email":"pablintino@gmail.com"},"registry.connect.redhat.com":{"a`, 2025-12-12T16:42:28.602481201+00:00 stderr F + `uth":"fHVoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0OD`, 2025-12-12T16:42:28.602481201+00:00 stderr F + "g3MTpleUpoYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTm", 2025-12-12T16:42:28.602481201+00:00 stderr F + "haakEwT0RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZG", 2025-12-12T16:42:28.602481201+00:00 stderr F + "xYY3oxTUwzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaV", 2025-12-12T16:42:28.602481201+00:00 stderr F + "p2TkFremVTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRV", 2025-12-12T16:42:28.602481201+00:00 stderr F + "lheEkxSmFjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZD", 2025-12-12T16:42:28.602481201+00:00 stderr F + "RBNGw0MVVjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMX", 2025-12-12T16:42:28.602481201+00:00 stderr F + "NZWnFwSFZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2", 2025-12-12T16:42:28.602481201+00:00 stderr F + "tsQ1lrM05UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMT", 2025-12-12T16:42:28.602481201+00:00 stderr F + "RnNFE4TmF6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVj", 2025-12-12T16:42:28.602481201+00:00 stderr F + "VxS1hNY0g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYj", 2025-12-12T16:42:28.602481201+00:00 stderr F + "F6cmRSbC1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ2", 2025-12-12T16:42:28.602481201+00:00 stderr F + "9HNlFVclE5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTm", 2025-12-12T16:42:28.602481201+00:00 stderr F + "M4UWZnblBFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk", 2025-12-12T16:42:28.602481201+00:00 stderr F + "9pWGwtd1pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNl", 2025-12-12T16:42:28.602481201+00:00 stderr F + "MzYmR2TEFjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTW", 2025-12-12T16:42:28.602481201+00:00 stderr F + `U1bWUtWjRFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==",`, 
2025-12-12T16:42:28.602481201+00:00 stderr F + `"email":"pablintino@gmail.com"},"registry.redhat.io":{"auth":"fH`, 2025-12-12T16:42:28.602481201+00:00 stderr F + "VoYy1wb29sLWU0YTY0ZjFlLTc1MmUtNGVhNi1hNWM5LTIwMDY2YzE0ODg3MTpleU", 2025-12-12T16:42:28.602481201+00:00 stderr F + "poYkdjaU9pSlNVelV4TWlKOS5leUp6ZFdJaU9pSmpaVE5tWWpsaU9HTmhaakEwT0", 2025-12-12T16:42:28.602481201+00:00 stderr F + "RreVlqZGtOV1k0TUdabVptRTFZbVpqTUNKOS51b0pIaGg4RkY4ZWI4ZGxYY3oxTU", 2025-12-12T16:42:28.602481201+00:00 stderr F + "wzUURzLUQ2Qk5ZRTFncDZOQmRmbUl5bFRKTW9hdzg5V2VQNGt5ZU1WaVp2TkFrem", 2025-12-12T16:42:28.602481201+00:00 stderr F + "VTYWQ3emtQcDZCcXU2NlFjazNCVEpHckt4cHJYVHBxRVVyYXVKazljRVlheEkxSm", 2025-12-12T16:42:28.602481201+00:00 stderr F + "FjTWV4TkxZdURZMHZJSEdNRHBqUS1fSldyZmxJNldWMXVkUlF0ZjhuZDRBNGw0MV", 2025-12-12T16:42:28.602481201+00:00 stderr F + "VjVXlIVjRDQmZ3b1dGRWdmX2ZwZ08zaUhlSjZuM2lHOURPa3dKYVcyMXNZWnFwSF", 2025-12-12T16:42:28.602481201+00:00 stderr F + "ZpbkN3Q2hwOTNabHlEcFhReUFkSzFDay12YmtHeUlaSjJQUTZPS0RIb2tsQ1lrM0", 2025-12-12T16:42:28.602481201+00:00 stderr F + "5UdDVJbDVyUU5ZY3Fxc0dGRjhEcTJpdVUzdmFQaDR4aV9zV2I1R1dtMTRnNFE4Tm", 2025-12-12T16:42:28.602481201+00:00 stderr F + "F6SVB0QmloUGVHRW03NERtWE5JRVpXR3UyQjB0b0lYZjlFV1hISjhtVjVxS1hNY0", 2025-12-12T16:42:28.602481201+00:00 stderr F + "g5cURacDQ4djFnUUhkZWdvNVJPU2t1ZmE5Mm96LWdoMXdsaWQ1VDRNYjF6cmRSbC", 2025-12-12T16:42:28.602481201+00:00 stderr F + "1XSWYyZm9oNjBYR3UtR1VRT0JNaW9pNGV5MlZpRFJkMVFmTVJhbnhnZ29HNlFVcl", 2025-12-12T16:42:28.602481201+00:00 stderr F + "E5Y0JNQS1DWE9VWnZFWHFKVmptV0N0MkM4UEFNUDZaOUN6QkwxVldiTmM4UWZnbl", 2025-12-12T16:42:28.602481201+00:00 stderr F + "BFRWxKMUw2T1p5S295Z3V3YXhrZFhKaEN6Rk10bFZuckRHUGpFaFJGYk9pWGwtd1", 2025-12-12T16:42:28.602481201+00:00 stderr F + "pYb25OS2w2S1NMUFpJdWZuV1gtMnlVdUV4ZEVRejlMYXFIekxETERDNlMzYmR2TE", 2025-12-12T16:42:28.602481201+00:00 stderr F + "FjaDlJQk1uT0xOYy1ZRHh5RFdVaTFMeW9WV3MtZzdiaWItV3hSUTZHTWU1bWUtWj", 2025-12-12T16:42:28.602481201+00:00 stderr F + `RFWTdWUDctQml6U1cxcTVzRTI2TkQ0dkExVG5vYUJlZEpqMzlCSQ==","email":`, 2025-12-12T16:42:28.602481201+00:00 stderr F + `"pablintino@gmail.com"}}`, 2025-12-12T16:42:28.602481201+00:00 stderr F ... 
// 2 identical bytes 2025-12-12T16:42:28.602481201+00:00 stderr F }, "") 2025-12-12T16:42:28.602590394+00:00 stderr F E1212 16:42:28.602456 39670 writer.go:231] Marking Degraded due to: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"" ././@LongLink0000644000000000000000000000030200000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015117043062032776 5ustar zuulzuul././@LongLink0000644000000000000000000000030700000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/kube-rbac-proxy/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000644000175000017500000000227415117043043033004 0ustar zuulzuul2025-12-12T16:16:23.236760214+00:00 stderr F W1212 16:16:23.236511 6583 deprecated.go:66] 2025-12-12T16:16:23.236760214+00:00 stderr F ==== Removed Flag Warning ====================== 2025-12-12T16:16:23.236760214+00:00 stderr F 2025-12-12T16:16:23.236760214+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more. 2025-12-12T16:16:23.236760214+00:00 stderr F 2025-12-12T16:16:23.236760214+00:00 stderr F =============================================== 2025-12-12T16:16:23.236760214+00:00 stderr F 2025-12-12T16:16:23.236760214+00:00 stderr F I1212 16:16:23.236698 6583 kube-rbac-proxy.go:532] Reading config file: /etc/kube-rbac-proxy/config-file.yaml 2025-12-12T16:16:23.238698862+00:00 stderr F I1212 16:16:23.238642 6583 kube-rbac-proxy.go:235] Valid token audiences: 2025-12-12T16:16:23.240709611+00:00 stderr F I1212 16:16:23.240372 6583 kube-rbac-proxy.go:349] Reading certificate files 2025-12-12T16:16:23.241426638+00:00 stderr F I1212 16:16:23.241291 6583 kube-rbac-proxy.go:397] Starting TCP socket on 0.0.0.0:9001 2025-12-12T16:16:23.242437353+00:00 stderr F I1212 16:16:23.242406 6583 kube-rbac-proxy.go:404] Listening securely on 0.0.0.0:9001 ././@LongLink0000644000000000000000000000024600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_router-default-68cf44c8b8-bqttx_1a9ac0b2-cad1-44fa-993c-0ae63193f086/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_rout0000755000175000017500000000000015117043043033171 5ustar zuulzuul././@LongLink0000644000000000000000000000025500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_router-default-68cf44c8b8-bqttx_1a9ac0b2-cad1-44fa-993c-0ae63193f086/router/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_rout0000755000175000017500000000000015117043062033172 5ustar zuulzuul././@LongLink0000644000000000000000000000026200000000000011603 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_router-default-68cf44c8b8-bqttx_1a9ac0b2-cad1-44fa-993c-0ae63193f086/router/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_rout0000644000175000017500000004130415117043043033175 0ustar zuulzuul2025-12-12T16:16:44.897747310+00:00 stderr F I1212 16:16:44.893101 1 template.go:560] "msg"="starting router" "logger"="router" "version"="majorFromGit: \nminorFromGit: \ncommitFromGit: 96bfd2164c7885df9019ce9eeb79d506bd7e871b\nversionFromGit: 4.0.0-581-g96bfd216\ngitTreeState: clean\nbuildDate: 2025-10-21T12:30:19Z\n" 2025-12-12T16:16:44.904605098+00:00 stderr F I1212 16:16:44.904543 1 metrics.go:156] "msg"="router health and metrics port listening on HTTP and HTTPS" "address"="0.0.0.0:1936" "logger"="metrics" 2025-12-12T16:16:44.926101452+00:00 stderr F I1212 16:16:44.926020 1 router.go:214] "msg"="creating a new template router" "logger"="template" "writeDir"="/var/lib/haproxy" 2025-12-12T16:16:44.926140143+00:00 stderr F I1212 16:16:44.926121 1 router.go:298] "msg"="router will coalesce reloads within an interval of each other" "interval"="5s" "logger"="template" 2025-12-12T16:16:44.927298512+00:00 stderr F I1212 16:16:44.927254 1 router.go:368] "msg"="watching for changes" "logger"="template" "path"="/etc/pki/tls/private" 2025-12-12T16:16:44.927361863+00:00 stderr F I1212 16:16:44.927340 1 router.go:283] "msg"="router is including routes in all namespaces" "logger"="router" 2025-12-12T16:16:44.940377871+00:00 stderr F W1212 16:16:44.940306 1 reflector.go:547] github.com/openshift/router/pkg/router/controller/factory/factory.go:124: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io) 2025-12-12T16:16:44.940377871+00:00 stderr F E1212 16:16:44.940359 1 reflector.go:150] github.com/openshift/router/pkg/router/controller/factory/factory.go:124: Failed to watch *v1.Route: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io) 2025-12-12T16:16:44.954337582+00:00 stderr F I1212 16:16:44.954230 1 reflector.go:359] Caches populated for *v1.EndpointSlice from github.com/openshift/router/pkg/router/controller/factory/factory.go:124 2025-12-12T16:16:44.955623853+00:00 stderr F I1212 16:16:44.955550 1 reflector.go:359] Caches populated for *v1.Service from github.com/openshift/router/pkg/router/template/service_lookup.go:33 2025-12-12T16:16:46.314225622+00:00 stderr F W1212 16:16:46.314127 1 reflector.go:547] github.com/openshift/router/pkg/router/controller/factory/factory.go:124: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io) 2025-12-12T16:16:46.314225622+00:00 stderr F E1212 16:16:46.314172 1 reflector.go:150] github.com/openshift/router/pkg/router/controller/factory/factory.go:124: Failed to watch *v1.Route: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io) 2025-12-12T16:16:46.331383311+00:00 stderr F I1212 16:16:46.330687 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:46.331383311+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:46.331383311+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:47.332398860+00:00 stderr F I1212 16:16:47.330321 1 healthz.go:255] backend-http,has-synced check failed: healthz 
2025-12-12T16:16:47.332398860+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:47.332398860+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:48.328542170+00:00 stderr F I1212 16:16:48.328472 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:48.328542170+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:48.328542170+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:48.534256403+00:00 stderr F W1212 16:16:48.530317 1 reflector.go:547] github.com/openshift/router/pkg/router/controller/factory/factory.go:124: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io) 2025-12-12T16:16:48.534256403+00:00 stderr F E1212 16:16:48.530349 1 reflector.go:150] github.com/openshift/router/pkg/router/controller/factory/factory.go:124: Failed to watch *v1.Route: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io) 2025-12-12T16:16:49.332687726+00:00 stderr F I1212 16:16:49.332600 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:49.332687726+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:49.332687726+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:50.344405386+00:00 stderr F I1212 16:16:50.344344 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:50.344405386+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:50.344405386+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:51.326734969+00:00 stderr F I1212 16:16:51.326434 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:51.326734969+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:51.326734969+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:52.327956613+00:00 stderr F I1212 16:16:52.327886 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:52.327956613+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:52.327956613+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:52.716810927+00:00 stderr F W1212 16:16:52.716716 1 reflector.go:547] github.com/openshift/router/pkg/router/controller/factory/factory.go:124: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io) 2025-12-12T16:16:52.716810927+00:00 stderr F E1212 16:16:52.716798 1 reflector.go:150] github.com/openshift/router/pkg/router/controller/factory/factory.go:124: Failed to watch *v1.Route: failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io) 2025-12-12T16:16:53.330584430+00:00 stderr F I1212 16:16:53.327669 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:53.330584430+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:53.330584430+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:54.332053991+00:00 stderr F I1212 16:16:54.331988 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:54.332053991+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:54.332053991+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:55.331975613+00:00 stderr F I1212 
16:16:55.331892 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:55.331975613+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:55.331975613+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:56.340825054+00:00 stderr F I1212 16:16:56.340703 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:56.340825054+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:56.340825054+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:57.330749411+00:00 stderr F I1212 16:16:57.330689 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:57.330749411+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:57.330749411+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:58.325442726+00:00 stderr F I1212 16:16:58.325349 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:58.325442726+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:58.325442726+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:16:59.327078801+00:00 stderr F I1212 16:16:59.327017 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:16:59.327078801+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:16:59.327078801+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:17:00.329416181+00:00 stderr F I1212 16:17:00.328747 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:17:00.329416181+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:17:00.329416181+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:17:01.326497265+00:00 stderr F I1212 16:17:01.326326 1 healthz.go:255] backend-http,has-synced check failed: healthz 2025-12-12T16:17:01.326497265+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:17:01.326497265+00:00 stderr F [-]has-synced failed: Router not synced 2025-12-12T16:17:01.564133766+00:00 stderr F I1212 16:17:01.564053 1 reflector.go:359] Caches populated for *v1.Route from github.com/openshift/router/pkg/router/controller/factory/factory.go:124 2025-12-12T16:17:01.792617645+00:00 stderr F I1212 16:17:01.792552 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:01.792983373+00:00 stderr F I1212 16:17:01.792950 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:01.793352032+00:00 stderr F I1212 16:17:01.793324 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:01.793614049+00:00 stderr F I1212 16:17:01.793579 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:01.793777953+00:00 stderr F I1212 16:17:01.793754 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:01.795084705+00:00 stderr F E1212 16:17:01.795047 1 haproxy.go:418] can't scrape HAProxy: dial unix /var/lib/haproxy/run/haproxy.sock: connect: no such file or directory 2025-12-12T16:17:02.327090773+00:00 stderr F I1212 16:17:02.326514 1 healthz.go:255] backend-http check failed: healthz 2025-12-12T16:17:02.327090773+00:00 stderr F [-]backend-http failed: backend reported failure 2025-12-12T16:17:02.483480012+00:00 stderr F I1212 16:17:02.483401 1 router.go:665] 
"msg"="router reloaded" "logger"="template" "output"=" - Checking http://localhost:80 ...\n - Health check ok : 0 retry attempt(s).\n" 2025-12-12T16:17:15.519362500+00:00 stderr F I1212 16:17:15.519129 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:15.519405951+00:00 stderr F I1212 16:17:15.519395 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:15.520356885+00:00 stderr F I1212 16:17:15.519846 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:15.520356885+00:00 stderr F I1212 16:17:15.520103 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:15.520356885+00:00 stderr F I1212 16:17:15.520280 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:15.646724650+00:00 stderr F I1212 16:17:15.646561 1 router.go:665] "msg"="router reloaded" "logger"="template" "output"=" - Checking http://localhost:80 ...\n - Health check ok : 0 retry attempt(s).\n" 2025-12-12T16:17:20.448353984+00:00 stderr F I1212 16:17:20.448227 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:20.448431496+00:00 stderr F I1212 16:17:20.448404 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:20.448923821+00:00 stderr F I1212 16:17:20.448730 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:20.448935751+00:00 stderr F I1212 16:17:20.448924 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:20.449216219+00:00 stderr F I1212 16:17:20.449111 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:17:23.555428208+00:00 stderr F I1212 16:17:23.555319 1 router.go:665] "msg"="router reloaded" "logger"="template" "output"=" - Checking http://localhost:80 ...\n - Health check ok : 0 retry attempt(s).\n" 2025-12-12T16:18:00.901607288+00:00 stderr F I1212 16:18:00.900581 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:18:00.901607288+00:00 stderr F I1212 16:18:00.900711 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:18:00.901607288+00:00 stderr F I1212 16:18:00.901212 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:18:00.901607288+00:00 stderr F I1212 16:18:00.901470 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:18:00.902265584+00:00 stderr F I1212 16:18:00.901713 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:18:00.977560095+00:00 stderr F I1212 16:18:00.976655 1 router.go:665] "msg"="router reloaded" "logger"="template" "output"=" - Checking http://localhost:80 ...\n - Health check ok : 0 retry attempt(s).\n" 2025-12-12T16:19:03.522136095+00:00 stderr F I1212 16:19:03.522045 1 reflector.go:359] Caches populated for *v1.Service from github.com/openshift/router/pkg/router/template/service_lookup.go:33 2025-12-12T16:19:03.701706235+00:00 stderr F I1212 16:19:03.701628 1 reflector.go:359] Caches populated for *v1.EndpointSlice from github.com/openshift/router/pkg/router/controller/factory/factory.go:124 2025-12-12T16:19:05.692143253+00:00 stderr F I1212 16:19:05.692070 1 template_helper.go:370] "msg"="parseIPList 
empty list found" "logger"="template" 2025-12-12T16:19:05.692320258+00:00 stderr F I1212 16:19:05.692287 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:19:05.692644786+00:00 stderr F I1212 16:19:05.692613 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:19:05.692838020+00:00 stderr F I1212 16:19:05.692799 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:19:05.693052576+00:00 stderr F I1212 16:19:05.693032 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:19:05.750347222+00:00 stderr F I1212 16:19:05.750285 1 router.go:665] "msg"="router reloaded" "logger"="template" "output"=" - Checking http://localhost:80 ...\n - Health check ok : 0 retry attempt(s).\n" 2025-12-12T16:27:03.000723223+00:00 stderr F I1212 16:27:03.000609 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:27:03.000816555+00:00 stderr F I1212 16:27:03.000795 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:27:03.001116373+00:00 stderr F I1212 16:27:03.001079 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:27:03.002413025+00:00 stderr F I1212 16:27:03.002370 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:27:03.002700103+00:00 stderr F I1212 16:27:03.002675 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:27:03.075472535+00:00 stderr F I1212 16:27:03.075345 1 router.go:665] "msg"="router reloaded" "logger"="template" "output"=" - Checking http://localhost:80 ...\n - Health check ok : 0 retry attempt(s).\n" 2025-12-12T16:27:08.005722163+00:00 stderr F I1212 16:27:08.005641 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:27:08.005911908+00:00 stderr F I1212 16:27:08.005882 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:27:08.006639496+00:00 stderr F I1212 16:27:08.006378 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:27:08.006798140+00:00 stderr F I1212 16:27:08.006714 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:27:08.007136989+00:00 stderr F I1212 16:27:08.006952 1 template_helper.go:370] "msg"="parseIPList empty list found" "logger"="template" 2025-12-12T16:27:08.083217074+00:00 stderr F I1212 16:27:08.083090 1 router.go:665] "msg"="router reloaded" "logger"="template" "output"=" - Checking http://localhost:80 ...\n - Health check ok : 0 retry attempt(s).\n" ././@LongLink0000644000000000000000000000024400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-12-crc_24732491-f54a-410e-a29e-c8fb26fd9cde/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000755000175000017500000000000015117043043033043 5ustar zuulzuul././@LongLink0000644000000000000000000000025300000000000011603 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-12-crc_24732491-f54a-410e-a29e-c8fb26fd9cde/pruner/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000755000175000017500000000000015117043062033044 5ustar zuulzuul././@LongLink0000644000000000000000000000026000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-12-crc_24732491-f54a-410e-a29e-c8fb26fd9cde/pruner/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000644000175000017500000000364715117043043033057 0ustar zuulzuul2025-12-12T16:17:33.511842905+00:00 stderr F I1212 16:17:33.505753 1 cmd.go:39] &{ true {false} prune true map[cert-dir:0xc0008ac3c0 max-eligible-revision:0xc0008ac140 protected-revisions:0xc0008ac1e0 resource-dir:0xc0008ac280 static-pod-name:0xc0008ac320 v:0xc0008ad4a0] [0xc0008ad4a0 0xc0008ac140 0xc0008ac1e0 0xc0008ac280 0xc0008ac3c0 0xc0008ac320] [] map[cert-dir:0xc0008ac3c0 help:0xc0008ad860 log-flush-frequency:0xc0008ad400 max-eligible-revision:0xc0008ac140 protected-revisions:0xc0008ac1e0 resource-dir:0xc0008ac280 static-pod-name:0xc0008ac320 v:0xc0008ad4a0 vmodule:0xc0008ad540] [0xc0008ac140 0xc0008ac1e0 0xc0008ac280 0xc0008ac320 0xc0008ac3c0 0xc0008ad400 0xc0008ad4a0 0xc0008ad540 0xc0008ad860] [0xc0008ac3c0 0xc0008ad860 0xc0008ad400 0xc0008ac140 0xc0008ac1e0 0xc0008ac280 0xc0008ac320 0xc0008ad4a0 0xc0008ad540] map[104:0xc0008ad860 118:0xc0008ad4a0] [] -1 0 0xc0007e5020 true 0xae3c00 []} 2025-12-12T16:17:33.511842905+00:00 stderr F I1212 16:17:33.506944 1 cmd.go:40] (*prune.PruneOptions)(0xc000862320)({ 2025-12-12T16:17:33.511842905+00:00 stderr F MaxEligibleRevision: (int) 12, 2025-12-12T16:17:33.511842905+00:00 stderr F ProtectedRevisions: ([]int) (len=6 cap=6) { 2025-12-12T16:17:33.511842905+00:00 stderr F (int) 7, 2025-12-12T16:17:33.511842905+00:00 stderr F (int) 8, 2025-12-12T16:17:33.511842905+00:00 stderr F (int) 9, 2025-12-12T16:17:33.511842905+00:00 stderr F (int) 10, 2025-12-12T16:17:33.511842905+00:00 stderr F (int) 11, 2025-12-12T16:17:33.511842905+00:00 stderr F (int) 12 2025-12-12T16:17:33.511842905+00:00 stderr F }, 2025-12-12T16:17:33.511842905+00:00 stderr F ResourceDir: (string) (len=36) "/etc/kubernetes/static-pod-resources", 2025-12-12T16:17:33.511842905+00:00 stderr F CertDir: (string) (len=20) "kube-apiserver-certs", 2025-12-12T16:17:33.511842905+00:00 stderr F StaticPodName: (string) (len=18) "kube-apiserver-pod" 2025-12-12T16:17:33.511842905+00:00 stderr F }) ././@LongLink0000644000000000000000000000031200000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043043033075 5ustar zuulzuul././@LongLink0000644000000000000000000000031700000000000011604 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/util/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043063033077 5ustar zuulzuul././@LongLink0000644000000000000000000000032400000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/util/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000644000175000017500000000010715117043043033075 0ustar zuulzuul2025-12-12T16:26:54.315497853+00:00 stdout F '/bin/cpb' -> '/util/cpb' ././@LongLink0000644000000000000000000000031700000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/pull/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043063033077 5ustar zuulzuul././@LongLink0000644000000000000000000000032400000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/pull/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000644000175000017500000021346615117043043033113 0ustar zuulzuul2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: / 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /afs 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /boot 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /bundle 2025-12-12T16:27:06.041132351+00:00 stdout F skipping all files in the dir: /dev 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /etc 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /etc/X11 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /etc/X11/applnk 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /etc/X11/fontpath.d 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /etc/X11/xinit 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /etc/X11/xinit/xinitrc.d 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /etc/X11/xinit/xinput.d 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /etc/alternatives 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /etc/bash_completion.d 2025-12-12T16:27:06.041132351+00:00 stdout F skipping a dir without errors: /etc/crypto-policies 2025-12-12T16:27:06.041370367+00:00 stdout F skipping a dir without errors: /etc/crypto-policies/back-ends 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/crypto-policies/local.d 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/crypto-policies/policies 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: 
/etc/crypto-policies/policies/modules 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/crypto-policies/state 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/default 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/dnf 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/dnf/aliases.d 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/dnf/modules.d 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/dnf/modules.defaults.d 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/dnf/plugins 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/dnf/protected.d 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/dnf/vars 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/fonts 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/fonts/conf.d 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/gcrypt 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/gnupg 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/gss 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/gss/mech.d 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/issue.d 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/krb5.conf.d 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/ld.so.conf.d 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/libreport 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/libreport/events 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/libreport/events.d 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/libreport/plugins 2025-12-12T16:27:06.043694296+00:00 stdout F skipping a dir without errors: /etc/libreport/workflows.d 2025-12-12T16:27:06.043728227+00:00 stdout F skipping a dir without errors: /etc/logrotate.d 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/motd.d 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/openldap 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/openldap/certs 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/opt 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/pkcs11 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/pkcs11/modules 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/pki 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/pki/ca-trust 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/pki/ca-trust/extracted 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/pki/ca-trust/extracted/edk2 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/pki/ca-trust/extracted/java 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/pki/ca-trust/extracted/openssl 2025-12-12T16:27:06.046209329+00:00 stdout F skipping a dir without errors: /etc/pki/ca-trust/extracted/pem 
2025-12-12T16:27:06.048270562+00:00 stdout F skipping a dir without errors: /etc/pki/ca-trust/extracted/pem/directory-hash 2025-12-12T16:27:06.051730099+00:00 stdout F skipping a dir without errors: /etc/pki/ca-trust/source 2025-12-12T16:27:06.051752830+00:00 stdout F skipping a dir without errors: /etc/pki/ca-trust/source/anchors 2025-12-12T16:27:06.051798511+00:00 stdout F skipping a dir without errors: /etc/pki/ca-trust/source/blocklist 2025-12-12T16:27:06.051906084+00:00 stdout F skipping a dir without errors: /etc/pki/entitlement 2025-12-12T16:27:06.051996096+00:00 stdout F skipping a dir without errors: /etc/pki/java 2025-12-12T16:27:06.052027907+00:00 stdout F skipping a dir without errors: /etc/pki/product 2025-12-12T16:27:06.052059438+00:00 stdout F skipping a dir without errors: /etc/pki/product-default 2025-12-12T16:27:06.052101259+00:00 stdout F skipping a dir without errors: /etc/pki/rpm-gpg 2025-12-12T16:27:06.052158950+00:00 stdout F skipping a dir without errors: /etc/pki/swid 2025-12-12T16:27:06.052204751+00:00 stdout F skipping a dir without errors: /etc/pki/swid/CA 2025-12-12T16:27:06.052238782+00:00 stdout F skipping a dir without errors: /etc/pki/swid/CA/redhat.com 2025-12-12T16:27:06.052280613+00:00 stdout F skipping a dir without errors: /etc/pki/tls 2025-12-12T16:27:06.052321554+00:00 stdout F skipping a dir without errors: /etc/pki/tls/certs 2025-12-12T16:27:06.052389326+00:00 stdout F skipping a dir without errors: /etc/pki/tls/misc 2025-12-12T16:27:06.052433977+00:00 stdout F skipping a dir without errors: /etc/pki/tls/openssl.d 2025-12-12T16:27:06.052474398+00:00 stdout F skipping a dir without errors: /etc/pki/tls/private 2025-12-12T16:27:06.052515249+00:00 stdout F skipping a dir without errors: /etc/pm 2025-12-12T16:27:06.052543480+00:00 stdout F skipping a dir without errors: /etc/pm/config.d 2025-12-12T16:27:06.052572781+00:00 stdout F skipping a dir without errors: /etc/pm/power.d 2025-12-12T16:27:06.052601821+00:00 stdout F skipping a dir without errors: /etc/pm/sleep.d 2025-12-12T16:27:06.052632002+00:00 stdout F skipping a dir without errors: /etc/popt.d 2025-12-12T16:27:06.052694144+00:00 stdout F skipping a dir without errors: /etc/profile.d 2025-12-12T16:27:06.053880634+00:00 stdout F skipping a dir without errors: /etc/rpm 2025-12-12T16:27:06.053956346+00:00 stdout F skipping a dir without errors: /etc/rwtab.d 2025-12-12T16:27:06.053995407+00:00 stdout F skipping a dir without errors: /etc/sasl2 2025-12-12T16:27:06.054035598+00:00 stdout F skipping a dir without errors: /etc/selinux 2025-12-12T16:27:06.054166171+00:00 stdout F skipping a dir without errors: /etc/skel 2025-12-12T16:27:06.054251443+00:00 stdout F skipping a dir without errors: /etc/ssl 2025-12-12T16:27:06.054331495+00:00 stdout F skipping a dir without errors: /etc/statetab.d 2025-12-12T16:27:06.054398957+00:00 stdout F skipping a dir without errors: /etc/swid 2025-12-12T16:27:06.055290889+00:00 stdout F skipping a dir without errors: /etc/swid/swidtags.d 2025-12-12T16:27:06.055290889+00:00 stdout F skipping a dir without errors: /etc/sysconfig 2025-12-12T16:27:06.055290889+00:00 stdout F skipping a dir without errors: /etc/terminfo 2025-12-12T16:27:06.055290889+00:00 stdout F skipping a dir without errors: /etc/xdg 2025-12-12T16:27:06.055290889+00:00 stdout F skipping a dir without errors: /etc/xdg/autostart 2025-12-12T16:27:06.055290889+00:00 stdout F skipping a dir without errors: /etc/yum.repos.d 2025-12-12T16:27:06.055290889+00:00 stdout F skipping a dir without errors: /home 
2025-12-12T16:27:06.055290889+00:00 stdout F skipping a dir without errors: /licenses 2025-12-12T16:27:06.055290889+00:00 stdout F skipping a dir without errors: /manifests 2025-12-12T16:27:06.055290889+00:00 stdout F skipping a dir without errors: /media 2025-12-12T16:27:06.055290889+00:00 stdout F skipping a dir without errors: /metadata 2025-12-12T16:27:06.055314840+00:00 stdout F skipping a dir without errors: /mnt 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /opt 2025-12-12T16:27:06.057257749+00:00 stdout F skipping all files in the dir: /proc 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /root 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /root/buildinfo 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /root/buildinfo/content_manifests 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /run 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /run/blkid 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /run/lock 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /run/motd.d 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /run/secrets 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /run/secrets/kubernetes.io 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /run/secrets/kubernetes.io/serviceaccount 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /run/secrets/kubernetes.io/serviceaccount/..2025_12_12_16_26_53.3849831187 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /run/secrets/rhsm 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /run/secrets/rhsm/ca 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /run/secrets/rhsm/syspurpose 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /run/setrans 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /srv 2025-12-12T16:27:06.057257749+00:00 stdout F skipping all files in the dir: /sys 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /tests 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /tests/scorecard 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /tmp 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /usr 2025-12-12T16:27:06.057257749+00:00 stdout F skipping a dir without errors: /usr/bin 2025-12-12T16:27:06.061949778+00:00 stdout F skipping a dir without errors: /usr/games 2025-12-12T16:27:06.061981229+00:00 stdout F skipping a dir without errors: /usr/include 2025-12-12T16:27:06.062023700+00:00 stdout F skipping a dir without errors: /usr/lib 2025-12-12T16:27:06.062131492+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id 2025-12-12T16:27:06.062165213+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/00 2025-12-12T16:27:06.062248395+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/01 2025-12-12T16:27:06.062293837+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/03 2025-12-12T16:27:06.073874530+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/04 2025-12-12T16:27:06.073874530+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/06 2025-12-12T16:27:06.073874530+00:00 stdout F skipping a dir 
without errors: /usr/lib/.build-id/07 2025-12-12T16:27:06.073874530+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/08 2025-12-12T16:27:06.073874530+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/0d 2025-12-12T16:27:06.073874530+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/0e 2025-12-12T16:27:06.073874530+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/11 2025-12-12T16:27:06.073874530+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/12 2025-12-12T16:27:06.073874530+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/13 2025-12-12T16:27:06.073874530+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/15 2025-12-12T16:27:06.073942761+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/16 2025-12-12T16:27:06.073952182+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/17 2025-12-12T16:27:06.074015243+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/19 2025-12-12T16:27:06.074103675+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/1a 2025-12-12T16:27:06.074198708+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/1b 2025-12-12T16:27:06.074246709+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/1c 2025-12-12T16:27:06.074330711+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/1f 2025-12-12T16:27:06.074375722+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/21 2025-12-12T16:27:06.074426354+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/22 2025-12-12T16:27:06.074492215+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/23 2025-12-12T16:27:06.074555517+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/25 2025-12-12T16:27:06.074608378+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/26 2025-12-12T16:27:06.074654639+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/27 2025-12-12T16:27:06.074703271+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/28 2025-12-12T16:27:06.074765632+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/29 2025-12-12T16:27:06.074827464+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/2a 2025-12-12T16:27:06.074878355+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/2c 2025-12-12T16:27:06.074931366+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/2e 2025-12-12T16:27:06.075005988+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/2f 2025-12-12T16:27:06.075056890+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/31 2025-12-12T16:27:06.075117751+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/32 2025-12-12T16:27:06.075262935+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/33 2025-12-12T16:27:06.075262935+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/34 2025-12-12T16:27:06.075312496+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/36 2025-12-12T16:27:06.075376458+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/38 2025-12-12T16:27:06.075465140+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/3a 2025-12-12T16:27:06.075515031+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/3b 2025-12-12T16:27:06.075580643+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/3f 2025-12-12T16:27:06.075654165+00:00 stdout F skipping a dir without errors: 
/usr/lib/.build-id/41 2025-12-12T16:27:06.075715276+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/42 2025-12-12T16:27:06.075801858+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/43 2025-12-12T16:27:06.075878780+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/44 2025-12-12T16:27:06.075940242+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/45 2025-12-12T16:27:06.076004264+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/49 2025-12-12T16:27:06.076055465+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/4a 2025-12-12T16:27:06.076106716+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/4b 2025-12-12T16:27:06.076169318+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/4c 2025-12-12T16:27:06.076261430+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/4d 2025-12-12T16:27:06.076322942+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/4e 2025-12-12T16:27:06.076450705+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/4f 2025-12-12T16:27:06.076462695+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/53 2025-12-12T16:27:06.076539637+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/54 2025-12-12T16:27:06.076661280+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/55 2025-12-12T16:27:06.076661280+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/57 2025-12-12T16:27:06.076715692+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/58 2025-12-12T16:27:06.076766463+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/5a 2025-12-12T16:27:06.076832375+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/5b 2025-12-12T16:27:06.076882046+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/5c 2025-12-12T16:27:06.076930717+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/5d 2025-12-12T16:27:06.076996159+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/5f 2025-12-12T16:27:06.077081261+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/60 2025-12-12T16:27:06.077131132+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/61 2025-12-12T16:27:06.077300686+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/62 2025-12-12T16:27:06.077345898+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/63 2025-12-12T16:27:06.077398359+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/64 2025-12-12T16:27:06.081365309+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/65 2025-12-12T16:27:06.081400970+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/66 2025-12-12T16:27:06.081452191+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/68 2025-12-12T16:27:06.081496683+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/69 2025-12-12T16:27:06.081567144+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/6c 2025-12-12T16:27:06.081614466+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/6e 2025-12-12T16:27:06.081671327+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/6f 2025-12-12T16:27:06.081731128+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/71 2025-12-12T16:27:06.081794290+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/72 2025-12-12T16:27:06.081889542+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/74 
2025-12-12T16:27:06.081993955+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/75 2025-12-12T16:27:06.082044376+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/76 2025-12-12T16:27:06.082121958+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/77 2025-12-12T16:27:06.082196290+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/78 2025-12-12T16:27:06.082257142+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/7a 2025-12-12T16:27:06.082332894+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/7d 2025-12-12T16:27:06.082365415+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/7e 2025-12-12T16:27:06.082437686+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/7f 2025-12-12T16:27:06.082503748+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/80 2025-12-12T16:27:06.082550269+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/82 2025-12-12T16:27:06.082635161+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/83 2025-12-12T16:27:06.082648192+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/86 2025-12-12T16:27:06.082703373+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/87 2025-12-12T16:27:06.082763845+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/88 2025-12-12T16:27:06.082812146+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/89 2025-12-12T16:27:06.082875517+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/8b 2025-12-12T16:27:06.082922349+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/8c 2025-12-12T16:27:06.082971740+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/8e 2025-12-12T16:27:06.083018351+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/8f 2025-12-12T16:27:06.083067002+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/90 2025-12-12T16:27:06.083126784+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/93 2025-12-12T16:27:06.083287748+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/94 2025-12-12T16:27:06.083287748+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/95 2025-12-12T16:27:06.083329039+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/97 2025-12-12T16:27:06.083381350+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/98 2025-12-12T16:27:06.083458892+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/99 2025-12-12T16:27:06.083518164+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/9b 2025-12-12T16:27:06.083589956+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/9c 2025-12-12T16:27:06.083638337+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/9d 2025-12-12T16:27:06.083698238+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/9e 2025-12-12T16:27:06.083754630+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/9f 2025-12-12T16:27:06.083801031+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/a0 2025-12-12T16:27:06.083861202+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/a3 2025-12-12T16:27:06.086641693+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/a4 2025-12-12T16:27:06.086682384+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/a5 2025-12-12T16:27:06.086752806+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/a6 
2025-12-12T16:27:06.086817537+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/a7 2025-12-12T16:27:06.086860378+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/a8 2025-12-12T16:27:06.086905529+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/aa 2025-12-12T16:27:06.086971331+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/ac 2025-12-12T16:27:06.087005522+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/ad 2025-12-12T16:27:06.087055723+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/ae 2025-12-12T16:27:06.087105385+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/af 2025-12-12T16:27:06.087148186+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/b0 2025-12-12T16:27:06.087244808+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/b2 2025-12-12T16:27:06.087289469+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/b3 2025-12-12T16:27:06.087335780+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/b4 2025-12-12T16:27:06.087395882+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/b6 2025-12-12T16:27:06.087436933+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/b7 2025-12-12T16:27:06.087476604+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/b8 2025-12-12T16:27:06.087521225+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/b9 2025-12-12T16:27:06.087560466+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/ba 2025-12-12T16:27:06.087604697+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/bb 2025-12-12T16:27:06.087666209+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/bc 2025-12-12T16:27:06.087908715+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/bd 2025-12-12T16:27:06.087908715+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/be 2025-12-12T16:27:06.087908715+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/c0 2025-12-12T16:27:06.087969216+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/c2 2025-12-12T16:27:06.088009277+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/c5 2025-12-12T16:27:06.088051358+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/c6 2025-12-12T16:27:06.088101460+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/c9 2025-12-12T16:27:06.088148841+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/cb 2025-12-12T16:27:06.088366296+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/cc 2025-12-12T16:27:06.088366296+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/ce 2025-12-12T16:27:06.088366296+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/cf 2025-12-12T16:27:06.088366296+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/d0 2025-12-12T16:27:06.088366296+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/d1 2025-12-12T16:27:06.088420478+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/d2 2025-12-12T16:27:06.088471949+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/d3 2025-12-12T16:27:06.088520050+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/d4 2025-12-12T16:27:06.088658494+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/d5 2025-12-12T16:27:06.088704425+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/d6 
2025-12-12T16:27:06.088744856+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/d8 2025-12-12T16:27:06.088787937+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/d9 2025-12-12T16:27:06.088831788+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/da 2025-12-12T16:27:06.088904740+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/db 2025-12-12T16:27:06.088946341+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/dc 2025-12-12T16:27:06.088987342+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/dd 2025-12-12T16:27:06.089040424+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/de 2025-12-12T16:27:06.089084095+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/df 2025-12-12T16:27:06.089127046+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/e0 2025-12-12T16:27:06.089241159+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/e1 2025-12-12T16:27:06.089323581+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/e2 2025-12-12T16:27:06.089365042+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/e3 2025-12-12T16:27:06.089406523+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/e6 2025-12-12T16:27:06.089480405+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/e7 2025-12-12T16:27:06.089556357+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/e8 2025-12-12T16:27:06.089596928+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/e9 2025-12-12T16:27:06.089638109+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/ea 2025-12-12T16:27:06.089677530+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/eb 2025-12-12T16:27:06.089738811+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/ec 2025-12-12T16:27:06.089783372+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/ed 2025-12-12T16:27:06.089824143+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/ef 2025-12-12T16:27:06.089872855+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/f0 2025-12-12T16:27:06.089963887+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/f1 2025-12-12T16:27:06.090017258+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/f4 2025-12-12T16:27:06.090059279+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/f7 2025-12-12T16:27:06.090132991+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/f8 2025-12-12T16:27:06.090198883+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/fa 2025-12-12T16:27:06.090268375+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/fd 2025-12-12T16:27:06.090318426+00:00 stdout F skipping a dir without errors: /usr/lib/.build-id/fe 2025-12-12T16:27:06.090362807+00:00 stdout F skipping a dir without errors: /usr/lib/debug 2025-12-12T16:27:06.090393078+00:00 stdout F skipping a dir without errors: /usr/lib/debug/.dwz 2025-12-12T16:27:06.090461609+00:00 stdout F skipping a dir without errors: /usr/lib/debug/usr 2025-12-12T16:27:06.090501820+00:00 stdout F skipping a dir without errors: /usr/lib/debug/usr/bin 2025-12-12T16:27:06.090532051+00:00 stdout F skipping a dir without errors: /usr/lib/debug/usr/lib 2025-12-12T16:27:06.090562502+00:00 stdout F skipping a dir without errors: /usr/lib/debug/usr/lib64 2025-12-12T16:27:06.090597543+00:00 stdout F skipping a dir without errors: /usr/lib/debug/usr/sbin 
2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/games 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/locale 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/locale/C.utf8 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/locale/C.utf8/LC_MESSAGES 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/modules 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/motd.d 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/fileattrs 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/lua 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/macros.d 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/aarch64-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/alpha-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/alphaev5-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/alphaev56-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/alphaev6-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/alphaev67-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/alphapca56-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/amd64-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv3l-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv4b-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv4l-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv5tejl-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv5tel-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv5tl-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv6hl-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv6l-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv7hl-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv7hnl-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv7l-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv8hl-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/armv8l-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/athlon-linux 
2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/geode-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/i386-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/i486-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/i586-linux 2025-12-12T16:27:06.092383728+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/i686-linux 2025-12-12T16:27:06.092479861+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ia32e-linux 2025-12-12T16:27:06.092523522+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ia64-linux 2025-12-12T16:27:06.092566353+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/m68k-linux 2025-12-12T16:27:06.092623354+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/mips-linux 2025-12-12T16:27:06.092668525+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/mips64-linux 2025-12-12T16:27:06.092711426+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/mips64el-linux 2025-12-12T16:27:06.092752367+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/mips64r6-linux 2025-12-12T16:27:06.092794009+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/mips64r6el-linux 2025-12-12T16:27:06.092837320+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/mipsel-linux 2025-12-12T16:27:06.092881671+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/mipsr6-linux 2025-12-12T16:27:06.092923242+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/mipsr6el-linux 2025-12-12T16:27:06.092965823+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/noarch-linux 2025-12-12T16:27:06.093008304+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/pentium3-linux 2025-12-12T16:27:06.093049505+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/pentium4-linux 2025-12-12T16:27:06.093091506+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ppc-linux 2025-12-12T16:27:06.093132057+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ppc32dy4-linux 2025-12-12T16:27:06.093191879+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ppc64-linux 2025-12-12T16:27:06.093261710+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ppc64iseries-linux 2025-12-12T16:27:06.093395534+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ppc64le-linux 2025-12-12T16:27:06.093395534+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ppc64p7-linux 2025-12-12T16:27:06.093433255+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ppc64pseries-linux 2025-12-12T16:27:06.093474446+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ppc8260-linux 2025-12-12T16:27:06.093512737+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ppc8560-linux 2025-12-12T16:27:06.093559008+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ppciseries-linux 2025-12-12T16:27:06.093600269+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/ppcpseries-linux 2025-12-12T16:27:06.093640670+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/riscv64-linux 2025-12-12T16:27:06.093683881+00:00 stdout F skipping a dir without errors: 
/usr/lib/rpm/platform/s390-linux 2025-12-12T16:27:06.093723832+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/s390x-linux 2025-12-12T16:27:06.093766933+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/sh-linux 2025-12-12T16:27:06.093810484+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/sh3-linux 2025-12-12T16:27:06.093851505+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/sh4-linux 2025-12-12T16:27:06.093891806+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/sh4a-linux 2025-12-12T16:27:06.093935447+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/sparc-linux 2025-12-12T16:27:06.093977598+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/sparc64-linux 2025-12-12T16:27:06.094019429+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/sparc64v-linux 2025-12-12T16:27:06.094062131+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/sparcv8-linux 2025-12-12T16:27:06.094124822+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/sparcv9-linux 2025-12-12T16:27:06.094164853+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/sparcv9v-linux 2025-12-12T16:27:06.094435040+00:00 stdout F skipping a dir without errors: /usr/lib/rpm/platform/x86_64-linux 2025-12-12T16:27:06.094561883+00:00 stdout F skipping a dir without errors: /usr/lib/swidtag 2025-12-12T16:27:06.094597744+00:00 stdout F skipping a dir without errors: /usr/lib/swidtag/redhat.com 2025-12-12T16:27:06.094657086+00:00 stdout F skipping a dir without errors: /usr/lib/sysctl.d 2025-12-12T16:27:06.094709477+00:00 stdout F skipping a dir without errors: /usr/lib/sysimage 2025-12-12T16:27:06.094735538+00:00 stdout F skipping a dir without errors: /usr/lib/tmpfiles.d 2025-12-12T16:27:06.094911802+00:00 stdout F skipping a dir without errors: /usr/lib64 2025-12-12T16:27:06.095024665+00:00 stdout F skipping a dir without errors: /usr/lib64/X11 2025-12-12T16:27:06.095047305+00:00 stdout F skipping a dir without errors: /usr/lib64/audit 2025-12-12T16:27:06.095089587+00:00 stdout F skipping a dir without errors: /usr/lib64/bpf 2025-12-12T16:27:06.095123057+00:00 stdout F skipping a dir without errors: /usr/lib64/engines-3 2025-12-12T16:27:06.095222530+00:00 stdout F skipping a dir without errors: /usr/lib64/fipscheck 2025-12-12T16:27:06.095290572+00:00 stdout F skipping a dir without errors: /usr/lib64/games 2025-12-12T16:27:06.095406315+00:00 stdout F skipping a dir without errors: /usr/lib64/gawk 2025-12-12T16:27:06.095553548+00:00 stdout F skipping a dir without errors: /usr/lib64/gconv 2025-12-12T16:27:06.095700972+00:00 stdout F skipping a dir without errors: /usr/lib64/gconv/gconv-modules.d 2025-12-12T16:27:06.095712532+00:00 stdout F skipping a dir without errors: /usr/lib64/gio 2025-12-12T16:27:06.095742213+00:00 stdout F skipping a dir without errors: /usr/lib64/gio/modules 2025-12-12T16:27:06.095784884+00:00 stdout F skipping a dir without errors: /usr/lib64/girepository-1.0 2025-12-12T16:27:06.096046551+00:00 stdout F skipping a dir without errors: /usr/lib64/krb5 2025-12-12T16:27:06.096059191+00:00 stdout F skipping a dir without errors: /usr/lib64/krb5/plugins 2025-12-12T16:27:06.096100332+00:00 stdout F skipping a dir without errors: /usr/lib64/krb5/plugins/authdata 2025-12-12T16:27:06.096110012+00:00 stdout F skipping a dir without errors: /usr/lib64/krb5/plugins/kdb 2025-12-12T16:27:06.096194355+00:00 stdout F skipping a dir without 
errors: /usr/lib64/krb5/plugins/libkrb5 2025-12-12T16:27:06.096194355+00:00 stdout F skipping a dir without errors: /usr/lib64/krb5/plugins/preauth 2025-12-12T16:27:06.096261516+00:00 stdout F skipping a dir without errors: /usr/lib64/krb5/plugins/tls 2025-12-12T16:27:06.096660906+00:00 stdout F skipping a dir without errors: /usr/lib64/libdnf 2025-12-12T16:27:06.096660906+00:00 stdout F skipping a dir without errors: /usr/lib64/libdnf/plugins 2025-12-12T16:27:06.098325418+00:00 stdout F skipping a dir without errors: /usr/lib64/libpeas-1.0 2025-12-12T16:27:06.098325418+00:00 stdout F skipping a dir without errors: /usr/lib64/libpeas-1.0/loaders 2025-12-12T16:27:06.098977265+00:00 stdout F skipping a dir without errors: /usr/lib64/lua 2025-12-12T16:27:06.098977265+00:00 stdout F skipping a dir without errors: /usr/lib64/lua/5.4 2025-12-12T16:27:06.099029906+00:00 stdout F skipping a dir without errors: /usr/lib64/ossl-modules 2025-12-12T16:27:06.099083878+00:00 stdout F skipping a dir without errors: /usr/lib64/pkcs11 2025-12-12T16:27:06.106830174+00:00 stdout F skipping a dir without errors: /usr/lib64/pm-utils 2025-12-12T16:27:06.106869075+00:00 stdout F skipping a dir without errors: /usr/lib64/pm-utils/module.d 2025-12-12T16:27:06.106942237+00:00 stdout F skipping a dir without errors: /usr/lib64/pm-utils/power.d 2025-12-12T16:27:06.106942237+00:00 stdout F skipping a dir without errors: /usr/lib64/pm-utils/sleep.d 2025-12-12T16:27:06.106953247+00:00 stdout F skipping a dir without errors: /usr/lib64/rpm-plugins 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/lib64/sasl2 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/lib64/security 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/libexec 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/libexec/awk 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/libexec/coreutils 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/libexec/getconf 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/libexec/openldap 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/libexec/p11-kit 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/libexec/selinux 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/bin 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/etc 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/games 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/include 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/lib 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/lib64 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/lib64/bpf 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/libexec 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/sbin 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/applications 
2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/info 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man1 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man1x 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man2 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man2x 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man3 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man3x 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man4 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man4x 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man5 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man5x 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man6 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man6x 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man7 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man7x 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man8 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man8x 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man9 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/man9x 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/share/man/mann 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/local/src 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/sbin 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/share 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/share/X11 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/share/aclocal 2025-12-12T16:27:06.110213839+00:00 stdout F skipping a dir without errors: /usr/share/appdata 2025-12-12T16:27:06.110271731+00:00 stdout F skipping a dir without errors: /usr/share/applications 2025-12-12T16:27:06.110271731+00:00 stdout F skipping a dir without errors: /usr/share/augeas 2025-12-12T16:27:06.110606379+00:00 stdout F skipping a dir without errors: /usr/share/augeas/lenses 2025-12-12T16:27:06.110606379+00:00 stdout F skipping a dir without errors: /usr/share/awk 2025-12-12T16:27:06.110697702+00:00 stdout F skipping a dir without errors: /usr/share/backgrounds 2025-12-12T16:27:06.110787964+00:00 stdout F skipping a dir without errors: /usr/share/bash-completion 2025-12-12T16:27:06.110830585+00:00 stdout F skipping a dir without errors: /usr/share/bash-completion/completions 2025-12-12T16:27:06.111025610+00:00 stdout F skipping a dir without errors: /usr/share/bash-completion/helpers 2025-12-12T16:27:06.111057291+00:00 stdout F skipping 
a dir without errors: /usr/share/buildinfo 2025-12-12T16:27:06.111118022+00:00 stdout F skipping a dir without errors: /usr/share/crypto-policies 2025-12-12T16:27:06.111210065+00:00 stdout F skipping a dir without errors: /usr/share/crypto-policies/DEFAULT 2025-12-12T16:27:06.111399089+00:00 stdout F skipping a dir without errors: /usr/share/crypto-policies/FIPS 2025-12-12T16:27:06.111574544+00:00 stdout F skipping a dir without errors: /usr/share/crypto-policies/FUTURE 2025-12-12T16:27:06.111746588+00:00 stdout F skipping a dir without errors: /usr/share/crypto-policies/LEGACY 2025-12-12T16:27:06.112365464+00:00 stdout F skipping a dir without errors: /usr/share/crypto-policies/back-ends 2025-12-12T16:27:06.112365464+00:00 stdout F skipping a dir without errors: /usr/share/crypto-policies/back-ends/DEFAULT 2025-12-12T16:27:06.112522738+00:00 stdout F skipping a dir without errors: /usr/share/crypto-policies/back-ends/FIPS 2025-12-12T16:27:06.112770704+00:00 stdout F skipping a dir without errors: /usr/share/crypto-policies/back-ends/FUTURE 2025-12-12T16:27:06.112965409+00:00 stdout F skipping a dir without errors: /usr/share/crypto-policies/back-ends/LEGACY 2025-12-12T16:27:06.113196485+00:00 stdout F skipping a dir without errors: /usr/share/crypto-policies/policies 2025-12-12T16:27:06.113301017+00:00 stdout F skipping a dir without errors: /usr/share/crypto-policies/policies/modules 2025-12-12T16:27:06.113428271+00:00 stdout F skipping a dir without errors: /usr/share/desktop-directories 2025-12-12T16:27:06.113460902+00:00 stdout F skipping a dir without errors: /usr/share/dict 2025-12-12T16:27:06.113492732+00:00 stdout F skipping a dir without errors: /usr/share/doc 2025-12-12T16:27:06.113528023+00:00 stdout F skipping a dir without errors: /usr/share/empty 2025-12-12T16:27:06.113560514+00:00 stdout F skipping a dir without errors: /usr/share/file 2025-12-12T16:27:06.113608165+00:00 stdout F skipping a dir without errors: /usr/share/fontconfig 2025-12-12T16:27:06.113640246+00:00 stdout F skipping a dir without errors: /usr/share/fontconfig/conf.avail 2025-12-12T16:27:06.113700778+00:00 stdout F skipping a dir without errors: /usr/share/fonts 2025-12-12T16:27:06.117233737+00:00 stdout F skipping a dir without errors: /usr/share/fonts/dejavu-sans-fonts 2025-12-12T16:27:06.117233737+00:00 stdout F skipping a dir without errors: /usr/share/games 2025-12-12T16:27:06.117233737+00:00 stdout F skipping a dir without errors: /usr/share/gcc-11 2025-12-12T16:27:06.117233737+00:00 stdout F skipping a dir without errors: /usr/share/gcc-11/python 2025-12-12T16:27:06.117233737+00:00 stdout F skipping a dir without errors: /usr/share/gcc-11/python/libstdcxx 2025-12-12T16:27:06.117233737+00:00 stdout F skipping a dir without errors: /usr/share/gcc-11/python/libstdcxx/__pycache__ 2025-12-12T16:27:06.117233737+00:00 stdout F skipping a dir without errors: /usr/share/gcc-11/python/libstdcxx/v6 2025-12-12T16:27:06.117233737+00:00 stdout F skipping a dir without errors: /usr/share/gcc-11/python/libstdcxx/v6/__pycache__ 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/gdb 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/gdb/auto-load 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/gdb/auto-load/usr 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/gdb/auto-load/usr/lib64 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: 
/usr/share/gdb/auto-load/usr/lib64/__pycache__ 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/glib-2.0 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/glib-2.0/schemas 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/gnome 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/gnupg 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/help 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/i18n 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/i18n/charmaps 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/i18n/locales 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/icons 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/idl 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/info 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/libgpg-error 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/libreport 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/libreport/conf.d 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/libreport/conf.d/plugins 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/libreport/events 2025-12-12T16:27:06.118571841+00:00 stdout F skipping a dir without errors: /usr/share/libreport/workflows 2025-12-12T16:27:06.118613092+00:00 stdout F skipping a dir without errors: /usr/share/licenses 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/alternatives 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/audit-libs 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/bash 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/bzip2-libs 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/coreutils-single 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/crypto-policies 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/cyrus-sasl-lib 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/dejavu-sans-fonts 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/dnf 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/file-libs 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/gawk 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/gdbm-libs 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/glib2 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/glibc 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/gmp 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/gnupg2 2025-12-12T16:27:06.120636413+00:00 stdout F 
skipping a dir without errors: /usr/share/licenses/gnutls 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/gobject-introspection 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/gpgme 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/grep 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/json-c 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/json-glib 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/keyutils-libs 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/krb5-libs 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libarchive 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libassuan 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libcap 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libcap-ng 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libcom_err 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libcurl-minimal 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libdnf 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libevent 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libffi 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libgcc 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libgcrypt 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libgpg-error 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libidn2 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libksba 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libmodulemd 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libmount 2025-12-12T16:27:06.120636413+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libnghttp2 2025-12-12T16:27:06.120671794+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libpeas 2025-12-12T16:27:06.120679294+00:00 stdout F skipping a dir without errors: /usr/share/licenses/librepo 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/librhsm 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libselinux 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libsemanage 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libsepol 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libsigsegv 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libsmartcols 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libsolv 
2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libtasn1 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libunistring 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libusbx 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libuuid 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libverto 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libxcrypt 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libxml2 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libyaml 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/libzstd 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/microdnf 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/mpfr 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/ncurses-base 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/nettle 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/npth 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/openldap 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/openssl-libs 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/p11-kit 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/pcre 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/pcre2-syntax 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/popt 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/readline 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/rpm 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/sed 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/setup 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/shadow-utils 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/systemd 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/tzdata 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/xz-libs 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/licenses/zlib 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/locale 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/lua 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/lua/5.4 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man0p 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir 
without errors: /usr/share/man/man1 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man1p 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man1x 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man2 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man2x 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man3 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man3p 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man3x 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man4 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man4x 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man5 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man5x 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man6 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man6x 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man7 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man7x 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man8 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man8x 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man9 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/man9x 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/man/mann 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/metainfo 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/mime-info 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/misc 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/omf 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/p11-kit 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/p11-kit/modules 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/pixmaps 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/pki 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/pki/ca-trust-legacy 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/pki/ca-trust-source 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/pki/ca-trust-source/anchors 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/pki/ca-trust-source/blocklist 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/sounds 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/tabset 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/A 
2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/E 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/a 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/b 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/c 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/d 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/e 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/g 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/h 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/j 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/k 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/l 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/m 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/n 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/p 2025-12-12T16:27:06.126684296+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/r 2025-12-12T16:27:06.127208749+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/s 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/t 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/v 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/w 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /usr/share/terminfo/x 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /usr/share/themes 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /usr/share/wayland-sessions 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /usr/share/xsessions 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /usr/src 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /usr/src/debug 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /usr/src/kernels 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /util 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/adm 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/cache 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/db 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/empty 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/ftp 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/games 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/kerberos 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/kerberos/krb5 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/kerberos/krb5/user 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without 
errors: /var/lib 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/lib/alternatives 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/lib/dnf 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/lib/games 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/lib/misc 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/lib/rpm 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/lib/rpm-state 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/lib/selinux 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/lib/selinux/tmp 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/local 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/log 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/nis 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/opt 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/preserve 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/spool 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/spool/lpd 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/spool/mail 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/tmp 2025-12-12T16:27:06.135317555+00:00 stdout F skipping a dir without errors: /var/yp 2025-12-12T16:27:06.135317555+00:00 stdout F &{metadata/annotations.yaml manifests/} ././@LongLink0000644000000000000000000000032200000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/extract/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043063033077 5ustar zuulzuul././@LongLink0000644000000000000000000000032700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/extract/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000644000175000017500000000414215117043043033100 0ustar zuulzuul2025-12-12T16:27:07.783776786+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="Using in-cluster kube client config" 2025-12-12T16:27:07.810280796+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="Reading file" file=/bundle/manifests/acme.cert-manager.io_challenges.yaml 2025-12-12T16:27:07.823323497+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="Reading file" file=/bundle/manifests/acme.cert-manager.io_orders.yaml 2025-12-12T16:27:07.824128667+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="Reading file" file=/bundle/manifests/cert-manager-operator-controller-manager-metrics-service_v1_service.yaml 2025-12-12T16:27:07.824245790+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="Reading file" file=/bundle/manifests/cert-manager-operator-metrics-reader_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:27:07.824355463+00:00 stderr F time="2025-12-12T16:27:07Z" 
level=info msg="Reading file" file=/bundle/manifests/cert-manager-operator.clusterserviceversion.yaml 2025-12-12T16:27:07.826873836+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="Reading file" file=/bundle/manifests/cert-manager.io_certificaterequests.yaml 2025-12-12T16:27:07.827421570+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="Reading file" file=/bundle/manifests/cert-manager.io_certificates.yaml 2025-12-12T16:27:07.838145772+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="Reading file" file=/bundle/manifests/cert-manager.io_clusterissuers.yaml 2025-12-12T16:27:07.871607169+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="Reading file" file=/bundle/manifests/cert-manager.io_issuers.yaml 2025-12-12T16:27:07.902586163+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="Reading file" file=/bundle/manifests/operator.openshift.io_certmanagers.yaml 2025-12-12T16:27:07.917546751+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="Reading file" file=/bundle/manifests/operator.openshift.io_istiocsrs.yaml 2025-12-12T16:27:07.919616694+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="Reading file" file=/bundle/metadata/annotations.yaml ././@LongLink0000644000000000000000000000024200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-tqcqf_47102097-389c-44ce-a25f-6b8d25a70e1d/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-cana0000755000175000017500000000000015117043043033020 5ustar zuulzuul././@LongLink0000644000000000000000000000027300000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-tqcqf_47102097-389c-44ce-a25f-6b8d25a70e1d/serve-healthcheck-canary/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-cana0000755000175000017500000000000015117043062033021 5ustar zuulzuul././@LongLink0000644000000000000000000000030000000000000011574 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-tqcqf_47102097-389c-44ce-a25f-6b8d25a70e1d/serve-healthcheck-canary/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-cana0000644000175000017500000000424215117043043033024 0ustar zuulzuul2025-12-12T16:16:45.925515842+00:00 stdout F serving TLS on 8888 2025-12-12T16:16:45.934693156+00:00 stdout F serving TLS on 8443 2025-12-12T16:17:58.169834860+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:18:58.173022312+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:19:58.170019041+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:20:58.171033587+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:21:58.172314238+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:22:58.174956968+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:23:58.181997969+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:24:58.185282721+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:25:58.179447998+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:26:58.207886659+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:27:58.177725569+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:28:58.190891700+00:00 stdout F Serving canary 
healthcheck request 2025-12-12T16:29:58.177801400+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:30:58.190150049+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:31:58.186222252+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:32:58.186274799+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:33:58.193857873+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:34:58.192260191+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:35:58.195784134+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:36:58.184317220+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:37:58.185723550+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:38:58.185645935+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:39:58.194665865+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:40:58.189064999+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:41:58.199235177+00:00 stdout F Serving canary healthcheck request 2025-12-12T16:42:58.193460369+00:00 stdout F Serving canary healthcheck request ././@LongLink0000644000000000000000000000027700000000000011611 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7f5c659b84-6t92c_d55f43e2-46df-4460-b17f-0daa75b89154/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authenticati0000755000175000017500000000000015117043043033130 5ustar zuulzuul././@LongLink0000644000000000000000000000032700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7f5c659b84-6t92c_d55f43e2-46df-4460-b17f-0daa75b89154/authentication-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authenticati0000755000175000017500000000000015117043062033131 5ustar zuulzuul././@LongLink0000644000000000000000000000033400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7f5c659b84-6t92c_d55f43e2-46df-4460-b17f-0daa75b89154/authentication-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authenticati0000644000175000017500000154673115117043043033153 0ustar zuulzuul2025-12-12T16:16:42.900994641+00:00 stdout F Copying system trust bundle 2025-12-12T16:16:43.436372861+00:00 stderr F W1212 16:16:43.435633 1 cmd.go:167] Unable to read initial content of "/tmp/terminate": open /tmp/terminate: no such file or directory 2025-12-12T16:16:43.440625355+00:00 stderr F I1212 16:16:43.440567 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:43.440910382+00:00 stderr F I1212 16:16:43.440676 1 cmd.go:253] Using service-serving-cert provided certificates 2025-12-12T16:16:43.441024405+00:00 stderr F I1212 16:16:43.440997 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 
2025-12-12T16:16:43.441967588+00:00 stderr F I1212 16:16:43.441935 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:43.446160810+00:00 stderr F I1212 16:16:43.446121 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.446245022+00:00 stderr F I1212 16:16:43.446234 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.446271033+00:00 stderr F I1212 16:16:43.446262 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.446293284+00:00 stderr F I1212 16:16:43.446285 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.446314674+00:00 stderr F I1212 16:16:43.446306 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.534550928+00:00 stderr F I1212 16:16:43.534034 1 builder.go:304] cluster-authentication-operator version - 2025-12-12T16:16:43.544152213+00:00 stderr F I1212 16:16:43.543623 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:44.052301469+00:00 stderr F I1212 16:16:44.051317 1 requestheader_controller.go:255] Loaded a new request header values for RequestHeaderAuthRequestController 2025-12-12T16:16:44.070923694+00:00 stderr F I1212 16:16:44.068742 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400 2025-12-12T16:16:44.070923694+00:00 stderr F I1212 16:16:44.068770 1 maxinflight.go:145] "Initialized mutatingChan" len=200 2025-12-12T16:16:44.070923694+00:00 stderr F I1212 16:16:44.068791 1 maxinflight.go:116] "Set denominator for readonly requests" limit=400 2025-12-12T16:16:44.070923694+00:00 stderr F I1212 16:16:44.068814 1 maxinflight.go:120] "Set denominator for mutating requests" limit=200 2025-12-12T16:16:44.087029057+00:00 stderr F I1212 16:16:44.085580 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:44.087029057+00:00 stderr F I1212 16:16:44.085608 1 genericapiserver.go:546] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:16:44.087029057+00:00 stderr F W1212 16:16:44.085634 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:44.087029057+00:00 stderr F W1212 16:16:44.085646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:44.087029057+00:00 stderr F W1212 16:16:44.085651 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:44.087029057+00:00 stderr F W1212 16:16:44.085659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:44.087029057+00:00 stderr F W1212 16:16:44.085663 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:44.087029057+00:00 stderr F W1212 16:16:44.085666 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 
2025-12-12T16:16:44.092463529+00:00 stderr F I1212 16:16:44.092401 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:44.092792238+00:00 stderr F I1212 16:16:44.092758 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:44.092866829+00:00 stderr F I1212 16:16:44.092843 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:44.092875510+00:00 stderr F I1212 16:16:44.092861 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:44.093166307+00:00 stderr F I1212 16:16:44.093145 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:44.093306140+00:00 stderr F I1212 16:16:44.093273 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-authentication-operator.svc\" [serving] validServingFor=[metrics.openshift-authentication-operator.svc,metrics.openshift-authentication-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:09 +0000 UTC to 2027-11-02 07:52:10 +0000 UTC (now=2025-12-12 16:16:44.093229128 +0000 UTC))" 2025-12-12T16:16:44.093467814+00:00 stderr F I1212 16:16:44.093445 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556204\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556203\" (2025-12-12 15:16:43 +0000 UTC to 2028-12-12 15:16:43 +0000 UTC (now=2025-12-12 16:16:44.093425853 +0000 UTC))" 2025-12-12T16:16:44.093477834+00:00 stderr F I1212 16:16:44.093469 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:16:44.093518735+00:00 stderr F I1212 16:16:44.093499 1 genericapiserver.go:696] [graceful-termination] waiting for shutdown to be initiated 2025-12-12T16:16:44.093527385+00:00 stderr F I1212 16:16:44.093520 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:44.093994807+00:00 stderr F I1212 16:16:44.093525 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:44.093994807+00:00 stderr F I1212 16:16:44.093670 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:44.093994807+00:00 stderr F I1212 16:16:44.093698 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:44.094422887+00:00 stderr F I1212 16:16:44.094405 1 leaderelection.go:257] attempting to acquire leader lease openshift-authentication-operator/cluster-authentication-operator-lock... 
2025-12-12T16:16:44.101287305+00:00 stderr F I1212 16:16:44.101168 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:209" 2025-12-12T16:16:44.101543831+00:00 stderr F I1212 16:16:44.101514 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:209" 2025-12-12T16:16:44.101892670+00:00 stderr F I1212 16:16:44.101716 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/apiserver/pkg/authentication/request/headerrequest/requestheader_controller.go:183" 2025-12-12T16:16:44.106400960+00:00 stderr F I1212 16:16:44.106350 1 leaderelection.go:271] successfully acquired lease openshift-authentication-operator/cluster-authentication-operator-lock 2025-12-12T16:16:44.109276760+00:00 stderr F I1212 16:16:44.107401 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-authentication-operator", Name:"cluster-authentication-operator-lock", UID:"8d2fa493-2cfa-49bd-b154-efd4906977bf", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"36984", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' authentication-operator-7f5c659b84-6t92c_06785a6c-e3fa-42f3-adbe-7a076dce8a62 became leader 2025-12-12T16:16:44.167113602+00:00 stderr F I1212 16:16:44.167037 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:44.168802083+00:00 stderr F I1212 16:16:44.168243 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:44.175021145+00:00 stderr F I1212 16:16:44.173954 1 reflector.go:430] "Caches populated" type="*v1.Console" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:44.175021145+00:00 stderr F I1212 16:16:44.174454 1 reflector.go:430] "Caches populated" type="*v1.APIServer" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:44.175021145+00:00 stderr F I1212 16:16:44.174966 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:44.175394894+00:00 stderr F I1212 16:16:44.175333 1 reflector.go:430] "Caches populated" type="*v1.Authentication" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:44.176289086+00:00 stderr F I1212 16:16:44.175565 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:44.176472091+00:00 stderr F I1212 16:16:44.176392 1 reflector.go:430] "Caches populated" type="*v1.OAuth" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:44.182844116+00:00 stderr F I1212 16:16:44.182788 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:44.195231309+00:00 stderr F I1212 16:16:44.193736 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:44.195231309+00:00 stderr F I1212 16:16:44.194310 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:44.195231309+00:00 stderr F 
I1212 16:16:44.194644 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:44.194603293 +0000 UTC))" 2025-12-12T16:16:44.195231309+00:00 stderr F I1212 16:16:44.194930 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-authentication-operator.svc\" [serving] validServingFor=[metrics.openshift-authentication-operator.svc,metrics.openshift-authentication-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:09 +0000 UTC to 2027-11-02 07:52:10 +0000 UTC (now=2025-12-12 16:16:44.194909251 +0000 UTC))" 2025-12-12T16:16:44.195231309+00:00 stderr F I1212 16:16:44.195151 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556204\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556203\" (2025-12-12 15:16:43 +0000 UTC to 2028-12-12 15:16:43 +0000 UTC (now=2025-12-12 16:16:44.195136316 +0000 UTC))" 2025-12-12T16:16:44.196475839+00:00 stderr F I1212 16:16:44.196343 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:44.196809117+00:00 stderr F I1212 16:16:44.196775 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:44.196747136 +0000 UTC))" 2025-12-12T16:16:44.196823927+00:00 stderr F I1212 16:16:44.196809 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:44.196794547 +0000 UTC))" 2025-12-12T16:16:44.196852118+00:00 stderr F I1212 16:16:44.196828 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:44.196816987 +0000 UTC))" 2025-12-12T16:16:44.196880209+00:00 stderr F I1212 16:16:44.196857 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:44.196846548 +0000 UTC))" 2025-12-12T16:16:44.196889039+00:00 stderr F I1212 
16:16:44.196882 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:44.196867918 +0000 UTC))" 2025-12-12T16:16:44.196924020+00:00 stderr F I1212 16:16:44.196901 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:44.196888719 +0000 UTC))" 2025-12-12T16:16:44.196933850+00:00 stderr F I1212 16:16:44.196923 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:44.19691197 +0000 UTC))" 2025-12-12T16:16:44.196964831+00:00 stderr F I1212 16:16:44.196941 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:44.19692931 +0000 UTC))" 2025-12-12T16:16:44.197223087+00:00 stderr F I1212 16:16:44.197193 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-authentication-operator.svc\" [serving] validServingFor=[metrics.openshift-authentication-operator.svc,metrics.openshift-authentication-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:09 +0000 UTC to 2027-11-02 07:52:10 +0000 UTC (now=2025-12-12 16:16:44.197161186 +0000 UTC))" 2025-12-12T16:16:44.197958815+00:00 stderr F I1212 16:16:44.197378 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556204\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556203\" (2025-12-12 15:16:43 +0000 UTC to 2028-12-12 15:16:43 +0000 UTC (now=2025-12-12 16:16:44.197363131 +0000 UTC))" 2025-12-12T16:16:44.320098017+00:00 stderr F I1212 16:16:44.317369 1 reflector.go:430] "Caches populated" type="*v1.Ingress" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:44.515323874+00:00 stderr F I1212 16:16:44.515263 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:44.719782015+00:00 stderr F I1212 16:16:44.719051 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" 
reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.720733 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.720815 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", 
"AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.721035 1 base_controller.go:76] Waiting for caches to sync for openshift-oauth-apiserver-EncryptionCondition 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.722318 1 base_controller.go:76] Waiting for caches to sync for openshift-authentication-UnsupportedConfigOverrides 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723330 1 base_controller.go:76] Waiting for caches to sync for ConfigObserver 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723411 1 base_controller.go:76] Waiting for caches to sync for OAuthServer-WorkloadWorkloadController 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723432 1 base_controller.go:76] Waiting for caches to sync for authentication-ManagementState 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723451 1 base_controller.go:76] Waiting for caches to sync for 
openshift-authentication-Metadata 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723478 1 base_controller.go:76] Waiting for caches to sync for OAuthClientsController_SwitchedController 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723505 1 base_controller.go:76] Waiting for caches to sync for openshift-authentication-PayloadConfig 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723547 1 base_controller.go:76] Waiting for caches to sync for RouterCertsDomainValidationController 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723559 1 base_controller.go:76] Waiting for caches to sync for openshift-authentication-ServiceCA 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723820 1 base_controller.go:76] Waiting for caches to sync for OpenshiftAuthenticationStaticResources-StaticResources 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723835 1 base_controller.go:76] Waiting for caches to sync for WellKnownReadyController 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723851 1 base_controller.go:76] Waiting for caches to sync for OAuthServerRouteEndpointAccessibleController 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723881 1 base_controller.go:76] Waiting for caches to sync for OAuthServerServiceEndpointAccessibleController 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723893 1 base_controller.go:76] Waiting for caches to sync for OAuthServerServiceEndpointsEndpointAccessibleController 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723905 1 base_controller.go:76] Waiting for caches to sync for openshift-authentication-IngressNodesAvailable 2025-12-12T16:16:44.729248436+00:00 stderr F I1212 16:16:44.723923 1 base_controller.go:76] Waiting for caches to sync for ProxyConfigController 2025-12-12T16:16:44.729248436+00:00 stderr P I1212 16:16:44.723935 1 base_controller.go:76] Waiting for caches to sync for CustomRouteContr 2025-12-12T16:16:44.729317598+00:00 stderr F oller 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.723948 1 base_controller.go:76] Waiting for caches to sync for TrustDistributionController 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.723959 1 base_controller.go:76] Waiting for caches to sync for openshift-authentication-RemoveStaleConditions 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.723970 1 base_controller.go:76] Waiting for caches to sync for openshift-authentication-IngressState 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.723984 1 base_controller.go:76] Waiting for caches to sync for ConfigObserver 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724002 1 base_controller.go:76] Waiting for caches to sync for OpenShiftAuthenticatorCertRequester 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724016 1 base_controller.go:76] Waiting for caches to sync for WebhookAuthenticatorController 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724029 1 base_controller.go:76] Waiting for caches to sync for WebhookAuthenticatorCertApprover_OpenShiftAuthenticator 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724053 1 base_controller.go:76] Waiting for caches to sync for NamespaceFinalizerController_openshift-oauth-apiserver 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724161 1 base_controller.go:76] Waiting for caches to sync for APIServerStaticResources-StaticResources 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724207 1 
base_controller.go:76] Waiting for caches to sync for openshift-oauth-apiserver-EncryptionKey 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724228 1 base_controller.go:76] Waiting for caches to sync for openshift-oauth-apiserver-EncryptionPrune 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724241 1 base_controller.go:76] Waiting for caches to sync for openshift-oauth-apiserver-EncryptionMigration 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724272 1 base_controller.go:76] Waiting for caches to sync for OAuthAPIServerController-WorkloadWorkloadController 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724286 1 base_controller.go:76] Waiting for caches to sync for SecretRevisionPruneController 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724303 1 base_controller.go:76] Waiting for caches to sync for RevisionController 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724316 1 base_controller.go:76] Waiting for caches to sync for openshift-oauth-apiserver-EncryptionState 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724334 1 base_controller.go:76] Waiting for caches to sync for openshift-apiserver-APIService 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724349 1 base_controller.go:76] Waiting for caches to sync for auditPolicyController 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.724460 1 base_controller.go:76] Waiting for caches to sync for StatusSyncer_authentication 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.726334 1 base_controller.go:76] Waiting for caches to sync for oauth-server 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.726351 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer 2025-12-12T16:16:44.729317598+00:00 stderr F I1212 16:16:44.726499 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.733312776+00:00 stderr F E1212 16:16:44.729888 1 reflector.go:200] "Failed to watch" err="failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io)" reflector="github.com/openshift/client-go/route/informers/externalversions/factory.go:125" type="*v1.Route" 2025-12-12T16:16:44.733312776+00:00 stderr F I1212 16:16:44.732371 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.737361624+00:00 stderr F I1212 16:16:44.733734 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.737361624+00:00 stderr F I1212 16:16:44.733768 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:44.737361624+00:00 stderr F I1212 16:16:44.733895 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.737361624+00:00 stderr F I1212 16:16:44.734092 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.737361624+00:00 stderr F I1212 16:16:44.735154 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.737361624+00:00 stderr F I1212 16:16:44.735365 1 reflector.go:430] "Caches populated" type="*v1.DaemonSet" 
reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.737361624+00:00 stderr F I1212 16:16:44.735631 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.745488873+00:00 stderr F I1212 16:16:44.745069 1 reflector.go:430] "Caches populated" type="*v1.CertificateSigningRequest" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.746660581+00:00 stderr F I1212 16:16:44.746594 1 reflector.go:430] "Caches populated" type="*v1.IngressController" reflector="github.com/openshift/client-go/operator/informers/externalversions/factory.go:125" 2025-12-12T16:16:44.747589404+00:00 stderr F I1212 16:16:44.747519 1 reflector.go:430] "Caches populated" type="*v1alpha1.StorageVersionMigration" reflector="sigs.k8s.io/kube-storage-version-migrator/pkg/clients/informer/factory.go:132" 2025-12-12T16:16:44.747649716+00:00 stderr F I1212 16:16:44.747599 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:44.747892571+00:00 stderr F I1212 16:16:44.747842 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.749169423+00:00 stderr F I1212 16:16:44.749103 1 reflector.go:430] "Caches populated" type="*v1.KubeAPIServer" reflector="github.com/openshift/client-go/operator/informers/externalversions/factory.go:125" 2025-12-12T16:16:44.749169423+00:00 stderr F I1212 16:16:44.749144 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.754545914+00:00 stderr F I1212 16:16:44.754454 1 reflector.go:430] "Caches populated" type="*v1.APIService" reflector="k8s.io/kube-aggregator/pkg/client/informers/externalversions/factory.go:141" 2025-12-12T16:16:44.756123202+00:00 stderr F I1212 16:16:44.756065 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.760679444+00:00 stderr F I1212 16:16:44.760625 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:44.768130726+00:00 stderr F I1212 16:16:44.767823 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=authentications" reflector="k8s.io/client-go/dynamic/dynamicinformer/informer.go:108" 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824288 1 base_controller.go:82] Caches are synced for openshift-authentication-UnsupportedConfigOverrides 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824330 1 base_controller.go:119] Starting #1 worker of openshift-authentication-UnsupportedConfigOverrides controller ... 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824391 1 base_controller.go:82] Caches are synced for authentication-ManagementState 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824425 1 base_controller.go:119] Starting #1 worker of authentication-ManagementState controller ... 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824536 1 base_controller.go:82] Caches are synced for openshift-authentication-RemoveStaleConditions 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824541 1 base_controller.go:119] Starting #1 worker of openshift-authentication-RemoveStaleConditions controller ... 
2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824563 1 base_controller.go:82] Caches are synced for OpenShiftAuthenticatorCertRequester 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824567 1 base_controller.go:119] Starting #1 worker of OpenShiftAuthenticatorCertRequester controller ... 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824585 1 base_controller.go:82] Caches are synced for WebhookAuthenticatorCertApprover_OpenShiftAuthenticator 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824589 1 base_controller.go:119] Starting #1 worker of WebhookAuthenticatorCertApprover_OpenShiftAuthenticator controller ... 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824614 1 base_controller.go:82] Caches are synced for NamespaceFinalizerController_openshift-oauth-apiserver 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824631 1 base_controller.go:119] Starting #1 worker of NamespaceFinalizerController_openshift-oauth-apiserver controller ... 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824641 1 base_controller.go:82] Caches are synced for APIServerStaticResources-StaticResources 2025-12-12T16:16:44.826398278+00:00 stderr F I1212 16:16:44.824646 1 base_controller.go:119] Starting #1 worker of APIServerStaticResources-StaticResources controller ... 2025-12-12T16:16:44.827485825+00:00 stderr F I1212 16:16:44.827403 1 base_controller.go:82] Caches are synced for LoggingSyncer 2025-12-12T16:16:44.827485825+00:00 stderr F I1212 16:16:44.827419 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ... 2025-12-12T16:16:44.828292154+00:00 stderr F I1212 16:16:44.827827 1 base_controller.go:82] Caches are synced for SecretRevisionPruneController 2025-12-12T16:16:44.828292154+00:00 stderr F I1212 16:16:44.827839 1 base_controller.go:119] Starting #1 worker of SecretRevisionPruneController controller ... 2025-12-12T16:16:44.828292154+00:00 stderr F I1212 16:16:44.827892 1 base_controller.go:82] Caches are synced for RevisionController 2025-12-12T16:16:44.828292154+00:00 stderr F I1212 16:16:44.827896 1 base_controller.go:119] Starting #1 worker of RevisionController controller ... 2025-12-12T16:16:44.828292154+00:00 stderr F I1212 16:16:44.827917 1 base_controller.go:82] Caches are synced for auditPolicyController 2025-12-12T16:16:44.828292154+00:00 stderr F I1212 16:16:44.827921 1 base_controller.go:119] Starting #1 worker of auditPolicyController controller ... 2025-12-12T16:16:44.828292154+00:00 stderr F I1212 16:16:44.827933 1 base_controller.go:82] Caches are synced for StatusSyncer_authentication 2025-12-12T16:16:44.828292154+00:00 stderr F I1212 16:16:44.827938 1 base_controller.go:119] Starting #1 worker of StatusSyncer_authentication controller ... 
2025-12-12T16:16:44.831271927+00:00 stderr F I1212 16:16:44.830468 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:44Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nIngressStateEndpointsDegraded: All 1 endpoints for oauth-server are reporting 'not ready'\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerDegraded: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused\nOAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready","reason":"APIServerDeployment_UnavailablePod::CustomRouteController_SyncError::IngressStateEndpoints_NonReadyEndpoints::OAuthServerDeployment_UnavailablePod::OAuthServerRouteEndpointAccessibleController_SyncError::OAuthServerServiceEndpointAccessibleController_SyncError::OAuthServerServiceEndpointsEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:26Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:46Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-oauth-apiserver pods available on any node.\nAPIServicesAvailable: PreconditionNotReady\nOAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerAvailable: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused","reason":"APIServerDeployment_NoPod::APIServices_PreconditionNotReady::OAuthServerDeployment_NoPod::OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::OAuthServerServiceEndpointAccessibleController_EndpointUnavailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:44.929745271+00:00 stderr F I1212 16:16:44.924682 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Degraded changed from False to True ("APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle 
the request (get routes.route.openshift.io oauth-openshift)\nIngressStateEndpointsDegraded: All 1 endpoints for oauth-server are reporting 'not ready'\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerDegraded: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused\nOAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready") 2025-12-12T16:16:44.929745271+00:00 stderr F I1212 16:16:44.929140 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:45.126286580+00:00 stderr F I1212 16:16:45.125645 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:45.339311931+00:00 stderr F I1212 16:16:45.339206 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:45.424468160+00:00 stderr F I1212 16:16:45.424396 1 base_controller.go:82] Caches are synced for openshift-oauth-apiserver-EncryptionCondition 2025-12-12T16:16:45.424468160+00:00 stderr F I1212 16:16:45.424448 1 base_controller.go:119] Starting #1 worker of openshift-oauth-apiserver-EncryptionCondition controller ... 2025-12-12T16:16:45.424624544+00:00 stderr F I1212 16:16:45.424542 1 base_controller.go:82] Caches are synced for openshift-authentication-IngressNodesAvailable 2025-12-12T16:16:45.424624544+00:00 stderr F I1212 16:16:45.424555 1 base_controller.go:119] Starting #1 worker of openshift-authentication-IngressNodesAvailable controller ... 2025-12-12T16:16:45.424624544+00:00 stderr F I1212 16:16:45.424607 1 base_controller.go:82] Caches are synced for openshift-oauth-apiserver-EncryptionKey 2025-12-12T16:16:45.424624544+00:00 stderr F I1212 16:16:45.424614 1 base_controller.go:119] Starting #1 worker of openshift-oauth-apiserver-EncryptionKey controller ... 2025-12-12T16:16:45.424642284+00:00 stderr F I1212 16:16:45.424628 1 base_controller.go:82] Caches are synced for openshift-oauth-apiserver-EncryptionPrune 2025-12-12T16:16:45.424642284+00:00 stderr F I1212 16:16:45.424633 1 base_controller.go:119] Starting #1 worker of openshift-oauth-apiserver-EncryptionPrune controller ... 2025-12-12T16:16:45.424760167+00:00 stderr F I1212 16:16:45.424651 1 base_controller.go:82] Caches are synced for openshift-oauth-apiserver-EncryptionMigration 2025-12-12T16:16:45.424760167+00:00 stderr F I1212 16:16:45.424661 1 base_controller.go:119] Starting #1 worker of openshift-oauth-apiserver-EncryptionMigration controller ... 2025-12-12T16:16:45.427157955+00:00 stderr F I1212 16:16:45.426331 1 base_controller.go:82] Caches are synced for openshift-oauth-apiserver-EncryptionState 2025-12-12T16:16:45.427157955+00:00 stderr F I1212 16:16:45.426369 1 base_controller.go:119] Starting #1 worker of openshift-oauth-apiserver-EncryptionState controller ... 
2025-12-12T16:16:45.545822123+00:00 stderr F I1212 16:16:45.543906 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:45.728135204+00:00 stderr F I1212 16:16:45.727663 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:45.926986927+00:00 stderr F I1212 16:16:45.923418 1 request.go:752] "Waited before sending request" delay="1.199501054s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/nodes?limit=500&resourceVersion=0" 2025-12-12T16:16:45.926986927+00:00 stderr F I1212 16:16:45.925938 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:46.024788365+00:00 stderr F I1212 16:16:46.024701 1 base_controller.go:82] Caches are synced for OAuthAPIServerController-WorkloadWorkloadController 2025-12-12T16:16:46.024788365+00:00 stderr F I1212 16:16:46.024729 1 base_controller.go:119] Starting #1 worker of OAuthAPIServerController-WorkloadWorkloadController controller ... 2025-12-12T16:16:46.124903840+00:00 stderr F E1212 16:16:46.124621 1 reflector.go:200] "Failed to watch" err="failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io)" reflector="github.com/openshift/client-go/route/informers/externalversions/factory.go:125" type="*v1.Route" 2025-12-12T16:16:46.124903840+00:00 stderr F I1212 16:16:46.124668 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:46.124903840+00:00 stderr F I1212 16:16:46.124746 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:46.129644455+00:00 stderr F I1212 16:16:46.129417 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:46.332275352+00:00 stderr F I1212 16:16:46.331169 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:46.425854157+00:00 stderr F I1212 16:16:46.424026 1 base_controller.go:82] Caches are synced for ConfigObserver 2025-12-12T16:16:46.425854157+00:00 stderr F I1212 16:16:46.424071 1 base_controller.go:119] Starting #1 worker of ConfigObserver controller ... 
2025-12-12T16:16:46.541325096+00:00 stderr F I1212 16:16:46.541146 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:46.741354800+00:00 stderr F I1212 16:16:46.739697 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:46.931002570+00:00 stderr F I1212 16:16:46.928362 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:47.122066265+00:00 stderr F I1212 16:16:47.121511 1 request.go:752] "Waited before sending request" delay="2.397294698s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication-operator/secrets?limit=500&resourceVersion=0" 2025-12-12T16:16:47.127665901+00:00 stderr F I1212 16:16:47.124748 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:47.326519936+00:00 stderr F I1212 16:16:47.325985 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:47.544988980+00:00 stderr F I1212 16:16:47.543158 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:47.624754048+00:00 stderr F I1212 16:16:47.624322 1 base_controller.go:82] Caches are synced for WebhookAuthenticatorController 2025-12-12T16:16:47.624819249+00:00 stderr F I1212 16:16:47.624806 1 base_controller.go:119] Starting #1 worker of WebhookAuthenticatorController controller ... 2025-12-12T16:16:47.624865720+00:00 stderr F I1212 16:16:47.624856 1 base_controller.go:82] Caches are synced for openshift-apiserver-APIService 2025-12-12T16:16:47.624890541+00:00 stderr F I1212 16:16:47.624881 1 base_controller.go:119] Starting #1 worker of openshift-apiserver-APIService controller ... 2025-12-12T16:16:47.624952132+00:00 stderr F I1212 16:16:47.624942 1 base_controller.go:82] Caches are synced for OAuthClientsController_SwitchedController 2025-12-12T16:16:47.624976443+00:00 stderr F I1212 16:16:47.624967 1 base_controller.go:119] Starting #1 worker of OAuthClientsController_SwitchedController controller ... 
2025-12-12T16:16:47.629508974+00:00 stderr F I1212 16:16:47.629451 1 base_controller.go:76] Waiting for caches to sync for OAuthClientsController 2025-12-12T16:16:47.648461036+00:00 stderr F I1212 16:16:47.648360 1 reflector.go:430] "Caches populated" type="*v1.OAuthClient" reflector="github.com/openshift/cluster-authentication-operator/pkg/controllers/oauthclientscontroller/oauthclientscontroller.go:57" 2025-12-12T16:16:47.745372262+00:00 stderr F I1212 16:16:47.745314 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:47.800251342+00:00 stderr F I1212 16:16:47.800079 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:44Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nIngressStateEndpointsDegraded: All 1 endpoints for oauth-server are reporting 'not ready'\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerDegraded: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused\nOAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready","reason":"APIServerDeployment_UnavailablePod::CustomRouteController_SyncError::IngressStateEndpoints_NonReadyEndpoints::OAuthServerDeployment_UnavailablePod::OAuthServerRouteEndpointAccessibleController_SyncError::OAuthServerServiceEndpointAccessibleController_SyncError::OAuthServerServiceEndpointsEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:26Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:46Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-oauth-apiserver pods available on any node.\nOAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerAvailable: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused","reason":"APIServerDeployment_NoPod::OAuthServerDeployment_NoPod::OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::OAuthServerServiceEndpointAccessibleController_EndpointUnavailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:47.829996999+00:00 stderr F I1212 16:16:47.829935 1 
event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Available message changed from "APIServerDeploymentAvailable: no apiserver.openshift-oauth-apiserver pods available on any node.\nAPIServicesAvailable: PreconditionNotReady\nOAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerAvailable: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused" to "APIServerDeploymentAvailable: no apiserver.openshift-oauth-apiserver pods available on any node.\nOAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerAvailable: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused" 2025-12-12T16:16:47.932585803+00:00 stderr F I1212 16:16:47.932523 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:48.122115710+00:00 stderr F I1212 16:16:48.122060 1 request.go:752] "Waited before sending request" delay="3.397647551s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/default/endpoints?limit=500&resourceVersion=0" 2025-12-12T16:16:48.127338308+00:00 stderr F I1212 16:16:48.127286 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:48.127504342+00:00 stderr F I1212 16:16:48.127492 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:48.131206342+00:00 stderr F I1212 16:16:48.131173 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:48.193284268+00:00 stderr F E1212 16:16:48.192478 1 reflector.go:200] "Failed to watch" err="failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io)" reflector="github.com/openshift/client-go/route/informers/externalversions/factory.go:125" type="*v1.Route" 2025-12-12T16:16:48.338169435+00:00 stderr F I1212 16:16:48.338117 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:48.529696411+00:00 stderr F I1212 16:16:48.529644 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:48.529884606+00:00 stderr F I1212 16:16:48.529866 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:48.531630829+00:00 stderr F I1212 16:16:48.531615 1 warnings.go:110] 
"Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:48.624158748+00:00 stderr F I1212 16:16:48.624113 1 base_controller.go:82] Caches are synced for openshift-authentication-IngressState 2025-12-12T16:16:48.624221109+00:00 stderr F I1212 16:16:48.624210 1 base_controller.go:119] Starting #1 worker of openshift-authentication-IngressState controller ... 2025-12-12T16:16:48.723330179+00:00 stderr F I1212 16:16:48.722889 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:48.953231292+00:00 stderr F I1212 16:16:48.950257 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:49.024191574+00:00 stderr F I1212 16:16:49.024092 1 base_controller.go:82] Caches are synced for OAuthServerServiceEndpointAccessibleController 2025-12-12T16:16:49.024299787+00:00 stderr F I1212 16:16:49.024288 1 base_controller.go:119] Starting #1 worker of OAuthServerServiceEndpointAccessibleController controller ... 2025-12-12T16:16:49.024337778+00:00 stderr F I1212 16:16:49.024260 1 base_controller.go:82] Caches are synced for OAuthServerServiceEndpointsEndpointAccessibleController 2025-12-12T16:16:49.024388429+00:00 stderr F I1212 16:16:49.024378 1 base_controller.go:119] Starting #1 worker of OAuthServerServiceEndpointsEndpointAccessibleController controller ... 2025-12-12T16:16:49.027246699+00:00 stderr F E1212 16:16:49.027227 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:16:49.043114696+00:00 stderr F E1212 16:16:49.043056 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:16:49.055057988+00:00 stderr F E1212 16:16:49.045784 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused" 2025-12-12T16:16:49.059572738+00:00 stderr F E1212 16:16:49.059529 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:16:49.069235364+00:00 stderr F E1212 16:16:49.066759 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused" 2025-12-12T16:16:49.097240058+00:00 stderr F E1212 16:16:49.094070 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:16:49.123410067+00:00 stderr F I1212 16:16:49.123316 1 request.go:752] "Waited before sending request" delay="4.398351103s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/secrets?limit=500&resourceVersion=0" 2025-12-12T16:16:49.161133378+00:00 stderr F E1212 16:16:49.160707 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:16:49.196849390+00:00 
stderr F I1212 16:16:49.195722 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:49.200942069+00:00 stderr F E1212 16:16:49.200873 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused" 2025-12-12T16:16:49.223820308+00:00 stderr F I1212 16:16:49.223718 1 base_controller.go:82] Caches are synced for openshift-authentication-ServiceCA 2025-12-12T16:16:49.223820308+00:00 stderr F I1212 16:16:49.223770 1 base_controller.go:119] Starting #1 worker of openshift-authentication-ServiceCA controller ... 2025-12-12T16:16:49.223870749+00:00 stderr F I1212 16:16:49.223822 1 base_controller.go:82] Caches are synced for ConfigObserver 2025-12-12T16:16:49.223870749+00:00 stderr F I1212 16:16:49.223829 1 base_controller.go:119] Starting #1 worker of ConfigObserver controller ... 2025-12-12T16:16:49.225093659+00:00 stderr F I1212 16:16:49.224450 1 base_controller.go:82] Caches are synced for RouterCertsDomainValidationController 2025-12-12T16:16:49.225138260+00:00 stderr F I1212 16:16:49.225096 1 base_controller.go:119] Starting #1 worker of RouterCertsDomainValidationController controller ... 2025-12-12T16:16:49.225745865+00:00 stderr F I1212 16:16:49.225707 1 base_controller.go:82] Caches are synced for TrustDistributionController 2025-12-12T16:16:49.225745865+00:00 stderr F I1212 16:16:49.225721 1 base_controller.go:119] Starting #1 worker of TrustDistributionController controller ... 2025-12-12T16:16:49.227106238+00:00 stderr F I1212 16:16:49.227037 1 base_controller.go:82] Caches are synced for oauth-server 2025-12-12T16:16:49.227106238+00:00 stderr F I1212 16:16:49.227089 1 base_controller.go:119] Starting #1 worker of oauth-server controller ... 2025-12-12T16:16:49.237800899+00:00 stderr F E1212 16:16:49.237226 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused" 2025-12-12T16:16:49.259006957+00:00 stderr F E1212 16:16:49.258915 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:16:49.304620221+00:00 stderr F E1212 16:16:49.304536 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused" 2025-12-12T16:16:49.330017931+00:00 stderr F I1212 16:16:49.328448 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:49.398067131+00:00 stderr F E1212 16:16:49.397514 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused" 2025-12-12T16:16:49.426453444+00:00 stderr F I1212 16:16:49.426376 1 base_controller.go:82] Caches are synced for OpenshiftAuthenticationStaticResources-StaticResources 2025-12-12T16:16:49.426453444+00:00 stderr F I1212 16:16:49.426405 1 base_controller.go:119] Starting #1 worker of OpenshiftAuthenticationStaticResources-StaticResources controller ... 
2025-12-12T16:16:49.430255627+00:00 stderr F E1212 16:16:49.427752 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:16:49.592586440+00:00 stderr F E1212 16:16:49.591111 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused" 2025-12-12T16:16:49.698411904+00:00 stderr F I1212 16:16:49.695214 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:44Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nIngressStateEndpointsDegraded: All 1 endpoints for oauth-server are reporting 'not ready'\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerDegraded: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused","reason":"APIServerDeployment_UnavailablePod::CustomRouteController_SyncError::IngressStateEndpoints_NonReadyEndpoints::OAuthServerDeployment_UnavailablePod::OAuthServerRouteEndpointAccessibleController_SyncError::OAuthServerServiceEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:26Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:46Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-oauth-apiserver pods available on any node.\nOAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerAvailable: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused","reason":"APIServerDeployment_NoPod::OAuthServerDeployment_NoPod::OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::OAuthServerServiceEndpointAccessibleController_EndpointUnavailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:49.716522756+00:00 stderr F I1212 16:16:49.715344 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:44Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for 
apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nIngressStateEndpointsDegraded: All 1 endpoints for oauth-server are reporting 'not ready'\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerDegraded: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused","reason":"APIServerDeployment_UnavailablePod::CustomRouteController_SyncError::IngressStateEndpoints_NonReadyEndpoints::OAuthServerDeployment_UnavailablePod::OAuthServerRouteEndpointAccessibleController_SyncError::OAuthServerServiceEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:26Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:46Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-oauth-apiserver pods available on any node.\nOAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerAvailable: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused","reason":"APIServerDeployment_NoPod::OAuthServerDeployment_NoPod::OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::OAuthServerServiceEndpointAccessibleController_EndpointUnavailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:49.716522756+00:00 stderr F I1212 16:16:49.715593 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Degraded message changed from "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nIngressStateEndpointsDegraded: All 1 endpoints for oauth-server are reporting 'not ready'\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection 
refused\nOAuthServerServiceEndpointAccessibleControllerDegraded: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused\nOAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready" to "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nIngressStateEndpointsDegraded: All 1 endpoints for oauth-server are reporting 'not ready'\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerDegraded: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused" 2025-12-12T16:16:49.738986445+00:00 stderr F E1212 16:16:49.738912 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:16:49.809044015+00:00 stderr F I1212 16:16:49.808487 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:44Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nIngressStateEndpointsDegraded: All 1 endpoints for oauth-server are reporting 'not ready'\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerDegraded: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused","reason":"APIServerDeployment_UnavailablePod::CustomRouteController_SyncError::IngressStateEndpoints_NonReadyEndpoints::OAuthServerDeployment_UnavailablePod::OAuthServerRouteEndpointAccessibleController_SyncError::OAuthServerServiceEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:26Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:46Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-oauth-apiserver pods available on any node.\nOAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection 
refused","reason":"APIServerDeployment_NoPod::OAuthServerDeployment_NoPod::OAuthServerRouteEndpointAccessibleController_EndpointUnavailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:49.830708694+00:00 stderr F I1212 16:16:49.829227 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Available message changed from "APIServerDeploymentAvailable: no apiserver.openshift-oauth-apiserver pods available on any node.\nOAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerAvailable: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused" to "APIServerDeploymentAvailable: no apiserver.openshift-oauth-apiserver pods available on any node.\nOAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:49.845259629+00:00 stderr F I1212 16:16:49.844170 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:44Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nIngressStateEndpointsDegraded: All 1 endpoints for oauth-server are reporting 'not ready'\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused","reason":"APIServerDeployment_UnavailablePod::CustomRouteController_SyncError::IngressStateEndpoints_NonReadyEndpoints::OAuthServerDeployment_UnavailablePod::OAuthServerRouteEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:26Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:46Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-oauth-apiserver pods available on any node.\nOAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: 
Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused","reason":"APIServerDeployment_NoPod::OAuthServerDeployment_NoPod::OAuthServerRouteEndpointAccessibleController_EndpointUnavailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:49.857483298+00:00 stderr F I1212 16:16:49.857416 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Degraded message changed from "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nIngressStateEndpointsDegraded: All 1 endpoints for oauth-server are reporting 'not ready'\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused\nOAuthServerServiceEndpointAccessibleControllerDegraded: Get \"https://10.217.5.136:443/healthz\": dial tcp 10.217.5.136:443: connect: connection refused" to "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nIngressStateEndpointsDegraded: All 1 endpoints for oauth-server are reporting 'not ready'\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:50.124324992+00:00 stderr F I1212 16:16:50.124158 1 request.go:752] "Waited before sending request" delay="5.296953461s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/secrets/openshift-authenticator-certs" 2025-12-12T16:16:50.133517517+00:00 stderr F I1212 16:16:50.133015 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'NoValidCertificateFound' No valid client certificate for OpenShiftAuthenticatorCertRequester is found: part of the certificate is expired: sub: CN=system:serviceaccount:openshift-oauth-apiserver:openshift-authenticator, notAfter: 2025-12-03 08:35:50 +0000 UTC 2025-12-12T16:16:50.138484778+00:00 stderr F I1212 16:16:50.138431 1 event.go:377] 
Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CSRCreated' A csr "system:openshift:openshift-authenticator-d52cl" is created for OpenShiftAuthenticatorCertRequester 2025-12-12T16:16:50.138892948+00:00 stderr F I1212 16:16:50.138864 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CSRApproval' The CSR "system:openshift:openshift-authenticator-d52cl" has been approved 2025-12-12T16:16:51.126728646+00:00 stderr F I1212 16:16:51.125828 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:51.321942762+00:00 stderr F I1212 16:16:51.321836 1 request.go:752] "Waited before sending request" delay="2.09676916s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/configmaps/v4-0-config-system-service-ca" 2025-12-12T16:16:51.905490299+00:00 stderr F E1212 16:16:51.903390 1 reflector.go:200] "Failed to watch" err="failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io)" reflector="github.com/openshift/client-go/route/informers/externalversions/factory.go:125" type="*v1.Route" 2025-12-12T16:16:52.325160745+00:00 stderr F I1212 16:16:52.323000 1 request.go:752] "Waited before sending request" delay="2.375554818s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api" 2025-12-12T16:16:52.867539747+00:00 stderr F I1212 16:16:52.863466 1 annotations.go:76] Updating "auth.openshift.io/certificate-not-before" annotation for openshift-authenticator-certs/openshift-oauth-apiserver, diff:   string( 2025-12-12T16:16:52.867539747+00:00 stderr F -  "2025-11-03T08:35:50Z", 2025-12-12T16:16:52.867539747+00:00 stderr F +  "2025-12-12T16:11:50Z", 2025-12-12T16:16:52.867539747+00:00 stderr F   ) 2025-12-12T16:16:52.867539747+00:00 stderr F I1212 16:16:52.864431 1 annotations.go:82] Updating "auth.openshift.io/certificate-not-after" annotation for openshift-authenticator-certs/openshift-oauth-apiserver, diff:   string( 2025-12-12T16:16:52.867539747+00:00 stderr F -  "2025-12-03T08:35:50Z", 2025-12-12T16:16:52.867539747+00:00 stderr F +  "2026-01-11T16:11:50Z", 2025-12-12T16:16:52.867539747+00:00 stderr F   ) 2025-12-12T16:16:53.326021359+00:00 stderr F I1212 16:16:53.324675 1 request.go:752] "Waited before sending request" delay="2.788068638s" reason="client-side throttling, not priority and fairness" verb="POST" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication-operator/events" 2025-12-12T16:16:54.033225095+00:00 stderr F I1212 16:16:54.032347 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:44Z","message":"APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication 
()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused","reason":"APIServerDeployment_UnavailablePod::CustomRouteController_SyncError::OAuthServerDeployment_UnavailablePod::OAuthServerRouteEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:26Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:46Z","message":"APIServerDeploymentAvailable: no apiserver.openshift-oauth-apiserver pods available on any node.\nOAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused","reason":"APIServerDeployment_NoPod::OAuthServerDeployment_NoPod::OAuthServerRouteEndpointAccessibleController_EndpointUnavailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:54.059129217+00:00 stderr F I1212 16:16:54.059037 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Degraded message changed from "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nIngressStateEndpointsDegraded: All 1 endpoints for oauth-server are reporting 'not ready'\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused" to "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:54.527648546+00:00 stderr F I1212 16:16:54.526574 1 request.go:752] "Waited before sending request" delay="2.796005232s" reason="client-side throttling, not priority and fairness" verb="GET" 
URL="https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/oauth-serving-cert" 2025-12-12T16:16:55.342318746+00:00 stderr F I1212 16:16:55.331274 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ClientCertificateCreated' A new client certificate for OpenShiftAuthenticatorCertRequester is available 2025-12-12T16:16:55.722567629+00:00 stderr F I1212 16:16:55.721302 1 request.go:752] "Waited before sending request" delay="2.193283378s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/configmaps/trusted-ca-bundle" 2025-12-12T16:16:55.859689597+00:00 stderr F I1212 16:16:55.859610 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:44Z","message":"CustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused","reason":"CustomRouteController_SyncError::OAuthServerDeployment_UnavailablePod::OAuthServerRouteEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:26Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:46Z","message":"OAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused","reason":"OAuthServerDeployment_NoPod::OAuthServerRouteEndpointAccessibleController_EndpointUnavailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:55.953405935+00:00 stderr F I1212 16:16:55.952479 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Degraded message changed from "APIServerDeploymentDegraded: 1 of 1 requested instances are unavailable for apiserver.openshift-oauth-apiserver ()\nCustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": 
dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused" to "CustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused",Available message changed from "APIServerDeploymentAvailable: no apiserver.openshift-oauth-apiserver pods available on any node.\nOAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused" to "OAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:55.954967433+00:00 stderr F I1212 16:16:55.954927 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:55.979264916+00:00 stderr F I1212 16:16:55.978096 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.978048347 +0000 UTC))" 2025-12-12T16:16:55.979264916+00:00 stderr F I1212 16:16:55.978138 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.978127629 +0000 UTC))" 2025-12-12T16:16:55.979264916+00:00 stderr F I1212 16:16:55.978159 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.978143549 +0000 UTC))" 2025-12-12T16:16:55.979264916+00:00 stderr F I1212 16:16:55.978172 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.97816427 +0000 UTC))" 2025-12-12T16:16:55.979264916+00:00 
stderr F I1212 16:16:55.978204 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.97819419 +0000 UTC))" 2025-12-12T16:16:55.979264916+00:00 stderr F I1212 16:16:55.978220 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.978212101 +0000 UTC))" 2025-12-12T16:16:55.979264916+00:00 stderr F I1212 16:16:55.978236 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.978224901 +0000 UTC))" 2025-12-12T16:16:55.979264916+00:00 stderr F I1212 16:16:55.978252 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.978241041 +0000 UTC))" 2025-12-12T16:16:55.979264916+00:00 stderr F I1212 16:16:55.978267 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.978257142 +0000 UTC))" 2025-12-12T16:16:55.979264916+00:00 stderr F I1212 16:16:55.978289 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.978274512 +0000 UTC))" 2025-12-12T16:16:55.979264916+00:00 stderr F I1212 16:16:55.978497 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-authentication-operator.svc\" [serving] validServingFor=[metrics.openshift-authentication-operator.svc,metrics.openshift-authentication-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:09 +0000 UTC to 2027-11-02 07:52:10 +0000 UTC 
(now=2025-12-12 16:16:55.978483277 +0000 UTC))" 2025-12-12T16:16:55.979264916+00:00 stderr F I1212 16:16:55.978674 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556204\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556203\" (2025-12-12 15:16:43 +0000 UTC to 2028-12-12 15:16:43 +0000 UTC (now=2025-12-12 16:16:55.978661412 +0000 UTC))" 2025-12-12T16:16:56.723583488+00:00 stderr F I1212 16:16:56.721790 1 request.go:752] "Waited before sending request" delay="1.993189022s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift" 2025-12-12T16:16:57.922609701+00:00 stderr F I1212 16:16:57.922065 1 request.go:752] "Waited before sending request" delay="2.39574787s" reason="client-side throttling, not priority and fairness" verb="POST" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication-operator/events" 2025-12-12T16:16:58.922713528+00:00 stderr F I1212 16:16:58.922143 1 request.go:752] "Waited before sending request" delay="2.395204488s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/oauth-serving-cert" 2025-12-12T16:16:59.732661453+00:00 stderr F I1212 16:16:59.732587 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretUpdated' Updated Secret/webhook-authentication-integrated-oauth -n openshift-config because it changed 2025-12-12T16:16:59.923218605+00:00 stderr F I1212 16:16:59.923138 1 request.go:752] "Waited before sending request" delay="1.995803517s" reason="client-side throttling, not priority and fairness" verb="POST" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication-operator/events" 2025-12-12T16:17:01.122925194+00:00 stderr F I1212 16:17:01.121971 1 request.go:752] "Waited before sending request" delay="1.792656415s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/configmaps/v4-0-config-system-service-ca" 2025-12-12T16:17:02.324467269+00:00 stderr F I1212 16:17:02.322739 1 request.go:752] "Waited before sending request" delay="1.997644581s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa" 2025-12-12T16:17:03.521454553+00:00 stderr F I1212 16:17:03.521353 1 request.go:752] "Waited before sending request" delay="1.18537466s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver" 2025-12-12T16:17:04.600651510+00:00 stderr F I1212 16:17:04.600540 1 reflector.go:430] "Caches populated" type="*v1.Route" reflector="github.com/openshift/client-go/route/informers/externalversions/factory.go:125" 2025-12-12T16:17:04.624461622+00:00 stderr F I1212 16:17:04.624365 1 base_controller.go:82] Caches are synced for CustomRouteController 2025-12-12T16:17:04.624461622+00:00 stderr F I1212 16:17:04.624408 1 base_controller.go:119] Starting #1 worker of CustomRouteController controller ... 
2025-12-12T16:17:04.624543734+00:00 stderr F I1212 16:17:04.624453 1 base_controller.go:82] Caches are synced for WellKnownReadyController 2025-12-12T16:17:04.624543734+00:00 stderr F I1212 16:17:04.624461 1 base_controller.go:119] Starting #1 worker of WellKnownReadyController controller ... 2025-12-12T16:17:04.624543734+00:00 stderr F I1212 16:17:04.624467 1 base_controller.go:82] Caches are synced for openshift-authentication-PayloadConfig 2025-12-12T16:17:04.624543734+00:00 stderr F I1212 16:17:04.624500 1 base_controller.go:119] Starting #1 worker of openshift-authentication-PayloadConfig controller ... 2025-12-12T16:17:04.624543734+00:00 stderr F I1212 16:17:04.624502 1 base_controller.go:82] Caches are synced for OAuthServer-WorkloadWorkloadController 2025-12-12T16:17:04.624555974+00:00 stderr F I1212 16:17:04.624541 1 base_controller.go:82] Caches are synced for openshift-authentication-Metadata 2025-12-12T16:17:04.624555974+00:00 stderr F I1212 16:17:04.624535 1 base_controller.go:82] Caches are synced for ProxyConfigController 2025-12-12T16:17:04.624563644+00:00 stderr F I1212 16:17:04.624547 1 base_controller.go:119] Starting #1 worker of OAuthServer-WorkloadWorkloadController controller ... 2025-12-12T16:17:04.624595525+00:00 stderr F I1212 16:17:04.624578 1 base_controller.go:119] Starting #1 worker of ProxyConfigController controller ... 2025-12-12T16:17:04.624623066+00:00 stderr F I1212 16:17:04.624547 1 base_controller.go:119] Starting #1 worker of openshift-authentication-Metadata controller ... 2025-12-12T16:17:04.625028525+00:00 stderr F I1212 16:17:04.624480 1 base_controller.go:82] Caches are synced for OAuthServerRouteEndpointAccessibleController 2025-12-12T16:17:04.625028525+00:00 stderr F I1212 16:17:04.625012 1 base_controller.go:119] Starting #1 worker of OAuthServerRouteEndpointAccessibleController controller ... 2025-12-12T16:17:04.629640638+00:00 stderr F I1212 16:17:04.629594 1 base_controller.go:82] Caches are synced for OAuthClientsController 2025-12-12T16:17:04.629640638+00:00 stderr F I1212 16:17:04.629621 1 base_controller.go:119] Starting #1 worker of OAuthClientsController controller ... 
2025-12-12T16:17:04.671929611+00:00 stderr F I1212 16:17:04.671864 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:44Z","message":"CustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused","reason":"CustomRouteController_SyncError::OAuthServerRouteEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:26Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:46Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:04.680031898+00:00 stderr F I1212 16:17:04.679127 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Degraded message changed from "CustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nOAuthServerDeploymentDegraded: 1 of 1 requested instances are unavailable for oauth-openshift.openshift-authentication ()\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused" to "CustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused",Available message changed from "OAuthServerDeploymentAvailable: no oauth-openshift.openshift-authentication pods available on any node.\nOAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused" to "OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused" 2025-12-12T16:17:04.710830740+00:00 stderr 
F I1212 16:17:04.710740 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:44Z","message":"CustomRouteControllerDegraded: the server is currently unable to handle the request (get routes.route.openshift.io oauth-openshift)\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused","reason":"CustomRouteController_SyncError::OAuthServerRouteEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:26Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:04.721896041+00:00 stderr F I1212 16:17:04.721363 1 request.go:752] "Waited before sending request" delay="1.463213522s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-config/secrets/webhook-authentication-integrated-oauth" 2025-12-12T16:17:04.728556913+00:00 stderr F I1212 16:17:04.728466 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Available changed from False to True ("All is well") 2025-12-12T16:17:04.758570996+00:00 stderr F I1212 16:17:04.758497 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:44Z","message":"OAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540-\u003e10.217.4.10:53: read: connection refused","reason":"OAuthServerRouteEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:26Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:04.771951343+00:00 stderr F I1212 16:17:04.771764 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Degraded message changed from "CustomRouteControllerDegraded: the server is currently unable to handle the 
request (get routes.route.openshift.io oauth-openshift)\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused" to "OAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": dial tcp: lookup oauth-openshift.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.16:55540->10.217.4.10:53: read: connection refused" 2025-12-12T16:17:04.779034285+00:00 stderr F I1212 16:17:04.778967 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-03T08:58:26Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:04.793932169+00:00 stderr F I1212 16:17:04.793836 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Degraded changed from True to False ("All is well") 2025-12-12T16:17:05.722365656+00:00 stderr F I1212 16:17:05.722275 1 request.go:752] "Waited before sending request" delay="1.396932955s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/secrets/v4-0-config-system-router-certs" 2025-12-12T16:17:06.922716552+00:00 stderr F I1212 16:17:06.922237 1 request.go:752] "Waited before sending request" delay="2.243039923s" reason="client-side throttling, not priority and fairness" verb="POST" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication-operator/events" 2025-12-12T16:17:08.122038002+00:00 stderr F I1212 16:17:08.121923 1 request.go:752] "Waited before sending request" delay="2.390661286s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs" 2025-12-12T16:17:09.122332574+00:00 stderr F I1212 16:17:09.122259 1 request.go:752] "Waited before sending request" delay="2.39615783s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/configmaps/audit" 2025-12-12T16:17:10.321821879+00:00 stderr F I1212 16:17:10.321722 1 request.go:752] "Waited before sending request" delay="2.395998627s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/secrets/openshift-authenticator-certs" 2025-12-12T16:17:11.322625142+00:00 stderr F I1212 16:17:11.322066 1 request.go:752] "Waited before sending request" delay="2.216665748s" reason="client-side throttling, not priority and fairness" verb="GET" 
URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/configmaps/v4-0-config-system-cliconfig" 2025-12-12T16:17:12.521652745+00:00 stderr F I1212 16:17:12.521531 1 request.go:752] "Waited before sending request" delay="2.194881986s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/secrets/openshift-authenticator-certs" 2025-12-12T16:17:13.521955326+00:00 stderr F I1212 16:17:13.521887 1 request.go:752] "Waited before sending request" delay="2.194128287s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/secrets/v4-0-config-system-session" 2025-12-12T16:17:14.521960060+00:00 stderr F I1212 16:17:14.521896 1 request.go:752] "Waited before sending request" delay="2.19671266s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift" 2025-12-12T16:17:15.392401771+00:00 stderr F E1212 16:17:15.392289 1 timeout.go:140] "Post-timeout activity" timeElapsed="2.019689537s" method="GET" path="/healthz" result=null 2025-12-12T16:17:15.721410403+00:00 stderr F I1212 16:17:15.721275 1 request.go:752] "Waited before sending request" delay="2.195557821s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/secrets/v4-0-config-system-session" 2025-12-12T16:17:16.721648623+00:00 stderr F I1212 16:17:16.721550 1 request.go:752] "Waited before sending request" delay="1.790174005s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs" 2025-12-12T16:17:17.722212170+00:00 stderr F I1212 16:17:17.722029 1 request.go:752] "Waited before sending request" delay="1.788486712s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-config/secrets/webhook-authentication-integrated-oauth" 2025-12-12T16:17:18.722477467+00:00 stderr F I1212 16:17:18.722112 1 request.go:752] "Waited before sending request" delay="1.785517737s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/configmaps/v4-0-config-system-metadata" 2025-12-12T16:17:19.923369792+00:00 stderr F I1212 16:17:19.921999 1 request.go:752] "Waited before sending request" delay="1.796665079s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift" 2025-12-12T16:17:24.322362072+00:00 stderr F I1212 16:17:24.321859 1 request.go:752] "Waited before sending request" delay="1.021267181s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/configmaps/oauth-serving-cert" 2025-12-12T16:17:25.322269839+00:00 stderr F I1212 16:17:25.322138 1 request.go:752] "Waited before sending request" delay="1.39493319s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/secrets/v4-0-config-system-router-certs" 2025-12-12T16:17:38.934021183+00:00 stderr F I1212 16:17:38.933247 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", 
Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretUpdated' Updated Secret/v4-0-config-user-idp-0-file-data -n openshift-authentication because it changed 2025-12-12T16:17:46.319923179+00:00 stderr F I1212 16:17:46.319100 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.319045567 +0000 UTC))" 2025-12-12T16:17:46.319923179+00:00 stderr F I1212 16:17:46.319729 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.319702014 +0000 UTC))" 2025-12-12T16:17:46.319923179+00:00 stderr F I1212 16:17:46.319748 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.319735754 +0000 UTC))" 2025-12-12T16:17:46.319923179+00:00 stderr F I1212 16:17:46.319764 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.319753155 +0000 UTC))" 2025-12-12T16:17:46.319923179+00:00 stderr F I1212 16:17:46.319785 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.319771835 +0000 UTC))" 2025-12-12T16:17:46.319923179+00:00 stderr F I1212 16:17:46.319805 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.319791186 +0000 UTC))" 2025-12-12T16:17:46.319923179+00:00 stderr F I1212 16:17:46.319827 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" 
(2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.319812376 +0000 UTC))" 2025-12-12T16:17:46.319923179+00:00 stderr F I1212 16:17:46.319849 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.319834557 +0000 UTC))" 2025-12-12T16:17:46.319923179+00:00 stderr F I1212 16:17:46.319869 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.319854847 +0000 UTC))" 2025-12-12T16:17:46.319923179+00:00 stderr F I1212 16:17:46.319892 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.319879518 +0000 UTC))" 2025-12-12T16:17:46.319982880+00:00 stderr F I1212 16:17:46.319914 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.319901448 +0000 UTC))" 2025-12-12T16:17:46.320335329+00:00 stderr F I1212 16:17:46.320266 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-authentication-operator.svc\" [serving] validServingFor=[metrics.openshift-authentication-operator.svc,metrics.openshift-authentication-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:09 +0000 UTC to 2027-11-02 07:52:10 +0000 UTC (now=2025-12-12 16:17:46.320245387 +0000 UTC))" 2025-12-12T16:17:46.320533094+00:00 stderr F I1212 16:17:46.320479 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556204\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556203\" (2025-12-12 15:16:43 +0000 UTC to 2028-12-12 15:16:43 +0000 UTC (now=2025-12-12 16:17:46.320462182 +0000 UTC))" 2025-12-12T16:17:47.428632208+00:00 stderr F I1212 16:17:47.428276 1 request.go:752] "Waited before sending request" delay="1.127621007s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/configmaps/v4-0-config-system-metadata" 2025-12-12T16:18:00.848361762+00:00 stderr F I1212 16:18:00.837216 1 apps.go:155] Deployment "openshift-authentication/oauth-openshift" changes: 
{"metadata":{"annotations":{"operator.openshift.io/rvs-hash":"4x8prlg06EzB3UocpzbvR-XccCSkiLnt6IMXq_lh6VDWyfJf8ykw91ojKnPish2780jgXL_UszxPsxmlEFZ6_g","operator.openshift.io/spec-hash":"6d2d789d24cc3142734e6230ad8775f059cb135295c8f0990ecb16a1c583caa8"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"operator.openshift.io/rvs-hash":"4x8prlg06EzB3UocpzbvR-XccCSkiLnt6IMXq_lh6VDWyfJf8ykw91ojKnPish2780jgXL_UszxPsxmlEFZ6_g"}},"spec":{"containers":[{"args":["if [ -s /var/config/system/configmaps/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt ]; then\n echo \"Copying system trust bundle\"\n cp -f /var/config/system/configmaps/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt /etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem\nfi\nexec oauth-server osinserver \\\n--config=/var/config/system/configmaps/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig \\\n--v=2 \\\n--audit-log-format=json \\\n--audit-log-maxbackup=10 \\\n--audit-log-maxsize=100 \\\n--audit-log-path=/var/log/oauth-server/audit.log \\\n--audit-policy-file=/var/run/configmaps/audit/audit.yaml\n"],"command":["/bin/bash","-ec"],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:036ed6efe4cb5f5b90ee7f9ef5297c8591b8d67aa36b3c58b4fc5417622a140c","lifecycle":{"preStop":{"exec":{"command":["sleep","25"]}}},"livenessProbe":{"failureThreshold":3,"httpGet":{"path":"/healthz","port":6443,"scheme":"HTTPS"},"initialDelaySeconds":30,"periodSeconds":10,"successThreshold":1,"timeoutSeconds":1},"name":"oauth-openshift","ports":[{"containerPort":6443,"name":"https","protocol":"TCP"}],"readinessProbe":{"failureThreshold":3,"httpGet":{"path":"/healthz","port":6443,"scheme":"HTTPS"},"periodSeconds":10,"successThreshold":1,"timeoutSeconds":1},"resources":{"requests":{"cpu":"10m","memory":"50Mi"}},"securityContext":{"privileged":true,"readOnlyRootFilesystem":false,"runAsUser":0},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/audit","name":"audit-policies"},{"mountPath":"/var/log/oauth-server","name":"audit-dir"},{"mountPath":"/var/config/system/secrets/v4-0-config-system-session","name":"v4-0-config-system-session","readOnly":true},{"mountPath":"/var/config/system/configmaps/v4-0-config-system-cliconfig","name":"v4-0-config-system-cliconfig","readOnly":true},{"mountPath":"/var/config/system/secrets/v4-0-config-system-serving-cert","name":"v4-0-config-system-serving-cert","readOnly":true},{"mountPath":"/var/config/system/configmaps/v4-0-config-system-service-ca","name":"v4-0-config-system-service-ca","readOnly":true},{"mountPath":"/var/config/system/secrets/v4-0-config-system-router-certs","name":"v4-0-config-system-router-certs","readOnly":true},{"mountPath":"/var/config/system/secrets/v4-0-config-system-ocp-branding-template","name":"v4-0-config-system-ocp-branding-template","readOnly":true},{"mountPath":"/var/config/user/template/secret/v4-0-config-user-template-login","name":"v4-0-config-user-template-login","readOnly":true},{"mountPath":"/var/config/user/template/secret/v4-0-config-user-template-provider-selection","name":"v4-0-config-user-template-provider-selection","readOnly":true},{"mountPath":"/var/config/user/template/secret/v4-0-config-user-template-error","name":"v4-0-config-user-template-error","readOnly":true},{"mountPath":"/var/config/system/configmaps/v4-0-config-system-trusted-ca-bundle","name":"v4-0-config-system-trusted-ca-bundle","readOnly":true},{"mountPath":"/var/config/user/idp/0/secret/v4-0-config-use
r-idp-0-file-data","name":"v4-0-config-user-idp-0-file-data","readOnly":true}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"securityContext":null,"serviceAccount":null,"volumes":[{"configMap":{"name":"audit"},"name":"audit-policies"},{"hostPath":{"path":"/var/log/oauth-server"},"name":"audit-dir"},{"name":"v4-0-config-system-session","secret":{"secretName":"v4-0-config-system-session"}},{"configMap":{"name":"v4-0-config-system-cliconfig"},"name":"v4-0-config-system-cliconfig"},{"name":"v4-0-config-system-serving-cert","secret":{"secretName":"v4-0-config-system-serving-cert"}},{"configMap":{"name":"v4-0-config-system-service-ca"},"name":"v4-0-config-system-service-ca"},{"name":"v4-0-config-system-router-certs","secret":{"secretName":"v4-0-config-system-router-certs"}},{"name":"v4-0-config-system-ocp-branding-template","secret":{"secretName":"v4-0-config-system-ocp-branding-template"}},{"name":"v4-0-config-user-template-login","secret":{"optional":true,"secretName":"v4-0-config-user-template-login"}},{"name":"v4-0-config-user-template-provider-selection","secret":{"optional":true,"secretName":"v4-0-config-user-template-provider-selection"}},{"name":"v4-0-config-user-template-error","secret":{"optional":true,"secretName":"v4-0-config-user-template-error"}},{"configMap":{"name":"v4-0-config-system-trusted-ca-bundle","optional":true},"name":"v4-0-config-system-trusted-ca-bundle"},{"name":"v4-0-config-user-idp-0-file-data","secret":{"items":[{"key":"htpasswd","path":"htpasswd"}],"secretName":"v4-0-config-user-idp-0-file-data"}}]}}}} 2025-12-12T16:18:00.859726273+00:00 stderr F I1212 16:18:00.859665 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:18:00.860910002+00:00 stderr F I1212 16:18:00.860853 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/oauth-openshift -n openshift-authentication because it changed 2025-12-12T16:18:00.908411366+00:00 stderr F I1212 16:18:00.908340 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:00.909002521+00:00 stderr F I1212 16:18:00.908903 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:00.953074030+00:00 stderr F I1212 16:18:00.952428 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", 
FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Progressing changed from False to True ("OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.") 2025-12-12T16:18:00.957987491+00:00 stderr F E1212 16:18:00.957938 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:00.962408540+00:00 stderr F I1212 16:18:00.962376 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:00.976889558+00:00 stderr F I1212 16:18:00.976519 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Degraded message changed from "All is well" to "OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready" 2025-12-12T16:18:00.982044946+00:00 stderr F I1212 16:18:00.981979 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:01.018909167+00:00 stderr F I1212 16:18:01.018837 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication 
changed: Degraded message changed from "OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready" to "OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server" 2025-12-12T16:18:01.028717770+00:00 stderr F E1212 16:18:01.028651 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:01.032114034+00:00 stderr F E1212 16:18:01.032087 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:01.042209533+00:00 stderr F E1212 16:18:01.042134 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:01.052281332+00:00 stderr F I1212 16:18:01.052229 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:01.057617694+00:00 stderr F E1212 16:18:01.057592 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:01.069708683+00:00 stderr F I1212 16:18:01.068932 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Available changed from True to False ("OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF") 2025-12-12T16:18:01.090508857+00:00 stderr F E1212 16:18:01.090444 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:01.092986919+00:00 stderr F E1212 16:18:01.092954 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 
2025-12-12T16:18:01.105029496+00:00 stderr F I1212 16:18:01.104953 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:01.106627156+00:00 stderr F E1212 16:18:01.106574 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:01.116518260+00:00 stderr F E1212 16:18:01.116453 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:01.118544800+00:00 stderr F I1212 16:18:01.118507 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Degraded message changed from "OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server" to "OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:01.137012827+00:00 stderr F E1212 16:18:01.136625 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:01.154153411+00:00 stderr F E1212 16:18:01.154069 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:01.171339986+00:00 stderr F E1212 16:18:01.169589 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:01.255861145+00:00 stderr F 
E1212 16:18:01.254904 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:01.279563341+00:00 stderr F E1212 16:18:01.279471 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:01.452738033+00:00 stderr F E1212 16:18:01.452471 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:01.785808397+00:00 stderr F E1212 16:18:01.785737 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:01.901246701+00:00 stderr F E1212 16:18:01.901127 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:02.031742797+00:00 stderr F I1212 16:18:02.031650 1 request.go:752] "Waited before sending request" delay="1.059605697s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/configmaps/audit" 2025-12-12T16:18:02.234793118+00:00 stderr F I1212 16:18:02.234719 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:02.441583580+00:00 stderr F E1212 16:18:02.441466 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:03.185416410+00:00 stderr F E1212 16:18:03.185334 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:03.231863978+00:00 stderr F I1212 16:18:03.231806 1 request.go:752] "Waited before sending request" delay="1.596760307s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/configmaps/trusted-ca-bundle" 2025-12-12T16:18:03.753250338+00:00 stderr F E1212 16:18:03.752090 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:04.431332292+00:00 stderr F I1212 16:18:04.431220 1 request.go:752] "Waited before sending request" delay="1.34332257s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/secrets/v4-0-config-system-router-certs" 2025-12-12T16:18:05.750344791+00:00 stderr F E1212 16:18:05.750285 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:06.328626648+00:00 stderr F E1212 16:18:06.328543 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:10.661382355+00:00 stderr F E1212 16:18:10.660768 1 
base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:10.874881494+00:00 stderr F E1212 16:18:10.874772 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:11.464416669+00:00 stderr F E1212 16:18:11.464342 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:13.968008564+00:00 stderr F I1212 16:18:13.967562 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:13.972750831+00:00 stderr F E1212 16:18:13.972624 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:13.975820377+00:00 stderr F W1212 16:18:13.975433 1 base_controller.go:242] Updating status of 
"WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:13.975851408+00:00 stderr F E1212 16:18:13.975833 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:13.975851408+00:00 stderr F E1212 16:18:13.975606 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:13.977924309+00:00 stderr F W1212 16:18:13.977895 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:13.977945350+00:00 stderr F E1212 16:18:13.977929 1 base_controller.go:279] "Unhandled Error" err="RouterCertsDomainValidationController reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:13.979720634+00:00 stderr F E1212 16:18:13.979684 1 base_controller.go:279] "Unhandled Error" err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"OAuthAPIServerController-Workload\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:13.979809356+00:00 stderr F E1212 16:18:13.979789 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:13.980972094+00:00 stderr F E1212 16:18:13.980713 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:13.984067701+00:00 stderr F I1212 16:18:13.984018 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:14.011727305+00:00 stderr F E1212 16:18:14.011660 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 
2025-12-12T16:18:14.012402882+00:00 stderr F E1212 16:18:14.012360 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:14.015052467+00:00 stderr F W1212 16:18:14.015015 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:14.015088578+00:00 stderr F E1212 16:18:14.015068 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:14.018204505+00:00 stderr F E1212 16:18:14.017963 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": 
dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:14.024922271+00:00 stderr F I1212 16:18:14.024868 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:14.026343366+00:00 stderr F E1212 16:18:14.026301 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:14.033417111+00:00 stderr F W1212 16:18:14.033390 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch 
"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:14.033503413+00:00 stderr F E1212 16:18:14.033490 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:14.053704053+00:00 stderr F I1212 16:18:14.053625 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys 
correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:14.058244245+00:00 stderr F E1212 16:18:14.058121 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:14.066334805+00:00 stderr F W1212 16:18:14.066277 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:14.066334805+00:00 stderr F E1212 16:18:14.066326 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:14.101251298+00:00 stderr F I1212 16:18:14.101133 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server 
endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:14.102688684+00:00 stderr F E1212 16:18:14.102610 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:14.116880235+00:00 stderr F W1212 16:18:14.116777 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:14.116880235+00:00 stderr F E1212 16:18:14.116846 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:14.185600574+00:00 stderr F I1212 16:18:14.185502 1 status_controller.go:230] clusteroperator/authentication diff 
{"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:14.187158162+00:00 stderr F E1212 16:18:14.187087 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:14.317291249+00:00 stderr F W1212 16:18:14.315326 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:14.317291249+00:00 stderr F E1212 16:18:14.315404 1 base_controller.go:279] "Unhandled Error" err="RouterCertsDomainValidationController reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection 
refused" 2025-12-12T16:18:14.350433489+00:00 stderr F I1212 16:18:14.350349 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:14.352336306+00:00 stderr F E1212 16:18:14.352297 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:14.515743436+00:00 stderr F W1212 16:18:14.515659 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:14.515790527+00:00 stderr F E1212 16:18:14.515735 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api 
server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:14.675995717+00:00 stderr F I1212 16:18:14.675905 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 
2025-12-12T16:18:14.677769491+00:00 stderr F E1212 16:18:14.677728 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:14.715988686+00:00 stderr F E1212 16:18:14.715812 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:14.916203336+00:00 stderr F E1212 16:18:14.916038 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:15.115289818+00:00 stderr F W1212 16:18:15.115092 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:15.115289818+00:00 stderr F E1212 16:18:15.115155 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:15.166952105+00:00 stderr F I1212 16:18:15.166827 1 request.go:752] "Waited before sending request" delay="1.106797623s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift" 2025-12-12T16:18:15.315154068+00:00 stderr F E1212 16:18:15.315074 1 base_controller.go:279] "Unhandled Error" err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"OAuthAPIServerController-Workload\": Patch 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:15.320460290+00:00 stderr F I1212 16:18:15.320402 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:15.322165062+00:00 stderr F E1212 16:18:15.322111 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:15.515477441+00:00 stderr F W1212 16:18:15.515397 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: 
connection refused 2025-12-12T16:18:15.515477441+00:00 stderr F E1212 16:18:15.515445 1 base_controller.go:279] "Unhandled Error" err="RouterCertsDomainValidationController reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:15.716758587+00:00 stderr F W1212 16:18:15.716086 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:15.716758587+00:00 stderr F E1212 16:18:15.716746 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:15.916522406+00:00 stderr F E1212 16:18:15.915950 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:16.175553710+00:00 stderr F E1212 16:18:16.174905 1 base_controller.go:279] "Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:16.365356842+00:00 stderr F W1212 16:18:16.365155 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:16.365356842+00:00 stderr F E1212 16:18:16.365328 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:16.367865164+00:00 stderr F I1212 16:18:16.367814 1 request.go:752] "Waited before sending request" delay="1.038148447s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/secrets/etcd-client" 2025-12-12T16:18:16.515714480+00:00 stderr F E1212 16:18:16.515625 1 base_controller.go:279] "Unhandled Error" err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"OAuthAPIServerController-Workload\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:16.605709915+00:00 
stderr F I1212 16:18:16.605617 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:16.607226872+00:00 stderr F E1212 16:18:16.607203 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:16.715625562+00:00 stderr F W1212 16:18:16.715507 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:16.715625562+00:00 stderr F E1212 16:18:16.715592 1 base_controller.go:279] "Unhandled Error" err="RouterCertsDomainValidationController reconciliation failed: Get 
\"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:16.914741815+00:00 stderr F E1212 16:18:16.914690 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:17.115004556+00:00 stderr F E1212 16:18:17.114946 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:17.315156364+00:00 stderr F E1212 16:18:17.315075 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:17.566869538+00:00 stderr F I1212 16:18:17.566787 1 request.go:752] "Waited before sending request" delay="1.027079673s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/secrets/etcd-client" 2025-12-12T16:18:17.572161058+00:00 stderr F E1212 16:18:17.572113 1 base_controller.go:279] "Unhandled Error" 
err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"OAuthAPIServerController-Workload\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:17.714953799+00:00 stderr F W1212 16:18:17.714874 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:17.715033781+00:00 stderr F E1212 16:18:17.714951 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:17.916932792+00:00 stderr F W1212 16:18:17.916583 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:17.916932792+00:00 stderr F E1212 16:18:17.916647 1 base_controller.go:279] "Unhandled Error" err="RouterCertsDomainValidationController reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:18.172783458+00:00 stderr F E1212 16:18:18.172705 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:18.767201603+00:00 stderr F I1212 16:18:18.767110 1 request.go:752] "Waited before sending request" delay="1.15113566s" reason="client-side throttling, not priority and fairness" verb="GET" 
URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/secrets/etcd-client" 2025-12-12T16:18:18.771309805+00:00 stderr F E1212 16:18:18.771258 1 base_controller.go:279] "Unhandled Error" err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"OAuthAPIServerController-Workload\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:18.970795966+00:00 stderr F E1212 16:18:18.970720 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:19.169950610+00:00 stderr F I1212 16:18:19.169414 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is 
well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:19.170648627+00:00 stderr F E1212 16:18:19.170610 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:19.170975225+00:00 stderr F W1212 16:18:19.170938 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:19.170984955+00:00 stderr F E1212 16:18:19.170977 1 base_controller.go:279] "Unhandled Error" err="RouterCertsDomainValidationController reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:19.371245316+00:00 stderr F E1212 16:18:19.371168 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:19.767963394+00:00 stderr F I1212 16:18:19.767527 1 request.go:752] "Waited before sending request" delay="1.199187007s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift" 2025-12-12T16:18:19.772384743+00:00 stderr F E1212 16:18:19.772313 1 base_controller.go:279] "Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:19.971268900+00:00 stderr F E1212 16:18:19.971086 1 base_controller.go:279] "Unhandled Error" err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"OAuthAPIServerController-Workload\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:20.282480375+00:00 stderr F W1212 16:18:20.282395 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:20.282557686+00:00 stderr F E1212 16:18:20.282470 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:20.371323301+00:00 stderr F W1212 16:18:20.371234 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:20.371323301+00:00 stderr F E1212 16:18:20.371312 1 base_controller.go:279] "Unhandled Error" 
err="RouterCertsDomainValidationController reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:20.571542921+00:00 stderr F E1212 16:18:20.571480 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:20.772763616+00:00 stderr F E1212 16:18:20.772692 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:20.967766337+00:00 stderr F I1212 16:18:20.967644 1 request.go:752] "Waited before sending request" delay="1.182424393s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication" 2025-12-12T16:18:21.172358925+00:00 stderr F E1212 16:18:21.172276 1 base_controller.go:279] "Unhandled Error" err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"OAuthAPIServerController-Workload\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:21.372780830+00:00 stderr F E1212 16:18:21.372704 1 
base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:21.571470823+00:00 stderr F W1212 16:18:21.571394 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:21.571525044+00:00 stderr F E1212 16:18:21.571461 1 base_controller.go:279] "Unhandled Error" err="RouterCertsDomainValidationController reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:21.723134972+00:00 stderr F E1212 16:18:21.723029 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" 2025-12-12T16:18:21.733159380+00:00 stderr F W1212 16:18:21.733090 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:21.733317794+00:00 stderr F E1212 16:18:21.733274 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:21.974370283+00:00 stderr F E1212 16:18:21.974308 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: 
connect: connection refused" 2025-12-12T16:18:22.167368475+00:00 stderr F I1212 16:18:22.167302 1 request.go:752] "Waited before sending request" delay="1.138369924s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift" 2025-12-12T16:18:22.572980522+00:00 stderr F E1212 16:18:22.572554 1 base_controller.go:279] "Unhandled Error" err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"OAuthAPIServerController-Workload\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:22.973962535+00:00 stderr F E1212 16:18:22.973893 1 base_controller.go:279] "Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:23.172400321+00:00 stderr F W1212 16:18:23.172304 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:23.172400321+00:00 stderr F 
E1212 16:18:23.172367 1 base_controller.go:279] "Unhandled Error" err="RouterCertsDomainValidationController reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:23.372297583+00:00 stderr F E1212 16:18:23.372151 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:23.570723939+00:00 stderr F E1212 16:18:23.570653 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:23.772338543+00:00 stderr F E1212 16:18:23.772269 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:23.966851512+00:00 stderr F I1212 16:18:23.966772 1 request.go:752] "Waited before sending request" delay="1.004245948s" reason="client-side throttling, not priority and fairness" verb="GET" 
URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift" 2025-12-12T16:18:23.967699633+00:00 stderr F E1212 16:18:23.967666 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:24.293504128+00:00 stderr F I1212 16:18:24.293431 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:24.294374349+00:00 stderr F E1212 16:18:24.294328 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:24.372850230+00:00 stderr F E1212 16:18:24.372369 1 base_controller.go:279] "Unhandled Error" err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager 
\"OAuthAPIServerController-Workload\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:24.968131467+00:00 stderr F E1212 16:18:24.968081 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:25.371266223+00:00 stderr F W1212 16:18:25.370845 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:25.371313644+00:00 stderr F E1212 16:18:25.371269 1 base_controller.go:279] "Unhandled Error" err="RouterCertsDomainValidationController reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:25.411026156+00:00 stderr F W1212 16:18:25.410964 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:25.411026156+00:00 stderr F E1212 16:18:25.411013 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:25.577075191+00:00 stderr F E1212 16:18:25.576996 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 
2025-12-12T16:18:25.966883759+00:00 stderr F I1212 16:18:25.966773 1 request.go:752] "Waited before sending request" delay="1.113424397s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/configmaps/audit" 2025-12-12T16:18:25.972005095+00:00 stderr F E1212 16:18:25.971946 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:26.168712237+00:00 stderr F E1212 16:18:26.168546 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:26.373247364+00:00 stderr F E1212 16:18:26.373162 1 base_controller.go:279] "Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:26.571255739+00:00 stderr F E1212 16:18:26.571165 1 base_controller.go:279] "Unhandled Error" err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"OAuthAPIServerController-Workload\": Patch 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:26.774527725+00:00 stderr F E1212 16:18:26.774451 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:26.967331401+00:00 stderr F I1212 16:18:26.967276 1 request.go:752] "Waited before sending request" delay="1.068591487s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/secrets/v4-0-config-system-session" 2025-12-12T16:18:27.169068969+00:00 stderr F E1212 16:18:27.169009 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:27.771959364+00:00 stderr F E1212 16:18:27.771320 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:27.968970064+00:00 stderr F E1212 16:18:27.968894 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get 
\"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:28.571132142+00:00 stderr F W1212 16:18:28.571069 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:28.571132142+00:00 stderr F E1212 16:18:28.571120 1 base_controller.go:279] "Unhandled Error" err="RouterCertsDomainValidationController reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:28.768881741+00:00 stderr F E1212 16:18:28.768807 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:28.974590947+00:00 stderr F E1212 16:18:28.974523 1 base_controller.go:279] "Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:29.181392669+00:00 stderr F E1212 16:18:29.180625 
1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:29.572659481+00:00 stderr F I1212 16:18:29.568367 1 request.go:752] "Waited before sending request" delay="1.034267809s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/configmaps/audit" 2025-12-12T16:18:29.577771338+00:00 stderr F E1212 16:18:29.577512 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:29.769415836+00:00 stderr F E1212 16:18:29.769359 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:30.171846325+00:00 stderr F E1212 16:18:30.171735 1 base_controller.go:279] "Unhandled Error" err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"OAuthAPIServerController-Workload\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 
2025-12-12T16:18:30.580421437+00:00 stderr F E1212 16:18:30.580106 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:30.971217358+00:00 stderr F E1212 16:18:30.969707 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.301776591+00:00 stderr F W1212 16:18:31.301730 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.301776591+00:00 stderr F E1212 16:18:31.301764 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get \"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.310898796+00:00 stderr F W1212 16:18:31.310876 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.310988098+00:00 stderr F E1212 16:18:31.310977 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get \"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.325735173+00:00 stderr F W1212 16:18:31.325691 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.325735173+00:00 stderr F E1212 16:18:31.325722 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get \"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.350557727+00:00 stderr F W1212 16:18:31.350517 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 
10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.350648969+00:00 stderr F E1212 16:18:31.350633 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get \"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.358080153+00:00 stderr F E1212 16:18:31.358033 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:18:31.372042878+00:00 stderr F E1212 16:18:31.372015 1 base_controller.go:279] "Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:31.396203155+00:00 stderr F W1212 16:18:31.396148 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.396298167+00:00 stderr F E1212 16:18:31.396284 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get \"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: 
connection refused" 2025-12-12T16:18:31.492333592+00:00 stderr F W1212 16:18:31.491215 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.492333592+00:00 stderr F E1212 16:18:31.491284 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get \"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.573024637+00:00 stderr F E1212 16:18:31.572953 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:31.642979056+00:00 stderr F W1212 16:18:31.642786 1 base_controller.go:242] Updating status of "OAuthServerServiceEndpointAccessibleController" failed: unable to ApplyStatus for operator using fieldManager "OAuthServerServiceEndpointAccessibleController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthServerServiceEndpointAccessibleController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.643123680+00:00 stderr F E1212 16:18:31.643109 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: [Get 
\"https://10.217.5.136:443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers), unable to ApplyStatus for operator using fieldManager \"OAuthServerService-EndpointAccessible\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthServerService-EndpointAccessible&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:31.660710364+00:00 stderr F W1212 16:18:31.660636 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.660710364+00:00 stderr F E1212 16:18:31.660684 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get \"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.968585726+00:00 stderr F E1212 16:18:31.968527 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.987205266+00:00 stderr F W1212 16:18:31.987093 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.987250897+00:00 stderr F E1212 16:18:31.987230 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get \"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:32.634114380+00:00 stderr F W1212 16:18:32.634018 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:32.634150841+00:00 stderr F E1212 16:18:32.634135 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get \"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:32.776970352+00:00 stderr F E1212 16:18:32.776868 1 base_controller.go:279] "Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:32.970869315+00:00 stderr F E1212 16:18:32.970791 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:33.574348214+00:00 stderr F E1212 16:18:33.574274 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:33.768268518+00:00 stderr F E1212 16:18:33.768016 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:33.920068301+00:00 stderr F W1212 16:18:33.919996 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:33.920068301+00:00 stderr F E1212 16:18:33.920036 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get \"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:34.175612849+00:00 stderr F W1212 16:18:34.175527 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:34.175647610+00:00 stderr F E1212 16:18:34.175602 1 base_controller.go:279] "Unhandled Error" err="RouterCertsDomainValidationController reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:34.373321127+00:00 stderr F E1212 16:18:34.372403 1 base_controller.go:279] "Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: 
connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:34.536880151+00:00 stderr F I1212 16:18:34.536786 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys 
correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:34.538581993+00:00 stderr F E1212 16:18:34.538534 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:34.972952912+00:00 stderr F E1212 16:18:34.972079 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:35.174108615+00:00 stderr F E1212 16:18:35.174032 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:35.573430908+00:00 stderr F E1212 16:18:35.573311 1 base_controller.go:279] "Unhandled Error" err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"OAuthAPIServerController-Workload\": Patch 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:36.173410181+00:00 stderr F E1212 16:18:36.173340 1 base_controller.go:279] "Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:36.374550754+00:00 stderr F E1212 16:18:36.374471 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:36.485989839+00:00 stderr F W1212 16:18:36.485842 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:36.485989839+00:00 stderr F E1212 16:18:36.485918 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get 
\"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:36.569021281+00:00 stderr F E1212 16:18:36.568944 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:36.656686499+00:00 stderr F W1212 16:18:36.656567 1 base_controller.go:242] Updating status of "OAuthServerServiceEndpointAccessibleController" failed: unable to ApplyStatus for operator using fieldManager "OAuthServerServiceEndpointAccessibleController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthServerServiceEndpointAccessibleController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:36.656686499+00:00 stderr F E1212 16:18:36.656662 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: [Get \"https://10.217.5.136:443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers), unable to ApplyStatus for operator using fieldManager \"OAuthServerService-EndpointAccessible\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthServerService-EndpointAccessible&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:37.180757154+00:00 stderr F E1212 16:18:37.180688 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:37.774980945+00:00 stderr F E1212 16:18:37.774848 1 base_controller.go:279] "Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:39.754288529+00:00 stderr F E1212 16:18:39.754166 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial 
tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:40.398163467+00:00 stderr F E1212 16:18:40.397361 1 base_controller.go:279] "Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:41.501858514+00:00 stderr F E1212 16:18:41.501477 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 
2025-12-12T16:18:41.618683822+00:00 stderr F W1212 16:18:41.618580 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:41.618683822+00:00 stderr F E1212 16:18:41.618634 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get \"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:41.678442040+00:00 stderr F W1212 16:18:41.678360 1 base_controller.go:242] Updating status of "OAuthServerServiceEndpointAccessibleController" failed: unable to ApplyStatus for operator using fieldManager "OAuthServerServiceEndpointAccessibleController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthServerServiceEndpointAccessibleController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:41.678493191+00:00 stderr F E1212 16:18:41.678434 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: [Get \"https://10.217.5.136:443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers), unable to ApplyStatus for operator using fieldManager \"OAuthServerService-EndpointAccessible\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthServerService-EndpointAccessible&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:41.692204340+00:00 stderr F E1212 16:18:41.692108 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:44.117997652+00:00 stderr F E1212 16:18:44.117921 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-authentication-operator/leases/cluster-authentication-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:44.119586252+00:00 stderr F E1212 16:18:44.119142 1 leaderelection.go:436] error retrieving resource lock openshift-authentication-operator/cluster-authentication-operator-lock: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-authentication-operator/leases/cluster-authentication-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:44.421917996+00:00 stderr F W1212 16:18:44.421853 1 base_controller.go:242] Updating status of "RouterCertsDomainValidationController" failed: unable to ApplyStatus for operator using fieldManager "RouterCertsDomainValidationController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=RouterCertsDomainValidationController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:44.421963867+00:00 
stderr F E1212 16:18:44.421911 1 base_controller.go:279] "Unhandled Error" err="RouterCertsDomainValidationController reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:44.828737084+00:00 stderr F E1212 16:18:44.828675 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-oauth-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:44.832688992+00:00 stderr F I1212 16:18:44.832617 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:44.832724193+00:00 stderr F E1212 16:18:44.832711 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:44.833284386+00:00 stderr F E1212 16:18:44.833260 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:44.836297751+00:00 stderr F E1212 16:18:44.836257 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-oauth-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:44.837057180+00:00 stderr F E1212 16:18:44.837013 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:44.849114458+00:00 stderr F E1212 16:18:44.849066 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-oauth-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:44.871748197+00:00 stderr F E1212 16:18:44.871715 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-oauth-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:45.033515047+00:00 
stderr F E1212 16:18:45.033448 1 base_controller.go:279] "Unhandled Error" err="APIServerStaticResources-StaticResources reconciliation failed: [\"oauth-apiserver/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/oauth-apiserver-pdb.yaml\" (string): Delete \"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-oauth-apiserver/poddisruptionbudgets/oauth-apiserver-pdb\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/apiserver-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/services/api\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_binding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-apiserver/RBAC/useroauthaccesstokens_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:useroauthaccesstoken-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"APIServerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=APIServerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:45.228039756+00:00 stderr F E1212 16:18:45.227984 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-oauth-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:45.431576418+00:00 stderr F E1212 16:18:45.431514 1 base_controller.go:279] "Unhandled Error" err="auditPolicyController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"oauth-apiserver-AuditPolicy\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=oauth-apiserver-AuditPolicy&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:45.628350474+00:00 stderr F E1212 16:18:45.628276 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-oauth-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:45.899268412+00:00 stderr F W1212 16:18:45.898198 1 base_controller.go:242] Updating status of "WellKnownReadyController" failed: unable to ApplyStatus for operator using fieldManager "WellKnownReadyController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=WellKnownReadyController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 
2025-12-12T16:18:45.899268412+00:00 stderr F E1212 16:18:45.898286 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:18:46.027737378+00:00 stderr F E1212 16:18:46.027677 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-oauth-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.230285205+00:00 stderr F E1212 16:18:46.230221 1 base_controller.go:279] "Unhandled Error" err="OAuthAPIServerController-WorkloadWorkloadController reconciliation failed: unable to ApplyStatus for operator using fieldManager \"OAuthAPIServerController-Workload\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthAPIServerController-Workload&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.628986953+00:00 stderr F E1212 16:18:46.628918 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-oauth-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.709884822+00:00 stderr F W1212 16:18:46.709820 1 base_controller.go:242] Updating status of "OAuthServerServiceEndpointAccessibleController" failed: unable to ApplyStatus for operator using fieldManager "OAuthServerServiceEndpointAccessibleController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthServerServiceEndpointAccessibleController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.709925313+00:00 stderr F E1212 16:18:46.709880 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: [Get \"https://10.217.5.136:443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers), unable to ApplyStatus for operator using fieldManager \"OAuthServerService-EndpointAccessible\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthServerService-EndpointAccessible&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.833436167+00:00 stderr F E1212 16:18:46.833369 1 base_controller.go:279] 
"Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:47.230167115+00:00 stderr F E1212 16:18:47.230102 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.428169611+00:00 stderr F E1212 16:18:47.428120 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-oauth-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.632530192+00:00 stderr F E1212 16:18:47.632209 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.644072498+00:00 stderr F E1212 16:18:47.644008 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to 
ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.660253278+00:00 stderr F E1212 16:18:47.660148 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.687605544+00:00 stderr F E1212 16:18:47.687542 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.735125609+00:00 stderr F E1212 16:18:47.735059 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.034233643+00:00 stderr F E1212 16:18:48.034136 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.430762037+00:00 stderr F E1212 16:18:48.430719 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.710687947+00:00 stderr F E1212 16:18:48.710629 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-oauth-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.831024273+00:00 stderr F E1212 16:18:48.830718 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.482364995+00:00 stderr F E1212 16:18:49.482236 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation 
failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.496247909+00:00 stderr F E1212 16:18:49.496163 1 base_controller.go:279] "Unhandled Error" err="OpenshiftAuthenticationStaticResources-StaticResources reconciliation failed: [\"oauth-openshift/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authorization.openshift.io_rolebindingrestrictions.yaml\" (string): Get \"https://10.217.4.1:443/apis/apiextensions.k8s.io/v1/customresourcedefinitions/rolebindingrestrictions.authorization.openshift.io\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/authentication-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-authentication\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/serviceaccount.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/oauth-service.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/roles/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, \"oauth-openshift/trust_distribution_rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-config-managed/rolebindings/system:openshift:oauth-servercert-trust\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"OpenshiftAuthenticationStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OpenshiftAuthenticationStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:50.679687477+00:00 stderr F E1212 16:18:50.679625 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-ServiceCA reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-ServiceCA\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-ServiceCA&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.689465029+00:00 stderr F E1212 16:18:50.689433 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-ServiceCA reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-ServiceCA\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-ServiceCA&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.704547932+00:00 stderr F E1212 16:18:50.704499 1 base_controller.go:279] "Unhandled Error" 
err="openshift-authentication-ServiceCA reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-ServiceCA\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-ServiceCA&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.729499639+00:00 stderr F E1212 16:18:50.729437 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-ServiceCA reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-ServiceCA\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-ServiceCA&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.769362314+00:00 stderr F E1212 16:18:50.769315 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.774871870+00:00 stderr F E1212 16:18:50.774758 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-ServiceCA reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-ServiceCA\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-ServiceCA&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.861088442+00:00 stderr F E1212 16:18:50.861004 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-ServiceCA reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-ServiceCA\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-ServiceCA&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.026997233+00:00 stderr F E1212 16:18:51.026914 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-ServiceCA reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-ServiceCA\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-ServiceCA&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.274007059+00:00 stderr F E1212 16:18:51.273938 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-oauth-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.353689109+00:00 stderr F E1212 16:18:51.353621 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-ServiceCA reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-ServiceCA\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-ServiceCA&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 
2025-12-12T16:18:51.748919510+00:00 stderr F E1212 16:18:51.748866 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-PayloadConfig reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-PayloadConfig\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-PayloadConfig&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.758494097+00:00 stderr F W1212 16:18:51.758468 1 base_controller.go:242] Updating status of "OAuthServerServiceEndpointAccessibleController" failed: unable to ApplyStatus for operator using fieldManager "OAuthServerServiceEndpointAccessibleController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthServerServiceEndpointAccessibleController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.758575669+00:00 stderr F E1212 16:18:51.758563 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: [Get \"https://10.217.5.136:443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers), unable to ApplyStatus for operator using fieldManager \"OAuthServerService-EndpointAccessible\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=OAuthServerService-EndpointAccessible&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:51.866798145+00:00 stderr F W1212 16:18:51.866745 1 base_controller.go:242] Updating status of "CustomRouteController" failed: unable to ApplyStatus for operator using fieldManager "CustomRouteController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=CustomRouteController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.866902948+00:00 stderr F E1212 16:18:51.866888 1 base_controller.go:279] "Unhandled Error" err="CustomRouteController reconciliation failed: Get \"https://10.217.4.1:443/apis/route.openshift.io/v1/namespaces/openshift-authentication/routes/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.935434512+00:00 stderr F E1212 16:18:51.935358 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-IngressState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.001970737+00:00 stderr F E1212 16:18:52.001598 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-ServiceCA reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-ServiceCA\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-ServiceCA&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.289711943+00:00 stderr F E1212 16:18:53.288927 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-ServiceCA reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-ServiceCA\": Patch 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-ServiceCA&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.337032423+00:00 stderr F E1212 16:18:53.336940 1 base_controller.go:279] "Unhandled Error" err="openshift-apiserver-APIService reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-apiserver-APIService\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-apiserver-APIService&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.021536398+00:00 stderr F I1212 16:18:55.021449 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:55.022625945+00:00 stderr F E1212 16:18:55.022580 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Put \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/authentication/status\": dial tcp 10.217.4.1:443: connect: connection refused" 
2025-12-12T16:18:55.877572402+00:00 stderr F E1212 16:18:55.876877 1 base_controller.go:279] "Unhandled Error" err="openshift-authentication-ServiceCA reconciliation failed: unable to ApplyStatus for operator using fieldManager \"openshift-authentication-ServiceCA\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/authentications/cluster/status?fieldManager=openshift-authentication-ServiceCA&force=true\": dial tcp 10.217.4.1:443: connect: connection refused"
2025-12-12T16:18:56.070016419+00:00 stderr F E1212 16:18:56.069949 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF"
2025-12-12T16:18:56.397451555+00:00 stderr F E1212 16:18:56.397385 1 base_controller.go:279] "Unhandled Error" err="NamespaceFinalizerController_openshift-oauth-apiserver reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver\": dial tcp 10.217.4.1:443: connect: connection refused"
2025-12-12T16:18:56.896307728+00:00 stderr F E1212 16:18:56.895704 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: Get \"https://10.217.5.136:443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
2025-12-12T16:18:58.658729669+00:00 stderr F I1212 16:18:58.657948 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice"
2025-12-12T16:19:02.107564393+00:00 stderr F E1212 16:19:02.106912 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: Get \"https://10.217.5.136:443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
2025-12-12T16:19:02.698775619+00:00 stderr F E1212 16:19:02.698300 1 base_controller.go:279] "Unhandled Error" err="OAuthServerRouteEndpointAccessibleController reconciliation failed: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF"
2025-12-12T16:19:06.728963887+00:00 stderr F I1212 16:19:06.728601 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, 
CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:06.737091877+00:00 stderr F I1212 16:19:06.736749 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Available message changed from "OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF" to "OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:19:07.483341907+00:00 stderr F E1212 16:19:07.482948 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointAccessibleController reconciliation failed: Get \"https://10.217.5.136:443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" 2025-12-12T16:19:12.420800104+00:00 stderr F I1212 16:19:12.420057 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:24.673897243+00:00 stderr F I1212 16:19:24.673332 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:24.752887536+00:00 stderr F I1212 16:19:24.752796 1 
status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:24.759061878+00:00 stderr F E1212 16:19:24.758700 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:24.766075852+00:00 stderr F I1212 16:19:24.766008 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: 
observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:24.772844489+00:00 stderr F E1212 16:19:24.772773 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:24.775084404+00:00 stderr F I1212 16:19:24.775054 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", 
SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:24.780150410+00:00 stderr F E1212 16:19:24.780056 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:24.786382654+00:00 stderr F I1212 16:19:24.786272 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), 
FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:24.793065249+00:00 stderr F E1212 16:19:24.792995 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:24.835442557+00:00 stderr F I1212 16:19:24.835381 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:24.839271551+00:00 stderr F E1212 16:19:24.839240 1 base_controller.go:279] 
"Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:24.921604977+00:00 stderr F I1212 16:19:24.921538 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:24.925908843+00:00 stderr F E1212 16:19:24.925878 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:25.089659101+00:00 stderr F I1212 16:19:25.089590 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of 
oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:25.096455509+00:00 stderr F E1212 16:19:25.096414 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:25.419635269+00:00 stderr F I1212 16:19:25.419020 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get 
\"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:25.429326529+00:00 stderr F E1212 16:19:25.427477 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:25.463772110+00:00 stderr F I1212 16:19:25.463697 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:25.470777624+00:00 stderr F I1212 16:19:25.470710 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, 
CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:25.475089010+00:00 stderr F E1212 16:19:25.475036 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:26.071930156+00:00 stderr F I1212 16:19:26.071451 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} 
(check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:26.078699596+00:00 stderr F E1212 16:19:26.078646 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:26.893800730+00:00 stderr F E1212 16:19:26.893128 1 base_controller.go:279] "Unhandled Error" err="WellKnownReadyController reconciliation failed: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)" 2025-12-12T16:19:28.455872791+00:00 stderr F I1212 16:19:28.455224 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:28.642156888+00:00 stderr F I1212 16:19:28.641702 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", 
UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:28.649593905+00:00 stderr F E1212 16:19:28.649541 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:29.344960865+00:00 stderr F I1212 16:19:29.344604 1 reflector.go:430] "Caches populated" type="*v1.APIService" reflector="k8s.io/kube-aggregator/pkg/client/informers/externalversions/factory.go:141" 2025-12-12T16:19:30.022416384+00:00 stderr F I1212 16:19:30.022349 1 reflector.go:430] "Caches populated" type="*v1.Ingress" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:19:30.033718358+00:00 stderr F I1212 16:19:30.033657 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, 
DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:30.040238832+00:00 stderr F E1212 16:19:30.040136 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:32.295684331+00:00 stderr F E1212 16:19:32.295595 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:19:33.773170978+00:00 stderr F I1212 16:19:33.772372 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), 
FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:33.778848231+00:00 stderr F E1212 16:19:33.778786 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:35.238240863+00:00 stderr F I1212 16:19:35.238118 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:35.937632023+00:00 stderr F I1212 16:19:35.937532 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:36.158702274+00:00 stderr F I1212 16:19:36.158605 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:36.158914869+00:00 stderr F I1212 16:19:36.158882 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:36.163101775+00:00 stderr F I1212 16:19:36.161381 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:36.572870813+00:00 stderr F I1212 16:19:36.572752 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:37.570129702+00:00 stderr F I1212 16:19:37.570064 1 reflector.go:430] "Caches populated" type="*v1.CertificateSigningRequest" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:39.575804290+00:00 stderr F I1212 16:19:39.575290 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:19:39.803767794+00:00 stderr F I1212 16:19:39.803697 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:39.893285621+00:00 stderr F I1212 16:19:39.893219 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:40.045903843+00:00 stderr F I1212 16:19:40.045805 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:44.833128130+00:00 stderr F I1212 16:19:44.832551 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": 
EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:44.837409267+00:00 stderr F E1212 16:19:44.837345 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:45.205227342+00:00 stderr F I1212 16:19:45.205154 1 reflector.go:430] "Caches populated" type="*v1.KubeAPIServer" reflector="github.com/openshift/client-go/operator/informers/externalversions/factory.go:125" 2025-12-12T16:19:47.265236705+00:00 stderr F I1212 16:19:47.264662 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:48.781120785+00:00 stderr F I1212 16:19:48.780374 1 reflector.go:430] "Caches populated" type="*v1.Console" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:19:49.513498794+00:00 stderr F I1212 16:19:49.513433 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:209" 2025-12-12T16:19:53.282058704+00:00 stderr F E1212 16:19:53.281980 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:19:53.938654110+00:00 stderr F I1212 16:19:53.938600 1 reflector.go:430] 
"Caches populated" type="*v1.DaemonSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:54.262365638+00:00 stderr F I1212 16:19:54.261995 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:54.267624070+00:00 stderr F E1212 16:19:54.267510 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:54.649131509+00:00 stderr F I1212 16:19:54.649066 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:55.813963815+00:00 stderr F I1212 16:19:55.812759 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:55.828717415+00:00 stderr F I1212 16:19:55.827603 1 status_controller.go:230] clusteroperator/authentication diff 
{"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nIngressStateEndpointsDegraded: No subsets found for the endpoints of oauth-server\nOAuthServerRouteEndpointAccessibleControllerDegraded: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:18:00Z","message":"OAuthServerDeploymentProgressing: deployment/oauth-openshift.openshift-authentication: observed generation is 5, desired generation is 6.","reason":"OAuthServerDeployment_NewGeneration::WellKnownReadyController","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerRouteEndpointAccessibleControllerAvailable: Get \"https://oauth-openshift.apps-crc.testing/healthz\": EOF\nWellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerRouteEndpointAccessibleController_EndpointUnavailable::WellKnown_NotReady","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:19:55.835319411+00:00 stderr F E1212 16:19:55.835258 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:19:56.702439643+00:00 stderr F I1212 16:19:56.702369 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:57.222639874+00:00 stderr F I1212 16:19:57.222530 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:58.413304050+00:00 stderr F I1212 16:19:58.413216 1 request.go:752] "Waited before sending request" delay="1.112582995s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/endpoints/oauth-openshift" 2025-12-12T16:19:58.415925515+00:00 stderr F I1212 16:19:58.415869 1 warnings.go:110] "Warning: v1 Endpoints is 
deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:58.705953837+00:00 stderr F I1212 16:19:58.705837 1 reflector.go:430] "Caches populated" type="*v1.IngressController" reflector="github.com/openshift/client-go/operator/informers/externalversions/factory.go:125" 2025-12-12T16:19:59.613341149+00:00 stderr F I1212 16:19:59.613258 1 request.go:752] "Waited before sending request" delay="1.197183498s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-6567f5ffdb-jrpfr" 2025-12-12T16:19:59.816038069+00:00 stderr F I1212 16:19:59.815675 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:00.224232818+00:00 stderr F I1212 16:20:00.223821 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:00.765594450+00:00 stderr F I1212 16:20:00.765517 1 reflector.go:430] "Caches populated" type="*v1.OAuth" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:01.215404024+00:00 stderr F I1212 16:20:01.214684 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:03.092533934+00:00 stderr F I1212 16:20:03.092446 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:04.177598058+00:00 stderr F I1212 16:20:04.177509 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=authentications" reflector="k8s.io/client-go/dynamic/dynamicinformer/informer.go:108" 2025-12-12T16:20:04.183588329+00:00 stderr F I1212 16:20:04.181765 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:20:04Z","message":"OAuthServerServiceEndpointAccessibleControllerDegraded: Get \"https://10.217.5.136:443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)\nOAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nWellKnownReadyControllerDegraded: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys 
correctly)","reason":"OAuthServerServiceEndpointAccessibleController_SyncError::OAuthServerServiceEndpointsEndpointAccessibleController_SyncError::WellKnownReadyController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:04Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:18:01Z","message":"OAuthServerServiceEndpointAccessibleControllerAvailable: Get \"https://10.217.5.136:443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)","reason":"OAuthServerServiceEndpointAccessibleController_EndpointUnavailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:04.183588329+00:00 stderr F E1212 16:20:04.181811 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:20:04.193556249+00:00 stderr F E1212 16:20:04.193488 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:04.227001219+00:00 stderr F I1212 16:20:04.226916 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:20:04Z","message":"OAuthServerServiceEndpointAccessibleControllerDegraded: Get \"https://10.217.5.136:443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)\nOAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nWellKnownReadyControllerDegraded: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerServiceEndpointAccessibleController_SyncError::OAuthServerServiceEndpointsEndpointAccessibleController_SyncError::WellKnownReadyController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:04Z","message":"AuthenticatorCertKeyProgressing: All is 
well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:20:04Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:04.231717297+00:00 stderr F E1212 16:20:04.231672 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:20:04.235154013+00:00 stderr F E1212 16:20:04.235108 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:04.255930925+00:00 stderr F I1212 16:20:04.255864 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:20:04Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nWellKnownReadyControllerDegraded: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerServiceEndpointsEndpointAccessibleController_SyncError::WellKnownReadyController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:04Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:20:04Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:04.266061729+00:00 stderr F E1212 16:20:04.265411 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:04.266061729+00:00 stderr F E1212 16:20:04.265780 1 
base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:20:05.379261780+00:00 stderr F I1212 16:20:05.378933 1 request.go:752] "Waited before sending request" delay="1.101563328s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift" 2025-12-12T16:20:05.587166440+00:00 stderr F I1212 16:20:05.585508 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:20:05Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nWellKnownReadyControllerDegraded: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerServiceEndpointsEndpointAccessibleController_SyncError::WellKnownReadyController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:05Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:20:05Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:05.591170951+00:00 stderr F E1212 16:20:05.591094 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:06.579127756+00:00 stderr F I1212 16:20:06.578598 1 request.go:752] "Waited before sending request" delay="1.195409964s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift" 2025-12-12T16:20:07.073211771+00:00 stderr F I1212 16:20:07.073086 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:07.080892514+00:00 stderr F I1212 16:20:07.080820 1 status_controller.go:230] 
clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:20:07Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nWellKnownReadyControllerDegraded: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerServiceEndpointsEndpointAccessibleController_SyncError::WellKnownReadyController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:07Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:20:07Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:07.087325255+00:00 stderr F E1212 16:20:07.087212 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:07.578852217+00:00 stderr F I1212 16:20:07.578697 1 request.go:752] "Waited before sending request" delay="1.392665986s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs" 2025-12-12T16:20:07.984304467+00:00 stderr F I1212 16:20:07.984156 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:08.586571749+00:00 stderr F I1212 16:20:08.586491 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:08.589588515+00:00 stderr F E1212 16:20:08.588564 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:20:08.592284942+00:00 stderr F E1212 16:20:08.592247 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:20:08.779157914+00:00 stderr F I1212 16:20:08.779042 1 
request.go:752] "Waited before sending request" delay="1.442135859s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/secrets?resourceVersion=38741" 2025-12-12T16:20:08.783805661+00:00 stderr F I1212 16:20:08.783738 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:09.585427878+00:00 stderr F I1212 16:20:09.585357 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:20:09Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nWellKnownReadyControllerDegraded: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerServiceEndpointsEndpointAccessibleController_SyncError::WellKnownReadyController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:09Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:20:09Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:09.590466635+00:00 stderr F E1212 16:20:09.590277 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:09.779658835+00:00 stderr F I1212 16:20:09.779592 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:09.779718336+00:00 stderr F I1212 16:20:09.779703 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:09.781479571+00:00 stderr F I1212 16:20:09.781455 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:09.978726882+00:00 stderr F I1212 16:20:09.978630 1 request.go:752] "Waited before sending request" delay="1.391762684s" reason="client-side throttling, not priority 
and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/configmaps/v4-0-config-system-service-ca" 2025-12-12T16:20:11.986051822+00:00 stderr F I1212 16:20:11.985968 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:20:11Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready\nWellKnownReadyControllerDegraded: failed to get API server IPs: unable to find kube api server endpointLister port: \u0026v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:\"\", APIVersion:\"\"}, ObjectMeta:v1.ObjectMeta{Name:\"kubernetes\", GenerateName:\"\", Namespace:\"default\", SelfLink:\"\", UID:\"9f2c22a8-5026-4436-90a9-f4bd3fd80926\", ResourceVersion:\"39127\", Generation:0, CreationTimestamp:time.Date(2025, time.November, 2, 7, 39, 5, 0, time.Local), DeletionTimestamp:\u003cnil\u003e, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{\"endpointslice.kubernetes.io/skip-mirror\":\"true\"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:\"kube-apiserver\", Operation:\"Update\", APIVersion:\"v1\", Time:time.Date(2025, time.November, 3, 8, 57, 18, 0, time.Local), FieldsType:\"FieldsV1\", FieldsV1:(*v1.FieldsV1)(0xc002191a10), Subresource:\"\"}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)","reason":"OAuthServerServiceEndpointsEndpointAccessibleController_SyncError::WellKnownReadyController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:11Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:20:11Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:11.992578526+00:00 stderr F E1212 16:20:11.992519 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:14.179048284+00:00 stderr F I1212 16:20:14.178453 1 request.go:752] "Waited before sending request" delay="1.051054469s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift" 2025-12-12T16:20:14.593440939+00:00 stderr F I1212 16:20:14.593356 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:14.655738343+00:00 stderr F I1212 16:20:14.655680 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:20:14Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not 
ready","reason":"OAuthServerServiceEndpointsEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:14Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:20:14Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:14.656296137+00:00 stderr F E1212 16:20:14.656257 1 base_controller.go:279] "Unhandled Error" err="OAuthServerServiceEndpointsEndpointAccessibleController reconciliation failed: oauth service endpoints are not ready" 2025-12-12T16:20:14.661449906+00:00 stderr F E1212 16:20:14.661396 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_authentication reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"authentication\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:14.833743342+00:00 stderr F I1212 16:20:14.833642 1 reflector.go:430] "Caches populated" type="*v1.Authentication" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:15.778791681+00:00 stderr F I1212 16:20:15.778702 1 request.go:752] "Waited before sending request" delay="1.1243331s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/configmaps/audit" 2025-12-12T16:20:15.805238195+00:00 stderr F I1212 16:20:15.805159 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:209" 2025-12-12T16:20:16.377042332+00:00 stderr F I1212 16:20:16.376958 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:16.378468778+00:00 stderr F I1212 16:20:16.378405 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:20:16Z","message":"OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready","reason":"OAuthServerServiceEndpointsEndpointAccessibleController_SyncError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:16Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:20:16Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:16.387006212+00:00 stderr F I1212 16:20:16.386929 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Degraded 
changed from False to True ("OAuthServerServiceEndpointsEndpointAccessibleControllerDegraded: oauth service endpoints are not ready"),Progressing changed from True to False ("AuthenticatorCertKeyProgressing: All is well"),Available changed from False to True ("All is well") 2025-12-12T16:20:16.979022505+00:00 stderr F I1212 16:20:16.978962 1 request.go:752] "Waited before sending request" delay="2.114291795s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/secrets?resourceVersion=38741" 2025-12-12T16:20:16.981423336+00:00 stderr F I1212 16:20:16.981344 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:18.178362238+00:00 stderr F I1212 16:20:18.178272 1 request.go:752] "Waited before sending request" delay="1.993986644s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/configmaps/v4-0-config-system-service-ca" 2025-12-12T16:20:18.385032368+00:00 stderr F I1212 16:20:18.384956 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:19.178983933+00:00 stderr F I1212 16:20:19.178881 1 request.go:752] "Waited before sending request" delay="2.391157306s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/configmaps/v4-0-config-system-metadata" 2025-12-12T16:20:20.367811782+00:00 stderr F I1212 16:20:20.367274 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:20.379237949+00:00 stderr F I1212 16:20:20.379112 1 request.go:752] "Waited before sending request" delay="2.39442372s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs" 2025-12-12T16:20:20.980011432+00:00 stderr F I1212 16:20:20.979931 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:21.578819177+00:00 stderr F I1212 16:20:21.578701 1 request.go:752] "Waited before sending request" delay="1.99577059s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/services/oauth-openshift" 2025-12-12T16:20:21.780400988+00:00 stderr F I1212 16:20:21.780254 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:21.780531241+00:00 stderr F I1212 16:20:21.780494 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:21.782156242+00:00 stderr F I1212 16:20:21.782071 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:22.191553051+00:00 stderr F I1212 16:20:22.191473 1 reflector.go:430] "Caches populated" type="*v1alpha1.StorageVersionMigration" reflector="sigs.k8s.io/kube-storage-version-migrator/pkg/clients/informer/factory.go:132" 2025-12-12T16:20:22.578968249+00:00 stderr F I1212 16:20:22.578879 1 request.go:752] "Waited before sending request" delay="2.130548093s" reason="client-side throttling, not priority and fairness" verb="GET" 
URL="https://10.217.4.1:443/api/v1/namespaces/default/configmaps?resourceVersion=38860" 2025-12-12T16:20:22.580929478+00:00 stderr F I1212 16:20:22.580884 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:22.781523125+00:00 stderr F I1212 16:20:22.781441 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:23.167910616+00:00 stderr F I1212 16:20:23.167817 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:23.181796265+00:00 stderr F I1212 16:20:23.181713 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:23.978134679+00:00 stderr F I1212 16:20:23.978024 1 request.go:752] "Waited before sending request" delay="1.194224245s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-config-managed/secrets/router-certs" 2025-12-12T16:20:24.379921317+00:00 stderr F I1212 16:20:24.379837 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:24.379974228+00:00 stderr F I1212 16:20:24.379958 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:24.380875751+00:00 stderr F I1212 16:20:24.380843 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:24.408725880+00:00 stderr F I1212 16:20:24.408621 1 reflector.go:430] "Caches populated" type="*v1.APIServer" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:24.420331401+00:00 stderr F I1212 16:20:24.420268 1 status_controller.go:230] clusteroperator/authentication diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:20:24Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:16Z","message":"AuthenticatorCertKeyProgressing: All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:20:16Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:58Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:58Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:24.433537773+00:00 stderr F I1212 16:20:24.433350 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-authentication-operator", Name:"authentication-operator", UID:"", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/authentication changed: Degraded changed from True to False ("All is well") 2025-12-12T16:20:24.582811221+00:00 stderr F I1212 16:20:24.582758 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:24.978660000+00:00 stderr F I1212 16:20:24.978559 1 request.go:752] "Waited before sending request" delay="1.395073257s" reason="client-side throttling, not priority and fairness" verb="GET" 
URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/configmaps/trusted-ca-bundle" 2025-12-12T16:20:25.978789681+00:00 stderr F I1212 16:20:25.978691 1 request.go:752] "Waited before sending request" delay="1.550670514s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/configmaps/audit" 2025-12-12T16:20:27.178942095+00:00 stderr F I1212 16:20:27.178401 1 request.go:752] "Waited before sending request" delay="1.589730786s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication" 2025-12-12T16:20:27.781976294+00:00 stderr F I1212 16:20:27.781646 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:20:28.285925527+00:00 stderr F I1212 16:20:28.285853 1 request.go:752] "Waited before sending request" delay="1.504704429s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-oauth-apiserver/configmaps/etcd-serving-ca" 2025-12-12T16:20:28.780922436+00:00 stderr F I1212 16:20:28.780835 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:29.378923571+00:00 stderr F I1212 16:20:29.378842 1 request.go:752] "Waited before sending request" delay="1.397792866s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-authentication/secrets/v4-0-config-system-session" 2025-12-12T16:20:29.781613512+00:00 stderr F I1212 16:20:29.781486 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:30.180992747+00:00 stderr F I1212 16:20:30.180917 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:31.841942183+00:00 stderr F I1212 16:20:31.840962 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/apiserver/pkg/authentication/request/headerrequest/requestheader_controller.go:183" 2025-12-12T16:20:33.012766533+00:00 stderr F I1212 16:20:33.012140 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:35.131334303+00:00 stderr F I1212 16:20:35.126978 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:21:31.642309596+00:00 stderr F I1212 16:21:31.641528 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:23:05.983371060+00:00 stderr F I1212 16:23:05.982380 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:24:40.321989532+00:00 stderr F I1212 16:24:40.320839 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:26:14.659881410+00:00 stderr F I1212 16:26:14.658811 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:27:18.167318878+00:00 stderr F I1212 16:27:18.166141 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:27:48.997613241+00:00 stderr F I1212 
16:27:48.996924 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:28:10.785646496+00:00 stderr F I1212 16:28:10.785203 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:28:22.785452985+00:00 stderr F I1212 16:28:22.784833 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:29:23.335767254+00:00 stderr F I1212 16:29:23.334610 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:29:56.383540596+00:00 stderr F I1212 16:29:56.382322 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:30:23.185238021+00:00 stderr F I1212 16:30:23.184664 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:30:24.383056175+00:00 stderr F I1212 16:30:24.382993 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:30:57.673691735+00:00 stderr F I1212 16:30:57.673062 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:32:32.013492750+00:00 stderr F I1212 16:32:32.012556 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:33:34.789094962+00:00 stderr F I1212 16:33:34.787341 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:34:06.351558029+00:00 stderr F I1212 16:34:06.350501 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:34:26.171244067+00:00 stderr F I1212 16:34:26.170611 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:35:40.690002609+00:00 stderr F I1212 16:35:40.689509 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:35:45.386072638+00:00 stderr F I1212 16:35:45.385629 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:36:36.790079761+00:00 stderr F I1212 16:36:36.788904 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:37:15.031845706+00:00 stderr F I1212 16:37:15.030727 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:38:49.374284446+00:00 stderr F I1212 16:38:49.371551 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:38:53.792368322+00:00 stderr F I1212 16:38:53.791813 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:40:23.187286176+00:00 stderr F I1212 16:40:23.186281 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:40:23.705048495+00:00 stderr F I1212 16:40:23.704966 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:40:24.390602820+00:00 stderr F I1212 16:40:24.390509 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use 
discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:41:58.045009862+00:00 stderr F I1212 16:41:58.044404 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:43:09.790943796+00:00 stderr F I1212 16:43:09.790408 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:43:13.173863884+00:00 stderr F I1212 16:43:13.172903 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice"

[log file: home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console-operator_console-operator-67c89758df-5tw72_65efae24-6623-454c-b665-e5e407e86269/console-operator/0.log]

2025-12-12T16:16:45.215763364+00:00 stderr F I1212 16:16:45.214128 1 cmd.go:253] Using service-serving-cert provided certificates 2025-12-12T16:16:45.216371499+00:00 stderr F I1212 16:16:45.216041 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 2025-12-12T16:16:45.216741928+00:00 stderr F I1212 16:16:45.216686 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:45.283231772+00:00 stderr F I1212 16:16:45.283150 1 builder.go:304] console-operator version - 2025-12-12T16:16:45.884880099+00:00 stderr F I1212 16:16:45.879482 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:45.884880099+00:00 stderr F W1212 16:16:45.880417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:45.884880099+00:00 stderr F W1212 16:16:45.880425 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:45.884880099+00:00 stderr F W1212 16:16:45.880429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:45.884880099+00:00 stderr F W1212 16:16:45.880434 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:45.884880099+00:00 stderr F W1212 16:16:45.880438 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 
2025-12-12T16:16:45.884880099+00:00 stderr F W1212 16:16:45.880440 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 2025-12-12T16:16:45.891168723+00:00 stderr F I1212 16:16:45.889719 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:16:45.891168723+00:00 stderr F I1212 16:16:45.890901 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:45.891168723+00:00 stderr F I1212 16:16:45.891126 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:45.892249319+00:00 stderr F I1212 16:16:45.891638 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:45.892249319+00:00 stderr F I1212 16:16:45.891983 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:45.894347001+00:00 stderr F I1212 16:16:45.892377 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:45.894347001+00:00 stderr F I1212 16:16:45.892725 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:45.894347001+00:00 stderr F I1212 16:16:45.893269 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:45.894347001+00:00 stderr F I1212 16:16:45.893292 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:45.894347001+00:00 stderr F I1212 16:16:45.893356 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:45.894347001+00:00 stderr F I1212 16:16:45.893990 1 leaderelection.go:257] attempting to acquire leader lease openshift-console-operator/console-operator-lock... 
2025-12-12T16:16:45.908279501+00:00 stderr F I1212 16:16:45.906601 1 leaderelection.go:271] successfully acquired lease openshift-console-operator/console-operator-lock 2025-12-12T16:16:45.912727899+00:00 stderr F I1212 16:16:45.910821 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-console-operator", Name:"console-operator-lock", UID:"0b5755b3-9bd3-4795-81b3-5aa49eabdd90", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"37165", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' console-operator-67c89758df-5tw72_5d5c0bfa-e79a-4742-aabc-15153d43525f became leader 2025-12-12T16:16:45.956336584+00:00 stderr F I1212 16:16:45.952246 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:46.007235697+00:00 stderr F I1212 16:16:46.006227 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:46.007235697+00:00 stderr F I1212 16:16:46.006489 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:46.017256321+00:00 stderr F I1212 16:16:46.013330 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:46.017256321+00:00 stderr F I1212 16:16:46.013543 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-console-operator", Name:"console-operator", UID:"4982b9f1-eaf4-44fa-a84a-bf9954aedcb1", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", 
"MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:46.017256321+00:00 stderr F I1212 16:16:46.013592 1 starter.go:212] FeatureGates initialized: knownFeatureGates=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:16:46.053663040+00:00 stderr F I1212 16:16:46.053609 1 base_controller.go:76] Waiting for caches to sync for InformerWithSwitchController 2025-12-12T16:16:46.053663040+00:00 stderr F I1212 16:16:46.053630 1 base_controller.go:82] Caches are synced for 
InformerWithSwitchController 2025-12-12T16:16:46.053663040+00:00 stderr F I1212 16:16:46.053639 1 base_controller.go:119] Starting #1 worker of InformerWithSwitchController controller ... 2025-12-12T16:16:46.054239554+00:00 stderr F I1212 16:16:46.053974 1 base_controller.go:76] Waiting for caches to sync for ConsoleCLIDownloadsController 2025-12-12T16:16:46.054239554+00:00 stderr F I1212 16:16:46.054023 1 base_controller.go:76] Waiting for caches to sync for ConsoleDownloadsDeploymentSyncController 2025-12-12T16:16:46.054239554+00:00 stderr F I1212 16:16:46.054061 1 base_controller.go:76] Waiting for caches to sync for HealthCheckController 2025-12-12T16:16:46.054239554+00:00 stderr F I1212 16:16:46.054080 1 base_controller.go:76] Waiting for caches to sync for PodDisruptionBudgetController 2025-12-12T16:16:46.054239554+00:00 stderr F I1212 16:16:46.054099 1 base_controller.go:76] Waiting for caches to sync for PodDisruptionBudgetController 2025-12-12T16:16:46.054239554+00:00 stderr F I1212 16:16:46.054118 1 base_controller.go:76] Waiting for caches to sync for OAuthClientsController 2025-12-12T16:16:46.054239554+00:00 stderr F I1212 16:16:46.054128 1 base_controller.go:76] Waiting for caches to sync for console 2025-12-12T16:16:46.054239554+00:00 stderr F I1212 16:16:46.054132 1 base_controller.go:76] Waiting for caches to sync for OAuthClientSecretController 2025-12-12T16:16:46.054239554+00:00 stderr F I1212 16:16:46.054146 1 base_controller.go:76] Waiting for caches to sync for OIDCSetupController 2025-12-12T16:16:46.054239554+00:00 stderr F I1212 16:16:46.054158 1 base_controller.go:76] Waiting for caches to sync for CLIOIDCClientStatusController 2025-12-12T16:16:46.054239554+00:00 stderr F I1212 16:16:46.054164 1 base_controller.go:76] Waiting for caches to sync for RemoveStaleConditionsController-RemoveStaleConditions 2025-12-12T16:16:46.055199718+00:00 stderr F I1212 16:16:46.054282 1 base_controller.go:76] Waiting for caches to sync for ClusterUpgradeNotificationController 2025-12-12T16:16:46.055199718+00:00 stderr F I1212 16:16:46.054296 1 base_controller.go:82] Caches are synced for ClusterUpgradeNotificationController 2025-12-12T16:16:46.055199718+00:00 stderr F I1212 16:16:46.054300 1 base_controller.go:119] Starting #1 worker of ClusterUpgradeNotificationController controller ... 
2025-12-12T16:16:46.055199718+00:00 stderr F I1212 16:16:46.054399 1 base_controller.go:76] Waiting for caches to sync for StatusSyncer_console
2025-12-12T16:16:46.055199718+00:00 stderr F I1212 16:16:46.054416 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer
2025-12-12T16:16:46.055199718+00:00 stderr F I1212 16:16:46.054429 1 base_controller.go:76] Waiting for caches to sync for console-ManagementState
2025-12-12T16:16:46.055199718+00:00 stderr F I1212 16:16:46.054441 1 base_controller.go:76] Waiting for caches to sync for UnsupportedConfigOverridesController-UnsupportedConfigOverrides
2025-12-12T16:16:46.055199718+00:00 stderr F I1212 16:16:46.054452 1 base_controller.go:76] Waiting for caches to sync for ConsoleServiceController
2025-12-12T16:16:46.055199718+00:00 stderr F I1212 16:16:46.054463 1 base_controller.go:76] Waiting for caches to sync for ConsoleRouteController
2025-12-12T16:16:46.055199718+00:00 stderr F I1212 16:16:46.054593 1 base_controller.go:76] Waiting for caches to sync for ConsoleServiceController
2025-12-12T16:16:46.055199718+00:00 stderr F I1212 16:16:46.054611 1 base_controller.go:76] Waiting for caches to sync for DownloadsRouteController
2025-12-12T16:16:46.055199718+00:00 stderr F I1212 16:16:46.054622 1 base_controller.go:76] Waiting for caches to sync for ConsoleOperator
2025-12-12T16:16:46.057196406+00:00 stderr F E1212 16:16:46.056310 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: console.operator.openshift.io \"cluster\" not found"
2025-12-12T16:16:46.079231734+00:00 stderr F E1212 16:16:46.075288 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: console.operator.openshift.io \"cluster\" not found"
2025-12-12T16:16:46.093324279+00:00 stderr F E1212 16:16:46.085344 1 reflector.go:200] "Failed to watch" err="failed to list *v1.OAuthClient: the server is currently unable to handle the request (get oauthclients.oauth.openshift.io)" reflector="github.com/openshift/console-operator/pkg/console/controllers/util/informers.go:106" type="*v1.OAuthClient"
2025-12-12T16:16:46.093324279+00:00 stderr F E1212 16:16:46.086559 1 reflector.go:200] "Failed to watch" err="failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io)" reflector="github.com/openshift/client-go/route/informers/externalversions/factory.go:125" type="*v1.Route"
2025-12-12T16:16:46.093324279+00:00 stderr F E1212 16:16:46.088255 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: console.operator.openshift.io \"cluster\" not found"
2025-12-12T16:16:46.174264105+00:00 stderr F I1212 16:16:46.172506 1 base_controller.go:82] Caches are synced for LoggingSyncer
2025-12-12T16:16:46.174264105+00:00 stderr F I1212 16:16:46.172531 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ...
2025-12-12T16:16:46.174264105+00:00 stderr F I1212 16:16:46.172943 1 reflector.go:430] "Caches populated" type="*v1.ConsolePlugin" reflector="github.com/openshift/client-go/console/informers/externalversions/factory.go:125"
2025-12-12T16:16:46.174264105+00:00 stderr F I1212 16:16:46.173167 1 base_controller.go:82] Caches are synced for console-ManagementState
2025-12-12T16:16:46.174264105+00:00 stderr F I1212 16:16:46.173173 1 base_controller.go:119] Starting #1 worker of console-ManagementState controller ...
2025-12-12T16:16:46.174264105+00:00 stderr F I1212 16:16:46.173218 1 base_controller.go:82] Caches are synced for ConsoleServiceController
2025-12-12T16:16:46.174264105+00:00 stderr F I1212 16:16:46.173222 1 base_controller.go:119] Starting #1 worker of ConsoleServiceController controller ...
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.174489 1 base_controller.go:82] Caches are synced for ConsoleServiceController
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.174534 1 base_controller.go:119] Starting #1 worker of ConsoleServiceController controller ...
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.175301 1 base_controller.go:82] Caches are synced for CLIOIDCClientStatusController
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.175322 1 base_controller.go:119] Starting #1 worker of CLIOIDCClientStatusController controller ...
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.175355 1 base_controller.go:82] Caches are synced for PodDisruptionBudgetController
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.175360 1 base_controller.go:119] Starting #1 worker of PodDisruptionBudgetController controller ...
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.175454 1 base_controller.go:82] Caches are synced for OAuthClientSecretController
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.175475 1 base_controller.go:119] Starting #1 worker of OAuthClientSecretController controller ...
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.175901 1 base_controller.go:82] Caches are synced for OIDCSetupController
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.175908 1 base_controller.go:119] Starting #1 worker of OIDCSetupController controller ...
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.175918 1 base_controller.go:82] Caches are synced for StatusSyncer_console
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.175922 1 base_controller.go:119] Starting #1 worker of StatusSyncer_console controller ...
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.176456 1 base_controller.go:82] Caches are synced for ConsoleDownloadsDeploymentSyncController
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.176467 1 base_controller.go:119] Starting #1 worker of ConsoleDownloadsDeploymentSyncController controller ...
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.176491 1 base_controller.go:82] Caches are synced for RemoveStaleConditionsController-RemoveStaleConditions
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.176503 1 base_controller.go:82] Caches are synced for UnsupportedConfigOverridesController-UnsupportedConfigOverrides
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.176506 1 base_controller.go:119] Starting #1 worker of RemoveStaleConditionsController-RemoveStaleConditions controller ...
2025-12-12T16:16:46.178231041+00:00 stderr F I1212 16:16:46.176514 1 base_controller.go:119] Starting #1 worker of UnsupportedConfigOverridesController-UnsupportedConfigOverrides controller ...
2025-12-12T16:16:46.188733658+00:00 stderr F I1212 16:16:46.188043 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:46.197204745+00:00 stderr F I1212 16:16:46.196465 1 status_controller.go:230] clusteroperator/console diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:46Z","message":"ConsoleCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io console-custom)\nDownloadsCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io downloads-custom)\nOAuthClientSyncDegraded: the server is currently unable to handle the request (get oauthclients.oauth.openshift.io console)\nOAuthClientsControllerDegraded: the server is currently unable to handle the request (get oauthclients.oauth.openshift.io console)\nRouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465-\u003e10.217.4.10:53: read: connection refused","reason":"ConsoleCustomRouteSync_FailedDeleteCustomRoutes::DownloadsCustomRouteSync_FailedDeleteCustomRoutes::OAuthClientSync_FailedRegister::OAuthClientsController_SyncError::RouteHealth_FailedGet","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"SyncLoopRefreshProgressing: working toward version 4.20.1, 0 replicas available","reason":"SyncLoopRefresh_InProgress","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"DeploymentAvailable: 0 replicas available for console deployment\nRouteHealthAvailable: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465-\u003e10.217.4.10:53: read: connection refused","reason":"Deployment_InsufficientReplicas::RouteHealth_FailedGet","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-03T09:40:49Z","message":"ConsoleCustomRouteSyncUpgradeable: the server is currently unable to handle the request (delete routes.route.openshift.io console-custom)\nDownloadsCustomRouteSyncUpgradeable: the server is currently unable to handle the request (delete routes.route.openshift.io downloads-custom)","reason":"ConsoleCustomRouteSync_FailedDeleteCustomRoutes::DownloadsCustomRouteSync_FailedDeleteCustomRoutes","status":"False","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:16Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:46.212262492+00:00 stderr F I1212 16:16:46.212029 1 base_controller.go:82] Caches are synced for PodDisruptionBudgetController 2025-12-12T16:16:46.216053395+00:00 stderr F I1212 16:16:46.214386 1 base_controller.go:119] Starting #1 worker of PodDisruptionBudgetController controller ... 
2025-12-12T16:16:46.216053395+00:00 stderr F I1212 16:16:46.215538 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-console-operator", Name:"console-operator", UID:"4982b9f1-eaf4-44fa-a84a-bf9954aedcb1", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/console changed: Degraded changed from False to True ("ConsoleCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io console-custom)\nDownloadsCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io downloads-custom)\nOAuthClientSyncDegraded: the server is currently unable to handle the request (get oauthclients.oauth.openshift.io console)\nOAuthClientsControllerDegraded: the server is currently unable to handle the request (get oauthclients.oauth.openshift.io console)\nRouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465->10.217.4.10:53: read: connection refused") 2025-12-12T16:16:46.265134823+00:00 stderr F I1212 16:16:46.264257 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:46.292215214+00:00 stderr F I1212 16:16:46.291071 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:46.356629387+00:00 stderr F I1212 16:16:46.354200 1 base_controller.go:82] Caches are synced for console 2025-12-12T16:16:46.356629387+00:00 stderr F I1212 16:16:46.354239 1 base_controller.go:119] Starting #1 worker of console controller ... 
2025-12-12T16:16:46.946842167+00:00 stderr F E1212 16:16:46.940695 1 reflector.go:200] "Failed to watch" err="failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io)" reflector="github.com/openshift/client-go/route/informers/externalversions/factory.go:125" type="*v1.Route" 2025-12-12T16:16:47.294970686+00:00 stderr F I1212 16:16:47.294260 1 reflector.go:430] "Caches populated" type="*v1.OAuthClient" reflector="github.com/openshift/console-operator/pkg/console/controllers/util/informers.go:106" 2025-12-12T16:16:49.592701943+00:00 stderr F E1212 16:16:49.590498 1 reflector.go:200] "Failed to watch" err="failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io)" reflector="github.com/openshift/client-go/route/informers/externalversions/factory.go:125" type="*v1.Route" 2025-12-12T16:16:53.674319953+00:00 stderr F E1212 16:16:53.671661 1 reflector.go:200] "Failed to watch" err="failed to list *v1.Route: the server is currently unable to handle the request (get routes.route.openshift.io)" reflector="github.com/openshift/client-go/route/informers/externalversions/factory.go:125" type="*v1.Route" 2025-12-12T16:16:55.904277126+00:00 stderr F I1212 16:16:55.897652 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.897600253 +0000 UTC))" 2025-12-12T16:16:55.904361838+00:00 stderr F I1212 16:16:55.904313 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.904253085 +0000 UTC))" 2025-12-12T16:16:55.904361838+00:00 stderr F I1212 16:16:55.904338 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.904330097 +0000 UTC))" 2025-12-12T16:16:55.904361838+00:00 stderr F I1212 16:16:55.904356 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.904344727 +0000 UTC))" 2025-12-12T16:16:55.904375478+00:00 stderr F I1212 16:16:55.904370 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.904360928 
+0000 UTC))" 2025-12-12T16:16:55.904405339+00:00 stderr F I1212 16:16:55.904388 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.904377168 +0000 UTC))" 2025-12-12T16:16:55.904413319+00:00 stderr F I1212 16:16:55.904402 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.904392728 +0000 UTC))" 2025-12-12T16:16:55.907386192+00:00 stderr F I1212 16:16:55.904442 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.904408399 +0000 UTC))" 2025-12-12T16:16:55.907386192+00:00 stderr F I1212 16:16:55.904461 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.90445246 +0000 UTC))" 2025-12-12T16:16:55.907386192+00:00 stderr F I1212 16:16:55.904483 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.9044705 +0000 UTC))" 2025-12-12T16:16:55.907386192+00:00 stderr F I1212 16:16:55.904714 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-console-operator.svc\" [serving] validServingFor=[metrics.openshift-console-operator.svc,metrics.openshift-console-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:16:55.904700126 +0000 UTC))" 2025-12-12T16:16:55.907386192+00:00 stderr F I1212 16:16:55.904846 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556205\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2028-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:16:55.904836749 +0000 UTC))" 
2025-12-12T16:17:02.061749045+00:00 stderr F I1212 16:17:02.060298 1 reflector.go:430] "Caches populated" type="*v1.Route" reflector="github.com/openshift/client-go/route/informers/externalversions/factory.go:125"
2025-12-12T16:17:02.154885629+00:00 stderr F I1212 16:17:02.154671 1 base_controller.go:82] Caches are synced for OAuthClientsController
2025-12-12T16:17:02.154885629+00:00 stderr F I1212 16:17:02.154702 1 base_controller.go:119] Starting #1 worker of OAuthClientsController controller ...
2025-12-12T16:17:02.154885629+00:00 stderr F I1212 16:17:02.154718 1 base_controller.go:82] Caches are synced for HealthCheckController
2025-12-12T16:17:02.154885629+00:00 stderr F I1212 16:17:02.154752 1 base_controller.go:119] Starting #1 worker of HealthCheckController controller ...
2025-12-12T16:17:02.154885629+00:00 stderr F I1212 16:17:02.154802 1 base_controller.go:82] Caches are synced for ConsoleOperator
2025-12-12T16:17:02.154885629+00:00 stderr F I1212 16:17:02.154818 1 base_controller.go:119] Starting #1 worker of ConsoleOperator controller ...
2025-12-12T16:17:02.154885629+00:00 stderr F I1212 16:17:02.154834 1 base_controller.go:82] Caches are synced for ConsoleCLIDownloadsController
2025-12-12T16:17:02.154885629+00:00 stderr F I1212 16:17:02.154848 1 base_controller.go:119] Starting #1 worker of ConsoleCLIDownloadsController controller ...
2025-12-12T16:17:02.154885629+00:00 stderr F I1212 16:17:02.154852 1 base_controller.go:82] Caches are synced for DownloadsRouteController
2025-12-12T16:17:02.154885629+00:00 stderr F I1212 16:17:02.154859 1 base_controller.go:119] Starting #1 worker of DownloadsRouteController controller ...
2025-12-12T16:17:02.154885629+00:00 stderr F I1212 16:17:02.154816 1 base_controller.go:82] Caches are synced for ConsoleRouteController
2025-12-12T16:17:02.154885629+00:00 stderr F I1212 16:17:02.154878 1 base_controller.go:119] Starting #1 worker of ConsoleRouteController controller ...
2025-12-12T16:17:02.154984931+00:00 stderr F I1212 16:17:02.154929 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-console-operator", Name:"console-operator", UID:"4982b9f1-eaf4-44fa-a84a-bf9954aedcb1", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'FastControllerResync' Controller "HealthCheckController" resync interval is set to 30s which might lead to client request throttling 2025-12-12T16:17:02.184913032+00:00 stderr F E1212 16:17:02.184839 1 status.go:130] SyncLoopRefreshProgressing InProgress working toward version 4.20.1, 0 replicas available 2025-12-12T16:17:02.184913032+00:00 stderr F E1212 16:17:02.184862 1 status.go:130] DeploymentAvailable InsufficientReplicas 0 replicas available for console deployment 2025-12-12T16:17:02.205981067+00:00 stderr F I1212 16:17:02.205899 1 status_controller.go:230] clusteroperator/console diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:46Z","message":"ConsoleCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io console-custom)\nDownloadsCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io downloads-custom)\nOAuthClientsControllerDegraded: the server is currently unable to handle the request (get oauthclients.oauth.openshift.io console)\nRouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465-\u003e10.217.4.10:53: read: connection refused","reason":"ConsoleCustomRouteSync_FailedDeleteCustomRoutes::DownloadsCustomRouteSync_FailedDeleteCustomRoutes::OAuthClientsController_SyncError::RouteHealth_FailedGet","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"SyncLoopRefreshProgressing: working toward version 4.20.1, 0 replicas available","reason":"SyncLoopRefresh_InProgress","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"DeploymentAvailable: 0 replicas available for console deployment\nRouteHealthAvailable: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465-\u003e10.217.4.10:53: read: connection refused","reason":"Deployment_InsufficientReplicas::RouteHealth_FailedGet","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-03T09:40:49Z","message":"ConsoleCustomRouteSyncUpgradeable: the server is currently unable to handle the request (delete routes.route.openshift.io console-custom)\nDownloadsCustomRouteSyncUpgradeable: the server is currently unable to handle the request (delete routes.route.openshift.io downloads-custom)","reason":"ConsoleCustomRouteSync_FailedDeleteCustomRoutes::DownloadsCustomRouteSync_FailedDeleteCustomRoutes","status":"False","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:16Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:02.223242888+00:00 stderr F I1212 16:17:02.221739 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-console-operator", Name:"console-operator", UID:"4982b9f1-eaf4-44fa-a84a-bf9954aedcb1", APIVersion:"apps/v1", 
ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/console changed: Degraded message changed from "ConsoleCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io console-custom)\nDownloadsCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io downloads-custom)\nOAuthClientSyncDegraded: the server is currently unable to handle the request (get oauthclients.oauth.openshift.io console)\nOAuthClientsControllerDegraded: the server is currently unable to handle the request (get oauthclients.oauth.openshift.io console)\nRouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465->10.217.4.10:53: read: connection refused" to "ConsoleCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io console-custom)\nDownloadsCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io downloads-custom)\nOAuthClientsControllerDegraded: the server is currently unable to handle the request (get oauthclients.oauth.openshift.io console)\nRouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465->10.217.4.10:53: read: connection refused" 2025-12-12T16:17:02.227251276+00:00 stderr F E1212 16:17:02.223522 1 status.go:130] SyncLoopRefreshProgressing InProgress working toward version 4.20.1, 0 replicas available 2025-12-12T16:17:02.227251276+00:00 stderr F E1212 16:17:02.223538 1 status.go:130] DeploymentAvailable InsufficientReplicas 0 replicas available for console deployment 2025-12-12T16:17:02.242808106+00:00 stderr F E1212 16:17:02.242734 1 status.go:130] SyncLoopRefreshProgressing InProgress working toward version 4.20.1, 0 replicas available 2025-12-12T16:17:02.242808106+00:00 stderr F E1212 16:17:02.242761 1 status.go:130] DeploymentAvailable InsufficientReplicas 0 replicas available for console deployment 2025-12-12T16:17:02.312056096+00:00 stderr F I1212 16:17:02.311873 1 status_controller.go:230] clusteroperator/console diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:46Z","message":"ConsoleCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io console-custom)\nDownloadsCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io downloads-custom)\nRouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465-\u003e10.217.4.10:53: read: connection refused","reason":"ConsoleCustomRouteSync_FailedDeleteCustomRoutes::DownloadsCustomRouteSync_FailedDeleteCustomRoutes::RouteHealth_FailedGet","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"SyncLoopRefreshProgressing: working toward version 4.20.1, 0 replicas 
available","reason":"SyncLoopRefresh_InProgress","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"DeploymentAvailable: 0 replicas available for console deployment\nRouteHealthAvailable: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465-\u003e10.217.4.10:53: read: connection refused","reason":"Deployment_InsufficientReplicas::RouteHealth_FailedGet","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-03T09:40:49Z","message":"ConsoleCustomRouteSyncUpgradeable: the server is currently unable to handle the request (delete routes.route.openshift.io console-custom)\nDownloadsCustomRouteSyncUpgradeable: the server is currently unable to handle the request (delete routes.route.openshift.io downloads-custom)","reason":"ConsoleCustomRouteSync_FailedDeleteCustomRoutes::DownloadsCustomRouteSync_FailedDeleteCustomRoutes","status":"False","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:16Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:02.324698195+00:00 stderr F I1212 16:17:02.322320 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-console-operator", Name:"console-operator", UID:"4982b9f1-eaf4-44fa-a84a-bf9954aedcb1", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/console changed: Degraded message changed from "ConsoleCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io console-custom)\nDownloadsCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io downloads-custom)\nOAuthClientsControllerDegraded: the server is currently unable to handle the request (get oauthclients.oauth.openshift.io console)\nRouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465->10.217.4.10:53: read: connection refused" to "ConsoleCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io console-custom)\nDownloadsCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io downloads-custom)\nRouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465->10.217.4.10:53: read: connection refused" 2025-12-12T16:17:02.407258571+00:00 stderr F I1212 16:17:02.407190 1 status_controller.go:230] clusteroperator/console diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:46Z","message":"RouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465-\u003e10.217.4.10:53: read: connection 
refused","reason":"RouteHealth_FailedGet","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"SyncLoopRefreshProgressing: working toward version 4.20.1, 0 replicas available","reason":"SyncLoopRefresh_InProgress","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"DeploymentAvailable: 0 replicas available for console deployment\nRouteHealthAvailable: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465-\u003e10.217.4.10:53: read: connection refused","reason":"Deployment_InsufficientReplicas::RouteHealth_FailedGet","status":"False","type":"Available"},{"lastTransitionTime":"2025-12-12T16:17:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:16Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:02.412424267+00:00 stderr F E1212 16:17:02.412378 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_console reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"console\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:17:02.417208794+00:00 stderr F I1212 16:17:02.417142 1 status_controller.go:230] clusteroperator/console diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:46Z","message":"RouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465-\u003e10.217.4.10:53: read: connection refused","reason":"RouteHealth_FailedGet","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"SyncLoopRefreshProgressing: working toward version 4.20.1, 0 replicas available","reason":"SyncLoopRefresh_InProgress","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"DeploymentAvailable: 0 replicas available for console deployment\nRouteHealthAvailable: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465-\u003e10.217.4.10:53: read: connection refused","reason":"Deployment_InsufficientReplicas::RouteHealth_FailedGet","status":"False","type":"Available"},{"lastTransitionTime":"2025-12-12T16:17:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:16Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:02.427621238+00:00 stderr F I1212 16:17:02.426797 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-console-operator", Name:"console-operator", UID:"4982b9f1-eaf4-44fa-a84a-bf9954aedcb1", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/console changed: Degraded message changed from "ConsoleCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io 
console-custom)\nDownloadsCustomRouteSyncDegraded: the server is currently unable to handle the request (delete routes.route.openshift.io downloads-custom)\nRouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465->10.217.4.10:53: read: connection refused" to "RouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465->10.217.4.10:53: read: connection refused",Upgradeable changed from False to True ("All is well") 2025-12-12T16:17:03.370826576+00:00 stderr F E1212 16:17:03.370736 1 status.go:130] SyncLoopRefreshProgressing InProgress working toward version 4.20.1, 0 replicas available 2025-12-12T16:17:03.370826576+00:00 stderr F E1212 16:17:03.370763 1 status.go:130] DeploymentAvailable InsufficientReplicas 0 replicas available for console deployment 2025-12-12T16:17:09.831482458+00:00 stderr F E1212 16:17:09.830317 1 status.go:130] SyncLoopRefreshProgressing InProgress working toward version 4.20.1, 0 replicas available 2025-12-12T16:17:09.831482458+00:00 stderr F E1212 16:17:09.831211 1 status.go:130] DeploymentAvailable InsufficientReplicas 0 replicas available for console deployment 2025-12-12T16:17:11.825608222+00:00 stderr F E1212 16:17:11.825034 1 status.go:130] RouteHealthDegraded StatusError route not yet available, https://console-openshift-console.apps-crc.testing returns '503 Service Unavailable' 2025-12-12T16:17:11.825608222+00:00 stderr F E1212 16:17:11.825582 1 status.go:130] RouteHealthAvailable StatusError route not yet available, https://console-openshift-console.apps-crc.testing returns '503 Service Unavailable' 2025-12-12T16:17:11.847787964+00:00 stderr F E1212 16:17:11.847691 1 base_controller.go:279] "Unhandled Error" err="HealthCheckController reconciliation failed: route not yet available, https://console-openshift-console.apps-crc.testing returns '503 Service Unavailable'" 2025-12-12T16:17:11.855149193+00:00 stderr F I1212 16:17:11.855070 1 status_controller.go:230] clusteroperator/console diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:46Z","message":"RouteHealthDegraded: route not yet available, https://console-openshift-console.apps-crc.testing returns '503 Service Unavailable'","reason":"RouteHealth_StatusError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"SyncLoopRefreshProgressing: working toward version 4.20.1, 0 replicas available","reason":"SyncLoopRefresh_InProgress","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"DeploymentAvailable: 0 replicas available for console deployment\nRouteHealthAvailable: route not yet available, https://console-openshift-console.apps-crc.testing returns '503 Service Unavailable'","reason":"Deployment_InsufficientReplicas::RouteHealth_StatusError","status":"False","type":"Available"},{"lastTransitionTime":"2025-12-12T16:17:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:16Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:11.866755737+00:00 stderr F I1212 16:17:11.864103 1 event.go:377] 
Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-console-operator", Name:"console-operator", UID:"4982b9f1-eaf4-44fa-a84a-bf9954aedcb1", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/console changed: Degraded message changed from "RouteHealthDegraded: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465->10.217.4.10:53: read: connection refused" to "RouteHealthDegraded: route not yet available, https://console-openshift-console.apps-crc.testing returns '503 Service Unavailable'",Available message changed from "DeploymentAvailable: 0 replicas available for console deployment\nRouteHealthAvailable: failed to GET route (https://console-openshift-console.apps-crc.testing): Get \"https://console-openshift-console.apps-crc.testing\": dial tcp: lookup console-openshift-console.apps-crc.testing on 10.217.4.10:53: read udp 10.217.0.23:58465->10.217.4.10:53: read: connection refused" to "DeploymentAvailable: 0 replicas available for console deployment\nRouteHealthAvailable: route not yet available, https://console-openshift-console.apps-crc.testing returns '503 Service Unavailable'" 2025-12-12T16:17:11.866808648+00:00 stderr F E1212 16:17:11.866771 1 status.go:130] SyncLoopRefreshProgressing InProgress working toward version 4.20.1, 0 replicas available 2025-12-12T16:17:11.866808648+00:00 stderr F E1212 16:17:11.866783 1 status.go:130] DeploymentAvailable InsufficientReplicas 0 replicas available for console deployment 2025-12-12T16:17:15.515816494+00:00 stderr F I1212 16:17:15.515697 1 status_controller.go:230] clusteroperator/console diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:46Z","message":"RouteHealthDegraded: route not yet available, https://console-openshift-console.apps-crc.testing returns '503 Service Unavailable'","reason":"RouteHealth_StatusError","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:17:15Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:48Z","message":"RouteHealthAvailable: route not yet available, https://console-openshift-console.apps-crc.testing returns '503 Service Unavailable'","reason":"RouteHealth_StatusError","status":"False","type":"Available"},{"lastTransitionTime":"2025-12-12T16:17:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:16Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:15.526976266+00:00 stderr F I1212 16:17:15.526906 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-console-operator", Name:"console-operator", UID:"4982b9f1-eaf4-44fa-a84a-bf9954aedcb1", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/console changed: Progressing changed from True to False ("All is well"),Available message changed from "DeploymentAvailable: 0 replicas available for console deployment\nRouteHealthAvailable: route not yet available, https://console-openshift-console.apps-crc.testing returns '503 Service Unavailable'" to "RouteHealthAvailable: route not yet available, https://console-openshift-console.apps-crc.testing returns 
'503 Service Unavailable'" 2025-12-12T16:17:16.117019681+00:00 stderr F I1212 16:17:16.116921 1 status_controller.go:230] clusteroperator/console diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:17:16Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:17:15Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:16Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-12-12T16:17:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:16Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:16.128634405+00:00 stderr F I1212 16:17:16.128539 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-console-operator", Name:"console-operator", UID:"4982b9f1-eaf4-44fa-a84a-bf9954aedcb1", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/console changed: Degraded changed from True to False ("All is well"),Available changed from False to True ("All is well") 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.317451 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.317415727 +0000 UTC))" 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.318069 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.318057033 +0000 UTC))" 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.318081 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.318074013 +0000 UTC))" 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.318103 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.318085484 +0000 UTC))" 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.318124 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 
07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.318108364 +0000 UTC))" 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.318169 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.318131255 +0000 UTC))" 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.318197 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.318187016 +0000 UTC))" 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.318211 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.318202596 +0000 UTC))" 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.318230 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.318216827 +0000 UTC))" 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.318243 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.318236167 +0000 UTC))" 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.318256 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.318248758 +0000 UTC))" 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.318453 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-console-operator.svc\" [serving] validServingFor=[metrics.openshift-console-operator.svc,metrics.openshift-console-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" 
(2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:17:46.318441492 +0000 UTC))" 2025-12-12T16:17:46.319225192+00:00 stderr F I1212 16:17:46.318600 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556205\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2028-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:17:46.318589166 +0000 UTC))" 2025-12-12T16:18:22.010199539+00:00 stderr F E1212 16:18:22.008871 1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.020565575+00:00 stderr F W1212 16:18:22.018262 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.020565575+00:00 stderr F E1212 16:18:22.018317 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:22.025828795+00:00 stderr F E1212 16:18:22.025776 1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.031520906+00:00 stderr F W1212 16:18:22.031473 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.031541807+00:00 stderr F E1212 16:18:22.031524 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:22.044813565+00:00 stderr F E1212 16:18:22.044771 1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.048712851+00:00 stderr F W1212 16:18:22.048675 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.048735302+00:00 stderr F E1212 16:18:22.048720 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 
2025-12-12T16:18:22.073033092+00:00 stderr F E1212 16:18:22.072972 1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.076984350+00:00 stderr F W1212 16:18:22.076941 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.077010661+00:00 stderr F E1212 16:18:22.076985 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:22.119673916+00:00 stderr F E1212 16:18:22.119613 1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.124015683+00:00 stderr F W1212 16:18:22.123879 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.124015683+00:00 stderr F E1212 16:18:22.123934 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:22.207120117+00:00 stderr F E1212 16:18:22.207049 1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.412143765+00:00 stderr F W1212 16:18:22.412076 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.412143765+00:00 stderr F E1212 16:18:22.412130 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:22.574833177+00:00 stderr F E1212 16:18:22.574782 1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.812703478+00:00 stderr F W1212 16:18:22.812647 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch 
"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:22.812703478+00:00 stderr F E1212 16:18:22.812692 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:23.134943725+00:00 stderr F E1212 16:18:23.134885 1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:23.212076602+00:00 stderr F W1212 16:18:23.212018 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:23.212076602+00:00 stderr F E1212 16:18:23.212064 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:23.856723599+00:00 stderr F E1212 16:18:23.856663 1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:23.860647916+00:00 stderr F W1212 16:18:23.860604 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:23.860675677+00:00 stderr F E1212 16:18:23.860650 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:25.144683781+00:00 stderr F E1212 16:18:25.144122 1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:25.148341452+00:00 stderr F W1212 16:18:25.148286 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:25.148341452+00:00 stderr F E1212 16:18:25.148326 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:27.712997576+00:00 stderr F E1212 16:18:27.712248 
1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:27.717432376+00:00 stderr F W1212 16:18:27.717385 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:27.717451587+00:00 stderr F E1212 16:18:27.717436 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:32.841216070+00:00 stderr F E1212 16:18:32.840570 1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:32.846517411+00:00 stderr F W1212 16:18:32.846479 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:32.846549312+00:00 stderr F E1212 16:18:32.846518 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:43.090653724+00:00 stderr F E1212 16:18:43.089635 1 status.go:130] OAuthClientSyncDegraded FailedRegister Get "https://10.217.4.1:443/apis/oauth.openshift.io/v1/oauthclients/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:43.095381561+00:00 stderr F W1212 16:18:43.095331 1 base_controller.go:242] Updating status of "OAuthClientsController" failed: unable to ApplyStatus for operator using fieldManager "OAuthClientsController-reportDegraded": Patch "https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status?fieldManager=OAuthClientsController-reportDegraded&force=true": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:43.095404861+00:00 stderr F E1212 16:18:43.095386 1 base_controller.go:279] "Unhandled Error" err="OAuthClientsController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:45.928778841+00:00 stderr F E1212 16:18:45.928714 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-console-operator/leases/console-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:45.930152535+00:00 stderr F E1212 16:18:45.930107 1 leaderelection.go:436] error retrieving resource lock openshift-console-operator/console-operator-lock: Get 
"https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-console-operator/leases/console-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.057149035+00:00 stderr F E1212 16:18:46.057097 1 status.go:130] ConsoleNotificationSyncDegraded FailedDelete Delete "https://10.217.4.1:443/apis/console.openshift.io/v1/consolenotifications/cluster-upgrade": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.059096593+00:00 stderr F E1212 16:18:46.059074 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.066168868+00:00 stderr F E1212 16:18:46.066146 1 status.go:130] ConsoleNotificationSyncDegraded FailedDelete Delete "https://10.217.4.1:443/apis/console.openshift.io/v1/consolenotifications/cluster-upgrade": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.067963482+00:00 stderr F E1212 16:18:46.067926 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.079974469+00:00 stderr F E1212 16:18:46.079933 1 status.go:130] ConsoleNotificationSyncDegraded FailedDelete Delete "https://10.217.4.1:443/apis/console.openshift.io/v1/consolenotifications/cluster-upgrade": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.081636700+00:00 stderr F E1212 16:18:46.081611 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.104133507+00:00 stderr F E1212 16:18:46.104079 1 status.go:130] ConsoleNotificationSyncDegraded FailedDelete Delete "https://10.217.4.1:443/apis/console.openshift.io/v1/consolenotifications/cluster-upgrade": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.109469448+00:00 stderr F E1212 16:18:46.109404 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.152652506+00:00 stderr F E1212 16:18:46.152612 1 status.go:130] ConsoleNotificationSyncDegraded FailedDelete Delete "https://10.217.4.1:443/apis/console.openshift.io/v1/consolenotifications/cluster-upgrade": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.155235560+00:00 stderr F E1212 16:18:46.155199 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.175812799+00:00 stderr F E1212 16:18:46.175770 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.176725131+00:00 stderr F E1212 16:18:46.176696 1 status.go:130] PDBSyncDegraded FailedApply Get 
"https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.177495440+00:00 stderr F E1212 16:18:46.177476 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.177889890+00:00 stderr F E1212 16:18:46.177870 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.179266564+00:00 stderr F E1212 16:18:46.179250 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.179307295+00:00 stderr F E1212 16:18:46.179287 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.182976506+00:00 stderr F E1212 16:18:46.182933 1 status.go:130] DownloadsDeploymentSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-console/deployments/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.184484143+00:00 stderr F E1212 16:18:46.184388 1 base_controller.go:279] "Unhandled Error" err="ConsoleDownloadsDeploymentSyncController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.184853812+00:00 stderr F E1212 16:18:46.184824 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.186331669+00:00 stderr F E1212 16:18:46.186305 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.187787575+00:00 stderr F E1212 16:18:46.187750 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.187787575+00:00 stderr F E1212 16:18:46.187764 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.192041660+00:00 stderr F E1212 16:18:46.192018 1 status.go:130] DownloadsDeploymentSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-console/deployments/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.200046958+00:00 stderr F E1212 16:18:46.200019 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/console": dial tcp 10.217.4.1:443: connect: connection refused 
2025-12-12T16:18:46.217128390+00:00 stderr F E1212 16:18:46.217080 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.237963355+00:00 stderr F E1212 16:18:46.237921 1 status.go:130] ConsoleNotificationSyncDegraded FailedDelete Delete "https://10.217.4.1:443/apis/console.openshift.io/v1/consolenotifications/cluster-upgrade": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.260280937+00:00 stderr F E1212 16:18:46.260114 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.273218757+00:00 stderr F E1212 16:18:46.273138 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.459780729+00:00 stderr F E1212 16:18:46.459728 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.472689298+00:00 stderr F E1212 16:18:46.472618 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.661214839+00:00 stderr F E1212 16:18:46.661075 1 base_controller.go:279] "Unhandled Error" err="ConsoleDownloadsDeploymentSyncController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.675289677+00:00 stderr F E1212 16:18:46.675262 1 status.go:130] DownloadsDeploymentSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-console/deployments/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.860677741+00:00 stderr F E1212 16:18:46.860580 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.884290814+00:00 stderr F E1212 16:18:46.884150 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:47.059741012+00:00 stderr F E1212 16:18:47.059677 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.068436337+00:00 stderr F E1212 16:18:47.067508 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:47.258869195+00:00 stderr F I1212 16:18:47.258786 1 request.go:752] "Waited before sending request" delay="1.019625278s" 
reason="client-side throttling, not priority and fairness" verb="PUT" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status" 2025-12-12T16:18:47.259824179+00:00 stderr F E1212 16:18:47.259796 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.423042054+00:00 stderr F E1212 16:18:47.422972 1 status.go:130] ConsoleNotificationSyncDegraded FailedDelete Delete "https://10.217.4.1:443/apis/console.openshift.io/v1/consolenotifications/cluster-upgrade": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:47.459404653+00:00 stderr F E1212 16:18:47.459304 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.481815206+00:00 stderr F E1212 16:18:47.481770 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:47.660265158+00:00 stderr F E1212 16:18:47.660209 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.683627915+00:00 stderr F E1212 16:18:47.683574 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:47.860858927+00:00 stderr F E1212 16:18:47.860816 1 base_controller.go:279] "Unhandled Error" err="ConsoleDownloadsDeploymentSyncController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.884430680+00:00 stderr F E1212 16:18:47.884373 1 status.go:130] DownloadsDeploymentSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-console/deployments/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:48.059752864+00:00 stderr F E1212 16:18:48.059647 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.102780018+00:00 stderr F E1212 16:18:48.102718 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:48.259164814+00:00 stderr F I1212 16:18:48.258818 1 request.go:752] "Waited before sending request" delay="1.190279056s" reason="client-side throttling, not priority and fairness" verb="PUT" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status" 2025-12-12T16:18:48.260429126+00:00 stderr F E1212 16:18:48.260400 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.272908914+00:00 stderr F E1212 16:18:48.272860 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:48.460299447+00:00 stderr F E1212 16:18:48.460160 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.660420355+00:00 stderr F E1212 16:18:48.660045 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.703007698+00:00 stderr F E1212 16:18:48.702945 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:48.782892183+00:00 stderr F E1212 16:18:48.782834 1 status.go:130] ConsoleNotificationSyncDegraded FailedDelete Delete "https://10.217.4.1:443/apis/console.openshift.io/v1/consolenotifications/cluster-upgrade": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:48.859588999+00:00 stderr F E1212 16:18:48.859517 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.901900405+00:00 stderr F E1212 16:18:48.901824 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:49.059914641+00:00 stderr F E1212 16:18:49.059855 1 base_controller.go:279] "Unhandled Error" err="ConsoleDownloadsDeploymentSyncController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.103047048+00:00 stderr F E1212 16:18:49.102980 1 status.go:130] DownloadsDeploymentSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-console/deployments/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:49.259248599+00:00 stderr F I1212 16:18:49.259157 1 request.go:752] "Waited before sending request" delay="1.155078517s" reason="client-side throttling, not priority and fairness" verb="PUT" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status" 2025-12-12T16:18:49.260046239+00:00 stderr F E1212 16:18:49.260001 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.342919198+00:00 stderr F E1212 16:18:49.342852 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/console": dial tcp 10.217.4.1:443: 
connect: connection refused 2025-12-12T16:18:49.459841169+00:00 stderr F E1212 16:18:49.459779 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.483943655+00:00 stderr F E1212 16:18:49.483897 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:49.677830168+00:00 stderr F E1212 16:18:49.677747 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.760326128+00:00 stderr F E1212 16:18:49.760261 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:49.859336065+00:00 stderr F E1212 16:18:49.859252 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.059951495+00:00 stderr F E1212 16:18:50.059888 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.143015429+00:00 stderr F E1212 16:18:50.142949 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:50.259443727+00:00 stderr F E1212 16:18:50.259351 1 base_controller.go:279] "Unhandled Error" err="ConsoleDownloadsDeploymentSyncController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.343098866+00:00 stderr F E1212 16:18:50.343014 1 status.go:130] DownloadsDeploymentSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-console/deployments/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:50.458846197+00:00 stderr F I1212 16:18:50.458740 1 request.go:752] "Waited before sending request" delay="1.114804981s" reason="client-side throttling, not priority and fairness" verb="PUT" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status" 2025-12-12T16:18:50.459742739+00:00 stderr F E1212 16:18:50.459665 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.503318797+00:00 stderr F E1212 16:18:50.503260 1 status.go:130] ConsoleNotificationSyncDegraded FailedDelete Delete "https://10.217.4.1:443/apis/console.openshift.io/v1/consolenotifications/cluster-upgrade": dial tcp 10.217.4.1:443: connect: connection refused 
2025-12-12T16:18:50.622749889+00:00 stderr F E1212 16:18:50.622658 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:50.659407306+00:00 stderr F E1212 16:18:50.659319 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.702471100+00:00 stderr F E1212 16:18:50.702388 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:50.860023435+00:00 stderr F E1212 16:18:50.859937 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.023196640+00:00 stderr F E1212 16:18:51.023117 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.059552297+00:00 stderr F E1212 16:18:51.059469 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.222363393+00:00 stderr F E1212 16:18:51.222297 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.260645619+00:00 stderr F E1212 16:18:51.260143 1 base_controller.go:279] "Unhandled Error" err="ConsoleDownloadsDeploymentSyncController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.423911025+00:00 stderr F E1212 16:18:51.423839 1 status.go:130] DownloadsDeploymentSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-console/deployments/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.459959857+00:00 stderr F E1212 16:18:51.459863 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.658714990+00:00 stderr F I1212 16:18:51.658623 1 request.go:752] "Waited before sending request" delay="1.034864314s" reason="client-side throttling, not priority and fairness" verb="PUT" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status" 2025-12-12T16:18:51.659754696+00:00 stderr F E1212 16:18:51.659702 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.859886554+00:00 stderr F E1212 16:18:51.859805 1 
base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.943405479+00:00 stderr F E1212 16:18:51.943316 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.983063869+00:00 stderr F E1212 16:18:51.982983 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:52.065268032+00:00 stderr F E1212 16:18:52.065199 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.260788146+00:00 stderr F E1212 16:18:52.259942 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.390536753+00:00 stderr F E1212 16:18:52.390475 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:52.461596930+00:00 stderr F E1212 16:18:52.460919 1 base_controller.go:279] "Unhandled Error" err="ConsoleDownloadsDeploymentSyncController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.583971815+00:00 stderr F E1212 16:18:52.583867 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:52.660050806+00:00 stderr F E1212 16:18:52.659973 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.743143951+00:00 stderr F E1212 16:18:52.743075 1 status.go:130] ConsoleNotificationSyncDegraded FailedDelete Delete "https://10.217.4.1:443/apis/console.openshift.io/v1/consolenotifications/cluster-upgrade": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:52.785073127+00:00 stderr F E1212 16:18:52.784986 1 status.go:130] DownloadsDeploymentSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-console/deployments/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:52.823659701+00:00 stderr F E1212 16:18:52.823568 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:52.859588390+00:00 stderr F E1212 16:18:52.859503 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.059327518+00:00 stderr F E1212 16:18:53.059214 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.259586129+00:00 stderr F E1212 16:18:53.259484 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.460330272+00:00 stderr F E1212 16:18:53.460260 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.503221442+00:00 stderr F E1212 16:18:53.503156 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:53.660147342+00:00 stderr F E1212 16:18:53.660040 1 base_controller.go:279] "Unhandled Error" err="ConsoleDownloadsDeploymentSyncController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.702852118+00:00 stderr F E1212 16:18:53.702689 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:53.858968697+00:00 stderr F I1212 16:18:53.858907 1 request.go:752] "Waited before sending request" delay="1.033989693s" reason="client-side throttling, not priority and fairness" verb="PUT" URL="https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status" 2025-12-12T16:18:53.859748836+00:00 stderr F E1212 16:18:53.859723 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.903206051+00:00 stderr F E1212 16:18:53.903135 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:54.059983227+00:00 stderr F E1212 16:18:54.059902 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.182657150+00:00 stderr F E1212 16:18:54.182593 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:54.259725545+00:00 stderr F E1212 16:18:54.259646 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.304816520+00:00 stderr F E1212 16:18:54.304745 1 status.go:130] DownloadsDeploymentSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-console/deployments/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:54.459906014+00:00 stderr F E1212 16:18:54.459860 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.660058651+00:00 stderr F E1212 16:18:54.659975 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.859769779+00:00 stderr F E1212 16:18:54.859684 1 base_controller.go:279] "Unhandled Error" err="ConsoleDownloadsDeploymentSyncController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.303265543+00:00 stderr F E1212 16:18:55.303219 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:55.304834762+00:00 stderr F E1212 16:18:55.304797 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.343216301+00:00 stderr F E1212 16:18:55.343121 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:55.344866692+00:00 stderr F E1212 16:18:55.344834 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.542847886+00:00 stderr F E1212 16:18:55.542792 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/console": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:55.545293317+00:00 stderr F E1212 16:18:55.545239 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.742712937+00:00 stderr F E1212 16:18:55.742652 1 status.go:130] ServiceSyncDegraded FailedApply Get "https://10.217.4.1:443/api/v1/namespaces/openshift-console/services/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:55.746939182+00:00 stderr F E1212 16:18:55.746868 1 base_controller.go:279] "Unhandled Error" err="ConsoleServiceController reconciliation failed: Put 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:56.023525020+00:00 stderr F E1212 16:18:56.023481 1 status.go:130] ConsoleNotificationSyncDegraded FailedDelete Delete "https://10.217.4.1:443/apis/console.openshift.io/v1/consolenotifications/cluster-upgrade": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:56.025214272+00:00 stderr F E1212 16:18:56.025163 1 base_controller.go:279] "Unhandled Error" err="ClusterUpgradeNotificationController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:56.143124107+00:00 stderr F E1212 16:18:56.143077 1 status.go:130] DownloadsDeploymentSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/apps/v1/namespaces/openshift-console/deployments/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:56.144936982+00:00 stderr F E1212 16:18:56.144901 1 base_controller.go:279] "Unhandled Error" err="ConsoleDownloadsDeploymentSyncController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:56.587544454+00:00 stderr F E1212 16:18:56.587500 1 status.go:130] PDBSyncDegraded FailedApply Get "https://10.217.4.1:443/apis/policy/v1/namespaces/openshift-console/poddisruptionbudgets/downloads": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:56.589284727+00:00 stderr F E1212 16:18:56.589241 1 base_controller.go:279] "Unhandled Error" err="PodDisruptionBudgetController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/consoles/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:19:26.734202074+00:00 stderr F I1212 16:19:26.733241 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:209" 2025-12-12T16:19:27.477212819+00:00 stderr F I1212 16:19:27.475599 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:19:28.102706454+00:00 stderr F I1212 16:19:28.102086 1 reflector.go:430] "Caches populated" type="*v1.PodDisruptionBudget" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:28.759412812+00:00 stderr F I1212 16:19:28.758710 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:31.324076126+00:00 stderr F I1212 16:19:31.324005 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:40.851849739+00:00 stderr F I1212 16:19:40.851223 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:49.517572956+00:00 stderr F I1212 16:19:49.516380 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:56.949270980+00:00 stderr F I1212 16:19:56.948680 1 reflector.go:430] "Caches populated" type="*v1.IngressController" reflector="github.com/openshift/client-go/operator/informers/externalversions/factory.go:125" 2025-12-12T16:19:57.235759573+00:00 stderr F I1212 16:19:57.235677 1 
reflector.go:430] "Caches populated" type="operators.coreos.com/v1, Resource=olmconfigs" reflector="k8s.io/client-go/dynamic/dynamicinformer/informer.go:108" 2025-12-12T16:20:02.071818647+00:00 stderr F I1212 16:20:02.071027 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:02.966479889+00:00 stderr F I1212 16:20:02.966419 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:03.169412514+00:00 stderr F I1212 16:20:03.169345 1 reflector.go:430] "Caches populated" type="*v1.Ingress" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:04.270080230+00:00 stderr F I1212 16:20:04.269919 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:06.067221703+00:00 stderr F I1212 16:20:06.066616 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:06.805519880+00:00 stderr F I1212 16:20:06.805445 1 reflector.go:430] "Caches populated" type="*v1.Authentication" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:07.420504271+00:00 stderr F I1212 16:20:07.420422 1 reflector.go:430] "Caches populated" type="*v1.ConsolePlugin" reflector="github.com/openshift/client-go/console/informers/externalversions/factory.go:125" 2025-12-12T16:20:08.059661739+00:00 stderr F I1212 16:20:08.059582 1 reflector.go:430] "Caches populated" type="*v1.Console" reflector="github.com/openshift/client-go/operator/informers/externalversions/factory.go:125" 2025-12-12T16:20:09.279079507+00:00 stderr F I1212 16:20:09.278982 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:11.230111022+00:00 stderr F I1212 16:20:11.230032 1 reflector.go:430] "Caches populated" type="*v1.Console" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:15.085256477+00:00 stderr F I1212 16:20:15.084647 1 reflector.go:430] "Caches populated" type="*v1.ConsoleCLIDownload" reflector="github.com/openshift/client-go/console/informers/externalversions/factory.go:125" 2025-12-12T16:20:20.266214391+00:00 stderr F I1212 16:20:20.265406 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:21.005090362+00:00 stderr F I1212 16:20:21.005002 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=consoles" reflector="k8s.io/client-go/dynamic/dynamicinformer/informer.go:108" 2025-12-12T16:20:21.738743672+00:00 stderr F I1212 16:20:21.738656 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:24.871244333+00:00 stderr F I1212 16:20:24.871166 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:25.322449942+00:00 stderr F I1212 16:20:25.321806 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/apiserver/pkg/authentication/request/headerrequest/requestheader_controller.go:183" 
2025-12-12T16:20:26.449448038+00:00 stderr F I1212 16:20:26.449356 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:26.523285662+00:00 stderr F I1212 16:20:26.523195 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:209" 2025-12-12T16:20:29.490284867+00:00 stderr F I1212 16:20:29.489697 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:30.238652339+00:00 stderr F I1212 16:20:30.238581 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:33.383688693+00:00 stderr F I1212 16:20:33.382659 1 reflector.go:430] "Caches populated" type="*v1.OAuth" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:35.122551632+00:00 stderr F I1212 16:20:35.121530 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:38.263329895+00:00 stderr F I1212 16:20:38.262603 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go/informers/factory.go:160" ././@LongLink0000644000000000000000000000024100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_infrawatch-operators-6bs58_6510d065-e486-4274-a8ca-4c2cdb8dd1ae/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_infr0000755000175000017500000000000015117043043033137 5ustar zuulzuul././@LongLink0000644000000000000000000000025100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiser0000755000175000017500000000000015117043043033047 5ustar zuulzuul././@LongLink0000644000000000000000000000027700000000000011611 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/fix-audit-permissions/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiser0000755000175000017500000000000015117043062033050 5ustar zuulzuul././@LongLink0000644000000000000000000000030400000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/fix-audit-permissions/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiser0000644000175000017500000000000015117043043033037 0ustar zuulzuul././@LongLink0000644000000000000000000000027100000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/oauth-apiserver/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiser0000755000175000017500000000000015117043062033050 5ustar zuulzuul././@LongLink0000644000000000000000000000027600000000000011610 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/oauth-apiserver/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiser0000644000175000017500000015122615117043043033060 0ustar zuulzuul2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.403846 1 feature_gate.go:385] feature gates: {map[]} 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404003 1 flags.go:64] FLAG: --accesstoken-inactivity-timeout="0s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404012 1 flags.go:64] FLAG: --admission-control-config-file="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404018 1 flags.go:64] FLAG: --advertise-address="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404027 1 flags.go:64] FLAG: --api-audiences="[https://kubernetes.default.svc]" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404036 1 flags.go:64] FLAG: --audit-log-batch-buffer-size="10000" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404041 1 flags.go:64] FLAG: --audit-log-batch-max-size="1" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404045 1 flags.go:64] FLAG: --audit-log-batch-max-wait="0s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404048 1 flags.go:64] FLAG: --audit-log-batch-throttle-burst="0" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404051 1 flags.go:64] FLAG: --audit-log-batch-throttle-enable="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404057 1 flags.go:64] FLAG: --audit-log-batch-throttle-qps="0" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404061 1 flags.go:64] FLAG: --audit-log-compress="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404065 1 flags.go:64] FLAG: --audit-log-format="json" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404069 1 flags.go:64] FLAG: --audit-log-maxage="0" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404073 1 flags.go:64] FLAG: --audit-log-maxbackup="10" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404077 1 flags.go:64] FLAG: --audit-log-maxsize="100" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404080 1 flags.go:64] FLAG: --audit-log-mode="blocking" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404084 1 flags.go:64] FLAG: --audit-log-path="/var/log/oauth-apiserver/audit.log" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404088 1 flags.go:64] FLAG: --audit-log-truncate-enabled="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404091 1 flags.go:64] FLAG: --audit-log-truncate-max-batch-size="10485760" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404102 1 flags.go:64] FLAG: --audit-log-truncate-max-event-size="102400" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404106 1 flags.go:64] FLAG: --audit-log-version="audit.k8s.io/v1" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404109 1 flags.go:64] FLAG: --audit-policy-file="/var/run/configmaps/audit/policy.yaml" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404113 1 flags.go:64] FLAG: --audit-webhook-batch-buffer-size="10000" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404116 1 flags.go:64] FLAG: --audit-webhook-batch-initial-backoff="10s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404120 1 
flags.go:64] FLAG: --audit-webhook-batch-max-size="400" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404123 1 flags.go:64] FLAG: --audit-webhook-batch-max-wait="30s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404127 1 flags.go:64] FLAG: --audit-webhook-batch-throttle-burst="15" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404130 1 flags.go:64] FLAG: --audit-webhook-batch-throttle-enable="true" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404133 1 flags.go:64] FLAG: --audit-webhook-batch-throttle-qps="10" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404137 1 flags.go:64] FLAG: --audit-webhook-config-file="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404141 1 flags.go:64] FLAG: --audit-webhook-initial-backoff="10s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404146 1 flags.go:64] FLAG: --audit-webhook-mode="batch" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404149 1 flags.go:64] FLAG: --audit-webhook-truncate-enabled="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404153 1 flags.go:64] FLAG: --audit-webhook-truncate-max-batch-size="10485760" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404157 1 flags.go:64] FLAG: --audit-webhook-truncate-max-event-size="102400" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404160 1 flags.go:64] FLAG: --audit-webhook-version="audit.k8s.io/v1" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404163 1 flags.go:64] FLAG: --authentication-kubeconfig="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404167 1 flags.go:64] FLAG: --authentication-skip-lookup="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404170 1 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="10s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404199 1 flags.go:64] FLAG: --authentication-tolerate-lookup-failure="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404203 1 flags.go:64] FLAG: --authorization-always-allow-paths="[/healthz,/readyz,/livez]" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404208 1 flags.go:64] FLAG: --authorization-kubeconfig="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404211 1 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="10s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404215 1 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="10s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404219 1 flags.go:64] FLAG: --bind-address="0.0.0.0" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404223 1 flags.go:64] FLAG: --cert-dir="apiserver.local.config/certificates" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404229 1 flags.go:64] FLAG: --client-ca-file="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404232 1 flags.go:64] FLAG: --contention-profiling="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404236 1 flags.go:64] FLAG: --cors-allowed-origins="[//127\\.0\\.0\\.1(:|$),//localhost(:|$)]" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404242 1 flags.go:64] FLAG: --debug-socket-path="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404245 1 flags.go:64] FLAG: --default-watch-cache-size="100" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404249 1 flags.go:64] FLAG: --delete-collection-workers="1" 2025-12-12T16:16:42.405345070+00:00 
stderr F I1212 16:16:42.404252 1 flags.go:64] FLAG: --disable-admission-plugins="[]" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404257 1 flags.go:64] FLAG: --disable-http2-serving="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404260 1 flags.go:64] FLAG: --egress-selector-config-file="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404264 1 flags.go:64] FLAG: --emulated-version="[]" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404268 1 flags.go:64] FLAG: --emulation-forward-compatible="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404272 1 flags.go:64] FLAG: --enable-admission-plugins="[]" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404276 1 flags.go:64] FLAG: --enable-garbage-collector="true" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404280 1 flags.go:64] FLAG: --enable-priority-and-fairness="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404283 1 flags.go:64] FLAG: --encryption-provider-config="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404287 1 flags.go:64] FLAG: --encryption-provider-config-automatic-reload="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404290 1 flags.go:64] FLAG: --etcd-cafile="/var/run/configmaps/etcd-serving-ca/ca-bundle.crt" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404294 1 flags.go:64] FLAG: --etcd-certfile="/var/run/secrets/etcd-client/tls.crt" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404298 1 flags.go:64] FLAG: --etcd-compaction-interval="5m0s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404302 1 flags.go:64] FLAG: --etcd-count-metric-poll-period="1m0s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404306 1 flags.go:64] FLAG: --etcd-db-metric-poll-interval="30s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404309 1 flags.go:64] FLAG: --etcd-healthcheck-timeout="9s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404313 1 flags.go:64] FLAG: --etcd-keyfile="/var/run/secrets/etcd-client/tls.key" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404317 1 flags.go:64] FLAG: --etcd-prefix="openshift.io" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404320 1 flags.go:64] FLAG: --etcd-readycheck-timeout="9s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404324 1 flags.go:64] FLAG: --etcd-servers="[https://192.168.126.11:2379]" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404330 1 flags.go:64] FLAG: --etcd-servers-overrides="[]" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404335 1 flags.go:64] FLAG: --external-hostname="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404338 1 flags.go:64] FLAG: --feature-gates="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404344 1 flags.go:64] FLAG: --goaway-chance="0" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404350 1 flags.go:64] FLAG: --help="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404353 1 flags.go:64] FLAG: --http2-max-streams-per-connection="1000" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404357 1 flags.go:64] FLAG: --kubeconfig="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404361 1 flags.go:64] FLAG: --lease-reuse-duration-seconds="60" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404364 1 flags.go:64] FLAG: --livez-grace-period="0s" 2025-12-12T16:16:42.405345070+00:00 
stderr F I1212 16:16:42.404368 1 flags.go:64] FLAG: --log-flush-frequency="5s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404373 1 flags.go:64] FLAG: --max-mutating-requests-inflight="200" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404377 1 flags.go:64] FLAG: --max-requests-inflight="400" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404380 1 flags.go:64] FLAG: --min-request-timeout="1800" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404384 1 flags.go:64] FLAG: --permit-address-sharing="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404388 1 flags.go:64] FLAG: --permit-port-sharing="false" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404391 1 flags.go:64] FLAG: --profiling="true" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404395 1 flags.go:64] FLAG: --request-timeout="1m0s" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404398 1 flags.go:64] FLAG: --requestheader-allowed-names="[]" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404403 1 flags.go:64] FLAG: --requestheader-client-ca-file="" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404407 1 flags.go:64] FLAG: --requestheader-extra-headers-prefix="[x-remote-extra-]" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404412 1 flags.go:64] FLAG: --requestheader-group-headers="[x-remote-group]" 2025-12-12T16:16:42.405345070+00:00 stderr F I1212 16:16:42.404417 1 flags.go:64] FLAG: --requestheader-uid-headers="[]" 2025-12-12T16:16:42.405345070+00:00 stderr P I1212 16:16:42.404422 1 flags.go:64] FLAG: --re 2025-12-12T16:16:42.405608306+00:00 stderr F questheader-username-headers="[x-remote-user]" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404428 1 flags.go:64] FLAG: --runtime-config-emulation-forward-compatible="false" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404431 1 flags.go:64] FLAG: --secure-port="8443" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404435 1 flags.go:64] FLAG: --shutdown-delay-duration="50s" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404438 1 flags.go:64] FLAG: --shutdown-send-retry-after="true" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404442 1 flags.go:64] FLAG: --shutdown-watch-termination-grace-period="0s" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404445 1 flags.go:64] FLAG: --storage-backend="" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404449 1 flags.go:64] FLAG: --storage-initialization-timeout="1m0s" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404452 1 flags.go:64] FLAG: --storage-media-type="application/json" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404455 1 flags.go:64] FLAG: --strict-transport-security-directives="[]" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404459 1 flags.go:64] FLAG: --tls-cert-file="/var/run/secrets/serving-cert/tls.crt" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404463 1 flags.go:64] FLAG: --tls-cipher-suites="[TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256]" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404474 1 flags.go:64] FLAG: 
--tls-min-version="VersionTLS12" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404478 1 flags.go:64] FLAG: --tls-private-key-file="/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404481 1 flags.go:64] FLAG: --tls-sni-cert-key="[]" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404486 1 flags.go:64] FLAG: --tracing-config-file="" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404489 1 flags.go:64] FLAG: --v="2" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404493 1 flags.go:64] FLAG: --vmodule="" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404497 1 flags.go:64] FLAG: --watch-cache="true" 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404501 1 flags.go:64] FLAG: --watch-cache-sizes="[]" 2025-12-12T16:16:42.405608306+00:00 stderr F W1212 16:16:42.404893 1 registry.go:321] setting componentGlobalsRegistry in SetFallback. We recommend calling componentGlobalsRegistry.Set() right after parsing flags to avoid using feature gates before their final values are set by the flags. 2025-12-12T16:16:42.405608306+00:00 stderr F I1212 16:16:42.404911 1 registry.go:355] setting kube:feature gate emulation version to 1.33 2025-12-12T16:16:42.446877594+00:00 stderr F I1212 16:16:42.446775 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:43.144739991+00:00 stderr F I1212 16:16:43.144656 1 requestheader_controller.go:255] Loaded a new request header values for RequestHeaderAuthRequestController 2025-12-12T16:16:43.149323943+00:00 stderr F I1212 16:16:43.149287 1 audit.go:340] Using audit backend: ignoreErrors 2025-12-12T16:16:43.185529927+00:00 stderr F I1212 16:16:43.185437 1 shared_informer.go:350] "Waiting for caches to sync" controller="*generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]" 2025-12-12T16:16:43.221608118+00:00 stderr F I1212 16:16:43.221523 1 plugins.go:157] Loaded 3 mutating admission controller(s) successfully in the following order: NamespaceLifecycle,MutatingAdmissionPolicy,MutatingAdmissionWebhook. 2025-12-12T16:16:43.221608118+00:00 stderr F I1212 16:16:43.221559 1 plugins.go:160] Loaded 2 validating admission controller(s) successfully in the following order: ValidatingAdmissionPolicy,ValidatingAdmissionWebhook. 
2025-12-12T16:16:43.228911136+00:00 stderr F I1212 16:16:43.228849 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400 2025-12-12T16:16:43.228911136+00:00 stderr F I1212 16:16:43.228878 1 maxinflight.go:145] "Initialized mutatingChan" len=200 2025-12-12T16:16:43.228911136+00:00 stderr F I1212 16:16:43.228896 1 maxinflight.go:116] "Set denominator for readonly requests" limit=400 2025-12-12T16:16:43.228911136+00:00 stderr F I1212 16:16:43.228901 1 maxinflight.go:120] "Set denominator for mutating requests" limit=200 2025-12-12T16:16:43.269076537+00:00 stderr F I1212 16:16:43.266928 1 store.go:1663] "Monitoring resource count at path" resource="oauthclients.oauth.openshift.io" path="//oauth/clients" 2025-12-12T16:16:43.279747778+00:00 stderr F I1212 16:16:43.272635 1 cacher.go:469] cacher (oauthclients.oauth.openshift.io): initialized 2025-12-12T16:16:43.279747778+00:00 stderr F I1212 16:16:43.272684 1 reflector.go:430] "Caches populated" type="*oauth.OAuthClient" reflector="storage/cacher.go:/oauth/clients" 2025-12-12T16:16:43.279747778+00:00 stderr F I1212 16:16:43.277074 1 store.go:1663] "Monitoring resource count at path" resource="oauthauthorizetokens.oauth.openshift.io" path="//oauth/authorizetokens" 2025-12-12T16:16:43.280951807+00:00 stderr F I1212 16:16:43.280912 1 cacher.go:469] cacher (oauthauthorizetokens.oauth.openshift.io): initialized 2025-12-12T16:16:43.280976887+00:00 stderr F I1212 16:16:43.280956 1 reflector.go:430] "Caches populated" type="*oauth.OAuthAuthorizeToken" reflector="storage/cacher.go:/oauth/authorizetokens" 2025-12-12T16:16:43.286819550+00:00 stderr F I1212 16:16:43.286761 1 store.go:1663] "Monitoring resource count at path" resource="oauthaccesstokens.oauth.openshift.io" path="//oauth/accesstokens" 2025-12-12T16:16:43.292241032+00:00 stderr F I1212 16:16:43.289051 1 cacher.go:469] cacher (oauthaccesstokens.oauth.openshift.io): initialized 2025-12-12T16:16:43.292241032+00:00 stderr F I1212 16:16:43.289094 1 reflector.go:430] "Caches populated" type="*oauth.OAuthAccessToken" reflector="storage/cacher.go:/oauth/accesstokens" 2025-12-12T16:16:43.300226748+00:00 stderr F I1212 16:16:43.299755 1 store.go:1663] "Monitoring resource count at path" resource="oauthclientauthorizations.oauth.openshift.io" path="//oauth/clientauthorizations" 2025-12-12T16:16:43.308238153+00:00 stderr F I1212 16:16:43.307226 1 handler.go:288] Adding GroupVersion oauth.openshift.io v1 to ResourceManager 2025-12-12T16:16:43.308238153+00:00 stderr F I1212 16:16:43.308011 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400 2025-12-12T16:16:43.308238153+00:00 stderr F I1212 16:16:43.308024 1 maxinflight.go:145] "Initialized mutatingChan" len=200 2025-12-12T16:16:43.312224480+00:00 stderr F I1212 16:16:43.308548 1 cacher.go:469] cacher (oauthclientauthorizations.oauth.openshift.io): initialized 2025-12-12T16:16:43.312224480+00:00 stderr F I1212 16:16:43.308592 1 reflector.go:430] "Caches populated" type="*oauth.OAuthClientAuthorization" reflector="storage/cacher.go:/oauth/clientauthorizations" 2025-12-12T16:16:43.322285146+00:00 stderr F I1212 16:16:43.321508 1 store.go:1663] "Monitoring resource count at path" resource="users.user.openshift.io" path="//users" 2025-12-12T16:16:43.324448789+00:00 stderr F I1212 16:16:43.324420 1 cacher.go:469] cacher (users.user.openshift.io): initialized 2025-12-12T16:16:43.324464739+00:00 stderr F I1212 16:16:43.324445 1 reflector.go:430] "Caches populated" type="*user.User" reflector="storage/cacher.go:/users" 
2025-12-12T16:16:43.328391905+00:00 stderr F I1212 16:16:43.328363 1 store.go:1663] "Monitoring resource count at path" resource="identities.user.openshift.io" path="//useridentities" 2025-12-12T16:16:43.330113187+00:00 stderr F I1212 16:16:43.330074 1 cacher.go:469] cacher (identities.user.openshift.io): initialized 2025-12-12T16:16:43.330131158+00:00 stderr F I1212 16:16:43.330120 1 reflector.go:430] "Caches populated" type="*user.Identity" reflector="storage/cacher.go:/useridentities" 2025-12-12T16:16:43.334339250+00:00 stderr F I1212 16:16:43.334320 1 store.go:1663] "Monitoring resource count at path" resource="groups.user.openshift.io" path="//groups" 2025-12-12T16:16:43.335415937+00:00 stderr F I1212 16:16:43.335387 1 cacher.go:469] cacher (groups.user.openshift.io): initialized 2025-12-12T16:16:43.335435487+00:00 stderr F I1212 16:16:43.335415 1 reflector.go:430] "Caches populated" type="*user.Group" reflector="storage/cacher.go:/groups" 2025-12-12T16:16:43.335853517+00:00 stderr F I1212 16:16:43.335836 1 handler.go:288] Adding GroupVersion user.openshift.io v1 to ResourceManager 2025-12-12T16:16:43.335984550+00:00 stderr F I1212 16:16:43.335940 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400 2025-12-12T16:16:43.336018631+00:00 stderr F I1212 16:16:43.336008 1 maxinflight.go:145] "Initialized mutatingChan" len=200 2025-12-12T16:16:43.464401166+00:00 stderr F I1212 16:16:43.464334 1 genericapiserver.go:583] "[graceful-termination] using HTTP Server shutdown timeout" shutdownTimeout="2s" 2025-12-12T16:16:43.464438487+00:00 stderr F I1212 16:16:43.464418 1 genericapiserver.go:551] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:16:43.471282084+00:00 stderr F I1212 16:16:43.470944 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:43.471282084+00:00 stderr F I1212 16:16:43.470981 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:43.471282084+00:00 stderr F I1212 16:16:43.471024 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:43.471282084+00:00 stderr F I1212 16:16:43.471033 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:43.471282084+00:00 stderr F I1212 16:16:43.471046 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:43.471282084+00:00 stderr F I1212 16:16:43.471052 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:43.472215496+00:00 stderr F I1212 16:16:43.471640 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"api.openshift-oauth-apiserver.svc\" [serving] validServingFor=[api.openshift-oauth-apiserver.svc,api.openshift-oauth-apiserver.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:16:43.471604582 +0000 UTC))" 2025-12-12T16:16:43.472215496+00:00 stderr F I1212 16:16:43.471811 1 named_certificates.go:53] "Loaded SNI cert" index=0 
certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556203\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556202\" (2025-12-12 15:16:42 +0000 UTC to 2028-12-12 15:16:42 +0000 UTC (now=2025-12-12 16:16:43.471793106 +0000 UTC))" 2025-12-12T16:16:43.472215496+00:00 stderr F I1212 16:16:43.471831 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:16:43.472215496+00:00 stderr F I1212 16:16:43.471859 1 genericapiserver.go:706] [graceful-termination] waiting for shutdown to be initiated 2025-12-12T16:16:43.472215496+00:00 stderr F I1212 16:16:43.471882 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:43.472765080+00:00 stderr F I1212 16:16:43.472738 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:43.479301410+00:00 stderr F I1212 16:16:43.479081 1 reflector.go:430] "Caches populated" type="*v1.MutatingWebhookConfiguration" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.479301410+00:00 stderr F I1212 16:16:43.479080 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.479519155+00:00 stderr F I1212 16:16:43.479477 1 reflector.go:430] "Caches populated" type="*v1.ValidatingAdmissionPolicyBinding" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.479564376+00:00 stderr F I1212 16:16:43.479544 1 reflector.go:430] "Caches populated" type="*v1.ValidatingAdmissionPolicy" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.479622647+00:00 stderr F I1212 16:16:43.479601 1 reflector.go:430] "Caches populated" type="*v1.ValidatingWebhookConfiguration" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.483215955+00:00 stderr F I1212 16:16:43.480335 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.485629984+00:00 stderr F I1212 16:16:43.484695 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.485629984+00:00 stderr F I1212 16:16:43.485352 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.489742314+00:00 stderr F I1212 16:16:43.489639 1 shared_informer.go:357] "Caches are synced" controller="*generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]" 2025-12-12T16:16:43.489742314+00:00 stderr F I1212 16:16:43.489671 1 policy_source.go:240] refreshing policies 2025-12-12T16:16:43.491604390+00:00 stderr F I1212 16:16:43.491494 1 reflector.go:430] "Caches populated" type="*v1.Group" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.491604390+00:00 stderr F I1212 16:16:43.491573 1 reflector.go:430] "Caches populated" type="*v1.OAuthClient" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.512161072+00:00 stderr F I1212 16:16:43.512078 1 policy_source.go:435] informer started for config.openshift.io/v1, Kind=Infrastructure 
2025-12-12T16:16:43.522558016+00:00 stderr F I1212 16:16:43.521650 1 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Resource=infrastructures" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:43.572433393+00:00 stderr F I1212 16:16:43.571826 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:43.572433393+00:00 stderr F I1212 16:16:43.571913 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:43.572433393+00:00 stderr F I1212 16:16:43.572031 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:43.572433393+00:00 stderr F I1212 16:16:43.572289 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:43.572247739 +0000 UTC))" 2025-12-12T16:16:43.572502855+00:00 stderr F I1212 16:16:43.572485 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"api.openshift-oauth-apiserver.svc\" [serving] validServingFor=[api.openshift-oauth-apiserver.svc,api.openshift-oauth-apiserver.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:16:43.572472874 +0000 UTC))" 2025-12-12T16:16:43.576219936+00:00 stderr F I1212 16:16:43.572625 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556203\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556202\" (2025-12-12 15:16:42 +0000 UTC to 2028-12-12 15:16:42 +0000 UTC (now=2025-12-12 16:16:43.572612008 +0000 UTC))" 2025-12-12T16:16:43.576219936+00:00 stderr F I1212 16:16:43.572940 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:43.572924165 +0000 UTC))" 2025-12-12T16:16:43.576219936+00:00 stderr F I1212 16:16:43.572962 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:43.572950236 +0000 UTC))" 2025-12-12T16:16:43.576219936+00:00 stderr F I1212 16:16:43.572978 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 
07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:43.572968116 +0000 UTC))" 2025-12-12T16:16:43.576219936+00:00 stderr F I1212 16:16:43.572992 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:43.572983827 +0000 UTC))" 2025-12-12T16:16:43.576219936+00:00 stderr F I1212 16:16:43.573004 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:43.572996817 +0000 UTC))" 2025-12-12T16:16:43.576219936+00:00 stderr F I1212 16:16:43.573017 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:43.573008917 +0000 UTC))" 2025-12-12T16:16:43.576219936+00:00 stderr F I1212 16:16:43.573031 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:43.573021438 +0000 UTC))" 2025-12-12T16:16:43.576219936+00:00 stderr F I1212 16:16:43.573062 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:43.573049558 +0000 UTC))" 2025-12-12T16:16:43.576219936+00:00 stderr F I1212 16:16:43.573292 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"api.openshift-oauth-apiserver.svc\" [serving] validServingFor=[api.openshift-oauth-apiserver.svc,api.openshift-oauth-apiserver.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:16:43.573270364 +0000 UTC))" 2025-12-12T16:16:43.576219936+00:00 stderr F I1212 16:16:43.573468 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556203\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556202\" (2025-12-12 15:16:42 +0000 UTC to 2028-12-12 15:16:42 +0000 UTC (now=2025-12-12 16:16:43.573436128 
+0000 UTC))" 2025-12-12T16:16:44.588548661+00:00 stderr F I1212 16:16:44.586780 1 policy_source.go:240] refreshing policies 2025-12-12T16:16:55.907665018+00:00 stderr F I1212 16:16:55.907600 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.907559486 +0000 UTC))" 2025-12-12T16:16:55.907665018+00:00 stderr F I1212 16:16:55.907636 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.907625227 +0000 UTC))" 2025-12-12T16:16:55.907665018+00:00 stderr F I1212 16:16:55.907650 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.907642728 +0000 UTC))" 2025-12-12T16:16:55.907721880+00:00 stderr F I1212 16:16:55.907672 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.907654858 +0000 UTC))" 2025-12-12T16:16:55.907721880+00:00 stderr F I1212 16:16:55.907685 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.907676629 +0000 UTC))" 2025-12-12T16:16:55.907721880+00:00 stderr F I1212 16:16:55.907699 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.907689359 +0000 UTC))" 2025-12-12T16:16:55.907721880+00:00 stderr F I1212 16:16:55.907714 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.907703079 +0000 UTC))" 2025-12-12T16:16:55.907733780+00:00 stderr F 
I1212 16:16:55.907726 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.9077187 +0000 UTC))" 2025-12-12T16:16:55.907747550+00:00 stderr F I1212 16:16:55.907739 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.90773112 +0000 UTC))" 2025-12-12T16:16:55.907780271+00:00 stderr F I1212 16:16:55.907754 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.9077461 +0000 UTC))" 2025-12-12T16:16:55.908037437+00:00 stderr F I1212 16:16:55.908009 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"api.openshift-oauth-apiserver.svc\" [serving] validServingFor=[api.openshift-oauth-apiserver.svc,api.openshift-oauth-apiserver.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:16:55.907952015 +0000 UTC))" 2025-12-12T16:16:55.910593740+00:00 stderr F I1212 16:16:55.908169 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556203\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556202\" (2025-12-12 15:16:42 +0000 UTC to 2028-12-12 15:16:42 +0000 UTC (now=2025-12-12 16:16:55.9081541 +0000 UTC))" 2025-12-12T16:17:46.324217695+00:00 stderr F I1212 16:17:46.324126 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.324067801 +0000 UTC))" 2025-12-12T16:17:46.324217695+00:00 stderr F I1212 16:17:46.324194 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.324159454 +0000 UTC))" 2025-12-12T16:17:46.324279507+00:00 stderr F I1212 16:17:46.324216 1 tlsconfig.go:181] "Loaded client CA" index=2 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.324204165 +0000 UTC))" 2025-12-12T16:17:46.324279507+00:00 stderr F I1212 16:17:46.324252 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.324223485 +0000 UTC))" 2025-12-12T16:17:46.324316728+00:00 stderr F I1212 16:17:46.324290 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.324274957 +0000 UTC))" 2025-12-12T16:17:46.324346578+00:00 stderr F I1212 16:17:46.324324 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.324305577 +0000 UTC))" 2025-12-12T16:17:46.324444061+00:00 stderr F I1212 16:17:46.324350 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.324336648 +0000 UTC))" 2025-12-12T16:17:46.324444061+00:00 stderr F I1212 16:17:46.324370 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.324357249 +0000 UTC))" 2025-12-12T16:17:46.324444061+00:00 stderr F I1212 16:17:46.324387 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.324375799 +0000 UTC))" 2025-12-12T16:17:46.324444061+00:00 stderr F I1212 16:17:46.324411 1 tlsconfig.go:181] "Loaded client CA" index=9 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.32439559 +0000 UTC))" 2025-12-12T16:17:46.324478602+00:00 stderr F I1212 16:17:46.324443 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.32442012 +0000 UTC))" 2025-12-12T16:17:46.324770369+00:00 stderr F I1212 16:17:46.324732 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"api.openshift-oauth-apiserver.svc\" [serving] validServingFor=[api.openshift-oauth-apiserver.svc,api.openshift-oauth-apiserver.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:17:46.324705847 +0000 UTC))" 2025-12-12T16:17:46.324985214+00:00 stderr F I1212 16:17:46.324952 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556203\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556202\" (2025-12-12 15:16:42 +0000 UTC to 2028-12-12 15:16:42 +0000 UTC (now=2025-12-12 16:17:46.324904502 +0000 UTC))" 2025-12-12T16:18:30.613246348+00:00 stderr F E1212 16:18:30.612474 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.613246348+00:00 stderr F E1212 16:18:30.612543 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:30.656637451+00:00 stderr F E1212 16:18:30.656496 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:30.656697602+00:00 stderr F E1212 16:18:30.656667 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.529127591+00:00 stderr F E1212 16:18:31.529072 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.529271385+00:00 stderr F E1212 16:18:31.529255 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.562223939+00:00 stderr F E1212 16:18:31.561572 1 webhook.go:269] Failed to make webhook authorizer request: Post 
"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.562223939+00:00 stderr F E1212 16:18:31.561702 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.567216123+00:00 stderr F E1212 16:18:31.566733 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.567216123+00:00 stderr F E1212 16:18:31.566795 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:31.589233617+00:00 stderr F E1212 16:18:31.588650 1 webhook.go:269] Failed to make webhook authorizer request: Post "https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:31.589233617+00:00 stderr F E1212 16:18:31.588702 1 errors.go:83] "Unhandled Error" err="Post \"https://10.217.4.1:443/apis/authorization.k8s.io/v1/subjectaccessreviews?timeout=10s\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:19:29.188645250+00:00 stderr F I1212 16:19:29.188579 1 reflector.go:430] "Caches populated" type="config.openshift.io/v1, Resource=infrastructures" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:48.252956415+00:00 stderr F I1212 16:19:48.252878 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:53.385003119+00:00 stderr F I1212 16:19:53.384490 1 reflector.go:430] "Caches populated" type="*v1.ValidatingAdmissionPolicy" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:53.734946606+00:00 stderr F I1212 16:19:53.734858 1 policy_source.go:240] refreshing policies 2025-12-12T16:19:57.962040189+00:00 stderr F I1212 16:19:57.961777 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:16.207961876+00:00 stderr F I1212 16:20:16.207907 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:17.648368741+00:00 stderr F I1212 16:20:17.648300 1 reflector.go:430] "Caches populated" type="*v1.ValidatingWebhookConfiguration" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:20.830726614+00:00 stderr F I1212 16:20:20.830407 1 reflector.go:430] "Caches populated" type="*v1.MutatingWebhookConfiguration" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:27.089310674+00:00 stderr F I1212 16:20:27.089245 1 reflector.go:430] "Caches populated" type="*v1.ValidatingAdmissionPolicyBinding" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:20:27.757669224+00:00 stderr F I1212 16:20:27.757609 1 policy_source.go:240] refreshing policies 2025-12-12T16:20:31.813360903+00:00 stderr F I1212 16:20:31.813282 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 
2025-12-12T16:29:54.135327927+00:00 stderr F I1212 16:29:54.135254 1 policy_source.go:240] refreshing policies 2025-12-12T16:30:27.156545107+00:00 stderr F I1212 16:30:27.156444 1 policy_source.go:240] refreshing policies 2025-12-12T16:39:54.045174358+00:00 stderr F I1212 16:39:54.044299 1 policy_source.go:240] refreshing policies 2025-12-12T16:40:28.068658932+00:00 stderr F I1212 16:40:28.068543 1 policy_source.go:240] refreshing policies
[archived log file: home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_node-resolver-tddhh_72dbaca9-d010-46f5-a645-d2713a98f846/dns-node-resolver/0.log]
2025-12-12T16:17:00.460454750+00:00 stdout F /tmp/hosts.tmp /etc/hosts differ: char 159, line 3
[archived log file: home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-54f497555d-dcs9d_60d98f7f-99e4-4bb4-a7b6-48de2ff6071c/kube-scheduler-operator-container/0.log]
2025-12-12T16:16:47.598526387+00:00 stderr F I1212 16:16:47.597981 1 cmd.go:253] Using service-serving-cert provided certificates 2025-12-12T16:16:47.598526387+00:00 stderr F I1212 16:16:47.598387 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s.
Worst graceful lease acquisition is {26s}. 2025-12-12T16:16:47.600168727+00:00 stderr F I1212 16:16:47.600137 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:47.675022045+00:00 stderr F I1212 16:16:47.674561 1 builder.go:304] openshift-cluster-kube-scheduler-operator version 4.20.0-202510211040.p2.g58cbd29.assembly.stream.el9-58cbd29-58cbd296eecc61c0871739588ae65af9c05e87a6 2025-12-12T16:16:49.289021160+00:00 stderr F I1212 16:16:49.287452 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:49.289021160+00:00 stderr F W1212 16:16:49.288080 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:49.289021160+00:00 stderr F W1212 16:16:49.288097 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:49.289021160+00:00 stderr F W1212 16:16:49.288102 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:49.289021160+00:00 stderr F W1212 16:16:49.288105 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:49.289021160+00:00 stderr F W1212 16:16:49.288108 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:49.289021160+00:00 stderr F W1212 16:16:49.288111 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 2025-12-12T16:16:49.292498215+00:00 stderr F I1212 16:16:49.292449 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:49.292525815+00:00 stderr F I1212 16:16:49.292488 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:49.292559186+00:00 stderr F I1212 16:16:49.292535 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:49.292559186+00:00 stderr F I1212 16:16:49.292551 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:49.292590127+00:00 stderr F I1212 16:16:49.292571 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:49.292590127+00:00 stderr F I1212 16:16:49.292584 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:49.293340305+00:00 stderr F I1212 16:16:49.293254 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:16:49.293340305+00:00 stderr F I1212 16:16:49.293308 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:49.293358856+00:00 stderr F I1212 16:16:49.293332 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:49.293469818+00:00 stderr F I1212 16:16:49.293450 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:49.294857032+00:00 stderr F I1212 16:16:49.293916 1 leaderelection.go:257] attempting to acquire leader lease openshift-kube-scheduler-operator/openshift-cluster-kube-scheduler-operator-lock... 
2025-12-12T16:16:49.327556581+00:00 stderr F I1212 16:16:49.327504 1 leaderelection.go:271] successfully acquired lease openshift-kube-scheduler-operator/openshift-cluster-kube-scheduler-operator-lock 2025-12-12T16:16:49.341349717+00:00 stderr F I1212 16:16:49.332046 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-cluster-kube-scheduler-operator-lock", UID:"ce9ec4fe-c0eb-47a9-9bdd-9f6a6f04a33c", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"37508", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' openshift-kube-scheduler-operator-54f497555d-dcs9d_de408fe3-7ee6-4929-b8b3-97ad2941aa1b became leader 2025-12-12T16:16:49.354461227+00:00 stderr F I1212 16:16:49.349848 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:49.371879772+00:00 stderr F I1212 16:16:49.371395 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", 
"OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:49.371879772+00:00 stderr F I1212 16:16:49.371444 1 starter.go:90] FeatureGates initialized: knownFeatureGates=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:16:49.395617401+00:00 stderr F I1212 16:16:49.395524 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:49.395756755+00:00 stderr F I1212 16:16:49.395734 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:49.398098182+00:00 stderr F I1212 16:16:49.398076 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:49.417101786+00:00 stderr F I1212 16:16:49.417022 1 base_controller.go:76] Waiting for caches to sync for kube-controller-manager-RemoveStaleConditions 
2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.421980 1 base_controller.go:76] Waiting for caches to sync for StatusSyncer_kube-scheduler 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.422067 1 base_controller.go:76] Waiting for caches to sync for MissingStaticPodController 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.422629 1 base_controller.go:76] Waiting for caches to sync for KubeControllerManagerStaticResources-StaticResources 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.422646 1 base_controller.go:76] Waiting for caches to sync for kube-scheduler 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.422662 1 base_controller.go:76] Waiting for caches to sync for TargetConfigController 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.422685 1 base_controller.go:76] Waiting for caches to sync for ConfigObserver 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.422734 1 base_controller.go:76] Waiting for caches to sync for RevisionController 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.422856 1 base_controller.go:76] Waiting for caches to sync for Installer 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.422876 1 base_controller.go:76] Waiting for caches to sync for kube-scheduler-InstallerState 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.422886 1 base_controller.go:76] Waiting for caches to sync for kube-scheduler-StaticPodState 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.422896 1 base_controller.go:76] Waiting for caches to sync for PruneController 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.422906 1 base_controller.go:76] Waiting for caches to sync for kube-scheduler-Node 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.423052 1 base_controller.go:76] Waiting for caches to sync for BackingResourceController-StaticResources 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.423071 1 base_controller.go:76] Waiting for caches to sync for cluster-kube-scheduler-operator-UnsupportedConfigOverrides 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.423111 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer 2025-12-12T16:16:49.424309922+00:00 stderr F I1212 16:16:49.423124 1 base_controller.go:76] Waiting for caches to sync for GuardController 2025-12-12T16:16:49.519084336+00:00 stderr F I1212 16:16:49.517885 1 base_controller.go:82] Caches are synced for kube-controller-manager-RemoveStaleConditions 2025-12-12T16:16:49.519084336+00:00 stderr F I1212 16:16:49.517920 1 base_controller.go:119] Starting #1 worker of kube-controller-manager-RemoveStaleConditions controller ... 2025-12-12T16:16:49.529812368+00:00 stderr F I1212 16:16:49.526243 1 base_controller.go:82] Caches are synced for StatusSyncer_kube-scheduler 2025-12-12T16:16:49.529812368+00:00 stderr F I1212 16:16:49.526278 1 base_controller.go:119] Starting #1 worker of StatusSyncer_kube-scheduler controller ... 2025-12-12T16:16:49.529812368+00:00 stderr F I1212 16:16:49.526356 1 base_controller.go:82] Caches are synced for kube-scheduler-Node 2025-12-12T16:16:49.529812368+00:00 stderr F I1212 16:16:49.526362 1 base_controller.go:119] Starting #1 worker of kube-scheduler-Node controller ... 
2025-12-12T16:16:49.529812368+00:00 stderr F I1212 16:16:49.526974 1 base_controller.go:82] Caches are synced for BackingResourceController-StaticResources 2025-12-12T16:16:49.529812368+00:00 stderr F I1212 16:16:49.526991 1 base_controller.go:119] Starting #1 worker of BackingResourceController-StaticResources controller ... 2025-12-12T16:16:49.529812368+00:00 stderr F I1212 16:16:49.527007 1 base_controller.go:82] Caches are synced for cluster-kube-scheduler-operator-UnsupportedConfigOverrides 2025-12-12T16:16:49.529812368+00:00 stderr F I1212 16:16:49.527015 1 base_controller.go:119] Starting #1 worker of cluster-kube-scheduler-operator-UnsupportedConfigOverrides controller ... 2025-12-12T16:16:49.529812368+00:00 stderr F I1212 16:16:49.528568 1 base_controller.go:82] Caches are synced for LoggingSyncer 2025-12-12T16:16:49.529812368+00:00 stderr F I1212 16:16:49.528588 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ... 2025-12-12T16:16:49.546337801+00:00 stderr F I1212 16:16:49.545884 1 status_controller.go:229] clusteroperator/kube-scheduler diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: The master nodes not ready: node \"crc\" not ready since 2025-11-03 09:40:44 +0000 UTC because NodeStatusUnknown (Kubelet stopped posting node status.)","reason":"NodeController_MasterNodesReady","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:13:36Z","message":"NodeInstallerProgressing: 1 node is at revision 6","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:56:14Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 6","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:02Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:49.575929814+00:00 stderr F I1212 16:16:49.575846 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-scheduler changed: Degraded changed from False to True ("NodeControllerDegraded: The master nodes not ready: node \"crc\" not ready since 2025-11-03 09:40:44 +0000 UTC because NodeStatusUnknown (Kubelet stopped posting node status.)") 2025-12-12T16:16:49.628530328+00:00 stderr F I1212 16:16:49.627003 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:49.682170387+00:00 stderr F I1212 16:16:49.682082 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'MasterNodesReadyChanged' All master nodes are ready 2025-12-12T16:16:49.686107824+00:00 stderr F I1212 16:16:49.686067 1 status_controller.go:229] clusteroperator/kube-scheduler diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are 
ready","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:13:36Z","message":"NodeInstallerProgressing: 1 node is at revision 6","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:56:14Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 6","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:02Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:49.711374860+00:00 stderr F I1212 16:16:49.711211 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-scheduler changed: Degraded changed from True to False ("NodeControllerDegraded: All master nodes are ready") 2025-12-12T16:16:49.728242212+00:00 stderr F I1212 16:16:49.724930 1 base_controller.go:82] Caches are synced for ConfigObserver 2025-12-12T16:16:49.728242212+00:00 stderr F I1212 16:16:49.724971 1 base_controller.go:119] Starting #1 worker of ConfigObserver controller ... 2025-12-12T16:16:49.728242212+00:00 stderr F I1212 16:16:49.725000 1 base_controller.go:82] Caches are synced for RevisionController 2025-12-12T16:16:49.728242212+00:00 stderr F I1212 16:16:49.725005 1 base_controller.go:119] Starting #1 worker of RevisionController controller ... 2025-12-12T16:16:49.728242212+00:00 stderr F I1212 16:16:49.725028 1 base_controller.go:82] Caches are synced for PruneController 2025-12-12T16:16:49.728242212+00:00 stderr F I1212 16:16:49.725032 1 base_controller.go:119] Starting #1 worker of PruneController controller ... 2025-12-12T16:16:49.823925948+00:00 stderr F I1212 16:16:49.823824 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:49.924804681+00:00 stderr F I1212 16:16:49.923826 1 base_controller.go:82] Caches are synced for KubeControllerManagerStaticResources-StaticResources 2025-12-12T16:16:49.924804681+00:00 stderr F I1212 16:16:49.923854 1 base_controller.go:119] Starting #1 worker of KubeControllerManagerStaticResources-StaticResources controller ... 2025-12-12T16:16:49.924804681+00:00 stderr F I1212 16:16:49.923888 1 base_controller.go:82] Caches are synced for TargetConfigController 2025-12-12T16:16:49.924804681+00:00 stderr F I1212 16:16:49.923893 1 base_controller.go:119] Starting #1 worker of TargetConfigController controller ... 2025-12-12T16:16:50.026105634+00:00 stderr F I1212 16:16:50.025720 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:50.122739324+00:00 stderr F I1212 16:16:50.122375 1 base_controller.go:82] Caches are synced for MissingStaticPodController 2025-12-12T16:16:50.122810795+00:00 stderr F I1212 16:16:50.122797 1 base_controller.go:119] Starting #1 worker of MissingStaticPodController controller ... 
2025-12-12T16:16:50.127359006+00:00 stderr F I1212 16:16:50.127261 1 base_controller.go:82] Caches are synced for GuardController 2025-12-12T16:16:50.127359006+00:00 stderr F I1212 16:16:50.127292 1 base_controller.go:119] Starting #1 worker of GuardController controller ... 2025-12-12T16:16:50.128165366+00:00 stderr F I1212 16:16:50.128112 1 base_controller.go:82] Caches are synced for kube-scheduler-InstallerState 2025-12-12T16:16:50.128165366+00:00 stderr F I1212 16:16:50.128142 1 base_controller.go:119] Starting #1 worker of kube-scheduler-InstallerState controller ... 2025-12-12T16:16:50.128242728+00:00 stderr F I1212 16:16:50.128206 1 base_controller.go:82] Caches are synced for Installer 2025-12-12T16:16:50.128242728+00:00 stderr F I1212 16:16:50.128221 1 base_controller.go:119] Starting #1 worker of Installer controller ... 2025-12-12T16:16:50.128590126+00:00 stderr F I1212 16:16:50.128547 1 base_controller.go:82] Caches are synced for kube-scheduler-StaticPodState 2025-12-12T16:16:50.128590126+00:00 stderr F I1212 16:16:50.128563 1 base_controller.go:119] Starting #1 worker of kube-scheduler-StaticPodState controller ... 2025-12-12T16:16:50.221741691+00:00 stderr F I1212 16:16:50.221675 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:50.423212890+00:00 stderr F I1212 16:16:50.421299 1 request.go:752] "Waited before sending request" delay="1.000718272s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/kube-system/secrets?limit=500&resourceVersion=0" 2025-12-12T16:16:50.435427768+00:00 stderr F I1212 16:16:50.434014 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:50.529816902+00:00 stderr F I1212 16:16:50.528972 1 base_controller.go:82] Caches are synced for kube-scheduler 2025-12-12T16:16:50.529816902+00:00 stderr F I1212 16:16:50.529006 1 base_controller.go:119] Starting #1 worker of kube-scheduler controller ... 
2025-12-12T16:16:51.228502130+00:00 stderr F I1212 16:16:51.227129 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:16:51.228502130+00:00 stderr F - "", 2025-12-12T16:16:51.228502130+00:00 stderr F + "kube-scheduler", 2025-12-12T16:16:51.228502130+00:00 stderr F ) 2025-12-12T16:16:51.618719877+00:00 stderr F I1212 16:16:51.618443 1 request.go:752] "Waited before sending request" delay="1.489788573s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods?labelSelector=app%3Dinstaller" 2025-12-12T16:16:52.233047666+00:00 stderr F I1212 16:16:52.232203 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'PodCreated' Created Pod/revision-pruner-6-crc -n openshift-kube-scheduler because it was missing 2025-12-12T16:16:52.819551325+00:00 stderr F I1212 16:16:52.818624 1 request.go:752] "Waited before sending request" delay="1.39547731s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods/openshift-kube-scheduler-crc" 2025-12-12T16:16:53.423825927+00:00 stderr F I1212 16:16:53.423017 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'SecretUpdateFailed' Failed to update Secret/kube-scheduler-client-cert-key -n openshift-kube-scheduler: Operation cannot be fulfilled on secrets "kube-scheduler-client-cert-key": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:16:53.460251506+00:00 stderr F I1212 16:16:53.448843 1 status_controller.go:229] clusteroperator/kube-scheduler diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready\nResourceSyncControllerDegraded: Operation cannot be fulfilled on secrets \"kube-scheduler-client-cert-key\": the object has been modified; please apply your changes to the latest version and try again","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:13:36Z","message":"NodeInstallerProgressing: 1 node is at revision 6","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:56:14Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 6","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:02Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:53.460251506+00:00 stderr F I1212 16:16:53.459639 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for 
clusteroperator/kube-scheduler changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready" to "NodeControllerDegraded: All master nodes are ready\nResourceSyncControllerDegraded: Operation cannot be fulfilled on secrets \"kube-scheduler-client-cert-key\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:16:53.469527803+00:00 stderr F I1212 16:16:53.469436 1 status_controller.go:229] clusteroperator/kube-scheduler diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:13:36Z","message":"NodeInstallerProgressing: 1 node is at revision 6","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:56:14Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 6","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:02Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:53.485576284+00:00 stderr F I1212 16:16:53.482900 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-scheduler changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready\nResourceSyncControllerDegraded: Operation cannot be fulfilled on secrets \"kube-scheduler-client-cert-key\": the object has been modified; please apply your changes to the latest version and try again" to "NodeControllerDegraded: All master nodes are ready" 2025-12-12T16:16:54.023048857+00:00 stderr F I1212 16:16:54.020843 1 request.go:752] "Waited before sending request" delay="1.16983111s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods/openshift-kube-scheduler-crc" 2025-12-12T16:16:55.021688838+00:00 stderr F I1212 16:16:55.021010 1 request.go:752] "Waited before sending request" delay="1.397089349s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/localhost-recovery-client" 2025-12-12T16:16:55.904928752+00:00 stderr F I1212 16:16:55.900036 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.899998961 +0000 UTC))" 2025-12-12T16:16:55.905068365+00:00 stderr F I1212 16:16:55.905053 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 
+0000 UTC (now=2025-12-12 16:16:55.905014154 +0000 UTC))" 2025-12-12T16:16:55.905106396+00:00 stderr F I1212 16:16:55.905097 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.905086385 +0000 UTC))" 2025-12-12T16:16:55.905138387+00:00 stderr F I1212 16:16:55.905129 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.905120706 +0000 UTC))" 2025-12-12T16:16:55.905171337+00:00 stderr F I1212 16:16:55.905162 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.905152677 +0000 UTC))" 2025-12-12T16:16:55.906589162+00:00 stderr F I1212 16:16:55.906574 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.906529031 +0000 UTC))" 2025-12-12T16:16:55.906641953+00:00 stderr F I1212 16:16:55.906632 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.906619773 +0000 UTC))" 2025-12-12T16:16:55.906673954+00:00 stderr F I1212 16:16:55.906665 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.906655754 +0000 UTC))" 2025-12-12T16:16:55.906705495+00:00 stderr F I1212 16:16:55.906696 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.906686994 +0000 UTC))" 
2025-12-12T16:16:55.906745996+00:00 stderr F I1212 16:16:55.906736 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.906724445 +0000 UTC))" 2025-12-12T16:16:55.907029693+00:00 stderr F I1212 16:16:55.907012 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-kube-scheduler-operator.svc\" [serving] validServingFor=[metrics.openshift-kube-scheduler-operator.svc,metrics.openshift-kube-scheduler-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:14 +0000 UTC to 2027-11-02 07:52:15 +0000 UTC (now=2025-12-12 16:16:55.906994852 +0000 UTC))" 2025-12-12T16:16:55.907213777+00:00 stderr F I1212 16:16:55.907201 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556209\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556208\" (2025-12-12 15:16:47 +0000 UTC to 2028-12-12 15:16:47 +0000 UTC (now=2025-12-12 16:16:55.907171826 +0000 UTC))" 2025-12-12T16:16:56.222010353+00:00 stderr F I1212 16:16:56.220657 1 request.go:752] "Waited before sending request" delay="1.395849109s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods/revision-pruner-6-crc" 2025-12-12T16:16:57.230172436+00:00 stderr F I1212 16:16:57.227827 1 request.go:752] "Waited before sending request" delay="1.387556966s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods/openshift-kube-scheduler-crc" 2025-12-12T16:16:57.424817248+00:00 stderr F I1212 16:16:57.424716 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'SecretUpdateFailed' Failed to update Secret/kube-scheduler-client-cert-key -n openshift-kube-scheduler: Operation cannot be fulfilled on secrets "kube-scheduler-client-cert-key": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:16:58.494239467+00:00 stderr F I1212 16:16:58.488262 1 request.go:752] "Waited before sending request" delay="1.254008896s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods/openshift-kube-scheduler-crc" 2025-12-12T16:16:58.494239467+00:00 stderr F I1212 16:16:58.492086 1 status_controller.go:229] clusteroperator/kube-scheduler diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready\nResourceSyncControllerDegraded: Operation cannot be fulfilled on secrets \"kube-scheduler-client-cert-key\": the object has been modified; please apply your changes to the latest version and try 
again","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:13:36Z","message":"NodeInstallerProgressing: 1 node is at revision 6","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:56:14Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 6","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:02Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:58.525719326+00:00 stderr F I1212 16:16:58.525613 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-scheduler changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready" to "NodeControllerDegraded: All master nodes are ready\nResourceSyncControllerDegraded: Operation cannot be fulfilled on secrets \"kube-scheduler-client-cert-key\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:16:58.533330482+00:00 stderr F I1212 16:16:58.529437 1 status_controller.go:229] clusteroperator/kube-scheduler diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:13:36Z","message":"NodeInstallerProgressing: 1 node is at revision 6","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:56:14Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 6","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:02Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:58.553948955+00:00 stderr F I1212 16:16:58.551587 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-scheduler changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready\nResourceSyncControllerDegraded: Operation cannot be fulfilled on secrets \"kube-scheduler-client-cert-key\": the object has been modified; please apply your changes to the latest version and try again" to "NodeControllerDegraded: All master nodes are ready" 2025-12-12T16:16:59.618339132+00:00 stderr F I1212 16:16:59.617844 1 request.go:752] "Waited before sending request" delay="1.077369244s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods/openshift-kube-scheduler-crc" 2025-12-12T16:17:00.622085507+00:00 stderr F I1212 16:17:00.621497 1 annotations.go:45] Updating "openshift.io/owning-component" 
annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:17:00.622085507+00:00 stderr F - "", 2025-12-12T16:17:00.622085507+00:00 stderr F + "kube-scheduler", 2025-12-12T16:17:00.622085507+00:00 stderr F ) 2025-12-12T16:17:00.818197665+00:00 stderr F I1212 16:17:00.817775 1 request.go:752] "Waited before sending request" delay="1.194937463s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods/openshift-kube-scheduler-crc" 2025-12-12T16:17:03.021284642+00:00 stderr F I1212 16:17:03.021167 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:17:03.021284642+00:00 stderr F - "", 2025-12-12T16:17:03.021284642+00:00 stderr F + "kube-scheduler", 2025-12-12T16:17:03.021284642+00:00 stderr F ) 2025-12-12T16:17:38.924261043+00:00 stderr F I1212 16:17:38.923551 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:17:38.924261043+00:00 stderr F - "", 2025-12-12T16:17:38.924261043+00:00 stderr F + "kube-scheduler", 2025-12-12T16:17:38.924261043+00:00 stderr F ) 2025-12-12T16:17:44.885012674+00:00 stderr F I1212 16:17:44.884628 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:17:44.885012674+00:00 stderr F - "", 2025-12-12T16:17:44.885012674+00:00 stderr F + "kube-scheduler", 2025-12-12T16:17:44.885012674+00:00 stderr F ) 2025-12-12T16:17:46.326593594+00:00 stderr F I1212 16:17:46.326305 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.326259396 +0000 UTC))" 2025-12-12T16:17:46.326593594+00:00 stderr F I1212 16:17:46.326497 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.326453 +0000 UTC))" 2025-12-12T16:17:46.326593594+00:00 stderr F I1212 16:17:46.326517 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.326504822 +0000 UTC))" 2025-12-12T16:17:46.326593594+00:00 stderr F I1212 16:17:46.326532 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.326521832 +0000 UTC))" 2025-12-12T16:17:46.326593594+00:00 stderr F I1212 
16:17:46.326548 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.326537473 +0000 UTC))" 2025-12-12T16:17:46.326593594+00:00 stderr F I1212 16:17:46.326567 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.326552733 +0000 UTC))" 2025-12-12T16:17:46.326593594+00:00 stderr F I1212 16:17:46.326583 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.326572343 +0000 UTC))" 2025-12-12T16:17:46.326721737+00:00 stderr F I1212 16:17:46.326601 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.326588764 +0000 UTC))" 2025-12-12T16:17:46.326721737+00:00 stderr F I1212 16:17:46.326619 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.326609654 +0000 UTC))" 2025-12-12T16:17:46.326721737+00:00 stderr F I1212 16:17:46.326636 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.326626705 +0000 UTC))" 2025-12-12T16:17:46.326721737+00:00 stderr F I1212 16:17:46.326659 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.326641535 +0000 UTC))" 2025-12-12T16:17:46.334078429+00:00 stderr F I1212 16:17:46.332789 1 tlsconfig.go:203] 
"Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-kube-scheduler-operator.svc\" [serving] validServingFor=[metrics.openshift-kube-scheduler-operator.svc,metrics.openshift-kube-scheduler-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:14 +0000 UTC to 2027-11-02 07:52:15 +0000 UTC (now=2025-12-12 16:17:46.332757286 +0000 UTC))" 2025-12-12T16:17:46.334078429+00:00 stderr F I1212 16:17:46.333004 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556209\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556208\" (2025-12-12 15:16:47 +0000 UTC to 2028-12-12 15:16:47 +0000 UTC (now=2025-12-12 16:17:46.332977522 +0000 UTC))" 2025-12-12T16:17:47.109747975+00:00 stderr F I1212 16:17:47.106424 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:17:47.109747975+00:00 stderr F - "", 2025-12-12T16:17:47.109747975+00:00 stderr F + "kube-scheduler", 2025-12-12T16:17:47.109747975+00:00 stderr F ) 2025-12-12T16:17:47.156995463+00:00 stderr F I1212 16:17:47.156920 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:17:47.156995463+00:00 stderr F - "", 2025-12-12T16:17:47.156995463+00:00 stderr F + "kube-scheduler", 2025-12-12T16:17:47.156995463+00:00 stderr F ) 2025-12-12T16:17:49.929026637+00:00 stderr F I1212 16:17:49.928049 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:17:49.929026637+00:00 stderr F - "", 2025-12-12T16:17:49.929026637+00:00 stderr F + "kube-scheduler", 2025-12-12T16:17:49.929026637+00:00 stderr F ) 2025-12-12T16:17:52.324494529+00:00 stderr F I1212 16:17:52.322119 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:17:52.324494529+00:00 stderr F - "", 2025-12-12T16:17:52.324494529+00:00 stderr F + "kube-scheduler", 2025-12-12T16:17:52.324494529+00:00 stderr F ) 2025-12-12T16:18:49.349758247+00:00 stderr F E1212 16:18:49.349167 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-scheduler-operator/leases/openshift-cluster-kube-scheduler-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:49.350423774+00:00 stderr F E1212 16:18:49.350387 1 leaderelection.go:436] error retrieving resource lock openshift-kube-scheduler-operator/openshift-cluster-kube-scheduler-operator-lock: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-scheduler-operator/leases/openshift-cluster-kube-scheduler-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:49.532240279+00:00 stderr F E1212 16:18:49.532159 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection 
refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-scheduler-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.542860951+00:00 stderr F E1212 16:18:49.542820 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-scheduler-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.557594275+00:00 stderr F E1212 16:18:49.557507 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-scheduler-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.582633044+00:00 stderr F E1212 16:18:49.582554 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-scheduler-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.627900724+00:00 stderr F E1212 16:18:49.627817 1 base_controller.go:279] 
"Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-scheduler-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.715089759+00:00 stderr F E1212 16:18:49.715013 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-scheduler-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.881241487+00:00 stderr F E1212 16:18:49.881157 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-scheduler-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.926590148+00:00 stderr F I1212 16:18:49.926507 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:18:49.926590148+00:00 stderr F - "", 2025-12-12T16:18:49.926590148+00:00 stderr F + "kube-scheduler", 2025-12-12T16:18:49.926590148+00:00 stderr F ) 2025-12-12T16:18:50.332342820+00:00 stderr F E1212 16:18:50.332280 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.530925449+00:00 stderr F E1212 16:18:50.530835 1 base_controller.go:279] 
"Unhandled Error" err="kube-scheduler-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.732150314+00:00 stderr F E1212 16:18:50.732067 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-scheduler-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=kube-scheduler-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.133801843+00:00 stderr F E1212 16:18:51.133739 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-scheduler-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:51.331061330+00:00 stderr F I1212 16:18:51.330999 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:18:51.331061330+00:00 stderr F - "", 2025-12-12T16:18:51.331061330+00:00 stderr F + "kube-scheduler", 2025-12-12T16:18:51.331061330+00:00 stderr F ) 2025-12-12T16:18:51.530682535+00:00 stderr F E1212 16:18:51.530603 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.733777416+00:00 stderr F E1212 16:18:51.733704 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-scheduler-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=kube-scheduler-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.933079694+00:00 stderr F E1212 16:18:51.932998 1 base_controller.go:279] "Unhandled Error" err="KubeControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/kube-scheduler/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/scheduler-clusterrolebinding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-scheduler:public-2\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/policyconfigmap-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/roles/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/policyconfigmap-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/rolebindings/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/services/scheduler\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/openshift-kube-scheduler-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/localhost-recovery-client-crb.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-scheduler-recovery\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/localhost-recovery-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/localhost-recovery-client\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeControllerManagerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=KubeControllerManagerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:52.331709029+00:00 stderr F E1212 16:18:52.331218 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.534988254+00:00 stderr F E1212 16:18:52.534582 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-scheduler-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=kube-scheduler-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.734428225+00:00 stderr F E1212 16:18:52.734290 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-scheduler-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:53.331025685+00:00 stderr F E1212 16:18:53.330957 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.532373093+00:00 stderr F E1212 16:18:53.532301 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-scheduler-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=kube-scheduler-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.131380182+00:00 stderr F E1212 16:18:54.131300 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.333093489+00:00 stderr F E1212 16:18:54.333028 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-scheduler-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=kube-scheduler-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.733589789+00:00 stderr F E1212 16:18:54.733489 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.933670456+00:00 stderr F E1212 16:18:54.933603 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-scheduler-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:55.131015315+00:00 stderr F E1212 16:18:55.130947 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.331872090+00:00 stderr F E1212 16:18:55.331791 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-StaticPodState reconciliation 
failed: unable to ApplyStatus for operator using fieldManager \"kube-scheduler-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=kube-scheduler-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.533161977+00:00 stderr F E1212 16:18:55.533087 1 base_controller.go:279] "Unhandled Error" err="KubeControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/kube-scheduler/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/scheduler-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-scheduler:public-2\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/policyconfigmap-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/roles/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/policyconfigmap-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/rolebindings/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/services/scheduler\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/openshift-kube-scheduler-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/localhost-recovery-client-crb.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-scheduler-recovery\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/localhost-recovery-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/serviceaccounts/localhost-recovery-client\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeControllerManagerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=KubeControllerManagerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:55.733964551+00:00 stderr F I1212 16:18:55.733890 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:18:55.733964551+00:00 stderr F - "", 2025-12-12T16:18:55.733964551+00:00 stderr F + "kube-scheduler", 2025-12-12T16:18:55.733964551+00:00 stderr F ) 2025-12-12T16:18:55.930927801+00:00 stderr F E1212 16:18:55.930842 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-InstallerState reconciliation failed: Get 
\"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:56.132558406+00:00 stderr F E1212 16:18:56.132488 1 base_controller.go:279] "Unhandled Error" err="kube-scheduler-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-scheduler-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubeschedulers/cluster/status?fieldManager=kube-scheduler-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:58.148340041+00:00 stderr F E1212 16:18:58.147771 1 base_controller.go:279] "Unhandled Error" err="KubeControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/kube-scheduler/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/scheduler-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-scheduler:public-2\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/policyconfigmap-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/roles/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-scheduler/policyconfigmap-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/rolebindings/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:59.156524126+00:00 stderr F I1212 16:18:59.156198 1 helpers.go:264] lister was stale at resourceVersion=38164, live get showed resourceVersion=39232 2025-12-12T16:18:59.169553198+00:00 stderr F E1212 16:18:59.169477 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: synthetic requeue request" 2025-12-12T16:18:59.533349642+00:00 stderr F I1212 16:18:59.532780 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:18:59.533349642+00:00 stderr F - "", 2025-12-12T16:18:59.533349642+00:00 stderr F + "kube-scheduler", 2025-12-12T16:18:59.533349642+00:00 stderr F ) 2025-12-12T16:19:27.910546499+00:00 stderr F I1212 16:19:27.909796 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:30.617111785+00:00 stderr F I1212 16:19:30.614688 1 reflector.go:430] "Caches populated" type="*v1.Scheduler" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:30.621762072+00:00 stderr F I1212 16:19:30.621718 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:19:30.621762072+00:00 stderr F - "", 2025-12-12T16:19:30.621762072+00:00 stderr F + "kube-scheduler", 2025-12-12T16:19:30.621762072+00:00 stderr F ) 
2025-12-12T16:19:32.032230186+00:00 stderr F I1212 16:19:32.032155 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:33.655925625+00:00 stderr F I1212 16:19:33.655401 1 reflector.go:430] "Caches populated" type="*v1.APIServer" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:35.588065316+00:00 stderr F I1212 16:19:35.587967 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:35.593370679+00:00 stderr F I1212 16:19:35.593313 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:19:35.593370679+00:00 stderr F - "", 2025-12-12T16:19:35.593370679+00:00 stderr F + "kube-scheduler", 2025-12-12T16:19:35.593370679+00:00 stderr F ) 2025-12-12T16:19:35.634407500+00:00 stderr F I1212 16:19:35.634320 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:19:35.634407500+00:00 stderr F - "", 2025-12-12T16:19:35.634407500+00:00 stderr F + "kube-scheduler", 2025-12-12T16:19:35.634407500+00:00 stderr F ) 2025-12-12T16:19:38.246393331+00:00 stderr F I1212 16:19:38.246017 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:38.373672807+00:00 stderr F I1212 16:19:38.373580 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:38.377924344+00:00 stderr F I1212 16:19:38.377846 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:19:38.377924344+00:00 stderr F - "", 2025-12-12T16:19:38.377924344+00:00 stderr F + "kube-scheduler", 2025-12-12T16:19:38.377924344+00:00 stderr F ) 2025-12-12T16:19:38.816213108+00:00 stderr F I1212 16:19:38.816051 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:39.344761599+00:00 stderr F I1212 16:19:39.344678 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:39.988617915+00:00 stderr F I1212 16:19:39.988052 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:19:39.988617915+00:00 stderr F - "", 2025-12-12T16:19:39.988617915+00:00 stderr F + "kube-scheduler", 2025-12-12T16:19:39.988617915+00:00 stderr F ) 2025-12-12T16:19:40.228559780+00:00 stderr F I1212 16:19:40.228500 1 reflector.go:430] "Caches populated" type="*v1.ClusterRoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:42.270761094+00:00 stderr F I1212 16:19:42.270402 1 reflector.go:430] "Caches populated" type="*v1.PodDisruptionBudget" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:42.986829834+00:00 stderr F I1212 16:19:42.986747 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:43.387335149+00:00 stderr F I1212 16:19:43.386951 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for 
serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:19:43.387335149+00:00 stderr F - "", 2025-12-12T16:19:43.387335149+00:00 stderr F + "kube-scheduler", 2025-12-12T16:19:43.387335149+00:00 stderr F ) 2025-12-12T16:19:44.787449643+00:00 stderr F I1212 16:19:44.787377 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:19:44.787449643+00:00 stderr F - "", 2025-12-12T16:19:44.787449643+00:00 stderr F + "kube-scheduler", 2025-12-12T16:19:44.787449643+00:00 stderr F ) 2025-12-12T16:19:46.912910539+00:00 stderr F I1212 16:19:46.911681 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:49.930330979+00:00 stderr F I1212 16:19:49.929197 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:19:49.930330979+00:00 stderr F - "", 2025-12-12T16:19:49.930330979+00:00 stderr F + "kube-scheduler", 2025-12-12T16:19:49.930330979+00:00 stderr F ) 2025-12-12T16:19:53.870192441+00:00 stderr F I1212 16:19:53.869826 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:53.873414112+00:00 stderr F I1212 16:19:53.873357 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:19:53.873414112+00:00 stderr F - "", 2025-12-12T16:19:53.873414112+00:00 stderr F + "kube-scheduler", 2025-12-12T16:19:53.873414112+00:00 stderr F ) 2025-12-12T16:19:54.877127413+00:00 stderr F I1212 16:19:54.876722 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:19:54.877127413+00:00 stderr F - "", 2025-12-12T16:19:54.877127413+00:00 stderr F + "kube-scheduler", 2025-12-12T16:19:54.877127413+00:00 stderr F ) 2025-12-12T16:19:56.120160503+00:00 stderr F I1212 16:19:56.120062 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:58.039554135+00:00 stderr F I1212 16:19:58.039221 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:00.486241096+00:00 stderr F I1212 16:20:00.485741 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:00.492397871+00:00 stderr F I1212 16:20:00.492334 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:00.492397871+00:00 stderr F - "", 2025-12-12T16:20:00.492397871+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:00.492397871+00:00 stderr F ) 2025-12-12T16:20:00.511737096+00:00 stderr F I1212 16:20:00.511669 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:00.511737096+00:00 stderr F - "", 2025-12-12T16:20:00.511737096+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:00.511737096+00:00 stderr F ) 2025-12-12T16:20:03.970335514+00:00 stderr F I1212 16:20:03.969933 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" 
reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:04.010731419+00:00 stderr F I1212 16:20:04.010652 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:04.015126709+00:00 stderr F I1212 16:20:04.015055 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:04.015126709+00:00 stderr F - "", 2025-12-12T16:20:04.015126709+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:04.015126709+00:00 stderr F ) 2025-12-12T16:20:04.610424136+00:00 stderr F I1212 16:20:04.610299 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:04.610424136+00:00 stderr F - "", 2025-12-12T16:20:04.610424136+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:04.610424136+00:00 stderr F ) 2025-12-12T16:20:04.814782877+00:00 stderr F I1212 16:20:04.814716 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:07.008847635+00:00 stderr F I1212 16:20:07.008427 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:07.809689303+00:00 stderr F I1212 16:20:07.809607 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:07.809689303+00:00 stderr F - "", 2025-12-12T16:20:07.809689303+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:07.809689303+00:00 stderr F ) 2025-12-12T16:20:13.391162841+00:00 stderr F I1212 16:20:13.390106 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:13.393704645+00:00 stderr F I1212 16:20:13.393650 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:13.393704645+00:00 stderr F - "", 2025-12-12T16:20:13.393704645+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:13.393704645+00:00 stderr F ) 2025-12-12T16:20:13.408409954+00:00 stderr F I1212 16:20:13.408314 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:13.408409954+00:00 stderr F - "", 2025-12-12T16:20:13.408409954+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:13.408409954+00:00 stderr F ) 2025-12-12T16:20:19.773434918+00:00 stderr F I1212 16:20:19.773069 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:19.776549726+00:00 stderr F I1212 16:20:19.776507 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:19.776549726+00:00 stderr F - "", 2025-12-12T16:20:19.776549726+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:19.776549726+00:00 stderr F ) 2025-12-12T16:20:19.792918877+00:00 stderr F I1212 16:20:19.792866 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:19.795567624+00:00 stderr F I1212 16:20:19.795544 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for 
serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:19.795567624+00:00 stderr F - "", 2025-12-12T16:20:19.795567624+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:19.795567624+00:00 stderr F ) 2025-12-12T16:20:20.778150554+00:00 stderr F I1212 16:20:20.778070 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:21.378214130+00:00 stderr F I1212 16:20:21.377070 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:21.378214130+00:00 stderr F - "", 2025-12-12T16:20:21.378214130+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:21.378214130+00:00 stderr F ) 2025-12-12T16:20:24.617062111+00:00 stderr F I1212 16:20:24.616961 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:24.930156922+00:00 stderr F I1212 16:20:24.930101 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:24.976151487+00:00 stderr F I1212 16:20:24.976088 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:27.628631095+00:00 stderr F I1212 16:20:27.628319 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:27.631806485+00:00 stderr F I1212 16:20:27.631764 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:27.631806485+00:00 stderr F - "", 2025-12-12T16:20:27.631806485+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:27.631806485+00:00 stderr F ) 2025-12-12T16:20:32.030233312+00:00 stderr F I1212 16:20:32.029421 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=kubeschedulers" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:32.051854909+00:00 stderr F I1212 16:20:32.050773 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:32.051854909+00:00 stderr F - "", 2025-12-12T16:20:32.051854909+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:32.051854909+00:00 stderr F ) 2025-12-12T16:20:32.051854909+00:00 stderr F I1212 16:20:32.051367 1 status_controller.go:229] clusteroperator/kube-scheduler diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/scheduler-clusterrolebinding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-scheduler:public-2\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/roles/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/rolebindings/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \nTargetConfigControllerDegraded: \"configmap\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/configmaps/config\": dial tcp 10.217.4.1:443: connect: connection refused\nTargetConfigControllerDegraded: \"configmap/serviceaccount-ca\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/configmaps/serviceaccount-ca\": dial tcp 10.217.4.1:443: connect: connection refused","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:13:36Z","message":"NodeInstallerProgressing: 1 node is at revision 6","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:56:14Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 6","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:02Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:32.067290444+00:00 stderr F I1212 16:20:32.067210 1 status_controller.go:229] clusteroperator/kube-scheduler diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/scheduler-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-scheduler:public-2\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/roles/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-rolebinding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/rolebindings/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \nTargetConfigControllerDegraded: \"configmap\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/configmaps/config\": dial tcp 10.217.4.1:443: connect: connection refused\nTargetConfigControllerDegraded: \"configmap/serviceaccount-ca\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/configmaps/serviceaccount-ca\": dial tcp 10.217.4.1:443: connect: connection refused","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:13:36Z","message":"NodeInstallerProgressing: 1 node is at revision 6","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:56:14Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 6","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:02Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:32.067420967+00:00 stderr F I1212 16:20:32.067350 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-scheduler changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready" to "NodeControllerDegraded: All master nodes are ready\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/scheduler-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-scheduler:public-2\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/roles/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/rolebindings/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \nTargetConfigControllerDegraded: \"configmap\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/configmaps/config\": dial tcp 
10.217.4.1:443: connect: connection refused\nTargetConfigControllerDegraded: \"configmap/serviceaccount-ca\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/configmaps/serviceaccount-ca\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:20:32.077004258+00:00 stderr F E1212 16:20:32.076934 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_kube-scheduler reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"kube-scheduler\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:33.045299407+00:00 stderr F I1212 16:20:33.044706 1 status_controller.go:229] clusteroperator/kube-scheduler diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/scheduler-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-scheduler:public-2\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/roles/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/rolebindings/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: ","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:13:36Z","message":"NodeInstallerProgressing: 1 node is at revision 6","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:56:14Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 6","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:02Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:33.052522876+00:00 stderr F I1212 16:20:33.052417 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-scheduler changed: Degraded message changed from "NodeControllerDegraded: All master nodes 
are ready\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/scheduler-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-scheduler:public-2\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/roles/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/rolebindings/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \nTargetConfigControllerDegraded: \"configmap\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/configmaps/config\": dial tcp 10.217.4.1:443: connect: connection refused\nTargetConfigControllerDegraded: \"configmap/serviceaccount-ca\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/configmaps/serviceaccount-ca\": dial tcp 10.217.4.1:443: connect: connection refused" to "NodeControllerDegraded: All master nodes are ready\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/scheduler-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-scheduler:public-2\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/roles/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/rolebindings/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: " 
2025-12-12T16:20:33.251545807+00:00 stderr F I1212 16:20:33.251480 1 status_controller.go:229] clusteroperator/kube-scheduler diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:13:36Z","message":"NodeInstallerProgressing: 1 node is at revision 6","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:56:14Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 6","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:02Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:02Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:33.261050286+00:00 stderr F I1212 16:20:33.260954 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator", UID:"c3ff943a-b570-4a98-8388-1f8a3280a85a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-scheduler changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-scheduler\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/scheduler-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-scheduler:public-2\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/roles/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-scheduler/policyconfigmap-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-scheduler/rolebindings/system:openshift:sa-listing-configmaps\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: " to "NodeControllerDegraded: All master nodes are ready" 2025-12-12T16:20:34.033127818+00:00 stderr F I1212 16:20:34.033062 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:34.033127818+00:00 stderr F - "", 2025-12-12T16:20:34.033127818+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:34.033127818+00:00 stderr F ) 2025-12-12T16:20:34.230506735+00:00 stderr F I1212 16:20:34.230428 1 request.go:752] "Waited before sending request" delay="1.17958202s" reason="client-side throttling, not priority and 
fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods/openshift-kube-scheduler-crc" 2025-12-12T16:20:35.431583558+00:00 stderr F I1212 16:20:35.430989 1 request.go:752] "Waited before sending request" delay="1.197085109s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-scheduler/pods/openshift-kube-scheduler-crc" 2025-12-12T16:20:37.634016248+00:00 stderr F I1212 16:20:37.633506 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:37.634016248+00:00 stderr F - "", 2025-12-12T16:20:37.634016248+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:37.634016248+00:00 stderr F ) 2025-12-12T16:20:49.931028234+00:00 stderr F I1212 16:20:49.929693 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:20:49.931028234+00:00 stderr F - "", 2025-12-12T16:20:49.931028234+00:00 stderr F + "kube-scheduler", 2025-12-12T16:20:49.931028234+00:00 stderr F ) 2025-12-12T16:21:49.930790995+00:00 stderr F I1212 16:21:49.930070 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:21:49.930790995+00:00 stderr F - "", 2025-12-12T16:21:49.930790995+00:00 stderr F + "kube-scheduler", 2025-12-12T16:21:49.930790995+00:00 stderr F ) 2025-12-12T16:22:49.931894857+00:00 stderr F I1212 16:22:49.931290 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:22:49.931894857+00:00 stderr F - "", 2025-12-12T16:22:49.931894857+00:00 stderr F + "kube-scheduler", 2025-12-12T16:22:49.931894857+00:00 stderr F ) 2025-12-12T16:23:49.933052457+00:00 stderr F I1212 16:23:49.932467 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:23:49.933052457+00:00 stderr F - "", 2025-12-12T16:23:49.933052457+00:00 stderr F + "kube-scheduler", 2025-12-12T16:23:49.933052457+00:00 stderr F ) 2025-12-12T16:24:49.934210491+00:00 stderr F I1212 16:24:49.933285 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:24:49.934210491+00:00 stderr F - "", 2025-12-12T16:24:49.934210491+00:00 stderr F + "kube-scheduler", 2025-12-12T16:24:49.934210491+00:00 stderr F ) 2025-12-12T16:25:49.935554146+00:00 stderr F I1212 16:25:49.934942 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:25:49.935554146+00:00 stderr F - "", 2025-12-12T16:25:49.935554146+00:00 stderr F + "kube-scheduler", 2025-12-12T16:25:49.935554146+00:00 stderr F ) 2025-12-12T16:26:49.936615019+00:00 stderr F I1212 16:26:49.935547 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:26:49.936615019+00:00 stderr F - "", 2025-12-12T16:26:49.936615019+00:00 stderr F + "kube-scheduler", 2025-12-12T16:26:49.936615019+00:00 stderr F ) 2025-12-12T16:27:49.938802441+00:00 stderr F I1212 16:27:49.937910 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: 
string( 2025-12-12T16:27:49.938802441+00:00 stderr F - "", 2025-12-12T16:27:49.938802441+00:00 stderr F + "kube-scheduler", 2025-12-12T16:27:49.938802441+00:00 stderr F ) 2025-12-12T16:28:49.935987211+00:00 stderr F I1212 16:28:49.935367 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:28:49.935987211+00:00 stderr F - "", 2025-12-12T16:28:49.935987211+00:00 stderr F + "kube-scheduler", 2025-12-12T16:28:49.935987211+00:00 stderr F ) 2025-12-12T16:29:30.622892147+00:00 stderr F I1212 16:29:30.622030 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:29:30.622892147+00:00 stderr F - "", 2025-12-12T16:29:30.622892147+00:00 stderr F + "kube-scheduler", 2025-12-12T16:29:30.622892147+00:00 stderr F ) 2025-12-12T16:29:35.594338290+00:00 stderr F I1212 16:29:35.593774 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:29:35.594338290+00:00 stderr F - "", 2025-12-12T16:29:35.594338290+00:00 stderr F + "kube-scheduler", 2025-12-12T16:29:35.594338290+00:00 stderr F ) 2025-12-12T16:29:35.612465887+00:00 stderr F I1212 16:29:35.612394 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:29:35.612465887+00:00 stderr F - "", 2025-12-12T16:29:35.612465887+00:00 stderr F + "kube-scheduler", 2025-12-12T16:29:35.612465887+00:00 stderr F ) 2025-12-12T16:29:38.383256021+00:00 stderr F I1212 16:29:38.383116 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:29:38.383256021+00:00 stderr F - "", 2025-12-12T16:29:38.383256021+00:00 stderr F + "kube-scheduler", 2025-12-12T16:29:38.383256021+00:00 stderr F ) 2025-12-12T16:29:39.795777231+00:00 stderr F I1212 16:29:39.795135 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:29:39.795777231+00:00 stderr F - "", 2025-12-12T16:29:39.795777231+00:00 stderr F + "kube-scheduler", 2025-12-12T16:29:39.795777231+00:00 stderr F ) 2025-12-12T16:29:43.192876522+00:00 stderr F I1212 16:29:43.192516 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:29:43.192876522+00:00 stderr F - "", 2025-12-12T16:29:43.192876522+00:00 stderr F + "kube-scheduler", 2025-12-12T16:29:43.192876522+00:00 stderr F ) 2025-12-12T16:29:44.793601418+00:00 stderr F I1212 16:29:44.793532 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:29:44.793601418+00:00 stderr F - "", 2025-12-12T16:29:44.793601418+00:00 stderr F + "kube-scheduler", 2025-12-12T16:29:44.793601418+00:00 stderr F ) 2025-12-12T16:29:49.937127598+00:00 stderr F I1212 16:29:49.936108 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:29:49.937127598+00:00 stderr F - "", 2025-12-12T16:29:49.937127598+00:00 stderr F + "kube-scheduler", 2025-12-12T16:29:49.937127598+00:00 stderr F ) 2025-12-12T16:29:53.876022330+00:00 stderr F I1212 16:29:53.875264 1 annotations.go:45] Updating 
"openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:29:53.876022330+00:00 stderr F - "", 2025-12-12T16:29:53.876022330+00:00 stderr F + "kube-scheduler", 2025-12-12T16:29:53.876022330+00:00 stderr F ) 2025-12-12T16:29:54.674196822+00:00 stderr F I1212 16:29:54.673883 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:29:54.674196822+00:00 stderr F - "", 2025-12-12T16:29:54.674196822+00:00 stderr F + "kube-scheduler", 2025-12-12T16:29:54.674196822+00:00 stderr F ) 2025-12-12T16:30:00.494051194+00:00 stderr F I1212 16:30:00.493543 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:30:00.494051194+00:00 stderr F - "", 2025-12-12T16:30:00.494051194+00:00 stderr F + "kube-scheduler", 2025-12-12T16:30:00.494051194+00:00 stderr F ) 2025-12-12T16:30:00.678231208+00:00 stderr F I1212 16:30:00.676143 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:30:00.678231208+00:00 stderr F - "", 2025-12-12T16:30:00.678231208+00:00 stderr F + "kube-scheduler", 2025-12-12T16:30:00.678231208+00:00 stderr F ) 2025-12-12T16:30:04.018015015+00:00 stderr F I1212 16:30:04.017420 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:30:04.018015015+00:00 stderr F - "", 2025-12-12T16:30:04.018015015+00:00 stderr F + "kube-scheduler", 2025-12-12T16:30:04.018015015+00:00 stderr F ) 2025-12-12T16:30:05.416997635+00:00 stderr F I1212 16:30:05.416903 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:30:05.416997635+00:00 stderr F - "", 2025-12-12T16:30:05.416997635+00:00 stderr F + "kube-scheduler", 2025-12-12T16:30:05.416997635+00:00 stderr F ) 2025-12-12T16:30:08.218278957+00:00 stderr F I1212 16:30:08.217683 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:30:08.218278957+00:00 stderr F - "", 2025-12-12T16:30:08.218278957+00:00 stderr F + "kube-scheduler", 2025-12-12T16:30:08.218278957+00:00 stderr F ) 2025-12-12T16:30:13.397860580+00:00 stderr F I1212 16:30:13.397016 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:30:13.397860580+00:00 stderr F - "", 2025-12-12T16:30:13.397860580+00:00 stderr F + "kube-scheduler", 2025-12-12T16:30:13.397860580+00:00 stderr F ) 2025-12-12T16:30:13.420982568+00:00 stderr F I1212 16:30:13.420879 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:30:13.420982568+00:00 stderr F - "", 2025-12-12T16:30:13.420982568+00:00 stderr F + "kube-scheduler", 2025-12-12T16:30:13.420982568+00:00 stderr F ) 2025-12-12T16:30:19.779421461+00:00 stderr F I1212 16:30:19.779356 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:30:19.779421461+00:00 stderr F - "", 2025-12-12T16:30:19.779421461+00:00 stderr F + "kube-scheduler", 2025-12-12T16:30:19.779421461+00:00 stderr F ) 
2025-12-12T16:30:19.802704962+00:00 stderr F I1212 16:30:19.802639 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:30:19.802704962+00:00 stderr F - "", 2025-12-12T16:30:19.802704962+00:00 stderr F + "kube-scheduler", 2025-12-12T16:30:19.802704962+00:00 stderr F ) 2025-12-12T16:30:27.633836353+00:00 stderr F I1212 16:30:27.633288 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:30:27.633836353+00:00 stderr F - "", 2025-12-12T16:30:27.633836353+00:00 stderr F + "kube-scheduler", 2025-12-12T16:30:27.633836353+00:00 stderr F ) 2025-12-12T16:30:49.937788612+00:00 stderr F I1212 16:30:49.936999 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:30:49.937788612+00:00 stderr F - "", 2025-12-12T16:30:49.937788612+00:00 stderr F + "kube-scheduler", 2025-12-12T16:30:49.937788612+00:00 stderr F ) 2025-12-12T16:31:49.939606749+00:00 stderr F I1212 16:31:49.938678 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:31:49.939606749+00:00 stderr F - "", 2025-12-12T16:31:49.939606749+00:00 stderr F + "kube-scheduler", 2025-12-12T16:31:49.939606749+00:00 stderr F ) 2025-12-12T16:32:49.942609344+00:00 stderr F I1212 16:32:49.941414 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:32:49.942609344+00:00 stderr F - "", 2025-12-12T16:32:49.942609344+00:00 stderr F + "kube-scheduler", 2025-12-12T16:32:49.942609344+00:00 stderr F ) 2025-12-12T16:33:49.942446662+00:00 stderr F I1212 16:33:49.941334 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:33:49.942446662+00:00 stderr F - "", 2025-12-12T16:33:49.942446662+00:00 stderr F + "kube-scheduler", 2025-12-12T16:33:49.942446662+00:00 stderr F ) 2025-12-12T16:34:49.941566140+00:00 stderr F I1212 16:34:49.940547 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:34:49.941566140+00:00 stderr F - "", 2025-12-12T16:34:49.941566140+00:00 stderr F + "kube-scheduler", 2025-12-12T16:34:49.941566140+00:00 stderr F ) 2025-12-12T16:35:49.939977886+00:00 stderr F I1212 16:35:49.939361 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:35:49.939977886+00:00 stderr F - "", 2025-12-12T16:35:49.939977886+00:00 stderr F + "kube-scheduler", 2025-12-12T16:35:49.939977886+00:00 stderr F ) 2025-12-12T16:36:49.944894940+00:00 stderr F I1212 16:36:49.943685 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:36:49.944894940+00:00 stderr F - "", 2025-12-12T16:36:49.944894940+00:00 stderr F + "kube-scheduler", 2025-12-12T16:36:49.944894940+00:00 stderr F ) 2025-12-12T16:37:49.942388946+00:00 stderr F I1212 16:37:49.941812 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:37:49.942388946+00:00 stderr F - "", 
2025-12-12T16:37:49.942388946+00:00 stderr F + "kube-scheduler", 2025-12-12T16:37:49.942388946+00:00 stderr F ) 2025-12-12T16:38:49.945005736+00:00 stderr F I1212 16:38:49.943862 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:38:49.945005736+00:00 stderr F - "", 2025-12-12T16:38:49.945005736+00:00 stderr F + "kube-scheduler", 2025-12-12T16:38:49.945005736+00:00 stderr F ) 2025-12-12T16:39:30.623505524+00:00 stderr F I1212 16:39:30.622498 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:39:30.623505524+00:00 stderr F - "", 2025-12-12T16:39:30.623505524+00:00 stderr F + "kube-scheduler", 2025-12-12T16:39:30.623505524+00:00 stderr F ) 2025-12-12T16:39:35.602936755+00:00 stderr F I1212 16:39:35.602352 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:39:35.602936755+00:00 stderr F - "", 2025-12-12T16:39:35.602936755+00:00 stderr F + "kube-scheduler", 2025-12-12T16:39:35.602936755+00:00 stderr F ) 2025-12-12T16:39:35.625103062+00:00 stderr F I1212 16:39:35.625016 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:39:35.625103062+00:00 stderr F - "", 2025-12-12T16:39:35.625103062+00:00 stderr F + "kube-scheduler", 2025-12-12T16:39:35.625103062+00:00 stderr F ) 2025-12-12T16:39:38.384958935+00:00 stderr F I1212 16:39:38.384879 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:39:38.384958935+00:00 stderr F - "", 2025-12-12T16:39:38.384958935+00:00 stderr F + "kube-scheduler", 2025-12-12T16:39:38.384958935+00:00 stderr F ) 2025-12-12T16:39:40.001234495+00:00 stderr F I1212 16:39:40.000855 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:39:40.001234495+00:00 stderr F - "", 2025-12-12T16:39:40.001234495+00:00 stderr F + "kube-scheduler", 2025-12-12T16:39:40.001234495+00:00 stderr F ) 2025-12-12T16:39:43.202390952+00:00 stderr F I1212 16:39:43.201828 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:39:43.202390952+00:00 stderr F - "", 2025-12-12T16:39:43.202390952+00:00 stderr F + "kube-scheduler", 2025-12-12T16:39:43.202390952+00:00 stderr F ) 2025-12-12T16:39:44.802052263+00:00 stderr F I1212 16:39:44.801905 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:39:44.802052263+00:00 stderr F - "", 2025-12-12T16:39:44.802052263+00:00 stderr F + "kube-scheduler", 2025-12-12T16:39:44.802052263+00:00 stderr F ) 2025-12-12T16:39:49.943767550+00:00 stderr F I1212 16:39:49.943066 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:39:49.943767550+00:00 stderr F - "", 2025-12-12T16:39:49.943767550+00:00 stderr F + "kube-scheduler", 2025-12-12T16:39:49.943767550+00:00 stderr F ) 2025-12-12T16:39:53.879513355+00:00 stderr F I1212 16:39:53.877602 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for 
serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:39:53.879513355+00:00 stderr F - "", 2025-12-12T16:39:53.879513355+00:00 stderr F + "kube-scheduler", 2025-12-12T16:39:53.879513355+00:00 stderr F ) 2025-12-12T16:39:54.477314905+00:00 stderr F I1212 16:39:54.477221 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:39:54.477314905+00:00 stderr F - "", 2025-12-12T16:39:54.477314905+00:00 stderr F + "kube-scheduler", 2025-12-12T16:39:54.477314905+00:00 stderr F ) 2025-12-12T16:40:00.496998752+00:00 stderr F I1212 16:40:00.495959 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:40:00.496998752+00:00 stderr F - "", 2025-12-12T16:40:00.496998752+00:00 stderr F + "kube-scheduler", 2025-12-12T16:40:00.496998752+00:00 stderr F ) 2025-12-12T16:40:00.516274897+00:00 stderr F I1212 16:40:00.516205 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:40:00.516274897+00:00 stderr F - "", 2025-12-12T16:40:00.516274897+00:00 stderr F + "kube-scheduler", 2025-12-12T16:40:00.516274897+00:00 stderr F ) 2025-12-12T16:40:04.019438964+00:00 stderr F I1212 16:40:04.018785 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:40:04.019438964+00:00 stderr F - "", 2025-12-12T16:40:04.019438964+00:00 stderr F + "kube-scheduler", 2025-12-12T16:40:04.019438964+00:00 stderr F ) 2025-12-12T16:40:05.417274275+00:00 stderr F I1212 16:40:05.416700 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:40:05.417274275+00:00 stderr F - "", 2025-12-12T16:40:05.417274275+00:00 stderr F + "kube-scheduler", 2025-12-12T16:40:05.417274275+00:00 stderr F ) 2025-12-12T16:40:08.219698978+00:00 stderr F I1212 16:40:08.218668 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:40:08.219698978+00:00 stderr F - "", 2025-12-12T16:40:08.219698978+00:00 stderr F + "kube-scheduler", 2025-12-12T16:40:08.219698978+00:00 stderr F ) 2025-12-12T16:40:13.398935929+00:00 stderr F I1212 16:40:13.397804 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:40:13.398935929+00:00 stderr F - "", 2025-12-12T16:40:13.398935929+00:00 stderr F + "kube-scheduler", 2025-12-12T16:40:13.398935929+00:00 stderr F ) 2025-12-12T16:40:13.415801043+00:00 stderr F I1212 16:40:13.415739 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:40:13.415801043+00:00 stderr F - "", 2025-12-12T16:40:13.415801043+00:00 stderr F + "kube-scheduler", 2025-12-12T16:40:13.415801043+00:00 stderr F ) 2025-12-12T16:40:19.782036048+00:00 stderr F I1212 16:40:19.780510 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:40:19.782036048+00:00 stderr F - "", 2025-12-12T16:40:19.782036048+00:00 stderr F + "kube-scheduler", 2025-12-12T16:40:19.782036048+00:00 stderr F ) 2025-12-12T16:40:19.817221932+00:00 stderr F 
I1212 16:40:19.817077 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:40:19.817221932+00:00 stderr F - "", 2025-12-12T16:40:19.817221932+00:00 stderr F + "kube-scheduler", 2025-12-12T16:40:19.817221932+00:00 stderr F ) 2025-12-12T16:40:27.636846892+00:00 stderr F I1212 16:40:27.635986 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:40:27.636846892+00:00 stderr F - "", 2025-12-12T16:40:27.636846892+00:00 stderr F + "kube-scheduler", 2025-12-12T16:40:27.636846892+00:00 stderr F ) 2025-12-12T16:40:49.946226534+00:00 stderr F I1212 16:40:49.945023 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:40:49.946226534+00:00 stderr F - "", 2025-12-12T16:40:49.946226534+00:00 stderr F + "kube-scheduler", 2025-12-12T16:40:49.946226534+00:00 stderr F ) 2025-12-12T16:41:49.948291288+00:00 stderr F I1212 16:41:49.947160 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:41:49.948291288+00:00 stderr F - "", 2025-12-12T16:41:49.948291288+00:00 stderr F + "kube-scheduler", 2025-12-12T16:41:49.948291288+00:00 stderr F ) 2025-12-12T16:42:49.948070288+00:00 stderr F I1212 16:42:49.947248 1 annotations.go:45] Updating "openshift.io/owning-component" annotation for serviceaccount-ca/openshift-kube-scheduler, diff: string( 2025-12-12T16:42:49.948070288+00:00 stderr F - "", 2025-12-12T16:42:49.948070288+00:00 stderr F + "kube-scheduler", 2025-12-12T16:42:49.948070288+00:00 stderr F )
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt_c184b148-4467-4bd5-8204-6369360370ee/cert-manager-webhook/0.log
2025-12-12T16:28:08.231322471+00:00 stderr F I1212 16:28:08.231098 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:28:08.231322471+00:00 stderr F I1212 16:28:08.231259 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:28:08.231322471+00:00 stderr F I1212 16:28:08.231268 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
2025-12-12T16:28:08.231322471+00:00 stderr F I1212 16:28:08.231268 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:28:08.245290294+00:00 stderr F I1212 16:28:08.245194 1 webhook.go:132] "using dynamic certificate generating using CA stored in Secret resource" logger="cert-manager.webhook.webhook" secret_namespace="cert-manager" secret_name="cert-manager-webhook-ca" 2025-12-12T16:28:08.245290294+00:00 stderr F I1212 16:28:08.245244 1 webhook.go:144] "serving insecurely as tls certificate data not provided" logger="cert-manager.webhook.webhook" 2025-12-12T16:28:08.246333590+00:00 stderr F I1212 16:28:08.246297 1 server.go:192] "listening for insecure healthz connections" logger="cert-manager.webhook" address=6080 2025-12-12T16:28:08.246436543+00:00 stderr F I1212 16:28:08.246415 1 server.go:183] "Registering webhook" logger="cert-manager.controller-runtime.webhook" path="/mutate" 2025-12-12T16:28:08.246510445+00:00 stderr F I1212 16:28:08.246491 1 server.go:183] "Registering webhook" logger="cert-manager.controller-runtime.webhook" path="/validate" 2025-12-12T16:28:08.246577147+00:00 stderr F I1212 16:28:08.246557 1 server.go:208] "Starting metrics server" logger="cert-manager.controller-runtime.metrics" 2025-12-12T16:28:08.246882374+00:00 stderr F I1212 16:28:08.246854 1 server.go:247] "Serving metrics server" logger="cert-manager.controller-runtime.metrics" bindAddress="0.0.0.0:9402" secure=false 2025-12-12T16:28:08.246951216+00:00 stderr F I1212 16:28:08.246932 1 server.go:191] "Starting webhook server" logger="cert-manager.controller-runtime.webhook" 2025-12-12T16:28:08.247038968+00:00 stderr F I1212 16:28:08.247007 1 server.go:242] "Serving webhook server" logger="cert-manager.controller-runtime.webhook" host="" port=10250 2025-12-12T16:28:08.256561709+00:00 stderr F E1212 16:28:08.256231 1 dynamic_source.go:221] "Failed to generate serving certificate, retrying..." 
err="no tls.Certificate available yet, try again later" logger="cert-manager" interval="1s" 2025-12-12T16:28:08.275138660+00:00 stderr F I1212 16:28:08.275068 1 reflector.go:376] Caches populated for *v1.Secret from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.375458718+00:00 stderr F I1212 16:28:08.375387 1 authority.go:273] "Will regenerate CA" logger="cert-manager" reason="CA secret not found" 2025-12-12T16:28:08.450096677+00:00 stderr F I1212 16:28:08.413154 1 authority.go:416] "Created new root CA Secret" logger="cert-manager" 2025-12-12T16:28:08.478258330+00:00 stderr F I1212 16:28:08.475315 1 authority.go:293] "Detected change in CA secret data, update current CA data and notify watches" logger="cert-manager" 2025-12-12T16:28:09.253378498+00:00 stderr F I1212 16:28:09.252271 1 dynamic_source.go:290] "Updated cert-manager TLS certificate" logger="cert-manager" DNSNames=["cert-manager-webhook","cert-manager-webhook.cert-manager","cert-manager-webhook.cert-manager.svc"] 2025-12-12T16:28:09.253378498+00:00 stderr F I1212 16:28:09.252353 1 dynamic_source.go:172] "Detected root CA rotation - regenerating serving certificates" logger="cert-manager" 2025-12-12T16:28:09.255758368+00:00 stderr F I1212 16:28:09.255715 1 dynamic_source.go:290] "Updated cert-manager TLS certificate" logger="cert-manager" DNSNames=["cert-manager-webhook","cert-manager-webhook.cert-manager","cert-manager-webhook.cert-manager.svc"] ././@LongLink0000644000000000000000000000026100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_network-operator-7bdcf4f5bd-7fjxv_34177974-8d82-49d2-a763-391d0df3bbd8/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-oper0000755000175000017500000000000015117043043033102 5ustar zuulzuul././@LongLink0000644000000000000000000000030200000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_network-operator-7bdcf4f5bd-7fjxv_34177974-8d82-49d2-a763-391d0df3bbd8/network-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-oper0000755000175000017500000000000015117043063033104 5ustar zuulzuul././@LongLink0000644000000000000000000000030700000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_network-operator-7bdcf4f5bd-7fjxv_34177974-8d82-49d2-a763-391d0df3bbd8/network-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-oper0000644000175000017500000323256215117043044033122 0ustar zuulzuul2025-12-12T16:16:23.260281139+00:00 stderr F I1212 16:16:23.258725 1 cmd.go:253] Using service-serving-cert provided certificates 2025-12-12T16:16:23.264377409+00:00 stderr F I1212 16:16:23.260620 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 
2025-12-12T16:16:23.264377409+00:00 stderr F I1212 16:16:23.261588 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:23.292139456+00:00 stderr F I1212 16:16:23.292046 1 builder.go:304] network-operator version 4.20.0-202510211040.p2.gb0393aa.assembly.stream.el9-b0393aa-b0393aa3e67302d89e91b8f7b1013b6d2e317f04 2025-12-12T16:16:23.584371081+00:00 stderr F I1212 16:16:23.582985 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:23.584371081+00:00 stderr F W1212 16:16:23.583572 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:23.584371081+00:00 stderr F W1212 16:16:23.583579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:23.584371081+00:00 stderr F W1212 16:16:23.583587 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:23.584371081+00:00 stderr F W1212 16:16:23.583590 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:23.584371081+00:00 stderr F W1212 16:16:23.583592 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:23.584371081+00:00 stderr F W1212 16:16:23.583595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 2025-12-12T16:16:23.587565429+00:00 stderr F I1212 16:16:23.587489 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:23.587784204+00:00 stderr F I1212 16:16:23.587714 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:23.587784204+00:00 stderr F I1212 16:16:23.587742 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:23.587946858+00:00 stderr F I1212 16:16:23.587901 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:23.588207125+00:00 stderr F I1212 16:16:23.588164 1 secure_serving.go:211] Serving securely on [::]:9104 2025-12-12T16:16:23.588285997+00:00 stderr F I1212 16:16:23.588255 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:23.588404320+00:00 stderr F I1212 16:16:23.588379 1 leaderelection.go:257] attempting to acquire leader lease openshift-network-operator/network-operator-lock... 
2025-12-12T16:16:23.588415560+00:00 stderr F I1212 16:16:23.588407 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:23.588527123+00:00 stderr F I1212 16:16:23.588489 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:23.588563063+00:00 stderr F I1212 16:16:23.588493 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:23.589052315+00:00 stderr F I1212 16:16:23.589000 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:23.594525889+00:00 stderr F I1212 16:16:23.594491 1 leaderelection.go:271] successfully acquired lease openshift-network-operator/network-operator-lock 2025-12-12T16:16:23.595507993+00:00 stderr F I1212 16:16:23.595432 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-network-operator", Name:"network-operator-lock", UID:"8a9ee729-0957-486a-9f31-073de9b712c1", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"36389", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' crc_95484a66-a993-47ab-8ce5-7ead61fc20c6 became leader 2025-12-12T16:16:23.613302217+00:00 stderr F I1212 16:16:23.613238 1 operator.go:104] Creating status manager for stand-alone cluster 2025-12-12T16:16:23.613341408+00:00 stderr F I1212 16:16:23.613312 1 operator.go:108] Fetching cluster feature gates... 2025-12-12T16:16:23.614588459+00:00 stderr F I1212 16:16:23.614532 1 operator.go:126] Waiting for feature gates initialization... 2025-12-12T16:16:23.614606279+00:00 stderr F I1212 16:16:23.614589 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:23.620445842+00:00 stderr F I1212 16:16:23.620365 1 operator.go:133] FeatureGates initialized: knownFeatureGates=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration 
MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:16:23.620481163+00:00 stderr F I1212 16:16:23.620408 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-network-operator", Name:"network-operator", UID:"2c897060-d3cf-4d7f-8d38-ef464b7a697a", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", 
"ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:23.620481163+00:00 stderr F I1212 16:16:23.620436 1 operator.go:145] Adding controller-runtime controllers 2025-12-12T16:16:23.632942437+00:00 stderr F I1212 16:16:23.632819 1 client.go:241] Starting informers... 2025-12-12T16:16:23.633211914+00:00 stderr F I1212 16:16:23.633094 1 client.go:252] Waiting for informers to sync... 2025-12-12T16:16:23.688921444+00:00 stderr F I1212 16:16:23.688833 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:23.689013856+00:00 stderr F I1212 16:16:23.688948 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:23.689317963+00:00 stderr F I1212 16:16:23.689292 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:23.733364879+00:00 stderr F I1212 16:16:23.733279 1 client.go:273] Informers started and synced 2025-12-12T16:16:23.733364879+00:00 stderr F I1212 16:16:23.733345 1 operator.go:169] Starting controller-manager 2025-12-12T16:16:23.733599394+00:00 stderr F I1212 16:16:23.733560 1 base_controller.go:76] Waiting for caches to sync for cluster-network-operator-ManagementState 2025-12-12T16:16:23.733599394+00:00 stderr F I1212 16:16:23.733562 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer 2025-12-12T16:16:23.733609675+00:00 stderr F I1212 16:16:23.733601 1 base_controller.go:82] Caches are synced for cluster-network-operator-ManagementState 2025-12-12T16:16:23.733609675+00:00 stderr F I1212 16:16:23.733604 1 base_controller.go:82] Caches are synced for LoggingSyncer 2025-12-12T16:16:23.733629685+00:00 stderr F I1212 16:16:23.733618 1 base_controller.go:119] Starting #1 worker of cluster-network-operator-ManagementState controller ... 2025-12-12T16:16:23.733643526+00:00 stderr F I1212 16:16:23.733630 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ... 
2025-12-12T16:16:23.736408423+00:00 stderr F I1212 16:16:23.736295 1 base_controller.go:76] Waiting for caches to sync for ConnectivityCheckController 2025-12-12T16:16:23.791749404+00:00 stderr F I1212 16:16:23.791667 1 controller.go:246] "Starting EventSource" controller="proxyconfig-controller" source="kind source: *v1.Proxy" 2025-12-12T16:16:23.791749404+00:00 stderr F I1212 16:16:23.791696 1 controller.go:246] "Starting EventSource" controller="egress-router-controller" source="kind source: *v1.EgressRouter" 2025-12-12T16:16:23.791866197+00:00 stderr F I1212 16:16:23.791824 1 controller.go:246] "Starting EventSource" controller="ingress-config-controller" source="kind source: *v1.IngressController" 2025-12-12T16:16:23.792000270+00:00 stderr F I1212 16:16:23.791709 1 controller.go:246] "Starting EventSource" controller="machineconfig-watcher" source="kind source: *v1.MachineConfig" 2025-12-12T16:16:23.792000270+00:00 stderr F I1212 16:16:23.791982 1 controller.go:246] "Starting EventSource" controller="pod-watcher" source="informer source: 0xc0010cc630" 2025-12-12T16:16:23.792128193+00:00 stderr F I1212 16:16:23.792098 1 controller.go:246] "Starting EventSource" controller="configmap-trust-bundle-injector-controller" source="informer source: 0xc000293ad0" 2025-12-12T16:16:23.792128193+00:00 stderr F I1212 16:16:23.791829 1 controller.go:246] "Starting EventSource" controller="proxyconfig-controller" source="informer source: 0xc0002936b0" 2025-12-12T16:16:23.792217626+00:00 stderr F I1212 16:16:23.792192 1 controller.go:246] "Starting EventSource" controller="configmap-trust-bundle-injector-controller" source="informer source: 0xc000293a20" 2025-12-12T16:16:23.792217626+00:00 stderr F I1212 16:16:23.792170 1 controller.go:246] "Starting EventSource" controller="pod-watcher" source="informer source: 0xc0010cc580" 2025-12-12T16:16:23.792259547+00:00 stderr F I1212 16:16:23.792236 1 controller.go:186] "Starting Controller" controller="configmap-trust-bundle-injector-controller" 2025-12-12T16:16:23.792287037+00:00 stderr F I1212 16:16:23.792167 1 controller.go:246] "Starting EventSource" controller="pod-watcher" source="informer source: 0xc0010cc4d0" 2025-12-12T16:16:23.792287037+00:00 stderr F I1212 16:16:23.792279 1 controller.go:246] "Starting EventSource" controller="dashboard-controller" source="informer source: 0xc0010cc420" 2025-12-12T16:16:23.792296708+00:00 stderr F I1212 16:16:23.792286 1 controller.go:246] "Starting EventSource" controller="allowlist-controller" source="informer source: 0xc0010cc2c0" 2025-12-12T16:16:23.792331888+00:00 stderr F I1212 16:16:23.792311 1 controller.go:186] "Starting Controller" controller="pod-watcher" 2025-12-12T16:16:23.792340999+00:00 stderr F I1212 16:16:23.792334 1 controller.go:195] "Starting workers" controller="pod-watcher" worker count=1 2025-12-12T16:16:23.792348599+00:00 stderr F I1212 16:16:23.792339 1 controller.go:186] "Starting Controller" controller="allowlist-controller" 2025-12-12T16:16:23.792378750+00:00 stderr F I1212 16:16:23.792355 1 controller.go:186] "Starting Controller" controller="dashboard-controller" 2025-12-12T16:16:23.792378750+00:00 stderr F I1212 16:16:23.792364 1 controller.go:195] "Starting workers" controller="allowlist-controller" worker count=1 2025-12-12T16:16:23.792389400+00:00 stderr F I1212 16:16:23.792375 1 controller.go:195] "Starting workers" controller="dashboard-controller" worker count=1 2025-12-12T16:16:23.792472482+00:00 stderr F I1212 16:16:23.792445 1 controller.go:246] "Starting EventSource" 
controller="infrastructureconfig-controller" source="kind source: *v1.Infrastructure" 2025-12-12T16:16:23.792497472+00:00 stderr F I1212 16:16:23.792488 1 dashboard_controller.go:117] Reconcile dashboards 2025-12-12T16:16:23.792539723+00:00 stderr F I1212 16:16:23.792511 1 controller.go:246] "Starting EventSource" controller="operconfig-controller" source="kind source: *v1.Node" 2025-12-12T16:16:23.792565134+00:00 stderr F I1212 16:16:23.792237 1 controller.go:246] "Starting EventSource" controller="clusterconfig-controller" source="kind source: *v1.Network" 2025-12-12T16:16:23.792660206+00:00 stderr F I1212 16:16:23.792265 1 controller.go:195] "Starting workers" controller="configmap-trust-bundle-injector-controller" worker count=1 2025-12-12T16:16:23.792710698+00:00 stderr F I1212 16:16:23.792649 1 controller.go:246] "Starting EventSource" controller="operconfig-controller" source="kind source: *v1.Network" 2025-12-12T16:16:23.792710698+00:00 stderr F I1212 16:16:23.792704 1 controller.go:246] "Starting EventSource" controller="pki-controller" source="kind source: *v1.OperatorPKI" 2025-12-12T16:16:23.792734388+00:00 stderr F I1212 16:16:23.792657 1 controller.go:246] "Starting EventSource" controller="operconfig-controller" source="kind source: *v1.Network" 2025-12-12T16:16:23.792743558+00:00 stderr F I1212 16:16:23.792668 1 controller.go:246] "Starting EventSource" controller="operconfig-controller" source="informer source: 0xc0002938c0" 2025-12-12T16:16:23.792793050+00:00 stderr F I1212 16:16:23.792684 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:16:23.792803400+00:00 stderr F I1212 16:16:23.792796 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-additional-cni-plugins updated, re-generating status 2025-12-12T16:16:23.792810830+00:00 stderr F I1212 16:16:23.792688 1 controller.go:246] "Starting EventSource" controller="signer-controller" source="kind source: *v1.CertificateSigningRequest" 2025-12-12T16:16:23.792810830+00:00 stderr F I1212 16:16:23.792805 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/network-metrics-daemon updated, re-generating status 2025-12-12T16:16:23.792818530+00:00 stderr F I1212 16:16:23.792812 1 pod_watcher.go:132] Operand /, Kind= openshift-network-node-identity/network-node-identity updated, re-generating status 2025-12-12T16:16:23.792827080+00:00 stderr F I1212 16:16:23.792820 1 pod_watcher.go:132] Operand /, Kind= openshift-network-operator/iptables-alerter updated, re-generating status 2025-12-12T16:16:23.792833441+00:00 stderr F I1212 16:16:23.792773 1 controller.go:246] "Starting EventSource" controller="machineconfigpool-watcher" source="kind source: *v1.MachineConfigPool" 2025-12-12T16:16:23.792840401+00:00 stderr F I1212 16:16:23.792831 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:16:23.793068406+00:00 stderr F I1212 16:16:23.793054 1 log.go:245] Reconciling configmap from openshift-image-registry/trusted-ca 2025-12-12T16:16:23.793451556+00:00 stderr F I1212 16:16:23.793431 1 log.go:245] openshift-network-operator/iptables-alerter-script changed, triggering operconf reconciliation 2025-12-12T16:16:23.793466256+00:00 stderr F I1212 16:16:23.793458 1 log.go:245] openshift-network-operator/kube-root-ca.crt changed, triggering operconf reconciliation 2025-12-12T16:16:23.793489417+00:00 stderr F I1212 16:16:23.793472 1 log.go:245] openshift-network-operator/mtu changed, triggering operconf 
reconciliation 2025-12-12T16:16:23.793489417+00:00 stderr F I1212 16:16:23.793485 1 log.go:245] openshift-network-operator/openshift-service-ca.crt changed, triggering operconf reconciliation 2025-12-12T16:16:23.797192197+00:00 stderr F I1212 16:16:23.797159 1 log.go:245] ConfigMap openshift-image-registry/trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:16:23.797795042+00:00 stderr F I1212 16:16:23.797768 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.797915175+00:00 stderr F I1212 16:16:23.797885 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:23.797980436+00:00 stderr F I1212 16:16:23.797953 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-admission-controller updated, re-generating status 2025-12-12T16:16:23.798075819+00:00 stderr F I1212 16:16:23.798052 1 pod_watcher.go:132] Operand /, Kind= openshift-network-console/networking-console-plugin updated, re-generating status 2025-12-12T16:16:23.798085789+00:00 stderr F I1212 16:16:23.798080 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:16:23.798265453+00:00 stderr F I1212 16:16:23.798238 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.798369926+00:00 stderr F I1212 16:16:23.798343 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.798573821+00:00 stderr F I1212 16:16:23.798559 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.798884358+00:00 stderr F I1212 16:16:23.798856 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.799028892+00:00 stderr F I1212 16:16:23.799002 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=openshiftapiservers" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.799104464+00:00 stderr F I1212 16:16:23.799080 1 reflector.go:430] "Caches populated" type="*v1.EgressRouter" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.799160305+00:00 stderr F I1212 16:16:23.799141 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:23.799203476+00:00 stderr F I1212 16:16:23.799191 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.799243967+00:00 stderr F I1212 16:16:23.799226 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.803242215+00:00 stderr F I1212 16:16:23.800315 1 reflector.go:430] "Caches populated" type="*v1.IngressController" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.806438 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.806471 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use 
discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.806510 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.808344 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.808706 1 reflector.go:430] "Caches populated" type="*v1.Network" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.808947 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.809868 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.811902 1 reflector.go:430] "Caches populated" type="*v1.OperatorPKI" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.813037 1 dashboard_controller.go:143] Applying dashboards manifests 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.813080 1 reflector.go:430] "Caches populated" type="*v1.Network" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.813533 1 allowlist_controller.go:106] Reconcile allowlist for openshift-multus/cni-sysctl-allowlist 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.813715 1 reflector.go:430] "Caches populated" type="*v1.CertificateSigningRequest" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.813853 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.813893 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.815214 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.816634 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.816964 1 reflector.go:430] "Caches populated" type="*v1alpha1.PodNetworkConnectivityCheck" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.822289040+00:00 stderr F I1212 16:16:23.818359 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.833601156+00:00 stderr F I1212 16:16:23.833286 1 reflector.go:430] "Caches populated" type="*v1.MachineConfigPool" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.844706237+00:00 stderr F I1212 16:16:23.843112 1 warning_handler.go:64] "unknown field \"spec.template.spec.volumes[0].configMap.namespace\"" controller="allowlist-controller" object="openshift-multus/cni-sysctl-allowlist" namespace="openshift-multus" name="cni-sysctl-allowlist" 
reconcileID="fcdcc3df-0640-4686-b9b7-48d24971da6d" 2025-12-12T16:16:23.844706237+00:00 stderr F I1212 16:16:23.843255 1 warning_handler.go:64] "unknown field \"spec.template.spec.volumes[0].defaultMode\"" controller="allowlist-controller" object="openshift-multus/cni-sysctl-allowlist" namespace="openshift-multus" name="cni-sysctl-allowlist" reconcileID="fcdcc3df-0640-4686-b9b7-48d24971da6d" 2025-12-12T16:16:23.845450515+00:00 stderr F I1212 16:16:23.845412 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/grafana-dashboard-ovn-health 2025-12-12T16:16:23.858399511+00:00 stderr F I1212 16:16:23.858345 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-additional-cni-plugins updated, re-generating status 2025-12-12T16:16:23.858492824+00:00 stderr F I1212 16:16:23.858476 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-additional-cni-plugins updated, re-generating status 2025-12-12T16:16:23.862899361+00:00 stderr F I1212 16:16:23.862874 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/grafana-dashboard-ovn-health was successful 2025-12-12T16:16:23.862953373+00:00 stderr F I1212 16:16:23.862944 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/grafana-dashboard-network-stats 2025-12-12T16:16:23.866533420+00:00 stderr F I1212 16:16:23.866035 1 reflector.go:430] "Caches populated" type="*v1.MachineConfig" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.872601298+00:00 stderr F I1212 16:16:23.872563 1 pod_watcher.go:132] Operand /, Kind= openshift-network-node-identity/network-node-identity updated, re-generating status 2025-12-12T16:16:23.872656970+00:00 stderr F I1212 16:16:23.872647 1 pod_watcher.go:132] Operand /, Kind= openshift-network-node-identity/network-node-identity updated, re-generating status 2025-12-12T16:16:23.878142673+00:00 stderr F I1212 16:16:23.878105 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/grafana-dashboard-network-stats was successful 2025-12-12T16:16:23.892450503+00:00 stderr F I1212 16:16:23.892350 1 controller.go:186] "Starting Controller" controller="egress-router-controller" 2025-12-12T16:16:23.892450503+00:00 stderr F I1212 16:16:23.892389 1 controller.go:195] "Starting workers" controller="egress-router-controller" worker count=1 2025-12-12T16:16:23.892536935+00:00 stderr F I1212 16:16:23.892500 1 controller.go:186] "Starting Controller" controller="machineconfig-watcher" 2025-12-12T16:16:23.892547215+00:00 stderr F I1212 16:16:23.892539 1 controller.go:195] "Starting workers" controller="machineconfig-watcher" worker count=1 2025-12-12T16:16:23.892609557+00:00 stderr F I1212 16:16:23.892589 1 controller.go:186] "Starting Controller" controller="ingress-config-controller" 2025-12-12T16:16:23.892609557+00:00 stderr F I1212 16:16:23.892600 1 controller.go:195] "Starting workers" controller="ingress-config-controller" worker count=1 2025-12-12T16:16:23.892800161+00:00 stderr F I1212 16:16:23.892778 1 log.go:245] Reconciling update to IngressController openshift-ingress-operator/default 2025-12-12T16:16:23.893042037+00:00 stderr F I1212 16:16:23.892788 1 controller.go:186] "Starting Controller" controller="infrastructureconfig-controller" 2025-12-12T16:16:23.893151150+00:00 stderr F I1212 16:16:23.893091 1 controller.go:195] "Starting workers" controller="infrastructureconfig-controller" worker count=1 2025-12-12T16:16:23.904960538+00:00 stderr F I1212 16:16:23.902630 1 log.go:245] /crc changed, 
triggering operconf reconciliation 2025-12-12T16:16:23.904960538+00:00 stderr F I1212 16:16:23.903197 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:16:23.904960538+00:00 stderr F I1212 16:16:23.903225 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:16:23.904960538+00:00 stderr F I1212 16:16:23.904587 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.925213043+00:00 stderr F I1212 16:16:23.925113 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:16:23.925213043+00:00 stderr F I1212 16:16:23.925150 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:16:23.936467017+00:00 stderr F I1212 16:16:23.936328 1 pod_watcher.go:132] Operand /, Kind= openshift-network-node-identity/network-node-identity updated, re-generating status 2025-12-12T16:16:23.936467017+00:00 stderr F I1212 16:16:23.936365 1 pod_watcher.go:132] Operand /, Kind= openshift-network-node-identity/network-node-identity updated, re-generating status 2025-12-12T16:16:23.941749846+00:00 stderr F I1212 16:16:23.941590 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:16:23.941749846+00:00 stderr F I1212 16:16:23.941631 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:16:23.943388166+00:00 stderr F I1212 16:16:23.943196 1 warning_handler.go:64] "spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:16:23.963456206+00:00 stderr F I1212 16:16:23.963338 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:16:23.963456206+00:00 stderr F I1212 16:16:23.963373 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:16:23.982845830+00:00 stderr F I1212 16:16:23.976421 1 reflector.go:430] "Caches populated" type="*v1.CustomResourceDefinition" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:23.997850816+00:00 stderr F I1212 16:16:23.997781 1 controller.go:186] "Starting Controller" controller="clusterconfig-controller" 2025-12-12T16:16:23.997952459+00:00 stderr F I1212 16:16:23.997938 1 controller.go:195] "Starting workers" controller="clusterconfig-controller" worker count=1 2025-12-12T16:16:23.998072322+00:00 stderr F I1212 16:16:23.998057 1 log.go:245] Reconciling Network.config.openshift.io cluster 2025-12-12T16:16:24.001359502+00:00 stderr F I1212 16:16:24.001309 1 controller.go:186] "Starting Controller" controller="signer-controller" 2025-12-12T16:16:24.001359502+00:00 stderr F I1212 16:16:24.001353 1 controller.go:195] "Starting workers" controller="signer-controller" worker count=1 2025-12-12T16:16:24.001462124+00:00 stderr F I1212 16:16:24.001417 1 controller.go:186] "Starting Controller" controller="machineconfigpool-watcher" 2025-12-12T16:16:24.001497555+00:00 stderr F I1212 16:16:24.001485 1 controller.go:195] "Starting workers" controller="machineconfigpool-watcher" worker count=1 2025-12-12T16:16:24.001637159+00:00 stderr F I1212 16:16:24.001621 1 
controller.go:186] "Starting Controller" controller="proxyconfig-controller" 2025-12-12T16:16:24.002328275+00:00 stderr F I1212 16:16:24.001662 1 controller.go:195] "Starting workers" controller="proxyconfig-controller" worker count=1 2025-12-12T16:16:24.002568711+00:00 stderr F I1212 16:16:24.002458 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/etcd-serving-ca' 2025-12-12T16:16:24.002622433+00:00 stderr F I1212 16:16:24.001695 1 controller.go:186] "Starting Controller" controller="pki-controller" 2025-12-12T16:16:24.002653683+00:00 stderr F I1212 16:16:24.002632 1 controller.go:195] "Starting workers" controller="pki-controller" worker count=1 2025-12-12T16:16:24.002729405+00:00 stderr F I1212 16:16:24.002696 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-network-node-identity/network-node-identity 2025-12-12T16:16:24.003672338+00:00 stderr F I1212 16:16:24.003582 1 controller.go:186] "Starting Controller" controller="operconfig-controller" 2025-12-12T16:16:24.003672338+00:00 stderr F I1212 16:16:24.003611 1 controller.go:195] "Starting workers" controller="operconfig-controller" worker count=1 2025-12-12T16:16:24.003708779+00:00 stderr F I1212 16:16:24.003689 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:16:24.007738667+00:00 stderr F I1212 16:16:24.007396 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:24.008575458+00:00 stderr F I1212 16:16:24.008384 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:24.008575458+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:24.008575458+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:24.008575458+00:00 stderr F reason: Unknown 2025-12-12T16:16:24.008575458+00:00 stderr F status: "False" 2025-12-12T16:16:24.008575458+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:24.008575458+00:00 stderr F - lastTransitionTime: "2025-11-03T08:57:46Z" 2025-12-12T16:16:24.008575458+00:00 stderr F status: "False" 2025-12-12T16:16:24.008575458+00:00 stderr F type: Degraded 2025-12-12T16:16:24.008575458+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:24.008575458+00:00 stderr F status: "True" 2025-12-12T16:16:24.008575458+00:00 stderr F type: Upgradeable 2025-12-12T16:16:24.008575458+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:24.008575458+00:00 stderr F message: |- 2025-12-12T16:16:24.008575458+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.008575458+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:24.008575458+00:00 stderr F DaemonSet "/openshift-network-node-identity/network-node-identity" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.008575458+00:00 stderr F DaemonSet "/openshift-network-operator/iptables-alerter" is waiting for other operators to become ready 2025-12-12T16:16:24.008575458+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.008575458+00:00 stderr F DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.008575458+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for 
other operators to become ready 2025-12-12T16:16:24.008575458+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:24.008575458+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.008575458+00:00 stderr F reason: Deploying 2025-12-12T16:16:24.008575458+00:00 stderr F status: "True" 2025-12-12T16:16:24.008575458+00:00 stderr F type: Progressing 2025-12-12T16:16:24.008575458+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:24.008575458+00:00 stderr F status: "True" 2025-12-12T16:16:24.008575458+00:00 stderr F type: Available 2025-12-12T16:16:24.008575458+00:00 stderr F I1212 16:16:24.008563 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:24.009590483+00:00 stderr F I1212 16:16:24.009273 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:24.017142237+00:00 stderr F I1212 16:16:24.015739 1 log.go:245] configmap 'openshift-config/etcd-serving-ca' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:16:24.017142237+00:00 stderr F I1212 16:16:24.015805 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/initial-kube-apiserver-server-ca' 2025-12-12T16:16:24.024122888+00:00 stderr F I1212 16:16:24.023910 1 log.go:245] configmap 'openshift-config/initial-kube-apiserver-server-ca' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:16:24.024122888+00:00 stderr F I1212 16:16:24.023966 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/openshift-install-manifests' 2025-12-12T16:16:24.029056178+00:00 stderr F I1212 16:16:24.028328 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:24.029056178+00:00 stderr F - lastTransitionTime: "2025-11-03T08:57:46Z" 2025-12-12T16:16:24.029056178+00:00 stderr F status: "False" 2025-12-12T16:16:24.029056178+00:00 stderr F type: Degraded 2025-12-12T16:16:24.029056178+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:24.029056178+00:00 stderr F status: "True" 2025-12-12T16:16:24.029056178+00:00 stderr F type: Upgradeable 2025-12-12T16:16:24.029056178+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:24.029056178+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:24.029056178+00:00 stderr F reason: Unknown 2025-12-12T16:16:24.029056178+00:00 stderr F status: "False" 2025-12-12T16:16:24.029056178+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:24.029056178+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:24.029056178+00:00 stderr F message: |- 2025-12-12T16:16:24.029056178+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.029056178+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:24.029056178+00:00 stderr F DaemonSet "/openshift-network-node-identity/network-node-identity" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.029056178+00:00 stderr F DaemonSet "/openshift-network-operator/iptables-alerter" is 
waiting for other operators to become ready 2025-12-12T16:16:24.029056178+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.029056178+00:00 stderr F DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.029056178+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:24.029056178+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:24.029056178+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.029056178+00:00 stderr F reason: Deploying 2025-12-12T16:16:24.029056178+00:00 stderr F status: "True" 2025-12-12T16:16:24.029056178+00:00 stderr F type: Progressing 2025-12-12T16:16:24.029056178+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:24.029056178+00:00 stderr F status: "True" 2025-12-12T16:16:24.029056178+00:00 stderr F type: Available 2025-12-12T16:16:24.030896753+00:00 stderr F I1212 16:16:24.030688 1 log.go:245] configmap 'openshift-config/openshift-install-manifests' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:16:24.030896753+00:00 stderr F I1212 16:16:24.030721 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/registry-certs' 2025-12-12T16:16:24.038297944+00:00 stderr F I1212 16:16:24.037859 1 base_controller.go:82] Caches are synced for ConnectivityCheckController 2025-12-12T16:16:24.038297944+00:00 stderr F I1212 16:16:24.037884 1 base_controller.go:119] Starting #1 worker of ConnectivityCheckController controller ... 
2025-12-12T16:16:24.040729723+00:00 stderr F I1212 16:16:24.040515 1 log.go:245] configmap 'openshift-config/registry-certs' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:16:24.040729723+00:00 stderr F I1212 16:16:24.040590 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/admin-acks' 2025-12-12T16:16:24.041653295+00:00 stderr F I1212 16:16:24.041614 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:24.042861655+00:00 stderr F I1212 16:16:24.042550 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:24.042861655+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:24.042861655+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:24.042861655+00:00 stderr F reason: Unknown 2025-12-12T16:16:24.042861655+00:00 stderr F status: "False" 2025-12-12T16:16:24.042861655+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:24.042861655+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:24.042861655+00:00 stderr F message: |- 2025-12-12T16:16:24.042861655+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:24.042861655+00:00 stderr F DaemonSet "/openshift-network-node-identity/network-node-identity" rollout is not making progress - last change 2025-11-03T09:40:49Z 2025-12-12T16:16:24.042861655+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:24.042861655+00:00 stderr F DaemonSet "/openshift-multus/multus" rollout is not making progress - last change 2025-11-03T09:40:46Z 2025-12-12T16:16:24.042861655+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" rollout is not making progress - last change 2025-11-03T09:40:47Z 2025-12-12T16:16:24.042861655+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:24.042861655+00:00 stderr F status: "True" 2025-12-12T16:16:24.042861655+00:00 stderr F type: Degraded 2025-12-12T16:16:24.042861655+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:24.042861655+00:00 stderr F status: "True" 2025-12-12T16:16:24.042861655+00:00 stderr F type: Upgradeable 2025-12-12T16:16:24.042861655+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:24.042861655+00:00 stderr F message: |- 2025-12-12T16:16:24.042861655+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.042861655+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:24.042861655+00:00 stderr F DaemonSet "/openshift-network-node-identity/network-node-identity" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.042861655+00:00 stderr F DaemonSet "/openshift-network-operator/iptables-alerter" is waiting for other operators to become ready 2025-12-12T16:16:24.042861655+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.042861655+00:00 stderr F DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.042861655+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other 
operators to become ready 2025-12-12T16:16:24.042861655+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:24.042861655+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.042861655+00:00 stderr F reason: Deploying 2025-12-12T16:16:24.042861655+00:00 stderr F status: "True" 2025-12-12T16:16:24.042861655+00:00 stderr F type: Progressing 2025-12-12T16:16:24.042861655+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:24.042861655+00:00 stderr F status: "True" 2025-12-12T16:16:24.042861655+00:00 stderr F type: Available 2025-12-12T16:16:24.043146992+00:00 stderr F I1212 16:16:24.043098 1 log.go:245] unable to determine openshift-apiserver apiserver service endpoints: no openshift-apiserver api endpoints found 2025-12-12T16:16:24.045407337+00:00 stderr F I1212 16:16:24.045252 1 log.go:245] configmap 'openshift-config/admin-acks' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:16:24.045407337+00:00 stderr F I1212 16:16:24.045316 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/admin-kubeconfig-client-ca' 2025-12-12T16:16:24.062375821+00:00 stderr F I1212 16:16:24.059522 1 log.go:245] configmap 'openshift-config/admin-kubeconfig-client-ca' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:16:24.062375821+00:00 stderr F I1212 16:16:24.059594 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/installer-images' 2025-12-12T16:16:24.068576773+00:00 stderr F I1212 16:16:24.068535 1 log.go:245] configmap 'openshift-config/installer-images' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:16:24.068678035+00:00 stderr F I1212 16:16:24.068668 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/kube-root-ca.crt' 2025-12-12T16:16:24.090787275+00:00 stderr F I1212 16:16:24.090717 1 log.go:245] configmap 'openshift-config/kube-root-ca.crt' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:16:24.090926568+00:00 stderr F I1212 16:16:24.090916 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/openshift-service-ca.crt' 2025-12-12T16:16:24.091691327+00:00 stderr F I1212 16:16:24.091674 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:24.091691327+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:24.091691327+00:00 stderr F message: |- 2025-12-12T16:16:24.091691327+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:24.091691327+00:00 stderr F DaemonSet "/openshift-network-node-identity/network-node-identity" rollout is not making progress - last change 2025-11-03T09:40:49Z 2025-12-12T16:16:24.091691327+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:24.091691327+00:00 stderr F DaemonSet "/openshift-multus/multus" rollout is not making progress - last change 2025-11-03T09:40:46Z 2025-12-12T16:16:24.091691327+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" rollout 
is not making progress - last change 2025-11-03T09:40:47Z 2025-12-12T16:16:24.091691327+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:24.091691327+00:00 stderr F status: "True" 2025-12-12T16:16:24.091691327+00:00 stderr F type: Degraded 2025-12-12T16:16:24.091691327+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:24.091691327+00:00 stderr F status: "True" 2025-12-12T16:16:24.091691327+00:00 stderr F type: Upgradeable 2025-12-12T16:16:24.091691327+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:24.091691327+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:24.091691327+00:00 stderr F reason: Unknown 2025-12-12T16:16:24.091691327+00:00 stderr F status: "False" 2025-12-12T16:16:24.091691327+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:24.091691327+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:24.091691327+00:00 stderr F message: |- 2025-12-12T16:16:24.091691327+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.091691327+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:24.091691327+00:00 stderr F DaemonSet "/openshift-network-node-identity/network-node-identity" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.091691327+00:00 stderr F DaemonSet "/openshift-network-operator/iptables-alerter" is waiting for other operators to become ready 2025-12-12T16:16:24.091691327+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.091691327+00:00 stderr F DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.091691327+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:24.091691327+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:24.091691327+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.091691327+00:00 stderr F reason: Deploying 2025-12-12T16:16:24.091691327+00:00 stderr F status: "True" 2025-12-12T16:16:24.091691327+00:00 stderr F type: Progressing 2025-12-12T16:16:24.091691327+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:24.091691327+00:00 stderr F status: "True" 2025-12-12T16:16:24.091691327+00:00 stderr F type: Available 2025-12-12T16:16:24.134903332+00:00 stderr F I1212 16:16:24.134638 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:24.136804709+00:00 stderr F I1212 16:16:24.136750 1 log.go:245] successful reconciliation 2025-12-12T16:16:24.149448677+00:00 stderr F I1212 16:16:24.149357 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:24.149855877+00:00 stderr F I1212 16:16:24.149816 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:16:24.153515547+00:00 stderr F I1212 16:16:24.152804 1 pod_watcher.go:132] Operand /, Kind= 
openshift-network-node-identity/network-node-identity updated, re-generating status 2025-12-12T16:16:24.153515547+00:00 stderr F I1212 16:16:24.152836 1 pod_watcher.go:132] Operand /, Kind= openshift-network-node-identity/network-node-identity updated, re-generating status 2025-12-12T16:16:24.158787135+00:00 stderr F I1212 16:16:24.158711 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:24.163344917+00:00 stderr F I1212 16:16:24.162967 1 log.go:245] Apply / Create of (operator.openshift.io/v1, Kind=Network) /cluster was successful 2025-12-12T16:16:24.163344917+00:00 stderr F I1212 16:16:24.162994 1 log.go:245] Successfully updated Operator config from Cluster config 2025-12-12T16:16:24.164287160+00:00 stderr F I1212 16:16:24.163781 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:24.167338504+00:00 stderr F I1212 16:16:24.166887 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:24.173274829+00:00 stderr F I1212 16:16:24.172896 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:24.182013192+00:00 stderr F I1212 16:16:24.181928 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:24.186350778+00:00 stderr F I1212 16:16:24.185286 1 warning_handler.go:64] "spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:16:24.188274165+00:00 stderr F I1212 16:16:24.186411 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:16:24.188274165+00:00 stderr F I1212 16:16:24.186447 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:16:24.190937510+00:00 stderr F I1212 16:16:24.190837 1 log.go:245] unable to determine openshift-apiserver apiserver service endpoints: no openshift-apiserver api endpoints found 2025-12-12T16:16:24.196720591+00:00 stderr F I1212 16:16:24.196579 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:16:24.196720591+00:00 stderr F I1212 16:16:24.196615 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:16:24.201130149+00:00 stderr F I1212 16:16:24.201087 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:24.209055262+00:00 stderr F I1212 16:16:24.208979 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:24.218423701+00:00 stderr F I1212 16:16:24.218249 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:24.225296479+00:00 stderr F I1212 16:16:24.224534 1 log.go:245] The check 
PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:24.225296479+00:00 stderr F I1212 16:16:24.224687 1 log.go:245] configmap 'openshift-config/openshift-service-ca.crt' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:16:24.226313454+00:00 stderr F I1212 16:16:24.226276 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/etcd-ca-bundle' 2025-12-12T16:16:24.255417234+00:00 stderr F I1212 16:16:24.253917 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:24.255417234+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:24.255417234+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:24.255417234+00:00 stderr F reason: Unknown 2025-12-12T16:16:24.255417234+00:00 stderr F status: "False" 2025-12-12T16:16:24.255417234+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:24.255417234+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:24.255417234+00:00 stderr F message: |- 2025-12-12T16:16:24.255417234+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:24.255417234+00:00 stderr F DaemonSet "/openshift-network-node-identity/network-node-identity" rollout is not making progress - last change 2025-11-03T09:40:49Z 2025-12-12T16:16:24.255417234+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:24.255417234+00:00 stderr F DaemonSet "/openshift-multus/multus" rollout is not making progress - last change 2025-11-03T09:40:46Z 2025-12-12T16:16:24.255417234+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" rollout is not making progress - last change 2025-11-03T09:40:47Z 2025-12-12T16:16:24.255417234+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:24.255417234+00:00 stderr F status: "True" 2025-12-12T16:16:24.255417234+00:00 stderr F type: Degraded 2025-12-12T16:16:24.255417234+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:24.255417234+00:00 stderr F status: "True" 2025-12-12T16:16:24.255417234+00:00 stderr F type: Upgradeable 2025-12-12T16:16:24.255417234+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:24.255417234+00:00 stderr F message: |- 2025-12-12T16:16:24.255417234+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.255417234+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:24.255417234+00:00 stderr F DaemonSet "/openshift-network-operator/iptables-alerter" is waiting for other operators to become ready 2025-12-12T16:16:24.255417234+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.255417234+00:00 stderr F DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.255417234+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.255417234+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to 
become ready 2025-12-12T16:16:24.255417234+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:24.255417234+00:00 stderr F reason: Deploying 2025-12-12T16:16:24.255417234+00:00 stderr F status: "True" 2025-12-12T16:16:24.255417234+00:00 stderr F type: Progressing 2025-12-12T16:16:24.255417234+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:24.255417234+00:00 stderr F status: "True" 2025-12-12T16:16:24.255417234+00:00 stderr F type: Available 2025-12-12T16:16:24.255417234+00:00 stderr F I1212 16:16:24.254947 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:24.285739265+00:00 stderr F I1212 16:16:24.283646 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:16:24.285739265+00:00 stderr F I1212 16:16:24.283676 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:16:24.285739265+00:00 stderr F I1212 16:16:24.283982 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:24.313801259+00:00 stderr F I1212 16:16:24.313650 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:16:24.313801259+00:00 stderr F I1212 16:16:24.313768 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:16:24.478655744+00:00 stderr F I1212 16:16:24.478549 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:24.622890285+00:00 stderr F I1212 16:16:24.622804 1 log.go:245] configmap 'openshift-config/etcd-ca-bundle' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:16:24.622948346+00:00 stderr F I1212 16:16:24.622912 1 log.go:245] Reconciling proxy 'cluster' 2025-12-12T16:16:24.638278391+00:00 stderr F I1212 16:16:24.637751 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:24.638278391+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:24.638278391+00:00 stderr F message: |- 2025-12-12T16:16:24.638278391+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:24.638278391+00:00 stderr F DaemonSet "/openshift-network-node-identity/network-node-identity" rollout is not making progress - last change 2025-11-03T09:40:49Z 2025-12-12T16:16:24.638278391+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:24.638278391+00:00 stderr F DaemonSet "/openshift-multus/multus" rollout is not making progress - last change 2025-11-03T09:40:46Z 2025-12-12T16:16:24.638278391+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" rollout is not making progress - last change 2025-11-03T09:40:47Z 2025-12-12T16:16:24.638278391+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:24.638278391+00:00 stderr F status: "True" 2025-12-12T16:16:24.638278391+00:00 stderr F type: Degraded 2025-12-12T16:16:24.638278391+00:00 stderr F - lastTransitionTime: 
"2025-11-02T07:50:03Z" 2025-12-12T16:16:24.638278391+00:00 stderr F status: "True" 2025-12-12T16:16:24.638278391+00:00 stderr F type: Upgradeable 2025-12-12T16:16:24.638278391+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:24.638278391+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:24.638278391+00:00 stderr F reason: Unknown 2025-12-12T16:16:24.638278391+00:00 stderr F status: "False" 2025-12-12T16:16:24.638278391+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:24.638278391+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:24.638278391+00:00 stderr F message: |- 2025-12-12T16:16:24.638278391+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.638278391+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:24.638278391+00:00 stderr F DaemonSet "/openshift-network-operator/iptables-alerter" is waiting for other operators to become ready 2025-12-12T16:16:24.638278391+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.638278391+00:00 stderr F DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.638278391+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.638278391+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:24.638278391+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:24.638278391+00:00 stderr F reason: Deploying 2025-12-12T16:16:24.638278391+00:00 stderr F status: "True" 2025-12-12T16:16:24.638278391+00:00 stderr F type: Progressing 2025-12-12T16:16:24.638278391+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:24.638278391+00:00 stderr F status: "True" 2025-12-12T16:16:24.638278391+00:00 stderr F type: Available 2025-12-12T16:16:24.813840907+00:00 stderr F I1212 16:16:24.813539 1 log.go:245] httpProxy, httpsProxy and noProxy not defined for proxy 'cluster'; validation will be skipped 2025-12-12T16:16:24.843842200+00:00 stderr F I1212 16:16:24.843760 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:24.843842200+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:24.843842200+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:24.843842200+00:00 stderr F reason: Unknown 2025-12-12T16:16:24.843842200+00:00 stderr F status: "False" 2025-12-12T16:16:24.843842200+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:24.843842200+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:24.843842200+00:00 stderr F message: |- 2025-12-12T16:16:24.843842200+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:24.843842200+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:24.843842200+00:00 stderr F DaemonSet "/openshift-multus/multus" rollout is not making progress - last 
change 2025-11-03T09:40:46Z 2025-12-12T16:16:24.843842200+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:24.843842200+00:00 stderr F status: "True" 2025-12-12T16:16:24.843842200+00:00 stderr F type: Degraded 2025-12-12T16:16:24.843842200+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:24.843842200+00:00 stderr F status: "True" 2025-12-12T16:16:24.843842200+00:00 stderr F type: Upgradeable 2025-12-12T16:16:24.843842200+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:24.843842200+00:00 stderr F message: |- 2025-12-12T16:16:24.843842200+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.843842200+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:24.843842200+00:00 stderr F DaemonSet "/openshift-network-operator/iptables-alerter" is waiting for other operators to become ready 2025-12-12T16:16:24.843842200+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.843842200+00:00 stderr F DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.843842200+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" is not available (awaiting 1 nodes) 2025-12-12T16:16:24.843842200+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:24.843842200+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:24.843842200+00:00 stderr F reason: Deploying 2025-12-12T16:16:24.843842200+00:00 stderr F status: "True" 2025-12-12T16:16:24.843842200+00:00 stderr F type: Progressing 2025-12-12T16:16:24.843842200+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:24.843842200+00:00 stderr F status: "True" 2025-12-12T16:16:24.843842200+00:00 stderr F type: Available 2025-12-12T16:16:24.843906371+00:00 stderr F I1212 16:16:24.843849 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:25.018571645+00:00 stderr F I1212 16:16:25.018482 1 log.go:245] Reconciling proxy 'cluster' complete 2025-12-12T16:16:25.036124144+00:00 stderr F I1212 16:16:25.036042 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:25.036124144+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:25.036124144+00:00 stderr F message: |- 2025-12-12T16:16:25.036124144+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:25.036124144+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:25.036124144+00:00 stderr F DaemonSet "/openshift-multus/multus" rollout is not making progress - last change 2025-11-03T09:40:46Z 2025-12-12T16:16:25.036124144+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:25.036124144+00:00 stderr F status: "True" 2025-12-12T16:16:25.036124144+00:00 stderr F type: Degraded 2025-12-12T16:16:25.036124144+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:25.036124144+00:00 stderr F status: "True" 2025-12-12T16:16:25.036124144+00:00 stderr F type: Upgradeable 
2025-12-12T16:16:25.036124144+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:25.036124144+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:25.036124144+00:00 stderr F reason: Unknown 2025-12-12T16:16:25.036124144+00:00 stderr F status: "False" 2025-12-12T16:16:25.036124144+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:25.036124144+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:25.036124144+00:00 stderr F message: |- 2025-12-12T16:16:25.036124144+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:25.036124144+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:25.036124144+00:00 stderr F DaemonSet "/openshift-network-operator/iptables-alerter" is waiting for other operators to become ready 2025-12-12T16:16:25.036124144+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:25.036124144+00:00 stderr F DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes) 2025-12-12T16:16:25.036124144+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" is not available (awaiting 1 nodes) 2025-12-12T16:16:25.036124144+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:25.036124144+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:25.036124144+00:00 stderr F reason: Deploying 2025-12-12T16:16:25.036124144+00:00 stderr F status: "True" 2025-12-12T16:16:25.036124144+00:00 stderr F type: Progressing 2025-12-12T16:16:25.036124144+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:25.036124144+00:00 stderr F status: "True" 2025-12-12T16:16:25.036124144+00:00 stderr F type: Available 2025-12-12T16:16:25.431075646+00:00 stderr F I1212 16:16:25.430975 1 log.go:245] Reconciling configmap from openshift-config-managed/trusted-ca-bundle 2025-12-12T16:16:25.435384182+00:00 stderr F I1212 16:16:25.435310 1 log.go:245] trusted-ca-bundle changed, updating 13 configMaps 2025-12-12T16:16:25.435406922+00:00 stderr F I1212 16:16:25.435386 1 log.go:245] ConfigMap openshift-authentication/v4-0-config-system-trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:25.435429353+00:00 stderr F I1212 16:16:25.435413 1 log.go:245] ConfigMap openshift-console-operator/trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:16:25.435455943+00:00 stderr F I1212 16:16:25.435440 1 log.go:245] ConfigMap openshift-console/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:25.435493644+00:00 stderr F I1212 16:16:25.435479 1 log.go:245] ConfigMap openshift-image-registry/trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:16:25.435531245+00:00 stderr F I1212 16:16:25.435517 1 log.go:245] ConfigMap openshift-kube-apiserver/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:25.435569326+00:00 stderr F I1212 16:16:25.435554 1 log.go:245] ConfigMap openshift-controller-manager/openshift-global-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:16:25.435606727+00:00 stderr F I1212 16:16:25.435591 1 log.go:245] ConfigMap openshift-ingress-operator/trusted-ca ca-bundle.crt unchanged, skipping 
2025-12-12T16:16:25.435645578+00:00 stderr F I1212 16:16:25.435630 1 log.go:245] ConfigMap openshift-kube-controller-manager/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:25.435683929+00:00 stderr F I1212 16:16:25.435668 1 log.go:245] ConfigMap openshift-machine-api/mao-trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:16:25.435720880+00:00 stderr F I1212 16:16:25.435706 1 log.go:245] ConfigMap openshift-marketplace/marketplace-trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:16:25.435999867+00:00 stderr F I1212 16:16:25.435946 1 log.go:245] ConfigMap openshift-apiserver-operator/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:25.436038778+00:00 stderr F I1212 16:16:25.436015 1 log.go:245] ConfigMap openshift-apiserver/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:25.436060978+00:00 stderr F I1212 16:16:25.436044 1 log.go:245] ConfigMap openshift-authentication-operator/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:25.629278585+00:00 stderr F I1212 16:16:25.629041 1 dashboard_controller.go:117] Reconcile dashboards 2025-12-12T16:16:25.632815222+00:00 stderr F I1212 16:16:25.632752 1 dashboard_controller.go:143] Applying dashboards manifests 2025-12-12T16:16:25.639367262+00:00 stderr F I1212 16:16:25.639297 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/grafana-dashboard-ovn-health 2025-12-12T16:16:25.650460463+00:00 stderr F I1212 16:16:25.650326 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/grafana-dashboard-ovn-health was successful 2025-12-12T16:16:25.650460463+00:00 stderr F I1212 16:16:25.650389 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/grafana-dashboard-network-stats 2025-12-12T16:16:25.661699707+00:00 stderr F I1212 16:16:25.661608 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/grafana-dashboard-network-stats was successful 2025-12-12T16:16:25.829388471+00:00 stderr F I1212 16:16:25.828335 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/ovn 2025-12-12T16:16:25.835038399+00:00 stderr F I1212 16:16:25.834990 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:25.835411058+00:00 stderr F I1212 16:16:25.835376 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:25.931037873+00:00 stderr F I1212 16:16:25.930949 1 log.go:245] successful reconciliation 2025-12-12T16:16:26.035605286+00:00 stderr F I1212 16:16:26.035525 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:16:26.038708582+00:00 stderr F I1212 16:16:26.038632 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:16:26.042555576+00:00 stderr F I1212 16:16:26.042470 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:16:26.216923813+00:00 stderr F I1212 16:16:26.216789 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc0023d2180 DisableUDPAggregation:false DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic 
SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:16:26.228570867+00:00 stderr F I1212 16:16:26.228475 1 log.go:245] Reconciling Network.config.openshift.io cluster 2025-12-12T16:16:26.421381684+00:00 stderr F I1212 16:16:26.421307 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 3 -> 3 2025-12-12T16:16:26.421487127+00:00 stderr F I1212 16:16:26.421474 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:16:26.421513638+00:00 stderr F I1212 16:16:26.421504 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:16:26.424941561+00:00 stderr F I1212 16:16:26.424870 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout progressing; 1/1 scheduled; 1 unavailable; 0 available; generation 2 -> 2 2025-12-12T16:16:26.424941561+00:00 stderr F W1212 16:16:26.424908 1 ovn_kubernetes.go:1708] daemonset openshift-ovn-kubernetes/ovnkube-node rollout seems to have hung with 0/1 behind, force-continuing 2025-12-12T16:16:26.424941561+00:00 stderr F I1212 16:16:26.424915 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout progressing; 1/1 scheduled; 1 unavailable; 0 available; generation 2 -> 2 2025-12-12T16:16:26.424941561+00:00 stderr F W1212 16:16:26.424922 1 ovn_kubernetes.go:1708] daemonset openshift-ovn-kubernetes/ovnkube-node rollout seems to have hung with 0/1 behind, force-continuing 2025-12-12T16:16:26.425005793+00:00 stderr F I1212 16:16:26.424949 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:16:26.441993088+00:00 stderr F I1212 16:16:26.441892 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:16:26.441993088+00:00 stderr F I1212 16:16:26.441928 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:16:26.785400822+00:00 stderr F I1212 16:16:26.785309 1 pod_watcher.go:132] Operand /, Kind= openshift-network-operator/iptables-alerter updated, re-generating status 2025-12-12T16:16:26.785400822+00:00 stderr F I1212 16:16:26.785342 1 pod_watcher.go:132] Operand /, Kind= openshift-network-operator/iptables-alerter updated, re-generating status 2025-12-12T16:16:26.817851924+00:00 stderr F I1212 16:16:26.817697 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:16:26.817851924+00:00 stderr F I1212 16:16:26.817739 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:16:26.854757825+00:00 stderr F I1212 16:16:26.854669 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:26.854757825+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:26.854757825+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:26.854757825+00:00 stderr F reason: Unknown 2025-12-12T16:16:26.854757825+00:00 stderr F status: "False" 2025-12-12T16:16:26.854757825+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:26.854757825+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:26.854757825+00:00 stderr F 
message: |- 2025-12-12T16:16:26.854757825+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:26.854757825+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:26.854757825+00:00 stderr F DaemonSet "/openshift-multus/multus" rollout is not making progress - last change 2025-11-03T09:40:46Z 2025-12-12T16:16:26.854757825+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:26.854757825+00:00 stderr F status: "True" 2025-12-12T16:16:26.854757825+00:00 stderr F type: Degraded 2025-12-12T16:16:26.854757825+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:26.854757825+00:00 stderr F status: "True" 2025-12-12T16:16:26.854757825+00:00 stderr F type: Upgradeable 2025-12-12T16:16:26.854757825+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:26.854757825+00:00 stderr F message: |- 2025-12-12T16:16:26.854757825+00:00 stderr F DaemonSet "/openshift-network-operator/iptables-alerter" is waiting for other operators to become ready 2025-12-12T16:16:26.854757825+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:26.854757825+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:26.854757825+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:26.854757825+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:26.854757825+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:26.854757825+00:00 stderr F reason: Deploying 2025-12-12T16:16:26.854757825+00:00 stderr F status: "True" 2025-12-12T16:16:26.854757825+00:00 stderr F type: Progressing 2025-12-12T16:16:26.854757825+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:26.854757825+00:00 stderr F status: "True" 2025-12-12T16:16:26.854757825+00:00 stderr F type: Available 2025-12-12T16:16:26.855200876+00:00 stderr F I1212 16:16:26.855122 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:27.017821696+00:00 stderr F I1212 16:16:27.017727 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:16:27.025733180+00:00 stderr F I1212 16:16:27.025655 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:16:27.031814678+00:00 stderr F I1212 16:16:27.031756 1 log.go:245] Failed to update the operator configuration: could not apply (/, Kind=) /cluster, err: failed to apply / update (operator.openshift.io/v1, Kind=Network) /cluster: Operation cannot be fulfilled on networks.operator.openshift.io "cluster": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:16:27.031901210+00:00 stderr F E1212 16:16:27.031870 1 controller.go:353] "Reconciler error" err="could not apply (/, Kind=) /cluster, err: failed to apply / update (operator.openshift.io/v1, Kind=Network) /cluster: Operation cannot be fulfilled on networks.operator.openshift.io \"cluster\": the object has been modified; please apply your changes to the latest version and try 
again" controller="operconfig-controller" object="cluster" namespace="" name="cluster" reconcileID="0e1a86e4-890e-416b-a311-9246110b6050" 2025-12-12T16:16:27.037448136+00:00 stderr F I1212 16:16:27.037364 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:16:27.039266890+00:00 stderr F I1212 16:16:27.038912 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:27.040038039+00:00 stderr F I1212 16:16:27.039579 1 log.go:245] Apply / Create of (operator.openshift.io/v1, Kind=Network) /cluster was successful 2025-12-12T16:16:27.040038039+00:00 stderr F I1212 16:16:27.039596 1 log.go:245] Successfully updated Operator config from Cluster config 2025-12-12T16:16:27.437358029+00:00 stderr F I1212 16:16:27.437276 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:27.437358029+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:27.437358029+00:00 stderr F message: |- 2025-12-12T16:16:27.437358029+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:27.437358029+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:27.437358029+00:00 stderr F DaemonSet "/openshift-multus/multus" rollout is not making progress - last change 2025-11-03T09:40:46Z 2025-12-12T16:16:27.437358029+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:27.437358029+00:00 stderr F status: "True" 2025-12-12T16:16:27.437358029+00:00 stderr F type: Degraded 2025-12-12T16:16:27.437358029+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:27.437358029+00:00 stderr F status: "True" 2025-12-12T16:16:27.437358029+00:00 stderr F type: Upgradeable 2025-12-12T16:16:27.437358029+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:27.437358029+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:27.437358029+00:00 stderr F reason: Unknown 2025-12-12T16:16:27.437358029+00:00 stderr F status: "False" 2025-12-12T16:16:27.437358029+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:27.437358029+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:27.437358029+00:00 stderr F message: |- 2025-12-12T16:16:27.437358029+00:00 stderr F DaemonSet "/openshift-network-operator/iptables-alerter" is waiting for other operators to become ready 2025-12-12T16:16:27.437358029+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:27.437358029+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:27.437358029+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:27.437358029+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:27.437358029+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:27.437358029+00:00 stderr F reason: Deploying 2025-12-12T16:16:27.437358029+00:00 stderr F status: "True" 2025-12-12T16:16:27.437358029+00:00 stderr F type: Progressing 2025-12-12T16:16:27.437358029+00:00 stderr F - 
lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:27.437358029+00:00 stderr F status: "True" 2025-12-12T16:16:27.437358029+00:00 stderr F type: Available 2025-12-12T16:16:27.454041906+00:00 stderr F I1212 16:16:27.453875 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:27.454542899+00:00 stderr F I1212 16:16:27.454507 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:27.454542899+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:27.454542899+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:27.454542899+00:00 stderr F reason: Unknown 2025-12-12T16:16:27.454542899+00:00 stderr F status: "False" 2025-12-12T16:16:27.454542899+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:27.454542899+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:27.454542899+00:00 stderr F message: |- 2025-12-12T16:16:27.454542899+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:27.454542899+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:27.454542899+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:27.454542899+00:00 stderr F status: "True" 2025-12-12T16:16:27.454542899+00:00 stderr F type: Degraded 2025-12-12T16:16:27.454542899+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:27.454542899+00:00 stderr F status: "True" 2025-12-12T16:16:27.454542899+00:00 stderr F type: Upgradeable 2025-12-12T16:16:27.454542899+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:27.454542899+00:00 stderr F message: |- 2025-12-12T16:16:27.454542899+00:00 stderr F DaemonSet "/openshift-network-operator/iptables-alerter" is waiting for other operators to become ready 2025-12-12T16:16:27.454542899+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:27.454542899+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:27.454542899+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:27.454542899+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:27.454542899+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:27.454542899+00:00 stderr F reason: Deploying 2025-12-12T16:16:27.454542899+00:00 stderr F status: "True" 2025-12-12T16:16:27.454542899+00:00 stderr F type: Progressing 2025-12-12T16:16:27.454542899+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:27.454542899+00:00 stderr F status: "True" 2025-12-12T16:16:27.454542899+00:00 stderr F type: Available 2025-12-12T16:16:28.235705639+00:00 stderr F I1212 16:16:28.235630 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:28.235705639+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:28.235705639+00:00 stderr F message: |- 2025-12-12T16:16:28.235705639+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making 
progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:28.235705639+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:28.235705639+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:28.235705639+00:00 stderr F status: "True" 2025-12-12T16:16:28.235705639+00:00 stderr F type: Degraded 2025-12-12T16:16:28.235705639+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:28.235705639+00:00 stderr F status: "True" 2025-12-12T16:16:28.235705639+00:00 stderr F type: Upgradeable 2025-12-12T16:16:28.235705639+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:28.235705639+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:28.235705639+00:00 stderr F reason: Unknown 2025-12-12T16:16:28.235705639+00:00 stderr F status: "False" 2025-12-12T16:16:28.235705639+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:28.235705639+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:28.235705639+00:00 stderr F message: |- 2025-12-12T16:16:28.235705639+00:00 stderr F DaemonSet "/openshift-network-operator/iptables-alerter" is waiting for other operators to become ready 2025-12-12T16:16:28.235705639+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:28.235705639+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:28.235705639+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:28.235705639+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:28.235705639+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:28.235705639+00:00 stderr F reason: Deploying 2025-12-12T16:16:28.235705639+00:00 stderr F status: "True" 2025-12-12T16:16:28.235705639+00:00 stderr F type: Progressing 2025-12-12T16:16:28.235705639+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:28.235705639+00:00 stderr F status: "True" 2025-12-12T16:16:28.235705639+00:00 stderr F type: Available 2025-12-12T16:16:28.434735858+00:00 stderr F I1212 16:16:28.434632 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:16:28.436503822+00:00 stderr F I1212 16:16:28.436466 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:16:28.438315456+00:00 stderr F I1212 16:16:28.438287 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:16:28.618752361+00:00 stderr F I1212 16:16:28.618633 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc001a615c0 DisableUDPAggregation:false DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:16:28.822351002+00:00 stderr F I1212 16:16:28.822252 1 ovn_kubernetes.go:1728] deployment 
openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 3 -> 3 2025-12-12T16:16:28.822351002+00:00 stderr F I1212 16:16:28.822295 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:16:28.822351002+00:00 stderr F I1212 16:16:28.822304 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:16:28.825358595+00:00 stderr F I1212 16:16:28.824956 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout progressing; 1/1 scheduled; 1 unavailable; 0 available; generation 2 -> 2 2025-12-12T16:16:28.825358595+00:00 stderr F W1212 16:16:28.824974 1 ovn_kubernetes.go:1708] daemonset openshift-ovn-kubernetes/ovnkube-node rollout seems to have hung with 0/1 behind, force-continuing 2025-12-12T16:16:28.825358595+00:00 stderr F I1212 16:16:28.824980 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout progressing; 1/1 scheduled; 1 unavailable; 0 available; generation 2 -> 2 2025-12-12T16:16:28.825358595+00:00 stderr F W1212 16:16:28.824986 1 ovn_kubernetes.go:1708] daemonset openshift-ovn-kubernetes/ovnkube-node rollout seems to have hung with 0/1 behind, force-continuing 2025-12-12T16:16:28.825358595+00:00 stderr F I1212 16:16:28.825004 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:16:28.826902963+00:00 stderr F I1212 16:16:28.826815 1 log.go:245] Reconciling configmap from openshift-kube-apiserver/trusted-ca-bundle 2025-12-12T16:16:28.829824544+00:00 stderr F I1212 16:16:28.829774 1 log.go:245] ConfigMap openshift-kube-apiserver/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:29.017749562+00:00 stderr F I1212 16:16:29.017583 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:16:29.017749562+00:00 stderr F I1212 16:16:29.017704 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:16:29.028010783+00:00 stderr F I1212 16:16:29.027898 1 dashboard_controller.go:117] Reconcile dashboards 2025-12-12T16:16:29.031965649+00:00 stderr F I1212 16:16:29.031901 1 dashboard_controller.go:143] Applying dashboards manifests 2025-12-12T16:16:29.036640583+00:00 stderr F I1212 16:16:29.036579 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/grafana-dashboard-ovn-health 2025-12-12T16:16:29.053158727+00:00 stderr F I1212 16:16:29.053056 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/grafana-dashboard-ovn-health was successful 2025-12-12T16:16:29.053158727+00:00 stderr F I1212 16:16:29.053101 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/grafana-dashboard-network-stats 2025-12-12T16:16:29.063558001+00:00 stderr F I1212 16:16:29.063467 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/grafana-dashboard-network-stats was successful 2025-12-12T16:16:29.218226807+00:00 stderr F I1212 16:16:29.218107 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:16:29.232124196+00:00 stderr F I1212 16:16:29.232043 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/signer 2025-12-12T16:16:29.233565601+00:00 stderr F I1212 16:16:29.233500 1 log.go:245] Failed 
to update the operator configuration: could not apply (/, Kind=) /cluster, err: failed to apply / update (operator.openshift.io/v1, Kind=Network) /cluster: Operation cannot be fulfilled on networks.operator.openshift.io "cluster": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:16:29.233613792+00:00 stderr F E1212 16:16:29.233593 1 controller.go:353] "Reconciler error" err="could not apply (/, Kind=) /cluster, err: failed to apply / update (operator.openshift.io/v1, Kind=Network) /cluster: Operation cannot be fulfilled on networks.operator.openshift.io \"cluster\": the object has been modified; please apply your changes to the latest version and try again" controller="operconfig-controller" object="cluster" namespace="" name="cluster" reconcileID="efbeeee8-f837-4b14-8801-3d84875eeb5b" 2025-12-12T16:16:29.235766275+00:00 stderr F I1212 16:16:29.235660 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:29.236481862+00:00 stderr F I1212 16:16:29.236425 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:16:29.244273363+00:00 stderr F I1212 16:16:29.244226 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:16:29.333707196+00:00 stderr F I1212 16:16:29.333596 1 log.go:245] successful reconciliation 2025-12-12T16:16:30.457824691+00:00 stderr F I1212 16:16:30.457674 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:30.457824691+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:30.457824691+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:30.457824691+00:00 stderr F reason: Unknown 2025-12-12T16:16:30.457824691+00:00 stderr F status: "False" 2025-12-12T16:16:30.457824691+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:30.457824691+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:30.457824691+00:00 stderr F message: |- 2025-12-12T16:16:30.457824691+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:30.457824691+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:30.457824691+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:30.457824691+00:00 stderr F status: "True" 2025-12-12T16:16:30.457824691+00:00 stderr F type: Degraded 2025-12-12T16:16:30.457824691+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:30.457824691+00:00 stderr F status: "True" 2025-12-12T16:16:30.457824691+00:00 stderr F type: Upgradeable 2025-12-12T16:16:30.457824691+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:30.457824691+00:00 stderr F message: |- 2025-12-12T16:16:30.457824691+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:30.457824691+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:30.457824691+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:30.457824691+00:00 stderr F Deployment 
"/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:30.457824691+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:30.457824691+00:00 stderr F reason: Deploying 2025-12-12T16:16:30.457824691+00:00 stderr F status: "True" 2025-12-12T16:16:30.457824691+00:00 stderr F type: Progressing 2025-12-12T16:16:30.457824691+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:30.457824691+00:00 stderr F status: "True" 2025-12-12T16:16:30.457824691+00:00 stderr F type: Available 2025-12-12T16:16:30.458486527+00:00 stderr F I1212 16:16:30.458328 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:30.631430719+00:00 stderr F I1212 16:16:30.631357 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:16:30.632560277+00:00 stderr F I1212 16:16:30.632530 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:16:30.634152536+00:00 stderr F I1212 16:16:30.634112 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:16:30.819474090+00:00 stderr F I1212 16:16:30.818725 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc00594c840 DisableUDPAggregation:false DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:16:31.020361945+00:00 stderr F I1212 16:16:31.020296 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 3 -> 3 2025-12-12T16:16:31.020428677+00:00 stderr F I1212 16:16:31.020418 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:16:31.020455207+00:00 stderr F I1212 16:16:31.020446 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:16:31.024221240+00:00 stderr F I1212 16:16:31.024166 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout progressing; 1/1 scheduled; 1 unavailable; 0 available; generation 2 -> 2 2025-12-12T16:16:31.024283261+00:00 stderr F W1212 16:16:31.024273 1 ovn_kubernetes.go:1708] daemonset openshift-ovn-kubernetes/ovnkube-node rollout seems to have hung with 0/1 behind, force-continuing 2025-12-12T16:16:31.024307112+00:00 stderr F I1212 16:16:31.024298 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout progressing; 1/1 scheduled; 1 unavailable; 0 available; generation 2 -> 2 2025-12-12T16:16:31.024330362+00:00 stderr F W1212 16:16:31.024322 1 ovn_kubernetes.go:1708] daemonset openshift-ovn-kubernetes/ovnkube-node rollout seems to have hung with 0/1 behind, force-continuing 2025-12-12T16:16:31.024373113+00:00 stderr F I1212 16:16:31.024364 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:16:31.042960487+00:00 stderr F I1212 16:16:31.042901 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:31.042960487+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 
2025-12-12T16:16:31.042960487+00:00 stderr F message: |- 2025-12-12T16:16:31.042960487+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:31.042960487+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:31.042960487+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:31.042960487+00:00 stderr F status: "True" 2025-12-12T16:16:31.042960487+00:00 stderr F type: Degraded 2025-12-12T16:16:31.042960487+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:31.042960487+00:00 stderr F status: "True" 2025-12-12T16:16:31.042960487+00:00 stderr F type: Upgradeable 2025-12-12T16:16:31.042960487+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:31.042960487+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:31.042960487+00:00 stderr F reason: Unknown 2025-12-12T16:16:31.042960487+00:00 stderr F status: "False" 2025-12-12T16:16:31.042960487+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:31.042960487+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:31.042960487+00:00 stderr F message: |- 2025-12-12T16:16:31.042960487+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:31.042960487+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:31.042960487+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:31.042960487+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:31.042960487+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:31.042960487+00:00 stderr F reason: Deploying 2025-12-12T16:16:31.042960487+00:00 stderr F status: "True" 2025-12-12T16:16:31.042960487+00:00 stderr F type: Progressing 2025-12-12T16:16:31.042960487+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:31.042960487+00:00 stderr F status: "True" 2025-12-12T16:16:31.042960487+00:00 stderr F type: Available 2025-12-12T16:16:31.058974128+00:00 stderr F I1212 16:16:31.058907 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:31.058974128+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:31.058974128+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:31.058974128+00:00 stderr F reason: Unknown 2025-12-12T16:16:31.058974128+00:00 stderr F status: "False" 2025-12-12T16:16:31.058974128+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:31.058974128+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:31.058974128+00:00 stderr F message: |- 2025-12-12T16:16:31.058974128+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:31.058974128+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:31.058974128+00:00 stderr F reason: 
RolloutHung 2025-12-12T16:16:31.058974128+00:00 stderr F status: "True" 2025-12-12T16:16:31.058974128+00:00 stderr F type: Degraded 2025-12-12T16:16:31.058974128+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:31.058974128+00:00 stderr F status: "True" 2025-12-12T16:16:31.058974128+00:00 stderr F type: Upgradeable 2025-12-12T16:16:31.058974128+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:31.058974128+00:00 stderr F message: |- 2025-12-12T16:16:31.058974128+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:31.058974128+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:31.058974128+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:31.058974128+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:31.058974128+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:31.058974128+00:00 stderr F reason: Deploying 2025-12-12T16:16:31.058974128+00:00 stderr F status: "True" 2025-12-12T16:16:31.058974128+00:00 stderr F type: Progressing 2025-12-12T16:16:31.058974128+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:31.058974128+00:00 stderr F status: "True" 2025-12-12T16:16:31.058974128+00:00 stderr F type: Available 2025-12-12T16:16:31.059658845+00:00 stderr F I1212 16:16:31.059629 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:31.217900618+00:00 stderr F I1212 16:16:31.217832 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:16:31.217900618+00:00 stderr F I1212 16:16:31.217857 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:16:31.418039444+00:00 stderr F I1212 16:16:31.417959 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:16:31.431198186+00:00 stderr F I1212 16:16:31.430980 1 log.go:245] Failed to update the operator configuration: could not apply (/, Kind=) /cluster, err: failed to apply / update (operator.openshift.io/v1, Kind=Network) /cluster: Operation cannot be fulfilled on networks.operator.openshift.io "cluster": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:16:31.431198186+00:00 stderr F E1212 16:16:31.431076 1 controller.go:353] "Reconciler error" err="could not apply (/, Kind=) /cluster, err: failed to apply / update (operator.openshift.io/v1, Kind=Network) /cluster: Operation cannot be fulfilled on networks.operator.openshift.io \"cluster\": the object has been modified; please apply your changes to the latest version and try again" controller="operconfig-controller" object="cluster" namespace="" name="cluster" reconcileID="11769015-f36a-4460-974a-8bbfc87e76ec" 2025-12-12T16:16:31.440209225+00:00 stderr F I1212 16:16:31.437557 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:31.440209225+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 
2025-12-12T16:16:31.440209225+00:00 stderr F message: |- 2025-12-12T16:16:31.440209225+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:31.440209225+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:31.440209225+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:31.440209225+00:00 stderr F status: "True" 2025-12-12T16:16:31.440209225+00:00 stderr F type: Degraded 2025-12-12T16:16:31.440209225+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:31.440209225+00:00 stderr F status: "True" 2025-12-12T16:16:31.440209225+00:00 stderr F type: Upgradeable 2025-12-12T16:16:31.440209225+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:31.440209225+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:31.440209225+00:00 stderr F reason: Unknown 2025-12-12T16:16:31.440209225+00:00 stderr F status: "False" 2025-12-12T16:16:31.440209225+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:31.440209225+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:31.440209225+00:00 stderr F message: |- 2025-12-12T16:16:31.440209225+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" is not available (awaiting 1 nodes) 2025-12-12T16:16:31.440209225+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:31.440209225+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:31.440209225+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:31.440209225+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:31.440209225+00:00 stderr F reason: Deploying 2025-12-12T16:16:31.440209225+00:00 stderr F status: "True" 2025-12-12T16:16:31.440209225+00:00 stderr F type: Progressing 2025-12-12T16:16:31.440209225+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:31.440209225+00:00 stderr F status: "True" 2025-12-12T16:16:31.440209225+00:00 stderr F type: Available 2025-12-12T16:16:31.451407579+00:00 stderr F I1212 16:16:31.451335 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:16:31.629070945+00:00 stderr F I1212 16:16:31.629011 1 log.go:245] Reconciling configmap from openshift-controller-manager/openshift-global-ca 2025-12-12T16:16:31.631778971+00:00 stderr F I1212 16:16:31.631706 1 log.go:245] ConfigMap openshift-controller-manager/openshift-global-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:16:31.860334001+00:00 stderr F I1212 16:16:31.860062 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-additional-cni-plugins updated, re-generating status 2025-12-12T16:16:31.860334001+00:00 stderr F I1212 16:16:31.860099 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-additional-cni-plugins updated, re-generating status 2025-12-12T16:16:32.631903699+00:00 stderr F I1212 16:16:32.631799 1 log.go:245] Reconciling configmap from openshift-ingress-operator/trusted-ca 2025-12-12T16:16:32.636439900+00:00 stderr F I1212 16:16:32.636370 1 log.go:245] ConfigMap 
openshift-ingress-operator/trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:16:32.838590826+00:00 stderr F I1212 16:16:32.838439 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:16:32.841854555+00:00 stderr F I1212 16:16:32.841793 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:16:32.845674989+00:00 stderr F I1212 16:16:32.845636 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:16:33.024239498+00:00 stderr F I1212 16:16:33.023218 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc004215ec0 DisableUDPAggregation:false DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:16:33.052841977+00:00 stderr F I1212 16:16:33.052795 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-additional-cni-plugins updated, re-generating status 2025-12-12T16:16:33.052899778+00:00 stderr F I1212 16:16:33.052887 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-additional-cni-plugins updated, re-generating status 2025-12-12T16:16:33.217861666+00:00 stderr F I1212 16:16:33.217795 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 3 -> 3 2025-12-12T16:16:33.217861666+00:00 stderr F I1212 16:16:33.217816 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:16:33.217861666+00:00 stderr F I1212 16:16:33.217824 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:16:33.220527451+00:00 stderr F I1212 16:16:33.220495 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout progressing; 1/1 scheduled; 1 unavailable; 0 available; generation 2 -> 2 2025-12-12T16:16:33.220527451+00:00 stderr F W1212 16:16:33.220513 1 ovn_kubernetes.go:1708] daemonset openshift-ovn-kubernetes/ovnkube-node rollout seems to have hung with 0/1 behind, force-continuing 2025-12-12T16:16:33.220527451+00:00 stderr F I1212 16:16:33.220519 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout progressing; 1/1 scheduled; 1 unavailable; 0 available; generation 2 -> 2 2025-12-12T16:16:33.220561142+00:00 stderr F W1212 16:16:33.220524 1 ovn_kubernetes.go:1708] daemonset openshift-ovn-kubernetes/ovnkube-node rollout seems to have hung with 0/1 behind, force-continuing 2025-12-12T16:16:33.220561142+00:00 stderr F I1212 16:16:33.220541 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:16:33.418278009+00:00 stderr F I1212 16:16:33.418221 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:16:33.418341100+00:00 stderr F I1212 16:16:33.418329 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:16:33.455404455+00:00 stderr F I1212 16:16:33.455335 1 log.go:245] Network operator config updated with conditions: 
2025-12-12T16:16:33.455404455+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:33.455404455+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:33.455404455+00:00 stderr F reason: Unknown 2025-12-12T16:16:33.455404455+00:00 stderr F status: "False" 2025-12-12T16:16:33.455404455+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:33.455404455+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:33.455404455+00:00 stderr F message: |- 2025-12-12T16:16:33.455404455+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:33.455404455+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:33.455404455+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:33.455404455+00:00 stderr F status: "True" 2025-12-12T16:16:33.455404455+00:00 stderr F type: Degraded 2025-12-12T16:16:33.455404455+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:33.455404455+00:00 stderr F status: "True" 2025-12-12T16:16:33.455404455+00:00 stderr F type: Upgradeable 2025-12-12T16:16:33.455404455+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:33.455404455+00:00 stderr F message: |- 2025-12-12T16:16:33.455404455+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:33.455404455+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:33.455404455+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:33.455404455+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:33.455404455+00:00 stderr F reason: Deploying 2025-12-12T16:16:33.455404455+00:00 stderr F status: "True" 2025-12-12T16:16:33.455404455+00:00 stderr F type: Progressing 2025-12-12T16:16:33.455404455+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:33.455404455+00:00 stderr F status: "True" 2025-12-12T16:16:33.455404455+00:00 stderr F type: Available 2025-12-12T16:16:33.455926888+00:00 stderr F I1212 16:16:33.455907 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:33.621134331+00:00 stderr F I1212 16:16:33.620497 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:16:33.635015890+00:00 stderr F I1212 16:16:33.634928 1 log.go:245] Failed to update the operator configuration: could not apply (/, Kind=) /cluster, err: failed to apply / update (operator.openshift.io/v1, Kind=Network) /cluster: Operation cannot be fulfilled on networks.operator.openshift.io "cluster": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:16:33.635050331+00:00 stderr F E1212 16:16:33.635035 1 controller.go:353] "Reconciler error" err="could not apply (/, Kind=) /cluster, err: failed to apply / update (operator.openshift.io/v1, Kind=Network) /cluster: Operation cannot be fulfilled on networks.operator.openshift.io \"cluster\": the object has been modified; please apply your changes to the latest version and try again" 
controller="operconfig-controller" object="cluster" namespace="" name="cluster" reconcileID="37ae3049-e30a-42e4-b8cd-ce8e839568f9" 2025-12-12T16:16:33.675471998+00:00 stderr F I1212 16:16:33.675426 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:16:33.835770112+00:00 stderr F I1212 16:16:33.835335 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:33.835770112+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:33.835770112+00:00 stderr F message: |- 2025-12-12T16:16:33.835770112+00:00 stderr F DaemonSet "/openshift-multus/multus-additional-cni-plugins" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:33.835770112+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:33.835770112+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:33.835770112+00:00 stderr F status: "True" 2025-12-12T16:16:33.835770112+00:00 stderr F type: Degraded 2025-12-12T16:16:33.835770112+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:33.835770112+00:00 stderr F status: "True" 2025-12-12T16:16:33.835770112+00:00 stderr F type: Upgradeable 2025-12-12T16:16:33.835770112+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:33.835770112+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:33.835770112+00:00 stderr F reason: Unknown 2025-12-12T16:16:33.835770112+00:00 stderr F status: "False" 2025-12-12T16:16:33.835770112+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:33.835770112+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:33.835770112+00:00 stderr F message: |- 2025-12-12T16:16:33.835770112+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:33.835770112+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:33.835770112+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:33.835770112+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:33.835770112+00:00 stderr F reason: Deploying 2025-12-12T16:16:33.835770112+00:00 stderr F status: "True" 2025-12-12T16:16:33.835770112+00:00 stderr F type: Progressing 2025-12-12T16:16:33.835770112+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:33.835770112+00:00 stderr F status: "True" 2025-12-12T16:16:33.835770112+00:00 stderr F type: Available 2025-12-12T16:16:33.851490855+00:00 stderr F I1212 16:16:33.851446 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:33.851490855+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:33.851490855+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:33.851490855+00:00 stderr F reason: Unknown 2025-12-12T16:16:33.851490855+00:00 stderr F status: "False" 2025-12-12T16:16:33.851490855+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:33.851490855+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:33.851490855+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" 
rollout is not making 2025-12-12T16:16:33.851490855+00:00 stderr F progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:33.851490855+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:33.851490855+00:00 stderr F status: "True" 2025-12-12T16:16:33.851490855+00:00 stderr F type: Degraded 2025-12-12T16:16:33.851490855+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:33.851490855+00:00 stderr F status: "True" 2025-12-12T16:16:33.851490855+00:00 stderr F type: Upgradeable 2025-12-12T16:16:33.851490855+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:33.851490855+00:00 stderr F message: |- 2025-12-12T16:16:33.851490855+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:33.851490855+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:33.851490855+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:33.851490855+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:33.851490855+00:00 stderr F reason: Deploying 2025-12-12T16:16:33.851490855+00:00 stderr F status: "True" 2025-12-12T16:16:33.851490855+00:00 stderr F type: Progressing 2025-12-12T16:16:33.851490855+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:33.851490855+00:00 stderr F status: "True" 2025-12-12T16:16:33.851490855+00:00 stderr F type: Available 2025-12-12T16:16:33.851707321+00:00 stderr F I1212 16:16:33.851616 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:34.438325403+00:00 stderr F I1212 16:16:34.437722 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:34.438325403+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:34.438325403+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making 2025-12-12T16:16:34.438325403+00:00 stderr F progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:34.438325403+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:34.438325403+00:00 stderr F status: "True" 2025-12-12T16:16:34.438325403+00:00 stderr F type: Degraded 2025-12-12T16:16:34.438325403+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:34.438325403+00:00 stderr F status: "True" 2025-12-12T16:16:34.438325403+00:00 stderr F type: Upgradeable 2025-12-12T16:16:34.438325403+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:34.438325403+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:34.438325403+00:00 stderr F reason: Unknown 2025-12-12T16:16:34.438325403+00:00 stderr F status: "False" 2025-12-12T16:16:34.438325403+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:34.438325403+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:34.438325403+00:00 stderr F message: |- 2025-12-12T16:16:34.438325403+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:34.438325403+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:34.438325403+00:00 stderr F Deployment 
"/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:34.438325403+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:34.438325403+00:00 stderr F reason: Deploying 2025-12-12T16:16:34.438325403+00:00 stderr F status: "True" 2025-12-12T16:16:34.438325403+00:00 stderr F type: Progressing 2025-12-12T16:16:34.438325403+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:34.438325403+00:00 stderr F status: "True" 2025-12-12T16:16:34.438325403+00:00 stderr F type: Available 2025-12-12T16:16:35.038205177+00:00 stderr F I1212 16:16:35.037754 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:16:35.040171356+00:00 stderr F I1212 16:16:35.040050 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:16:35.044740167+00:00 stderr F I1212 16:16:35.044657 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:16:35.216953122+00:00 stderr F I1212 16:16:35.216852 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc0052000c0 DisableUDPAggregation:false DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:16:35.423030283+00:00 stderr F I1212 16:16:35.422704 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 3 -> 3 2025-12-12T16:16:35.423030283+00:00 stderr F I1212 16:16:35.422730 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:16:35.423030283+00:00 stderr F I1212 16:16:35.422740 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:16:35.426503358+00:00 stderr F I1212 16:16:35.426412 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout progressing; 1/1 scheduled; 1 unavailable; 0 available; generation 2 -> 2 2025-12-12T16:16:35.426503358+00:00 stderr F W1212 16:16:35.426435 1 ovn_kubernetes.go:1708] daemonset openshift-ovn-kubernetes/ovnkube-node rollout seems to have hung with 0/1 behind, force-continuing 2025-12-12T16:16:35.426503358+00:00 stderr F I1212 16:16:35.426442 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout progressing; 1/1 scheduled; 1 unavailable; 0 available; generation 2 -> 2 2025-12-12T16:16:35.426503358+00:00 stderr F W1212 16:16:35.426449 1 ovn_kubernetes.go:1708] daemonset openshift-ovn-kubernetes/ovnkube-node rollout seems to have hung with 0/1 behind, force-continuing 2025-12-12T16:16:35.426503358+00:00 stderr F I1212 16:16:35.426466 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:16:35.454616284+00:00 stderr F I1212 16:16:35.454535 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:35.454616284+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:35.454616284+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:35.454616284+00:00 stderr 
F reason: Unknown 2025-12-12T16:16:35.454616284+00:00 stderr F status: "False" 2025-12-12T16:16:35.454616284+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:35.454616284+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:35.454616284+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making 2025-12-12T16:16:35.454616284+00:00 stderr F progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:35.454616284+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:35.454616284+00:00 stderr F status: "True" 2025-12-12T16:16:35.454616284+00:00 stderr F type: Degraded 2025-12-12T16:16:35.454616284+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:35.454616284+00:00 stderr F status: "True" 2025-12-12T16:16:35.454616284+00:00 stderr F type: Upgradeable 2025-12-12T16:16:35.454616284+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:35.454616284+00:00 stderr F message: |- 2025-12-12T16:16:35.454616284+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:35.454616284+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:35.454616284+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:35.454616284+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:35.454616284+00:00 stderr F reason: Deploying 2025-12-12T16:16:35.454616284+00:00 stderr F status: "True" 2025-12-12T16:16:35.454616284+00:00 stderr F type: Progressing 2025-12-12T16:16:35.454616284+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:35.454616284+00:00 stderr F status: "True" 2025-12-12T16:16:35.454616284+00:00 stderr F type: Available 2025-12-12T16:16:35.455124206+00:00 stderr F I1212 16:16:35.455073 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:35.617676555+00:00 stderr F I1212 16:16:35.617571 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:16:35.617676555+00:00 stderr F I1212 16:16:35.617600 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:16:35.699360069+00:00 stderr F I1212 16:16:35.699265 1 log.go:245] unable to determine openshift-apiserver apiserver service endpoints: no openshift-apiserver api endpoints found 2025-12-12T16:16:35.706600346+00:00 stderr F I1212 16:16:35.706514 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:35.713592527+00:00 stderr F I1212 16:16:35.713497 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:35.720340292+00:00 stderr F I1212 16:16:35.720246 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:35.726492472+00:00 stderr F I1212 16:16:35.726418 1 log.go:245] The check 
PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:35.731661898+00:00 stderr F I1212 16:16:35.731617 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:35.737144142+00:00 stderr F I1212 16:16:35.737091 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:35.818402556+00:00 stderr F I1212 16:16:35.818327 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:16:35.834547670+00:00 stderr F I1212 16:16:35.834480 1 log.go:245] Failed to update the operator configuration: could not apply (/, Kind=) /cluster, err: failed to apply / update (operator.openshift.io/v1, Kind=Network) /cluster: Operation cannot be fulfilled on networks.operator.openshift.io "cluster": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:16:35.834634692+00:00 stderr F E1212 16:16:35.834609 1 controller.go:353] "Reconciler error" err="could not apply (/, Kind=) /cluster, err: failed to apply / update (operator.openshift.io/v1, Kind=Network) /cluster: Operation cannot be fulfilled on networks.operator.openshift.io \"cluster\": the object has been modified; please apply your changes to the latest version and try again" controller="operconfig-controller" object="cluster" namespace="" name="cluster" reconcileID="440090fb-a545-4b91-9c3a-369df1b24780" 2025-12-12T16:16:35.843142270+00:00 stderr F I1212 16:16:35.843068 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:35.843142270+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:35.843142270+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making 2025-12-12T16:16:35.843142270+00:00 stderr F progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:35.843142270+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:35.843142270+00:00 stderr F status: "True" 2025-12-12T16:16:35.843142270+00:00 stderr F type: Degraded 2025-12-12T16:16:35.843142270+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:35.843142270+00:00 stderr F status: "True" 2025-12-12T16:16:35.843142270+00:00 stderr F type: Upgradeable 2025-12-12T16:16:35.843142270+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:35.843142270+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:35.843142270+00:00 stderr F reason: Unknown 2025-12-12T16:16:35.843142270+00:00 stderr F status: "False" 2025-12-12T16:16:35.843142270+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:35.843142270+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:35.843142270+00:00 stderr F message: |- 2025-12-12T16:16:35.843142270+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:35.843142270+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:35.843142270+00:00 stderr F Deployment "/openshift-multus/multus-admission-controller" is waiting for other operators to become ready 2025-12-12T16:16:35.843142270+00:00 stderr F Deployment 
"/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:35.843142270+00:00 stderr F reason: Deploying 2025-12-12T16:16:35.843142270+00:00 stderr F status: "True" 2025-12-12T16:16:35.843142270+00:00 stderr F type: Progressing 2025-12-12T16:16:35.843142270+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:35.843142270+00:00 stderr F status: "True" 2025-12-12T16:16:35.843142270+00:00 stderr F type: Available 2025-12-12T16:16:35.915120367+00:00 stderr F I1212 16:16:35.914991 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:16:36.228640171+00:00 stderr F I1212 16:16:36.228553 1 log.go:245] Reconciling configmap from openshift-kube-controller-manager/trusted-ca-bundle 2025-12-12T16:16:36.230820624+00:00 stderr F I1212 16:16:36.230720 1 log.go:245] ConfigMap openshift-kube-controller-manager/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:36.627221562+00:00 stderr F I1212 16:16:36.627147 1 log.go:245] Reconciling configmap from openshift-machine-api/mao-trusted-ca 2025-12-12T16:16:36.630493632+00:00 stderr F I1212 16:16:36.629643 1 log.go:245] ConfigMap openshift-machine-api/mao-trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:16:37.034862384+00:00 stderr F I1212 16:16:37.034756 1 log.go:245] Reconciling configmap from openshift-marketplace/marketplace-trusted-ca 2025-12-12T16:16:37.037475598+00:00 stderr F I1212 16:16:37.037423 1 log.go:245] ConfigMap openshift-marketplace/marketplace-trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:16:37.233123115+00:00 stderr F I1212 16:16:37.233043 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:16:37.235825591+00:00 stderr F I1212 16:16:37.235748 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:16:37.237607694+00:00 stderr F I1212 16:16:37.237570 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:16:37.417384873+00:00 stderr F I1212 16:16:37.417286 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc002db3080 DisableUDPAggregation:false DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:16:37.429678293+00:00 stderr F I1212 16:16:37.429606 1 log.go:245] Reconciling configmap from openshift-apiserver-operator/trusted-ca-bundle 2025-12-12T16:16:37.433189419+00:00 stderr F I1212 16:16:37.433148 1 log.go:245] ConfigMap openshift-apiserver-operator/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:37.622702066+00:00 stderr F I1212 16:16:37.622647 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 3 -> 3 2025-12-12T16:16:37.622775508+00:00 stderr F I1212 16:16:37.622765 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:16:37.622801239+00:00 stderr F I1212 16:16:37.622792 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:16:37.625837773+00:00 stderr F I1212 16:16:37.625808 1 ovn_kubernetes.go:1693] daemonset 
openshift-ovn-kubernetes/ovnkube-node rollout progressing; 1/1 scheduled; 1 unavailable; 0 available; generation 2 -> 2 2025-12-12T16:16:37.625890074+00:00 stderr F W1212 16:16:37.625877 1 ovn_kubernetes.go:1708] daemonset openshift-ovn-kubernetes/ovnkube-node rollout seems to have hung with 0/1 behind, force-continuing 2025-12-12T16:16:37.625918455+00:00 stderr F I1212 16:16:37.625908 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout progressing; 1/1 scheduled; 1 unavailable; 0 available; generation 2 -> 2 2025-12-12T16:16:37.625944205+00:00 stderr F W1212 16:16:37.625934 1 ovn_kubernetes.go:1708] daemonset openshift-ovn-kubernetes/ovnkube-node rollout seems to have hung with 0/1 behind, force-continuing 2025-12-12T16:16:37.625985516+00:00 stderr F I1212 16:16:37.625975 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:16:37.628753014+00:00 stderr F I1212 16:16:37.628718 1 log.go:245] Reconciling configmap from openshift-apiserver/trusted-ca-bundle 2025-12-12T16:16:37.631076581+00:00 stderr F I1212 16:16:37.631028 1 log.go:245] ConfigMap openshift-apiserver/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:37.817995764+00:00 stderr F I1212 16:16:37.817911 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:16:37.817995764+00:00 stderr F I1212 16:16:37.817949 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:16:37.829518435+00:00 stderr F I1212 16:16:37.829449 1 log.go:245] Reconciling configmap from openshift-authentication-operator/trusted-ca-bundle 2025-12-12T16:16:37.831558955+00:00 stderr F I1212 16:16:37.831512 1 log.go:245] ConfigMap openshift-authentication-operator/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:38.017822253+00:00 stderr F I1212 16:16:38.017734 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:16:38.032031290+00:00 stderr F I1212 16:16:38.031894 1 log.go:245] Reconciling configmap from openshift-authentication/v4-0-config-system-trusted-ca-bundle 2025-12-12T16:16:38.033964057+00:00 stderr F I1212 16:16:38.033907 1 log.go:245] ConfigMap openshift-authentication/v4-0-config-system-trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:38.037677028+00:00 stderr F I1212 16:16:38.037643 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:38.038019246+00:00 stderr F I1212 16:16:38.037954 1 log.go:245] Apply / Create of (operator.openshift.io/v1, Kind=Network) /cluster was successful 2025-12-12T16:16:38.038037756+00:00 stderr F I1212 16:16:38.038023 1 log.go:245] Starting render phase 2025-12-12T16:16:38.072476927+00:00 stderr F I1212 16:16:38.072374 1 ovn_kubernetes.go:344] OVN_EGRESSIP_HEALTHCHECK_PORT env var is not defined. 
Using: 9107 2025-12-12T16:16:38.108587459+00:00 stderr F I1212 16:16:38.108496 1 ovn_kubernetes.go:1457] IP family mode: node=single-stack, controlPlane=single-stack 2025-12-12T16:16:38.108587459+00:00 stderr F I1212 16:16:38.108525 1 ovn_kubernetes.go:1429] IP family change: updateNode=true, updateControlPlane=true 2025-12-12T16:16:38.108587459+00:00 stderr F I1212 16:16:38.108547 1 ovn_kubernetes.go:1601] OVN-Kubernetes control-plane and node already at release version 4.20.1; no changes required 2025-12-12T16:16:38.108587459+00:00 stderr F I1212 16:16:38.108567 1 ovn_kubernetes.go:531] ovnk components: ovnkube-node: isRunning=true, update=true; ovnkube-control-plane: isRunning=true, update=true 2025-12-12T16:16:38.223515705+00:00 stderr F I1212 16:16:38.223424 1 ovn_kubernetes.go:1693] daemonset openshift-network-node-identity/network-node-identity rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 1 -> 1 2025-12-12T16:16:38.223666608+00:00 stderr F I1212 16:16:38.223638 1 ovn_kubernetes.go:1698] daemonset openshift-network-node-identity/network-node-identity rollout complete 2025-12-12T16:16:38.229077890+00:00 stderr F I1212 16:16:38.228998 1 log.go:245] Reconciling configmap from openshift-console-operator/trusted-ca 2025-12-12T16:16:38.231724045+00:00 stderr F I1212 16:16:38.231694 1 log.go:245] ConfigMap openshift-console-operator/trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:16:38.429459253+00:00 stderr F I1212 16:16:38.428926 1 log.go:245] Reconciling configmap from openshift-console/trusted-ca-bundle 2025-12-12T16:16:38.431596555+00:00 stderr F I1212 16:16:38.431560 1 log.go:245] ConfigMap openshift-console/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:16:38.439739674+00:00 stderr F I1212 16:16:38.439704 1 log.go:245] Render phase done, rendered 126 objects 2025-12-12T16:16:38.831360114+00:00 stderr F I1212 16:16:38.831281 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster 2025-12-12T16:16:38.837435922+00:00 stderr F I1212 16:16:38.837394 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster was successful 2025-12-12T16:16:38.837435922+00:00 stderr F I1212 16:16:38.837427 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io 2025-12-12T16:16:38.853999156+00:00 stderr F I1212 16:16:38.853931 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io was successful 2025-12-12T16:16:38.854053618+00:00 stderr F I1212 16:16:38.854018 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io 2025-12-12T16:16:38.862070193+00:00 stderr F I1212 16:16:38.861982 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io was successful 2025-12-12T16:16:38.862148125+00:00 stderr F I1212 16:16:38.862105 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io 2025-12-12T16:16:38.869782102+00:00 stderr F I1212 16:16:38.869692 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io was successful 2025-12-12T16:16:38.869782102+00:00 stderr F I1212 16:16:38.869747 1 log.go:245] reconciling (apiextensions.k8s.io/v1, 
Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io 2025-12-12T16:16:38.878765141+00:00 stderr F I1212 16:16:38.878713 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io was successful 2025-12-12T16:16:38.878795162+00:00 stderr F I1212 16:16:38.878773 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-multus 2025-12-12T16:16:38.888131940+00:00 stderr F I1212 16:16:38.888052 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-multus was successful 2025-12-12T16:16:38.888131940+00:00 stderr F I1212 16:16:38.888114 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus 2025-12-12T16:16:38.893695996+00:00 stderr F I1212 16:16:38.893607 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus was successful 2025-12-12T16:16:38.893717116+00:00 stderr F I1212 16:16:38.893700 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools 2025-12-12T16:16:38.898253717+00:00 stderr F I1212 16:16:38.898199 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools was successful 2025-12-12T16:16:38.898270607+00:00 stderr F I1212 16:16:38.898261 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus 2025-12-12T16:16:38.904976831+00:00 stderr F I1212 16:16:38.904325 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus was successful 2025-12-12T16:16:38.904976831+00:00 stderr F I1212 16:16:38.904370 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient 2025-12-12T16:16:38.910643889+00:00 stderr F I1212 16:16:38.910558 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient was successful 2025-12-12T16:16:38.910643889+00:00 stderr F I1212 16:16:38.910632 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group 2025-12-12T16:16:39.035740584+00:00 stderr F I1212 16:16:39.035695 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group was successful 2025-12-12T16:16:39.035839836+00:00 stderr F I1212 16:16:39.035830 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools 2025-12-12T16:16:39.237687794+00:00 stderr F I1212 16:16:39.237608 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools was successful 2025-12-12T16:16:39.237687794+00:00 stderr F I1212 16:16:39.237669 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools 2025-12-12T16:16:39.439666375+00:00 stderr F I1212 16:16:39.439591 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools was successful 2025-12-12T16:16:39.439666375+00:00 stderr F I1212 16:16:39.439653 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers 2025-12-12T16:16:39.637379122+00:00 stderr F I1212 16:16:39.637316 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers was successful 2025-12-12T16:16:39.637511175+00:00 stderr F I1212 16:16:39.637500 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts 2025-12-12T16:16:39.838454831+00:00 stderr F I1212 
16:16:39.838394 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts was successful 2025-12-12T16:16:39.838575854+00:00 stderr F I1212 16:16:39.838560 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts 2025-12-12T16:16:40.037911291+00:00 stderr F I1212 16:16:40.037844 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts was successful 2025-12-12T16:16:40.037947662+00:00 stderr F I1212 16:16:40.037910 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni 2025-12-12T16:16:40.236562821+00:00 stderr F I1212 16:16:40.236508 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni was successful 2025-12-12T16:16:40.236598392+00:00 stderr F I1212 16:16:40.236563 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni 2025-12-12T16:16:40.437946597+00:00 stderr F I1212 16:16:40.437881 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni was successful 2025-12-12T16:16:40.438106601+00:00 stderr F I1212 16:16:40.438095 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project 2025-12-12T16:16:40.638805561+00:00 stderr F I1212 16:16:40.638735 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project was successful 2025-12-12T16:16:40.638805561+00:00 stderr F I1212 16:16:40.638790 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist 2025-12-12T16:16:40.835985205+00:00 stderr F I1212 16:16:40.835929 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist was successful 2025-12-12T16:16:40.836022746+00:00 stderr F I1212 16:16:40.835982 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources 2025-12-12T16:16:41.044603079+00:00 stderr F I1212 16:16:41.043562 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources was successful 2025-12-12T16:16:41.044603079+00:00 stderr F I1212 16:16:41.043623 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config 2025-12-12T16:16:41.239445026+00:00 stderr F I1212 16:16:41.239369 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config was successful 2025-12-12T16:16:41.239445026+00:00 stderr F I1212 16:16:41.239435 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config 2025-12-12T16:16:41.439230623+00:00 stderr F I1212 16:16:41.439136 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config was successful 2025-12-12T16:16:41.439355076+00:00 stderr F I1212 16:16:41.439340 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus 2025-12-12T16:16:41.651637709+00:00 stderr F I1212 16:16:41.651565 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus was successful 2025-12-12T16:16:41.651637709+00:00 stderr F I1212 16:16:41.651620 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins 2025-12-12T16:16:41.854075922+00:00 stderr F I1212 16:16:41.853984 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins was 
successful 2025-12-12T16:16:41.854075922+00:00 stderr F I1212 16:16:41.854047 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa 2025-12-12T16:16:42.036262309+00:00 stderr F I1212 16:16:42.036208 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa was successful 2025-12-12T16:16:42.036309571+00:00 stderr F I1212 16:16:42.036258 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role 2025-12-12T16:16:42.236360614+00:00 stderr F I1212 16:16:42.236300 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role was successful 2025-12-12T16:16:42.236389925+00:00 stderr F I1212 16:16:42.236359 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding 2025-12-12T16:16:42.440290713+00:00 stderr F I1212 16:16:42.440226 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding was successful 2025-12-12T16:16:42.440290713+00:00 stderr F I1212 16:16:42.440282 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon 2025-12-12T16:16:42.642204072+00:00 stderr F I1212 16:16:42.642144 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon was successful 2025-12-12T16:16:42.642236903+00:00 stderr F I1212 16:16:42.642211 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network 2025-12-12T16:16:42.844282766+00:00 stderr F I1212 16:16:42.844129 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network was successful 2025-12-12T16:16:42.844282766+00:00 stderr F I1212 16:16:42.844241 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/network-metrics-service 2025-12-12T16:16:43.041008359+00:00 stderr F I1212 16:16:43.040939 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/network-metrics-service was successful 2025-12-12T16:16:43.041117381+00:00 stderr F I1212 16:16:43.041106 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:16:43.240310665+00:00 stderr F I1212 16:16:43.240023 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:16:43.240310665+00:00 stderr F I1212 16:16:43.240067 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:16:43.451105271+00:00 stderr F I1212 16:16:43.450539 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:16:43.451105271+00:00 stderr F I1212 16:16:43.450596 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/multus-admission-controller 2025-12-12T16:16:43.641220653+00:00 stderr F I1212 16:16:43.638304 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/multus-admission-controller was successful 2025-12-12T16:16:43.641220653+00:00 stderr F I1212 16:16:43.638362 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ac 2025-12-12T16:16:43.837583607+00:00 stderr F I1212 16:16:43.837414 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ac was successful 2025-12-12T16:16:43.837583607+00:00 stderr F I1212 16:16:43.837469 1 
log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook 2025-12-12T16:16:44.041591558+00:00 stderr F I1212 16:16:44.037958 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook was successful 2025-12-12T16:16:44.041591558+00:00 stderr F I1212 16:16:44.038009 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook 2025-12-12T16:16:44.239793407+00:00 stderr F I1212 16:16:44.238981 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook was successful 2025-12-12T16:16:44.239793407+00:00 stderr F I1212 16:16:44.239043 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io 2025-12-12T16:16:44.447368174+00:00 stderr F I1212 16:16:44.447309 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io was successful 2025-12-12T16:16:44.447368174+00:00 stderr F I1212 16:16:44.447361 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller 2025-12-12T16:16:44.649899809+00:00 stderr F I1212 16:16:44.645474 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller was successful 2025-12-12T16:16:44.649899809+00:00 stderr F I1212 16:16:44.645525 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller 2025-12-12T16:16:44.837977471+00:00 stderr F I1212 16:16:44.837702 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller was successful 2025-12-12T16:16:44.837977471+00:00 stderr F I1212 16:16:44.837752 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:16:45.043133850+00:00 stderr F I1212 16:16:45.039631 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:16:45.043133850+00:00 stderr F I1212 16:16:45.039693 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:16:45.250630546+00:00 stderr F I1212 16:16:45.240628 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:16:45.250630546+00:00 stderr F I1212 16:16:45.240688 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules 2025-12-12T16:16:45.458754697+00:00 stderr F I1212 16:16:45.447209 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules was successful 2025-12-12T16:16:45.458754697+00:00 stderr F I1212 16:16:45.447258 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-ovn-kubernetes 2025-12-12T16:16:45.656010673+00:00 stderr F I1212 16:16:45.652682 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-ovn-kubernetes was successful 2025-12-12T16:16:45.656010673+00:00 stderr F I1212 16:16:45.654273 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org 2025-12-12T16:16:45.848229595+00:00 stderr F I1212 16:16:45.847955 1 log.go:245] Apply / Create of 
(apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org was successful 2025-12-12T16:16:45.848229595+00:00 stderr F I1212 16:16:45.848014 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org 2025-12-12T16:16:46.066995646+00:00 stderr F I1212 16:16:46.066947 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org was successful 2025-12-12T16:16:46.067082878+00:00 stderr F I1212 16:16:46.067073 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org 2025-12-12T16:16:46.259118656+00:00 stderr F I1212 16:16:46.259070 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org was successful 2025-12-12T16:16:46.259229089+00:00 stderr F I1212 16:16:46.259218 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org 2025-12-12T16:16:46.473912200+00:00 stderr F I1212 16:16:46.472960 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org was successful 2025-12-12T16:16:46.473912200+00:00 stderr F I1212 16:16:46.473010 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org 2025-12-12T16:16:46.707283618+00:00 stderr F I1212 16:16:46.700727 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org was successful 2025-12-12T16:16:46.707283618+00:00 stderr F I1212 16:16:46.700788 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:16:47.040907413+00:00 stderr F I1212 16:16:47.038777 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:16:47.040907413+00:00 stderr F I1212 16:16:47.038843 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:16:47.096311116+00:00 stderr F I1212 16:16:47.096165 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:16:47.096311116+00:00 stderr F I1212 16:16:47.096243 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io 2025-12-12T16:16:47.239554073+00:00 stderr F I1212 16:16:47.239507 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io was successful 2025-12-12T16:16:47.239648876+00:00 stderr F I1212 16:16:47.239639 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org 2025-12-12T16:16:47.466738700+00:00 stderr F I1212 16:16:47.466087 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:16:47.466738700+00:00 stderr F I1212 16:16:47.466155 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org 2025-12-12T16:16:47.734447096+00:00 stderr F I1212 16:16:47.731582 1 log.go:245] unable to determine openshift-apiserver 
apiserver service endpoints: no openshift-apiserver api endpoints found 2025-12-12T16:16:47.761240390+00:00 stderr F I1212 16:16:47.758836 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:47.761240390+00:00 stderr F I1212 16:16:47.759697 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:16:47.761240390+00:00 stderr F I1212 16:16:47.759755 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:16:47.774375481+00:00 stderr F I1212 16:16:47.769656 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:47.790958625+00:00 stderr F I1212 16:16:47.790886 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:47.826270838+00:00 stderr F I1212 16:16:47.821242 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:47.847521866+00:00 stderr F I1212 16:16:47.846357 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:47.862136423+00:00 stderr F I1212 16:16:47.856039 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:16:47.862136423+00:00 stderr F I1212 16:16:47.856090 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited 2025-12-12T16:16:47.867016642+00:00 stderr F I1212 16:16:47.866812 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:48.048858332+00:00 stderr F I1212 16:16:48.048218 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:16:48.048858332+00:00 stderr F I1212 16:16:48.048721 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited 2025-12-12T16:16:48.112125567+00:00 stderr F I1212 16:16:48.111451 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:48.128597579+00:00 stderr F I1212 16:16:48.128535 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:48.154420149+00:00 stderr F I1212 16:16:48.153618 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:48.168846531+00:00 stderr F I1212 16:16:48.162714 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:48.184093284+00:00 
stderr F I1212 16:16:48.183602 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:48.200445783+00:00 stderr F I1212 16:16:48.200381 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:48.247248265+00:00 stderr F I1212 16:16:48.244452 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited was successful 2025-12-12T16:16:48.247248265+00:00 stderr F I1212 16:16:48.244501 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited 2025-12-12T16:16:48.270351990+00:00 stderr F I1212 16:16:48.270003 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-admission-controller updated, re-generating status 2025-12-12T16:16:48.270351990+00:00 stderr F I1212 16:16:48.270033 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-admission-controller updated, re-generating status 2025-12-12T16:16:48.363354600+00:00 stderr F I1212 16:16:48.362787 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:48.425877507+00:00 stderr F I1212 16:16:48.425803 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:48.425877507+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:48.425877507+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:48.425877507+00:00 stderr F reason: Unknown 2025-12-12T16:16:48.425877507+00:00 stderr F status: "False" 2025-12-12T16:16:48.425877507+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:48.425877507+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:48.425877507+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making 2025-12-12T16:16:48.425877507+00:00 stderr F progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:48.425877507+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:48.425877507+00:00 stderr F status: "True" 2025-12-12T16:16:48.425877507+00:00 stderr F type: Degraded 2025-12-12T16:16:48.425877507+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:48.425877507+00:00 stderr F status: "True" 2025-12-12T16:16:48.425877507+00:00 stderr F type: Upgradeable 2025-12-12T16:16:48.425877507+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:48.425877507+00:00 stderr F message: |- 2025-12-12T16:16:48.425877507+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:48.425877507+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:48.425877507+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:48.425877507+00:00 stderr F reason: Deploying 2025-12-12T16:16:48.425877507+00:00 stderr F status: "True" 2025-12-12T16:16:48.425877507+00:00 stderr F type: Progressing 2025-12-12T16:16:48.425877507+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 
2025-12-12T16:16:48.425877507+00:00 stderr F status: "True" 2025-12-12T16:16:48.425877507+00:00 stderr F type: Available 2025-12-12T16:16:48.458244367+00:00 stderr F I1212 16:16:48.457529 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:48.462279245+00:00 stderr F I1212 16:16:48.458682 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:16:48.462279245+00:00 stderr F I1212 16:16:48.458755 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited 2025-12-12T16:16:48.515240968+00:00 stderr F I1212 16:16:48.511463 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:48.515240968+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:48.515240968+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making 2025-12-12T16:16:48.515240968+00:00 stderr F progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:48.515240968+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:48.515240968+00:00 stderr F status: "True" 2025-12-12T16:16:48.515240968+00:00 stderr F type: Degraded 2025-12-12T16:16:48.515240968+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:48.515240968+00:00 stderr F status: "True" 2025-12-12T16:16:48.515240968+00:00 stderr F type: Upgradeable 2025-12-12T16:16:48.515240968+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:48.515240968+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:48.515240968+00:00 stderr F reason: Unknown 2025-12-12T16:16:48.515240968+00:00 stderr F status: "False" 2025-12-12T16:16:48.515240968+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:48.515240968+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:48.515240968+00:00 stderr F message: |- 2025-12-12T16:16:48.515240968+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:48.515240968+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:48.515240968+00:00 stderr F Deployment "/openshift-network-console/networking-console-plugin" is waiting for other operators to become ready 2025-12-12T16:16:48.515240968+00:00 stderr F reason: Deploying 2025-12-12T16:16:48.515240968+00:00 stderr F status: "True" 2025-12-12T16:16:48.515240968+00:00 stderr F type: Progressing 2025-12-12T16:16:48.515240968+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:48.515240968+00:00 stderr F status: "True" 2025-12-12T16:16:48.515240968+00:00 stderr F type: Available 2025-12-12T16:16:48.612095593+00:00 stderr F I1212 16:16:48.612024 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:48.651711970+00:00 stderr F I1212 16:16:48.651656 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited was successful 2025-12-12T16:16:48.651711970+00:00 stderr F I1212 16:16:48.651704 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy 
2025-12-12T16:16:48.651939166+00:00 stderr F I1212 16:16:48.651913 1 log.go:245] Reconciling update to IngressController openshift-ingress-operator/default 2025-12-12T16:16:48.756888568+00:00 stderr F I1212 16:16:48.753867 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:48.840454358+00:00 stderr F I1212 16:16:48.839688 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy was successful 2025-12-12T16:16:48.840454358+00:00 stderr F I1212 16:16:48.839749 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy 2025-12-12T16:16:48.868530534+00:00 stderr F I1212 16:16:48.868224 1 allowlist_controller.go:149] Successfully updated sysctl allowlist 2025-12-12T16:16:48.950609808+00:00 stderr F I1212 16:16:48.948714 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:49.043266890+00:00 stderr F I1212 16:16:49.042470 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy was successful 2025-12-12T16:16:49.043266890+00:00 stderr F I1212 16:16:49.042530 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config 2025-12-12T16:16:49.149280328+00:00 stderr F I1212 16:16:49.146297 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:49.246592494+00:00 stderr F I1212 16:16:49.245566 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config was successful 2025-12-12T16:16:49.246592494+00:00 stderr F I1212 16:16:49.245625 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:16:49.359981631+00:00 stderr F I1212 16:16:49.359889 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:49.446372141+00:00 stderr F I1212 16:16:49.445493 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:16:49.446372141+00:00 stderr F I1212 16:16:49.445544 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:16:49.649463709+00:00 stderr F I1212 16:16:49.648963 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:16:49.649463709+00:00 stderr F I1212 16:16:49.649014 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:16:49.841347014+00:00 stderr F I1212 16:16:49.839789 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:16:49.841347014+00:00 stderr F I1212 16:16:49.839857 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 
2025-12-12T16:16:50.037390820+00:00 stderr F I1212 16:16:50.036335 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:16:50.037390820+00:00 stderr F I1212 16:16:50.036395 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:16:50.241251357+00:00 stderr F I1212 16:16:50.238608 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:16:50.241251357+00:00 stderr F I1212 16:16:50.238671 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn 2025-12-12T16:16:50.453248933+00:00 stderr F I1212 16:16:50.449694 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn was successful 2025-12-12T16:16:50.453248933+00:00 stderr F I1212 16:16:50.449759 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer 2025-12-12T16:16:50.538326340+00:00 stderr F I1212 16:16:50.537989 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:50.575297263+00:00 stderr F I1212 16:16:50.569797 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:50.590358440+00:00 stderr F I1212 16:16:50.589998 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:50.617374470+00:00 stderr F I1212 16:16:50.617134 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:50.630936171+00:00 stderr F I1212 16:16:50.628608 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:50.645312152+00:00 stderr F I1212 16:16:50.642394 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:50.649637228+00:00 stderr F I1212 16:16:50.648225 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer was successful 2025-12-12T16:16:50.649637228+00:00 stderr F I1212 16:16:50.648303 1 log.go:245] reconciling (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes 2025-12-12T16:16:50.837616587+00:00 stderr F I1212 16:16:50.837527 1 log.go:245] Apply / Create of (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes was successful 2025-12-12T16:16:50.837616587+00:00 stderr F I1212 16:16:50.837584 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader 2025-12-12T16:16:51.037167329+00:00 stderr F I1212 16:16:51.037084 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader was successful 
2025-12-12T16:16:51.037167329+00:00 stderr F I1212 16:16:51.037160 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib 2025-12-12T16:16:51.240834111+00:00 stderr F I1212 16:16:51.240744 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib was successful 2025-12-12T16:16:51.240834111+00:00 stderr F I1212 16:16:51.240820 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor 2025-12-12T16:16:51.438278682+00:00 stderr F I1212 16:16:51.437042 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor was successful 2025-12-12T16:16:51.438278682+00:00 stderr F I1212 16:16:51.437101 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer 2025-12-12T16:16:51.640142120+00:00 stderr F I1212 16:16:51.638656 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer was successful 2025-12-12T16:16:51.640142120+00:00 stderr F I1212 16:16:51.639933 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules 2025-12-12T16:16:51.842892350+00:00 stderr F I1212 16:16:51.841712 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules was successful 2025-12-12T16:16:51.842892350+00:00 stderr F I1212 16:16:51.841772 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules 2025-12-12T16:16:52.041608902+00:00 stderr F I1212 16:16:52.041521 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules was successful 2025-12-12T16:16:52.041608902+00:00 stderr F I1212 16:16:52.041592 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features 2025-12-12T16:16:52.239704298+00:00 stderr F I1212 16:16:52.239596 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features was successful 2025-12-12T16:16:52.239704298+00:00 stderr F I1212 16:16:52.239663 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics 2025-12-12T16:16:52.442360396+00:00 stderr F I1212 16:16:52.441871 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics was successful 2025-12-12T16:16:52.442360396+00:00 stderr F I1212 16:16:52.441940 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:16:52.639213332+00:00 stderr F I1212 16:16:52.639129 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:16:52.639253393+00:00 stderr F I1212 16:16:52.639211 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node 2025-12-12T16:16:52.848214574+00:00 stderr F I1212 16:16:52.847298 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node was successful 2025-12-12T16:16:52.848214574+00:00 stderr F I1212 16:16:52.847354 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node 
2025-12-12T16:16:53.040076638+00:00 stderr F I1212 16:16:53.039996 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:16:53.040076638+00:00 stderr F I1212 16:16:53.040047 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:16:53.246019176+00:00 stderr F I1212 16:16:53.245950 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:16:53.246019176+00:00 stderr F I1212 16:16:53.246002 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:16:53.307054066+00:00 stderr F I1212 16:16:53.306653 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:53.315606355+00:00 stderr F I1212 16:16:53.315538 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:53.330495818+00:00 stderr F I1212 16:16:53.329308 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:53.337766486+00:00 stderr F I1212 16:16:53.337699 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:53.351916651+00:00 stderr F I1212 16:16:53.351819 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:53.364216532+00:00 stderr F I1212 16:16:53.364030 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:53.441022507+00:00 stderr F I1212 16:16:53.440945 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:16:53.441022507+00:00 stderr F I1212 16:16:53.441014 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-host-network 2025-12-12T16:16:53.643461029+00:00 stderr F I1212 16:16:53.642037 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-host-network was successful 2025-12-12T16:16:53.643461029+00:00 stderr F I1212 16:16:53.642101 1 log.go:245] reconciling (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas 2025-12-12T16:16:53.838876430+00:00 stderr F I1212 16:16:53.838778 1 log.go:245] Apply / Create of (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas was successful 2025-12-12T16:16:53.838876430+00:00 stderr F I1212 16:16:53.838830 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane 2025-12-12T16:16:53.955056267+00:00 stderr F I1212 16:16:53.954982 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:53.974231105+00:00 stderr F I1212 16:16:53.971696 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n 
openshift-network-diagnostics is applied 2025-12-12T16:16:53.989843606+00:00 stderr F I1212 16:16:53.987725 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:54.002314850+00:00 stderr F I1212 16:16:54.002136 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:16:54.015664336+00:00 stderr F I1212 16:16:54.015587 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:54.024770189+00:00 stderr F I1212 16:16:54.023652 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:54.037757876+00:00 stderr F I1212 16:16:54.037025 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:54.118415275+00:00 stderr F I1212 16:16:54.112966 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:54.128356148+00:00 stderr F I1212 16:16:54.127934 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane was successful 2025-12-12T16:16:54.128356148+00:00 stderr F I1212 16:16:54.127987 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node 2025-12-12T16:16:54.295661982+00:00 stderr F I1212 16:16:54.295587 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:54.409149343+00:00 stderr F I1212 16:16:54.407768 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node was successful 2025-12-12T16:16:54.411163462+00:00 stderr F I1212 16:16:54.410839 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label 2025-12-12T16:16:54.456905809+00:00 stderr F I1212 16:16:54.456583 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label was successful 2025-12-12T16:16:54.456905809+00:00 stderr F I1212 16:16:54.456640 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding 2025-12-12T16:16:54.495139992+00:00 stderr F I1212 16:16:54.494684 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:54.641476905+00:00 stderr F I1212 16:16:54.640552 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding was successful 2025-12-12T16:16:54.641476905+00:00 stderr F I1212 16:16:54.641317 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-diagnostics 2025-12-12T16:16:54.696985150+00:00 stderr F I1212 16:16:54.691469 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n 
openshift-network-diagnostics is applied 2025-12-12T16:16:54.848321835+00:00 stderr F I1212 16:16:54.846734 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-diagnostics was successful 2025-12-12T16:16:54.848321835+00:00 stderr F I1212 16:16:54.846784 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:16:54.891948180+00:00 stderr F I1212 16:16:54.891885 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:55.055117184+00:00 stderr F I1212 16:16:55.053648 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:16:55.055117184+00:00 stderr F I1212 16:16:55.053698 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:16:55.091154344+00:00 stderr F I1212 16:16:55.091045 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:55.245974874+00:00 stderr F I1212 16:16:55.244725 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:16:55.245974874+00:00 stderr F I1212 16:16:55.244787 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:16:55.296303942+00:00 stderr F I1212 16:16:55.294563 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:55.441306552+00:00 stderr F I1212 16:16:55.438080 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:16:55.441306552+00:00 stderr F I1212 16:16:55.438136 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics 2025-12-12T16:16:55.496856689+00:00 stderr F I1212 16:16:55.496786 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:55.647854595+00:00 stderr F I1212 16:16:55.647429 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics was successful 2025-12-12T16:16:55.647854595+00:00 stderr F I1212 16:16:55.647480 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics 2025-12-12T16:16:55.722353434+00:00 stderr F I1212 16:16:55.720804 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:55.840241892+00:00 stderr F I1212 16:16:55.839904 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics was successful 2025-12-12T16:16:55.840241892+00:00 stderr F I1212 16:16:55.839969 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics 2025-12-12T16:16:55.902873771+00:00 stderr F I1212 16:16:55.895586 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n 
openshift-network-diagnostics is applied 2025-12-12T16:16:55.922443259+00:00 stderr F I1212 16:16:55.917341 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.903163638 +0000 UTC))" 2025-12-12T16:16:55.922443259+00:00 stderr F I1212 16:16:55.917407 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.917388156 +0000 UTC))" 2025-12-12T16:16:55.922443259+00:00 stderr F I1212 16:16:55.917424 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.917414246 +0000 UTC))" 2025-12-12T16:16:55.922443259+00:00 stderr F I1212 16:16:55.917439 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.917429817 +0000 UTC))" 2025-12-12T16:16:55.922443259+00:00 stderr F I1212 16:16:55.917454 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.917444377 +0000 UTC))" 2025-12-12T16:16:55.922443259+00:00 stderr F I1212 16:16:55.917474 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.917464088 +0000 UTC))" 2025-12-12T16:16:55.922443259+00:00 stderr F I1212 16:16:55.917497 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.917483568 +0000 UTC))" 2025-12-12T16:16:55.922443259+00:00 stderr F I1212 16:16:55.917515 1 tlsconfig.go:181] "Loaded client CA" index=7 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.917505489 +0000 UTC))" 2025-12-12T16:16:55.922443259+00:00 stderr F I1212 16:16:55.917533 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.917522959 +0000 UTC))" 2025-12-12T16:16:55.922443259+00:00 stderr F I1212 16:16:55.917554 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.917541949 +0000 UTC))" 2025-12-12T16:16:55.922443259+00:00 stderr F I1212 16:16:55.917893 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"*.metrics.openshift-network-operator.svc\" [serving] validServingFor=[*.metrics.openshift-network-operator.svc,*.metrics.openshift-network-operator.svc.cluster.local,metrics.openshift-network-operator.svc,metrics.openshift-network-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:19 +0000 UTC to 2027-11-02 07:52:20 +0000 UTC (now=2025-12-12 16:16:55.917872538 +0000 UTC))" 2025-12-12T16:16:55.922443259+00:00 stderr F I1212 16:16:55.918090 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556183\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556183\" (2025-12-12 15:16:23 +0000 UTC to 2028-12-12 15:16:23 +0000 UTC (now=2025-12-12 16:16:55.918075273 +0000 UTC))" 2025-12-12T16:16:56.036270628+00:00 stderr F I1212 16:16:56.036221 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics was successful 2025-12-12T16:16:56.036351900+00:00 stderr F I1212 16:16:56.036342 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source 2025-12-12T16:16:56.091911917+00:00 stderr F I1212 16:16:56.091337 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:16:56.237413439+00:00 stderr F I1212 16:16:56.235750 1 pod_watcher.go:132] Operand /, Kind= openshift-network-console/networking-console-plugin updated, re-generating status 2025-12-12T16:16:56.237413439+00:00 stderr F I1212 16:16:56.235781 1 pod_watcher.go:132] Operand /, Kind= openshift-network-console/networking-console-plugin updated, re-generating status 2025-12-12T16:16:56.243019096+00:00 stderr F I1212 16:16:56.242585 1 log.go:245] Apply / Create of (apps/v1, 
Kind=Deployment) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:16:56.243019096+00:00 stderr F I1212 16:16:56.242638 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-source 2025-12-12T16:16:56.302912748+00:00 stderr F I1212 16:16:56.302852 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:56.331506306+00:00 stderr F I1212 16:16:56.331434 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:56.331827054+00:00 stderr F I1212 16:16:56.331807 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:56.331827054+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:56.331827054+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:56.331827054+00:00 stderr F reason: Unknown 2025-12-12T16:16:56.331827054+00:00 stderr F status: "False" 2025-12-12T16:16:56.331827054+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:56.331827054+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:56.331827054+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making 2025-12-12T16:16:56.331827054+00:00 stderr F progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:56.331827054+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:56.331827054+00:00 stderr F status: "True" 2025-12-12T16:16:56.331827054+00:00 stderr F type: Degraded 2025-12-12T16:16:56.331827054+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:56.331827054+00:00 stderr F status: "True" 2025-12-12T16:16:56.331827054+00:00 stderr F type: Upgradeable 2025-12-12T16:16:56.331827054+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:56.331827054+00:00 stderr F message: |- 2025-12-12T16:16:56.331827054+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:56.331827054+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:56.331827054+00:00 stderr F reason: Deploying 2025-12-12T16:16:56.331827054+00:00 stderr F status: "True" 2025-12-12T16:16:56.331827054+00:00 stderr F type: Progressing 2025-12-12T16:16:56.331827054+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:56.331827054+00:00 stderr F status: "True" 2025-12-12T16:16:56.331827054+00:00 stderr F type: Available 2025-12-12T16:16:56.395263713+00:00 stderr F I1212 16:16:56.392869 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:56.395263713+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:56.395263713+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making 2025-12-12T16:16:56.395263713+00:00 stderr F progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:56.395263713+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:56.395263713+00:00 stderr F status: "True" 2025-12-12T16:16:56.395263713+00:00 stderr F type: Degraded 2025-12-12T16:16:56.395263713+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:56.395263713+00:00 stderr F status: "True" 2025-12-12T16:16:56.395263713+00:00 stderr F type: Upgradeable 
2025-12-12T16:16:56.395263713+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:56.395263713+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:56.395263713+00:00 stderr F reason: Unknown 2025-12-12T16:16:56.395263713+00:00 stderr F status: "False" 2025-12-12T16:16:56.395263713+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:56.395263713+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:56.395263713+00:00 stderr F message: |- 2025-12-12T16:16:56.395263713+00:00 stderr F DaemonSet "/openshift-multus/network-metrics-daemon" is waiting for other operators to become ready 2025-12-12T16:16:56.395263713+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:16:56.395263713+00:00 stderr F reason: Deploying 2025-12-12T16:16:56.395263713+00:00 stderr F status: "True" 2025-12-12T16:16:56.395263713+00:00 stderr F type: Progressing 2025-12-12T16:16:56.395263713+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:56.395263713+00:00 stderr F status: "True" 2025-12-12T16:16:56.395263713+00:00 stderr F type: Available 2025-12-12T16:16:56.443845769+00:00 stderr F I1212 16:16:56.443778 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:16:56.443906120+00:00 stderr F I1212 16:16:56.443854 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source 2025-12-12T16:16:56.489769020+00:00 stderr F I1212 16:16:56.489704 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:56.651646161+00:00 stderr F I1212 16:16:56.650477 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:16:56.651646161+00:00 stderr F I1212 16:16:56.650533 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:16:56.690322425+00:00 stderr F I1212 16:16:56.690007 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:56.837582661+00:00 stderr F I1212 16:16:56.837528 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:16:56.837675493+00:00 stderr F I1212 16:16:56.837666 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:16:56.890163615+00:00 stderr F I1212 16:16:56.890096 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:57.039809568+00:00 stderr F I1212 16:16:57.039719 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:16:57.039884960+00:00 stderr F I1212 16:16:57.039809 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target 2025-12-12T16:16:57.090870635+00:00 stderr F I1212 16:16:57.090797 1 log.go:245] The check 
PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:57.241582834+00:00 stderr F I1212 16:16:57.241525 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:16:57.241741578+00:00 stderr F I1212 16:16:57.241715 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-target 2025-12-12T16:16:57.288848628+00:00 stderr F I1212 16:16:57.288791 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:57.441012393+00:00 stderr F I1212 16:16:57.440936 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:16:57.441143446+00:00 stderr F I1212 16:16:57.441134 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role 2025-12-12T16:16:57.488906322+00:00 stderr F I1212 16:16:57.488844 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:16:57.643061836+00:00 stderr F I1212 16:16:57.642881 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role was successful 2025-12-12T16:16:57.643061836+00:00 stderr F I1212 16:16:57.642934 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding 2025-12-12T16:16:57.688352742+00:00 stderr F I1212 16:16:57.687885 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:57.837369270+00:00 stderr F I1212 16:16:57.837292 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding was successful 2025-12-12T16:16:57.837369270+00:00 stderr F I1212 16:16:57.837353 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-node-identity 2025-12-12T16:16:57.888519249+00:00 stderr F I1212 16:16:57.888466 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:58.038075750+00:00 stderr F I1212 16:16:58.038022 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-node-identity was successful 2025-12-12T16:16:58.038198633+00:00 stderr F I1212 16:16:58.038173 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity 2025-12-12T16:16:58.088804919+00:00 stderr F I1212 16:16:58.088740 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:58.241253411+00:00 stderr F I1212 16:16:58.241163 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:16:58.241366074+00:00 stderr F I1212 16:16:58.241354 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity 2025-12-12T16:16:58.289861667+00:00 stderr F I1212 16:16:58.289810 1 
log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:16:58.436362274+00:00 stderr F I1212 16:16:58.436300 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity was successful 2025-12-12T16:16:58.436473447+00:00 stderr F I1212 16:16:58.436462 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity 2025-12-12T16:16:58.497879396+00:00 stderr F I1212 16:16:58.497815 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:58.637111005+00:00 stderr F I1212 16:16:58.637035 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity was successful 2025-12-12T16:16:58.637111005+00:00 stderr F I1212 16:16:58.637100 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:16:58.689001722+00:00 stderr F I1212 16:16:58.688941 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:58.836768140+00:00 stderr F I1212 16:16:58.836701 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:16:58.836824921+00:00 stderr F I1212 16:16:58.836770 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:16:58.888878852+00:00 stderr F I1212 16:16:58.888792 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:16:59.038676069+00:00 stderr F I1212 16:16:59.038575 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:16:59.038676069+00:00 stderr F I1212 16:16:59.038636 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 2025-12-12T16:16:59.089226824+00:00 stderr F I1212 16:16:59.089161 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:16:59.236894339+00:00 stderr F I1212 16:16:59.236823 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 was successful 2025-12-12T16:16:59.236894339+00:00 stderr F I1212 16:16:59.236887 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm 2025-12-12T16:16:59.290299803+00:00 stderr F I1212 16:16:59.290245 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:16:59.391092734+00:00 stderr F I1212 16:16:59.391010 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/network-metrics-daemon updated, re-generating status 2025-12-12T16:16:59.391092734+00:00 stderr F I1212 16:16:59.391043 1 pod_watcher.go:132] Operand /, Kind= 
openshift-multus/network-metrics-daemon updated, re-generating status 2025-12-12T16:16:59.444740483+00:00 stderr F I1212 16:16:59.441602 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm was successful 2025-12-12T16:16:59.444740483+00:00 stderr F I1212 16:16:59.441665 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity 2025-12-12T16:16:59.469419756+00:00 stderr F I1212 16:16:59.467359 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:16:59.469419756+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:59.469419756+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:59.469419756+00:00 stderr F reason: Unknown 2025-12-12T16:16:59.469419756+00:00 stderr F status: "False" 2025-12-12T16:16:59.469419756+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:59.469419756+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:59.469419756+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making 2025-12-12T16:16:59.469419756+00:00 stderr F progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:59.469419756+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:59.469419756+00:00 stderr F status: "True" 2025-12-12T16:16:59.469419756+00:00 stderr F type: Degraded 2025-12-12T16:16:59.469419756+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:59.469419756+00:00 stderr F status: "True" 2025-12-12T16:16:59.469419756+00:00 stderr F type: Upgradeable 2025-12-12T16:16:59.469419756+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:59.469419756+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 2025-12-12T16:16:59.469419756+00:00 stderr F 1 nodes) 2025-12-12T16:16:59.469419756+00:00 stderr F reason: Deploying 2025-12-12T16:16:59.469419756+00:00 stderr F status: "True" 2025-12-12T16:16:59.469419756+00:00 stderr F type: Progressing 2025-12-12T16:16:59.469419756+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:59.469419756+00:00 stderr F status: "True" 2025-12-12T16:16:59.469419756+00:00 stderr F type: Available 2025-12-12T16:16:59.469419756+00:00 stderr F I1212 16:16:59.467819 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:16:59.502271158+00:00 stderr F I1212 16:16:59.498703 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:16:59.515585593+00:00 stderr F I1212 16:16:59.515274 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:16:59.515585593+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:16:59.515585593+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making 2025-12-12T16:16:59.515585593+00:00 stderr F progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:16:59.515585593+00:00 stderr F reason: RolloutHung 2025-12-12T16:16:59.515585593+00:00 stderr F status: "True" 2025-12-12T16:16:59.515585593+00:00 stderr F type: Degraded 2025-12-12T16:16:59.515585593+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:16:59.515585593+00:00 stderr F status: "True" 
2025-12-12T16:16:59.515585593+00:00 stderr F type: Upgradeable 2025-12-12T16:16:59.515585593+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:16:59.515585593+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:16:59.515585593+00:00 stderr F reason: Unknown 2025-12-12T16:16:59.515585593+00:00 stderr F status: "False" 2025-12-12T16:16:59.515585593+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:16:59.515585593+00:00 stderr F - lastTransitionTime: "2025-11-03T09:40:45Z" 2025-12-12T16:16:59.515585593+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 2025-12-12T16:16:59.515585593+00:00 stderr F 1 nodes) 2025-12-12T16:16:59.515585593+00:00 stderr F reason: Deploying 2025-12-12T16:16:59.515585593+00:00 stderr F status: "True" 2025-12-12T16:16:59.515585593+00:00 stderr F type: Progressing 2025-12-12T16:16:59.515585593+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:16:59.515585593+00:00 stderr F status: "True" 2025-12-12T16:16:59.515585593+00:00 stderr F type: Available 2025-12-12T16:16:59.638798051+00:00 stderr F I1212 16:16:59.638710 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:16:59.638798051+00:00 stderr F I1212 16:16:59.638771 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io 2025-12-12T16:16:59.838858286+00:00 stderr F I1212 16:16:59.838791 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io was successful 2025-12-12T16:16:59.838929437+00:00 stderr F I1212 16:16:59.838853 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity 2025-12-12T16:17:00.041309278+00:00 stderr F I1212 16:17:00.041225 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:17:00.041309278+00:00 stderr F I1212 16:17:00.041281 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules 2025-12-12T16:17:00.237905297+00:00 stderr F I1212 16:17:00.237828 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules was successful 2025-12-12T16:17:00.237905297+00:00 stderr F I1212 16:17:00.237887 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter 2025-12-12T16:17:00.444886430+00:00 stderr F I1212 16:17:00.439984 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter was successful 2025-12-12T16:17:00.444886430+00:00 stderr F I1212 16:17:00.440044 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter 2025-12-12T16:17:00.637114054+00:00 stderr F I1212 16:17:00.637047 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:17:00.637163745+00:00 stderr F I1212 16:17:00.637110 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter 2025-12-12T16:17:00.662979865+00:00 stderr F I1212 16:17:00.662879 1 
log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:17:00.669933955+00:00 stderr F I1212 16:17:00.669871 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:17:00.675729766+00:00 stderr F I1212 16:17:00.675674 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:00.680840021+00:00 stderr F I1212 16:17:00.680696 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:00.686209362+00:00 stderr F I1212 16:17:00.686138 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:00.694253869+00:00 stderr F I1212 16:17:00.693984 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:17:00.836029490+00:00 stderr F I1212 16:17:00.835971 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter was successful 2025-12-12T16:17:00.836029490+00:00 stderr F I1212 16:17:00.836024 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script 2025-12-12T16:17:00.888349257+00:00 stderr F I1212 16:17:00.888292 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:01.036900884+00:00 stderr F I1212 16:17:01.036840 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script was successful 2025-12-12T16:17:01.036900884+00:00 stderr F I1212 16:17:01.036890 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter 2025-12-12T16:17:01.088153375+00:00 stderr F I1212 16:17:01.088101 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:17:01.240366782+00:00 stderr F I1212 16:17:01.240282 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:17:01.240366782+00:00 stderr F I1212 16:17:01.240333 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-console 2025-12-12T16:17:01.287889582+00:00 stderr F I1212 16:17:01.287805 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:17:01.442597159+00:00 stderr F I1212 16:17:01.442493 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-console was successful 2025-12-12T16:17:01.442597159+00:00 stderr F I1212 16:17:01.442556 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin 2025-12-12T16:17:01.491130404+00:00 stderr F I1212 16:17:01.490480 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 
2025-12-12T16:17:01.636914433+00:00 stderr F I1212 16:17:01.636842 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:17:01.636960284+00:00 stderr F I1212 16:17:01.636925 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin 2025-12-12T16:17:01.842544383+00:00 stderr F I1212 16:17:01.842444 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:17:01.842544383+00:00 stderr F I1212 16:17:01.842511 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-console/networking-console-plugin 2025-12-12T16:17:02.038477597+00:00 stderr F I1212 16:17:02.038398 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:17:02.038477597+00:00 stderr F I1212 16:17:02.038447 1 log.go:245] reconciling (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin 2025-12-12T16:17:02.238732366+00:00 stderr F I1212 16:17:02.238447 1 log.go:245] Apply / Create of (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin was successful 2025-12-12T16:17:02.259452692+00:00 stderr F I1212 16:17:02.259381 1 log.go:245] Operconfig Controller complete 2025-12-12T16:17:02.921536907+00:00 stderr F I1212 16:17:02.921443 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:17:02.921536907+00:00 stderr F I1212 16:17:02.921484 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:17:02.945718837+00:00 stderr F I1212 16:17:02.945156 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:17:02.945718837+00:00 stderr F I1212 16:17:02.945211 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:17:02.995396170+00:00 stderr F I1212 16:17:02.994597 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:17:02.995396170+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:17:02.995396170+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:17:02.995396170+00:00 stderr F reason: Unknown 2025-12-12T16:17:02.995396170+00:00 stderr F status: "False" 2025-12-12T16:17:02.995396170+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:17:02.995396170+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:17:02.995396170+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making 2025-12-12T16:17:02.995396170+00:00 stderr F progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:17:02.995396170+00:00 stderr F reason: RolloutHung 2025-12-12T16:17:02.995396170+00:00 stderr F status: "True" 2025-12-12T16:17:02.995396170+00:00 stderr F type: Degraded 2025-12-12T16:17:02.995396170+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:17:02.995396170+00:00 stderr F status: "True" 2025-12-12T16:17:02.995396170+00:00 stderr F type: Upgradeable 2025-12-12T16:17:02.995396170+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:02Z" 2025-12-12T16:17:02.995396170+00:00 stderr F status: "False" 2025-12-12T16:17:02.995396170+00:00 stderr F type: Progressing 
2025-12-12T16:17:02.995396170+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:17:02.995396170+00:00 stderr F status: "True" 2025-12-12T16:17:02.995396170+00:00 stderr F type: Available 2025-12-12T16:17:02.995396170+00:00 stderr F I1212 16:17:02.995143 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:17:03.018109404+00:00 stderr F I1212 16:17:03.017017 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:17:03.018109404+00:00 stderr F - lastTransitionTime: "2025-12-12T16:16:24Z" 2025-12-12T16:17:03.018109404+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" rollout is not making 2025-12-12T16:17:03.018109404+00:00 stderr F progress - last change 2025-11-03T09:40:45Z 2025-12-12T16:17:03.018109404+00:00 stderr F reason: RolloutHung 2025-12-12T16:17:03.018109404+00:00 stderr F status: "True" 2025-12-12T16:17:03.018109404+00:00 stderr F type: Degraded 2025-12-12T16:17:03.018109404+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:17:03.018109404+00:00 stderr F status: "True" 2025-12-12T16:17:03.018109404+00:00 stderr F type: Upgradeable 2025-12-12T16:17:03.018109404+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:17:03.018109404+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:17:03.018109404+00:00 stderr F reason: Unknown 2025-12-12T16:17:03.018109404+00:00 stderr F status: "False" 2025-12-12T16:17:03.018109404+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:17:03.018109404+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:17:03.018109404+00:00 stderr F status: "False" 2025-12-12T16:17:03.018109404+00:00 stderr F type: Progressing 2025-12-12T16:17:03.018109404+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:17:03.018109404+00:00 stderr F status: "True" 2025-12-12T16:17:03.018109404+00:00 stderr F type: Available 2025-12-12T16:17:03.059447873+00:00 stderr F I1212 16:17:03.058442 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:17:03.059447873+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:17:03.059447873+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:17:03.059447873+00:00 stderr F reason: Unknown 2025-12-12T16:17:03.059447873+00:00 stderr F status: "False" 2025-12-12T16:17:03.059447873+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:17:03.059447873+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:17:03.059447873+00:00 stderr F status: "False" 2025-12-12T16:17:03.059447873+00:00 stderr F type: Degraded 2025-12-12T16:17:03.059447873+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:17:03.059447873+00:00 stderr F status: "True" 2025-12-12T16:17:03.059447873+00:00 stderr F type: Upgradeable 2025-12-12T16:17:03.059447873+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:02Z" 2025-12-12T16:17:03.059447873+00:00 stderr F status: "False" 2025-12-12T16:17:03.059447873+00:00 stderr F type: Progressing 2025-12-12T16:17:03.059447873+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:17:03.059447873+00:00 stderr F status: "True" 2025-12-12T16:17:03.059447873+00:00 stderr F type: Available 2025-12-12T16:17:03.059447873+00:00 stderr F I1212 16:17:03.059216 1 log.go:245] Skipping reconcile of 
Network.operator.openshift.io: spec unchanged 2025-12-12T16:17:03.080209210+00:00 stderr F I1212 16:17:03.080136 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:17:03.080209210+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:17:03.080209210+00:00 stderr F status: "False" 2025-12-12T16:17:03.080209210+00:00 stderr F type: Degraded 2025-12-12T16:17:03.080209210+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:17:03.080209210+00:00 stderr F status: "True" 2025-12-12T16:17:03.080209210+00:00 stderr F type: Upgradeable 2025-12-12T16:17:03.080209210+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:17:03.080209210+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:17:03.080209210+00:00 stderr F reason: Unknown 2025-12-12T16:17:03.080209210+00:00 stderr F status: "False" 2025-12-12T16:17:03.080209210+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:17:03.080209210+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:17:03.080209210+00:00 stderr F status: "False" 2025-12-12T16:17:03.080209210+00:00 stderr F type: Progressing 2025-12-12T16:17:03.080209210+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:17:03.080209210+00:00 stderr F status: "True" 2025-12-12T16:17:03.080209210+00:00 stderr F type: Available 2025-12-12T16:17:20.721881312+00:00 stderr F I1212 16:17:20.721799 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:17:20.733716832+00:00 stderr F I1212 16:17:20.733617 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:17:20.738795137+00:00 stderr F I1212 16:17:20.738760 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:20.744851311+00:00 stderr F I1212 16:17:20.744679 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:20.750682728+00:00 stderr F I1212 16:17:20.750635 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:20.756765203+00:00 stderr F I1212 16:17:20.756630 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:17:20.763865037+00:00 stderr F I1212 16:17:20.762794 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:20.773441191+00:00 stderr F I1212 16:17:20.772380 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:17:20.778621410+00:00 stderr F I1212 16:17:20.778575 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:17:20.786450095+00:00 stderr F I1212 16:17:20.786408 1 log.go:245] The check 
PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.356477203+00:00 stderr F I1212 16:17:27.356391 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.362872376+00:00 stderr F I1212 16:17:27.362802 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.400605549+00:00 stderr F I1212 16:17:27.400551 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.408913007+00:00 stderr F I1212 16:17:27.408308 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.414765625+00:00 stderr F I1212 16:17:27.414646 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.422638091+00:00 stderr F I1212 16:17:27.422568 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.429423266+00:00 stderr F I1212 16:17:27.429359 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.438118915+00:00 stderr F I1212 16:17:27.438052 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.446293490+00:00 stderr F I1212 16:17:27.446231 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.452162118+00:00 stderr F I1212 16:17:27.452115 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.559098546+00:00 stderr F I1212 16:17:27.558321 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.758551189+00:00 stderr F I1212 16:17:27.758479 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:17:27.956780926+00:00 stderr F I1212 16:17:27.956017 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:28.155969081+00:00 stderr F I1212 16:17:28.155895 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:28.354597660+00:00 stderr F I1212 16:17:28.354509 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:28.557109221+00:00 
stderr F I1212 16:17:28.557042 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:17:28.756768698+00:00 stderr F I1212 16:17:28.756707 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:28.958337281+00:00 stderr F I1212 16:17:28.958246 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:17:29.158138154+00:00 stderr F I1212 16:17:29.158059 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:17:29.358915474+00:00 stderr F I1212 16:17:29.358858 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:17:29.556294977+00:00 stderr F I1212 16:17:29.556165 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:17:33.627969977+00:00 stderr F I1212 16:17:33.627252 1 log.go:245] Reconciling update to IngressController openshift-ingress-operator/default 2025-12-12T16:17:44.877769975+00:00 stderr F I1212 16:17:44.877701 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/client-ca-custom' 2025-12-12T16:17:44.891191457+00:00 stderr F I1212 16:17:44.891070 1 log.go:245] configmap 'openshift-config/client-ca-custom' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:17:46.320358520+00:00 stderr F I1212 16:17:46.319818 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.319750605 +0000 UTC))" 2025-12-12T16:17:46.320358520+00:00 stderr F I1212 16:17:46.319881 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.319867728 +0000 UTC))" 2025-12-12T16:17:46.320358520+00:00 stderr F I1212 16:17:46.319898 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.319888078 +0000 UTC))" 2025-12-12T16:17:46.320358520+00:00 stderr F I1212 16:17:46.319914 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 
certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.319904779 +0000 UTC))" 2025-12-12T16:17:46.320358520+00:00 stderr F I1212 16:17:46.319931 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.319920839 +0000 UTC))" 2025-12-12T16:17:46.320358520+00:00 stderr F I1212 16:17:46.319950 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.319939139 +0000 UTC))" 2025-12-12T16:17:46.320358520+00:00 stderr F I1212 16:17:46.319966 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.31995562 +0000 UTC))" 2025-12-12T16:17:46.320358520+00:00 stderr F I1212 16:17:46.319982 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.3199706 +0000 UTC))" 2025-12-12T16:17:46.320358520+00:00 stderr F I1212 16:17:46.320002 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.319989951 +0000 UTC))" 2025-12-12T16:17:46.320358520+00:00 stderr F I1212 16:17:46.320026 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.320012461 +0000 UTC))" 2025-12-12T16:17:46.320358520+00:00 stderr F I1212 16:17:46.320054 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 
certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.320038642 +0000 UTC))" 2025-12-12T16:17:46.320358520+00:00 stderr F I1212 16:17:46.320339 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"*.metrics.openshift-network-operator.svc\" [serving] validServingFor=[*.metrics.openshift-network-operator.svc,*.metrics.openshift-network-operator.svc.cluster.local,metrics.openshift-network-operator.svc,metrics.openshift-network-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:19 +0000 UTC to 2027-11-02 07:52:20 +0000 UTC (now=2025-12-12 16:17:46.320320239 +0000 UTC))" 2025-12-12T16:17:46.320642467+00:00 stderr F I1212 16:17:46.320522 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556183\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556183\" (2025-12-12 15:16:23 +0000 UTC to 2028-12-12 15:16:23 +0000 UTC (now=2025-12-12 16:17:46.320504533 +0000 UTC))" 2025-12-12T16:17:47.090835697+00:00 stderr F I1212 16:17:47.089607 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/admin-kubeconfig-client-ca' 2025-12-12T16:17:47.112594695+00:00 stderr F I1212 16:17:47.112523 1 log.go:245] configmap 'openshift-config/admin-kubeconfig-client-ca' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:18:23.606597936+00:00 stderr F E1212 16:18:23.606513 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-network-operator/leases/network-operator-lock?timeout=4m0s": dial tcp 38.102.83.180:6443: connect: connection refused, falling back to slow path 2025-12-12T16:18:23.607047647+00:00 stderr F E1212 16:18:23.607017 1 leaderelection.go:436] error retrieving resource lock openshift-network-operator/network-operator-lock: Get "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-network-operator/leases/network-operator-lock?timeout=4m0s": dial tcp 38.102.83.180:6443: connect: connection refused 2025-12-12T16:18:23.735166164+00:00 stderr F E1212 16:18:23.735087 1 base_controller.go:279] "Unhandled Error" err="cluster-network-operator-ManagementState reconciliation failed: unable to get network operator configuration: Get \"https://api-int.crc.testing:6443/apis/operator.openshift.io/v1/networks/cluster\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:23.743156512+00:00 stderr F E1212 16:18:23.743088 1 base_controller.go:279] "Unhandled Error" err="cluster-network-operator-ManagementState reconciliation failed: unable to get network operator configuration: Get \"https://api-int.crc.testing:6443/apis/operator.openshift.io/v1/networks/cluster\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:23.754888062+00:00 stderr F E1212 16:18:23.754854 1 base_controller.go:279] "Unhandled Error" err="cluster-network-operator-ManagementState reconciliation failed: unable to get network operator configuration: Get \"https://api-int.crc.testing:6443/apis/operator.openshift.io/v1/networks/cluster\": dial tcp 38.102.83.180:6443: connect: connection 
refused" 2025-12-12T16:18:23.776984598+00:00 stderr F E1212 16:18:23.776930 1 base_controller.go:279] "Unhandled Error" err="cluster-network-operator-ManagementState reconciliation failed: unable to get network operator configuration: Get \"https://api-int.crc.testing:6443/apis/operator.openshift.io/v1/networks/cluster\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:23.819590161+00:00 stderr F E1212 16:18:23.819513 1 base_controller.go:279] "Unhandled Error" err="cluster-network-operator-ManagementState reconciliation failed: unable to get network operator configuration: Get \"https://api-int.crc.testing:6443/apis/operator.openshift.io/v1/networks/cluster\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:23.901994609+00:00 stderr F E1212 16:18:23.901925 1 base_controller.go:279] "Unhandled Error" err="cluster-network-operator-ManagementState reconciliation failed: unable to get network operator configuration: Get \"https://api-int.crc.testing:6443/apis/operator.openshift.io/v1/networks/cluster\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:24.064387563+00:00 stderr F E1212 16:18:24.064295 1 base_controller.go:279] "Unhandled Error" err="cluster-network-operator-ManagementState reconciliation failed: unable to get network operator configuration: Get \"https://api-int.crc.testing:6443/apis/operator.openshift.io/v1/networks/cluster\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:24.386913107+00:00 stderr F E1212 16:18:24.386842 1 base_controller.go:279] "Unhandled Error" err="cluster-network-operator-ManagementState reconciliation failed: unable to get network operator configuration: Get \"https://api-int.crc.testing:6443/apis/operator.openshift.io/v1/networks/cluster\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:25.029690318+00:00 stderr F E1212 16:18:25.029594 1 base_controller.go:279] "Unhandled Error" err="cluster-network-operator-ManagementState reconciliation failed: unable to get network operator configuration: Get \"https://api-int.crc.testing:6443/apis/operator.openshift.io/v1/networks/cluster\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:26.312240396+00:00 stderr F E1212 16:18:26.312113 1 base_controller.go:279] "Unhandled Error" err="cluster-network-operator-ManagementState reconciliation failed: unable to get network operator configuration: Get \"https://api-int.crc.testing:6443/apis/operator.openshift.io/v1/networks/cluster\": dial tcp 38.102.83.180:6443: connect: connection refused" 2025-12-12T16:18:33.725305106+00:00 stderr F I1212 16:18:33.725229 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:35.444144231+00:00 stderr F I1212 16:18:35.443924 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:36.740869459+00:00 stderr F I1212 16:18:36.740786 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:45.918692142+00:00 stderr F I1212 16:18:45.918610 1 reflector.go:430] "Caches populated" type="*v1.MachineConfig" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:46.312920818+00:00 stderr F I1212 16:18:46.312842 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 
2025-12-12T16:18:46.329117649+00:00 stderr F I1212 16:18:46.329045 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:18:46.335048705+00:00 stderr F I1212 16:18:46.334998 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:46.340725606+00:00 stderr F I1212 16:18:46.340679 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:46.346211691+00:00 stderr F I1212 16:18:46.346162 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:46.352295762+00:00 stderr F I1212 16:18:46.352233 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:46.360352791+00:00 stderr F I1212 16:18:46.360297 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:46.365947899+00:00 stderr F I1212 16:18:46.365899 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:46.381015692+00:00 stderr F I1212 16:18:46.380909 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:18:46.389972213+00:00 stderr F I1212 16:18:46.389875 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:18:46.397995022+00:00 stderr F I1212 16:18:46.397918 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:46.525521045+00:00 stderr F I1212 16:18:46.525438 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:47.294778183+00:00 stderr F I1212 16:18:47.294682 1 reflector.go:430] "Caches populated" type="*v1alpha1.PodNetworkConnectivityCheck" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:47.390830298+00:00 stderr F I1212 16:18:47.390739 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:47.465941914+00:00 stderr F I1212 16:18:47.465861 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:47.833665895+00:00 stderr F I1212 16:18:47.833582 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:48.127294394+00:00 stderr F I1212 16:18:48.127239 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:48.482871345+00:00 stderr F I1212 16:18:48.482797 1 reflector.go:430] "Caches 
populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:49.355240533+00:00 stderr F I1212 16:18:49.355169 1 reflector.go:430] "Caches populated" type="*v1.Network" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:49.500513284+00:00 stderr F I1212 16:18:49.500428 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:50.260469373+00:00 stderr F I1212 16:18:50.260424 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:50.312001187+00:00 stderr F I1212 16:18:50.311883 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:50.322466355+00:00 stderr F I1212 16:18:50.322403 1 reflector.go:430] "Caches populated" type="*v1.MachineConfigPool" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:50.323079101+00:00 stderr F I1212 16:18:50.323045 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:18:50.328197107+00:00 stderr F I1212 16:18:50.328114 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:50.332925314+00:00 stderr F I1212 16:18:50.332828 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:50.337855026+00:00 stderr F I1212 16:18:50.337816 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:50.344119411+00:00 stderr F I1212 16:18:50.343353 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:50.351313459+00:00 stderr F I1212 16:18:50.351259 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:50.356243780+00:00 stderr F I1212 16:18:50.356200 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:50.363587122+00:00 stderr F I1212 16:18:50.363526 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:18:50.368599736+00:00 stderr F I1212 16:18:50.368542 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:18:50.374765078+00:00 stderr F I1212 16:18:50.374714 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:50.522309266+00:00 stderr F I1212 16:18:50.522226 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 
2025-12-12T16:18:50.751407410+00:00 stderr F I1212 16:18:50.751339 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:51.166103562+00:00 stderr F I1212 16:18:51.166044 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:51.326464576+00:00 stderr F I1212 16:18:51.326383 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:51.335639293+00:00 stderr F I1212 16:18:51.335593 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:18:51.340635877+00:00 stderr F I1212 16:18:51.340600 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:51.344816760+00:00 stderr F I1212 16:18:51.344790 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:51.348538462+00:00 stderr F I1212 16:18:51.348500 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:51.520216946+00:00 stderr F I1212 16:18:51.520139 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:51.721516523+00:00 stderr F I1212 16:18:51.721448 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:51.760090967+00:00 stderr F I1212 16:18:51.760007 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:51.760299692+00:00 stderr F I1212 16:18:51.760283 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:51.762735032+00:00 stderr F I1212 16:18:51.762698 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:51.921434156+00:00 stderr F I1212 16:18:51.921017 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:52.006980351+00:00 stderr F I1212 16:18:52.006909 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:52.007296708+00:00 stderr F I1212 16:18:52.007270 1 log.go:245] Reconciling configmap from openshift-config-managed/trusted-ca-bundle 2025-12-12T16:18:52.009560334+00:00 stderr F I1212 16:18:52.009523 1 log.go:245] trusted-ca-bundle changed, updating 13 configMaps 2025-12-12T16:18:52.009578395+00:00 stderr F I1212 16:18:52.009557 1 log.go:245] ConfigMap openshift-authentication-operator/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:18:52.009585675+00:00 stderr F I1212 16:18:52.009578 1 log.go:245] ConfigMap openshift-authentication/v4-0-config-system-trusted-ca-bundle ca-bundle.crt unchanged, skipping 
2025-12-12T16:18:52.009600875+00:00 stderr F I1212 16:18:52.009595 1 log.go:245] ConfigMap openshift-console-operator/trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:18:52.009641246+00:00 stderr F I1212 16:18:52.009614 1 log.go:245] ConfigMap openshift-console/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:18:52.013557923+00:00 stderr F I1212 16:18:52.009647 1 log.go:245] ConfigMap openshift-image-registry/trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:18:52.013557923+00:00 stderr F I1212 16:18:52.009679 1 log.go:245] ConfigMap openshift-kube-apiserver/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:18:52.013557923+00:00 stderr F I1212 16:18:52.009706 1 log.go:245] ConfigMap openshift-controller-manager/openshift-global-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:18:52.013557923+00:00 stderr F I1212 16:18:52.009734 1 log.go:245] ConfigMap openshift-ingress-operator/trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:18:52.013557923+00:00 stderr F I1212 16:18:52.009802 1 log.go:245] ConfigMap openshift-kube-controller-manager/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:18:52.013557923+00:00 stderr F I1212 16:18:52.009831 1 log.go:245] ConfigMap openshift-machine-api/mao-trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:18:52.013557923+00:00 stderr F I1212 16:18:52.009860 1 log.go:245] ConfigMap openshift-marketplace/marketplace-trusted-ca ca-bundle.crt unchanged, skipping 2025-12-12T16:18:52.013557923+00:00 stderr F I1212 16:18:52.009877 1 log.go:245] ConfigMap openshift-apiserver-operator/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:18:52.013557923+00:00 stderr F I1212 16:18:52.009899 1 log.go:245] ConfigMap openshift-apiserver/trusted-ca-bundle ca-bundle.crt unchanged, skipping 2025-12-12T16:18:52.063832636+00:00 stderr F I1212 16:18:52.063678 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:52.063832636+00:00 stderr F I1212 16:18:52.063823 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:52.066149053+00:00 stderr F I1212 16:18:52.065428 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:18:52.122670351+00:00 stderr F I1212 16:18:52.122599 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:18:52.146226153+00:00 stderr F I1212 16:18:52.146135 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:52.177133877+00:00 stderr F I1212 16:18:52.177053 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:52.324276065+00:00 stderr F I1212 16:18:52.323957 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:18:52.390306288+00:00 stderr F I1212 16:18:52.390221 1 reflector.go:430] "Caches populated" type="*v1.EgressRouter" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:52.521841050+00:00 stderr F I1212 16:18:52.521750 1 log.go:245] The check 
PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:52.702524986+00:00 stderr F I1212 16:18:52.702422 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:52.722267995+00:00 stderr F I1212 16:18:52.722130 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:52.927150630+00:00 stderr F I1212 16:18:52.927074 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:18:52.980375566+00:00 stderr F I1212 16:18:52.980258 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=openshiftapiservers" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:53.121869934+00:00 stderr F I1212 16:18:53.121796 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:53.323087559+00:00 stderr F I1212 16:18:53.323005 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:53.342841197+00:00 stderr F I1212 16:18:53.342768 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:53.343832882+00:00 stderr F I1212 16:18:53.343066 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/admin-acks' 2025-12-12T16:18:53.347597725+00:00 stderr F I1212 16:18:53.347572 1 log.go:245] configmap 'openshift-config/admin-acks' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:18:53.347639176+00:00 stderr F I1212 16:18:53.347628 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/admin-kubeconfig-client-ca' 2025-12-12T16:18:53.352016314+00:00 stderr F I1212 16:18:53.351963 1 log.go:245] configmap 'openshift-config/admin-kubeconfig-client-ca' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:18:53.352016314+00:00 stderr F I1212 16:18:53.352002 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/client-ca-custom' 2025-12-12T16:18:53.355821898+00:00 stderr F I1212 16:18:53.355804 1 log.go:245] configmap 'openshift-config/client-ca-custom' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:18:53.355839648+00:00 stderr F I1212 16:18:53.355830 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/etcd-ca-bundle' 2025-12-12T16:18:53.359390766+00:00 stderr F I1212 16:18:53.359369 1 log.go:245] configmap 'openshift-config/etcd-ca-bundle' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:18:53.359421197+00:00 stderr F I1212 16:18:53.359393 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/etcd-serving-ca' 2025-12-12T16:18:53.364257936+00:00 stderr F I1212 16:18:53.364174 1 log.go:245] configmap 'openshift-config/etcd-serving-ca' name differs from trustedCA of proxy 'cluster' or 
trustedCA not set; reconciliation will be skipped 2025-12-12T16:18:53.364257936+00:00 stderr F I1212 16:18:53.364214 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/initial-kube-apiserver-server-ca' 2025-12-12T16:18:53.368027300+00:00 stderr F I1212 16:18:53.368001 1 log.go:245] configmap 'openshift-config/initial-kube-apiserver-server-ca' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:18:53.368077161+00:00 stderr F I1212 16:18:53.368066 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/installer-images' 2025-12-12T16:18:53.372307425+00:00 stderr F I1212 16:18:53.372274 1 log.go:245] configmap 'openshift-config/installer-images' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:18:53.372356717+00:00 stderr F I1212 16:18:53.372325 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/kube-root-ca.crt' 2025-12-12T16:18:53.377570416+00:00 stderr F I1212 16:18:53.377458 1 log.go:245] configmap 'openshift-config/kube-root-ca.crt' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:18:53.377570416+00:00 stderr F I1212 16:18:53.377516 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/openshift-install-manifests' 2025-12-12T16:18:53.382373824+00:00 stderr F I1212 16:18:53.382302 1 log.go:245] configmap 'openshift-config/openshift-install-manifests' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:18:53.382373824+00:00 stderr F I1212 16:18:53.382359 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/openshift-service-ca.crt' 2025-12-12T16:18:53.386795054+00:00 stderr F I1212 16:18:53.386762 1 log.go:245] configmap 'openshift-config/openshift-service-ca.crt' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:18:53.386832155+00:00 stderr F I1212 16:18:53.386801 1 log.go:245] Reconciling additional trust bundle configmap 'openshift-config/registry-certs' 2025-12-12T16:18:53.522547830+00:00 stderr F I1212 16:18:53.522481 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:53.552077360+00:00 stderr F I1212 16:18:53.552006 1 log.go:245] configmap 'openshift-config/registry-certs' name differs from trustedCA of proxy 'cluster' or trustedCA not set; reconciliation will be skipped 2025-12-12T16:18:53.720795111+00:00 stderr F I1212 16:18:53.720709 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:53.742598020+00:00 stderr F I1212 16:18:53.742461 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:53.925111162+00:00 stderr F I1212 16:18:53.925038 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:53.941767474+00:00 stderr F I1212 16:18:53.941705 1 reflector.go:430] "Caches populated" type="*v1.DaemonSet" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:53.941932478+00:00 
stderr F I1212 16:18:53.941877 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:18:53.941932478+00:00 stderr F I1212 16:18:53.941922 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:18:53.941955119+00:00 stderr F I1212 16:18:53.941936 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-additional-cni-plugins updated, re-generating status 2025-12-12T16:18:53.941955119+00:00 stderr F I1212 16:18:53.941942 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-additional-cni-plugins updated, re-generating status 2025-12-12T16:18:53.941985160+00:00 stderr F I1212 16:18:53.941968 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/network-metrics-daemon updated, re-generating status 2025-12-12T16:18:53.941985160+00:00 stderr F I1212 16:18:53.941977 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/network-metrics-daemon updated, re-generating status 2025-12-12T16:18:53.941994700+00:00 stderr F I1212 16:18:53.941984 1 pod_watcher.go:132] Operand /, Kind= openshift-network-node-identity/network-node-identity updated, re-generating status 2025-12-12T16:18:53.941994700+00:00 stderr F I1212 16:18:53.941989 1 pod_watcher.go:132] Operand /, Kind= openshift-network-node-identity/network-node-identity updated, re-generating status 2025-12-12T16:18:53.942005520+00:00 stderr F I1212 16:18:53.941997 1 pod_watcher.go:132] Operand /, Kind= openshift-network-operator/iptables-alerter updated, re-generating status 2025-12-12T16:18:53.942005520+00:00 stderr F I1212 16:18:53.942001 1 pod_watcher.go:132] Operand /, Kind= openshift-network-operator/iptables-alerter updated, re-generating status 2025-12-12T16:18:53.942014520+00:00 stderr F I1212 16:18:53.942009 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:18:53.942022961+00:00 stderr F I1212 16:18:53.942013 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:18:54.106447986+00:00 stderr F I1212 16:18:54.106364 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:54.120996495+00:00 stderr F I1212 16:18:54.120913 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:54.323855101+00:00 stderr F I1212 16:18:54.323791 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:18:54.431930362+00:00 stderr F I1212 16:18:54.431839 1 reflector.go:430] "Caches populated" type="*v1.Network" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:54.432133747+00:00 stderr F I1212 16:18:54.432046 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:18:54.523144217+00:00 stderr F I1212 16:18:54.522924 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:18:54.544100906+00:00 stderr F I1212 16:18:54.544032 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:54.723124061+00:00 stderr F I1212 16:18:54.723063 1 
log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:54.921064374+00:00 stderr F I1212 16:18:54.920992 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:54.953903716+00:00 stderr F I1212 16:18:54.953834 1 reflector.go:430] "Caches populated" type="*v1.Network" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:55.122431082+00:00 stderr F I1212 16:18:55.122386 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:18:55.322214301+00:00 stderr F I1212 16:18:55.322140 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:55.522291328+00:00 stderr F I1212 16:18:55.522225 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:55.646396856+00:00 stderr F I1212 16:18:55.646322 1 reflector.go:430] "Caches populated" type="*v1.CertificateSigningRequest" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:55.720838797+00:00 stderr F I1212 16:18:55.720769 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:55.922923043+00:00 stderr F I1212 16:18:55.922838 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:56.122645240+00:00 stderr F I1212 16:18:56.122553 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:56.321785084+00:00 stderr F I1212 16:18:56.321693 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:56.413108692+00:00 stderr F I1212 16:18:56.413041 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:56.522754662+00:00 stderr F I1212 16:18:56.522642 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:18:56.724259184+00:00 stderr F I1212 16:18:56.724135 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:18:56.762729425+00:00 stderr F I1212 16:18:56.762676 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:56.924978997+00:00 stderr F I1212 16:18:56.924908 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:57.120315996+00:00 stderr F I1212 16:18:57.119473 1 log.go:245] The check 
PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:57.326235137+00:00 stderr F I1212 16:18:57.323618 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:18:57.529152353+00:00 stderr F I1212 16:18:57.529037 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:57.541489808+00:00 stderr F I1212 16:18:57.540821 1 reflector.go:430] "Caches populated" type="*v1.CustomResourceDefinition" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:57.722625267+00:00 stderr F I1212 16:18:57.722535 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:57.919864833+00:00 stderr F I1212 16:18:57.919798 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:57.926801434+00:00 stderr F I1212 16:18:57.926747 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:58.120077683+00:00 stderr F I1212 16:18:58.119994 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:58.279320099+00:00 stderr F I1212 16:18:58.278236 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:58.327251604+00:00 stderr F I1212 16:18:58.323497 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:58.522558022+00:00 stderr F I1212 16:18:58.522461 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:58.734279207+00:00 stderr F I1212 16:18:58.733987 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:18:58.922957591+00:00 stderr F I1212 16:18:58.922890 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:18:58.975863069+00:00 stderr F I1212 16:18:58.975788 1 reflector.go:430] "Caches populated" type="*v1.IngressController" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:58.976171217+00:00 stderr F I1212 16:18:58.976145 1 log.go:245] Reconciling update to IngressController openshift-ingress-operator/default 2025-12-12T16:18:59.077094622+00:00 stderr F I1212 16:18:59.077016 1 reflector.go:430] "Caches populated" type="*v1.OperatorPKI" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:59.077258706+00:00 stderr F I1212 16:18:59.077235 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-network-node-identity/network-node-identity 2025-12-12T16:18:59.077782909+00:00 stderr F I1212 16:18:59.077742 1 log.go:245] 
successful reconciliation 2025-12-12T16:18:59.093245321+00:00 stderr F I1212 16:18:59.091524 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/ovn 2025-12-12T16:18:59.093245321+00:00 stderr F I1212 16:18:59.091848 1 log.go:245] successful reconciliation 2025-12-12T16:18:59.104671314+00:00 stderr F I1212 16:18:59.104544 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/signer 2025-12-12T16:18:59.106641162+00:00 stderr F I1212 16:18:59.104944 1 log.go:245] successful reconciliation 2025-12-12T16:18:59.122024263+00:00 stderr F I1212 16:18:59.121949 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:59.202297617+00:00 stderr F I1212 16:18:59.202204 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:18:59.204079481+00:00 stderr F I1212 16:18:59.202407 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-admission-controller updated, re-generating status 2025-12-12T16:18:59.204079481+00:00 stderr F I1212 16:18:59.202425 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus-admission-controller updated, re-generating status 2025-12-12T16:18:59.204079481+00:00 stderr F I1212 16:18:59.202432 1 pod_watcher.go:132] Operand /, Kind= openshift-network-console/networking-console-plugin updated, re-generating status 2025-12-12T16:18:59.204079481+00:00 stderr F I1212 16:18:59.202436 1 pod_watcher.go:132] Operand /, Kind= openshift-network-console/networking-console-plugin updated, re-generating status 2025-12-12T16:18:59.204079481+00:00 stderr F I1212 16:18:59.202441 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:18:59.204079481+00:00 stderr F I1212 16:18:59.202445 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:18:59.321722020+00:00 stderr F I1212 16:18:59.321637 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:18:59.528981254+00:00 stderr F I1212 16:18:59.528902 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:18:59.723819101+00:00 stderr F I1212 16:18:59.723753 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:18:59.922405490+00:00 stderr F I1212 16:18:59.921200 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:00.045130675+00:00 stderr F I1212 16:19:00.045036 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:00.045259878+00:00 stderr F I1212 16:19:00.045207 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:00.046349125+00:00 stderr F I1212 16:19:00.046318 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:00.121330798+00:00 
stderr F I1212 16:19:00.121259 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:00.142306857+00:00 stderr F I1212 16:19:00.142238 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:00.321255821+00:00 stderr F I1212 16:19:00.321171 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:00.526834414+00:00 stderr F I1212 16:19:00.526751 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:19:00.724432759+00:00 stderr F I1212 16:19:00.724355 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:00.895991670+00:00 stderr F I1212 16:19:00.895911 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:00.922748942+00:00 stderr F I1212 16:19:00.922687 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:19:00.948686773+00:00 stderr F I1212 16:19:00.948619 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:01.122614703+00:00 stderr F I1212 16:19:01.121200 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:19:01.322018063+00:00 stderr F I1212 16:19:01.321955 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:19:01.348733833+00:00 stderr F I1212 16:19:01.348654 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:01.468985236+00:00 stderr F I1212 16:19:01.468909 1 reflector.go:430] "Caches populated" type="*v1.StatefulSet" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:01.521157736+00:00 stderr F I1212 16:19:01.521094 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:01.724029072+00:00 stderr F I1212 16:19:01.723949 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:19:01.800968953+00:00 stderr F I1212 16:19:01.800912 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285" 2025-12-12T16:19:01.801162988+00:00 stderr F I1212 16:19:01.801133 1 log.go:245] Reconciling proxy 'cluster' 2025-12-12T16:19:01.804031069+00:00 stderr F I1212 16:19:01.804011 1 log.go:245] httpProxy, httpsProxy and noProxy not defined for proxy 'cluster'; validation will be skipped 2025-12-12T16:19:01.813601505+00:00 stderr F I1212 16:19:01.813542 1 log.go:245] Reconciling proxy 'cluster' complete 
2025-12-12T16:19:01.923294757+00:00 stderr F I1212 16:19:01.922657 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:19:02.121452396+00:00 stderr F I1212 16:19:02.121401 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:02.323048770+00:00 stderr F I1212 16:19:02.322983 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:02.522646225+00:00 stderr F I1212 16:19:02.522583 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:02.723298406+00:00 stderr F I1212 16:19:02.723226 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:19:02.921999098+00:00 stderr F I1212 16:19:02.921952 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:03.123423158+00:00 stderr F I1212 16:19:03.123322 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:19:03.321583217+00:00 stderr F I1212 16:19:03.321515 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:19:03.522232288+00:00 stderr F I1212 16:19:03.522106 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:19:03.720711385+00:00 stderr F I1212 16:19:03.720640 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:05.711338318+00:00 stderr F I1212 16:19:05.701230 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:19:05.711338318+00:00 stderr F I1212 16:19:05.707319 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:19:05.719161441+00:00 stderr F I1212 16:19:05.719052 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:05.727732843+00:00 stderr F I1212 16:19:05.727640 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:05.734620973+00:00 stderr F I1212 16:19:05.734535 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:05.743467322+00:00 stderr F I1212 16:19:05.743415 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n 
openshift-network-diagnostics is applied 2025-12-12T16:19:05.748398754+00:00 stderr F I1212 16:19:05.748346 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:05.753290735+00:00 stderr F I1212 16:19:05.753254 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:19:05.759454607+00:00 stderr F I1212 16:19:05.759417 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:19:05.768556832+00:00 stderr F I1212 16:19:05.768496 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:19:05.920668413+00:00 stderr F I1212 16:19:05.920569 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:24.010352578+00:00 stderr F I1212 16:19:24.010298 1 log.go:245] Reconciling update to IngressController openshift-ingress-operator/default 2025-12-12T16:19:35.712465110+00:00 stderr F I1212 16:19:35.712372 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:19:35.718757248+00:00 stderr F I1212 16:19:35.718670 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:19:35.724135083+00:00 stderr F I1212 16:19:35.724052 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:35.728859021+00:00 stderr F I1212 16:19:35.728794 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:35.733009956+00:00 stderr F I1212 16:19:35.732967 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:35.740218947+00:00 stderr F I1212 16:19:35.740140 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:19:35.745238263+00:00 stderr F I1212 16:19:35.745200 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:19:35.750460974+00:00 stderr F I1212 16:19:35.750414 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:19:35.755914111+00:00 stderr F I1212 16:19:35.755861 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:19:35.760492206+00:00 stderr F I1212 16:19:35.760456 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 
2025-12-12T16:19:35.909693962+00:00 stderr F I1212 16:19:35.909591 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:20:02.260267039+00:00 stderr F I1212 16:20:02.260163 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:20:02.548947487+00:00 stderr F I1212 16:20:02.548890 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:20:02.550577998+00:00 stderr F I1212 16:20:02.550512 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:20:02.552030734+00:00 stderr F I1212 16:20:02.552001 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:20:02.553516291+00:00 stderr F I1212 16:20:02.553477 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc004a63d40 DisableUDPAggregation:false DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:20:02.557210374+00:00 stderr F I1212 16:20:02.557158 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 3 -> 3 2025-12-12T16:20:02.557236655+00:00 stderr F I1212 16:20:02.557209 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:20:02.557236655+00:00 stderr F I1212 16:20:02.557219 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:20:02.559992964+00:00 stderr F I1212 16:20:02.559947 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 2 -> 2 2025-12-12T16:20:02.559992964+00:00 stderr F I1212 16:20:02.559969 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:20:02.559992964+00:00 stderr F I1212 16:20:02.559975 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 2 -> 2 2025-12-12T16:20:02.559992964+00:00 stderr F I1212 16:20:02.559981 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:20:02.560016355+00:00 stderr F I1212 16:20:02.559999 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:20:02.563307107+00:00 stderr F I1212 16:20:02.563252 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:20:02.563307107+00:00 stderr F I1212 16:20:02.563284 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:20:02.565191555+00:00 stderr F I1212 16:20:02.565098 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:20:02.575700508+00:00 stderr F I1212 16:20:02.575669 1 log.go:245] Apply / Create of (operator.openshift.io/v1, Kind=Network) /cluster was successful 
2025-12-12T16:20:02.575700508+00:00 stderr F I1212 16:20:02.575694 1 log.go:245] Starting render phase 2025-12-12T16:20:02.576386866+00:00 stderr F I1212 16:20:02.576350 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:20:02.595922206+00:00 stderr F I1212 16:20:02.595835 1 ovn_kubernetes.go:344] OVN_EGRESSIP_HEALTHCHECK_PORT env var is not defined. Using: 9107 2025-12-12T16:20:02.623598171+00:00 stderr F I1212 16:20:02.623518 1 ovn_kubernetes.go:1457] IP family mode: node=single-stack, controlPlane=single-stack 2025-12-12T16:20:02.623598171+00:00 stderr F I1212 16:20:02.623542 1 ovn_kubernetes.go:1429] IP family change: updateNode=true, updateControlPlane=true 2025-12-12T16:20:02.623598171+00:00 stderr F I1212 16:20:02.623563 1 ovn_kubernetes.go:1601] OVN-Kubernetes control-plane and node already at release version 4.20.1; no changes required 2025-12-12T16:20:02.623598171+00:00 stderr F I1212 16:20:02.623584 1 ovn_kubernetes.go:531] ovnk components: ovnkube-node: isRunning=true, update=true; ovnkube-control-plane: isRunning=true, update=true 2025-12-12T16:20:02.678577270+00:00 stderr F I1212 16:20:02.678299 1 ovn_kubernetes.go:1693] daemonset openshift-network-node-identity/network-node-identity rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 1 -> 1 2025-12-12T16:20:02.678577270+00:00 stderr F I1212 16:20:02.678325 1 ovn_kubernetes.go:1698] daemonset openshift-network-node-identity/network-node-identity rollout complete 2025-12-12T16:20:02.886663535+00:00 stderr F I1212 16:20:02.886591 1 log.go:245] Render phase done, rendered 126 objects 2025-12-12T16:20:02.899766884+00:00 stderr F I1212 16:20:02.899663 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster 2025-12-12T16:20:02.904689738+00:00 stderr F I1212 16:20:02.904633 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster was successful 2025-12-12T16:20:02.904719418+00:00 stderr F I1212 16:20:02.904694 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io 2025-12-12T16:20:02.918986117+00:00 stderr F I1212 16:20:02.918923 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io was successful 2025-12-12T16:20:02.919025678+00:00 stderr F I1212 16:20:02.918988 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io 2025-12-12T16:20:02.925800328+00:00 stderr F I1212 16:20:02.925620 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io was successful 2025-12-12T16:20:02.925800328+00:00 stderr F I1212 16:20:02.925671 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io 2025-12-12T16:20:02.932001583+00:00 stderr F I1212 16:20:02.931966 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io was successful 2025-12-12T16:20:02.932029154+00:00 stderr F I1212 16:20:02.931999 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io 2025-12-12T16:20:02.937335007+00:00 stderr F I1212 16:20:02.937290 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, 
Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io was successful 2025-12-12T16:20:02.937335007+00:00 stderr F I1212 16:20:02.937324 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-multus 2025-12-12T16:20:02.941890612+00:00 stderr F I1212 16:20:02.941848 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-multus was successful 2025-12-12T16:20:02.941909662+00:00 stderr F I1212 16:20:02.941896 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus 2025-12-12T16:20:02.945970974+00:00 stderr F I1212 16:20:02.945928 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus was successful 2025-12-12T16:20:02.945988615+00:00 stderr F I1212 16:20:02.945974 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools 2025-12-12T16:20:02.949832431+00:00 stderr F I1212 16:20:02.949784 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools was successful 2025-12-12T16:20:02.949853722+00:00 stderr F I1212 16:20:02.949837 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus 2025-12-12T16:20:02.953380080+00:00 stderr F I1212 16:20:02.953344 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus was successful 2025-12-12T16:20:02.953397841+00:00 stderr F I1212 16:20:02.953390 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient 2025-12-12T16:20:02.958325534+00:00 stderr F I1212 16:20:02.958278 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient was successful 2025-12-12T16:20:02.958325534+00:00 stderr F I1212 16:20:02.958309 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group 2025-12-12T16:20:03.104189117+00:00 stderr F I1212 16:20:03.104083 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group was successful 2025-12-12T16:20:03.104189117+00:00 stderr F I1212 16:20:03.104137 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools 2025-12-12T16:20:03.304703441+00:00 stderr F I1212 16:20:03.304612 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools was successful 2025-12-12T16:20:03.304703441+00:00 stderr F I1212 16:20:03.304669 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools 2025-12-12T16:20:03.509269057+00:00 stderr F I1212 16:20:03.508758 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools was successful 2025-12-12T16:20:03.509317459+00:00 stderr F I1212 16:20:03.509265 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers 2025-12-12T16:20:03.705859294+00:00 stderr F I1212 16:20:03.705797 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers was successful 2025-12-12T16:20:03.705896595+00:00 stderr F I1212 16:20:03.705859 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts 2025-12-12T16:20:03.906319257+00:00 stderr F I1212 16:20:03.906225 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts was successful 2025-12-12T16:20:03.906319257+00:00 stderr F I1212 
16:20:03.906282 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts 2025-12-12T16:20:04.108081873+00:00 stderr F I1212 16:20:04.108001 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts was successful 2025-12-12T16:20:04.108081873+00:00 stderr F I1212 16:20:04.108073 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni 2025-12-12T16:20:04.307323555+00:00 stderr F I1212 16:20:04.307216 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni was successful 2025-12-12T16:20:04.307323555+00:00 stderr F I1212 16:20:04.307285 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni 2025-12-12T16:20:04.505750317+00:00 stderr F I1212 16:20:04.505669 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni was successful 2025-12-12T16:20:04.505750317+00:00 stderr F I1212 16:20:04.505735 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project 2025-12-12T16:20:04.706254912+00:00 stderr F I1212 16:20:04.706127 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project was successful 2025-12-12T16:20:04.706316794+00:00 stderr F I1212 16:20:04.706299 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist 2025-12-12T16:20:04.905399352+00:00 stderr F I1212 16:20:04.905325 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist was successful 2025-12-12T16:20:04.905399352+00:00 stderr F I1212 16:20:04.905389 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources 2025-12-12T16:20:05.108134002+00:00 stderr F I1212 16:20:05.108035 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources was successful 2025-12-12T16:20:05.108134002+00:00 stderr F I1212 16:20:05.108101 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config 2025-12-12T16:20:05.307428396+00:00 stderr F I1212 16:20:05.307058 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config was successful 2025-12-12T16:20:05.307428396+00:00 stderr F I1212 16:20:05.307120 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config 2025-12-12T16:20:05.505634153+00:00 stderr F I1212 16:20:05.505559 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config was successful 2025-12-12T16:20:05.505634153+00:00 stderr F I1212 16:20:05.505617 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus 2025-12-12T16:20:05.715285967+00:00 stderr F I1212 16:20:05.715227 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus was successful 2025-12-12T16:20:05.715430290+00:00 stderr F I1212 16:20:05.715417 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins 2025-12-12T16:20:05.913787341+00:00 stderr F I1212 16:20:05.913724 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins was successful 2025-12-12T16:20:05.913787341+00:00 stderr F I1212 16:20:05.913774 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa 
2025-12-12T16:20:06.106135821+00:00 stderr F I1212 16:20:06.106067 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa was successful 2025-12-12T16:20:06.106135821+00:00 stderr F I1212 16:20:06.106115 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role 2025-12-12T16:20:06.343560741+00:00 stderr F I1212 16:20:06.309549 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role was successful 2025-12-12T16:20:06.343560741+00:00 stderr F I1212 16:20:06.309595 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding 2025-12-12T16:20:06.504861791+00:00 stderr F I1212 16:20:06.504753 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding was successful 2025-12-12T16:20:06.504861791+00:00 stderr F I1212 16:20:06.504808 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon 2025-12-12T16:20:06.708751200+00:00 stderr F I1212 16:20:06.708668 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon was successful 2025-12-12T16:20:06.708751200+00:00 stderr F I1212 16:20:06.708731 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network 2025-12-12T16:20:06.906358022+00:00 stderr F I1212 16:20:06.906295 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network was successful 2025-12-12T16:20:06.906358022+00:00 stderr F I1212 16:20:06.906347 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/network-metrics-service 2025-12-12T16:20:07.108331133+00:00 stderr F I1212 16:20:07.108154 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/network-metrics-service was successful 2025-12-12T16:20:07.108331133+00:00 stderr F I1212 16:20:07.108236 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:20:07.307694598+00:00 stderr F I1212 16:20:07.307589 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:20:07.307694598+00:00 stderr F I1212 16:20:07.307669 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:20:07.507788482+00:00 stderr F I1212 16:20:07.507686 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:20:07.507788482+00:00 stderr F I1212 16:20:07.507745 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/multus-admission-controller 2025-12-12T16:20:07.706465241+00:00 stderr F I1212 16:20:07.706379 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/multus-admission-controller was successful 2025-12-12T16:20:07.706465241+00:00 stderr F I1212 16:20:07.706431 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ac 2025-12-12T16:20:07.906534094+00:00 stderr F I1212 16:20:07.906431 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ac was successful 2025-12-12T16:20:07.906534094+00:00 stderr F I1212 16:20:07.906496 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook 2025-12-12T16:20:08.104513935+00:00 stderr F I1212 
16:20:08.104431 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook was successful 2025-12-12T16:20:08.104513935+00:00 stderr F I1212 16:20:08.104484 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook 2025-12-12T16:20:08.309229785+00:00 stderr F I1212 16:20:08.309141 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook was successful 2025-12-12T16:20:08.309268446+00:00 stderr F I1212 16:20:08.309225 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io 2025-12-12T16:20:08.511022392+00:00 stderr F I1212 16:20:08.510945 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io was successful 2025-12-12T16:20:08.511022392+00:00 stderr F I1212 16:20:08.511000 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller 2025-12-12T16:20:08.708577142+00:00 stderr F I1212 16:20:08.708453 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller was successful 2025-12-12T16:20:08.708577142+00:00 stderr F I1212 16:20:08.708517 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller 2025-12-12T16:20:08.908686367+00:00 stderr F I1212 16:20:08.908606 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller was successful 2025-12-12T16:20:08.908686367+00:00 stderr F I1212 16:20:08.908661 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:20:09.106994216+00:00 stderr F I1212 16:20:09.106900 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:20:09.106994216+00:00 stderr F I1212 16:20:09.106960 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:20:09.305313305+00:00 stderr F I1212 16:20:09.305032 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:20:09.305313305+00:00 stderr F I1212 16:20:09.305308 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules 2025-12-12T16:20:09.507949743+00:00 stderr F I1212 16:20:09.507867 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules was successful 2025-12-12T16:20:09.507949743+00:00 stderr F I1212 16:20:09.507926 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-ovn-kubernetes 2025-12-12T16:20:09.708145729+00:00 stderr F I1212 16:20:09.708077 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-ovn-kubernetes was successful 2025-12-12T16:20:09.708214901+00:00 stderr F I1212 16:20:09.708149 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org 2025-12-12T16:20:09.911561596+00:00 stderr F I1212 16:20:09.911463 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org was successful 2025-12-12T16:20:09.911561596+00:00 stderr F I1212 16:20:09.911530 1 
log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org 2025-12-12T16:20:10.113778963+00:00 stderr F I1212 16:20:10.113685 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org was successful 2025-12-12T16:20:10.113778963+00:00 stderr F I1212 16:20:10.113741 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org 2025-12-12T16:20:10.318006971+00:00 stderr F I1212 16:20:10.317909 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org was successful 2025-12-12T16:20:10.318006971+00:00 stderr F I1212 16:20:10.317963 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org 2025-12-12T16:20:10.517375097+00:00 stderr F I1212 16:20:10.516759 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org was successful 2025-12-12T16:20:10.517375097+00:00 stderr F I1212 16:20:10.516820 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org 2025-12-12T16:20:10.710102136+00:00 stderr F I1212 16:20:10.710015 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org was successful 2025-12-12T16:20:10.710102136+00:00 stderr F I1212 16:20:10.710073 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:20:10.941060755+00:00 stderr F I1212 16:20:10.940993 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:20:10.941060755+00:00 stderr F I1212 16:20:10.941047 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:20:11.138792749+00:00 stderr F I1212 16:20:11.138742 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:20:11.138896362+00:00 stderr F I1212 16:20:11.138886 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io 2025-12-12T16:20:11.305996778+00:00 stderr F I1212 16:20:11.305929 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io was successful 2025-12-12T16:20:11.306100060+00:00 stderr F I1212 16:20:11.306090 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org 2025-12-12T16:20:11.528550446+00:00 stderr F I1212 16:20:11.528459 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:20:11.528550446+00:00 stderr F I1212 16:20:11.528527 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org 2025-12-12T16:20:11.741400620+00:00 stderr F I1212 16:20:11.741316 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:20:11.741400620+00:00 stderr F I1212 16:20:11.741378 1 log.go:245] 
reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:20:11.905392677+00:00 stderr F I1212 16:20:11.904895 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:20:11.905428468+00:00 stderr F I1212 16:20:11.905389 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited 2025-12-12T16:20:12.105116762+00:00 stderr F I1212 16:20:12.105007 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:20:12.105116762+00:00 stderr F I1212 16:20:12.105060 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited 2025-12-12T16:20:12.305553285+00:00 stderr F I1212 16:20:12.305488 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited was successful 2025-12-12T16:20:12.305593586+00:00 stderr F I1212 16:20:12.305552 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited 2025-12-12T16:20:12.506973962+00:00 stderr F I1212 16:20:12.506895 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:20:12.506973962+00:00 stderr F I1212 16:20:12.506946 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited 2025-12-12T16:20:12.706112052+00:00 stderr F I1212 16:20:12.706031 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited was successful 2025-12-12T16:20:12.706112052+00:00 stderr F I1212 16:20:12.706086 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy 2025-12-12T16:20:12.905471147+00:00 stderr F I1212 16:20:12.905396 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy was successful 2025-12-12T16:20:12.905471147+00:00 stderr F I1212 16:20:12.905462 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy 2025-12-12T16:20:13.107270964+00:00 stderr F I1212 16:20:13.107214 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy was successful 2025-12-12T16:20:13.107270964+00:00 stderr F I1212 16:20:13.107260 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config 2025-12-12T16:20:13.305960233+00:00 stderr F I1212 16:20:13.305830 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config was successful 2025-12-12T16:20:13.305960233+00:00 stderr F I1212 16:20:13.305896 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:20:13.504236811+00:00 stderr F I1212 16:20:13.504123 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:20:13.504236811+00:00 stderr F I1212 16:20:13.504173 1 log.go:245] reconciling 
(rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:20:13.708941641+00:00 stderr F I1212 16:20:13.708867 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:20:13.708941641+00:00 stderr F I1212 16:20:13.708931 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:20:13.904715896+00:00 stderr F I1212 16:20:13.904640 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:20:13.904715896+00:00 stderr F I1212 16:20:13.904697 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:20:14.106414480+00:00 stderr F I1212 16:20:14.106346 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:20:14.106467901+00:00 stderr F I1212 16:20:14.106408 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:20:14.305062538+00:00 stderr F I1212 16:20:14.304923 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:20:14.305062538+00:00 stderr F I1212 16:20:14.304980 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn 2025-12-12T16:20:14.504845424+00:00 stderr F I1212 16:20:14.504734 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn was successful 2025-12-12T16:20:14.504845424+00:00 stderr F I1212 16:20:14.504796 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer 2025-12-12T16:20:14.709026271+00:00 stderr F I1212 16:20:14.708960 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer was successful 2025-12-12T16:20:14.709026271+00:00 stderr F I1212 16:20:14.709019 1 log.go:245] reconciling (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes 2025-12-12T16:20:14.905878654+00:00 stderr F I1212 16:20:14.905749 1 log.go:245] Apply / Create of (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes was successful 2025-12-12T16:20:14.905878654+00:00 stderr F I1212 16:20:14.905826 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader 2025-12-12T16:20:15.107169648+00:00 stderr F I1212 16:20:15.107054 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader was successful 2025-12-12T16:20:15.107169648+00:00 stderr F I1212 16:20:15.107120 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib 2025-12-12T16:20:15.319474008+00:00 stderr F I1212 16:20:15.319394 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib was successful 2025-12-12T16:20:15.319531380+00:00 stderr F I1212 16:20:15.319519 1 log.go:245] reconciling 
(rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor 2025-12-12T16:20:15.505542900+00:00 stderr F I1212 16:20:15.505432 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor was successful 2025-12-12T16:20:15.505542900+00:00 stderr F I1212 16:20:15.505490 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer 2025-12-12T16:20:15.705015708+00:00 stderr F I1212 16:20:15.704942 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer was successful 2025-12-12T16:20:15.705015708+00:00 stderr F I1212 16:20:15.704993 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules 2025-12-12T16:20:15.907634096+00:00 stderr F I1212 16:20:15.907570 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules was successful 2025-12-12T16:20:15.907684147+00:00 stderr F I1212 16:20:15.907632 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules 2025-12-12T16:20:16.113476994+00:00 stderr F I1212 16:20:16.113380 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules was successful 2025-12-12T16:20:16.113476994+00:00 stderr F I1212 16:20:16.113446 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features 2025-12-12T16:20:16.305948047+00:00 stderr F I1212 16:20:16.305778 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features was successful 2025-12-12T16:20:16.305948047+00:00 stderr F I1212 16:20:16.305862 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics 2025-12-12T16:20:16.505489257+00:00 stderr F I1212 16:20:16.505415 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics was successful 2025-12-12T16:20:16.505539398+00:00 stderr F I1212 16:20:16.505485 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:20:16.704534914+00:00 stderr F I1212 16:20:16.704479 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:20:16.704534914+00:00 stderr F I1212 16:20:16.704529 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node 2025-12-12T16:20:16.906906366+00:00 stderr F I1212 16:20:16.906816 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node was successful 2025-12-12T16:20:16.906906366+00:00 stderr F I1212 16:20:16.906878 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:20:17.105020069+00:00 stderr F I1212 16:20:17.104914 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:20:17.105020069+00:00 stderr F I1212 16:20:17.104972 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:20:17.305160594+00:00 stderr F I1212 16:20:17.305085 1 log.go:245] Apply / 
Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:20:17.305160594+00:00 stderr F I1212 16:20:17.305141 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:20:17.505224437+00:00 stderr F I1212 16:20:17.505134 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:20:17.505224437+00:00 stderr F I1212 16:20:17.505214 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-host-network 2025-12-12T16:20:17.706579603+00:00 stderr F I1212 16:20:17.706503 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-host-network was successful 2025-12-12T16:20:17.706579603+00:00 stderr F I1212 16:20:17.706555 1 log.go:245] reconciling (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas 2025-12-12T16:20:17.907362304+00:00 stderr F I1212 16:20:17.907289 1 log.go:245] Apply / Create of (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas was successful 2025-12-12T16:20:17.907362304+00:00 stderr F I1212 16:20:17.907344 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane 2025-12-12T16:20:18.115397038+00:00 stderr F I1212 16:20:18.115330 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane was successful 2025-12-12T16:20:18.118402123+00:00 stderr F I1212 16:20:18.115976 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node 2025-12-12T16:20:18.322729133+00:00 stderr F I1212 16:20:18.322651 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node was successful 2025-12-12T16:20:18.322729133+00:00 stderr F I1212 16:20:18.322713 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label 2025-12-12T16:20:18.508775345+00:00 stderr F I1212 16:20:18.508696 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label was successful 2025-12-12T16:20:18.508775345+00:00 stderr F I1212 16:20:18.508750 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding 2025-12-12T16:20:18.709288959+00:00 stderr F I1212 16:20:18.709171 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding was successful 2025-12-12T16:20:18.709288959+00:00 stderr F I1212 16:20:18.709246 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-diagnostics 2025-12-12T16:20:18.906669185+00:00 stderr F I1212 16:20:18.906616 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-diagnostics was successful 2025-12-12T16:20:18.906796078+00:00 stderr F I1212 16:20:18.906785 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:20:19.105560679+00:00 stderr F I1212 16:20:19.104893 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:20:19.105560679+00:00 stderr F I1212 16:20:19.105532 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) 
openshift-network-diagnostics/network-diagnostics 2025-12-12T16:20:19.305917019+00:00 stderr F I1212 16:20:19.305871 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:20:19.306001512+00:00 stderr F I1212 16:20:19.305991 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:20:19.509281765+00:00 stderr F I1212 16:20:19.505520 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:20:19.509281765+00:00 stderr F I1212 16:20:19.505573 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics 2025-12-12T16:20:19.704482887+00:00 stderr F I1212 16:20:19.704356 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics was successful 2025-12-12T16:20:19.704482887+00:00 stderr F I1212 16:20:19.704425 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics 2025-12-12T16:20:19.905316129+00:00 stderr F I1212 16:20:19.905227 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics was successful 2025-12-12T16:20:19.905316129+00:00 stderr F I1212 16:20:19.905278 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics 2025-12-12T16:20:20.104145322+00:00 stderr F I1212 16:20:20.104075 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics was successful 2025-12-12T16:20:20.104145322+00:00 stderr F I1212 16:20:20.104130 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source 2025-12-12T16:20:20.312096703+00:00 stderr F I1212 16:20:20.311983 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:20:20.312096703+00:00 stderr F I1212 16:20:20.312043 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-source 2025-12-12T16:20:20.507383366+00:00 stderr F I1212 16:20:20.507144 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:20:20.507383366+00:00 stderr F I1212 16:20:20.507253 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source 2025-12-12T16:20:20.706413892+00:00 stderr F I1212 16:20:20.706323 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:20:20.706413892+00:00 stderr F I1212 16:20:20.706377 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:20:20.904664220+00:00 stderr F I1212 16:20:20.904592 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:20:20.904723382+00:00 stderr F I1212 16:20:20.904673 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:20:21.107362329+00:00 stderr F I1212 16:20:21.106453 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, 
Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:20:21.107362329+00:00 stderr F I1212 16:20:21.106516 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target 2025-12-12T16:20:21.312360577+00:00 stderr F I1212 16:20:21.312269 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:20:21.312360577+00:00 stderr F I1212 16:20:21.312330 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-target 2025-12-12T16:20:21.507303101+00:00 stderr F I1212 16:20:21.507222 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:20:21.507303101+00:00 stderr F I1212 16:20:21.507291 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role 2025-12-12T16:20:21.709147419+00:00 stderr F I1212 16:20:21.708736 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role was successful 2025-12-12T16:20:21.709147419+00:00 stderr F I1212 16:20:21.708833 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding 2025-12-12T16:20:21.908129915+00:00 stderr F I1212 16:20:21.908053 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding was successful 2025-12-12T16:20:21.908129915+00:00 stderr F I1212 16:20:21.908120 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-node-identity 2025-12-12T16:20:22.110513387+00:00 stderr F I1212 16:20:22.110425 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-node-identity was successful 2025-12-12T16:20:22.110513387+00:00 stderr F I1212 16:20:22.110505 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity 2025-12-12T16:20:22.304892977+00:00 stderr F I1212 16:20:22.304814 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:20:22.304892977+00:00 stderr F I1212 16:20:22.304869 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity 2025-12-12T16:20:22.504723465+00:00 stderr F I1212 16:20:22.504662 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity was successful 2025-12-12T16:20:22.504723465+00:00 stderr F I1212 16:20:22.504715 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity 2025-12-12T16:20:22.704305676+00:00 stderr F I1212 16:20:22.704157 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity was successful 2025-12-12T16:20:22.704305676+00:00 stderr F I1212 16:20:22.704246 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:20:22.905690342+00:00 stderr F I1212 16:20:22.905618 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:20:22.905690342+00:00 stderr F I1212 16:20:22.905669 1 log.go:245] reconciling 
(rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:20:23.105248013+00:00 stderr F I1212 16:20:23.105115 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:20:23.105248013+00:00 stderr F I1212 16:20:23.105170 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 2025-12-12T16:20:23.304835374+00:00 stderr F I1212 16:20:23.304764 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 was successful 2025-12-12T16:20:23.304835374+00:00 stderr F I1212 16:20:23.304823 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm 2025-12-12T16:20:23.504894157+00:00 stderr F I1212 16:20:23.504804 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm was successful 2025-12-12T16:20:23.504894157+00:00 stderr F I1212 16:20:23.504867 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity 2025-12-12T16:20:23.705731030+00:00 stderr F I1212 16:20:23.705658 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:20:23.705731030+00:00 stderr F I1212 16:20:23.705719 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io 2025-12-12T16:20:23.905974218+00:00 stderr F I1212 16:20:23.905878 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io was successful 2025-12-12T16:20:23.905974218+00:00 stderr F I1212 16:20:23.905937 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity 2025-12-12T16:20:24.112805910+00:00 stderr F I1212 16:20:24.112733 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:20:24.112805910+00:00 stderr F I1212 16:20:24.112796 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules 2025-12-12T16:20:24.308164515+00:00 stderr F I1212 16:20:24.306357 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules was successful 2025-12-12T16:20:24.308164515+00:00 stderr F I1212 16:20:24.306421 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter 2025-12-12T16:20:24.505686874+00:00 stderr F I1212 16:20:24.505621 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter was successful 2025-12-12T16:20:24.505686874+00:00 stderr F I1212 16:20:24.505674 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter 2025-12-12T16:20:24.705407559+00:00 stderr F I1212 16:20:24.705330 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:20:24.705469190+00:00 stderr F I1212 16:20:24.705405 1 log.go:245] 
reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter 2025-12-12T16:20:24.905294088+00:00 stderr F I1212 16:20:24.904823 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter was successful 2025-12-12T16:20:24.905294088+00:00 stderr F I1212 16:20:24.904887 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script 2025-12-12T16:20:25.107114995+00:00 stderr F I1212 16:20:25.107039 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script was successful 2025-12-12T16:20:25.107114995+00:00 stderr F I1212 16:20:25.107102 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter 2025-12-12T16:20:25.309193019+00:00 stderr F I1212 16:20:25.308716 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:20:25.309193019+00:00 stderr F I1212 16:20:25.309138 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-console 2025-12-12T16:20:25.510888973+00:00 stderr F I1212 16:20:25.510785 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-console was successful 2025-12-12T16:20:25.510888973+00:00 stderr F I1212 16:20:25.510872 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin 2025-12-12T16:20:25.706865093+00:00 stderr F I1212 16:20:25.706799 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:20:25.706935455+00:00 stderr F I1212 16:20:25.706873 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin 2025-12-12T16:20:25.910620989+00:00 stderr F I1212 16:20:25.910537 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:20:25.910620989+00:00 stderr F I1212 16:20:25.910602 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-console/networking-console-plugin 2025-12-12T16:20:26.106443146+00:00 stderr F I1212 16:20:26.106383 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:20:26.106443146+00:00 stderr F I1212 16:20:26.106431 1 log.go:245] reconciling (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin 2025-12-12T16:20:26.307527505+00:00 stderr F I1212 16:20:26.307467 1 log.go:245] Apply / Create of (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin was successful 2025-12-12T16:20:26.320799858+00:00 stderr F I1212 16:20:26.320739 1 log.go:245] Operconfig Controller complete 2025-12-12T16:21:25.829447233+00:00 stderr F I1212 16:21:25.829368 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-network-node-identity/network-node-identity 2025-12-12T16:21:25.830148440+00:00 stderr F I1212 16:21:25.830102 1 log.go:245] successful reconciliation 2025-12-12T16:21:29.233004903+00:00 stderr F I1212 16:21:29.232875 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/ovn 2025-12-12T16:21:29.233329491+00:00 stderr F I1212 16:21:29.233289 1 log.go:245] successful reconciliation 2025-12-12T16:21:32.229337282+00:00 stderr F I1212 16:21:32.229227 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/signer 
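The entries above show the Operconfig controller completing its first pass and the operator re-reconciling its pki.network.operator.openshift.io objects. As an illustrative follow-up (standard oc invocations against the namespaces named in this log, not part of the captured output), the reconciled state could be cross-checked from the CLI if the cluster is still reachable:

  # overall operator health as reported to the cluster-version operator
  $ oc get clusteroperator network
  # the operator configuration object this controller is reconciling
  $ oc get network.operator.openshift.io cluster -o yaml
  # the operator pod emitting these log lines
  $ oc -n openshift-network-operator get pods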
2025-12-12T16:21:32.229760922+00:00 stderr F I1212 16:21:32.229703 1 log.go:245] successful reconciliation 2025-12-12T16:21:50.711534805+00:00 stderr F I1212 16:21:50.710768 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:21:50.715933791+00:00 stderr F I1212 16:21:50.715886 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:21:50.720502101+00:00 stderr F I1212 16:21:50.720455 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:21:50.725350558+00:00 stderr F I1212 16:21:50.725284 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:21:50.730119314+00:00 stderr F I1212 16:21:50.730056 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:21:50.736990544+00:00 stderr F I1212 16:21:50.736924 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:21:50.751128086+00:00 stderr F I1212 16:21:50.750118 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:21:50.757625417+00:00 stderr F I1212 16:21:50.757571 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:21:50.762380312+00:00 stderr F I1212 16:21:50.762345 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:21:50.767816955+00:00 stderr F I1212 16:21:50.767576 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:21:50.910510278+00:00 stderr F I1212 16:21:50.910447 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:22:24.017987521+00:00 stderr F I1212 16:22:24.017918 1 log.go:245] Reconciling update to IngressController openshift-ingress-operator/default 2025-12-12T16:23:26.322909388+00:00 stderr F I1212 16:23:26.322061 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:23:26.469766708+00:00 stderr F I1212 16:23:26.469657 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:23:26.471961413+00:00 stderr F I1212 16:23:26.471894 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:23:26.473620375+00:00 stderr F I1212 16:23:26.473566 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:23:26.476256771+00:00 stderr F I1212 16:23:26.476167 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc003ffe6c0 DisableUDPAggregation:false 
DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:23:26.481081933+00:00 stderr F I1212 16:23:26.480998 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 3 -> 3 2025-12-12T16:23:26.481081933+00:00 stderr F I1212 16:23:26.481025 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:23:26.481081933+00:00 stderr F I1212 16:23:26.481033 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:23:26.483729270+00:00 stderr F I1212 16:23:26.483664 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 2 -> 2 2025-12-12T16:23:26.483729270+00:00 stderr F I1212 16:23:26.483679 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:23:26.483729270+00:00 stderr F I1212 16:23:26.483684 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 2 -> 2 2025-12-12T16:23:26.483729270+00:00 stderr F I1212 16:23:26.483688 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:23:26.483729270+00:00 stderr F I1212 16:23:26.483712 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:23:26.487568496+00:00 stderr F I1212 16:23:26.487486 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:23:26.487568496+00:00 stderr F I1212 16:23:26.487532 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:23:26.545023414+00:00 stderr F I1212 16:23:26.544952 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:23:26.558986786+00:00 stderr F I1212 16:23:26.558927 1 log.go:245] Apply / Create of (operator.openshift.io/v1, Kind=Network) /cluster was successful 2025-12-12T16:23:26.558986786+00:00 stderr F I1212 16:23:26.558962 1 log.go:245] Starting render phase 2025-12-12T16:23:26.567686725+00:00 stderr F I1212 16:23:26.567636 1 ovn_kubernetes.go:344] OVN_EGRESSIP_HEALTHCHECK_PORT env var is not defined. 
Using: 9107 2025-12-12T16:23:26.604971014+00:00 stderr F I1212 16:23:26.604891 1 ovn_kubernetes.go:1457] IP family mode: node=single-stack, controlPlane=single-stack 2025-12-12T16:23:26.604971014+00:00 stderr F I1212 16:23:26.604925 1 ovn_kubernetes.go:1429] IP family change: updateNode=true, updateControlPlane=true 2025-12-12T16:23:26.604971014+00:00 stderr F I1212 16:23:26.604956 1 ovn_kubernetes.go:1601] OVN-Kubernetes control-plane and node already at release version 4.20.1; no changes required 2025-12-12T16:23:26.605024176+00:00 stderr F I1212 16:23:26.604991 1 ovn_kubernetes.go:531] ovnk components: ovnkube-node: isRunning=true, update=true; ovnkube-control-plane: isRunning=true, update=true 2025-12-12T16:23:26.744069338+00:00 stderr F I1212 16:23:26.743990 1 ovn_kubernetes.go:1693] daemonset openshift-network-node-identity/network-node-identity rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 1 -> 1 2025-12-12T16:23:26.744069338+00:00 stderr F I1212 16:23:26.744031 1 ovn_kubernetes.go:1698] daemonset openshift-network-node-identity/network-node-identity rollout complete 2025-12-12T16:23:26.953932455+00:00 stderr F I1212 16:23:26.953859 1 log.go:245] Render phase done, rendered 126 objects 2025-12-12T16:23:26.964209964+00:00 stderr F I1212 16:23:26.964153 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster 2025-12-12T16:23:26.968108052+00:00 stderr F I1212 16:23:26.968047 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster was successful 2025-12-12T16:23:26.968108052+00:00 stderr F I1212 16:23:26.968071 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io 2025-12-12T16:23:26.973585800+00:00 stderr F I1212 16:23:26.973530 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io was successful 2025-12-12T16:23:26.973613141+00:00 stderr F I1212 16:23:26.973593 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io 2025-12-12T16:23:26.979491189+00:00 stderr F I1212 16:23:26.979440 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io was successful 2025-12-12T16:23:26.979525230+00:00 stderr F I1212 16:23:26.979486 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io 2025-12-12T16:23:26.984854434+00:00 stderr F I1212 16:23:26.984790 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io was successful 2025-12-12T16:23:26.984854434+00:00 stderr F I1212 16:23:26.984821 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io 2025-12-12T16:23:26.992461276+00:00 stderr F I1212 16:23:26.992410 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io was successful 2025-12-12T16:23:26.992461276+00:00 stderr F I1212 16:23:26.992451 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-multus 2025-12-12T16:23:27.000077438+00:00 stderr F I1212 16:23:27.000034 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-multus was successful 2025-12-12T16:23:27.000095428+00:00 stderr F I1212 
16:23:27.000075 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus 2025-12-12T16:23:27.004356135+00:00 stderr F I1212 16:23:27.004303 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus was successful 2025-12-12T16:23:27.004378706+00:00 stderr F I1212 16:23:27.004370 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools 2025-12-12T16:23:27.008649864+00:00 stderr F I1212 16:23:27.008614 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools was successful 2025-12-12T16:23:27.008672884+00:00 stderr F I1212 16:23:27.008654 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus 2025-12-12T16:23:27.012135762+00:00 stderr F I1212 16:23:27.012084 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus was successful 2025-12-12T16:23:27.012135762+00:00 stderr F I1212 16:23:27.012114 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient 2025-12-12T16:23:27.015878516+00:00 stderr F I1212 16:23:27.015820 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient was successful 2025-12-12T16:23:27.015878516+00:00 stderr F I1212 16:23:27.015858 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group 2025-12-12T16:23:27.169544497+00:00 stderr F I1212 16:23:27.169462 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group was successful 2025-12-12T16:23:27.169544497+00:00 stderr F I1212 16:23:27.169511 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools 2025-12-12T16:23:27.371453474+00:00 stderr F I1212 16:23:27.371377 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools was successful 2025-12-12T16:23:27.371509105+00:00 stderr F I1212 16:23:27.371451 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools 2025-12-12T16:23:27.569266768+00:00 stderr F I1212 16:23:27.569200 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools was successful 2025-12-12T16:23:27.569311949+00:00 stderr F I1212 16:23:27.569261 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers 2025-12-12T16:23:27.770377284+00:00 stderr F I1212 16:23:27.770303 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers was successful 2025-12-12T16:23:27.770377284+00:00 stderr F I1212 16:23:27.770355 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts 2025-12-12T16:23:27.970198139+00:00 stderr F I1212 16:23:27.970107 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts was successful 2025-12-12T16:23:27.970198139+00:00 stderr F I1212 16:23:27.970157 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts 2025-12-12T16:23:28.169391527+00:00 stderr F I1212 16:23:28.169319 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts was successful 2025-12-12T16:23:28.169391527+00:00 stderr F I1212 16:23:28.169382 1 log.go:245] reconciling 
(rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni 2025-12-12T16:23:28.370657238+00:00 stderr F I1212 16:23:28.370580 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni was successful 2025-12-12T16:23:28.370657238+00:00 stderr F I1212 16:23:28.370641 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni 2025-12-12T16:23:28.568780700+00:00 stderr F I1212 16:23:28.568707 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni was successful 2025-12-12T16:23:28.568780700+00:00 stderr F I1212 16:23:28.568761 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project 2025-12-12T16:23:28.770068461+00:00 stderr F I1212 16:23:28.769881 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project was successful 2025-12-12T16:23:28.770068461+00:00 stderr F I1212 16:23:28.769948 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist 2025-12-12T16:23:28.971626169+00:00 stderr F I1212 16:23:28.971527 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist was successful 2025-12-12T16:23:28.971626169+00:00 stderr F I1212 16:23:28.971592 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources 2025-12-12T16:23:29.170112080+00:00 stderr F I1212 16:23:29.170018 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources was successful 2025-12-12T16:23:29.170112080+00:00 stderr F I1212 16:23:29.170078 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config 2025-12-12T16:23:29.370952740+00:00 stderr F I1212 16:23:29.370875 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config was successful 2025-12-12T16:23:29.370952740+00:00 stderr F I1212 16:23:29.370928 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config 2025-12-12T16:23:29.570701043+00:00 stderr F I1212 16:23:29.570628 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config was successful 2025-12-12T16:23:29.570761734+00:00 stderr F I1212 16:23:29.570695 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus 2025-12-12T16:23:29.781886343+00:00 stderr F I1212 16:23:29.780736 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus was successful 2025-12-12T16:23:29.781886343+00:00 stderr F I1212 16:23:29.780801 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins 2025-12-12T16:23:29.986759764+00:00 stderr F I1212 16:23:29.986639 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins was successful 2025-12-12T16:23:29.986759764+00:00 stderr F I1212 16:23:29.986691 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa 2025-12-12T16:23:30.171898849+00:00 stderr F I1212 16:23:30.171810 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa was successful 2025-12-12T16:23:30.171898849+00:00 stderr F I1212 16:23:30.171875 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role 2025-12-12T16:23:30.369518476+00:00 stderr F I1212 16:23:30.369428 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role was successful 2025-12-12T16:23:30.369518476+00:00 stderr F I1212 16:23:30.369500 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding 2025-12-12T16:23:30.570164811+00:00 stderr F I1212 16:23:30.570090 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding was successful 2025-12-12T16:23:30.570164811+00:00 stderr F I1212 16:23:30.570147 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon 2025-12-12T16:23:30.773677939+00:00 stderr F I1212 16:23:30.773600 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon was successful 2025-12-12T16:23:30.773677939+00:00 stderr F I1212 16:23:30.773670 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network 2025-12-12T16:23:30.981245378+00:00 stderr F I1212 16:23:30.980380 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network was successful 2025-12-12T16:23:30.981245378+00:00 stderr F I1212 16:23:30.980458 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/network-metrics-service 2025-12-12T16:23:31.170873136+00:00 stderr F I1212 16:23:31.170803 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/network-metrics-service was successful 2025-12-12T16:23:31.170873136+00:00 stderr F I1212 16:23:31.170855 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:23:31.372877435+00:00 stderr F I1212 16:23:31.372758 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:23:31.372877435+00:00 stderr F I1212 16:23:31.372829 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:23:31.569568770+00:00 stderr F I1212 16:23:31.569469 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:23:31.569568770+00:00 stderr F I1212 16:23:31.569523 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/multus-admission-controller 2025-12-12T16:23:31.771130358+00:00 stderr F I1212 16:23:31.771056 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/multus-admission-controller was successful 2025-12-12T16:23:31.771130358+00:00 stderr F I1212 16:23:31.771117 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ac 2025-12-12T16:23:31.969893486+00:00 stderr F I1212 16:23:31.969812 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ac was successful 2025-12-12T16:23:31.969893486+00:00 stderr F I1212 16:23:31.969863 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook 2025-12-12T16:23:32.169917005+00:00 stderr F I1212 16:23:32.169825 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook was successful 2025-12-12T16:23:32.169917005+00:00 stderr F I1212 16:23:32.169874 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook 2025-12-12T16:23:32.371959996+00:00 stderr F I1212 16:23:32.371834 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook was successful 2025-12-12T16:23:32.371959996+00:00 stderr F I1212 16:23:32.371943 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io 2025-12-12T16:23:32.571923923+00:00 stderr F I1212 16:23:32.571851 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io was successful 2025-12-12T16:23:32.571923923+00:00 stderr F I1212 16:23:32.571903 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller 2025-12-12T16:23:32.774938068+00:00 stderr F I1212 16:23:32.774865 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller was successful 2025-12-12T16:23:32.774938068+00:00 stderr F I1212 16:23:32.774922 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller 2025-12-12T16:23:32.971197032+00:00 stderr F I1212 16:23:32.971102 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller was successful 2025-12-12T16:23:32.971197032+00:00 stderr F I1212 16:23:32.971160 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:23:33.170621397+00:00 stderr F I1212 16:23:33.170528 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:23:33.170621397+00:00 stderr F I1212 16:23:33.170580 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:23:33.370457412+00:00 stderr F I1212 16:23:33.370385 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:23:33.370511733+00:00 stderr F I1212 16:23:33.370488 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules 2025-12-12T16:23:33.569876806+00:00 stderr F I1212 16:23:33.569800 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules was successful 2025-12-12T16:23:33.569876806+00:00 stderr F I1212 16:23:33.569852 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-ovn-kubernetes 2025-12-12T16:23:33.773345272+00:00 stderr F I1212 16:23:33.773262 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-ovn-kubernetes was successful 2025-12-12T16:23:33.773345272+00:00 stderr F I1212 16:23:33.773323 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org 2025-12-12T16:23:33.980639494+00:00 stderr F I1212 16:23:33.980558 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org was successful 2025-12-12T16:23:33.980687765+00:00 stderr F I1212 16:23:33.980648 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org 2025-12-12T16:23:34.176567580+00:00 stderr F I1212 16:23:34.176445 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org was successful 2025-12-12T16:23:34.176567580+00:00 stderr F I1212 16:23:34.176518 1 log.go:245] reconciling (apiextensions.k8s.io/v1, 
Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org 2025-12-12T16:23:34.392859479+00:00 stderr F I1212 16:23:34.392781 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org was successful 2025-12-12T16:23:34.392892160+00:00 stderr F I1212 16:23:34.392855 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org 2025-12-12T16:23:34.588487388+00:00 stderr F I1212 16:23:34.588429 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org was successful 2025-12-12T16:23:34.588539669+00:00 stderr F I1212 16:23:34.588489 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org 2025-12-12T16:23:34.776223178+00:00 stderr F I1212 16:23:34.776139 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org was successful 2025-12-12T16:23:34.776273919+00:00 stderr F I1212 16:23:34.776230 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:23:35.003476573+00:00 stderr F I1212 16:23:35.003360 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:23:35.003623527+00:00 stderr F I1212 16:23:35.003609 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:23:35.202691502+00:00 stderr F I1212 16:23:35.202512 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:23:35.202691502+00:00 stderr F I1212 16:23:35.202595 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io 2025-12-12T16:23:35.377361793+00:00 stderr F I1212 16:23:35.377265 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io was successful 2025-12-12T16:23:35.377361793+00:00 stderr F I1212 16:23:35.377339 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org 2025-12-12T16:23:35.601567301+00:00 stderr F I1212 16:23:35.601481 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:23:35.601567301+00:00 stderr F I1212 16:23:35.601536 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org 2025-12-12T16:23:35.816656670+00:00 stderr F I1212 16:23:35.816580 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:23:35.816710442+00:00 stderr F I1212 16:23:35.816654 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:23:35.971693136+00:00 stderr F I1212 16:23:35.970997 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:23:35.971693136+00:00 stderr F I1212 16:23:35.971673 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) 
openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited 2025-12-12T16:23:36.170858264+00:00 stderr F I1212 16:23:36.170751 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:23:36.170858264+00:00 stderr F I1212 16:23:36.170810 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited 2025-12-12T16:23:36.371228662+00:00 stderr F I1212 16:23:36.371093 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited was successful 2025-12-12T16:23:36.371228662+00:00 stderr F I1212 16:23:36.371159 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited 2025-12-12T16:23:36.570331028+00:00 stderr F I1212 16:23:36.570259 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:23:36.570331028+00:00 stderr F I1212 16:23:36.570314 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited 2025-12-12T16:23:36.769854485+00:00 stderr F I1212 16:23:36.769712 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited was successful 2025-12-12T16:23:36.769854485+00:00 stderr F I1212 16:23:36.769777 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy 2025-12-12T16:23:36.970755176+00:00 stderr F I1212 16:23:36.970169 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy was successful 2025-12-12T16:23:36.970755176+00:00 stderr F I1212 16:23:36.970696 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy 2025-12-12T16:23:37.170515259+00:00 stderr F I1212 16:23:37.170402 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy was successful 2025-12-12T16:23:37.170515259+00:00 stderr F I1212 16:23:37.170458 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config 2025-12-12T16:23:37.371454821+00:00 stderr F I1212 16:23:37.371363 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config was successful 2025-12-12T16:23:37.371454821+00:00 stderr F I1212 16:23:37.371436 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:23:37.569273794+00:00 stderr F I1212 16:23:37.569171 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:23:37.569273794+00:00 stderr F I1212 16:23:37.569242 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:23:37.770808001+00:00 stderr F I1212 16:23:37.770738 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:23:37.770904504+00:00 stderr F I1212 16:23:37.770892 1 log.go:245] reconciling 
(rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:23:37.969372374+00:00 stderr F I1212 16:23:37.969294 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:23:37.969372374+00:00 stderr F I1212 16:23:37.969359 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:23:38.169824574+00:00 stderr F I1212 16:23:38.169775 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:23:38.169933407+00:00 stderr F I1212 16:23:38.169923 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:23:38.371221708+00:00 stderr F I1212 16:23:38.371160 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:23:38.371321831+00:00 stderr F I1212 16:23:38.371312 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn 2025-12-12T16:23:38.569406611+00:00 stderr F I1212 16:23:38.569343 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn was successful 2025-12-12T16:23:38.569406611+00:00 stderr F I1212 16:23:38.569397 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer 2025-12-12T16:23:38.771533304+00:00 stderr F I1212 16:23:38.771482 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer was successful 2025-12-12T16:23:38.771741709+00:00 stderr F I1212 16:23:38.771729 1 log.go:245] reconciling (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes 2025-12-12T16:23:38.970873236+00:00 stderr F I1212 16:23:38.970779 1 log.go:245] Apply / Create of (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes was successful 2025-12-12T16:23:38.970873236+00:00 stderr F I1212 16:23:38.970845 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader 2025-12-12T16:23:39.173922832+00:00 stderr F I1212 16:23:39.173811 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader was successful 2025-12-12T16:23:39.173922832+00:00 stderr F I1212 16:23:39.173878 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib 2025-12-12T16:23:39.372462234+00:00 stderr F I1212 16:23:39.372367 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib was successful 2025-12-12T16:23:39.372462234+00:00 stderr F I1212 16:23:39.372428 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor 2025-12-12T16:23:39.569814746+00:00 stderr F I1212 16:23:39.569746 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor was successful 2025-12-12T16:23:39.569814746+00:00 stderr F I1212 16:23:39.569796 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, 
Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer 2025-12-12T16:23:39.768785219+00:00 stderr F I1212 16:23:39.768710 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer was successful 2025-12-12T16:23:39.768785219+00:00 stderr F I1212 16:23:39.768767 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules 2025-12-12T16:23:39.972354987+00:00 stderr F I1212 16:23:39.972298 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules was successful 2025-12-12T16:23:39.972466850+00:00 stderr F I1212 16:23:39.972455 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules 2025-12-12T16:23:40.173502095+00:00 stderr F I1212 16:23:40.173409 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules was successful 2025-12-12T16:23:40.173502095+00:00 stderr F I1212 16:23:40.173464 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features 2025-12-12T16:23:40.372991520+00:00 stderr F I1212 16:23:40.372875 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features was successful 2025-12-12T16:23:40.372991520+00:00 stderr F I1212 16:23:40.372933 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics 2025-12-12T16:23:40.570999249+00:00 stderr F I1212 16:23:40.570949 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics was successful 2025-12-12T16:23:40.571089381+00:00 stderr F I1212 16:23:40.571079 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:23:40.771501430+00:00 stderr F I1212 16:23:40.771415 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:23:40.771501430+00:00 stderr F I1212 16:23:40.771466 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node 2025-12-12T16:23:40.973945740+00:00 stderr F I1212 16:23:40.973881 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node was successful 2025-12-12T16:23:40.974077703+00:00 stderr F I1212 16:23:40.974067 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:23:41.171928848+00:00 stderr F I1212 16:23:41.171671 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:23:41.172054331+00:00 stderr F I1212 16:23:41.172041 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:23:41.370777687+00:00 stderr F I1212 16:23:41.370675 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:23:41.370777687+00:00 stderr F I1212 16:23:41.370734 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:23:41.571411312+00:00 stderr F I1212 16:23:41.571244 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:23:41.571411312+00:00 stderr F I1212 16:23:41.571324 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-host-network 2025-12-12T16:23:41.774120329+00:00 stderr F I1212 16:23:41.774066 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-host-network was successful 2025-12-12T16:23:41.774190351+00:00 stderr F I1212 16:23:41.774122 1 log.go:245] reconciling (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas 2025-12-12T16:23:41.970255931+00:00 stderr F I1212 16:23:41.970198 1 log.go:245] Apply / Create of (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas was successful 2025-12-12T16:23:41.970342513+00:00 stderr F I1212 16:23:41.970251 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane 2025-12-12T16:23:42.181844101+00:00 stderr F I1212 16:23:42.181766 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane was successful 2025-12-12T16:23:42.181844101+00:00 stderr F I1212 16:23:42.181829 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node 2025-12-12T16:23:42.395444653+00:00 stderr F I1212 16:23:42.395320 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node was successful 2025-12-12T16:23:42.395581096+00:00 stderr F I1212 16:23:42.395519 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label 2025-12-12T16:23:42.571191020+00:00 stderr F I1212 16:23:42.571036 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label was successful 2025-12-12T16:23:42.571191020+00:00 stderr F I1212 16:23:42.571132 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding 2025-12-12T16:23:42.771577078+00:00 stderr F I1212 16:23:42.771490 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding was successful 2025-12-12T16:23:42.771577078+00:00 stderr F I1212 16:23:42.771560 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-diagnostics 2025-12-12T16:23:42.972732446+00:00 stderr F I1212 16:23:42.972623 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-diagnostics was successful 2025-12-12T16:23:42.972732446+00:00 stderr F I1212 16:23:42.972715 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:23:43.172092709+00:00 stderr F I1212 16:23:43.171951 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:23:43.172092709+00:00 stderr F I1212 16:23:43.172014 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:23:43.373128734+00:00 stderr F I1212 16:23:43.373009 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:23:43.373128734+00:00 stderr F I1212 16:23:43.373068 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) 
openshift-network-diagnostics/network-diagnostics 2025-12-12T16:23:43.569135452+00:00 stderr F I1212 16:23:43.569063 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:23:43.569135452+00:00 stderr F I1212 16:23:43.569122 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics 2025-12-12T16:23:43.772225679+00:00 stderr F I1212 16:23:43.772154 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics was successful 2025-12-12T16:23:43.772302151+00:00 stderr F I1212 16:23:43.772270 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics 2025-12-12T16:23:43.973421158+00:00 stderr F I1212 16:23:43.973319 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics was successful 2025-12-12T16:23:43.973483879+00:00 stderr F I1212 16:23:43.973420 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics 2025-12-12T16:23:44.169385245+00:00 stderr F I1212 16:23:44.169266 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics was successful 2025-12-12T16:23:44.169385245+00:00 stderr F I1212 16:23:44.169327 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source 2025-12-12T16:23:44.376159194+00:00 stderr F I1212 16:23:44.376083 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:23:44.377575560+00:00 stderr F I1212 16:23:44.377515 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-source 2025-12-12T16:23:44.572213842+00:00 stderr F I1212 16:23:44.572119 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:23:44.572213842+00:00 stderr F I1212 16:23:44.572204 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source 2025-12-12T16:23:44.772392819+00:00 stderr F I1212 16:23:44.772342 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:23:44.772445740+00:00 stderr F I1212 16:23:44.772434 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:23:44.970262439+00:00 stderr F I1212 16:23:44.969537 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:23:44.970262439+00:00 stderr F I1212 16:23:44.969628 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:23:45.170738614+00:00 stderr F I1212 16:23:45.170663 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:23:45.170738614+00:00 stderr F I1212 16:23:45.170727 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target 2025-12-12T16:23:45.379517696+00:00 stderr F I1212 16:23:45.379421 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) 
openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:23:45.379582907+00:00 stderr F I1212 16:23:45.379517 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-target 2025-12-12T16:23:45.570861173+00:00 stderr F I1212 16:23:45.570737 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:23:45.570861173+00:00 stderr F I1212 16:23:45.570840 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role 2025-12-12T16:23:45.770921918+00:00 stderr F I1212 16:23:45.770834 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role was successful 2025-12-12T16:23:45.770921918+00:00 stderr F I1212 16:23:45.770898 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding 2025-12-12T16:23:45.971315160+00:00 stderr F I1212 16:23:45.971164 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding was successful 2025-12-12T16:23:45.971315160+00:00 stderr F I1212 16:23:45.971278 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-node-identity 2025-12-12T16:23:46.173648132+00:00 stderr F I1212 16:23:46.173547 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-node-identity was successful 2025-12-12T16:23:46.173648132+00:00 stderr F I1212 16:23:46.173603 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity 2025-12-12T16:23:46.370298941+00:00 stderr F I1212 16:23:46.369774 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:23:46.370360463+00:00 stderr F I1212 16:23:46.370317 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity 2025-12-12T16:23:46.569411702+00:00 stderr F I1212 16:23:46.569340 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity was successful 2025-12-12T16:23:46.569411702+00:00 stderr F I1212 16:23:46.569391 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity 2025-12-12T16:23:46.769924878+00:00 stderr F I1212 16:23:46.769848 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity was successful 2025-12-12T16:23:46.769924878+00:00 stderr F I1212 16:23:46.769897 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:23:46.970476164+00:00 stderr F I1212 16:23:46.970397 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:23:46.970476164+00:00 stderr F I1212 16:23:46.970455 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:23:47.171472302+00:00 stderr F I1212 16:23:47.171395 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:23:47.171472302+00:00 stderr F I1212 
16:23:47.171453 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 2025-12-12T16:23:47.371982348+00:00 stderr F I1212 16:23:47.369499 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 was successful 2025-12-12T16:23:47.371982348+00:00 stderr F I1212 16:23:47.371946 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm 2025-12-12T16:23:47.569390627+00:00 stderr F I1212 16:23:47.569286 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm was successful 2025-12-12T16:23:47.569390627+00:00 stderr F I1212 16:23:47.569339 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity 2025-12-12T16:23:47.770000595+00:00 stderr F I1212 16:23:47.769921 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:23:47.770000595+00:00 stderr F I1212 16:23:47.769970 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io 2025-12-12T16:23:47.971920736+00:00 stderr F I1212 16:23:47.971847 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io was successful 2025-12-12T16:23:47.971920736+00:00 stderr F I1212 16:23:47.971903 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity 2025-12-12T16:23:48.175637210+00:00 stderr F I1212 16:23:48.175550 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:23:48.175637210+00:00 stderr F I1212 16:23:48.175608 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules 2025-12-12T16:23:48.370938906+00:00 stderr F I1212 16:23:48.370872 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules was successful 2025-12-12T16:23:48.371010408+00:00 stderr F I1212 16:23:48.370937 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter 2025-12-12T16:23:48.568837507+00:00 stderr F I1212 16:23:48.568766 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter was successful 2025-12-12T16:23:48.568837507+00:00 stderr F I1212 16:23:48.568814 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter 2025-12-12T16:23:48.770022579+00:00 stderr F I1212 16:23:48.769861 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:23:48.770022579+00:00 stderr F I1212 16:23:48.769919 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter 2025-12-12T16:23:48.970576916+00:00 stderr F I1212 16:23:48.970465 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter was successful 2025-12-12T16:23:48.970576916+00:00 stderr F I1212 16:23:48.970543 1 log.go:245] 
reconciling (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script 2025-12-12T16:23:49.171319978+00:00 stderr F I1212 16:23:49.171223 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script was successful 2025-12-12T16:23:49.171319978+00:00 stderr F I1212 16:23:49.171278 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter 2025-12-12T16:23:49.377171797+00:00 stderr F I1212 16:23:49.377072 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:23:49.377171797+00:00 stderr F I1212 16:23:49.377151 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-console 2025-12-12T16:23:49.571607431+00:00 stderr F I1212 16:23:49.571514 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-console was successful 2025-12-12T16:23:49.571607431+00:00 stderr F I1212 16:23:49.571568 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin 2025-12-12T16:23:49.771801159+00:00 stderr F I1212 16:23:49.771730 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:23:49.771801159+00:00 stderr F I1212 16:23:49.771788 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin 2025-12-12T16:23:49.973862043+00:00 stderr F I1212 16:23:49.973783 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:23:49.973906715+00:00 stderr F I1212 16:23:49.973859 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-console/networking-console-plugin 2025-12-12T16:23:50.172106663+00:00 stderr F I1212 16:23:50.171824 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:23:50.172106663+00:00 stderr F I1212 16:23:50.172075 1 log.go:245] reconciling (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin 2025-12-12T16:23:50.371551552+00:00 stderr F I1212 16:23:50.371468 1 log.go:245] Apply / Create of (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin was successful 2025-12-12T16:23:50.384652288+00:00 stderr F I1212 16:23:50.384569 1 log.go:245] Operconfig Controller complete 2025-12-12T16:24:43.050760758+00:00 stderr F I1212 16:24:43.050667 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:25:10.980129405+00:00 stderr F I1212 16:25:10.980073 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:25:11.121442586+00:00 stderr F I1212 16:25:11.121340 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:25:11.123439318+00:00 stderr F I1212 16:25:11.123380 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:25:11.125930713+00:00 stderr F I1212 16:25:11.125877 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:25:11.127683609+00:00 stderr F I1212 16:25:11.127607 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc00418e600 DisableUDPAggregation:false DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: 
DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:25:11.132992349+00:00 stderr F I1212 16:25:11.132915 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 3 -> 3 2025-12-12T16:25:11.132992349+00:00 stderr F I1212 16:25:11.132942 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:25:11.132992349+00:00 stderr F I1212 16:25:11.132951 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:25:11.135850854+00:00 stderr F I1212 16:25:11.135791 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 2 -> 2 2025-12-12T16:25:11.135850854+00:00 stderr F I1212 16:25:11.135814 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:25:11.135850854+00:00 stderr F I1212 16:25:11.135820 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 2 -> 2 2025-12-12T16:25:11.135850854+00:00 stderr F I1212 16:25:11.135825 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:25:11.135850854+00:00 stderr F I1212 16:25:11.135846 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:25:11.139438398+00:00 stderr F I1212 16:25:11.139381 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:25:11.139438398+00:00 stderr F I1212 16:25:11.139403 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:25:11.197930754+00:00 stderr F I1212 16:25:11.197852 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:25:11.209761465+00:00 stderr F I1212 16:25:11.209682 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:25:11.210133085+00:00 stderr F I1212 16:25:11.210087 1 log.go:245] Apply / Create of (operator.openshift.io/v1, Kind=Network) /cluster was successful 2025-12-12T16:25:11.210133085+00:00 stderr F I1212 16:25:11.210120 1 log.go:245] Starting render phase 2025-12-12T16:25:11.222707915+00:00 stderr F I1212 16:25:11.222643 1 ovn_kubernetes.go:344] OVN_EGRESSIP_HEALTHCHECK_PORT env var is not defined. 
Using: 9107 2025-12-12T16:25:11.255910447+00:00 stderr F I1212 16:25:11.255844 1 ovn_kubernetes.go:1457] IP family mode: node=single-stack, controlPlane=single-stack 2025-12-12T16:25:11.255910447+00:00 stderr F I1212 16:25:11.255872 1 ovn_kubernetes.go:1429] IP family change: updateNode=true, updateControlPlane=true 2025-12-12T16:25:11.255910447+00:00 stderr F I1212 16:25:11.255891 1 ovn_kubernetes.go:1601] OVN-Kubernetes control-plane and node already at release version 4.20.1; no changes required 2025-12-12T16:25:11.255994529+00:00 stderr F I1212 16:25:11.255916 1 ovn_kubernetes.go:531] ovnk components: ovnkube-node: isRunning=true, update=true; ovnkube-control-plane: isRunning=true, update=true 2025-12-12T16:25:11.402688481+00:00 stderr F I1212 16:25:11.402558 1 ovn_kubernetes.go:1693] daemonset openshift-network-node-identity/network-node-identity rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 1 -> 1 2025-12-12T16:25:11.402688481+00:00 stderr F I1212 16:25:11.402599 1 ovn_kubernetes.go:1698] daemonset openshift-network-node-identity/network-node-identity rollout complete 2025-12-12T16:25:11.612883061+00:00 stderr F I1212 16:25:11.612784 1 log.go:245] Render phase done, rendered 126 objects 2025-12-12T16:25:11.624780444+00:00 stderr F I1212 16:25:11.624696 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster 2025-12-12T16:25:11.629478607+00:00 stderr F I1212 16:25:11.629404 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster was successful 2025-12-12T16:25:11.629478607+00:00 stderr F I1212 16:25:11.629439 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io 2025-12-12T16:25:11.636760608+00:00 stderr F I1212 16:25:11.636671 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io was successful 2025-12-12T16:25:11.636760608+00:00 stderr F I1212 16:25:11.636708 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io 2025-12-12T16:25:11.644098841+00:00 stderr F I1212 16:25:11.644047 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io was successful 2025-12-12T16:25:11.644098841+00:00 stderr F I1212 16:25:11.644077 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io 2025-12-12T16:25:11.650055367+00:00 stderr F I1212 16:25:11.649986 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io was successful 2025-12-12T16:25:11.650055367+00:00 stderr F I1212 16:25:11.650012 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io 2025-12-12T16:25:11.655555772+00:00 stderr F I1212 16:25:11.655507 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io was successful 2025-12-12T16:25:11.655555772+00:00 stderr F I1212 16:25:11.655533 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-multus 2025-12-12T16:25:11.661383905+00:00 stderr F I1212 16:25:11.661339 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-multus was successful 2025-12-12T16:25:11.661383905+00:00 stderr F I1212 
16:25:11.661362 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus 2025-12-12T16:25:11.665391740+00:00 stderr F I1212 16:25:11.665336 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus was successful 2025-12-12T16:25:11.665391740+00:00 stderr F I1212 16:25:11.665359 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools 2025-12-12T16:25:11.670598127+00:00 stderr F I1212 16:25:11.670504 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools was successful 2025-12-12T16:25:11.670598127+00:00 stderr F I1212 16:25:11.670526 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus 2025-12-12T16:25:11.673871573+00:00 stderr F I1212 16:25:11.673794 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus was successful 2025-12-12T16:25:11.673871573+00:00 stderr F I1212 16:25:11.673815 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient 2025-12-12T16:25:11.677204670+00:00 stderr F I1212 16:25:11.677146 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient was successful 2025-12-12T16:25:11.677225491+00:00 stderr F I1212 16:25:11.677209 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group 2025-12-12T16:25:11.830886586+00:00 stderr F I1212 16:25:11.830768 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group was successful 2025-12-12T16:25:11.830886586+00:00 stderr F I1212 16:25:11.830833 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools 2025-12-12T16:25:12.030537269+00:00 stderr F I1212 16:25:12.030443 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools was successful 2025-12-12T16:25:12.030537269+00:00 stderr F I1212 16:25:12.030517 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools 2025-12-12T16:25:12.231196508+00:00 stderr F I1212 16:25:12.231053 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools was successful 2025-12-12T16:25:12.231196508+00:00 stderr F I1212 16:25:12.231108 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers 2025-12-12T16:25:12.430834240+00:00 stderr F I1212 16:25:12.430730 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers was successful 2025-12-12T16:25:12.430834240+00:00 stderr F I1212 16:25:12.430785 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts 2025-12-12T16:25:12.630031901+00:00 stderr F I1212 16:25:12.629943 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts was successful 2025-12-12T16:25:12.630031901+00:00 stderr F I1212 16:25:12.630009 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts 2025-12-12T16:25:12.830427494+00:00 stderr F I1212 16:25:12.830316 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts was successful 2025-12-12T16:25:12.830427494+00:00 stderr F I1212 16:25:12.830396 1 log.go:245] reconciling 
(rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni 2025-12-12T16:25:13.030454927+00:00 stderr F I1212 16:25:13.030306 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni was successful 2025-12-12T16:25:13.030454927+00:00 stderr F I1212 16:25:13.030357 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni 2025-12-12T16:25:13.230494820+00:00 stderr F I1212 16:25:13.230386 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni was successful 2025-12-12T16:25:13.230494820+00:00 stderr F I1212 16:25:13.230440 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project 2025-12-12T16:25:13.431889248+00:00 stderr F I1212 16:25:13.431811 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project was successful 2025-12-12T16:25:13.431889248+00:00 stderr F I1212 16:25:13.431868 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist 2025-12-12T16:25:13.630839403+00:00 stderr F I1212 16:25:13.630774 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist was successful 2025-12-12T16:25:13.630889414+00:00 stderr F I1212 16:25:13.630835 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources 2025-12-12T16:25:13.832210711+00:00 stderr F I1212 16:25:13.832123 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources was successful 2025-12-12T16:25:13.832263642+00:00 stderr F I1212 16:25:13.832207 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config 2025-12-12T16:25:14.030295842+00:00 stderr F I1212 16:25:14.029976 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config was successful 2025-12-12T16:25:14.030295842+00:00 stderr F I1212 16:25:14.030029 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config 2025-12-12T16:25:14.233059196+00:00 stderr F I1212 16:25:14.232981 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config was successful 2025-12-12T16:25:14.233109338+00:00 stderr F I1212 16:25:14.233057 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus 2025-12-12T16:25:14.444453478+00:00 stderr F I1212 16:25:14.444239 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus was successful 2025-12-12T16:25:14.444453478+00:00 stderr F I1212 16:25:14.444293 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins 2025-12-12T16:25:14.648549617+00:00 stderr F I1212 16:25:14.648467 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins was successful 2025-12-12T16:25:14.648549617+00:00 stderr F I1212 16:25:14.648525 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa 2025-12-12T16:25:14.836277297+00:00 stderr F I1212 16:25:14.835760 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa was successful 2025-12-12T16:25:14.836277297+00:00 stderr F I1212 16:25:14.835822 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role 2025-12-12T16:25:15.030458536+00:00 stderr F I1212 16:25:15.030368 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role was successful 2025-12-12T16:25:15.030458536+00:00 stderr F I1212 16:25:15.030425 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding 2025-12-12T16:25:15.230579361+00:00 stderr F I1212 16:25:15.230445 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding was successful 2025-12-12T16:25:15.230579361+00:00 stderr F I1212 16:25:15.230519 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon 2025-12-12T16:25:15.434254200+00:00 stderr F I1212 16:25:15.434162 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon was successful 2025-12-12T16:25:15.434254200+00:00 stderr F I1212 16:25:15.434234 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network 2025-12-12T16:25:15.630657887+00:00 stderr F I1212 16:25:15.630586 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network was successful 2025-12-12T16:25:15.630657887+00:00 stderr F I1212 16:25:15.630644 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/network-metrics-service 2025-12-12T16:25:15.830464354+00:00 stderr F I1212 16:25:15.830376 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/network-metrics-service was successful 2025-12-12T16:25:15.830464354+00:00 stderr F I1212 16:25:15.830428 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:25:16.030375674+00:00 stderr F I1212 16:25:16.030125 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:25:16.030375674+00:00 stderr F I1212 16:25:16.030190 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:25:16.228864306+00:00 stderr F I1212 16:25:16.228799 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:25:16.228864306+00:00 stderr F I1212 16:25:16.228846 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/multus-admission-controller 2025-12-12T16:25:16.431718033+00:00 stderr F I1212 16:25:16.431665 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/multus-admission-controller was successful 2025-12-12T16:25:16.431823036+00:00 stderr F I1212 16:25:16.431813 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ac 2025-12-12T16:25:16.631319065+00:00 stderr F I1212 16:25:16.631249 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ac was successful 2025-12-12T16:25:16.631319065+00:00 stderr F I1212 16:25:16.631294 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook 2025-12-12T16:25:16.830328730+00:00 stderr F I1212 16:25:16.830243 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook was successful 2025-12-12T16:25:16.830328730+00:00 stderr F I1212 16:25:16.830294 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook 2025-12-12T16:25:17.031622527+00:00 stderr F I1212 16:25:17.031531 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook was successful 2025-12-12T16:25:17.031622527+00:00 stderr F I1212 16:25:17.031601 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io 2025-12-12T16:25:17.068440753+00:00 stderr F I1212 16:25:17.068288 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:25:17.234777241+00:00 stderr F I1212 16:25:17.233823 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io was successful 2025-12-12T16:25:17.234777241+00:00 stderr F I1212 16:25:17.234750 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller 2025-12-12T16:25:17.435490602+00:00 stderr F I1212 16:25:17.434623 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller was successful 2025-12-12T16:25:17.435490602+00:00 stderr F I1212 16:25:17.435436 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller 2025-12-12T16:25:17.632421913+00:00 stderr F I1212 16:25:17.632341 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller was successful 2025-12-12T16:25:17.632421913+00:00 stderr F I1212 16:25:17.632399 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:25:17.830223428+00:00 stderr F I1212 16:25:17.830151 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:25:17.830283079+00:00 stderr F I1212 16:25:17.830237 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:25:18.031196535+00:00 stderr F I1212 16:25:18.031098 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:25:18.031196535+00:00 stderr F I1212 16:25:18.031156 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules 2025-12-12T16:25:18.234129335+00:00 stderr F I1212 16:25:18.234032 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules was successful 2025-12-12T16:25:18.234129335+00:00 stderr F I1212 16:25:18.234095 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-ovn-kubernetes 2025-12-12T16:25:18.431584460+00:00 stderr F I1212 16:25:18.431503 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-ovn-kubernetes was successful 2025-12-12T16:25:18.431584460+00:00 stderr F I1212 16:25:18.431559 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org 2025-12-12T16:25:18.638505454+00:00 stderr F I1212 16:25:18.638384 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org was successful 2025-12-12T16:25:18.638505454+00:00 stderr F I1212 16:25:18.638446 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org 2025-12-12T16:25:18.836806371+00:00 stderr F I1212 16:25:18.836706 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) 
/egressips.k8s.ovn.org was successful 2025-12-12T16:25:18.836806371+00:00 stderr F I1212 16:25:18.836770 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org 2025-12-12T16:25:19.039100924+00:00 stderr F I1212 16:25:19.038991 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org was successful 2025-12-12T16:25:19.039100924+00:00 stderr F I1212 16:25:19.039063 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org 2025-12-12T16:25:19.241060708+00:00 stderr F I1212 16:25:19.240974 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org was successful 2025-12-12T16:25:19.241060708+00:00 stderr F I1212 16:25:19.241030 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org 2025-12-12T16:25:19.435911884+00:00 stderr F I1212 16:25:19.435821 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org was successful 2025-12-12T16:25:19.435911884+00:00 stderr F I1212 16:25:19.435888 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:25:19.661966890+00:00 stderr F I1212 16:25:19.661889 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:25:19.661966890+00:00 stderr F I1212 16:25:19.661945 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:25:19.862417534+00:00 stderr F I1212 16:25:19.862325 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:25:19.862417534+00:00 stderr F I1212 16:25:19.862392 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io 2025-12-12T16:25:20.039126265+00:00 stderr F I1212 16:25:20.039009 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io was successful 2025-12-12T16:25:20.039126265+00:00 stderr F I1212 16:25:20.039087 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org 2025-12-12T16:25:20.263559378+00:00 stderr F I1212 16:25:20.263485 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:25:20.263745453+00:00 stderr F I1212 16:25:20.263722 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org 2025-12-12T16:25:20.479334654+00:00 stderr F I1212 16:25:20.479230 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:25:20.479334654+00:00 stderr F I1212 16:25:20.479298 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:25:20.633095842+00:00 stderr F I1212 16:25:20.632989 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node was 
successful 2025-12-12T16:25:20.633095842+00:00 stderr F I1212 16:25:20.633069 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited 2025-12-12T16:25:20.830498426+00:00 stderr F I1212 16:25:20.830439 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:25:20.830546787+00:00 stderr F I1212 16:25:20.830503 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited 2025-12-12T16:25:21.033820495+00:00 stderr F I1212 16:25:21.033696 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited was successful 2025-12-12T16:25:21.033820495+00:00 stderr F I1212 16:25:21.033784 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited 2025-12-12T16:25:21.233774635+00:00 stderr F I1212 16:25:21.233689 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:25:21.233774635+00:00 stderr F I1212 16:25:21.233744 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited 2025-12-12T16:25:21.433998543+00:00 stderr F I1212 16:25:21.433889 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited was successful 2025-12-12T16:25:21.433998543+00:00 stderr F I1212 16:25:21.433956 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy 2025-12-12T16:25:21.634424936+00:00 stderr F I1212 16:25:21.634348 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy was successful 2025-12-12T16:25:21.634424936+00:00 stderr F I1212 16:25:21.634414 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy 2025-12-12T16:25:21.834972033+00:00 stderr F I1212 16:25:21.834895 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy was successful 2025-12-12T16:25:21.834972033+00:00 stderr F I1212 16:25:21.834962 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config 2025-12-12T16:25:22.034534163+00:00 stderr F I1212 16:25:22.034398 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config was successful 2025-12-12T16:25:22.034534163+00:00 stderr F I1212 16:25:22.034483 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:25:22.234129855+00:00 stderr F I1212 16:25:22.234039 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:25:22.234225617+00:00 stderr F I1212 16:25:22.234140 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:25:22.433996223+00:00 stderr F I1212 16:25:22.433922 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) 
/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:25:22.433996223+00:00 stderr F I1212 16:25:22.433975 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:25:22.629959229+00:00 stderr F I1212 16:25:22.629869 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:25:22.629959229+00:00 stderr F I1212 16:25:22.629919 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:25:22.832877897+00:00 stderr F I1212 16:25:22.832777 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:25:22.832877897+00:00 stderr F I1212 16:25:22.832847 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:25:23.031128913+00:00 stderr F I1212 16:25:23.031036 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:25:23.031128913+00:00 stderr F I1212 16:25:23.031090 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn 2025-12-12T16:25:23.230615092+00:00 stderr F I1212 16:25:23.230501 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn was successful 2025-12-12T16:25:23.230740055+00:00 stderr F I1212 16:25:23.230725 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer 2025-12-12T16:25:23.430544562+00:00 stderr F I1212 16:25:23.430482 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer was successful 2025-12-12T16:25:23.430544562+00:00 stderr F I1212 16:25:23.430534 1 log.go:245] reconciling (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes 2025-12-12T16:25:23.630248866+00:00 stderr F I1212 16:25:23.630037 1 log.go:245] Apply / Create of (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes was successful 2025-12-12T16:25:23.630248866+00:00 stderr F I1212 16:25:23.630108 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader 2025-12-12T16:25:23.832344383+00:00 stderr F I1212 16:25:23.832250 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader was successful 2025-12-12T16:25:23.832344383+00:00 stderr F I1212 16:25:23.832305 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib 2025-12-12T16:25:24.027783536+00:00 stderr F I1212 16:25:24.027675 1 log.go:245] Reconciling update to IngressController openshift-ingress-operator/default 2025-12-12T16:25:24.037657615+00:00 stderr F I1212 16:25:24.037527 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib was successful 2025-12-12T16:25:24.037657615+00:00 stderr F I1212 16:25:24.037586 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor 
2025-12-12T16:25:24.232227384+00:00 stderr F I1212 16:25:24.232084 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor was successful 2025-12-12T16:25:24.232227384+00:00 stderr F I1212 16:25:24.232149 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer 2025-12-12T16:25:24.433551211+00:00 stderr F I1212 16:25:24.433423 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer was successful 2025-12-12T16:25:24.433551211+00:00 stderr F I1212 16:25:24.433503 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules 2025-12-12T16:25:24.634408186+00:00 stderr F I1212 16:25:24.634343 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules was successful 2025-12-12T16:25:24.634532199+00:00 stderr F I1212 16:25:24.634522 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules 2025-12-12T16:25:24.838580906+00:00 stderr F I1212 16:25:24.838520 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules was successful 2025-12-12T16:25:24.838717790+00:00 stderr F I1212 16:25:24.838708 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features 2025-12-12T16:25:25.030599959+00:00 stderr F I1212 16:25:25.030526 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features was successful 2025-12-12T16:25:25.030599959+00:00 stderr F I1212 16:25:25.030590 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics 2025-12-12T16:25:25.231744166+00:00 stderr F I1212 16:25:25.231112 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics was successful 2025-12-12T16:25:25.231851949+00:00 stderr F I1212 16:25:25.231840 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:25:25.431898613+00:00 stderr F I1212 16:25:25.431816 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:25:25.431898613+00:00 stderr F I1212 16:25:25.431886 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node 2025-12-12T16:25:25.630784307+00:00 stderr F I1212 16:25:25.630647 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node was successful 2025-12-12T16:25:25.630784307+00:00 stderr F I1212 16:25:25.630704 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:25:25.832428881+00:00 stderr F I1212 16:25:25.832331 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:25:25.832428881+00:00 stderr F I1212 16:25:25.832383 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:25:26.030371172+00:00 stderr F I1212 16:25:26.030303 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) 
openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:25:26.030371172+00:00 stderr F I1212 16:25:26.030355 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:25:26.231547694+00:00 stderr F I1212 16:25:26.231442 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:25:26.231547694+00:00 stderr F I1212 16:25:26.231490 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-host-network 2025-12-12T16:25:26.433127606+00:00 stderr F I1212 16:25:26.433029 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-host-network was successful 2025-12-12T16:25:26.433127606+00:00 stderr F I1212 16:25:26.433087 1 log.go:245] reconciling (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas 2025-12-12T16:25:26.631718973+00:00 stderr F I1212 16:25:26.631630 1 log.go:245] Apply / Create of (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas was successful 2025-12-12T16:25:26.631759354+00:00 stderr F I1212 16:25:26.631715 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane 2025-12-12T16:25:26.835083040+00:00 stderr F I1212 16:25:26.834975 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:25:26.835285695+00:00 stderr F I1212 16:25:26.835237 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:25:26.835285695+00:00 stderr F I1212 16:25:26.835270 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:25:26.835700606+00:00 stderr F I1212 16:25:26.835664 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane was successful 2025-12-12T16:25:26.835807938+00:00 stderr F I1212 16:25:26.835762 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node 2025-12-12T16:25:26.852333084+00:00 stderr F I1212 16:25:26.852074 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:25:26.852333084+00:00 stderr F I1212 16:25:26.852114 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:25:26.884903834+00:00 stderr F I1212 16:25:26.884778 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:25:26.884903834+00:00 stderr F I1212 16:25:26.884808 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:25:26.886917974+00:00 stderr F I1212 16:25:26.886288 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:25:26.886917974+00:00 stderr F I1212 16:25:26.886621 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:25:26.886917974+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:25:26.886917974+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:25:26.886917974+00:00 stderr F reason: Unknown 2025-12-12T16:25:26.886917974+00:00 stderr F status: "False" 
2025-12-12T16:25:26.886917974+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:25:26.886917974+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:25:26.886917974+00:00 stderr F status: "False" 2025-12-12T16:25:26.886917974+00:00 stderr F type: Degraded 2025-12-12T16:25:26.886917974+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:25:26.886917974+00:00 stderr F status: "True" 2025-12-12T16:25:26.886917974+00:00 stderr F type: Upgradeable 2025-12-12T16:25:26.886917974+00:00 stderr F - lastTransitionTime: "2025-12-12T16:25:26Z" 2025-12-12T16:25:26.886917974+00:00 stderr F message: Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" update is 2025-12-12T16:25:26.886917974+00:00 stderr F being processed (generation 4, observed generation 3) 2025-12-12T16:25:26.886917974+00:00 stderr F reason: Deploying 2025-12-12T16:25:26.886917974+00:00 stderr F status: "True" 2025-12-12T16:25:26.886917974+00:00 stderr F type: Progressing 2025-12-12T16:25:26.886917974+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:25:26.886917974+00:00 stderr F status: "True" 2025-12-12T16:25:26.886917974+00:00 stderr F type: Available 2025-12-12T16:25:26.903627085+00:00 stderr F I1212 16:25:26.903359 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:25:26.903627085+00:00 stderr F I1212 16:25:26.903386 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:25:26.916098709+00:00 stderr F I1212 16:25:26.912589 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:25:26.916098709+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:25:26.916098709+00:00 stderr F status: "False" 2025-12-12T16:25:26.916098709+00:00 stderr F type: Degraded 2025-12-12T16:25:26.916098709+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:25:26.916098709+00:00 stderr F status: "True" 2025-12-12T16:25:26.916098709+00:00 stderr F type: Upgradeable 2025-12-12T16:25:26.916098709+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:25:26.916098709+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:25:26.916098709+00:00 stderr F reason: Unknown 2025-12-12T16:25:26.916098709+00:00 stderr F status: "False" 2025-12-12T16:25:26.916098709+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:25:26.916098709+00:00 stderr F - lastTransitionTime: "2025-12-12T16:25:26Z" 2025-12-12T16:25:26.916098709+00:00 stderr F message: Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" update is 2025-12-12T16:25:26.916098709+00:00 stderr F being processed (generation 4, observed generation 3) 2025-12-12T16:25:26.916098709+00:00 stderr F reason: Deploying 2025-12-12T16:25:26.916098709+00:00 stderr F status: "True" 2025-12-12T16:25:26.916098709+00:00 stderr F type: Progressing 2025-12-12T16:25:26.916098709+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:25:26.916098709+00:00 stderr F status: "True" 2025-12-12T16:25:26.916098709+00:00 stderr F type: Available 2025-12-12T16:25:26.927819313+00:00 stderr F I1212 16:25:26.927743 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:25:26.927819313+00:00 stderr F I1212 16:25:26.927773 1 pod_watcher.go:132] Operand /, Kind= 
openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:25:26.976349965+00:00 stderr F I1212 16:25:26.976269 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:25:26.976704904+00:00 stderr F I1212 16:25:26.976683 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:25:26.976704904+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:25:26.976704904+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:25:26.976704904+00:00 stderr F reason: Unknown 2025-12-12T16:25:26.976704904+00:00 stderr F status: "False" 2025-12-12T16:25:26.976704904+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:25:26.976704904+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:25:26.976704904+00:00 stderr F status: "False" 2025-12-12T16:25:26.976704904+00:00 stderr F type: Degraded 2025-12-12T16:25:26.976704904+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:25:26.976704904+00:00 stderr F status: "True" 2025-12-12T16:25:26.976704904+00:00 stderr F type: Upgradeable 2025-12-12T16:25:26.976704904+00:00 stderr F - lastTransitionTime: "2025-12-12T16:25:26Z" 2025-12-12T16:25:26.976704904+00:00 stderr F message: Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" is not available 2025-12-12T16:25:26.976704904+00:00 stderr F (awaiting 1 nodes) 2025-12-12T16:25:26.976704904+00:00 stderr F reason: Deploying 2025-12-12T16:25:26.976704904+00:00 stderr F status: "True" 2025-12-12T16:25:26.976704904+00:00 stderr F type: Progressing 2025-12-12T16:25:26.976704904+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:25:26.976704904+00:00 stderr F status: "True" 2025-12-12T16:25:26.976704904+00:00 stderr F type: Available 2025-12-12T16:25:27.056609684+00:00 stderr F I1212 16:25:27.055500 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:25:27.056609684+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:25:27.056609684+00:00 stderr F status: "False" 2025-12-12T16:25:27.056609684+00:00 stderr F type: Degraded 2025-12-12T16:25:27.056609684+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:25:27.056609684+00:00 stderr F status: "True" 2025-12-12T16:25:27.056609684+00:00 stderr F type: Upgradeable 2025-12-12T16:25:27.056609684+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:25:27.056609684+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:25:27.056609684+00:00 stderr F reason: Unknown 2025-12-12T16:25:27.056609684+00:00 stderr F status: "False" 2025-12-12T16:25:27.056609684+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:25:27.056609684+00:00 stderr F - lastTransitionTime: "2025-12-12T16:25:26Z" 2025-12-12T16:25:27.056609684+00:00 stderr F message: Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" is not available 2025-12-12T16:25:27.056609684+00:00 stderr F (awaiting 1 nodes) 2025-12-12T16:25:27.056609684+00:00 stderr F reason: Deploying 2025-12-12T16:25:27.056609684+00:00 stderr F status: "True" 2025-12-12T16:25:27.056609684+00:00 stderr F type: Progressing 2025-12-12T16:25:27.056609684+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:25:27.056609684+00:00 stderr F status: "True" 2025-12-12T16:25:27.056609684+00:00 stderr F type: Available 
2025-12-12T16:25:27.056609684+00:00 stderr F I1212 16:25:27.055885 1 warnings.go:110] "Warning: spec.template.spec.containers[3].ports[0]: duplicate port name \"https\" with spec.template.spec.containers[2].ports[0], services and probes that select ports by name will use spec.template.spec.containers[2].ports[0]" 2025-12-12T16:25:27.056717457+00:00 stderr F I1212 16:25:27.056682 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:25:27.056717457+00:00 stderr F I1212 16:25:27.056707 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:25:27.056995514+00:00 stderr F I1212 16:25:27.056952 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node was successful 2025-12-12T16:25:27.057007504+00:00 stderr F I1212 16:25:27.056995 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label 2025-12-12T16:25:27.105504844+00:00 stderr F I1212 16:25:27.105430 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:25:27.105504844+00:00 stderr F I1212 16:25:27.105462 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:25:27.232406148+00:00 stderr F I1212 16:25:27.232332 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label was successful 2025-12-12T16:25:27.232406148+00:00 stderr F I1212 16:25:27.232399 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding 2025-12-12T16:25:27.412783786+00:00 stderr F I1212 16:25:27.412690 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:25:27.412783786+00:00 stderr F I1212 16:25:27.412720 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:25:27.431488857+00:00 stderr F I1212 16:25:27.431402 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding was successful 2025-12-12T16:25:27.431529558+00:00 stderr F I1212 16:25:27.431499 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-diagnostics 2025-12-12T16:25:27.520070456+00:00 stderr F I1212 16:25:27.519995 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:25:27.520070456+00:00 stderr F I1212 16:25:27.520026 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:25:27.632078704+00:00 stderr F I1212 16:25:27.631997 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-diagnostics was successful 2025-12-12T16:25:27.632078704+00:00 stderr F I1212 16:25:27.632049 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:25:27.829989084+00:00 stderr F I1212 16:25:27.829505 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:25:27.829989084+00:00 stderr F I1212 16:25:27.829556 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) 
openshift-network-diagnostics/network-diagnostics 2025-12-12T16:25:27.867268642+00:00 stderr F I1212 16:25:27.866918 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:25:27.867268642+00:00 stderr F I1212 16:25:27.867133 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:25:27.867268642+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:25:27.867268642+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:25:27.867268642+00:00 stderr F reason: Unknown 2025-12-12T16:25:27.867268642+00:00 stderr F status: "False" 2025-12-12T16:25:27.867268642+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:25:27.867268642+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:25:27.867268642+00:00 stderr F status: "False" 2025-12-12T16:25:27.867268642+00:00 stderr F type: Degraded 2025-12-12T16:25:27.867268642+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:25:27.867268642+00:00 stderr F status: "True" 2025-12-12T16:25:27.867268642+00:00 stderr F type: Upgradeable 2025-12-12T16:25:27.867268642+00:00 stderr F - lastTransitionTime: "2025-12-12T16:25:26Z" 2025-12-12T16:25:27.867268642+00:00 stderr F message: |- 2025-12-12T16:25:27.867268642+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" update is rolling out (0 out of 1 updated) 2025-12-12T16:25:27.867268642+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" is not available (awaiting 1 nodes) 2025-12-12T16:25:27.867268642+00:00 stderr F reason: Deploying 2025-12-12T16:25:27.867268642+00:00 stderr F status: "True" 2025-12-12T16:25:27.867268642+00:00 stderr F type: Progressing 2025-12-12T16:25:27.867268642+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:25:27.867268642+00:00 stderr F status: "True" 2025-12-12T16:25:27.867268642+00:00 stderr F type: Available 2025-12-12T16:25:28.032732906+00:00 stderr F I1212 16:25:28.032649 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:25:28.032732906+00:00 stderr F I1212 16:25:28.032713 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:25:28.231338853+00:00 stderr F I1212 16:25:28.231258 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:25:28.231338853+00:00 stderr F I1212 16:25:28.231322 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics 2025-12-12T16:25:28.252733401+00:00 stderr F I1212 16:25:28.250704 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:25:28.252733401+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:25:28.252733401+00:00 stderr F status: "False" 2025-12-12T16:25:28.252733401+00:00 stderr F type: Degraded 2025-12-12T16:25:28.252733401+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:25:28.252733401+00:00 stderr F status: "True" 2025-12-12T16:25:28.252733401+00:00 stderr F type: Upgradeable 2025-12-12T16:25:28.252733401+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:25:28.252733401+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 
2025-12-12T16:25:28.252733401+00:00 stderr F reason: Unknown 2025-12-12T16:25:28.252733401+00:00 stderr F status: "False" 2025-12-12T16:25:28.252733401+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:25:28.252733401+00:00 stderr F - lastTransitionTime: "2025-12-12T16:25:26Z" 2025-12-12T16:25:28.252733401+00:00 stderr F message: |- 2025-12-12T16:25:28.252733401+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" update is rolling out (0 out of 1 updated) 2025-12-12T16:25:28.252733401+00:00 stderr F Deployment "/openshift-ovn-kubernetes/ovnkube-control-plane" is not available (awaiting 1 nodes) 2025-12-12T16:25:28.252733401+00:00 stderr F reason: Deploying 2025-12-12T16:25:28.252733401+00:00 stderr F status: "True" 2025-12-12T16:25:28.252733401+00:00 stderr F type: Progressing 2025-12-12T16:25:28.252733401+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:25:28.252733401+00:00 stderr F status: "True" 2025-12-12T16:25:28.252733401+00:00 stderr F type: Available 2025-12-12T16:25:28.431398966+00:00 stderr F I1212 16:25:28.431304 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics was successful 2025-12-12T16:25:28.431398966+00:00 stderr F I1212 16:25:28.431376 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics 2025-12-12T16:25:28.442590958+00:00 stderr F I1212 16:25:28.442508 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:25:28.442590958+00:00 stderr F I1212 16:25:28.442544 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-control-plane updated, re-generating status 2025-12-12T16:25:28.516342973+00:00 stderr F I1212 16:25:28.516270 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:25:28.516342973+00:00 stderr F I1212 16:25:28.516310 1 pod_watcher.go:132] Operand /, Kind= openshift-multus/multus updated, re-generating status 2025-12-12T16:25:28.628769302+00:00 stderr F I1212 16:25:28.628686 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics was successful 2025-12-12T16:25:28.628769302+00:00 stderr F I1212 16:25:28.628744 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics 2025-12-12T16:25:28.830745214+00:00 stderr F I1212 16:25:28.830640 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics was successful 2025-12-12T16:25:28.830745214+00:00 stderr F I1212 16:25:28.830697 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source 2025-12-12T16:25:29.033618929+00:00 stderr F I1212 16:25:29.033544 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:25:29.033693441+00:00 stderr F I1212 16:25:29.033614 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-source 2025-12-12T16:25:29.071242786+00:00 stderr F I1212 16:25:29.071151 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:25:29.071242786+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:25:29.071242786+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:25:29.071242786+00:00 stderr F reason: Unknown 
2025-12-12T16:25:29.071242786+00:00 stderr F status: "False" 2025-12-12T16:25:29.071242786+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:25:29.071242786+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:25:29.071242786+00:00 stderr F status: "False" 2025-12-12T16:25:29.071242786+00:00 stderr F type: Degraded 2025-12-12T16:25:29.071242786+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:25:29.071242786+00:00 stderr F status: "True" 2025-12-12T16:25:29.071242786+00:00 stderr F type: Upgradeable 2025-12-12T16:25:29.071242786+00:00 stderr F - lastTransitionTime: "2025-12-12T16:25:26Z" 2025-12-12T16:25:29.071242786+00:00 stderr F message: |- 2025-12-12T16:25:29.071242786+00:00 stderr F DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes) 2025-12-12T16:25:29.071242786+00:00 stderr F DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:25:29.071242786+00:00 stderr F reason: Deploying 2025-12-12T16:25:29.071242786+00:00 stderr F status: "True" 2025-12-12T16:25:29.071242786+00:00 stderr F type: Progressing 2025-12-12T16:25:29.071242786+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:25:29.071242786+00:00 stderr F status: "True" 2025-12-12T16:25:29.071242786+00:00 stderr F type: Available 2025-12-12T16:25:29.071327398+00:00 stderr F I1212 16:25:29.071265 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:25:29.231460897+00:00 stderr F I1212 16:25:29.231384 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:25:29.231460897+00:00 stderr F I1212 16:25:29.231440 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source 2025-12-12T16:25:29.432452475+00:00 stderr F I1212 16:25:29.432365 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:25:29.432452475+00:00 stderr F I1212 16:25:29.432418 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:25:29.451359941+00:00 stderr F I1212 16:25:29.451235 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:25:29.451359941+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:25:29.451359941+00:00 stderr F status: "False" 2025-12-12T16:25:29.451359941+00:00 stderr F type: Degraded 2025-12-12T16:25:29.451359941+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:25:29.451359941+00:00 stderr F status: "True" 2025-12-12T16:25:29.451359941+00:00 stderr F type: Upgradeable 2025-12-12T16:25:29.451359941+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:25:29.451359941+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:25:29.451359941+00:00 stderr F reason: Unknown 2025-12-12T16:25:29.451359941+00:00 stderr F status: "False" 2025-12-12T16:25:29.451359941+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:25:29.451359941+00:00 stderr F - lastTransitionTime: "2025-12-12T16:25:26Z" 2025-12-12T16:25:29.451359941+00:00 stderr F message: |- 2025-12-12T16:25:29.451359941+00:00 stderr F DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes) 2025-12-12T16:25:29.451359941+00:00 stderr F 
DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) 2025-12-12T16:25:29.451359941+00:00 stderr F reason: Deploying 2025-12-12T16:25:29.451359941+00:00 stderr F status: "True" 2025-12-12T16:25:29.451359941+00:00 stderr F type: Progressing 2025-12-12T16:25:29.451359941+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:25:29.451359941+00:00 stderr F status: "True" 2025-12-12T16:25:29.451359941+00:00 stderr F type: Available 2025-12-12T16:25:29.631902953+00:00 stderr F I1212 16:25:29.631823 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:25:29.631902953+00:00 stderr F I1212 16:25:29.631882 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:25:29.832468350+00:00 stderr F I1212 16:25:29.832397 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:25:29.832505941+00:00 stderr F I1212 16:25:29.832466 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target 2025-12-12T16:25:30.033503619+00:00 stderr F I1212 16:25:30.033418 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:25:30.033503619+00:00 stderr F I1212 16:25:30.033485 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-target 2025-12-12T16:25:30.231008848+00:00 stderr F I1212 16:25:30.230937 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:25:30.231060420+00:00 stderr F I1212 16:25:30.231003 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role 2025-12-12T16:25:30.266879891+00:00 stderr F I1212 16:25:30.266796 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:25:30.266879891+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:25:30.266879891+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:25:30.266879891+00:00 stderr F reason: Unknown 2025-12-12T16:25:30.266879891+00:00 stderr F status: "False" 2025-12-12T16:25:30.266879891+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:25:30.266879891+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:25:30.266879891+00:00 stderr F status: "False" 2025-12-12T16:25:30.266879891+00:00 stderr F type: Degraded 2025-12-12T16:25:30.266879891+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:25:30.266879891+00:00 stderr F status: "True" 2025-12-12T16:25:30.266879891+00:00 stderr F type: Upgradeable 2025-12-12T16:25:30.266879891+00:00 stderr F - lastTransitionTime: "2025-12-12T16:25:26Z" 2025-12-12T16:25:30.266879891+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 2025-12-12T16:25:30.266879891+00:00 stderr F 1 nodes) 2025-12-12T16:25:30.266879891+00:00 stderr F reason: Deploying 2025-12-12T16:25:30.266879891+00:00 stderr F status: "True" 2025-12-12T16:25:30.266879891+00:00 stderr F type: Progressing 2025-12-12T16:25:30.266879891+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:25:30.266879891+00:00 stderr F status: 
"True" 2025-12-12T16:25:30.266879891+00:00 stderr F type: Available 2025-12-12T16:25:30.267696832+00:00 stderr F I1212 16:25:30.267662 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:25:30.430790865+00:00 stderr F I1212 16:25:30.430688 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role was successful 2025-12-12T16:25:30.430790865+00:00 stderr F I1212 16:25:30.430764 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding 2025-12-12T16:25:30.630779648+00:00 stderr F I1212 16:25:30.630707 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding was successful 2025-12-12T16:25:30.630831239+00:00 stderr F I1212 16:25:30.630781 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-node-identity 2025-12-12T16:25:30.648592086+00:00 stderr F I1212 16:25:30.648500 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:25:30.648592086+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:25:30.648592086+00:00 stderr F status: "False" 2025-12-12T16:25:30.648592086+00:00 stderr F type: Degraded 2025-12-12T16:25:30.648592086+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:25:30.648592086+00:00 stderr F status: "True" 2025-12-12T16:25:30.648592086+00:00 stderr F type: Upgradeable 2025-12-12T16:25:30.648592086+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:25:30.648592086+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:25:30.648592086+00:00 stderr F reason: Unknown 2025-12-12T16:25:30.648592086+00:00 stderr F status: "False" 2025-12-12T16:25:30.648592086+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:25:30.648592086+00:00 stderr F - lastTransitionTime: "2025-12-12T16:25:26Z" 2025-12-12T16:25:30.648592086+00:00 stderr F message: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 2025-12-12T16:25:30.648592086+00:00 stderr F 1 nodes) 2025-12-12T16:25:30.648592086+00:00 stderr F reason: Deploying 2025-12-12T16:25:30.648592086+00:00 stderr F status: "True" 2025-12-12T16:25:30.648592086+00:00 stderr F type: Progressing 2025-12-12T16:25:30.648592086+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:25:30.648592086+00:00 stderr F status: "True" 2025-12-12T16:25:30.648592086+00:00 stderr F type: Available 2025-12-12T16:25:30.833909979+00:00 stderr F I1212 16:25:30.833809 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-node-identity was successful 2025-12-12T16:25:30.833909979+00:00 stderr F I1212 16:25:30.833888 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity 2025-12-12T16:25:31.032423344+00:00 stderr F I1212 16:25:31.032335 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:25:31.032423344+00:00 stderr F I1212 16:25:31.032397 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity 2025-12-12T16:25:31.232960440+00:00 stderr F I1212 16:25:31.232867 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity was successful 
2025-12-12T16:25:31.232960440+00:00 stderr F I1212 16:25:31.232934 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity 2025-12-12T16:25:31.432818819+00:00 stderr F I1212 16:25:31.432711 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity was successful 2025-12-12T16:25:31.432818819+00:00 stderr F I1212 16:25:31.432761 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:25:31.633639132+00:00 stderr F I1212 16:25:31.633548 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:25:31.633639132+00:00 stderr F I1212 16:25:31.633609 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:25:31.830955707+00:00 stderr F I1212 16:25:31.830871 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:25:31.830955707+00:00 stderr F I1212 16:25:31.830940 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 2025-12-12T16:25:32.032083037+00:00 stderr F I1212 16:25:32.031999 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 was successful 2025-12-12T16:25:32.032083037+00:00 stderr F I1212 16:25:32.032060 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm 2025-12-12T16:25:32.232007557+00:00 stderr F I1212 16:25:32.231919 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm was successful 2025-12-12T16:25:32.232007557+00:00 stderr F I1212 16:25:32.231995 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity 2025-12-12T16:25:32.432816630+00:00 stderr F I1212 16:25:32.432742 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:25:32.432816630+00:00 stderr F I1212 16:25:32.432804 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io 2025-12-12T16:25:32.632408752+00:00 stderr F I1212 16:25:32.632323 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io was successful 2025-12-12T16:25:32.632408752+00:00 stderr F I1212 16:25:32.632385 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity 2025-12-12T16:25:32.836095257+00:00 stderr F I1212 16:25:32.836015 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:25:32.836095257+00:00 stderr F I1212 16:25:32.836086 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules 2025-12-12T16:25:33.030366726+00:00 stderr F I1212 16:25:33.030279 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) 
openshift-network-operator/openshift-network-operator-ipsec-rules was successful 2025-12-12T16:25:33.030366726+00:00 stderr F I1212 16:25:33.030344 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter 2025-12-12T16:25:33.231461686+00:00 stderr F I1212 16:25:33.231365 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter was successful 2025-12-12T16:25:33.231461686+00:00 stderr F I1212 16:25:33.231438 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter 2025-12-12T16:25:33.433404427+00:00 stderr F I1212 16:25:33.432374 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:25:33.433488899+00:00 stderr F I1212 16:25:33.433391 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter 2025-12-12T16:25:33.632989689+00:00 stderr F I1212 16:25:33.631987 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter was successful 2025-12-12T16:25:33.632989689+00:00 stderr F I1212 16:25:33.632801 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script 2025-12-12T16:25:33.832513460+00:00 stderr F I1212 16:25:33.832345 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script was successful 2025-12-12T16:25:33.832513460+00:00 stderr F I1212 16:25:33.832490 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter 2025-12-12T16:25:34.039622721+00:00 stderr F I1212 16:25:34.039531 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:25:34.039622721+00:00 stderr F I1212 16:25:34.039589 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-console 2025-12-12T16:25:34.232813862+00:00 stderr F I1212 16:25:34.232728 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-console was successful 2025-12-12T16:25:34.232813862+00:00 stderr F I1212 16:25:34.232793 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin 2025-12-12T16:25:34.433421450+00:00 stderr F I1212 16:25:34.433323 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:25:34.433510572+00:00 stderr F I1212 16:25:34.433417 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin 2025-12-12T16:25:34.638073130+00:00 stderr F I1212 16:25:34.638000 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:25:34.638073130+00:00 stderr F I1212 16:25:34.638063 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-console/networking-console-plugin 2025-12-12T16:25:34.833613720+00:00 stderr F I1212 16:25:34.833511 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:25:34.833613720+00:00 stderr F I1212 16:25:34.833586 1 log.go:245] reconciling (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin 2025-12-12T16:25:35.032898585+00:00 stderr F I1212 16:25:35.032776 1 log.go:245] Apply / Create of (console.openshift.io/v1, Kind=ConsolePlugin) 
/networking-console-plugin was successful 2025-12-12T16:25:35.049923603+00:00 stderr F I1212 16:25:35.049809 1 log.go:245] Operconfig Controller complete 2025-12-12T16:26:08.616367460+00:00 stderr F I1212 16:26:08.616306 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:26:08.616461372+00:00 stderr F I1212 16:26:08.616451 1 pod_watcher.go:132] Operand /, Kind= openshift-ovn-kubernetes/ovnkube-node updated, re-generating status 2025-12-12T16:26:08.666884586+00:00 stderr F I1212 16:26:08.666828 1 log.go:245] Network operator config updated with conditions: 2025-12-12T16:26:08.666884586+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:26:08.666884586+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:26:08.666884586+00:00 stderr F reason: Unknown 2025-12-12T16:26:08.666884586+00:00 stderr F status: "False" 2025-12-12T16:26:08.666884586+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:26:08.666884586+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:26:08.666884586+00:00 stderr F status: "False" 2025-12-12T16:26:08.666884586+00:00 stderr F type: Degraded 2025-12-12T16:26:08.666884586+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:26:08.666884586+00:00 stderr F status: "True" 2025-12-12T16:26:08.666884586+00:00 stderr F type: Upgradeable 2025-12-12T16:26:08.666884586+00:00 stderr F - lastTransitionTime: "2025-12-12T16:26:08Z" 2025-12-12T16:26:08.666884586+00:00 stderr F status: "False" 2025-12-12T16:26:08.666884586+00:00 stderr F type: Progressing 2025-12-12T16:26:08.666884586+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:26:08.666884586+00:00 stderr F status: "True" 2025-12-12T16:26:08.666884586+00:00 stderr F type: Available 2025-12-12T16:26:08.667086081+00:00 stderr F I1212 16:26:08.667069 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:26:08.689330193+00:00 stderr F I1212 16:26:08.687421 1 log.go:245] ClusterOperator config status updated with conditions: 2025-12-12T16:26:08.689330193+00:00 stderr F - lastTransitionTime: "2025-12-12T16:17:03Z" 2025-12-12T16:26:08.689330193+00:00 stderr F status: "False" 2025-12-12T16:26:08.689330193+00:00 stderr F type: Degraded 2025-12-12T16:26:08.689330193+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:03Z" 2025-12-12T16:26:08.689330193+00:00 stderr F status: "True" 2025-12-12T16:26:08.689330193+00:00 stderr F type: Upgradeable 2025-12-12T16:26:08.689330193+00:00 stderr F - lastTransitionTime: "2025-11-02T07:50:15Z" 2025-12-12T16:26:08.689330193+00:00 stderr F message: Unsupported management state "" for cluster-network-operator operator 2025-12-12T16:26:08.689330193+00:00 stderr F reason: Unknown 2025-12-12T16:26:08.689330193+00:00 stderr F status: "False" 2025-12-12T16:26:08.689330193+00:00 stderr F type: ManagementStateDegraded 2025-12-12T16:26:08.689330193+00:00 stderr F - lastTransitionTime: "2025-12-12T16:26:08Z" 2025-12-12T16:26:08.689330193+00:00 stderr F status: "False" 2025-12-12T16:26:08.689330193+00:00 stderr F type: Progressing 2025-12-12T16:26:08.689330193+00:00 stderr F - lastTransitionTime: "2025-11-02T07:51:40Z" 2025-12-12T16:26:08.689330193+00:00 stderr F status: "True" 2025-12-12T16:26:08.689330193+00:00 stderr F type: Available 2025-12-12T16:26:25.849686442+00:00 stderr F I1212 16:26:25.849255 1 log.go:245] Reconciling 
pki.network.operator.openshift.io openshift-network-node-identity/network-node-identity 2025-12-12T16:26:25.849734304+00:00 stderr F I1212 16:26:25.849724 1 log.go:245] successful reconciliation 2025-12-12T16:26:29.248141269+00:00 stderr F I1212 16:26:29.248061 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/ovn 2025-12-12T16:26:29.248481317+00:00 stderr F I1212 16:26:29.248448 1 log.go:245] successful reconciliation 2025-12-12T16:26:32.242301571+00:00 stderr F I1212 16:26:32.242221 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/signer 2025-12-12T16:26:32.242639689+00:00 stderr F I1212 16:26:32.242600 1 log.go:245] successful reconciliation 2025-12-12T16:26:50.385247943+00:00 stderr F I1212 16:26:50.385122 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:26:50.519256688+00:00 stderr F I1212 16:26:50.514301 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:26:50.519256688+00:00 stderr F I1212 16:26:50.516829 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:26:50.519256688+00:00 stderr F I1212 16:26:50.518490 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:26:50.520377707+00:00 stderr F I1212 16:26:50.520338 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc0053ce540 DisableUDPAggregation:false DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:26:50.525015244+00:00 stderr F I1212 16:26:50.524991 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 4 -> 4 2025-12-12T16:26:50.525050325+00:00 stderr F I1212 16:26:50.525041 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:26:50.525075665+00:00 stderr F I1212 16:26:50.525066 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:26:50.527780234+00:00 stderr F I1212 16:26:50.527744 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 3 -> 3 2025-12-12T16:26:50.527780234+00:00 stderr F I1212 16:26:50.527763 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:26:50.527780234+00:00 stderr F I1212 16:26:50.527770 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 3 -> 3 2025-12-12T16:26:50.527780234+00:00 stderr F I1212 16:26:50.527774 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:26:50.527801984+00:00 stderr F I1212 16:26:50.527794 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:26:50.531247551+00:00 stderr F I1212 16:26:50.531220 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:26:50.531247551+00:00 stderr F 
I1212 16:26:50.531241 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:26:50.602439490+00:00 stderr F I1212 16:26:50.602380 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:26:50.617041889+00:00 stderr F I1212 16:26:50.616989 1 log.go:245] Apply / Create of (operator.openshift.io/v1, Kind=Network) /cluster was successful 2025-12-12T16:26:50.617120061+00:00 stderr F I1212 16:26:50.617110 1 log.go:245] Starting render phase 2025-12-12T16:26:50.617549771+00:00 stderr F I1212 16:26:50.617523 1 log.go:245] Skipping reconcile of Network.operator.openshift.io: spec unchanged 2025-12-12T16:26:50.635376722+00:00 stderr F I1212 16:26:50.635293 1 ovn_kubernetes.go:344] OVN_EGRESSIP_HEALTHCHECK_PORT env var is not defined. Using: 9107 2025-12-12T16:26:50.663209604+00:00 stderr F I1212 16:26:50.663120 1 ovn_kubernetes.go:1457] IP family mode: node=single-stack, controlPlane=single-stack 2025-12-12T16:26:50.663209604+00:00 stderr F I1212 16:26:50.663148 1 ovn_kubernetes.go:1429] IP family change: updateNode=true, updateControlPlane=true 2025-12-12T16:26:50.663209604+00:00 stderr F I1212 16:26:50.663201 1 ovn_kubernetes.go:1601] OVN-Kubernetes control-plane and node already at release version 4.20.1; no changes required 2025-12-12T16:26:50.663267316+00:00 stderr F I1212 16:26:50.663226 1 ovn_kubernetes.go:531] ovnk components: ovnkube-node: isRunning=true, update=true; ovnkube-control-plane: isRunning=true, update=true 2025-12-12T16:26:50.804489963+00:00 stderr F I1212 16:26:50.804398 1 ovn_kubernetes.go:1693] daemonset openshift-network-node-identity/network-node-identity rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 1 -> 1 2025-12-12T16:26:50.804489963+00:00 stderr F I1212 16:26:50.804438 1 ovn_kubernetes.go:1698] daemonset openshift-network-node-identity/network-node-identity rollout complete 2025-12-12T16:26:51.013804601+00:00 stderr F I1212 16:26:51.013720 1 log.go:245] Render phase done, rendered 126 objects 2025-12-12T16:26:51.026572794+00:00 stderr F I1212 16:26:51.026499 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster 2025-12-12T16:26:51.035106400+00:00 stderr F I1212 16:26:51.035030 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster was successful 2025-12-12T16:26:51.035155641+00:00 stderr F I1212 16:26:51.035100 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io 2025-12-12T16:26:51.045078521+00:00 stderr F I1212 16:26:51.044996 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io was successful 2025-12-12T16:26:51.045078521+00:00 stderr F I1212 16:26:51.045055 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io 2025-12-12T16:26:51.056726806+00:00 stderr F I1212 16:26:51.056655 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io was successful 2025-12-12T16:26:51.056726806+00:00 stderr F I1212 16:26:51.056705 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io 2025-12-12T16:26:51.065059746+00:00 stderr F I1212 16:26:51.065003 1 log.go:245] Apply 
/ Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io was successful 2025-12-12T16:26:51.065085967+00:00 stderr F I1212 16:26:51.065057 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io 2025-12-12T16:26:51.073553781+00:00 stderr F I1212 16:26:51.073062 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io was successful 2025-12-12T16:26:51.073553781+00:00 stderr F I1212 16:26:51.073118 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-multus 2025-12-12T16:26:51.081948803+00:00 stderr F I1212 16:26:51.081860 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-multus was successful 2025-12-12T16:26:51.081948803+00:00 stderr F I1212 16:26:51.081937 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus 2025-12-12T16:26:51.088923229+00:00 stderr F I1212 16:26:51.088835 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus was successful 2025-12-12T16:26:51.088923229+00:00 stderr F I1212 16:26:51.088899 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools 2025-12-12T16:26:51.093202047+00:00 stderr F I1212 16:26:51.093135 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools was successful 2025-12-12T16:26:51.093202047+00:00 stderr F I1212 16:26:51.093165 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus 2025-12-12T16:26:51.098003139+00:00 stderr F I1212 16:26:51.097959 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus was successful 2025-12-12T16:26:51.098003139+00:00 stderr F I1212 16:26:51.097986 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient 2025-12-12T16:26:51.101457756+00:00 stderr F I1212 16:26:51.101377 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient was successful 2025-12-12T16:26:51.101457756+00:00 stderr F I1212 16:26:51.101430 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group 2025-12-12T16:26:51.232573128+00:00 stderr F I1212 16:26:51.232455 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group was successful 2025-12-12T16:26:51.232573128+00:00 stderr F I1212 16:26:51.232512 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools 2025-12-12T16:26:51.433777351+00:00 stderr F I1212 16:26:51.433713 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools was successful 2025-12-12T16:26:51.433974866+00:00 stderr F I1212 16:26:51.433959 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools 2025-12-12T16:26:51.635382145+00:00 stderr F I1212 16:26:51.633709 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools was successful 2025-12-12T16:26:51.635382145+00:00 stderr F I1212 16:26:51.633772 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers 2025-12-12T16:26:51.832710160+00:00 stderr F I1212 16:26:51.832619 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) 
/multus-cluster-readers was successful 2025-12-12T16:26:51.832710160+00:00 stderr F I1212 16:26:51.832686 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts 2025-12-12T16:26:52.031549773+00:00 stderr F I1212 16:26:52.031474 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts was successful 2025-12-12T16:26:52.031549773+00:00 stderr F I1212 16:26:52.031524 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts 2025-12-12T16:26:52.232923801+00:00 stderr F I1212 16:26:52.232855 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts was successful 2025-12-12T16:26:52.232972662+00:00 stderr F I1212 16:26:52.232920 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni 2025-12-12T16:26:52.432153234+00:00 stderr F I1212 16:26:52.432075 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni was successful 2025-12-12T16:26:52.432153234+00:00 stderr F I1212 16:26:52.432142 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni 2025-12-12T16:26:52.583303343+00:00 stderr F I1212 16:26:52.583240 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:26:52.589847428+00:00 stderr F I1212 16:26:52.589780 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:52.598256081+00:00 stderr F I1212 16:26:52.598011 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:52.612234034+00:00 stderr F I1212 16:26:52.611668 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:52.622072322+00:00 stderr F I1212 16:26:52.622022 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:52.636317692+00:00 stderr F I1212 16:26:52.635859 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:52.636317692+00:00 stderr F I1212 16:26:52.635927 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni was successful 2025-12-12T16:26:52.636317692+00:00 stderr F I1212 16:26:52.635985 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project 2025-12-12T16:26:52.666324970+00:00 stderr F I1212 16:26:52.666100 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:52.686322575+00:00 stderr F I1212 16:26:52.685150 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:26:52.704240228+00:00 stderr F I1212 16:26:52.702169 1 log.go:245] The check 
PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:26:52.721968036+00:00 stderr F I1212 16:26:52.720782 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:52.777229912+00:00 stderr F I1212 16:26:52.771786 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:52.836472929+00:00 stderr F I1212 16:26:52.836402 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project was successful 2025-12-12T16:26:52.836522490+00:00 stderr F I1212 16:26:52.836472 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist 2025-12-12T16:26:52.977999324+00:00 stderr F I1212 16:26:52.976066 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:26:53.035984419+00:00 stderr F I1212 16:26:53.032547 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist was successful 2025-12-12T16:26:53.035984419+00:00 stderr F I1212 16:26:53.032607 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources 2025-12-12T16:26:53.188597315+00:00 stderr F I1212 16:26:53.188398 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:53.252269953+00:00 stderr F I1212 16:26:53.252143 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources was successful 2025-12-12T16:26:53.252269953+00:00 stderr F I1212 16:26:53.252233 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config 2025-12-12T16:26:53.372233234+00:00 stderr F I1212 16:26:53.371883 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:53.437260877+00:00 stderr F I1212 16:26:53.433836 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config was successful 2025-12-12T16:26:53.437260877+00:00 stderr F I1212 16:26:53.433892 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config 2025-12-12T16:26:53.593356820+00:00 stderr F I1212 16:26:53.591601 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:53.650339940+00:00 stderr F I1212 16:26:53.649575 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config was successful 2025-12-12T16:26:53.650339940+00:00 stderr F I1212 16:26:53.649636 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus 2025-12-12T16:26:53.778234461+00:00 stderr F I1212 16:26:53.776113 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:53.873359594+00:00 stderr F I1212 16:26:53.872947 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus was successful 
2025-12-12T16:26:53.873359594+00:00 stderr F I1212 16:26:53.872999 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins 2025-12-12T16:26:53.979971598+00:00 stderr F I1212 16:26:53.979150 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:54.065722134+00:00 stderr F I1212 16:26:54.065646 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins was successful 2025-12-12T16:26:54.065722134+00:00 stderr F I1212 16:26:54.065702 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa 2025-12-12T16:26:54.171195609+00:00 stderr F I1212 16:26:54.169292 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:54.261440508+00:00 stderr F I1212 16:26:54.257618 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa was successful 2025-12-12T16:26:54.261440508+00:00 stderr F I1212 16:26:54.257682 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role 2025-12-12T16:26:54.427308438+00:00 stderr F I1212 16:26:54.415826 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:26:54.483234411+00:00 stderr F I1212 16:26:54.481503 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role was successful 2025-12-12T16:26:54.483234411+00:00 stderr F I1212 16:26:54.481570 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding 2025-12-12T16:26:54.584245663+00:00 stderr F I1212 16:26:54.582908 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:26:54.637234382+00:00 stderr F I1212 16:26:54.636832 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding was successful 2025-12-12T16:26:54.637234382+00:00 stderr F I1212 16:26:54.636901 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon 2025-12-12T16:26:54.853289500+00:00 stderr F I1212 16:26:54.847026 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:54.853289500+00:00 stderr F I1212 16:26:54.850602 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon was successful 2025-12-12T16:26:54.853289500+00:00 stderr F I1212 16:26:54.850656 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network 2025-12-12T16:26:54.986286580+00:00 stderr F I1212 16:26:54.985293 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:55.068857366+00:00 stderr F I1212 16:26:55.066136 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network was successful 2025-12-12T16:26:55.068857366+00:00 stderr F I1212 16:26:55.066220 1 log.go:245] reconciling (/v1, 
Kind=Service) openshift-multus/network-metrics-service 2025-12-12T16:26:55.194233384+00:00 stderr F I1212 16:26:55.190409 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:26:55.239147418+00:00 stderr F I1212 16:26:55.237937 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/network-metrics-service was successful 2025-12-12T16:26:55.239147418+00:00 stderr F I1212 16:26:55.238003 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:26:55.386258425+00:00 stderr F I1212 16:26:55.385617 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:55.456319755+00:00 stderr F I1212 16:26:55.454667 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:26:55.456319755+00:00 stderr F I1212 16:26:55.454731 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:26:55.584992256+00:00 stderr F I1212 16:26:55.583337 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:55.632002713+00:00 stderr F I1212 16:26:55.631927 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:26:55.632002713+00:00 stderr F I1212 16:26:55.631993 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/multus-admission-controller 2025-12-12T16:26:55.766951523+00:00 stderr F I1212 16:26:55.766866 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:55.838804318+00:00 stderr F I1212 16:26:55.838738 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/multus-admission-controller was successful 2025-12-12T16:26:55.838930941+00:00 stderr F I1212 16:26:55.838920 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ac 2025-12-12T16:26:55.987485834+00:00 stderr F I1212 16:26:55.987431 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:56.038073612+00:00 stderr F I1212 16:26:56.034059 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ac was successful 2025-12-12T16:26:56.038073612+00:00 stderr F I1212 16:26:56.034105 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook 2025-12-12T16:26:56.172014526+00:00 stderr F I1212 16:26:56.171964 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:56.237648254+00:00 stderr F I1212 16:26:56.237596 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook was successful 2025-12-12T16:26:56.237747187+00:00 stderr F I1212 16:26:56.237738 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook 
2025-12-12T16:26:56.374274486+00:00 stderr F I1212 16:26:56.371685 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:56.434501037+00:00 stderr F I1212 16:26:56.433779 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook was successful 2025-12-12T16:26:56.434501037+00:00 stderr F I1212 16:26:56.433835 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io 2025-12-12T16:26:56.568537034+00:00 stderr F I1212 16:26:56.568471 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:26:56.635536046+00:00 stderr F I1212 16:26:56.635429 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io was successful 2025-12-12T16:26:56.635536046+00:00 stderr F I1212 16:26:56.635511 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller 2025-12-12T16:26:56.767800838+00:00 stderr F I1212 16:26:56.767696 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:26:56.838085403+00:00 stderr F I1212 16:26:56.837983 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller was successful 2025-12-12T16:26:56.838085403+00:00 stderr F I1212 16:26:56.838069 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller 2025-12-12T16:26:56.970429957+00:00 stderr F I1212 16:26:56.970284 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:57.036087316+00:00 stderr F I1212 16:26:57.035989 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller was successful 2025-12-12T16:26:57.036087316+00:00 stderr F I1212 16:26:57.036057 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:26:57.171968169+00:00 stderr F I1212 16:26:57.170115 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:57.236222932+00:00 stderr F I1212 16:26:57.235575 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:26:57.236222932+00:00 stderr F I1212 16:26:57.235640 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:26:57.368228857+00:00 stderr F I1212 16:26:57.367522 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:26:57.435237000+00:00 stderr F I1212 16:26:57.434454 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:26:57.435237000+00:00 stderr F I1212 16:26:57.434529 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) 
openshift-multus/prometheus-k8s-rules 2025-12-12T16:26:57.567919812+00:00 stderr F I1212 16:26:57.567850 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:57.647725708+00:00 stderr F I1212 16:26:57.647654 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules was successful 2025-12-12T16:26:57.647725708+00:00 stderr F I1212 16:26:57.647715 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-ovn-kubernetes 2025-12-12T16:26:57.770593452+00:00 stderr F I1212 16:26:57.767424 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:57.846016426+00:00 stderr F I1212 16:26:57.844710 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-ovn-kubernetes was successful 2025-12-12T16:26:57.846016426+00:00 stderr F I1212 16:26:57.844765 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org 2025-12-12T16:26:57.971225460+00:00 stderr F I1212 16:26:57.971088 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:58.048862971+00:00 stderr F I1212 16:26:58.048796 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org was successful 2025-12-12T16:26:58.048914102+00:00 stderr F I1212 16:26:58.048863 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org 2025-12-12T16:26:58.172062654+00:00 stderr F I1212 16:26:58.171983 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:58.241545269+00:00 stderr F I1212 16:26:58.241479 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org was successful 2025-12-12T16:26:58.241580970+00:00 stderr F I1212 16:26:58.241540 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org 2025-12-12T16:26:58.370159429+00:00 stderr F I1212 16:26:58.369774 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:58.445432674+00:00 stderr F I1212 16:26:58.445318 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org was successful 2025-12-12T16:26:58.445432674+00:00 stderr F I1212 16:26:58.445391 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org 2025-12-12T16:26:58.568498799+00:00 stderr F I1212 16:26:58.567805 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:58.666686614+00:00 stderr F I1212 16:26:58.666616 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org was successful 2025-12-12T16:26:58.666686614+00:00 stderr F I1212 16:26:58.666670 1 log.go:245] reconciling (apiextensions.k8s.io/v1, 
Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org 2025-12-12T16:26:58.770082291+00:00 stderr F I1212 16:26:58.770025 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:26:58.849458850+00:00 stderr F I1212 16:26:58.849367 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org was successful 2025-12-12T16:26:58.849458850+00:00 stderr F I1212 16:26:58.849441 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:26:58.969424576+00:00 stderr F I1212 16:26:58.968220 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:26:59.132423371+00:00 stderr F I1212 16:26:59.131088 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:26:59.132423371+00:00 stderr F I1212 16:26:59.131163 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:26:59.191633330+00:00 stderr F I1212 16:26:59.189926 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:59.306407164+00:00 stderr F I1212 16:26:59.306333 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:26:59.306407164+00:00 stderr F I1212 16:26:59.306383 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io 2025-12-12T16:26:59.370019034+00:00 stderr F I1212 16:26:59.369930 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:26:59.438545279+00:00 stderr F I1212 16:26:59.438470 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io was successful 2025-12-12T16:26:59.438545279+00:00 stderr F I1212 16:26:59.438525 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org 2025-12-12T16:26:59.570889358+00:00 stderr F I1212 16:26:59.570801 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:26:59.671810482+00:00 stderr F I1212 16:26:59.671749 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:26:59.671872444+00:00 stderr F I1212 16:26:59.671817 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org 2025-12-12T16:26:59.770229323+00:00 stderr F I1212 16:26:59.770160 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:26:59.875250321+00:00 stderr F I1212 16:26:59.874571 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) 
/clusteruserdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:26:59.875250321+00:00 stderr F I1212 16:26:59.874622 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:26:59.968056380+00:00 stderr F I1212 16:26:59.967996 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:00.043253043+00:00 stderr F I1212 16:27:00.039702 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:27:00.043253043+00:00 stderr F I1212 16:27:00.039762 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited 2025-12-12T16:27:00.166230446+00:00 stderr F I1212 16:27:00.166136 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:00.232263667+00:00 stderr F I1212 16:27:00.232051 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:27:00.232263667+00:00 stderr F I1212 16:27:00.232111 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited 2025-12-12T16:27:00.374136097+00:00 stderr F I1212 16:27:00.374073 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:00.436928677+00:00 stderr F I1212 16:27:00.436866 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited was successful 2025-12-12T16:27:00.436986818+00:00 stderr F I1212 16:27:00.436925 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited 2025-12-12T16:27:00.570503767+00:00 stderr F I1212 16:27:00.570293 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:00.638522839+00:00 stderr F I1212 16:27:00.636794 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:27:00.638522839+00:00 stderr F I1212 16:27:00.636863 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited 2025-12-12T16:27:00.769031112+00:00 stderr F I1212 16:27:00.767411 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:00.839878485+00:00 stderr F I1212 16:27:00.837429 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited was successful 2025-12-12T16:27:00.839878485+00:00 stderr F I1212 16:27:00.837478 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy 2025-12-12T16:27:00.979493018+00:00 stderr F I1212 16:27:00.978831 1 log.go:245] The check 
PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:27:01.038448180+00:00 stderr F I1212 16:27:01.036825 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy was successful 2025-12-12T16:27:01.038448180+00:00 stderr F I1212 16:27:01.036894 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy 2025-12-12T16:27:01.168800200+00:00 stderr F I1212 16:27:01.168737 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:27:01.237620641+00:00 stderr F I1212 16:27:01.236749 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy was successful 2025-12-12T16:27:01.237620641+00:00 stderr F I1212 16:27:01.236812 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config 2025-12-12T16:27:01.377720927+00:00 stderr F I1212 16:27:01.377405 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:01.435272213+00:00 stderr F I1212 16:27:01.433336 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config was successful 2025-12-12T16:27:01.435272213+00:00 stderr F I1212 16:27:01.433394 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:27:01.570021883+00:00 stderr F I1212 16:27:01.569953 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:01.633523540+00:00 stderr F I1212 16:27:01.633447 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:27:01.633523540+00:00 stderr F I1212 16:27:01.633498 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:27:01.840283943+00:00 stderr F I1212 16:27:01.840207 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:27:01.840283943+00:00 stderr F I1212 16:27:01.840267 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:27:02.038072269+00:00 stderr F I1212 16:27:02.037980 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:27:02.038072269+00:00 stderr F I1212 16:27:02.038058 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:27:02.234800018+00:00 stderr F I1212 16:27:02.234642 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:27:02.234800018+00:00 stderr F I1212 16:27:02.234721 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) 
openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:27:02.434400249+00:00 stderr F I1212 16:27:02.432704 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:27:02.434400249+00:00 stderr F I1212 16:27:02.432774 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn 2025-12-12T16:27:02.640026433+00:00 stderr F I1212 16:27:02.639934 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn was successful 2025-12-12T16:27:02.640026433+00:00 stderr F I1212 16:27:02.640002 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer 2025-12-12T16:27:02.832236278+00:00 stderr F I1212 16:27:02.831848 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer was successful 2025-12-12T16:27:02.832236278+00:00 stderr F I1212 16:27:02.831912 1 log.go:245] reconciling (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes 2025-12-12T16:27:02.925246312+00:00 stderr F I1212 16:27:02.925035 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:27:02.933855330+00:00 stderr F I1212 16:27:02.933798 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:02.938854717+00:00 stderr F I1212 16:27:02.938815 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:02.947798493+00:00 stderr F I1212 16:27:02.947692 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:02.960258678+00:00 stderr F I1212 16:27:02.956354 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:02.969157954+00:00 stderr F I1212 16:27:02.969073 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:02.986156224+00:00 stderr F I1212 16:27:02.986089 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:03.052677758+00:00 stderr F I1212 16:27:03.052217 1 log.go:245] Apply / Create of (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes was successful 2025-12-12T16:27:03.052677758+00:00 stderr F I1212 16:27:03.052278 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader 2025-12-12T16:27:03.169986097+00:00 stderr F I1212 16:27:03.169751 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:27:03.233376771+00:00 stderr F I1212 16:27:03.233305 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) 
/openshift-ovn-kubernetes-cluster-reader was successful 2025-12-12T16:27:03.233376771+00:00 stderr F I1212 16:27:03.233360 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib 2025-12-12T16:27:03.389260566+00:00 stderr F I1212 16:27:03.386706 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:27:03.450265920+00:00 stderr F I1212 16:27:03.450010 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib was successful 2025-12-12T16:27:03.450265920+00:00 stderr F I1212 16:27:03.450083 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor 2025-12-12T16:27:03.593942237+00:00 stderr F I1212 16:27:03.593464 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:03.648722383+00:00 stderr F I1212 16:27:03.647909 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor was successful 2025-12-12T16:27:03.648722383+00:00 stderr F I1212 16:27:03.648703 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer 2025-12-12T16:27:03.770277369+00:00 stderr F I1212 16:27:03.770006 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:03.833701644+00:00 stderr F I1212 16:27:03.833626 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer was successful 2025-12-12T16:27:03.833773756+00:00 stderr F I1212 16:27:03.833698 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules 2025-12-12T16:27:03.992886533+00:00 stderr F I1212 16:27:03.992811 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:27:04.051264641+00:00 stderr F I1212 16:27:04.051069 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules was successful 2025-12-12T16:27:04.051401634+00:00 stderr F I1212 16:27:04.051378 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules 2025-12-12T16:27:04.183248831+00:00 stderr F I1212 16:27:04.179304 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:04.241191858+00:00 stderr F I1212 16:27:04.241082 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules was successful 2025-12-12T16:27:04.241260289+00:00 stderr F I1212 16:27:04.241168 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features 2025-12-12T16:27:04.392628250+00:00 stderr F I1212 16:27:04.392550 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:04.441349043+00:00 stderr F I1212 16:27:04.441165 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) 
openshift-config-managed/openshift-network-features was successful 2025-12-12T16:27:04.441349043+00:00 stderr F I1212 16:27:04.441313 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics 2025-12-12T16:27:04.580229468+00:00 stderr F I1212 16:27:04.579965 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:04.637232141+00:00 stderr F I1212 16:27:04.635762 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics was successful 2025-12-12T16:27:04.637232141+00:00 stderr F I1212 16:27:04.635830 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:27:04.776476055+00:00 stderr F I1212 16:27:04.773005 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:04.837404627+00:00 stderr F I1212 16:27:04.834965 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:27:04.837404627+00:00 stderr F I1212 16:27:04.835027 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node 2025-12-12T16:27:04.978272021+00:00 stderr F I1212 16:27:04.971082 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:05.035315695+00:00 stderr F I1212 16:27:05.034424 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node was successful 2025-12-12T16:27:05.035315695+00:00 stderr F I1212 16:27:05.034476 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:27:05.167255274+00:00 stderr F I1212 16:27:05.166106 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:05.236479276+00:00 stderr F I1212 16:27:05.236436 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:27:05.236585459+00:00 stderr F I1212 16:27:05.236575 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:27:05.379415874+00:00 stderr F I1212 16:27:05.379364 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:27:05.441306940+00:00 stderr F I1212 16:27:05.439586 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:27:05.441306940+00:00 stderr F I1212 16:27:05.439636 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:27:05.590285900+00:00 stderr F I1212 16:27:05.589747 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:27:05.635084084+00:00 stderr F I1212 16:27:05.634970 1 log.go:245] Apply / Create 
of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:27:05.635123935+00:00 stderr F I1212 16:27:05.635055 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-host-network 2025-12-12T16:27:05.780668569+00:00 stderr F I1212 16:27:05.779450 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:05.837420645+00:00 stderr F I1212 16:27:05.836283 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-host-network was successful 2025-12-12T16:27:05.837420645+00:00 stderr F I1212 16:27:05.836343 1 log.go:245] reconciling (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas 2025-12-12T16:27:05.967508078+00:00 stderr F I1212 16:27:05.967290 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:06.048387955+00:00 stderr F I1212 16:27:06.046223 1 log.go:245] Apply / Create of (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas was successful 2025-12-12T16:27:06.048387955+00:00 stderr F I1212 16:27:06.046306 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane 2025-12-12T16:27:06.173857520+00:00 stderr F I1212 16:27:06.173784 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:27:06.241192374+00:00 stderr F I1212 16:27:06.241125 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane was successful 2025-12-12T16:27:06.242649061+00:00 stderr F I1212 16:27:06.242572 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node 2025-12-12T16:27:06.377502464+00:00 stderr F I1212 16:27:06.374851 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:06.466410024+00:00 stderr F I1212 16:27:06.465847 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node was successful 2025-12-12T16:27:06.466410024+00:00 stderr F I1212 16:27:06.465912 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label 2025-12-12T16:27:06.578271566+00:00 stderr F I1212 16:27:06.578009 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:06.639214548+00:00 stderr F I1212 16:27:06.638671 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label was successful 2025-12-12T16:27:06.639214548+00:00 stderr F I1212 16:27:06.638725 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding 2025-12-12T16:27:06.779600901+00:00 stderr F I1212 16:27:06.779481 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:06.833198388+00:00 stderr F I1212 16:27:06.833089 1 log.go:245] Apply / Create of 
(admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding was successful 2025-12-12T16:27:06.833198388+00:00 stderr F I1212 16:27:06.833143 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-diagnostics 2025-12-12T16:27:06.968860501+00:00 stderr F I1212 16:27:06.968797 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:07.038108934+00:00 stderr F I1212 16:27:07.038029 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-diagnostics was successful 2025-12-12T16:27:07.038108934+00:00 stderr F I1212 16:27:07.038080 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:27:07.171379397+00:00 stderr F I1212 16:27:07.171303 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:07.237474340+00:00 stderr F I1212 16:27:07.237396 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:27:07.237474340+00:00 stderr F I1212 16:27:07.237446 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:27:07.371370498+00:00 stderr F I1212 16:27:07.368634 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:07.443312099+00:00 stderr F I1212 16:27:07.439620 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:27:07.443312099+00:00 stderr F I1212 16:27:07.439668 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:27:07.582522912+00:00 stderr F I1212 16:27:07.581709 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:27:07.648512332+00:00 stderr F I1212 16:27:07.645569 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:27:07.648512332+00:00 stderr F I1212 16:27:07.645619 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics 2025-12-12T16:27:07.771328521+00:00 stderr F I1212 16:27:07.771250 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:27:07.846416851+00:00 stderr F I1212 16:27:07.841760 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics was successful 2025-12-12T16:27:07.846416851+00:00 stderr F I1212 16:27:07.841822 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics 2025-12-12T16:27:07.973770534+00:00 stderr F I1212 16:27:07.973694 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:08.037303232+00:00 stderr F I1212 16:27:08.034948 1 log.go:245] Apply / 
Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics was successful 2025-12-12T16:27:08.037303232+00:00 stderr F I1212 16:27:08.034996 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics 2025-12-12T16:27:08.173320915+00:00 stderr F I1212 16:27:08.173252 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:08.237104039+00:00 stderr F I1212 16:27:08.235511 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics was successful 2025-12-12T16:27:08.237104039+00:00 stderr F I1212 16:27:08.235569 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source 2025-12-12T16:27:08.372839074+00:00 stderr F I1212 16:27:08.372761 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:27:08.440537447+00:00 stderr F I1212 16:27:08.438601 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:27:08.440537447+00:00 stderr F I1212 16:27:08.438664 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-source 2025-12-12T16:27:08.567941861+00:00 stderr F I1212 16:27:08.567868 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:08.637204734+00:00 stderr F I1212 16:27:08.637061 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:27:08.637204734+00:00 stderr F I1212 16:27:08.637128 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source 2025-12-12T16:27:08.774850558+00:00 stderr F I1212 16:27:08.774385 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:08.837878053+00:00 stderr F I1212 16:27:08.837782 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:27:08.837878053+00:00 stderr F I1212 16:27:08.837840 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:27:08.966588300+00:00 stderr F I1212 16:27:08.966161 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:09.032348455+00:00 stderr F I1212 16:27:09.031211 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:27:09.032348455+00:00 stderr F I1212 16:27:09.031256 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:27:09.170813519+00:00 stderr F I1212 16:27:09.170733 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:09.237927657+00:00 stderr F I1212 
16:27:09.237334 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:27:09.237927657+00:00 stderr F I1212 16:27:09.237389 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target 2025-12-12T16:27:09.376116415+00:00 stderr F I1212 16:27:09.376034 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:09.438622437+00:00 stderr F I1212 16:27:09.436795 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:27:09.438622437+00:00 stderr F I1212 16:27:09.436862 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-target 2025-12-12T16:27:09.569229792+00:00 stderr F I1212 16:27:09.569049 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:09.642313682+00:00 stderr F I1212 16:27:09.637611 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:27:09.642313682+00:00 stderr F I1212 16:27:09.637682 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role 2025-12-12T16:27:09.767511551+00:00 stderr F I1212 16:27:09.766769 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:27:09.836243420+00:00 stderr F I1212 16:27:09.834463 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role was successful 2025-12-12T16:27:09.836243420+00:00 stderr F I1212 16:27:09.834528 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding 2025-12-12T16:27:09.967986894+00:00 stderr F I1212 16:27:09.967895 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:27:10.034245381+00:00 stderr F I1212 16:27:10.033238 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding was successful 2025-12-12T16:27:10.034245381+00:00 stderr F I1212 16:27:10.033305 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-node-identity 2025-12-12T16:27:10.171249929+00:00 stderr F I1212 16:27:10.169131 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:10.243635491+00:00 stderr F I1212 16:27:10.243366 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-node-identity was successful 2025-12-12T16:27:10.243635491+00:00 stderr F I1212 16:27:10.243439 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity 2025-12-12T16:27:10.369556678+00:00 stderr F I1212 16:27:10.369476 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 
2025-12-12T16:27:10.434033059+00:00 stderr F I1212 16:27:10.433959 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:27:10.434033059+00:00 stderr F I1212 16:27:10.434008 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity 2025-12-12T16:27:10.572504864+00:00 stderr F I1212 16:27:10.572424 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:27:10.638650328+00:00 stderr F I1212 16:27:10.636733 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity was successful 2025-12-12T16:27:10.638650328+00:00 stderr F I1212 16:27:10.636807 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity 2025-12-12T16:27:10.775265196+00:00 stderr F I1212 16:27:10.772726 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:10.835302535+00:00 stderr F I1212 16:27:10.832446 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity was successful 2025-12-12T16:27:10.835302535+00:00 stderr F I1212 16:27:10.832497 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:27:10.974231201+00:00 stderr F I1212 16:27:10.972110 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:11.033240145+00:00 stderr F I1212 16:27:11.032371 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:27:11.033240145+00:00 stderr F I1212 16:27:11.032434 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:27:11.167911493+00:00 stderr F I1212 16:27:11.167848 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:11.234500568+00:00 stderr F I1212 16:27:11.234438 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:27:11.234532629+00:00 stderr F I1212 16:27:11.234502 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 2025-12-12T16:27:11.366661573+00:00 stderr F I1212 16:27:11.366595 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:11.435103425+00:00 stderr F I1212 16:27:11.435036 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 was successful 2025-12-12T16:27:11.435152557+00:00 stderr F I1212 16:27:11.435112 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm 2025-12-12T16:27:11.580110185+00:00 stderr F I1212 
16:27:11.578006 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:11.632922002+00:00 stderr F I1212 16:27:11.632209 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm was successful 2025-12-12T16:27:11.632922002+00:00 stderr F I1212 16:27:11.632265 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity 2025-12-12T16:27:11.767677942+00:00 stderr F I1212 16:27:11.767604 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:11.836227247+00:00 stderr F I1212 16:27:11.836017 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:27:11.836227247+00:00 stderr F I1212 16:27:11.836082 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io 2025-12-12T16:27:11.969931121+00:00 stderr F I1212 16:27:11.969857 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:27:12.033509090+00:00 stderr F I1212 16:27:12.033119 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io was successful 2025-12-12T16:27:12.033509090+00:00 stderr F I1212 16:27:12.033226 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity 2025-12-12T16:27:12.168804623+00:00 stderr F I1212 16:27:12.168282 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:27:12.244817667+00:00 stderr F I1212 16:27:12.244758 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:27:12.244855438+00:00 stderr F I1212 16:27:12.244819 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules 2025-12-12T16:27:12.370249502+00:00 stderr F I1212 16:27:12.367343 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:12.436862568+00:00 stderr F I1212 16:27:12.435086 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules was successful 2025-12-12T16:27:12.436862568+00:00 stderr F I1212 16:27:12.435141 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter 2025-12-12T16:27:12.573144317+00:00 stderr F I1212 16:27:12.573072 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:12.634733586+00:00 stderr F I1212 16:27:12.634640 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter was successful 2025-12-12T16:27:12.634733586+00:00 stderr F I1212 16:27:12.634710 1 log.go:245] 
reconciling (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter 2025-12-12T16:27:12.769389053+00:00 stderr F I1212 16:27:12.769270 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:27:12.834871871+00:00 stderr F I1212 16:27:12.834793 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:27:12.834871871+00:00 stderr F I1212 16:27:12.834846 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter 2025-12-12T16:27:12.970195766+00:00 stderr F I1212 16:27:12.969858 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:13.034476503+00:00 stderr F I1212 16:27:13.034420 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter was successful 2025-12-12T16:27:13.034513593+00:00 stderr F I1212 16:27:13.034482 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script 2025-12-12T16:27:13.170242699+00:00 stderr F I1212 16:27:13.169618 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:13.233714635+00:00 stderr F I1212 16:27:13.233638 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script was successful 2025-12-12T16:27:13.233714635+00:00 stderr F I1212 16:27:13.233693 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter 2025-12-12T16:27:13.369626985+00:00 stderr F I1212 16:27:13.369425 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:13.437725318+00:00 stderr F I1212 16:27:13.437654 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:27:13.437725318+00:00 stderr F I1212 16:27:13.437703 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-console 2025-12-12T16:27:13.569393631+00:00 stderr F I1212 16:27:13.569317 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:13.636527620+00:00 stderr F I1212 16:27:13.636453 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-console was successful 2025-12-12T16:27:13.636527620+00:00 stderr F I1212 16:27:13.636516 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin 2025-12-12T16:27:13.775787754+00:00 stderr F I1212 16:27:13.775725 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:13.831758651+00:00 stderr F I1212 16:27:13.831658 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:27:13.831758651+00:00 stderr F I1212 16:27:13.831715 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin 
2025-12-12T16:27:13.965602578+00:00 stderr F I1212 16:27:13.965436 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:14.039464738+00:00 stderr F I1212 16:27:14.039394 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:27:14.039464738+00:00 stderr F I1212 16:27:14.039456 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-console/networking-console-plugin 2025-12-12T16:27:14.167977100+00:00 stderr F I1212 16:27:14.167803 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:27:14.233275153+00:00 stderr F I1212 16:27:14.233010 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:27:14.233275153+00:00 stderr F I1212 16:27:14.233056 1 log.go:245] reconciling (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin 2025-12-12T16:27:14.369202183+00:00 stderr F I1212 16:27:14.369120 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:27:14.433739486+00:00 stderr F I1212 16:27:14.433668 1 log.go:245] Apply / Create of (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin was successful 2025-12-12T16:27:14.453227619+00:00 stderr F I1212 16:27:14.449799 1 log.go:245] Operconfig Controller complete 2025-12-12T16:27:14.568442345+00:00 stderr F I1212 16:27:14.568352 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:14.769454023+00:00 stderr F I1212 16:27:14.769355 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:14.968426838+00:00 stderr F I1212 16:27:14.968351 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:27:15.166992884+00:00 stderr F I1212 16:27:15.166463 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:15.366935434+00:00 stderr F I1212 16:27:15.366873 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:15.567373747+00:00 stderr F I1212 16:27:15.567281 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:15.768687241+00:00 stderr F I1212 16:27:15.768369 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:15.970763615+00:00 stderr F I1212 16:27:15.968990 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:16.166998112+00:00 stderr F I1212 16:27:16.166565 1 log.go:245] 
The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:16.367892256+00:00 stderr F I1212 16:27:16.366914 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:27:16.566312308+00:00 stderr F I1212 16:27:16.566222 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:27:16.770526747+00:00 stderr F I1212 16:27:16.770465 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:16.966075786+00:00 stderr F I1212 16:27:16.966011 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:17.261736118+00:00 stderr F I1212 16:27:17.260016 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:27:17.370575893+00:00 stderr F I1212 16:27:17.370341 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:17.566582974+00:00 stderr F I1212 16:27:17.566487 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:17.781450852+00:00 stderr F I1212 16:27:17.780823 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:17.968472565+00:00 stderr F I1212 16:27:17.968398 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:18.178259475+00:00 stderr F I1212 16:27:18.176047 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:18.370232313+00:00 stderr F I1212 16:27:18.369657 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:18.569222310+00:00 stderr F I1212 16:27:18.568562 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:27:18.767766394+00:00 stderr F I1212 16:27:18.767703 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:27:18.966476174+00:00 stderr F I1212 16:27:18.966419 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:19.167250165+00:00 stderr F I1212 16:27:19.167142 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:19.368240881+00:00 stderr F I1212 16:27:19.368133 1 
log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:27:19.571172507+00:00 stderr F I1212 16:27:19.571067 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:19.767550327+00:00 stderr F I1212 16:27:19.767452 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:19.970911454+00:00 stderr F I1212 16:27:19.970807 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:20.167507169+00:00 stderr F I1212 16:27:20.167404 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:20.368828904+00:00 stderr F I1212 16:27:20.368744 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:20.567016230+00:00 stderr F I1212 16:27:20.566936 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:20.770013488+00:00 stderr F I1212 16:27:20.769940 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:27:20.968625725+00:00 stderr F I1212 16:27:20.968520 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:27:21.168487753+00:00 stderr F I1212 16:27:21.168413 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:21.368697320+00:00 stderr F I1212 16:27:21.368616 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:22.099157517+00:00 stderr F I1212 16:27:22.099061 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:27:22.111242623+00:00 stderr F I1212 16:27:22.108205 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:22.139064337+00:00 stderr F I1212 16:27:22.138896 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:22.176389841+00:00 stderr F I1212 16:27:22.176078 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:22.367387635+00:00 stderr F I1212 16:27:22.367306 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 
2025-12-12T16:27:22.570997049+00:00 stderr F I1212 16:27:22.570902 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:22.767480411+00:00 stderr F I1212 16:27:22.767360 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:22.967366439+00:00 stderr F I1212 16:27:22.967306 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:27:23.168230823+00:00 stderr F I1212 16:27:23.167411 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:27:23.367335602+00:00 stderr F I1212 16:27:23.367085 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:23.566982865+00:00 stderr F I1212 16:27:23.566893 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:23.770714331+00:00 stderr F I1212 16:27:23.770613 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:27:23.971641886+00:00 stderr F I1212 16:27:23.971506 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:24.167618156+00:00 stderr F I1212 16:27:24.167519 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:24.366876129+00:00 stderr F I1212 16:27:24.366803 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:24.570854712+00:00 stderr F I1212 16:27:24.570031 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:24.769596721+00:00 stderr F I1212 16:27:24.769507 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:27:24.966578487+00:00 stderr F I1212 16:27:24.966467 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:27:25.166662831+00:00 stderr F I1212 16:27:25.166596 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:27:25.367370910+00:00 stderr F I1212 16:27:25.367299 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:27:25.568437949+00:00 stderr F I1212 16:27:25.568353 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n 
openshift-network-diagnostics is applied 2025-12-12T16:27:25.771153280+00:00 stderr F I1212 16:27:25.770986 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:24.041240747+00:00 stderr F I1212 16:28:24.040457 1 log.go:245] Reconciling update to IngressController openshift-ingress-operator/default 2025-12-12T16:28:46.330082520+00:00 stderr F I1212 16:28:46.329092 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:28:46.336168334+00:00 stderr F I1212 16:28:46.336110 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:46.343328265+00:00 stderr F I1212 16:28:46.343292 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:46.351737668+00:00 stderr F I1212 16:28:46.351641 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:46.358155911+00:00 stderr F I1212 16:28:46.358103 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:46.365128427+00:00 stderr F I1212 16:28:46.365068 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:46.371535299+00:00 stderr F I1212 16:28:46.371492 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:46.383119352+00:00 stderr F I1212 16:28:46.382988 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:28:46.392564552+00:00 stderr F I1212 16:28:46.392482 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:28:46.399365244+00:00 stderr F I1212 16:28:46.399314 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:46.529782764+00:00 stderr F I1212 16:28:46.529669 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:46.765113590+00:00 stderr F I1212 16:28:46.765025 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:28:50.326090594+00:00 stderr F I1212 16:28:50.326025 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:28:50.331694986+00:00 stderr F I1212 16:28:50.331653 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:50.337322798+00:00 stderr F I1212 
16:28:50.337289 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:50.343661289+00:00 stderr F I1212 16:28:50.343586 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:50.349162048+00:00 stderr F I1212 16:28:50.349119 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:50.357058708+00:00 stderr F I1212 16:28:50.357004 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:50.363256224+00:00 stderr F I1212 16:28:50.363230 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:50.370226431+00:00 stderr F I1212 16:28:50.370129 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:28:50.379840454+00:00 stderr F I1212 16:28:50.379773 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:28:50.387781135+00:00 stderr F I1212 16:28:50.387750 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:50.525547202+00:00 stderr F I1212 16:28:50.525484 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:51.345642037+00:00 stderr F I1212 16:28:51.345089 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:28:51.354546733+00:00 stderr F I1212 16:28:51.354459 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:51.361201901+00:00 stderr F I1212 16:28:51.361125 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:51.369400959+00:00 stderr F I1212 16:28:51.369241 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:51.526676609+00:00 stderr F I1212 16:28:51.526587 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:51.732354025+00:00 stderr F I1212 16:28:51.726477 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:51.926724134+00:00 stderr F I1212 16:28:51.926649 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 
2025-12-12T16:28:52.129784163+00:00 stderr F I1212 16:28:52.129696 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:28:52.328463602+00:00 stderr F I1212 16:28:52.328386 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:28:52.526240946+00:00 stderr F I1212 16:28:52.526161 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:52.730115756+00:00 stderr F I1212 16:28:52.729990 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:52.928067536+00:00 stderr F I1212 16:28:52.927911 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:28:53.143169360+00:00 stderr F I1212 16:28:53.143103 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:53.325208917+00:00 stderr F I1212 16:28:53.325088 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:53.524565362+00:00 stderr F I1212 16:28:53.524504 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:53.724643446+00:00 stderr F I1212 16:28:53.724552 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:53.926530875+00:00 stderr F I1212 16:28:53.926448 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:54.126205619+00:00 stderr F I1212 16:28:54.126084 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:54.325632266+00:00 stderr F I1212 16:28:54.325476 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:28:54.526602422+00:00 stderr F I1212 16:28:54.526526 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:28:54.728972514+00:00 stderr F I1212 16:28:54.728888 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:54.928337369+00:00 stderr F I1212 16:28:54.928208 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:56.428748182+00:00 stderr F I1212 16:28:56.428674 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n 
openshift-network-diagnostics is applied 2025-12-12T16:28:56.436235212+00:00 stderr F I1212 16:28:56.436119 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:56.443885975+00:00 stderr F I1212 16:28:56.443800 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:56.450047241+00:00 stderr F I1212 16:28:56.449957 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:56.455872699+00:00 stderr F I1212 16:28:56.455804 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:56.464660351+00:00 stderr F I1212 16:28:56.464566 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:56.470809187+00:00 stderr F I1212 16:28:56.470726 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:56.527279156+00:00 stderr F I1212 16:28:56.526934 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:28:56.725124653+00:00 stderr F I1212 16:28:56.725068 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:28:56.925647589+00:00 stderr F I1212 16:28:56.925520 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:57.125553218+00:00 stderr F I1212 16:28:57.125477 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:57.556837963+00:00 stderr F I1212 16:28:57.556768 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:28:57.562414124+00:00 stderr F I1212 16:28:57.562383 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:57.727074342+00:00 stderr F I1212 16:28:57.727007 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:57.925984966+00:00 stderr F I1212 16:28:57.925928 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:58.128209054+00:00 stderr F I1212 16:28:58.128102 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:58.329613371+00:00 stderr F I1212 16:28:58.329489 1 log.go:245] The check 
PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:58.525424766+00:00 stderr F I1212 16:28:58.525308 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:58.729437180+00:00 stderr F I1212 16:28:58.729297 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:28:58.930667193+00:00 stderr F I1212 16:28:58.930550 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:28:59.127143505+00:00 stderr F I1212 16:28:59.127033 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:59.327634380+00:00 stderr F I1212 16:28:59.327513 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:28:59.528147504+00:00 stderr F I1212 16:28:59.527987 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:28:59.725821987+00:00 stderr F I1212 16:28:59.725741 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:28:59.925718236+00:00 stderr F I1212 16:28:59.925636 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:29:00.127061571+00:00 stderr F I1212 16:29:00.126958 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:29:00.324979130+00:00 stderr F I1212 16:29:00.324813 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:29:00.526758517+00:00 stderr F I1212 16:29:00.526681 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:29:00.725122638+00:00 stderr F I1212 16:29:00.725032 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:29:00.929112651+00:00 stderr F I1212 16:29:00.929014 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:29:01.126116456+00:00 stderr F I1212 16:29:01.126028 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:29:01.328165540+00:00 stderr F I1212 16:29:01.327529 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:29:01.525081774+00:00 stderr F I1212 16:29:01.525004 1 
log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:29:01.726688066+00:00 stderr F I1212 16:29:01.726586 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:29:01.927080569+00:00 stderr F I1212 16:29:01.927009 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:29:02.128336523+00:00 stderr F I1212 16:29:02.128225 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:29:02.327283658+00:00 stderr F I1212 16:29:02.327168 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:29:02.527233499+00:00 stderr F I1212 16:29:02.527082 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:29:02.728005390+00:00 stderr F I1212 16:29:02.727928 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:29:02.927269203+00:00 stderr F I1212 16:29:02.927001 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:29:03.126533626+00:00 stderr F I1212 16:29:03.126464 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:29:03.327658536+00:00 stderr F I1212 16:29:03.327575 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:29:03.527927875+00:00 stderr F I1212 16:29:03.527821 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:29:03.728225294+00:00 stderr F I1212 16:29:03.725460 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:30:14.450524102+00:00 stderr F I1212 16:30:14.450419 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:30:14.605335847+00:00 stderr F I1212 16:30:14.605051 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:30:14.612501725+00:00 stderr F I1212 16:30:14.612415 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:30:14.618731821+00:00 stderr F I1212 16:30:14.617472 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:30:14.623230333+00:00 stderr F I1212 16:30:14.619943 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc00418e6c0 DisableUDPAggregation:false DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: 
DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:30:14.626039453+00:00 stderr F I1212 16:30:14.625399 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 4 -> 4 2025-12-12T16:30:14.626039453+00:00 stderr F I1212 16:30:14.625430 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:30:14.626039453+00:00 stderr F I1212 16:30:14.625441 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:30:14.627876439+00:00 stderr F I1212 16:30:14.627843 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 3 -> 3 2025-12-12T16:30:14.627876439+00:00 stderr F I1212 16:30:14.627864 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:30:14.627876439+00:00 stderr F I1212 16:30:14.627869 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 3 -> 3 2025-12-12T16:30:14.627900450+00:00 stderr F I1212 16:30:14.627873 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:30:14.627907980+00:00 stderr F I1212 16:30:14.627899 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:30:14.631734116+00:00 stderr F I1212 16:30:14.631688 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:30:14.631734116+00:00 stderr F I1212 16:30:14.631710 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:30:14.669366705+00:00 stderr F I1212 16:30:14.669303 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:30:14.682469901+00:00 stderr F I1212 16:30:14.682360 1 log.go:245] Apply / Create of (operator.openshift.io/v1, Kind=Network) /cluster was successful 2025-12-12T16:30:14.682469901+00:00 stderr F I1212 16:30:14.682408 1 log.go:245] Starting render phase 2025-12-12T16:30:14.691572789+00:00 stderr F I1212 16:30:14.691480 1 ovn_kubernetes.go:344] OVN_EGRESSIP_HEALTHCHECK_PORT env var is not defined. 
Using: 9107 2025-12-12T16:30:14.723830964+00:00 stderr F I1212 16:30:14.723763 1 ovn_kubernetes.go:1457] IP family mode: node=single-stack, controlPlane=single-stack 2025-12-12T16:30:14.723830964+00:00 stderr F I1212 16:30:14.723787 1 ovn_kubernetes.go:1429] IP family change: updateNode=true, updateControlPlane=true 2025-12-12T16:30:14.723830964+00:00 stderr F I1212 16:30:14.723809 1 ovn_kubernetes.go:1601] OVN-Kubernetes control-plane and node already at release version 4.20.1; no changes required 2025-12-12T16:30:14.723880415+00:00 stderr F I1212 16:30:14.723835 1 ovn_kubernetes.go:531] ovnk components: ovnkube-node: isRunning=true, update=true; ovnkube-control-plane: isRunning=true, update=true 2025-12-12T16:30:14.871827139+00:00 stderr F I1212 16:30:14.871705 1 ovn_kubernetes.go:1693] daemonset openshift-network-node-identity/network-node-identity rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 1 -> 1 2025-12-12T16:30:14.871827139+00:00 stderr F I1212 16:30:14.871748 1 ovn_kubernetes.go:1698] daemonset openshift-network-node-identity/network-node-identity rollout complete 2025-12-12T16:30:15.083126104+00:00 stderr F I1212 16:30:15.083048 1 log.go:245] Render phase done, rendered 126 objects 2025-12-12T16:30:15.096068167+00:00 stderr F I1212 16:30:15.095982 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster 2025-12-12T16:30:15.102894518+00:00 stderr F I1212 16:30:15.102839 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster was successful 2025-12-12T16:30:15.102919468+00:00 stderr F I1212 16:30:15.102907 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io 2025-12-12T16:30:15.111617935+00:00 stderr F I1212 16:30:15.111536 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io was successful 2025-12-12T16:30:15.111640646+00:00 stderr F I1212 16:30:15.111632 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io 2025-12-12T16:30:15.120695802+00:00 stderr F I1212 16:30:15.120670 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io was successful 2025-12-12T16:30:15.120723563+00:00 stderr F I1212 16:30:15.120720 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io 2025-12-12T16:30:15.128813865+00:00 stderr F I1212 16:30:15.128779 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io was successful 2025-12-12T16:30:15.128813865+00:00 stderr F I1212 16:30:15.128806 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io 2025-12-12T16:30:15.140802214+00:00 stderr F I1212 16:30:15.140763 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io was successful 2025-12-12T16:30:15.140802214+00:00 stderr F I1212 16:30:15.140787 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-multus 2025-12-12T16:30:15.148169828+00:00 stderr F I1212 16:30:15.148129 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-multus was successful 2025-12-12T16:30:15.148331652+00:00 stderr F I1212 
16:30:15.148309 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus 2025-12-12T16:30:15.155112881+00:00 stderr F I1212 16:30:15.154953 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus was successful 2025-12-12T16:30:15.155112881+00:00 stderr F I1212 16:30:15.154993 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools 2025-12-12T16:30:15.160421564+00:00 stderr F I1212 16:30:15.160375 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools was successful 2025-12-12T16:30:15.160421564+00:00 stderr F I1212 16:30:15.160411 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus 2025-12-12T16:30:15.166449184+00:00 stderr F I1212 16:30:15.166371 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus was successful 2025-12-12T16:30:15.166449184+00:00 stderr F I1212 16:30:15.166430 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient 2025-12-12T16:30:15.172923336+00:00 stderr F I1212 16:30:15.172861 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient was successful 2025-12-12T16:30:15.172923336+00:00 stderr F I1212 16:30:15.172913 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group 2025-12-12T16:30:15.302596163+00:00 stderr F I1212 16:30:15.302450 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group was successful 2025-12-12T16:30:15.302596163+00:00 stderr F I1212 16:30:15.302502 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools 2025-12-12T16:30:15.501110190+00:00 stderr F I1212 16:30:15.501012 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools was successful 2025-12-12T16:30:15.501110190+00:00 stderr F I1212 16:30:15.501064 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools 2025-12-12T16:30:15.704816985+00:00 stderr F I1212 16:30:15.704673 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools was successful 2025-12-12T16:30:15.704816985+00:00 stderr F I1212 16:30:15.704733 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers 2025-12-12T16:30:15.903075585+00:00 stderr F I1212 16:30:15.902985 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers was successful 2025-12-12T16:30:15.903075585+00:00 stderr F I1212 16:30:15.903053 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts 2025-12-12T16:30:16.104296729+00:00 stderr F I1212 16:30:16.103661 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts was successful 2025-12-12T16:30:16.104296729+00:00 stderr F I1212 16:30:16.103750 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts 2025-12-12T16:30:16.304270641+00:00 stderr F I1212 16:30:16.304165 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts was successful 2025-12-12T16:30:16.304270641+00:00 stderr F I1212 16:30:16.304246 1 log.go:245] reconciling 
(rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni 2025-12-12T16:30:16.506075569+00:00 stderr F I1212 16:30:16.505894 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni was successful 2025-12-12T16:30:16.506075569+00:00 stderr F I1212 16:30:16.505969 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni 2025-12-12T16:30:16.702064782+00:00 stderr F I1212 16:30:16.701978 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni was successful 2025-12-12T16:30:16.702064782+00:00 stderr F I1212 16:30:16.702049 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project 2025-12-12T16:30:16.905091181+00:00 stderr F I1212 16:30:16.904981 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project was successful 2025-12-12T16:30:16.905091181+00:00 stderr F I1212 16:30:16.905040 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist 2025-12-12T16:30:17.102962671+00:00 stderr F I1212 16:30:17.102866 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist was successful 2025-12-12T16:30:17.102962671+00:00 stderr F I1212 16:30:17.102924 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources 2025-12-12T16:30:17.303171660+00:00 stderr F I1212 16:30:17.303062 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources was successful 2025-12-12T16:30:17.303171660+00:00 stderr F I1212 16:30:17.303126 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config 2025-12-12T16:30:17.502524097+00:00 stderr F I1212 16:30:17.502432 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config was successful 2025-12-12T16:30:17.502524097+00:00 stderr F I1212 16:30:17.502499 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config 2025-12-12T16:30:17.704060828+00:00 stderr F I1212 16:30:17.703985 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config was successful 2025-12-12T16:30:17.704060828+00:00 stderr F I1212 16:30:17.704046 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus 2025-12-12T16:30:17.910854391+00:00 stderr F I1212 16:30:17.910772 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus was successful 2025-12-12T16:30:17.910854391+00:00 stderr F I1212 16:30:17.910827 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins 2025-12-12T16:30:18.113550102+00:00 stderr F I1212 16:30:18.113339 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins was successful 2025-12-12T16:30:18.113550102+00:00 stderr F I1212 16:30:18.113437 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa 2025-12-12T16:30:18.301671988+00:00 stderr F I1212 16:30:18.301604 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa was successful 2025-12-12T16:30:18.301671988+00:00 stderr F I1212 16:30:18.301665 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role 2025-12-12T16:30:18.502279476+00:00 stderr F I1212 16:30:18.502200 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role was successful 2025-12-12T16:30:18.502279476+00:00 stderr F I1212 16:30:18.502259 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding 2025-12-12T16:30:18.702522366+00:00 stderr F I1212 16:30:18.702345 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding was successful 2025-12-12T16:30:18.702522366+00:00 stderr F I1212 16:30:18.702406 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon 2025-12-12T16:30:18.905768160+00:00 stderr F I1212 16:30:18.905239 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon was successful 2025-12-12T16:30:18.905768160+00:00 stderr F I1212 16:30:18.905739 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network 2025-12-12T16:30:19.102950052+00:00 stderr F I1212 16:30:19.102862 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network was successful 2025-12-12T16:30:19.102990213+00:00 stderr F I1212 16:30:19.102950 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/network-metrics-service 2025-12-12T16:30:19.302102274+00:00 stderr F I1212 16:30:19.302033 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/network-metrics-service was successful 2025-12-12T16:30:19.302148045+00:00 stderr F I1212 16:30:19.302133 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:30:19.503957204+00:00 stderr F I1212 16:30:19.503876 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:30:19.504002235+00:00 stderr F I1212 16:30:19.503953 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:30:19.700900050+00:00 stderr F I1212 16:30:19.700833 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:30:19.700900050+00:00 stderr F I1212 16:30:19.700892 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/multus-admission-controller 2025-12-12T16:30:19.902541175+00:00 stderr F I1212 16:30:19.902025 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/multus-admission-controller was successful 2025-12-12T16:30:19.902541175+00:00 stderr F I1212 16:30:19.902491 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ac 2025-12-12T16:30:20.104149948+00:00 stderr F I1212 16:30:20.104084 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ac was successful 2025-12-12T16:30:20.104149948+00:00 stderr F I1212 16:30:20.104139 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook 2025-12-12T16:30:20.300549601+00:00 stderr F I1212 16:30:20.300481 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook was successful 2025-12-12T16:30:20.300549601+00:00 stderr F I1212 16:30:20.300532 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook 2025-12-12T16:30:20.501136659+00:00 stderr F I1212 16:30:20.500989 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook was successful 2025-12-12T16:30:20.501136659+00:00 stderr F I1212 16:30:20.501043 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io 2025-12-12T16:30:20.703010589+00:00 stderr F I1212 16:30:20.702917 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io was successful 2025-12-12T16:30:20.703010589+00:00 stderr F I1212 16:30:20.702984 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller 2025-12-12T16:30:20.907052533+00:00 stderr F I1212 16:30:20.906972 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller was successful 2025-12-12T16:30:20.907088374+00:00 stderr F I1212 16:30:20.907047 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller 2025-12-12T16:30:21.103307992+00:00 stderr F I1212 16:30:21.103195 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller was successful 2025-12-12T16:30:21.103361764+00:00 stderr F I1212 16:30:21.103317 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:30:21.301874740+00:00 stderr F I1212 16:30:21.299948 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:30:21.301874740+00:00 stderr F I1212 16:30:21.299994 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:30:21.502147620+00:00 stderr F I1212 16:30:21.502057 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:30:21.502147620+00:00 stderr F I1212 16:30:21.502125 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules 2025-12-12T16:30:21.703576409+00:00 stderr F I1212 16:30:21.703502 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules was successful 2025-12-12T16:30:21.703637551+00:00 stderr F I1212 16:30:21.703576 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-ovn-kubernetes 2025-12-12T16:30:21.907240773+00:00 stderr F I1212 16:30:21.907158 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-ovn-kubernetes was successful 2025-12-12T16:30:21.907298305+00:00 stderr F I1212 16:30:21.907255 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org 2025-12-12T16:30:22.111614706+00:00 stderr F I1212 16:30:22.111524 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org was successful 2025-12-12T16:30:22.111614706+00:00 stderr F I1212 16:30:22.111592 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org 2025-12-12T16:30:22.306537302+00:00 stderr F I1212 16:30:22.306453 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org was successful 2025-12-12T16:30:22.306537302+00:00 stderr F I1212 16:30:22.306522 1 log.go:245] reconciling (apiextensions.k8s.io/v1, 
Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org 2025-12-12T16:30:22.508105665+00:00 stderr F I1212 16:30:22.508019 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org was successful 2025-12-12T16:30:22.508105665+00:00 stderr F I1212 16:30:22.508078 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org 2025-12-12T16:30:22.713771820+00:00 stderr F I1212 16:30:22.713673 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org was successful 2025-12-12T16:30:22.713771820+00:00 stderr F I1212 16:30:22.713738 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org 2025-12-12T16:30:22.906754258+00:00 stderr F I1212 16:30:22.906663 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org was successful 2025-12-12T16:30:22.906754258+00:00 stderr F I1212 16:30:22.906735 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:30:23.128703809+00:00 stderr F I1212 16:30:23.128628 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:30:23.128703809+00:00 stderr F I1212 16:30:23.128684 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:30:23.340342303+00:00 stderr F I1212 16:30:23.340256 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:30:23.340342303+00:00 stderr F I1212 16:30:23.340328 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io 2025-12-12T16:30:23.504515602+00:00 stderr F I1212 16:30:23.504439 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io was successful 2025-12-12T16:30:23.504515602+00:00 stderr F I1212 16:30:23.504500 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org 2025-12-12T16:30:23.727281593+00:00 stderr F I1212 16:30:23.726384 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:30:23.727327804+00:00 stderr F I1212 16:30:23.727281 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org 2025-12-12T16:30:23.939766748+00:00 stderr F I1212 16:30:23.939694 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:30:23.939766748+00:00 stderr F I1212 16:30:23.939751 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:30:24.103998398+00:00 stderr F I1212 16:30:24.103917 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:30:24.103998398+00:00 stderr F I1212 16:30:24.103968 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) 
openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited 2025-12-12T16:30:24.300942615+00:00 stderr F I1212 16:30:24.300856 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:30:24.300942615+00:00 stderr F I1212 16:30:24.300922 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited 2025-12-12T16:30:24.501850961+00:00 stderr F I1212 16:30:24.501772 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited was successful 2025-12-12T16:30:24.501850961+00:00 stderr F I1212 16:30:24.501837 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited 2025-12-12T16:30:24.704690025+00:00 stderr F I1212 16:30:24.704144 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:30:24.704690025+00:00 stderr F I1212 16:30:24.704679 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited 2025-12-12T16:30:24.901122629+00:00 stderr F I1212 16:30:24.901058 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited was successful 2025-12-12T16:30:24.901167590+00:00 stderr F I1212 16:30:24.901126 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy 2025-12-12T16:30:25.103120862+00:00 stderr F I1212 16:30:25.103049 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy was successful 2025-12-12T16:30:25.103195474+00:00 stderr F I1212 16:30:25.103127 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy 2025-12-12T16:30:25.301504985+00:00 stderr F I1212 16:30:25.301452 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy was successful 2025-12-12T16:30:25.301648699+00:00 stderr F I1212 16:30:25.301623 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config 2025-12-12T16:30:25.503469136+00:00 stderr F I1212 16:30:25.503387 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config was successful 2025-12-12T16:30:25.503469136+00:00 stderr F I1212 16:30:25.503446 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:30:25.701882480+00:00 stderr F I1212 16:30:25.701810 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:30:25.701933471+00:00 stderr F I1212 16:30:25.701881 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:30:25.902998081+00:00 stderr F I1212 16:30:25.902929 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:30:25.902998081+00:00 stderr F I1212 16:30:25.902987 1 log.go:245] reconciling 
(rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:30:26.102102042+00:00 stderr F I1212 16:30:26.102044 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:30:26.102219705+00:00 stderr F I1212 16:30:26.102207 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:30:26.301564382+00:00 stderr F I1212 16:30:26.301494 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:30:26.301564382+00:00 stderr F I1212 16:30:26.301554 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:30:26.501359900+00:00 stderr F I1212 16:30:26.501314 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:30:26.501446792+00:00 stderr F I1212 16:30:26.501435 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn 2025-12-12T16:30:26.702827099+00:00 stderr F I1212 16:30:26.702750 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn was successful 2025-12-12T16:30:26.702827099+00:00 stderr F I1212 16:30:26.702813 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer 2025-12-12T16:30:26.902570816+00:00 stderr F I1212 16:30:26.902509 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer was successful 2025-12-12T16:30:26.902701630+00:00 stderr F I1212 16:30:26.902688 1 log.go:245] reconciling (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes 2025-12-12T16:30:27.103451502+00:00 stderr F I1212 16:30:27.103367 1 log.go:245] Apply / Create of (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes was successful 2025-12-12T16:30:27.103451502+00:00 stderr F I1212 16:30:27.103420 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader 2025-12-12T16:30:27.302114632+00:00 stderr F I1212 16:30:27.301618 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader was successful 2025-12-12T16:30:27.302114632+00:00 stderr F I1212 16:30:27.301689 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib 2025-12-12T16:30:27.504542865+00:00 stderr F I1212 16:30:27.504441 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib was successful 2025-12-12T16:30:27.504542865+00:00 stderr F I1212 16:30:27.504490 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor 2025-12-12T16:30:27.702049026+00:00 stderr F I1212 16:30:27.701970 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor was successful 2025-12-12T16:30:27.702049026+00:00 stderr F I1212 16:30:27.702038 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, 
Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer 2025-12-12T16:30:27.901710361+00:00 stderr F I1212 16:30:27.901617 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer was successful 2025-12-12T16:30:27.901710361+00:00 stderr F I1212 16:30:27.901684 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules 2025-12-12T16:30:28.104940005+00:00 stderr F I1212 16:30:28.104215 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules was successful 2025-12-12T16:30:28.104940005+00:00 stderr F I1212 16:30:28.104278 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules 2025-12-12T16:30:28.307411820+00:00 stderr F I1212 16:30:28.307307 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules was successful 2025-12-12T16:30:28.307411820+00:00 stderr F I1212 16:30:28.307374 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features 2025-12-12T16:30:28.503145566+00:00 stderr F I1212 16:30:28.503062 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features was successful 2025-12-12T16:30:28.503145566+00:00 stderr F I1212 16:30:28.503132 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics 2025-12-12T16:30:28.702526754+00:00 stderr F I1212 16:30:28.702427 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics was successful 2025-12-12T16:30:28.702580295+00:00 stderr F I1212 16:30:28.702523 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:30:28.904315422+00:00 stderr F I1212 16:30:28.904238 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:30:28.904350493+00:00 stderr F I1212 16:30:28.904312 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node 2025-12-12T16:30:29.104137060+00:00 stderr F I1212 16:30:29.104061 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node was successful 2025-12-12T16:30:29.104137060+00:00 stderr F I1212 16:30:29.104130 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:30:29.305957229+00:00 stderr F I1212 16:30:29.305064 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:30:29.305957229+00:00 stderr F I1212 16:30:29.305946 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:30:29.505349497+00:00 stderr F I1212 16:30:29.505258 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:30:29.505349497+00:00 stderr F I1212 16:30:29.505310 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:30:29.702499539+00:00 stderr F I1212 16:30:29.702415 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:30:29.702499539+00:00 stderr F I1212 16:30:29.702483 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-host-network 2025-12-12T16:30:29.903928368+00:00 stderr F I1212 16:30:29.903087 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-host-network was successful 2025-12-12T16:30:29.903965759+00:00 stderr F I1212 16:30:29.903925 1 log.go:245] reconciling (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas 2025-12-12T16:30:30.102777093+00:00 stderr F I1212 16:30:30.102692 1 log.go:245] Apply / Create of (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas was successful 2025-12-12T16:30:30.102777093+00:00 stderr F I1212 16:30:30.102754 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane 2025-12-12T16:30:30.307147635+00:00 stderr F I1212 16:30:30.307075 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane was successful 2025-12-12T16:30:30.307147635+00:00 stderr F I1212 16:30:30.307134 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node 2025-12-12T16:30:30.524784559+00:00 stderr F I1212 16:30:30.524720 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node was successful 2025-12-12T16:30:30.524837470+00:00 stderr F I1212 16:30:30.524780 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label 2025-12-12T16:30:30.702809423+00:00 stderr F I1212 16:30:30.702739 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label was successful 2025-12-12T16:30:30.702809423+00:00 stderr F I1212 16:30:30.702793 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding 2025-12-12T16:30:30.901334940+00:00 stderr F I1212 16:30:30.901248 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding was successful 2025-12-12T16:30:30.901334940+00:00 stderr F I1212 16:30:30.901316 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-diagnostics 2025-12-12T16:30:31.105628260+00:00 stderr F I1212 16:30:31.105552 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-diagnostics was successful 2025-12-12T16:30:31.105689912+00:00 stderr F I1212 16:30:31.105625 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:30:31.303895120+00:00 stderr F I1212 16:30:31.303792 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:30:31.303895120+00:00 stderr F I1212 16:30:31.303880 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:30:31.502330745+00:00 stderr F I1212 16:30:31.502261 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:30:31.502330745+00:00 stderr F I1212 16:30:31.502322 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) 
openshift-network-diagnostics/network-diagnostics 2025-12-12T16:30:31.701522458+00:00 stderr F I1212 16:30:31.701448 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:30:31.701575489+00:00 stderr F I1212 16:30:31.701526 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics 2025-12-12T16:30:31.901217343+00:00 stderr F I1212 16:30:31.900925 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics was successful 2025-12-12T16:30:31.901217343+00:00 stderr F I1212 16:30:31.901171 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics 2025-12-12T16:30:32.101694149+00:00 stderr F I1212 16:30:32.101622 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics was successful 2025-12-12T16:30:32.101694149+00:00 stderr F I1212 16:30:32.101674 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics 2025-12-12T16:30:32.302068821+00:00 stderr F I1212 16:30:32.301964 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics was successful 2025-12-12T16:30:32.302068821+00:00 stderr F I1212 16:30:32.302022 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source 2025-12-12T16:30:32.507080870+00:00 stderr F I1212 16:30:32.507006 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:30:32.507080870+00:00 stderr F I1212 16:30:32.507071 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-source 2025-12-12T16:30:32.704159929+00:00 stderr F I1212 16:30:32.704061 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:30:32.704159929+00:00 stderr F I1212 16:30:32.704122 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source 2025-12-12T16:30:32.903369083+00:00 stderr F I1212 16:30:32.903295 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:30:32.903369083+00:00 stderr F I1212 16:30:32.903354 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:30:33.104560406+00:00 stderr F I1212 16:30:33.104468 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:30:33.104560406+00:00 stderr F I1212 16:30:33.104532 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:30:33.302473337+00:00 stderr F I1212 16:30:33.302396 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:30:33.302473337+00:00 stderr F I1212 16:30:33.302450 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target 2025-12-12T16:30:33.505875805+00:00 stderr F I1212 16:30:33.505780 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) 
openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:30:33.505875805+00:00 stderr F I1212 16:30:33.505853 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-target 2025-12-12T16:30:33.702585876+00:00 stderr F I1212 16:30:33.702500 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:30:33.702585876+00:00 stderr F I1212 16:30:33.702552 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role 2025-12-12T16:30:33.902546439+00:00 stderr F I1212 16:30:33.902455 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role was successful 2025-12-12T16:30:33.902546439+00:00 stderr F I1212 16:30:33.902530 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding 2025-12-12T16:30:34.104391168+00:00 stderr F I1212 16:30:34.104310 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding was successful 2025-12-12T16:30:34.104391168+00:00 stderr F I1212 16:30:34.104378 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-node-identity 2025-12-12T16:30:34.302629687+00:00 stderr F I1212 16:30:34.302548 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-node-identity was successful 2025-12-12T16:30:34.302629687+00:00 stderr F I1212 16:30:34.302605 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity 2025-12-12T16:30:34.503487472+00:00 stderr F I1212 16:30:34.503388 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:30:34.503487472+00:00 stderr F I1212 16:30:34.503455 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity 2025-12-12T16:30:34.701365203+00:00 stderr F I1212 16:30:34.701299 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity was successful 2025-12-12T16:30:34.701365203+00:00 stderr F I1212 16:30:34.701349 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity 2025-12-12T16:30:34.902663668+00:00 stderr F I1212 16:30:34.902589 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity was successful 2025-12-12T16:30:34.902663668+00:00 stderr F I1212 16:30:34.902648 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:30:35.100912168+00:00 stderr F I1212 16:30:35.100827 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:30:35.100994290+00:00 stderr F I1212 16:30:35.100912 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:30:35.301433114+00:00 stderr F I1212 16:30:35.301361 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:30:35.301433114+00:00 stderr F I1212 
16:30:35.301414 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 2025-12-12T16:30:35.503219152+00:00 stderr F I1212 16:30:35.503145 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 was successful 2025-12-12T16:30:35.503257993+00:00 stderr F I1212 16:30:35.503222 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm 2025-12-12T16:30:35.700964339+00:00 stderr F I1212 16:30:35.700892 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm was successful 2025-12-12T16:30:35.700964339+00:00 stderr F I1212 16:30:35.700944 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity 2025-12-12T16:30:35.901510686+00:00 stderr F I1212 16:30:35.901455 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:30:35.901548687+00:00 stderr F I1212 16:30:35.901508 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io 2025-12-12T16:30:36.103552130+00:00 stderr F I1212 16:30:36.103497 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io was successful 2025-12-12T16:30:36.103605371+00:00 stderr F I1212 16:30:36.103550 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity 2025-12-12T16:30:36.327247884+00:00 stderr F I1212 16:30:36.306646 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:30:36.327247884+00:00 stderr F I1212 16:30:36.306705 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules 2025-12-12T16:30:36.501851253+00:00 stderr F I1212 16:30:36.501788 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules was successful 2025-12-12T16:30:36.501900774+00:00 stderr F I1212 16:30:36.501852 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter 2025-12-12T16:30:36.702996705+00:00 stderr F I1212 16:30:36.702490 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter was successful 2025-12-12T16:30:36.702996705+00:00 stderr F I1212 16:30:36.702954 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter 2025-12-12T16:30:36.901788498+00:00 stderr F I1212 16:30:36.901669 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:30:36.901788498+00:00 stderr F I1212 16:30:36.901729 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter 2025-12-12T16:30:37.102542020+00:00 stderr F I1212 16:30:37.102459 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter was successful 2025-12-12T16:30:37.102542020+00:00 stderr F I1212 16:30:37.102522 1 log.go:245] 
reconciling (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script 2025-12-12T16:30:37.312254446+00:00 stderr F I1212 16:30:37.308301 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script was successful 2025-12-12T16:30:37.312254446+00:00 stderr F I1212 16:30:37.308358 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter 2025-12-12T16:30:37.505485520+00:00 stderr F I1212 16:30:37.505406 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:30:37.505485520+00:00 stderr F I1212 16:30:37.505457 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-console 2025-12-12T16:30:37.702675563+00:00 stderr F I1212 16:30:37.702596 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-console was successful 2025-12-12T16:30:37.702675563+00:00 stderr F I1212 16:30:37.702657 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin 2025-12-12T16:30:37.901390995+00:00 stderr F I1212 16:30:37.901296 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:30:37.901390995+00:00 stderr F I1212 16:30:37.901365 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin 2025-12-12T16:30:38.106212078+00:00 stderr F I1212 16:30:38.106131 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:30:38.106264350+00:00 stderr F I1212 16:30:38.106215 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-console/networking-console-plugin 2025-12-12T16:30:38.302725834+00:00 stderr F I1212 16:30:38.302257 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:30:38.302725834+00:00 stderr F I1212 16:30:38.302715 1 log.go:245] reconciling (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin 2025-12-12T16:30:38.503373864+00:00 stderr F I1212 16:30:38.503059 1 log.go:245] Apply / Create of (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin was successful 2025-12-12T16:30:38.523259960+00:00 stderr F I1212 16:30:38.523150 1 log.go:245] Operconfig Controller complete 2025-12-12T16:31:24.051596548+00:00 stderr F I1212 16:31:24.051508 1 log.go:245] Reconciling update to IngressController openshift-ingress-operator/default 2025-12-12T16:31:25.860461166+00:00 stderr F I1212 16:31:25.860377 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-network-node-identity/network-node-identity 2025-12-12T16:31:25.861000820+00:00 stderr F I1212 16:31:25.860971 1 log.go:245] successful reconciliation 2025-12-12T16:31:29.262389994+00:00 stderr F I1212 16:31:29.262306 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/ovn 2025-12-12T16:31:29.263097991+00:00 stderr F I1212 16:31:29.263061 1 log.go:245] successful reconciliation 2025-12-12T16:31:32.253413128+00:00 stderr F I1212 16:31:32.253327 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/signer 2025-12-12T16:31:32.253717246+00:00 stderr F I1212 16:31:32.253689 1 log.go:245] successful reconciliation 2025-12-12T16:32:05.730543817+00:00 stderr F I1212 16:32:05.729841 1 log.go:245] The check 
PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:32:05.736805681+00:00 stderr F I1212 16:32:05.736745 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:32:05.743223179+00:00 stderr F I1212 16:32:05.743110 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:32:05.749591525+00:00 stderr F I1212 16:32:05.749528 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:32:05.756049614+00:00 stderr F I1212 16:32:05.755977 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:32:05.763555629+00:00 stderr F I1212 16:32:05.763451 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:32:05.770228493+00:00 stderr F I1212 16:32:05.769863 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:32:05.777588584+00:00 stderr F I1212 16:32:05.777215 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:32:05.784220547+00:00 stderr F I1212 16:32:05.784111 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:32:05.790946312+00:00 stderr F I1212 16:32:05.790621 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:32:05.933455796+00:00 stderr F I1212 16:32:05.933377 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:32:25.054552047+00:00 stderr F I1212 16:32:25.054439 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:32:44.071865622+00:00 stderr F I1212 16:32:44.070462 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:33:38.524350829+00:00 stderr F I1212 16:33:38.524217 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:33:38.669516616+00:00 stderr F I1212 16:33:38.669423 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:33:38.672161093+00:00 stderr F I1212 16:33:38.672123 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:33:38.674432649+00:00 stderr F I1212 16:33:38.674371 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:33:38.676927472+00:00 stderr F I1212 16:33:38.676863 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc00418efc0 DisableUDPAggregation:false 
DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:33:38.684210774+00:00 stderr F I1212 16:33:38.683482 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 4 -> 4 2025-12-12T16:33:38.684210774+00:00 stderr F I1212 16:33:38.683512 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:33:38.684210774+00:00 stderr F I1212 16:33:38.683522 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:33:38.687580128+00:00 stderr F I1212 16:33:38.687092 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 3 -> 3 2025-12-12T16:33:38.687580128+00:00 stderr F I1212 16:33:38.687113 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:33:38.687580128+00:00 stderr F I1212 16:33:38.687122 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 3 -> 3 2025-12-12T16:33:38.687580128+00:00 stderr F I1212 16:33:38.687128 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:33:38.687580128+00:00 stderr F I1212 16:33:38.687148 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:33:38.693204637+00:00 stderr F I1212 16:33:38.693120 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:33:38.693204637+00:00 stderr F I1212 16:33:38.693146 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:33:38.745450373+00:00 stderr F I1212 16:33:38.745367 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:33:38.763006722+00:00 stderr F I1212 16:33:38.762908 1 log.go:245] Apply / Create of (operator.openshift.io/v1, Kind=Network) /cluster was successful 2025-12-12T16:33:38.763006722+00:00 stderr F I1212 16:33:38.762954 1 log.go:245] Starting render phase 2025-12-12T16:33:38.772522010+00:00 stderr F I1212 16:33:38.772452 1 ovn_kubernetes.go:344] OVN_EGRESSIP_HEALTHCHECK_PORT env var is not defined. 
Using: 9107 2025-12-12T16:33:38.807873953+00:00 stderr F I1212 16:33:38.807816 1 ovn_kubernetes.go:1457] IP family mode: node=single-stack, controlPlane=single-stack 2025-12-12T16:33:38.807873953+00:00 stderr F I1212 16:33:38.807844 1 ovn_kubernetes.go:1429] IP family change: updateNode=true, updateControlPlane=true 2025-12-12T16:33:38.807922094+00:00 stderr F I1212 16:33:38.807870 1 ovn_kubernetes.go:1601] OVN-Kubernetes control-plane and node already at release version 4.20.1; no changes required 2025-12-12T16:33:38.807922094+00:00 stderr F I1212 16:33:38.807898 1 ovn_kubernetes.go:531] ovnk components: ovnkube-node: isRunning=true, update=true; ovnkube-control-plane: isRunning=true, update=true 2025-12-12T16:33:38.948686282+00:00 stderr F I1212 16:33:38.948631 1 ovn_kubernetes.go:1693] daemonset openshift-network-node-identity/network-node-identity rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 1 -> 1 2025-12-12T16:33:38.948777794+00:00 stderr F I1212 16:33:38.948767 1 ovn_kubernetes.go:1698] daemonset openshift-network-node-identity/network-node-identity rollout complete 2025-12-12T16:33:39.161090890+00:00 stderr F I1212 16:33:39.161032 1 log.go:245] Render phase done, rendered 126 objects 2025-12-12T16:33:39.185219703+00:00 stderr F I1212 16:33:39.185066 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster 2025-12-12T16:33:39.191949111+00:00 stderr F I1212 16:33:39.191892 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster was successful 2025-12-12T16:33:39.192004723+00:00 stderr F I1212 16:33:39.191950 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io 2025-12-12T16:33:39.200765682+00:00 stderr F I1212 16:33:39.200685 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io was successful 2025-12-12T16:33:39.200812133+00:00 stderr F I1212 16:33:39.200775 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io 2025-12-12T16:33:39.209421508+00:00 stderr F I1212 16:33:39.209368 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io was successful 2025-12-12T16:33:39.209457999+00:00 stderr F I1212 16:33:39.209432 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io 2025-12-12T16:33:39.218646719+00:00 stderr F I1212 16:33:39.218583 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io was successful 2025-12-12T16:33:39.218691040+00:00 stderr F I1212 16:33:39.218668 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io 2025-12-12T16:33:39.227366897+00:00 stderr F I1212 16:33:39.227293 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io was successful 2025-12-12T16:33:39.227410618+00:00 stderr F I1212 16:33:39.227379 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-multus 2025-12-12T16:33:39.234239508+00:00 stderr F I1212 16:33:39.234199 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-multus was successful 2025-12-12T16:33:39.234348261+00:00 stderr F I1212 
16:33:39.234335 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus 2025-12-12T16:33:39.240313140+00:00 stderr F I1212 16:33:39.240250 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus was successful 2025-12-12T16:33:39.240313140+00:00 stderr F I1212 16:33:39.240300 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools 2025-12-12T16:33:39.246201167+00:00 stderr F I1212 16:33:39.246149 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools was successful 2025-12-12T16:33:39.246201167+00:00 stderr F I1212 16:33:39.246194 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus 2025-12-12T16:33:39.251243123+00:00 stderr F I1212 16:33:39.251217 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus was successful 2025-12-12T16:33:39.251326345+00:00 stderr F I1212 16:33:39.251313 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient 2025-12-12T16:33:39.256987367+00:00 stderr F I1212 16:33:39.256914 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient was successful 2025-12-12T16:33:39.256987367+00:00 stderr F I1212 16:33:39.256942 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group 2025-12-12T16:33:39.392980866+00:00 stderr F I1212 16:33:39.392900 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group was successful 2025-12-12T16:33:39.392980866+00:00 stderr F I1212 16:33:39.392963 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools 2025-12-12T16:33:39.592703167+00:00 stderr F I1212 16:33:39.592635 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools was successful 2025-12-12T16:33:39.592820400+00:00 stderr F I1212 16:33:39.592806 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools 2025-12-12T16:33:39.792013428+00:00 stderr F I1212 16:33:39.791929 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools was successful 2025-12-12T16:33:39.792013428+00:00 stderr F I1212 16:33:39.791995 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers 2025-12-12T16:33:39.992628231+00:00 stderr F I1212 16:33:39.992548 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers was successful 2025-12-12T16:33:39.992701123+00:00 stderr F I1212 16:33:39.992624 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts 2025-12-12T16:33:40.193398228+00:00 stderr F I1212 16:33:40.193296 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts was successful 2025-12-12T16:33:40.193398228+00:00 stderr F I1212 16:33:40.193352 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts 2025-12-12T16:33:40.394195917+00:00 stderr F I1212 16:33:40.394102 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts was successful 2025-12-12T16:33:40.394195917+00:00 stderr F I1212 16:33:40.394167 1 log.go:245] reconciling 
(rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni 2025-12-12T16:33:40.601012835+00:00 stderr F I1212 16:33:40.600954 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni was successful 2025-12-12T16:33:40.601117268+00:00 stderr F I1212 16:33:40.601105 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni 2025-12-12T16:33:40.791380323+00:00 stderr F I1212 16:33:40.791317 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni was successful 2025-12-12T16:33:40.791479155+00:00 stderr F I1212 16:33:40.791469 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project 2025-12-12T16:33:40.991867243+00:00 stderr F I1212 16:33:40.991790 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project was successful 2025-12-12T16:33:40.992023147+00:00 stderr F I1212 16:33:40.992012 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist 2025-12-12T16:33:41.192474126+00:00 stderr F I1212 16:33:41.192412 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist was successful 2025-12-12T16:33:41.192599559+00:00 stderr F I1212 16:33:41.192586 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources 2025-12-12T16:33:41.392859304+00:00 stderr F I1212 16:33:41.392797 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources was successful 2025-12-12T16:33:41.392985057+00:00 stderr F I1212 16:33:41.392975 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config 2025-12-12T16:33:41.590923114+00:00 stderr F I1212 16:33:41.590847 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config was successful 2025-12-12T16:33:41.590923114+00:00 stderr F I1212 16:33:41.590900 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config 2025-12-12T16:33:41.792699486+00:00 stderr F I1212 16:33:41.792643 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config was successful 2025-12-12T16:33:41.792830790+00:00 stderr F I1212 16:33:41.792813 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus 2025-12-12T16:33:41.999168586+00:00 stderr F I1212 16:33:41.999080 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus was successful 2025-12-12T16:33:41.999168586+00:00 stderr F I1212 16:33:41.999133 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins 2025-12-12T16:33:42.217066662+00:00 stderr F I1212 16:33:42.216965 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins was successful 2025-12-12T16:33:42.217066662+00:00 stderr F I1212 16:33:42.217049 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa 2025-12-12T16:33:42.391746636+00:00 stderr F I1212 16:33:42.391644 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa was successful 2025-12-12T16:33:42.391746636+00:00 stderr F I1212 16:33:42.391707 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role 2025-12-12T16:33:42.593932450+00:00 stderr F I1212 16:33:42.593860 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role was successful 2025-12-12T16:33:42.594078313+00:00 stderr F I1212 16:33:42.594056 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding 2025-12-12T16:33:42.792122362+00:00 stderr F I1212 16:33:42.792065 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding was successful 2025-12-12T16:33:42.792288166+00:00 stderr F I1212 16:33:42.792272 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon 2025-12-12T16:33:42.994188063+00:00 stderr F I1212 16:33:42.994127 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon was successful 2025-12-12T16:33:42.994307436+00:00 stderr F I1212 16:33:42.994296 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network 2025-12-12T16:33:43.194807206+00:00 stderr F I1212 16:33:43.194750 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network was successful 2025-12-12T16:33:43.194923829+00:00 stderr F I1212 16:33:43.194913 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/network-metrics-service 2025-12-12T16:33:43.394683401+00:00 stderr F I1212 16:33:43.394611 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/network-metrics-service was successful 2025-12-12T16:33:43.395023139+00:00 stderr F I1212 16:33:43.395011 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:33:43.593011187+00:00 stderr F I1212 16:33:43.592911 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:33:43.593011187+00:00 stderr F I1212 16:33:43.592983 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:33:43.790744428+00:00 stderr F I1212 16:33:43.790657 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:33:43.790744428+00:00 stderr F I1212 16:33:43.790726 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/multus-admission-controller 2025-12-12T16:33:43.993467924+00:00 stderr F I1212 16:33:43.993405 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/multus-admission-controller was successful 2025-12-12T16:33:43.993588827+00:00 stderr F I1212 16:33:43.993578 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ac 2025-12-12T16:33:44.191936894+00:00 stderr F I1212 16:33:44.191860 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ac was successful 2025-12-12T16:33:44.191936894+00:00 stderr F I1212 16:33:44.191926 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook 2025-12-12T16:33:44.392641160+00:00 stderr F I1212 16:33:44.392551 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook was successful 2025-12-12T16:33:44.392987379+00:00 stderr F I1212 16:33:44.392816 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook 2025-12-12T16:33:44.592401052+00:00 stderr F I1212 16:33:44.591457 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook was successful 2025-12-12T16:33:44.592605407+00:00 stderr F I1212 16:33:44.592591 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io 2025-12-12T16:33:44.794225296+00:00 stderr F I1212 16:33:44.794148 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io was successful 2025-12-12T16:33:44.794333238+00:00 stderr F I1212 16:33:44.794323 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller 2025-12-12T16:33:44.996532091+00:00 stderr F I1212 16:33:44.996474 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller was successful 2025-12-12T16:33:44.996585823+00:00 stderr F I1212 16:33:44.996534 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller 2025-12-12T16:33:45.192548490+00:00 stderr F I1212 16:33:45.192497 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller was successful 2025-12-12T16:33:45.192721804+00:00 stderr F I1212 16:33:45.192709 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:33:45.393158754+00:00 stderr F I1212 16:33:45.393083 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:33:45.393158754+00:00 stderr F I1212 16:33:45.393130 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:33:45.593051839+00:00 stderr F I1212 16:33:45.592984 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:33:45.593235454+00:00 stderr F I1212 16:33:45.593223 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules 2025-12-12T16:33:45.792789940+00:00 stderr F I1212 16:33:45.792654 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules was successful 2025-12-12T16:33:45.792789940+00:00 stderr F I1212 16:33:45.792723 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-ovn-kubernetes 2025-12-12T16:33:45.996254954+00:00 stderr F I1212 16:33:45.996140 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-ovn-kubernetes was successful 2025-12-12T16:33:45.996254954+00:00 stderr F I1212 16:33:45.996229 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org 2025-12-12T16:33:46.200570830+00:00 stderr F I1212 16:33:46.200450 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org was successful 2025-12-12T16:33:46.200570830+00:00 stderr F I1212 16:33:46.200524 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org 2025-12-12T16:33:46.401356288+00:00 stderr F I1212 16:33:46.401252 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org was successful 2025-12-12T16:33:46.401356288+00:00 stderr F I1212 16:33:46.401321 1 log.go:245] reconciling (apiextensions.k8s.io/v1, 
Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org 2025-12-12T16:33:46.596533515+00:00 stderr F I1212 16:33:46.596450 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org was successful 2025-12-12T16:33:46.596533515+00:00 stderr F I1212 16:33:46.596504 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org 2025-12-12T16:33:46.813241771+00:00 stderr F I1212 16:33:46.813111 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org was successful 2025-12-12T16:33:46.813318583+00:00 stderr F I1212 16:33:46.813261 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org 2025-12-12T16:33:46.999241649+00:00 stderr F I1212 16:33:46.998969 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org was successful 2025-12-12T16:33:46.999241649+00:00 stderr F I1212 16:33:46.999035 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:33:47.226886508+00:00 stderr F I1212 16:33:47.226791 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:33:47.226886508+00:00 stderr F I1212 16:33:47.226852 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:33:47.424461646+00:00 stderr F I1212 16:33:47.424365 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:33:47.424461646+00:00 stderr F I1212 16:33:47.424432 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io 2025-12-12T16:33:47.594912246+00:00 stderr F I1212 16:33:47.594824 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io was successful 2025-12-12T16:33:47.594912246+00:00 stderr F I1212 16:33:47.594893 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org 2025-12-12T16:33:47.824135884+00:00 stderr F I1212 16:33:47.824068 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:33:47.824135884+00:00 stderr F I1212 16:33:47.824124 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org 2025-12-12T16:33:48.032043680+00:00 stderr F I1212 16:33:48.031938 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:33:48.032043680+00:00 stderr F I1212 16:33:48.031991 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:33:48.191707650+00:00 stderr F I1212 16:33:48.190757 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:33:48.192291615+00:00 stderr F I1212 16:33:48.192256 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) 
openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited 2025-12-12T16:33:48.404858917+00:00 stderr F I1212 16:33:48.401213 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:33:48.404858917+00:00 stderr F I1212 16:33:48.401265 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited 2025-12-12T16:33:48.591802169+00:00 stderr F I1212 16:33:48.591716 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited was successful 2025-12-12T16:33:48.591802169+00:00 stderr F I1212 16:33:48.591779 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited 2025-12-12T16:33:48.793760906+00:00 stderr F I1212 16:33:48.793075 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:33:48.793760906+00:00 stderr F I1212 16:33:48.793741 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited 2025-12-12T16:33:48.993245181+00:00 stderr F I1212 16:33:48.993131 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited was successful 2025-12-12T16:33:48.993325853+00:00 stderr F I1212 16:33:48.993275 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy 2025-12-12T16:33:49.192666975+00:00 stderr F I1212 16:33:49.192551 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy was successful 2025-12-12T16:33:49.192666975+00:00 stderr F I1212 16:33:49.192607 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy 2025-12-12T16:33:49.393231647+00:00 stderr F I1212 16:33:49.393144 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy was successful 2025-12-12T16:33:49.393320529+00:00 stderr F I1212 16:33:49.393239 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config 2025-12-12T16:33:49.592306251+00:00 stderr F I1212 16:33:49.592169 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config was successful 2025-12-12T16:33:49.592306251+00:00 stderr F I1212 16:33:49.592287 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:33:49.790586787+00:00 stderr F I1212 16:33:49.790531 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:33:49.790760541+00:00 stderr F I1212 16:33:49.790737 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:33:49.993276452+00:00 stderr F I1212 16:33:49.993213 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:33:49.993390755+00:00 stderr F I1212 16:33:49.993377 1 log.go:245] reconciling 
(rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:33:50.192230534+00:00 stderr F I1212 16:33:50.192119 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:33:50.192311266+00:00 stderr F I1212 16:33:50.192230 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:33:50.391218547+00:00 stderr F I1212 16:33:50.391098 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:33:50.391218547+00:00 stderr F I1212 16:33:50.391154 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:33:50.591092871+00:00 stderr F I1212 16:33:50.591017 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:33:50.591092871+00:00 stderr F I1212 16:33:50.591072 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn 2025-12-12T16:33:50.793144931+00:00 stderr F I1212 16:33:50.793022 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn was successful 2025-12-12T16:33:50.793144931+00:00 stderr F I1212 16:33:50.793089 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer 2025-12-12T16:33:50.994108613+00:00 stderr F I1212 16:33:50.994051 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer was successful 2025-12-12T16:33:50.994108613+00:00 stderr F I1212 16:33:50.994101 1 log.go:245] reconciling (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes 2025-12-12T16:33:51.193704990+00:00 stderr F I1212 16:33:51.193631 1 log.go:245] Apply / Create of (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes was successful 2025-12-12T16:33:51.193704990+00:00 stderr F I1212 16:33:51.193692 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader 2025-12-12T16:33:51.392754375+00:00 stderr F I1212 16:33:51.392660 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader was successful 2025-12-12T16:33:51.392754375+00:00 stderr F I1212 16:33:51.392717 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib 2025-12-12T16:33:51.595962963+00:00 stderr F I1212 16:33:51.595808 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib was successful 2025-12-12T16:33:51.595962963+00:00 stderr F I1212 16:33:51.595904 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor 2025-12-12T16:33:51.791478369+00:00 stderr F I1212 16:33:51.791359 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor was successful 2025-12-12T16:33:51.791478369+00:00 stderr F I1212 16:33:51.791428 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, 
Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer 2025-12-12T16:33:51.991112878+00:00 stderr F I1212 16:33:51.991030 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer was successful 2025-12-12T16:33:51.991112878+00:00 stderr F I1212 16:33:51.991096 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules 2025-12-12T16:33:52.196275514+00:00 stderr F I1212 16:33:52.196122 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules was successful 2025-12-12T16:33:52.196353046+00:00 stderr F I1212 16:33:52.196298 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules 2025-12-12T16:33:52.396135629+00:00 stderr F I1212 16:33:52.396054 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules was successful 2025-12-12T16:33:52.396135629+00:00 stderr F I1212 16:33:52.396113 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features 2025-12-12T16:33:52.591520652+00:00 stderr F I1212 16:33:52.591418 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features was successful 2025-12-12T16:33:52.591520652+00:00 stderr F I1212 16:33:52.591482 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics 2025-12-12T16:33:52.792559556+00:00 stderr F I1212 16:33:52.792348 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics was successful 2025-12-12T16:33:52.792559556+00:00 stderr F I1212 16:33:52.792402 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:33:52.991985650+00:00 stderr F I1212 16:33:52.991893 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:33:52.991985650+00:00 stderr F I1212 16:33:52.991971 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node 2025-12-12T16:33:53.194128320+00:00 stderr F I1212 16:33:53.194059 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node was successful 2025-12-12T16:33:53.194128320+00:00 stderr F I1212 16:33:53.194110 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:33:53.392910247+00:00 stderr F I1212 16:33:53.392821 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:33:53.392910247+00:00 stderr F I1212 16:33:53.392875 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:33:53.593354106+00:00 stderr F I1212 16:33:53.593281 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:33:53.593387727+00:00 stderr F I1212 16:33:53.593348 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:33:53.790820771+00:00 stderr F I1212 16:33:53.790694 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:33:53.790820771+00:00 stderr F I1212 16:33:53.790766 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-host-network 2025-12-12T16:33:53.992819889+00:00 stderr F I1212 16:33:53.992740 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-host-network was successful 2025-12-12T16:33:53.992819889+00:00 stderr F I1212 16:33:53.992806 1 log.go:245] reconciling (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas 2025-12-12T16:33:54.192419497+00:00 stderr F I1212 16:33:54.192341 1 log.go:245] Apply / Create of (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas was successful 2025-12-12T16:33:54.192419497+00:00 stderr F I1212 16:33:54.192401 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane 2025-12-12T16:33:54.397106462+00:00 stderr F I1212 16:33:54.397014 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane was successful 2025-12-12T16:33:54.397106462+00:00 stderr F I1212 16:33:54.397087 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node 2025-12-12T16:33:54.613398568+00:00 stderr F I1212 16:33:54.610617 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node was successful 2025-12-12T16:33:54.613398568+00:00 stderr F I1212 16:33:54.610681 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label 2025-12-12T16:33:54.792516364+00:00 stderr F I1212 16:33:54.791786 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label was successful 2025-12-12T16:33:54.792516364+00:00 stderr F I1212 16:33:54.791868 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding 2025-12-12T16:33:54.993288061+00:00 stderr F I1212 16:33:54.991662 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding was successful 2025-12-12T16:33:54.993288061+00:00 stderr F I1212 16:33:54.991739 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-diagnostics 2025-12-12T16:33:55.193891295+00:00 stderr F I1212 16:33:55.193804 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-diagnostics was successful 2025-12-12T16:33:55.193891295+00:00 stderr F I1212 16:33:55.193877 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:33:55.393042522+00:00 stderr F I1212 16:33:55.392149 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:33:55.393042522+00:00 stderr F I1212 16:33:55.393030 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:33:55.592674631+00:00 stderr F I1212 16:33:55.591753 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:33:55.592674631+00:00 stderr F I1212 16:33:55.592654 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) 
openshift-network-diagnostics/network-diagnostics 2025-12-12T16:33:55.791643993+00:00 stderr F I1212 16:33:55.791530 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:33:55.791643993+00:00 stderr F I1212 16:33:55.791617 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics 2025-12-12T16:33:55.991893827+00:00 stderr F I1212 16:33:55.991782 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics was successful 2025-12-12T16:33:55.991893827+00:00 stderr F I1212 16:33:55.991845 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics 2025-12-12T16:33:56.191239479+00:00 stderr F I1212 16:33:56.191111 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics was successful 2025-12-12T16:33:56.191239479+00:00 stderr F I1212 16:33:56.191162 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics 2025-12-12T16:33:56.391943204+00:00 stderr F I1212 16:33:56.391858 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics was successful 2025-12-12T16:33:56.391943204+00:00 stderr F I1212 16:33:56.391924 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source 2025-12-12T16:33:56.594470045+00:00 stderr F I1212 16:33:56.594398 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:33:56.594470045+00:00 stderr F I1212 16:33:56.594463 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-source 2025-12-12T16:33:56.792464203+00:00 stderr F I1212 16:33:56.792386 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:33:56.792512564+00:00 stderr F I1212 16:33:56.792461 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source 2025-12-12T16:33:56.993974868+00:00 stderr F I1212 16:33:56.993881 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:33:56.993974868+00:00 stderr F I1212 16:33:56.993936 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:33:57.190886009+00:00 stderr F I1212 16:33:57.190801 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:33:57.190886009+00:00 stderr F I1212 16:33:57.190853 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:33:57.392886377+00:00 stderr F I1212 16:33:57.392790 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:33:57.392886377+00:00 stderr F I1212 16:33:57.392862 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target 2025-12-12T16:33:57.595988453+00:00 stderr F I1212 16:33:57.595899 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) 
openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:33:57.595988453+00:00 stderr F I1212 16:33:57.595964 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-target 2025-12-12T16:33:57.792215436+00:00 stderr F I1212 16:33:57.792068 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:33:57.792215436+00:00 stderr F I1212 16:33:57.792130 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role 2025-12-12T16:33:57.992484961+00:00 stderr F I1212 16:33:57.992387 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role was successful 2025-12-12T16:33:57.992484961+00:00 stderr F I1212 16:33:57.992467 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding 2025-12-12T16:33:58.192314055+00:00 stderr F I1212 16:33:58.191679 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding was successful 2025-12-12T16:33:58.192314055+00:00 stderr F I1212 16:33:58.191752 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-node-identity 2025-12-12T16:33:58.398747944+00:00 stderr F I1212 16:33:58.398627 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-node-identity was successful 2025-12-12T16:33:58.398747944+00:00 stderr F I1212 16:33:58.398723 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity 2025-12-12T16:33:58.593945752+00:00 stderr F I1212 16:33:58.593854 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:33:58.593945752+00:00 stderr F I1212 16:33:58.593936 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity 2025-12-12T16:33:58.795256873+00:00 stderr F I1212 16:33:58.795134 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity was successful 2025-12-12T16:33:58.795256873+00:00 stderr F I1212 16:33:58.795212 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity 2025-12-12T16:33:58.989802324+00:00 stderr F I1212 16:33:58.989690 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity was successful 2025-12-12T16:33:58.989802324+00:00 stderr F I1212 16:33:58.989756 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:33:59.190759137+00:00 stderr F I1212 16:33:59.190673 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:33:59.190759137+00:00 stderr F I1212 16:33:59.190726 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:33:59.390437827+00:00 stderr F I1212 16:33:59.390312 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:33:59.390437827+00:00 stderr F I1212 
16:33:59.390389 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 2025-12-12T16:33:59.591234935+00:00 stderr F I1212 16:33:59.590773 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 was successful 2025-12-12T16:33:59.591234935+00:00 stderr F I1212 16:33:59.590833 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm 2025-12-12T16:33:59.792005032+00:00 stderr F I1212 16:33:59.791925 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm was successful 2025-12-12T16:33:59.792005032+00:00 stderr F I1212 16:33:59.791984 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity 2025-12-12T16:33:59.991648911+00:00 stderr F I1212 16:33:59.991557 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:33:59.991648911+00:00 stderr F I1212 16:33:59.991613 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io 2025-12-12T16:34:00.192477539+00:00 stderr F I1212 16:34:00.192409 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io was successful 2025-12-12T16:34:00.192526840+00:00 stderr F I1212 16:34:00.192473 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity 2025-12-12T16:34:00.399944694+00:00 stderr F I1212 16:34:00.398781 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:34:00.399944694+00:00 stderr F I1212 16:34:00.398858 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules 2025-12-12T16:34:00.593678706+00:00 stderr F I1212 16:34:00.593585 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules was successful 2025-12-12T16:34:00.593678706+00:00 stderr F I1212 16:34:00.593640 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter 2025-12-12T16:34:00.791749776+00:00 stderr F I1212 16:34:00.791660 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter was successful 2025-12-12T16:34:00.791797587+00:00 stderr F I1212 16:34:00.791743 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter 2025-12-12T16:34:00.991915858+00:00 stderr F I1212 16:34:00.991807 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:34:00.991915858+00:00 stderr F I1212 16:34:00.991859 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter 2025-12-12T16:34:01.190500530+00:00 stderr F I1212 16:34:01.190398 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter was successful 2025-12-12T16:34:01.190500530+00:00 stderr F I1212 16:34:01.190459 1 log.go:245] 
reconciling (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script 2025-12-12T16:34:01.392474008+00:00 stderr F I1212 16:34:01.392401 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script was successful 2025-12-12T16:34:01.392474008+00:00 stderr F I1212 16:34:01.392464 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter 2025-12-12T16:34:01.595437650+00:00 stderr F I1212 16:34:01.594942 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:34:01.595495582+00:00 stderr F I1212 16:34:01.595438 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-console 2025-12-12T16:34:01.792698010+00:00 stderr F I1212 16:34:01.792625 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-console was successful 2025-12-12T16:34:01.792698010+00:00 stderr F I1212 16:34:01.792686 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin 2025-12-12T16:34:01.994803301+00:00 stderr F I1212 16:34:01.994704 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:34:01.994803301+00:00 stderr F I1212 16:34:01.994766 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin 2025-12-12T16:34:02.197027064+00:00 stderr F I1212 16:34:02.196936 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:34:02.197027064+00:00 stderr F I1212 16:34:02.196994 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-console/networking-console-plugin 2025-12-12T16:34:02.392408297+00:00 stderr F I1212 16:34:02.392332 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:34:02.392408297+00:00 stderr F I1212 16:34:02.392394 1 log.go:245] reconciling (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin 2025-12-12T16:34:02.592442966+00:00 stderr F I1212 16:34:02.592363 1 log.go:245] Apply / Create of (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin was successful 2025-12-12T16:34:02.606391675+00:00 stderr F I1212 16:34:02.606297 1 log.go:245] Operconfig Controller complete 2025-12-12T16:34:24.058234975+00:00 stderr F I1212 16:34:24.058123 1 log.go:245] Reconciling update to IngressController openshift-ingress-operator/default 2025-12-12T16:36:25.877439871+00:00 stderr F I1212 16:36:25.876729 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-network-node-identity/network-node-identity 2025-12-12T16:36:25.877824721+00:00 stderr F I1212 16:36:25.877799 1 log.go:245] successful reconciliation 2025-12-12T16:36:29.282427830+00:00 stderr F I1212 16:36:29.282331 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/ovn 2025-12-12T16:36:29.282774659+00:00 stderr F I1212 16:36:29.282730 1 log.go:245] successful reconciliation 2025-12-12T16:36:32.273778828+00:00 stderr F I1212 16:36:32.273716 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/signer 2025-12-12T16:36:32.274113867+00:00 stderr F I1212 16:36:32.274093 1 log.go:245] successful reconciliation 2025-12-12T16:37:02.606753565+00:00 stderr F I1212 16:37:02.606658 1 log.go:245] Reconciling 
Network.operator.openshift.io cluster 2025-12-12T16:37:02.752069115+00:00 stderr F I1212 16:37:02.751984 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:37:02.753535832+00:00 stderr F I1212 16:37:02.753482 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:37:02.755498951+00:00 stderr F I1212 16:37:02.755471 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:37:02.757873861+00:00 stderr F I1212 16:37:02.757826 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc00418f500 DisableUDPAggregation:false DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:37:02.762014845+00:00 stderr F I1212 16:37:02.761980 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 4 -> 4 2025-12-12T16:37:02.762014845+00:00 stderr F I1212 16:37:02.762000 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:37:02.762014845+00:00 stderr F I1212 16:37:02.762007 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:37:02.764786305+00:00 stderr F I1212 16:37:02.764758 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 3 -> 3 2025-12-12T16:37:02.764786305+00:00 stderr F I1212 16:37:02.764774 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:37:02.764786305+00:00 stderr F I1212 16:37:02.764780 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 3 -> 3 2025-12-12T16:37:02.764816046+00:00 stderr F I1212 16:37:02.764784 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:37:02.764816046+00:00 stderr F I1212 16:37:02.764800 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:37:02.772077768+00:00 stderr F I1212 16:37:02.772038 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:37:02.772077768+00:00 stderr F I1212 16:37:02.772060 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:37:02.828240339+00:00 stderr F I1212 16:37:02.828142 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:37:02.842668482+00:00 stderr F I1212 16:37:02.842632 1 log.go:245] Apply / Create of (operator.openshift.io/v1, Kind=Network) /cluster was successful 2025-12-12T16:37:02.842702842+00:00 stderr F I1212 16:37:02.842675 1 log.go:245] Starting render phase 2025-12-12T16:37:02.860820138+00:00 stderr F I1212 16:37:02.860738 1 ovn_kubernetes.go:344] OVN_EGRESSIP_HEALTHCHECK_PORT env var is not defined. 
Using: 9107 2025-12-12T16:37:02.909689945+00:00 stderr F I1212 16:37:02.909599 1 ovn_kubernetes.go:1457] IP family mode: node=single-stack, controlPlane=single-stack 2025-12-12T16:37:02.909689945+00:00 stderr F I1212 16:37:02.909634 1 ovn_kubernetes.go:1429] IP family change: updateNode=true, updateControlPlane=true 2025-12-12T16:37:02.909689945+00:00 stderr F I1212 16:37:02.909667 1 ovn_kubernetes.go:1601] OVN-Kubernetes control-plane and node already at release version 4.20.1; no changes required 2025-12-12T16:37:02.909766477+00:00 stderr F I1212 16:37:02.909694 1 ovn_kubernetes.go:531] ovnk components: ovnkube-node: isRunning=true, update=true; ovnkube-control-plane: isRunning=true, update=true 2025-12-12T16:37:03.032778918+00:00 stderr F I1212 16:37:03.032683 1 ovn_kubernetes.go:1693] daemonset openshift-network-node-identity/network-node-identity rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 1 -> 1 2025-12-12T16:37:03.032778918+00:00 stderr F I1212 16:37:03.032713 1 ovn_kubernetes.go:1698] daemonset openshift-network-node-identity/network-node-identity rollout complete 2025-12-12T16:37:03.245616676+00:00 stderr F I1212 16:37:03.245542 1 log.go:245] Render phase done, rendered 126 objects 2025-12-12T16:37:03.263670109+00:00 stderr F I1212 16:37:03.263595 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster 2025-12-12T16:37:03.269834304+00:00 stderr F I1212 16:37:03.269774 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster was successful 2025-12-12T16:37:03.269888596+00:00 stderr F I1212 16:37:03.269839 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io 2025-12-12T16:37:03.281918908+00:00 stderr F I1212 16:37:03.281856 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io was successful 2025-12-12T16:37:03.281918908+00:00 stderr F I1212 16:37:03.281909 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io 2025-12-12T16:37:03.291444787+00:00 stderr F I1212 16:37:03.291388 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io was successful 2025-12-12T16:37:03.291474098+00:00 stderr F I1212 16:37:03.291449 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io 2025-12-12T16:37:03.299440628+00:00 stderr F I1212 16:37:03.299380 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io was successful 2025-12-12T16:37:03.299479169+00:00 stderr F I1212 16:37:03.299464 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io 2025-12-12T16:37:03.309159052+00:00 stderr F I1212 16:37:03.309116 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io was successful 2025-12-12T16:37:03.309214054+00:00 stderr F I1212 16:37:03.309150 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-multus 2025-12-12T16:37:03.315681956+00:00 stderr F I1212 16:37:03.315660 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-multus was successful 2025-12-12T16:37:03.315700717+00:00 stderr F I1212 
16:37:03.315689 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus 2025-12-12T16:37:03.322131948+00:00 stderr F I1212 16:37:03.322093 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus was successful 2025-12-12T16:37:03.322131948+00:00 stderr F I1212 16:37:03.322123 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools 2025-12-12T16:37:03.327418161+00:00 stderr F I1212 16:37:03.327391 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools was successful 2025-12-12T16:37:03.327445222+00:00 stderr F I1212 16:37:03.327417 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus 2025-12-12T16:37:03.330999251+00:00 stderr F I1212 16:37:03.330968 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus was successful 2025-12-12T16:37:03.331048522+00:00 stderr F I1212 16:37:03.331028 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient 2025-12-12T16:37:03.336737445+00:00 stderr F I1212 16:37:03.336697 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient was successful 2025-12-12T16:37:03.336752886+00:00 stderr F I1212 16:37:03.336736 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group 2025-12-12T16:37:03.470014614+00:00 stderr F I1212 16:37:03.469900 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group was successful 2025-12-12T16:37:03.470014614+00:00 stderr F I1212 16:37:03.469963 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools 2025-12-12T16:37:03.669877875+00:00 stderr F I1212 16:37:03.669813 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools was successful 2025-12-12T16:37:03.669877875+00:00 stderr F I1212 16:37:03.669863 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools 2025-12-12T16:37:03.870218179+00:00 stderr F I1212 16:37:03.870125 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools was successful 2025-12-12T16:37:03.870273020+00:00 stderr F I1212 16:37:03.870216 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers 2025-12-12T16:37:04.073087946+00:00 stderr F I1212 16:37:04.073009 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers was successful 2025-12-12T16:37:04.073162558+00:00 stderr F I1212 16:37:04.073086 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts 2025-12-12T16:37:04.268978077+00:00 stderr F I1212 16:37:04.268364 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts was successful 2025-12-12T16:37:04.268978077+00:00 stderr F I1212 16:37:04.268429 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts 2025-12-12T16:37:04.470795128+00:00 stderr F I1212 16:37:04.470716 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts was successful 2025-12-12T16:37:04.470795128+00:00 stderr F I1212 16:37:04.470772 1 log.go:245] reconciling 
(rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni 2025-12-12T16:37:04.670041934+00:00 stderr F I1212 16:37:04.669957 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni was successful 2025-12-12T16:37:04.670041934+00:00 stderr F I1212 16:37:04.670018 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni 2025-12-12T16:37:04.872265045+00:00 stderr F I1212 16:37:04.872135 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni was successful 2025-12-12T16:37:04.872265045+00:00 stderr F I1212 16:37:04.872220 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project 2025-12-12T16:37:05.070450505+00:00 stderr F I1212 16:37:05.070366 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project was successful 2025-12-12T16:37:05.070450505+00:00 stderr F I1212 16:37:05.070440 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist 2025-12-12T16:37:05.273280851+00:00 stderr F I1212 16:37:05.273183 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist was successful 2025-12-12T16:37:05.273333652+00:00 stderr F I1212 16:37:05.273284 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources 2025-12-12T16:37:05.474047545+00:00 stderr F I1212 16:37:05.473299 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources was successful 2025-12-12T16:37:05.474047545+00:00 stderr F I1212 16:37:05.474007 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config 2025-12-12T16:37:05.670280585+00:00 stderr F I1212 16:37:05.670160 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config was successful 2025-12-12T16:37:05.670280585+00:00 stderr F I1212 16:37:05.670236 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config 2025-12-12T16:37:05.869897931+00:00 stderr F I1212 16:37:05.869797 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config was successful 2025-12-12T16:37:05.869897931+00:00 stderr F I1212 16:37:05.869850 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus 2025-12-12T16:37:06.088989835+00:00 stderr F I1212 16:37:06.088875 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus was successful 2025-12-12T16:37:06.088989835+00:00 stderr F I1212 16:37:06.088956 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins 2025-12-12T16:37:06.288309442+00:00 stderr F I1212 16:37:06.288213 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins was successful 2025-12-12T16:37:06.288309442+00:00 stderr F I1212 16:37:06.288270 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa 2025-12-12T16:37:06.471675250+00:00 stderr F I1212 16:37:06.471576 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa was successful 2025-12-12T16:37:06.471675250+00:00 stderr F I1212 16:37:06.471636 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role 2025-12-12T16:37:06.668672299+00:00 stderr F I1212 16:37:06.668603 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role was successful 2025-12-12T16:37:06.668736891+00:00 stderr F I1212 16:37:06.668666 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding 2025-12-12T16:37:06.871343831+00:00 stderr F I1212 16:37:06.871260 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding was successful 2025-12-12T16:37:06.871343831+00:00 stderr F I1212 16:37:06.871328 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon 2025-12-12T16:37:07.074517716+00:00 stderr F I1212 16:37:07.074430 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon was successful 2025-12-12T16:37:07.074574347+00:00 stderr F I1212 16:37:07.074550 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network 2025-12-12T16:37:07.271244249+00:00 stderr F I1212 16:37:07.271145 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network was successful 2025-12-12T16:37:07.271244249+00:00 stderr F I1212 16:37:07.271227 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/network-metrics-service 2025-12-12T16:37:07.469980812+00:00 stderr F I1212 16:37:07.469899 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/network-metrics-service was successful 2025-12-12T16:37:07.469980812+00:00 stderr F I1212 16:37:07.469957 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:37:07.669990717+00:00 stderr F I1212 16:37:07.669870 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:37:07.669990717+00:00 stderr F I1212 16:37:07.669928 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:37:07.869641974+00:00 stderr F I1212 16:37:07.869535 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:37:07.869641974+00:00 stderr F I1212 16:37:07.869603 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/multus-admission-controller 2025-12-12T16:37:08.070479380+00:00 stderr F I1212 16:37:08.070396 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/multus-admission-controller was successful 2025-12-12T16:37:08.070479380+00:00 stderr F I1212 16:37:08.070453 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ac 2025-12-12T16:37:08.270386132+00:00 stderr F I1212 16:37:08.270255 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ac was successful 2025-12-12T16:37:08.270386132+00:00 stderr F I1212 16:37:08.270318 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook 2025-12-12T16:37:08.470825119+00:00 stderr F I1212 16:37:08.470751 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook was successful 2025-12-12T16:37:08.470885520+00:00 stderr F I1212 16:37:08.470825 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook 2025-12-12T16:37:08.668489495+00:00 stderr F I1212 16:37:08.668404 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook was successful 2025-12-12T16:37:08.668489495+00:00 stderr F I1212 16:37:08.668465 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io 2025-12-12T16:37:08.871727902+00:00 stderr F I1212 16:37:08.871661 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io was successful 2025-12-12T16:37:08.871727902+00:00 stderr F I1212 16:37:08.871720 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller 2025-12-12T16:37:09.073838670+00:00 stderr F I1212 16:37:09.073734 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller was successful 2025-12-12T16:37:09.073838670+00:00 stderr F I1212 16:37:09.073820 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller 2025-12-12T16:37:09.272909352+00:00 stderr F I1212 16:37:09.271609 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller was successful 2025-12-12T16:37:09.272909352+00:00 stderr F I1212 16:37:09.271664 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:37:09.472558548+00:00 stderr F I1212 16:37:09.472472 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:37:09.472558548+00:00 stderr F I1212 16:37:09.472532 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:37:09.671132248+00:00 stderr F I1212 16:37:09.671031 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:37:09.671132248+00:00 stderr F I1212 16:37:09.671105 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules 2025-12-12T16:37:09.872798363+00:00 stderr F I1212 16:37:09.872727 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules was successful 2025-12-12T16:37:09.872840094+00:00 stderr F I1212 16:37:09.872796 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-ovn-kubernetes 2025-12-12T16:37:10.072664425+00:00 stderr F I1212 16:37:10.072520 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-ovn-kubernetes was successful 2025-12-12T16:37:10.072664425+00:00 stderr F I1212 16:37:10.072578 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org 2025-12-12T16:37:10.277605054+00:00 stderr F I1212 16:37:10.277522 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org was successful 2025-12-12T16:37:10.277605054+00:00 stderr F I1212 16:37:10.277587 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org 2025-12-12T16:37:10.476851970+00:00 stderr F I1212 16:37:10.476772 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org was successful 2025-12-12T16:37:10.476851970+00:00 stderr F I1212 16:37:10.476832 1 log.go:245] reconciling (apiextensions.k8s.io/v1, 
Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org 2025-12-12T16:37:10.677587164+00:00 stderr F I1212 16:37:10.677488 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org was successful 2025-12-12T16:37:10.677587164+00:00 stderr F I1212 16:37:10.677545 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org 2025-12-12T16:37:10.879534308+00:00 stderr F I1212 16:37:10.879441 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org was successful 2025-12-12T16:37:10.879534308+00:00 stderr F I1212 16:37:10.879516 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org 2025-12-12T16:37:11.077524832+00:00 stderr F I1212 16:37:11.077424 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org was successful 2025-12-12T16:37:11.077524832+00:00 stderr F I1212 16:37:11.077499 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:37:11.330002406+00:00 stderr F I1212 16:37:11.329505 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:37:11.330002406+00:00 stderr F I1212 16:37:11.329937 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:37:11.509281981+00:00 stderr F I1212 16:37:11.509150 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:37:11.509397304+00:00 stderr F I1212 16:37:11.509290 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io 2025-12-12T16:37:11.677722893+00:00 stderr F I1212 16:37:11.677595 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io was successful 2025-12-12T16:37:11.677722893+00:00 stderr F I1212 16:37:11.677683 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org 2025-12-12T16:37:11.899145486+00:00 stderr F I1212 16:37:11.899048 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:37:11.899145486+00:00 stderr F I1212 16:37:11.899097 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org 2025-12-12T16:37:12.105481541+00:00 stderr F I1212 16:37:12.105359 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:37:12.105481541+00:00 stderr F I1212 16:37:12.105426 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:37:12.269915452+00:00 stderr F I1212 16:37:12.269817 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:37:12.269915452+00:00 stderr F I1212 16:37:12.269874 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) 
openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited 2025-12-12T16:37:12.470530513+00:00 stderr F I1212 16:37:12.470476 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:37:12.470598835+00:00 stderr F I1212 16:37:12.470534 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited 2025-12-12T16:37:12.669157153+00:00 stderr F I1212 16:37:12.669088 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited was successful 2025-12-12T16:37:12.669226895+00:00 stderr F I1212 16:37:12.669171 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited 2025-12-12T16:37:12.872842511+00:00 stderr F I1212 16:37:12.872751 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:37:12.872842511+00:00 stderr F I1212 16:37:12.872807 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited 2025-12-12T16:37:13.068926668+00:00 stderr F I1212 16:37:13.068837 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited was successful 2025-12-12T16:37:13.068926668+00:00 stderr F I1212 16:37:13.068905 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy 2025-12-12T16:37:13.271031706+00:00 stderr F I1212 16:37:13.270953 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy was successful 2025-12-12T16:37:13.271031706+00:00 stderr F I1212 16:37:13.271021 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy 2025-12-12T16:37:13.471433520+00:00 stderr F I1212 16:37:13.471368 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy was successful 2025-12-12T16:37:13.471482661+00:00 stderr F I1212 16:37:13.471435 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config 2025-12-12T16:37:13.669575619+00:00 stderr F I1212 16:37:13.669455 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config was successful 2025-12-12T16:37:13.669575619+00:00 stderr F I1212 16:37:13.669514 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:37:13.871721398+00:00 stderr F I1212 16:37:13.871624 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:37:13.871721398+00:00 stderr F I1212 16:37:13.871685 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:37:14.072294707+00:00 stderr F I1212 16:37:14.072215 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:37:14.072294707+00:00 stderr F I1212 16:37:14.072281 1 log.go:245] reconciling 
(rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:37:14.269399069+00:00 stderr F I1212 16:37:14.269326 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:37:14.269399069+00:00 stderr F I1212 16:37:14.269387 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:37:14.472357909+00:00 stderr F I1212 16:37:14.470337 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:37:14.472465392+00:00 stderr F I1212 16:37:14.472405 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:37:14.671763049+00:00 stderr F I1212 16:37:14.671691 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:37:14.671796010+00:00 stderr F I1212 16:37:14.671760 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn 2025-12-12T16:37:14.871637001+00:00 stderr F I1212 16:37:14.871525 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn was successful 2025-12-12T16:37:14.871637001+00:00 stderr F I1212 16:37:14.871615 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer 2025-12-12T16:37:15.072524208+00:00 stderr F I1212 16:37:15.072430 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer was successful 2025-12-12T16:37:15.072524208+00:00 stderr F I1212 16:37:15.072506 1 log.go:245] reconciling (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes 2025-12-12T16:37:15.271832386+00:00 stderr F I1212 16:37:15.271737 1 log.go:245] Apply / Create of (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes was successful 2025-12-12T16:37:15.271832386+00:00 stderr F I1212 16:37:15.271802 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader 2025-12-12T16:37:15.471045731+00:00 stderr F I1212 16:37:15.470957 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader was successful 2025-12-12T16:37:15.471045731+00:00 stderr F I1212 16:37:15.471031 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib 2025-12-12T16:37:15.671601461+00:00 stderr F I1212 16:37:15.671503 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib was successful 2025-12-12T16:37:15.671601461+00:00 stderr F I1212 16:37:15.671557 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor 2025-12-12T16:37:15.870014986+00:00 stderr F I1212 16:37:15.869851 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor was successful 2025-12-12T16:37:15.870014986+00:00 stderr F I1212 16:37:15.869919 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, 
Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer 2025-12-12T16:37:16.071276882+00:00 stderr F I1212 16:37:16.071131 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer was successful 2025-12-12T16:37:16.071276882+00:00 stderr F I1212 16:37:16.071214 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules 2025-12-12T16:37:16.274094778+00:00 stderr F I1212 16:37:16.274002 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules was successful 2025-12-12T16:37:16.274201681+00:00 stderr F I1212 16:37:16.274091 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules 2025-12-12T16:37:16.474434242+00:00 stderr F I1212 16:37:16.474331 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules was successful 2025-12-12T16:37:16.474434242+00:00 stderr F I1212 16:37:16.474391 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features 2025-12-12T16:37:16.669459092+00:00 stderr F I1212 16:37:16.668537 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features was successful 2025-12-12T16:37:16.669459092+00:00 stderr F I1212 16:37:16.668601 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics 2025-12-12T16:37:16.870391591+00:00 stderr F I1212 16:37:16.870308 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics was successful 2025-12-12T16:37:16.870391591+00:00 stderr F I1212 16:37:16.870367 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:37:17.071001210+00:00 stderr F I1212 16:37:17.070905 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:37:17.071001210+00:00 stderr F I1212 16:37:17.070955 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node 2025-12-12T16:37:17.270759309+00:00 stderr F I1212 16:37:17.270657 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node was successful 2025-12-12T16:37:17.270759309+00:00 stderr F I1212 16:37:17.270709 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:37:17.477117554+00:00 stderr F I1212 16:37:17.476279 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:37:17.477117554+00:00 stderr F I1212 16:37:17.477086 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:37:17.669421406+00:00 stderr F I1212 16:37:17.668676 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:37:17.669421406+00:00 stderr F I1212 16:37:17.668736 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:37:17.870159700+00:00 stderr F I1212 16:37:17.870082 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:37:17.870159700+00:00 stderr F I1212 16:37:17.870145 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-host-network 2025-12-12T16:37:18.073599721+00:00 stderr F I1212 16:37:18.073524 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-host-network was successful 2025-12-12T16:37:18.073599721+00:00 stderr F I1212 16:37:18.073592 1 log.go:245] reconciling (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas 2025-12-12T16:37:18.271980245+00:00 stderr F I1212 16:37:18.271833 1 log.go:245] Apply / Create of (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas was successful 2025-12-12T16:37:18.271980245+00:00 stderr F I1212 16:37:18.271908 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane 2025-12-12T16:37:18.477941780+00:00 stderr F I1212 16:37:18.477820 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane was successful 2025-12-12T16:37:18.477941780+00:00 stderr F I1212 16:37:18.477902 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node 2025-12-12T16:37:18.698863991+00:00 stderr F I1212 16:37:18.698763 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node was successful 2025-12-12T16:37:18.698863991+00:00 stderr F I1212 16:37:18.698845 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label 2025-12-12T16:37:18.871730285+00:00 stderr F I1212 16:37:18.871652 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label was successful 2025-12-12T16:37:18.871730285+00:00 stderr F I1212 16:37:18.871715 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding 2025-12-12T16:37:19.068893029+00:00 stderr F I1212 16:37:19.068798 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding was successful 2025-12-12T16:37:19.068893029+00:00 stderr F I1212 16:37:19.068861 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-diagnostics 2025-12-12T16:37:19.270887184+00:00 stderr F I1212 16:37:19.270784 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-diagnostics was successful 2025-12-12T16:37:19.270887184+00:00 stderr F I1212 16:37:19.270870 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:37:19.470043688+00:00 stderr F I1212 16:37:19.469977 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:37:19.470043688+00:00 stderr F I1212 16:37:19.470035 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:37:19.669334885+00:00 stderr F I1212 16:37:19.669257 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:37:19.669334885+00:00 stderr F I1212 16:37:19.669315 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) 
openshift-network-diagnostics/network-diagnostics 2025-12-12T16:37:19.870776707+00:00 stderr F I1212 16:37:19.870700 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:37:19.870776707+00:00 stderr F I1212 16:37:19.870751 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics 2025-12-12T16:37:20.072230388+00:00 stderr F I1212 16:37:20.072137 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics was successful 2025-12-12T16:37:20.072230388+00:00 stderr F I1212 16:37:20.072202 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics 2025-12-12T16:37:20.272671184+00:00 stderr F I1212 16:37:20.272569 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics was successful 2025-12-12T16:37:20.272671184+00:00 stderr F I1212 16:37:20.272641 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics 2025-12-12T16:37:20.470659889+00:00 stderr F I1212 16:37:20.469684 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics was successful 2025-12-12T16:37:20.470659889+00:00 stderr F I1212 16:37:20.469748 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source 2025-12-12T16:37:20.674726855+00:00 stderr F I1212 16:37:20.674635 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:37:20.674726855+00:00 stderr F I1212 16:37:20.674700 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-source 2025-12-12T16:37:20.872764971+00:00 stderr F I1212 16:37:20.872647 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:37:20.872764971+00:00 stderr F I1212 16:37:20.872704 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source 2025-12-12T16:37:21.071531286+00:00 stderr F I1212 16:37:21.071443 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:37:21.071531286+00:00 stderr F I1212 16:37:21.071500 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:37:21.271873279+00:00 stderr F I1212 16:37:21.271777 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:37:21.271873279+00:00 stderr F I1212 16:37:21.271838 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:37:21.470156081+00:00 stderr F I1212 16:37:21.470048 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:37:21.470156081+00:00 stderr F I1212 16:37:21.470123 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target 2025-12-12T16:37:21.675897371+00:00 stderr F I1212 16:37:21.675794 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) 
openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:37:21.675897371+00:00 stderr F I1212 16:37:21.675879 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-target 2025-12-12T16:37:21.872547842+00:00 stderr F I1212 16:37:21.872448 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:37:21.872547842+00:00 stderr F I1212 16:37:21.872530 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role 2025-12-12T16:37:22.069306165+00:00 stderr F I1212 16:37:22.069169 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role was successful 2025-12-12T16:37:22.069343846+00:00 stderr F I1212 16:37:22.069308 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding 2025-12-12T16:37:22.268762357+00:00 stderr F I1212 16:37:22.268716 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding was successful 2025-12-12T16:37:22.268850449+00:00 stderr F I1212 16:37:22.268840 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-node-identity 2025-12-12T16:37:22.471366667+00:00 stderr F I1212 16:37:22.470530 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-node-identity was successful 2025-12-12T16:37:22.471366667+00:00 stderr F I1212 16:37:22.471348 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity 2025-12-12T16:37:22.674104951+00:00 stderr F I1212 16:37:22.671602 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:37:22.674104951+00:00 stderr F I1212 16:37:22.671660 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity 2025-12-12T16:37:22.884283642+00:00 stderr F I1212 16:37:22.874521 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity was successful 2025-12-12T16:37:22.884283642+00:00 stderr F I1212 16:37:22.874571 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity 2025-12-12T16:37:23.070125482+00:00 stderr F I1212 16:37:23.070055 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity was successful 2025-12-12T16:37:23.070125482+00:00 stderr F I1212 16:37:23.070105 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:37:23.271431020+00:00 stderr F I1212 16:37:23.270167 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:37:23.271431020+00:00 stderr F I1212 16:37:23.270895 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:37:23.470913932+00:00 stderr F I1212 16:37:23.470849 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:37:23.470959403+00:00 stderr F I1212 
16:37:23.470908 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 2025-12-12T16:37:23.670637570+00:00 stderr F I1212 16:37:23.670484 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 was successful 2025-12-12T16:37:23.670637570+00:00 stderr F I1212 16:37:23.670561 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm 2025-12-12T16:37:23.869397734+00:00 stderr F I1212 16:37:23.869329 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm was successful 2025-12-12T16:37:23.869397734+00:00 stderr F I1212 16:37:23.869386 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity 2025-12-12T16:37:24.066334392+00:00 stderr F I1212 16:37:24.066176 1 log.go:245] Reconciling update to IngressController openshift-ingress-operator/default 2025-12-12T16:37:24.070384944+00:00 stderr F I1212 16:37:24.070345 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:37:24.070564298+00:00 stderr F I1212 16:37:24.070536 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io 2025-12-12T16:37:24.270888501+00:00 stderr F I1212 16:37:24.270816 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io was successful 2025-12-12T16:37:24.270888501+00:00 stderr F I1212 16:37:24.270878 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity 2025-12-12T16:37:24.476735943+00:00 stderr F I1212 16:37:24.476643 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:37:24.476735943+00:00 stderr F I1212 16:37:24.476699 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules 2025-12-12T16:37:24.670317406+00:00 stderr F I1212 16:37:24.670265 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules was successful 2025-12-12T16:37:24.670427999+00:00 stderr F I1212 16:37:24.670415 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter 2025-12-12T16:37:24.869330647+00:00 stderr F I1212 16:37:24.869172 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter was successful 2025-12-12T16:37:24.869330647+00:00 stderr F I1212 16:37:24.869265 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter 2025-12-12T16:37:25.070250495+00:00 stderr F I1212 16:37:25.070126 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:37:25.070352378+00:00 stderr F I1212 16:37:25.070339 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter 2025-12-12T16:37:25.270581009+00:00 stderr F I1212 16:37:25.270516 1 log.go:245] Apply / Create of 
(rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter was successful 2025-12-12T16:37:25.270748433+00:00 stderr F I1212 16:37:25.270722 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script 2025-12-12T16:37:25.469265311+00:00 stderr F I1212 16:37:25.468576 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script was successful 2025-12-12T16:37:25.469265311+00:00 stderr F I1212 16:37:25.468662 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter 2025-12-12T16:37:25.676513808+00:00 stderr F I1212 16:37:25.676358 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:37:25.676513808+00:00 stderr F I1212 16:37:25.676413 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-console 2025-12-12T16:37:25.869512227+00:00 stderr F I1212 16:37:25.869449 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-console was successful 2025-12-12T16:37:25.869640210+00:00 stderr F I1212 16:37:25.869629 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin 2025-12-12T16:37:26.070679292+00:00 stderr F I1212 16:37:26.070590 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:37:26.070679292+00:00 stderr F I1212 16:37:26.070646 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin 2025-12-12T16:37:26.283595671+00:00 stderr F I1212 16:37:26.283526 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:37:26.283785186+00:00 stderr F I1212 16:37:26.283763 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-console/networking-console-plugin 2025-12-12T16:37:26.471991165+00:00 stderr F I1212 16:37:26.471879 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:37:26.471991165+00:00 stderr F I1212 16:37:26.471952 1 log.go:245] reconciling (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin 2025-12-12T16:37:26.671212890+00:00 stderr F I1212 16:37:26.671133 1 log.go:245] Apply / Create of (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin was successful 2025-12-12T16:37:26.684465183+00:00 stderr F I1212 16:37:26.684436 1 log.go:245] Operconfig Controller complete 2025-12-12T16:37:50.768432201+00:00 stderr F I1212 16:37:50.768324 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:38:46.325481643+00:00 stderr F I1212 16:38:46.325387 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:38:46.332893270+00:00 stderr F I1212 16:38:46.332841 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:46.339317541+00:00 stderr F I1212 16:38:46.339239 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:46.345576298+00:00 stderr F I1212 
16:38:46.344708 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:46.350893552+00:00 stderr F I1212 16:38:46.350838 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:46.359633142+00:00 stderr F I1212 16:38:46.359576 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:46.366634438+00:00 stderr F I1212 16:38:46.366596 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:46.373716335+00:00 stderr F I1212 16:38:46.373683 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:38:46.379359927+00:00 stderr F I1212 16:38:46.379303 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:38:46.386581109+00:00 stderr F I1212 16:38:46.386549 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:46.526971195+00:00 stderr F I1212 16:38:46.526793 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:50.328955472+00:00 stderr F I1212 16:38:50.328872 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:38:50.335596349+00:00 stderr F I1212 16:38:50.335542 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:50.344999835+00:00 stderr F I1212 16:38:50.344956 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:50.352204576+00:00 stderr F I1212 16:38:50.352121 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:50.363565292+00:00 stderr F I1212 16:38:50.363492 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:50.370596828+00:00 stderr F I1212 16:38:50.370529 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:50.381344088+00:00 stderr F I1212 16:38:50.380481 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:50.388365025+00:00 stderr F I1212 16:38:50.388306 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 
2025-12-12T16:38:50.394611592+00:00 stderr F I1212 16:38:50.394483 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:38:50.401962966+00:00 stderr F I1212 16:38:50.401912 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:50.527814378+00:00 stderr F I1212 16:38:50.527733 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:51.341537304+00:00 stderr F I1212 16:38:51.341458 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:38:51.347068483+00:00 stderr F I1212 16:38:51.347018 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:51.357008232+00:00 stderr F I1212 16:38:51.356946 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:51.363678050+00:00 stderr F I1212 16:38:51.363621 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:51.528887611+00:00 stderr F I1212 16:38:51.528820 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:51.727037970+00:00 stderr F I1212 16:38:51.726957 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:51.928918712+00:00 stderr F I1212 16:38:51.928824 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:52.127820580+00:00 stderr F I1212 16:38:52.127736 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:38:52.331824015+00:00 stderr F I1212 16:38:52.331746 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:38:52.529413570+00:00 stderr F I1212 16:38:52.529303 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:52.728500152+00:00 stderr F I1212 16:38:52.728414 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:52.933606166+00:00 stderr F I1212 16:38:52.933469 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:38:53.126213305+00:00 stderr F I1212 16:38:53.126118 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n 
openshift-network-diagnostics is applied 2025-12-12T16:38:53.328762605+00:00 stderr F I1212 16:38:53.328640 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:53.529150440+00:00 stderr F I1212 16:38:53.529052 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:53.729484712+00:00 stderr F I1212 16:38:53.729375 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:53.933897948+00:00 stderr F I1212 16:38:53.933775 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:54.130651762+00:00 stderr F I1212 16:38:54.130504 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:54.331750934+00:00 stderr F I1212 16:38:54.331618 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:38:54.529298208+00:00 stderr F I1212 16:38:54.529165 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:38:54.731128859+00:00 stderr F I1212 16:38:54.730854 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:54.928117858+00:00 stderr F I1212 16:38:54.928033 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:56.431439450+00:00 stderr F I1212 16:38:56.431349 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:38:56.441836141+00:00 stderr F I1212 16:38:56.441756 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:56.451820222+00:00 stderr F I1212 16:38:56.451742 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:56.459799942+00:00 stderr F I1212 16:38:56.459766 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:56.471733642+00:00 stderr F I1212 16:38:56.471664 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:56.481947829+00:00 stderr F I1212 16:38:56.481867 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:56.490707529+00:00 stderr F I1212 16:38:56.490559 1 log.go:245] The check 
PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:56.528381916+00:00 stderr F I1212 16:38:56.528304 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:38:56.727821937+00:00 stderr F I1212 16:38:56.727683 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:38:56.931321060+00:00 stderr F I1212 16:38:56.931231 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:57.127575051+00:00 stderr F I1212 16:38:57.127463 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:57.551601994+00:00 stderr F I1212 16:38:57.551521 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:38:57.559000350+00:00 stderr F I1212 16:38:57.558902 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:57.727838042+00:00 stderr F I1212 16:38:57.727736 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:57.926317149+00:00 stderr F I1212 16:38:57.926250 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:58.150718427+00:00 stderr F I1212 16:38:58.126512 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:58.329842258+00:00 stderr F I1212 16:38:58.329307 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:58.525937015+00:00 stderr F I1212 16:38:58.525868 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:58.726847463+00:00 stderr F I1212 16:38:58.726764 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:38:58.928059398+00:00 stderr F I1212 16:38:58.927275 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:38:59.129019347+00:00 stderr F I1212 16:38:59.128938 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:59.329419342+00:00 stderr F I1212 16:38:59.329346 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:38:59.529786507+00:00 stderr F I1212 16:38:59.529727 1 
log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:38:59.727216738+00:00 stderr F I1212 16:38:59.727085 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:38:59.926960316+00:00 stderr F I1212 16:38:59.926892 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:39:00.128849749+00:00 stderr F I1212 16:39:00.128755 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:39:00.328268609+00:00 stderr F I1212 16:39:00.328160 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:39:00.527605058+00:00 stderr F I1212 16:39:00.527541 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:39:00.729494980+00:00 stderr F I1212 16:39:00.729404 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:39:00.930154851+00:00 stderr F I1212 16:39:00.930105 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:39:01.128560226+00:00 stderr F I1212 16:39:01.128434 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:39:01.330343265+00:00 stderr F I1212 16:39:01.330270 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:39:01.526002591+00:00 stderr F I1212 16:39:01.525935 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:39:01.732576151+00:00 stderr F I1212 16:39:01.731589 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-default-service-cluster-0 -n openshift-network-diagnostics is applied 2025-12-12T16:39:01.927093459+00:00 stderr F I1212 16:39:01.926991 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:39:02.127107914+00:00 stderr F I1212 16:39:02.127053 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:39:02.327113029+00:00 stderr F I1212 16:39:02.327026 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:39:02.527980746+00:00 stderr F I1212 16:39:02.527885 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-kubernetes-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 
2025-12-12T16:39:02.729068379+00:00 stderr F I1212 16:39:02.728952 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:39:02.929342500+00:00 stderr F I1212 16:39:02.929242 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-openshift-apiserver-endpoint-crc -n openshift-network-diagnostics is applied 2025-12-12T16:39:03.126493024+00:00 stderr F I1212 16:39:03.126438 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-external -n openshift-network-diagnostics is applied 2025-12-12T16:39:03.328010007+00:00 stderr F I1212 16:39:03.327959 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-load-balancer-api-internal -n openshift-network-diagnostics is applied 2025-12-12T16:39:03.525536140+00:00 stderr F I1212 16:39:03.525477 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-service-cluster -n openshift-network-diagnostics is applied 2025-12-12T16:39:03.725279769+00:00 stderr F I1212 16:39:03.725225 1 log.go:245] The check PodNetworkConnectivityCheck/network-check-source-crc-to-network-check-target-crc -n openshift-network-diagnostics is applied 2025-12-12T16:40:24.076003916+00:00 stderr F I1212 16:40:24.075421 1 log.go:245] Reconciling update to IngressController openshift-ingress-operator/default 2025-12-12T16:40:26.684808723+00:00 stderr F I1212 16:40:26.684736 1 log.go:245] Reconciling Network.operator.openshift.io cluster 2025-12-12T16:40:26.841535650+00:00 stderr F I1212 16:40:26.841434 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu-host=, the list of nodes are [] 2025-12-12T16:40:26.843739225+00:00 stderr F I1212 16:40:26.843685 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/dpu=, the list of nodes are [] 2025-12-12T16:40:26.845809427+00:00 stderr F I1212 16:40:26.845760 1 ovn_kubernetes.go:791] For Label network.operator.openshift.io/smart-nic=, the list of nodes are [] 2025-12-12T16:40:26.848543886+00:00 stderr F I1212 16:40:26.848483 1 ovn_kubernetes.go:956] OVN configuration is now &{GatewayMode: HyperShiftConfig:0xc001a60d80 DisableUDPAggregation:false DpuHostModeLabel:network.operator.openshift.io/dpu-host DpuHostModeNodes:[] DpuHostModeValue: DpuModeLabel:network.operator.openshift.io/dpu DpuModeNodes:[] SmartNicModeLabel:network.operator.openshift.io/smart-nic SmartNicModeNodes:[] SmartNicModeValue: MgmtPortResourceName: ConfigOverrides:map[]} 2025-12-12T16:40:26.852808423+00:00 stderr F I1212 16:40:26.852765 1 ovn_kubernetes.go:1728] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete; 1/1 scheduled; 1 available; generation 4 -> 4 2025-12-12T16:40:26.852808423+00:00 stderr F I1212 16:40:26.852783 1 ovn_kubernetes.go:1733] deployment openshift-ovn-kubernetes/ovnkube-control-plane rollout complete 2025-12-12T16:40:26.852808423+00:00 stderr F I1212 16:40:26.852792 1 ovn_kubernetes.go:1248] ovnkube-control-plane deployment status: progressing=false 2025-12-12T16:40:26.856675090+00:00 stderr F I1212 16:40:26.856624 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 3 -> 3 2025-12-12T16:40:26.856675090+00:00 stderr F I1212 16:40:26.856641 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 
2025-12-12T16:40:26.856675090+00:00 stderr F I1212 16:40:26.856646 1 ovn_kubernetes.go:1693] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 3 -> 3 2025-12-12T16:40:26.856675090+00:00 stderr F I1212 16:40:26.856650 1 ovn_kubernetes.go:1698] daemonset openshift-ovn-kubernetes/ovnkube-node rollout complete 2025-12-12T16:40:26.856675090+00:00 stderr F I1212 16:40:26.856666 1 ovn_kubernetes.go:1279] ovnkube-node DaemonSet status: progressing=false 2025-12-12T16:40:26.860119447+00:00 stderr F I1212 16:40:26.860069 1 ovn_kubernetes.go:1321] Found the DefaultV4MasqueradeSubnet(169.254.0.0/17) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:40:26.860119447+00:00 stderr F I1212 16:40:26.860091 1 ovn_kubernetes.go:1318] Found the DefaultV6MasqueradeSubnet(fd69::/112) in the "networkoperator.openshift.io/default-masquerade-network-cidrs" annotation 2025-12-12T16:40:26.902560693+00:00 stderr F I1212 16:40:26.902472 1 log.go:245] reconciling (operator.openshift.io/v1, Kind=Network) /cluster 2025-12-12T16:40:26.919883758+00:00 stderr F I1212 16:40:26.919816 1 log.go:245] Apply / Create of (operator.openshift.io/v1, Kind=Network) /cluster was successful 2025-12-12T16:40:26.919883758+00:00 stderr F I1212 16:40:26.919855 1 log.go:245] Starting render phase 2025-12-12T16:40:26.934109066+00:00 stderr F I1212 16:40:26.934031 1 ovn_kubernetes.go:344] OVN_EGRESSIP_HEALTHCHECK_PORT env var is not defined. Using: 9107 2025-12-12T16:40:26.964028377+00:00 stderr F I1212 16:40:26.963944 1 ovn_kubernetes.go:1457] IP family mode: node=single-stack, controlPlane=single-stack 2025-12-12T16:40:26.964028377+00:00 stderr F I1212 16:40:26.963972 1 ovn_kubernetes.go:1429] IP family change: updateNode=true, updateControlPlane=true 2025-12-12T16:40:26.964028377+00:00 stderr F I1212 16:40:26.963998 1 ovn_kubernetes.go:1601] OVN-Kubernetes control-plane and node already at release version 4.20.1; no changes required 2025-12-12T16:40:26.964086119+00:00 stderr F I1212 16:40:26.964026 1 ovn_kubernetes.go:531] ovnk components: ovnkube-node: isRunning=true, update=true; ovnkube-control-plane: isRunning=true, update=true 2025-12-12T16:40:27.103425550+00:00 stderr F I1212 16:40:27.103349 1 ovn_kubernetes.go:1693] daemonset openshift-network-node-identity/network-node-identity rollout complete; 1/1 scheduled; 0 unavailable; 1 available; generation 1 -> 1 2025-12-12T16:40:27.103425550+00:00 stderr F I1212 16:40:27.103376 1 ovn_kubernetes.go:1698] daemonset openshift-network-node-identity/network-node-identity rollout complete 2025-12-12T16:40:27.320645337+00:00 stderr F I1212 16:40:27.320566 1 log.go:245] Render phase done, rendered 126 objects 2025-12-12T16:40:27.336959277+00:00 stderr F I1212 16:40:27.336862 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster 2025-12-12T16:40:27.342558178+00:00 stderr F I1212 16:40:27.342507 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/applied-cluster was successful 2025-12-12T16:40:27.342619569+00:00 stderr F I1212 16:40:27.342591 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io 2025-12-12T16:40:27.350198670+00:00 stderr F I1212 16:40:27.350124 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /network-attachment-definitions.k8s.cni.cncf.io was successful 2025-12-12T16:40:27.350198670+00:00 
stderr F I1212 16:40:27.350172 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io 2025-12-12T16:40:27.359823552+00:00 stderr F I1212 16:40:27.359713 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ippools.whereabouts.cni.cncf.io was successful 2025-12-12T16:40:27.359823552+00:00 stderr F I1212 16:40:27.359784 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io 2025-12-12T16:40:27.369836893+00:00 stderr F I1212 16:40:27.369753 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /overlappingrangeipreservations.whereabouts.cni.cncf.io was successful 2025-12-12T16:40:27.369836893+00:00 stderr F I1212 16:40:27.369803 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io 2025-12-12T16:40:27.380706686+00:00 stderr F I1212 16:40:27.380628 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /nodeslicepools.whereabouts.cni.cncf.io was successful 2025-12-12T16:40:27.380706686+00:00 stderr F I1212 16:40:27.380682 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-multus 2025-12-12T16:40:27.390221365+00:00 stderr F I1212 16:40:27.390090 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-multus was successful 2025-12-12T16:40:27.390221365+00:00 stderr F I1212 16:40:27.390163 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus 2025-12-12T16:40:27.397143369+00:00 stderr F I1212 16:40:27.397052 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus was successful 2025-12-12T16:40:27.397143369+00:00 stderr F I1212 16:40:27.397099 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools 2025-12-12T16:40:27.403155381+00:00 stderr F I1212 16:40:27.403093 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-ancillary-tools was successful 2025-12-12T16:40:27.403155381+00:00 stderr F I1212 16:40:27.403147 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus 2025-12-12T16:40:27.408519045+00:00 stderr F I1212 16:40:27.408479 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus was successful 2025-12-12T16:40:27.408642368+00:00 stderr F I1212 16:40:27.408624 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient 2025-12-12T16:40:27.413105610+00:00 stderr F I1212 16:40:27.413080 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-transient was successful 2025-12-12T16:40:27.413225843+00:00 stderr F I1212 16:40:27.413210 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group 2025-12-12T16:40:27.542321187+00:00 stderr F I1212 16:40:27.542244 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-group was successful 2025-12-12T16:40:27.542497232+00:00 stderr F I1212 16:40:27.542467 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools 2025-12-12T16:40:27.742294392+00:00 stderr F I1212 16:40:27.741586 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ancillary-tools was successful 2025-12-12T16:40:27.742294392+00:00 stderr F I1212 
16:40:27.742257 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools 2025-12-12T16:40:27.946565274+00:00 stderr F I1212 16:40:27.946450 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-ancillary-tools was successful 2025-12-12T16:40:27.946676347+00:00 stderr F I1212 16:40:27.946584 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers 2025-12-12T16:40:28.142863606+00:00 stderr F I1212 16:40:28.142759 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-cluster-readers was successful 2025-12-12T16:40:28.142863606+00:00 stderr F I1212 16:40:28.142813 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts 2025-12-12T16:40:28.343514488+00:00 stderr F I1212 16:40:28.343455 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-whereabouts was successful 2025-12-12T16:40:28.343547238+00:00 stderr F I1212 16:40:28.343510 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts 2025-12-12T16:40:28.544783744+00:00 stderr F I1212 16:40:28.544617 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/multus-whereabouts was successful 2025-12-12T16:40:28.544783744+00:00 stderr F I1212 16:40:28.544690 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni 2025-12-12T16:40:28.744964834+00:00 stderr F I1212 16:40:28.744123 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /whereabouts-cni was successful 2025-12-12T16:40:28.744964834+00:00 stderr F I1212 16:40:28.744928 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni 2025-12-12T16:40:28.943128553+00:00 stderr F I1212 16:40:28.942979 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/whereabouts-cni was successful 2025-12-12T16:40:28.943128553+00:00 stderr F I1212 16:40:28.943041 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project 2025-12-12T16:40:29.146437171+00:00 stderr F I1212 16:40:29.146310 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /net-attach-def-project was successful 2025-12-12T16:40:29.146437171+00:00 stderr F I1212 16:40:29.146378 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist 2025-12-12T16:40:29.343790010+00:00 stderr F I1212 16:40:29.343701 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/default-cni-sysctl-allowlist was successful 2025-12-12T16:40:29.343790010+00:00 stderr F I1212 16:40:29.343771 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources 2025-12-12T16:40:29.543383904+00:00 stderr F I1212 16:40:29.543269 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/cni-copy-resources was successful 2025-12-12T16:40:29.543383904+00:00 stderr F I1212 16:40:29.543323 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config 2025-12-12T16:40:29.746469907+00:00 stderr F I1212 16:40:29.746299 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/whereabouts-flatfile-config was successful 2025-12-12T16:40:29.746469907+00:00 stderr F I1212 16:40:29.746373 1 
log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config 2025-12-12T16:40:29.946422471+00:00 stderr F I1212 16:40:29.946301 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-multus/multus-daemon-config was successful 2025-12-12T16:40:29.946422471+00:00 stderr F I1212 16:40:29.946395 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus 2025-12-12T16:40:30.155676479+00:00 stderr F I1212 16:40:30.155579 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus was successful 2025-12-12T16:40:30.155676479+00:00 stderr F I1212 16:40:30.155659 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins 2025-12-12T16:40:30.362291189+00:00 stderr F I1212 16:40:30.362130 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/multus-additional-cni-plugins was successful 2025-12-12T16:40:30.362373381+00:00 stderr F I1212 16:40:30.362308 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa 2025-12-12T16:40:30.545295517+00:00 stderr F I1212 16:40:30.543624 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/metrics-daemon-sa was successful 2025-12-12T16:40:30.545295517+00:00 stderr F I1212 16:40:30.543696 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role 2025-12-12T16:40:30.743817775+00:00 stderr F I1212 16:40:30.743754 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /metrics-daemon-role was successful 2025-12-12T16:40:30.744168964+00:00 stderr F I1212 16:40:30.743949 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding 2025-12-12T16:40:30.943652566+00:00 stderr F I1212 16:40:30.943553 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /metrics-daemon-sa-rolebinding was successful 2025-12-12T16:40:30.943652566+00:00 stderr F I1212 16:40:30.943620 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon 2025-12-12T16:40:31.149165860+00:00 stderr F I1212 16:40:31.149054 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-multus/network-metrics-daemon was successful 2025-12-12T16:40:31.149165860+00:00 stderr F I1212 16:40:31.149135 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network 2025-12-12T16:40:31.346297003+00:00 stderr F I1212 16:40:31.345386 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-network was successful 2025-12-12T16:40:31.346297003+00:00 stderr F I1212 16:40:31.346256 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/network-metrics-service 2025-12-12T16:40:31.548762600+00:00 stderr F I1212 16:40:31.548674 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/network-metrics-service was successful 2025-12-12T16:40:31.548968325+00:00 stderr F I1212 16:40:31.548939 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:40:31.742270702+00:00 stderr F I1212 16:40:31.742096 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:40:31.742270702+00:00 stderr F I1212 16:40:31.742166 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 
2025-12-12T16:40:31.946176335+00:00 stderr F I1212 16:40:31.945308 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:40:31.946176335+00:00 stderr F I1212 16:40:31.946108 1 log.go:245] reconciling (/v1, Kind=Service) openshift-multus/multus-admission-controller 2025-12-12T16:40:32.147313678+00:00 stderr F I1212 16:40:32.147244 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-multus/multus-admission-controller was successful 2025-12-12T16:40:32.147401801+00:00 stderr F I1212 16:40:32.147390 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-multus/multus-ac 2025-12-12T16:40:32.346291968+00:00 stderr F I1212 16:40:32.346147 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-multus/multus-ac was successful 2025-12-12T16:40:32.346525124+00:00 stderr F I1212 16:40:32.346491 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook 2025-12-12T16:40:32.542997990+00:00 stderr F I1212 16:40:32.542921 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /multus-admission-controller-webhook was successful 2025-12-12T16:40:32.542997990+00:00 stderr F I1212 16:40:32.542975 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook 2025-12-12T16:40:32.745545149+00:00 stderr F I1212 16:40:32.745475 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /multus-admission-controller-webhook was successful 2025-12-12T16:40:32.745787745+00:00 stderr F I1212 16:40:32.745764 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io 2025-12-12T16:40:32.944866947+00:00 stderr F I1212 16:40:32.944753 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /multus.openshift.io was successful 2025-12-12T16:40:32.944866947+00:00 stderr F I1212 16:40:32.944808 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller 2025-12-12T16:40:33.149521460+00:00 stderr F I1212 16:40:33.149364 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-multus/multus-admission-controller was successful 2025-12-12T16:40:33.149521460+00:00 stderr F I1212 16:40:33.149438 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller 2025-12-12T16:40:33.345031572+00:00 stderr F I1212 16:40:33.344941 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-multus/monitor-multus-admission-controller was successful 2025-12-12T16:40:33.345031572+00:00 stderr F I1212 16:40:33.345001 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s 2025-12-12T16:40:33.544050872+00:00 stderr F I1212 16:40:33.543942 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-multus/prometheus-k8s was successful 2025-12-12T16:40:33.544050872+00:00 stderr F I1212 16:40:33.543991 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s 2025-12-12T16:40:33.743292998+00:00 stderr F I1212 16:40:33.743197 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-multus/prometheus-k8s was successful 2025-12-12T16:40:33.743292998+00:00 stderr F I1212 
16:40:33.743261 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules 2025-12-12T16:40:33.946221536+00:00 stderr F I1212 16:40:33.946090 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-multus/prometheus-k8s-rules was successful 2025-12-12T16:40:33.946221536+00:00 stderr F I1212 16:40:33.946150 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-ovn-kubernetes 2025-12-12T16:40:34.144221631+00:00 stderr F I1212 16:40:34.144105 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-ovn-kubernetes was successful 2025-12-12T16:40:34.144221631+00:00 stderr F I1212 16:40:34.144201 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org 2025-12-12T16:40:34.350850393+00:00 stderr F I1212 16:40:34.350796 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressfirewalls.k8s.ovn.org was successful 2025-12-12T16:40:34.350977646+00:00 stderr F I1212 16:40:34.350956 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org 2025-12-12T16:40:34.555165236+00:00 stderr F I1212 16:40:34.555115 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressips.k8s.ovn.org was successful 2025-12-12T16:40:34.555280309+00:00 stderr F I1212 16:40:34.555270 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org 2025-12-12T16:40:34.753472039+00:00 stderr F I1212 16:40:34.753361 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressqoses.k8s.ovn.org was successful 2025-12-12T16:40:34.753472039+00:00 stderr F I1212 16:40:34.753441 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org 2025-12-12T16:40:34.958937301+00:00 stderr F I1212 16:40:34.958884 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminpolicybasedexternalroutes.k8s.ovn.org was successful 2025-12-12T16:40:34.959044534+00:00 stderr F I1212 16:40:34.959033 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org 2025-12-12T16:40:35.149967561+00:00 stderr F I1212 16:40:35.149889 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /egressservices.k8s.ovn.org was successful 2025-12-12T16:40:35.150018192+00:00 stderr F I1212 16:40:35.149965 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:40:35.391812508+00:00 stderr F I1212 16:40:35.391756 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /adminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:40:35.391851419+00:00 stderr F I1212 16:40:35.391817 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io 2025-12-12T16:40:35.576679413+00:00 stderr F I1212 16:40:35.576593 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /baselineadminnetworkpolicies.policy.networking.k8s.io was successful 2025-12-12T16:40:35.576679413+00:00 stderr F I1212 16:40:35.576655 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) 
/ipamclaims.k8s.cni.cncf.io 2025-12-12T16:40:35.744317575+00:00 stderr F I1212 16:40:35.744216 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /ipamclaims.k8s.cni.cncf.io was successful 2025-12-12T16:40:35.744317575+00:00 stderr F I1212 16:40:35.744298 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org 2025-12-12T16:40:35.964644871+00:00 stderr F I1212 16:40:35.964573 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /userdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:40:35.964644871+00:00 stderr F I1212 16:40:35.964632 1 log.go:245] reconciling (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org 2025-12-12T16:40:36.181644523+00:00 stderr F I1212 16:40:36.181578 1 log.go:245] Apply / Create of (apiextensions.k8s.io/v1, Kind=CustomResourceDefinition) /clusteruserdefinednetworks.k8s.ovn.org was successful 2025-12-12T16:40:36.181730575+00:00 stderr F I1212 16:40:36.181646 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:40:36.341897889+00:00 stderr F I1212 16:40:36.341788 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:40:36.341897889+00:00 stderr F I1212 16:40:36.341838 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited 2025-12-12T16:40:36.547996318+00:00 stderr F I1212 16:40:36.547917 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:40:36.548050759+00:00 stderr F I1212 16:40:36.547991 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited 2025-12-12T16:40:36.742106185+00:00 stderr F I1212 16:40:36.742053 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-nodes-identity-limited was successful 2025-12-12T16:40:36.742157016+00:00 stderr F I1212 16:40:36.742105 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited 2025-12-12T16:40:36.945344811+00:00 stderr F I1212 16:40:36.945278 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-node-limited was successful 2025-12-12T16:40:36.945344811+00:00 stderr F I1212 16:40:36.945330 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited 2025-12-12T16:40:37.143121601+00:00 stderr F I1212 16:40:37.142819 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-identity-limited was successful 2025-12-12T16:40:37.143121601+00:00 stderr F I1212 16:40:37.142873 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy 2025-12-12T16:40:37.343409843+00:00 stderr F I1212 16:40:37.342581 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-kube-rbac-proxy was successful 2025-12-12T16:40:37.343409843+00:00 stderr F I1212 16:40:37.342648 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, 
Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy 2025-12-12T16:40:37.542653108+00:00 stderr F I1212 16:40:37.542567 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-node-kube-rbac-proxy was successful 2025-12-12T16:40:37.542653108+00:00 stderr F I1212 16:40:37.542628 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config 2025-12-12T16:40:37.742808107+00:00 stderr F I1212 16:40:37.742731 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-config was successful 2025-12-12T16:40:37.742853238+00:00 stderr F I1212 16:40:37.742803 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:40:37.944120305+00:00 stderr F I1212 16:40:37.944027 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:40:37.944120305+00:00 stderr F I1212 16:40:37.944102 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:40:38.058226452+00:00 stderr F I1212 16:40:38.058125 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:40:38.154414979+00:00 stderr F I1212 16:40:38.154315 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:40:38.154414979+00:00 stderr F I1212 16:40:38.154387 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:40:38.342576197+00:00 stderr F I1212 16:40:38.342492 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:40:38.342637568+00:00 stderr F I1212 16:40:38.342571 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:40:38.542687585+00:00 stderr F I1212 16:40:38.542610 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:40:38.542740756+00:00 stderr F I1212 16:40:38.542681 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited 2025-12-12T16:40:38.742606368+00:00 stderr F I1212 16:40:38.742550 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/openshift-ovn-kubernetes-control-plane-limited was successful 2025-12-12T16:40:38.742650319+00:00 stderr F I1212 16:40:38.742603 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn 2025-12-12T16:40:38.944082560+00:00 stderr F I1212 16:40:38.944016 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/ovn was successful 2025-12-12T16:40:38.944082560+00:00 stderr F I1212 16:40:38.944071 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-ovn-kubernetes/signer 2025-12-12T16:40:39.144221579+00:00 stderr F I1212 16:40:39.143486 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, 
Kind=OperatorPKI) openshift-ovn-kubernetes/signer was successful 2025-12-12T16:40:39.144221579+00:00 stderr F I1212 16:40:39.144213 1 log.go:245] reconciling (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes 2025-12-12T16:40:39.343361792+00:00 stderr F I1212 16:40:39.343282 1 log.go:245] Apply / Create of (flowcontrol.apiserver.k8s.io/v1, Kind=FlowSchema) /openshift-ovn-kubernetes was successful 2025-12-12T16:40:39.343361792+00:00 stderr F I1212 16:40:39.343349 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader 2025-12-12T16:40:39.542627809+00:00 stderr F I1212 16:40:39.542534 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-cluster-reader was successful 2025-12-12T16:40:39.542627809+00:00 stderr F I1212 16:40:39.542593 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib 2025-12-12T16:40:39.768311269+00:00 stderr F I1212 16:40:39.765640 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-ovn-kubernetes/ovnkube-script-lib was successful 2025-12-12T16:40:39.768311269+00:00 stderr F I1212 16:40:39.765706 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor 2025-12-12T16:40:39.942596919+00:00 stderr F I1212 16:40:39.942537 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-editor was successful 2025-12-12T16:40:39.942596919+00:00 stderr F I1212 16:40:39.942584 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer 2025-12-12T16:40:40.144154583+00:00 stderr F I1212 16:40:40.144072 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-ovn-kubernetes-udn-viewer was successful 2025-12-12T16:40:40.144154583+00:00 stderr F I1212 16:40:40.144137 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules 2025-12-12T16:40:40.345912212+00:00 stderr F I1212 16:40:40.345813 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/master-rules was successful 2025-12-12T16:40:40.345912212+00:00 stderr F I1212 16:40:40.345872 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules 2025-12-12T16:40:40.549027976+00:00 stderr F I1212 16:40:40.548969 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-ovn-kubernetes/networking-rules was successful 2025-12-12T16:40:40.549061776+00:00 stderr F I1212 16:40:40.549030 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features 2025-12-12T16:40:40.745085082+00:00 stderr F I1212 16:40:40.745033 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-config-managed/openshift-network-features was successful 2025-12-12T16:40:40.745121813+00:00 stderr F I1212 16:40:40.745089 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics 2025-12-12T16:40:40.945464546+00:00 stderr F I1212 16:40:40.945404 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-control-plane-metrics was successful 2025-12-12T16:40:40.945464546+00:00 stderr F I1212 16:40:40.945455 1 log.go:245] reconciling (/v1, 
Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane 2025-12-12T16:40:41.144958658+00:00 stderr F I1212 16:40:41.144878 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-control-plane was successful 2025-12-12T16:40:41.144958658+00:00 stderr F I1212 16:40:41.144939 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node 2025-12-12T16:40:41.344685896+00:00 stderr F I1212 16:40:41.344608 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-ovn-kubernetes/monitor-ovn-node was successful 2025-12-12T16:40:41.344685896+00:00 stderr F I1212 16:40:41.344677 1 log.go:245] reconciling (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node 2025-12-12T16:40:41.547643355+00:00 stderr F I1212 16:40:41.547576 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-ovn-kubernetes/ovn-kubernetes-node was successful 2025-12-12T16:40:41.547643355+00:00 stderr F I1212 16:40:41.547625 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:40:41.743097246+00:00 stderr F I1212 16:40:41.743048 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:40:41.743144628+00:00 stderr F I1212 16:40:41.743095 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s 2025-12-12T16:40:41.943156653+00:00 stderr F I1212 16:40:41.943097 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-ovn-kubernetes/prometheus-k8s was successful 2025-12-12T16:40:41.943230395+00:00 stderr F I1212 16:40:41.943159 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-host-network 2025-12-12T16:40:42.145535608+00:00 stderr F I1212 16:40:42.145466 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-host-network was successful 2025-12-12T16:40:42.145592399+00:00 stderr F I1212 16:40:42.145531 1 log.go:245] reconciling (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas 2025-12-12T16:40:42.342544178+00:00 stderr F I1212 16:40:42.342465 1 log.go:245] Apply / Create of (/v1, Kind=ResourceQuota) openshift-host-network/host-network-namespace-quotas was successful 2025-12-12T16:40:42.342544178+00:00 stderr F I1212 16:40:42.342534 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane 2025-12-12T16:40:42.547985150+00:00 stderr F I1212 16:40:42.547880 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-ovn-kubernetes/ovnkube-control-plane was successful 2025-12-12T16:40:42.547985150+00:00 stderr F I1212 16:40:42.547940 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node 2025-12-12T16:40:42.763580667+00:00 stderr F I1212 16:40:42.763492 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-ovn-kubernetes/ovnkube-node was successful 2025-12-12T16:40:42.763580667+00:00 stderr F I1212 16:40:42.763553 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label 2025-12-12T16:40:42.944139433+00:00 stderr F I1212 16:40:42.944069 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicy) /user-defined-networks-namespace-label was successful 2025-12-12T16:40:42.944139433+00:00 stderr F 
I1212 16:40:42.944126 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding 2025-12-12T16:40:43.143275027+00:00 stderr F I1212 16:40:43.143208 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingAdmissionPolicyBinding) /user-defined-networks-namespace-label-binding was successful 2025-12-12T16:40:43.143275027+00:00 stderr F I1212 16:40:43.143259 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-diagnostics 2025-12-12T16:40:43.344688058+00:00 stderr F I1212 16:40:43.344592 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-diagnostics was successful 2025-12-12T16:40:43.344688058+00:00 stderr F I1212 16:40:43.344676 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:40:43.543995245+00:00 stderr F I1212 16:40:43.543384 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:40:43.543995245+00:00 stderr F I1212 16:40:43.543944 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:40:43.742863732+00:00 stderr F I1212 16:40:43.742773 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:40:43.742863732+00:00 stderr F I1212 16:40:43.742823 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics 2025-12-12T16:40:43.943501993+00:00 stderr F I1212 16:40:43.943442 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/network-diagnostics was successful 2025-12-12T16:40:43.943501993+00:00 stderr F I1212 16:40:43.943494 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics 2025-12-12T16:40:44.144152175+00:00 stderr F I1212 16:40:44.144084 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-diagnostics was successful 2025-12-12T16:40:44.144152175+00:00 stderr F I1212 16:40:44.144134 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics 2025-12-12T16:40:44.342388666+00:00 stderr F I1212 16:40:44.342326 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-diagnostics was successful 2025-12-12T16:40:44.342443507+00:00 stderr F I1212 16:40:44.342387 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics 2025-12-12T16:40:44.543035427+00:00 stderr F I1212 16:40:44.542921 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) kube-system/network-diagnostics was successful 2025-12-12T16:40:44.543035427+00:00 stderr F I1212 16:40:44.542973 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source 2025-12-12T16:40:44.750272403+00:00 stderr F I1212 16:40:44.750114 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:40:44.750272403+00:00 stderr F I1212 16:40:44.750168 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-source 2025-12-12T16:40:44.942417391+00:00 stderr F I1212 16:40:44.942348 1 
log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:40:44.942417391+00:00 stderr F I1212 16:40:44.942406 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source 2025-12-12T16:40:45.144476798+00:00 stderr F I1212 16:40:45.144395 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=ServiceMonitor) openshift-network-diagnostics/network-check-source was successful 2025-12-12T16:40:45.144476798+00:00 stderr F I1212 16:40:45.144452 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:40:45.348304049+00:00 stderr F I1212 16:40:45.347917 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:40:45.348304049+00:00 stderr F I1212 16:40:45.348022 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s 2025-12-12T16:40:45.542478188+00:00 stderr F I1212 16:40:45.542404 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-diagnostics/prometheus-k8s was successful 2025-12-12T16:40:45.542478188+00:00 stderr F I1212 16:40:45.542467 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target 2025-12-12T16:40:45.746691699+00:00 stderr F I1212 16:40:45.746608 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:40:45.746691699+00:00 stderr F I1212 16:40:45.746676 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-diagnostics/network-check-target 2025-12-12T16:40:45.945842142+00:00 stderr F I1212 16:40:45.945764 1 log.go:245] Apply / Create of (/v1, Kind=Service) openshift-network-diagnostics/network-check-target was successful 2025-12-12T16:40:45.945842142+00:00 stderr F I1212 16:40:45.945834 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role 2025-12-12T16:40:46.145129540+00:00 stderr F I1212 16:40:46.145046 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-config-managed/openshift-network-public-role was successful 2025-12-12T16:40:46.145129540+00:00 stderr F I1212 16:40:46.145106 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding 2025-12-12T16:40:46.345954336+00:00 stderr F I1212 16:40:46.345237 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-config-managed/openshift-network-public-role-binding was successful 2025-12-12T16:40:46.345954336+00:00 stderr F I1212 16:40:46.345311 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-node-identity 2025-12-12T16:40:46.545644123+00:00 stderr F I1212 16:40:46.545571 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-node-identity was successful 2025-12-12T16:40:46.545644123+00:00 stderr F I1212 16:40:46.545627 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity 2025-12-12T16:40:46.745014632+00:00 stderr F I1212 16:40:46.744839 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-node-identity/network-node-identity was successful 
2025-12-12T16:40:46.745014632+00:00 stderr F I1212 16:40:46.744891 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity 2025-12-12T16:40:46.943533170+00:00 stderr F I1212 16:40:46.943466 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /network-node-identity was successful 2025-12-12T16:40:46.943585321+00:00 stderr F I1212 16:40:46.943533 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity 2025-12-12T16:40:47.143335460+00:00 stderr F I1212 16:40:47.143273 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /network-node-identity was successful 2025-12-12T16:40:47.143335460+00:00 stderr F I1212 16:40:47.143326 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:40:47.343332105+00:00 stderr F I1212 16:40:47.342567 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:40:47.343376076+00:00 stderr F I1212 16:40:47.343341 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases 2025-12-12T16:40:47.543365471+00:00 stderr F I1212 16:40:47.543274 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=Role) openshift-network-node-identity/network-node-identity-leases was successful 2025-12-12T16:40:47.543365471+00:00 stderr F I1212 16:40:47.543337 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 2025-12-12T16:40:47.743717415+00:00 stderr F I1212 16:40:47.743640 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=RoleBinding) openshift-network-node-identity/system:openshift:scc:hostnetwork-v2 was successful 2025-12-12T16:40:47.743717415+00:00 stderr F I1212 16:40:47.743696 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm 2025-12-12T16:40:47.943309080+00:00 stderr F I1212 16:40:47.943215 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-node-identity/ovnkube-identity-cm was successful 2025-12-12T16:40:47.943309080+00:00 stderr F I1212 16:40:47.943292 1 log.go:245] reconciling (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity 2025-12-12T16:40:48.143349466+00:00 stderr F I1212 16:40:48.143274 1 log.go:245] Apply / Create of (network.operator.openshift.io/v1, Kind=OperatorPKI) openshift-network-node-identity/network-node-identity was successful 2025-12-12T16:40:48.143349466+00:00 stderr F I1212 16:40:48.143340 1 log.go:245] reconciling (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io 2025-12-12T16:40:48.344114850+00:00 stderr F I1212 16:40:48.344048 1 log.go:245] Apply / Create of (admissionregistration.k8s.io/v1, Kind=ValidatingWebhookConfiguration) /network-node-identity.openshift.io was successful 2025-12-12T16:40:48.344114850+00:00 stderr F I1212 16:40:48.344100 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity 2025-12-12T16:40:48.548433963+00:00 stderr F I1212 16:40:48.547688 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-node-identity/network-node-identity was 
successful 2025-12-12T16:40:48.548433963+00:00 stderr F I1212 16:40:48.547739 1 log.go:245] reconciling (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules 2025-12-12T16:40:48.742446988+00:00 stderr F I1212 16:40:48.742391 1 log.go:245] Apply / Create of (monitoring.coreos.com/v1, Kind=PrometheusRule) openshift-network-operator/openshift-network-operator-ipsec-rules was successful 2025-12-12T16:40:48.742498290+00:00 stderr F I1212 16:40:48.742463 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter 2025-12-12T16:40:48.943358556+00:00 stderr F I1212 16:40:48.943292 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRole) /openshift-iptables-alerter was successful 2025-12-12T16:40:48.943408238+00:00 stderr F I1212 16:40:48.943355 1 log.go:245] reconciling (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter 2025-12-12T16:40:49.142851449+00:00 stderr F I1212 16:40:49.142768 1 log.go:245] Apply / Create of (/v1, Kind=ServiceAccount) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:40:49.142851449+00:00 stderr F I1212 16:40:49.142842 1 log.go:245] reconciling (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter 2025-12-12T16:40:49.341806368+00:00 stderr F I1212 16:40:49.341705 1 log.go:245] Apply / Create of (rbac.authorization.k8s.io/v1, Kind=ClusterRoleBinding) /openshift-iptables-alerter was successful 2025-12-12T16:40:49.341806368+00:00 stderr F I1212 16:40:49.341758 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script 2025-12-12T16:40:49.543860834+00:00 stderr F I1212 16:40:49.542996 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-operator/iptables-alerter-script was successful 2025-12-12T16:40:49.543860834+00:00 stderr F I1212 16:40:49.543161 1 log.go:245] reconciling (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter 2025-12-12T16:40:49.746601348+00:00 stderr F I1212 16:40:49.746546 1 log.go:245] Apply / Create of (apps/v1, Kind=DaemonSet) openshift-network-operator/iptables-alerter was successful 2025-12-12T16:40:49.746722441+00:00 stderr F I1212 16:40:49.746711 1 log.go:245] reconciling (/v1, Kind=Namespace) /openshift-network-console 2025-12-12T16:40:49.945086955+00:00 stderr F I1212 16:40:49.944765 1 log.go:245] Apply / Create of (/v1, Kind=Namespace) /openshift-network-console was successful 2025-12-12T16:40:49.945086955+00:00 stderr F I1212 16:40:49.944830 1 log.go:245] reconciling (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin 2025-12-12T16:40:50.143434119+00:00 stderr F I1212 16:40:50.143142 1 log.go:245] Apply / Create of (/v1, Kind=ConfigMap) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:40:50.143434119+00:00 stderr F I1212 16:40:50.143347 1 log.go:245] reconciling (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin 2025-12-12T16:40:50.349101027+00:00 stderr F I1212 16:40:50.349032 1 log.go:245] Apply / Create of (apps/v1, Kind=Deployment) openshift-network-console/networking-console-plugin was successful 2025-12-12T16:40:50.349101027+00:00 stderr F I1212 16:40:50.349086 1 log.go:245] reconciling (/v1, Kind=Service) openshift-network-console/networking-console-plugin 2025-12-12T16:40:50.545326957+00:00 stderr F I1212 16:40:50.544945 1 log.go:245] Apply / Create of (/v1, Kind=Service) 
openshift-network-console/networking-console-plugin was successful 2025-12-12T16:40:50.545326957+00:00 stderr F I1212 16:40:50.545006 1 log.go:245] reconciling (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin 2025-12-12T16:40:50.743443565+00:00 stderr F I1212 16:40:50.743327 1 log.go:245] Apply / Create of (console.openshift.io/v1, Kind=ConsolePlugin) /networking-console-plugin was successful 2025-12-12T16:40:50.771503260+00:00 stderr F I1212 16:40:50.771400 1 log.go:245] Operconfig Controller complete 2025-12-12T16:41:25.893232545+00:00 stderr F I1212 16:41:25.891794 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-network-node-identity/network-node-identity 2025-12-12T16:41:25.893755838+00:00 stderr F I1212 16:41:25.893722 1 log.go:245] successful reconciliation 2025-12-12T16:41:29.298027452+00:00 stderr F I1212 16:41:29.297926 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/ovn 2025-12-12T16:41:29.298645337+00:00 stderr F I1212 16:41:29.298517 1 log.go:245] successful reconciliation 2025-12-12T16:41:32.287060213+00:00 stderr F I1212 16:41:32.286248 1 log.go:245] Reconciling pki.network.operator.openshift.io openshift-ovn-kubernetes/signer 2025-12-12T16:41:32.287471203+00:00 stderr F I1212 16:41:32.287439 1 log.go:245] successful reconciliation 2025-12-12T16:42:19.074165758+00:00 stderr F I1212 16:42:19.074079 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:42:51.772112399+00:00 stderr F I1212 16:42:51.772034 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice"
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/extract-content/0.log (empty)
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/extract-utilities/0.log (empty)
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/registry-server/0.log.gz (binary gzip data)
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5bb8f5cd97-xdvz5_f863fff9-286a-45fa-b8f0-8a86994b8440/check-endpoints/0.log
2025-12-12T16:16:56.580965186+00:00 stderr F W1212 16:16:56.580735 1 cmd.go:257] Using insecure, self-signed certificates 2025-12-12T16:16:56.581359445+00:00 stderr F Validity period of the certificate for "check-endpoints-signer@1765556216" is unset, resetting to 43800h0m0s! 2025-12-12T16:16:58.953870109+00:00 stderr F I1212 16:16:58.952412 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:59.098938591+00:00 stderr F I1212 16:16:59.098486 1 builder.go:304] check-endpoints version 4.20.0-202510211040.p2.gb0393aa.assembly.stream.el9-b0393aa-b0393aa3e67302d89e91b8f7b1013b6d2e317f04 2025-12-12T16:16:59.667774889+00:00 stderr F I1212 16:16:59.667696 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:59.667774889+00:00 stderr F W1212 16:16:59.667752 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:59.667774889+00:00 stderr F W1212 16:16:59.667759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:59.667774889+00:00 stderr F W1212 16:16:59.667764 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:59.667814140+00:00 stderr F W1212 16:16:59.667769 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:59.667814140+00:00 stderr F W1212 16:16:59.667776 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:59.667814140+00:00 stderr F W1212 16:16:59.667781 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.
2025-12-12T16:16:59.677006314+00:00 stderr F I1212 16:16:59.676939 1 secure_serving.go:211] Serving securely on [::]:17698 2025-12-12T16:16:59.761635640+00:00 stderr F I1212 16:16:59.761526 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:59.761635640+00:00 stderr F I1212 16:16:59.761564 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:59.761743813+00:00 stderr F I1212 16:16:59.761681 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:59.761910097+00:00 stderr F I1212 16:16:59.761829 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/tmp/serving-cert-2638467676/tls.crt::/tmp/serving-cert-2638467676/tls.key" 2025-12-12T16:16:59.762019220+00:00 stderr F I1212 16:16:59.761883 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:59.762303757+00:00 stderr F I1212 16:16:59.762253 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:59.762303757+00:00 stderr F I1212 16:16:59.762259 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:59.762891791+00:00 stderr F I1212 16:16:59.762251 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:59.781389122+00:00 stderr F I1212 16:16:59.781311 1 base_controller.go:76] Waiting for caches to sync for CheckEndpointsTimeToStart 2025-12-12T16:16:59.862873662+00:00 stderr F I1212 16:16:59.862814 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:59.862991275+00:00 stderr F I1212 16:16:59.862862 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:59.863032016+00:00 stderr F I1212 16:16:59.863007 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:59.982253776+00:00 stderr F I1212 16:16:59.982162 1 base_controller.go:82] Caches are synced for CheckEndpointsTimeToStart 2025-12-12T16:16:59.982335668+00:00 stderr F I1212 16:16:59.982322 1 base_controller.go:119] Starting #1 worker of CheckEndpointsTimeToStart controller ... 2025-12-12T16:16:59.982579944+00:00 stderr F I1212 16:16:59.982564 1 base_controller.go:76] Waiting for caches to sync for CheckEndpointsStop 2025-12-12T16:16:59.982611225+00:00 stderr F I1212 16:16:59.982601 1 base_controller.go:82] Caches are synced for CheckEndpointsStop 2025-12-12T16:16:59.982635886+00:00 stderr F I1212 16:16:59.982626 1 base_controller.go:119] Starting #1 worker of CheckEndpointsStop controller ... 2025-12-12T16:16:59.982716818+00:00 stderr F I1212 16:16:59.982702 1 base_controller.go:123] Shutting down worker of CheckEndpointsTimeToStart controller ... 2025-12-12T16:16:59.987422933+00:00 stderr F I1212 16:16:59.987367 1 base_controller.go:76] Waiting for caches to sync for check-endpoints 2025-12-12T16:16:59.987647588+00:00 stderr F I1212 16:16:59.987589 1 base_controller.go:181] Shutting down CheckEndpointsTimeToStart ... 
2025-12-12T16:16:59.987647588+00:00 stderr F I1212 16:16:59.987628 1 base_controller.go:113] All CheckEndpointsTimeToStart workers have been terminated 2025-12-12T16:17:00.087966166+00:00 stderr F I1212 16:17:00.087902 1 base_controller.go:82] Caches are synced for check-endpoints 2025-12-12T16:17:00.088028808+00:00 stderr F I1212 16:17:00.088016 1 base_controller.go:119] Starting #1 worker of check-endpoints controller ...
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2/kube-multus/1.log
2025-12-12T16:25:27.826860345+00:00 stdout F 2025-12-12T16:25:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0cb6fa6b-04dd-49b3-879a-f549d90643d1 2025-12-12T16:25:27.880698630+00:00 stdout F 2025-12-12T16:25:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0cb6fa6b-04dd-49b3-879a-f549d90643d1 to /host/opt/cni/bin/ 2025-12-12T16:25:27.900584540+00:00 stderr F 2025-12-12T16:25:27Z [verbose] multus-daemon started 2025-12-12T16:25:27.900584540+00:00 stderr F 2025-12-12T16:25:27Z [verbose] Readiness Indicator file check 2025-12-12T16:25:37.901893264+00:00 stderr F 2025-12-12T16:25:37Z [verbose] Readiness Indicator file check done! 2025-12-12T16:25:37.903425873+00:00 stderr F I1212 16:25:37.903382 23414 certificate_store.go:130] Loading cert/key pair from "/etc/cni/multus/certs/multus-client-current.pem". 2025-12-12T16:25:37.903628468+00:00 stderr F 2025-12-12T16:25:37Z [verbose] Waiting for certificate 2025-12-12T16:25:38.904492032+00:00 stderr F I1212 16:25:38.904408 23414 certificate_store.go:130] Loading cert/key pair from "/etc/cni/multus/certs/multus-client-current.pem". 2025-12-12T16:25:38.904698247+00:00 stderr F 2025-12-12T16:25:38Z [verbose] Certificate found! 2025-12-12T16:25:38.905371224+00:00 stderr F 2025-12-12T16:25:38Z [verbose] server configured with chroot: /hostroot 2025-12-12T16:25:38.905371224+00:00 stderr F 2025-12-12T16:25:38Z [verbose] Filtering pod watch for node "crc" 2025-12-12T16:25:39.006479778+00:00 stderr F 2025-12-12T16:25:39Z [verbose] API readiness check 2025-12-12T16:25:39.007238487+00:00 stderr F 2025-12-12T16:25:39Z [verbose] API readiness check done!
2025-12-12T16:25:39.007461793+00:00 stderr P 2025-12-12T16:25:39Z [verbose] 2025-12-12T16:25:39.007490094+00:00 stderr P Generated MultusCNI config: {"binDir":"/var/lib/cni/bin","cniVersion":"0.3.1","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","namespaceIsolation":true,"globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","type":"multus-shim","auxiliaryCNIChainName":"vendor-cni-chain","daemonSocketDir":"/run/multus/socket"} 2025-12-12T16:25:39.007509474+00:00 stderr F 2025-12-12T16:25:39.007746340+00:00 stderr P 2025-12-12T16:25:39Z [verbose] 2025-12-12T16:25:39.007773641+00:00 stderr P started to watch file /host/run/multus/cni/net.d/10-ovn-kubernetes.conf 2025-12-12T16:25:39.007793971+00:00 stderr F 2025-12-12T16:26:39.054133770+00:00 stderr F 2025-12-12T16:26:39Z [verbose] DEL starting CNI request ContainerID:"8bea72c104dd5234c4cd3783a470a5fd8615adb812871de1a18d6c25aed0610e" Netns:"/var/run/netns/f4daef72-d6c9-455b-b870-17f85a9da346" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-marketplace-jkgqd;K8S_POD_INFRA_CONTAINER_ID=8bea72c104dd5234c4cd3783a470a5fd8615adb812871de1a18d6c25aed0610e;K8S_POD_UID=5307a6d2-3f00-4ebd-8c7b-e101e24f4dd4" Path:"" 2025-12-12T16:26:39.055272149+00:00 stderr F 2025-12-12T16:26:39Z [verbose] Del: openshift-marketplace:redhat-marketplace-jkgqd:5307a6d2-3f00-4ebd-8c7b-e101e24f4dd4:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:26:39.196042745+00:00 stderr F 2025-12-12T16:26:39Z [verbose] DEL finished CNI request ContainerID:"8bea72c104dd5234c4cd3783a470a5fd8615adb812871de1a18d6c25aed0610e" Netns:"/var/run/netns/f4daef72-d6c9-455b-b870-17f85a9da346" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-marketplace-jkgqd;K8S_POD_INFRA_CONTAINER_ID=8bea72c104dd5234c4cd3783a470a5fd8615adb812871de1a18d6c25aed0610e;K8S_POD_UID=5307a6d2-3f00-4ebd-8c7b-e101e24f4dd4" Path:"", result: "", err: 2025-12-12T16:26:40.330778402+00:00 stderr F 2025-12-12T16:26:40Z [verbose] ADD starting CNI request ContainerID:"2222d2af5cfbae8c2cbbb82776f89d17b3250ebe67976d95e30a580990050687" Netns:"/var/run/netns/ff994bbb-270b-407f-bb37-98db6f85a24d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-image-registry;K8S_POD_NAME=image-registry-5d9d95bf5b-6md9w;K8S_POD_INFRA_CONTAINER_ID=2222d2af5cfbae8c2cbbb82776f89d17b3250ebe67976d95e30a580990050687;K8S_POD_UID=b75bc011-274b-4fb1-8311-15ffa1b33366" Path:"" 2025-12-12T16:26:40.487713926+00:00 stderr F I1212 16:26:40.479821 25053 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:26:40.487713926+00:00 stderr F I1212 16:26:40.480252 25053 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:26:40.487713926+00:00 stderr F I1212 16:26:40.480271 25053 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:26:40.487713926+00:00 stderr F I1212 16:26:40.480279 25053 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:26:40.487713926+00:00 stderr F I1212 16:26:40.480288 25053 envvar.go:172] 
"Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:26:40.488536127+00:00 stderr F 2025-12-12T16:26:40Z [verbose] Add: openshift-image-registry:image-registry-5d9d95bf5b-6md9w:b75bc011-274b-4fb1-8311-15ffa1b33366:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"2222d2af5cfbae8","mac":"e2:b3:49:1a:1e:df"},{"name":"eth0","mac":"0a:58:0a:d9:00:07","sandbox":"/var/run/netns/ff994bbb-270b-407f-bb37-98db6f85a24d"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.7/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:26:40.488816114+00:00 stderr F I1212 16:26:40.488756 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-image-registry", Name:"image-registry-5d9d95bf5b-6md9w", UID:"b75bc011-274b-4fb1-8311-15ffa1b33366", APIVersion:"v1", ResourceVersion:"41000", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.7/23] from ovn-kubernetes 2025-12-12T16:26:40.510661476+00:00 stderr F 2025-12-12T16:26:40Z [verbose] ADD finished CNI request ContainerID:"2222d2af5cfbae8c2cbbb82776f89d17b3250ebe67976d95e30a580990050687" Netns:"/var/run/netns/ff994bbb-270b-407f-bb37-98db6f85a24d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-image-registry;K8S_POD_NAME=image-registry-5d9d95bf5b-6md9w;K8S_POD_INFRA_CONTAINER_ID=2222d2af5cfbae8c2cbbb82776f89d17b3250ebe67976d95e30a580990050687;K8S_POD_UID=b75bc011-274b-4fb1-8311-15ffa1b33366" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"e2:b3:49:1a:1e:df\",\"name\":\"2222d2af5cfbae8\"},{\"mac\":\"0a:58:0a:d9:00:07\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/ff994bbb-270b-407f-bb37-98db6f85a24d\"}],\"ips\":[{\"address\":\"10.217.0.7/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:26:43.218867445+00:00 stderr F 2025-12-12T16:26:43Z [verbose] ADD starting CNI request ContainerID:"1ee56347ed1b9c5be047fbf9b682c5d1ce70b62f833bef1400a0b70fbb9f59d9" Netns:"/var/run/netns/39a47076-5779-455d-95de-ba901ce9d271" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85;K8S_POD_INFRA_CONTAINER_ID=1ee56347ed1b9c5be047fbf9b682c5d1ce70b62f833bef1400a0b70fbb9f59d9;K8S_POD_UID=475bdfbd-4d7a-4f0b-9483-7ad3811012cf" Path:"" 2025-12-12T16:26:43.365198792+00:00 stderr F I1212 16:26:43.355361 25183 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:26:43.365198792+00:00 stderr F I1212 16:26:43.355962 25183 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:26:43.365198792+00:00 stderr F I1212 16:26:43.355978 25183 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:26:43.365198792+00:00 stderr F I1212 16:26:43.355984 25183 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:26:43.365198792+00:00 stderr F I1212 16:26:43.355990 25183 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:26:43.365775847+00:00 stderr F 2025-12-12T16:26:43Z [verbose] Add: openshift-marketplace:6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85:475bdfbd-4d7a-4f0b-9483-7ad3811012cf:ovn-kubernetes(ovn-kubernetes):eth0 
{"cniVersion":"0.4.0","interfaces":[{"name":"1ee56347ed1b9c5","mac":"2e:3a:6b:be:d7:62"},{"name":"eth0","mac":"0a:58:0a:d9:00:09","sandbox":"/var/run/netns/39a47076-5779-455d-95de-ba901ce9d271"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.9/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:26:43.368251659+00:00 stderr F I1212 16:26:43.366526 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85", UID:"475bdfbd-4d7a-4f0b-9483-7ad3811012cf", APIVersion:"v1", ResourceVersion:"41068", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.9/23] from ovn-kubernetes 2025-12-12T16:26:43.381593817+00:00 stderr F 2025-12-12T16:26:43Z [verbose] ADD finished CNI request ContainerID:"1ee56347ed1b9c5be047fbf9b682c5d1ce70b62f833bef1400a0b70fbb9f59d9" Netns:"/var/run/netns/39a47076-5779-455d-95de-ba901ce9d271" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85;K8S_POD_INFRA_CONTAINER_ID=1ee56347ed1b9c5be047fbf9b682c5d1ce70b62f833bef1400a0b70fbb9f59d9;K8S_POD_UID=475bdfbd-4d7a-4f0b-9483-7ad3811012cf" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"2e:3a:6b:be:d7:62\",\"name\":\"1ee56347ed1b9c5\"},{\"mac\":\"0a:58:0a:d9:00:09\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/39a47076-5779-455d-95de-ba901ce9d271\"}],\"ips\":[{\"address\":\"10.217.0.9/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:26:48.023031104+00:00 stderr F 2025-12-12T16:26:48Z [verbose] DEL starting CNI request ContainerID:"1ee56347ed1b9c5be047fbf9b682c5d1ce70b62f833bef1400a0b70fbb9f59d9" Netns:"/var/run/netns/39a47076-5779-455d-95de-ba901ce9d271" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85;K8S_POD_INFRA_CONTAINER_ID=1ee56347ed1b9c5be047fbf9b682c5d1ce70b62f833bef1400a0b70fbb9f59d9;K8S_POD_UID=475bdfbd-4d7a-4f0b-9483-7ad3811012cf" Path:"" 2025-12-12T16:26:48.023770573+00:00 stderr F 2025-12-12T16:26:48Z [verbose] Del: openshift-marketplace:6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85:475bdfbd-4d7a-4f0b-9483-7ad3811012cf:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:26:48.203370921+00:00 stderr F 2025-12-12T16:26:48Z [verbose] DEL finished CNI request ContainerID:"1ee56347ed1b9c5be047fbf9b682c5d1ce70b62f833bef1400a0b70fbb9f59d9" Netns:"/var/run/netns/39a47076-5779-455d-95de-ba901ce9d271" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85;K8S_POD_INFRA_CONTAINER_ID=1ee56347ed1b9c5be047fbf9b682c5d1ce70b62f833bef1400a0b70fbb9f59d9;K8S_POD_UID=475bdfbd-4d7a-4f0b-9483-7ad3811012cf" Path:"", result: "", err: 2025-12-12T16:26:49.625286433+00:00 stderr F 2025-12-12T16:26:49Z [verbose] ADD starting CNI request ContainerID:"a5b13036e403cee2132c5c7872f885a1a961158dcae306204f37b292640e09b1" Netns:"/var/run/netns/4025e425-df6d-4ed4-a60e-cd7c93369acd" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx;K8S_POD_INFRA_CONTAINER_ID=a5b13036e403cee2132c5c7872f885a1a961158dcae306204f37b292640e09b1;K8S_POD_UID=fd6585e4-c189-4aaf-98f6-4081874d4336" Path:"" 2025-12-12T16:26:49.778145625+00:00 stderr F I1212 16:26:49.768470 25360 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:26:49.778145625+00:00 stderr F I1212 16:26:49.769230 25360 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:26:49.778145625+00:00 stderr F I1212 16:26:49.769250 25360 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:26:49.778145625+00:00 stderr F I1212 16:26:49.769260 25360 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:26:49.778145625+00:00 stderr F I1212 16:26:49.769267 25360 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:26:49.778855513+00:00 stderr F 2025-12-12T16:26:49Z [verbose] Add: openshift-marketplace:8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx:fd6585e4-c189-4aaf-98f6-4081874d4336:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"a5b13036e403cee","mac":"1e:46:b0:7d:b3:a6"},{"name":"eth0","mac":"0a:58:0a:d9:00:0f","sandbox":"/var/run/netns/4025e425-df6d-4ed4-a60e-cd7c93369acd"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.15/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:26:49.779142430+00:00 stderr F I1212 16:26:49.779093 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx", UID:"fd6585e4-c189-4aaf-98f6-4081874d4336", APIVersion:"v1", ResourceVersion:"41137", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.15/23] from ovn-kubernetes 2025-12-12T16:26:49.800518830+00:00 stderr F 2025-12-12T16:26:49Z [verbose] ADD finished CNI request ContainerID:"a5b13036e403cee2132c5c7872f885a1a961158dcae306204f37b292640e09b1" Netns:"/var/run/netns/4025e425-df6d-4ed4-a60e-cd7c93369acd" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx;K8S_POD_INFRA_CONTAINER_ID=a5b13036e403cee2132c5c7872f885a1a961158dcae306204f37b292640e09b1;K8S_POD_UID=fd6585e4-c189-4aaf-98f6-4081874d4336" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"1e:46:b0:7d:b3:a6\",\"name\":\"a5b13036e403cee\"},{\"mac\":\"0a:58:0a:d9:00:0f\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/4025e425-df6d-4ed4-a60e-cd7c93369acd\"}],\"ips\":[{\"address\":\"10.217.0.15/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:26:53.473809850+00:00 stderr F 2025-12-12T16:26:53Z [verbose] ADD starting CNI request ContainerID:"2e8b42efd3171feed15bbc44b54f6ac59003e21923d0589f40a1a944cfdccf56" Netns:"/var/run/netns/50c19c8e-c51a-410c-b7d3-b2cac4be8bce" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5;K8S_POD_INFRA_CONTAINER_ID=2e8b42efd3171feed15bbc44b54f6ac59003e21923d0589f40a1a944cfdccf56;K8S_POD_UID=86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728" Path:"" 2025-12-12T16:26:53.656510906+00:00 stderr F 2025-12-12T16:26:53Z [verbose] ADD starting 
CNI request ContainerID:"261a1835ba124316e408e51da506d5cb50fed33202cf3b038da80e6df8dcbac3" Netns:"/var/run/netns/3570c08d-63b8-4ff4-ae84-bebc7cfc1a12" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-8pl6d;K8S_POD_INFRA_CONTAINER_ID=261a1835ba124316e408e51da506d5cb50fed33202cf3b038da80e6df8dcbac3;K8S_POD_UID=3d7f1528-4228-46f7-8f31-311c3c561112" Path:"" 2025-12-12T16:26:53.875225271+00:00 stderr F I1212 16:26:53.856937 25503 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:26:53.875225271+00:00 stderr F I1212 16:26:53.857644 25503 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:26:53.875225271+00:00 stderr F I1212 16:26:53.857654 25503 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:26:53.875225271+00:00 stderr F I1212 16:26:53.857661 25503 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:26:53.875225271+00:00 stderr F I1212 16:26:53.857666 25503 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:26:53.875225271+00:00 stderr F 2025-12-12T16:26:53Z [verbose] Add: openshift-marketplace:1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5:86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"2e8b42efd3171fe","mac":"0e:59:4b:95:f6:8c"},{"name":"eth0","mac":"0a:58:0a:d9:00:1c","sandbox":"/var/run/netns/50c19c8e-c51a-410c-b7d3-b2cac4be8bce"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.28/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:26:53.877687434+00:00 stderr F I1212 16:26:53.877533 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5", UID:"86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728", APIVersion:"v1", ResourceVersion:"41228", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.28/23] from ovn-kubernetes 2025-12-12T16:26:53.949742094+00:00 stderr F 2025-12-12T16:26:53Z [verbose] ADD finished CNI request ContainerID:"2e8b42efd3171feed15bbc44b54f6ac59003e21923d0589f40a1a944cfdccf56" Netns:"/var/run/netns/50c19c8e-c51a-410c-b7d3-b2cac4be8bce" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5;K8S_POD_INFRA_CONTAINER_ID=2e8b42efd3171feed15bbc44b54f6ac59003e21923d0589f40a1a944cfdccf56;K8S_POD_UID=86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"0e:59:4b:95:f6:8c\",\"name\":\"2e8b42efd3171fe\"},{\"mac\":\"0a:58:0a:d9:00:1c\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/50c19c8e-c51a-410c-b7d3-b2cac4be8bce\"}],\"ips\":[{\"address\":\"10.217.0.28/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:26:54.004284812+00:00 stderr F I1212 16:26:53.968671 25548 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:26:54.004284812+00:00 stderr F I1212 16:26:53.968829 25548 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:26:54.004284812+00:00 stderr F I1212 16:26:53.968837 25548 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 
2025-12-12T16:26:54.004284812+00:00 stderr F I1212 16:26:53.968854 25548 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:26:54.004284812+00:00 stderr F I1212 16:26:53.968878 25548 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:26:54.008649792+00:00 stderr F 2025-12-12T16:26:54Z [verbose] Add: openshift-marketplace:certified-operators-8pl6d:3d7f1528-4228-46f7-8f31-311c3c561112:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"261a1835ba12431","mac":"12:04:d9:43:a9:03"},{"name":"eth0","mac":"0a:58:0a:d9:00:20","sandbox":"/var/run/netns/3570c08d-63b8-4ff4-ae84-bebc7cfc1a12"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.32/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:26:54.010095879+00:00 stderr F I1212 16:26:54.008838 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"certified-operators-8pl6d", UID:"3d7f1528-4228-46f7-8f31-311c3c561112", APIVersion:"v1", ResourceVersion:"41235", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.32/23] from ovn-kubernetes 2025-12-12T16:26:54.049238748+00:00 stderr F 2025-12-12T16:26:54Z [verbose] ADD finished CNI request ContainerID:"261a1835ba124316e408e51da506d5cb50fed33202cf3b038da80e6df8dcbac3" Netns:"/var/run/netns/3570c08d-63b8-4ff4-ae84-bebc7cfc1a12" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-8pl6d;K8S_POD_INFRA_CONTAINER_ID=261a1835ba124316e408e51da506d5cb50fed33202cf3b038da80e6df8dcbac3;K8S_POD_UID=3d7f1528-4228-46f7-8f31-311c3c561112" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"12:04:d9:43:a9:03\",\"name\":\"261a1835ba12431\"},{\"mac\":\"0a:58:0a:d9:00:20\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/3570c08d-63b8-4ff4-ae84-bebc7cfc1a12\"}],\"ips\":[{\"address\":\"10.217.0.32/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:26:56.115112208+00:00 stderr F 2025-12-12T16:26:56Z [verbose] DEL starting CNI request ContainerID:"a5b13036e403cee2132c5c7872f885a1a961158dcae306204f37b292640e09b1" Netns:"/var/run/netns/4025e425-df6d-4ed4-a60e-cd7c93369acd" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx;K8S_POD_INFRA_CONTAINER_ID=a5b13036e403cee2132c5c7872f885a1a961158dcae306204f37b292640e09b1;K8S_POD_UID=fd6585e4-c189-4aaf-98f6-4081874d4336" Path:"" 2025-12-12T16:26:56.115739574+00:00 stderr F 2025-12-12T16:26:56Z [verbose] Del: openshift-marketplace:8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx:fd6585e4-c189-4aaf-98f6-4081874d4336:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:26:56.294607633+00:00 stderr F 2025-12-12T16:26:56Z [verbose] DEL finished CNI request ContainerID:"a5b13036e403cee2132c5c7872f885a1a961158dcae306204f37b292640e09b1" Netns:"/var/run/netns/4025e425-df6d-4ed4-a60e-cd7c93369acd" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx;K8S_POD_INFRA_CONTAINER_ID=a5b13036e403cee2132c5c7872f885a1a961158dcae306204f37b292640e09b1;K8S_POD_UID=fd6585e4-c189-4aaf-98f6-4081874d4336" Path:"", result: "", err: 2025-12-12T16:26:58.207382596+00:00 stderr F 2025-12-12T16:26:58Z [verbose] ADD starting CNI request ContainerID:"fe08d83e6afea017058d5fc6f57ddccb08368d775f104e9ac99e55142871b310" Netns:"/var/run/netns/fb237388-4cc3-4796-bc10-1a36fe6a2990" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-b4n58;K8S_POD_INFRA_CONTAINER_ID=fe08d83e6afea017058d5fc6f57ddccb08368d775f104e9ac99e55142871b310;K8S_POD_UID=5f56514c-f6b2-4f15-8a4a-615ab5442708" Path:"" 2025-12-12T16:26:58.462851455+00:00 stderr F I1212 16:26:58.453134 25689 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:26:58.462851455+00:00 stderr F I1212 16:26:58.453784 25689 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:26:58.462851455+00:00 stderr F I1212 16:26:58.453792 25689 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:26:58.462851455+00:00 stderr F I1212 16:26:58.453798 25689 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:26:58.462851455+00:00 stderr F I1212 16:26:58.453805 25689 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:26:58.463021289+00:00 stderr F 2025-12-12T16:26:58Z [verbose] Add: openshift-marketplace:redhat-operators-b4n58:5f56514c-f6b2-4f15-8a4a-615ab5442708:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"fe08d83e6afea01","mac":"e6:7b:ab:76:49:dc"},{"name":"eth0","mac":"0a:58:0a:d9:00:2c","sandbox":"/var/run/netns/fb237388-4cc3-4796-bc10-1a36fe6a2990"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.44/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:26:58.464343463+00:00 stderr F I1212 16:26:58.463305 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"redhat-operators-b4n58", UID:"5f56514c-f6b2-4f15-8a4a-615ab5442708", APIVersion:"v1", ResourceVersion:"41465", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.44/23] from ovn-kubernetes 2025-12-12T16:26:58.489365576+00:00 stderr F 2025-12-12T16:26:58Z [verbose] ADD finished CNI request ContainerID:"fe08d83e6afea017058d5fc6f57ddccb08368d775f104e9ac99e55142871b310" Netns:"/var/run/netns/fb237388-4cc3-4796-bc10-1a36fe6a2990" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-b4n58;K8S_POD_INFRA_CONTAINER_ID=fe08d83e6afea017058d5fc6f57ddccb08368d775f104e9ac99e55142871b310;K8S_POD_UID=5f56514c-f6b2-4f15-8a4a-615ab5442708" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"e6:7b:ab:76:49:dc\",\"name\":\"fe08d83e6afea01\"},{\"mac\":\"0a:58:0a:d9:00:2c\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/fb237388-4cc3-4796-bc10-1a36fe6a2990\"}],\"ips\":[{\"address\":\"10.217.0.44/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:27:05.393485460+00:00 stderr F 2025-12-12T16:27:05Z [verbose] ADD starting CNI request ContainerID:"5b61638b538d67385ff62ec556bb9836d79d18f96dbf65a8bfc5dbd83678fe29" 
Netns:"/var/run/netns/dea6f00e-d8a2-4431-9ab0-b29c7b1f5f4b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operators;K8S_POD_NAME=perses-operator-68bdb49cbf-nqtp8;K8S_POD_INFRA_CONTAINER_ID=5b61638b538d67385ff62ec556bb9836d79d18f96dbf65a8bfc5dbd83678fe29;K8S_POD_UID=f38bca5c-15f3-4d63-9c03-a33ec7a5f22b" Path:"" 2025-12-12T16:27:05.485807356+00:00 stderr F 2025-12-12T16:27:05Z [verbose] ADD starting CNI request ContainerID:"5e1483dea205ec79a1a499118d0a0e0c1adb2fd310a13a6f5a34c5ef1a4ef13c" Netns:"/var/run/netns/3777e81a-6f34-4051-ab4f-0e764c19ed23" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operators;K8S_POD_NAME=observability-operator-78c97476f4-qxqmn;K8S_POD_INFRA_CONTAINER_ID=5e1483dea205ec79a1a499118d0a0e0c1adb2fd310a13a6f5a34c5ef1a4ef13c;K8S_POD_UID=9425bd1f-c734-4ec0-9e2e-80b2d5ece709" Path:"" 2025-12-12T16:27:05.486987146+00:00 stderr P 2025-12-12T16:27:05Z [verbose] 2025-12-12T16:27:05.487021677+00:00 stderr P ADD starting CNI request ContainerID:"215eaa28b76476200c8ab036e8eeedcc38417fa2d7f547d20c78b51d8eea327b" Netns:"/var/run/netns/13ef5212-43c6-46bf-8a0c-ae7d64578dde" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operators;K8S_POD_NAME=obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g;K8S_POD_INFRA_CONTAINER_ID=215eaa28b76476200c8ab036e8eeedcc38417fa2d7f547d20c78b51d8eea327b;K8S_POD_UID=c6b5aa8b-142f-4f74-a328-f0937a20672f" Path:"" 2025-12-12T16:27:05.487040327+00:00 stderr F 2025-12-12T16:27:05.491987153+00:00 stderr F 2025-12-12T16:27:05Z [verbose] ADD starting CNI request ContainerID:"ca918709ac3de2a4eabe8d4d0736ffb5efe6c11c978e10732b7e348cea2388a2" Netns:"/var/run/netns/e5089e35-8fe8-4ab5-ac59-9e402a71c62d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operators;K8S_POD_NAME=obo-prometheus-operator-86648f486b-wbj29;K8S_POD_INFRA_CONTAINER_ID=ca918709ac3de2a4eabe8d4d0736ffb5efe6c11c978e10732b7e348cea2388a2;K8S_POD_UID=18744739-d26e-4056-a036-656151fcc824" Path:"" 2025-12-12T16:27:05.596504728+00:00 stderr F 2025-12-12T16:27:05Z [verbose] ADD starting CNI request ContainerID:"4f37588aa915265bb38384189d2bebd507e3439e01b1ab8101df19b76f529e46" Netns:"/var/run/netns/34445e48-0ebf-4fe7-8a25-84f7d6534fd1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operators;K8S_POD_NAME=obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr;K8S_POD_INFRA_CONTAINER_ID=4f37588aa915265bb38384189d2bebd507e3439e01b1ab8101df19b76f529e46;K8S_POD_UID=bc636fbb-cf50-4a1f-82f5-81db89bb0f5b" Path:"" 2025-12-12T16:27:05.652425233+00:00 stderr F 2025-12-12T16:27:05Z [verbose] ADD starting CNI request ContainerID:"61fdd494af0ccbc8ea45046db69f7485cdf193e6f532aeb32f18f8c12c5fe3e4" Netns:"/var/run/netns/fc98cbff-9fd8-4ec1-ad2f-f848db10618d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=elastic-operator-6c994c654b-42tmw;K8S_POD_INFRA_CONTAINER_ID=61fdd494af0ccbc8ea45046db69f7485cdf193e6f532aeb32f18f8c12c5fe3e4;K8S_POD_UID=1aa11df6-5c2b-4018-8146-09c5d79b9311" Path:"" 2025-12-12T16:27:05.770142883+00:00 stderr F I1212 16:27:05.750842 25995 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:27:05.770142883+00:00 stderr F I1212 16:27:05.751407 25995 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:27:05.770142883+00:00 stderr F I1212 16:27:05.751418 25995 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:27:05.770142883+00:00 stderr F 
I1212 16:27:05.751426 25995 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:27:05.770142883+00:00 stderr F I1212 16:27:05.751433 25995 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:27:05.770861701+00:00 stderr P 2025-12-12T16:27:05Z [verbose] 2025-12-12T16:27:05.770891452+00:00 stderr P Add: openshift-operators:obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g:c6b5aa8b-142f-4f74-a328-f0937a20672f:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"215eaa28b764762","mac":"ea:93:85:b4:b1:e5"},{"name":"eth0","mac":"0a:58:0a:d9:00:2f","sandbox":"/var/run/netns/13ef5212-43c6-46bf-8a0c-ae7d64578dde"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.47/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:27:05.770910162+00:00 stderr F 2025-12-12T16:27:05.775318044+00:00 stderr F I1212 16:27:05.771292 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-operators", Name:"obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g", UID:"c6b5aa8b-142f-4f74-a328-f0937a20672f", APIVersion:"v1", ResourceVersion:"41675", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.47/23] from ovn-kubernetes 2025-12-12T16:27:05.811843768+00:00 stderr F 2025-12-12T16:27:05Z [verbose] ADD finished CNI request ContainerID:"215eaa28b76476200c8ab036e8eeedcc38417fa2d7f547d20c78b51d8eea327b" Netns:"/var/run/netns/13ef5212-43c6-46bf-8a0c-ae7d64578dde" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operators;K8S_POD_NAME=obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g;K8S_POD_INFRA_CONTAINER_ID=215eaa28b76476200c8ab036e8eeedcc38417fa2d7f547d20c78b51d8eea327b;K8S_POD_UID=c6b5aa8b-142f-4f74-a328-f0937a20672f" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"ea:93:85:b4:b1:e5\",\"name\":\"215eaa28b764762\"},{\"mac\":\"0a:58:0a:d9:00:2f\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/13ef5212-43c6-46bf-8a0c-ae7d64578dde\"}],\"ips\":[{\"address\":\"10.217.0.47/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:27:05.911342446+00:00 stderr F I1212 16:27:05.895009 25994 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:27:05.911342446+00:00 stderr F I1212 16:27:05.895284 25994 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:27:05.911342446+00:00 stderr F I1212 16:27:05.895293 25994 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:27:05.911342446+00:00 stderr F I1212 16:27:05.895299 25994 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:27:05.911342446+00:00 stderr F I1212 16:27:05.895305 25994 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:27:05.911342446+00:00 stderr F 2025-12-12T16:27:05Z [verbose] Add: openshift-operators:observability-operator-78c97476f4-qxqmn:9425bd1f-c734-4ec0-9e2e-80b2d5ece709:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"5e1483dea205ec7","mac":"56:fa:ab:0f:2e:ca"},{"name":"eth0","mac":"0a:58:0a:d9:00:30","sandbox":"/var/run/netns/3777e81a-6f34-4051-ab4f-0e764c19ed23"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.48/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:27:05.911342446+00:00 stderr F I1212 16:27:05.908844 
23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-operators", Name:"observability-operator-78c97476f4-qxqmn", UID:"9425bd1f-c734-4ec0-9e2e-80b2d5ece709", APIVersion:"v1", ResourceVersion:"41694", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.48/23] from ovn-kubernetes 2025-12-12T16:27:06.003568360+00:00 stderr P 2025-12-12T16:27:06Z [verbose] 2025-12-12T16:27:06.003661903+00:00 stderr P ADD finished CNI request ContainerID:"5e1483dea205ec79a1a499118d0a0e0c1adb2fd310a13a6f5a34c5ef1a4ef13c" Netns:"/var/run/netns/3777e81a-6f34-4051-ab4f-0e764c19ed23" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operators;K8S_POD_NAME=observability-operator-78c97476f4-qxqmn;K8S_POD_INFRA_CONTAINER_ID=5e1483dea205ec79a1a499118d0a0e0c1adb2fd310a13a6f5a34c5ef1a4ef13c;K8S_POD_UID=9425bd1f-c734-4ec0-9e2e-80b2d5ece709" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"56:fa:ab:0f:2e:ca\",\"name\":\"5e1483dea205ec7\"},{\"mac\":\"0a:58:0a:d9:00:30\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/3777e81a-6f34-4051-ab4f-0e764c19ed23\"}],\"ips\":[{\"address\":\"10.217.0.48/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:27:06.003943280+00:00 stderr F 2025-12-12T16:27:06.240305992+00:00 stderr F I1212 16:27:06.232559 26077 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:27:06.240305992+00:00 stderr F I1212 16:27:06.232980 26077 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:27:06.240305992+00:00 stderr F I1212 16:27:06.232989 26077 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:27:06.240305992+00:00 stderr F I1212 16:27:06.232996 26077 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:27:06.240305992+00:00 stderr F I1212 16:27:06.233003 26077 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:27:06.240698162+00:00 stderr F 2025-12-12T16:27:06Z [verbose] Add: service-telemetry:elastic-operator-6c994c654b-42tmw:1aa11df6-5c2b-4018-8146-09c5d79b9311:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"61fdd494af0ccbc","mac":"56:e5:04:8d:d3:93"},{"name":"eth0","mac":"0a:58:0a:d9:00:32","sandbox":"/var/run/netns/fc98cbff-9fd8-4ec1-ad2f-f848db10618d"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.50/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:27:06.240871836+00:00 stderr F I1212 16:27:06.240837 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"service-telemetry", Name:"elastic-operator-6c994c654b-42tmw", UID:"1aa11df6-5c2b-4018-8146-09c5d79b9311", APIVersion:"v1", ResourceVersion:"41996", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.50/23] from ovn-kubernetes 2025-12-12T16:27:06.271382248+00:00 stderr F I1212 16:27:06.263786 25966 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:27:06.271382248+00:00 stderr F I1212 16:27:06.264129 25966 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:27:06.271382248+00:00 stderr F I1212 16:27:06.264145 25966 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:27:06.271382248+00:00 stderr F I1212 16:27:06.264153 25966 envvar.go:172] "Feature gate default state" 
feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:27:06.271382248+00:00 stderr F I1212 16:27:06.264163 25966 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:27:06.271822970+00:00 stderr F 2025-12-12T16:27:06Z [verbose] Add: openshift-operators:perses-operator-68bdb49cbf-nqtp8:f38bca5c-15f3-4d63-9c03-a33ec7a5f22b:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"5b61638b538d673","mac":"0a:d8:ea:a6:56:0b"},{"name":"eth0","mac":"0a:58:0a:d9:00:31","sandbox":"/var/run/netns/dea6f00e-d8a2-4431-9ab0-b29c7b1f5f4b"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.49/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:27:06.272661411+00:00 stderr F I1212 16:27:06.272204 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-operators", Name:"perses-operator-68bdb49cbf-nqtp8", UID:"f38bca5c-15f3-4d63-9c03-a33ec7a5f22b", APIVersion:"v1", ResourceVersion:"41719", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.49/23] from ovn-kubernetes 2025-12-12T16:27:06.279694059+00:00 stderr F 2025-12-12T16:27:06Z [verbose] ADD finished CNI request ContainerID:"61fdd494af0ccbc8ea45046db69f7485cdf193e6f532aeb32f18f8c12c5fe3e4" Netns:"/var/run/netns/fc98cbff-9fd8-4ec1-ad2f-f848db10618d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=elastic-operator-6c994c654b-42tmw;K8S_POD_INFRA_CONTAINER_ID=61fdd494af0ccbc8ea45046db69f7485cdf193e6f532aeb32f18f8c12c5fe3e4;K8S_POD_UID=1aa11df6-5c2b-4018-8146-09c5d79b9311" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"56:e5:04:8d:d3:93\",\"name\":\"61fdd494af0ccbc\"},{\"mac\":\"0a:58:0a:d9:00:32\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/fc98cbff-9fd8-4ec1-ad2f-f848db10618d\"}],\"ips\":[{\"address\":\"10.217.0.50/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:27:06.314306305+00:00 stderr P 2025-12-12T16:27:06Z [verbose] 2025-12-12T16:27:06.314406387+00:00 stderr P ADD finished CNI request ContainerID:"5b61638b538d67385ff62ec556bb9836d79d18f96dbf65a8bfc5dbd83678fe29" Netns:"/var/run/netns/dea6f00e-d8a2-4431-9ab0-b29c7b1f5f4b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operators;K8S_POD_NAME=perses-operator-68bdb49cbf-nqtp8;K8S_POD_INFRA_CONTAINER_ID=5b61638b538d67385ff62ec556bb9836d79d18f96dbf65a8bfc5dbd83678fe29;K8S_POD_UID=f38bca5c-15f3-4d63-9c03-a33ec7a5f22b" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"0a:d8:ea:a6:56:0b\",\"name\":\"5b61638b538d673\"},{\"mac\":\"0a:58:0a:d9:00:31\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/dea6f00e-d8a2-4431-9ab0-b29c7b1f5f4b\"}],\"ips\":[{\"address\":\"10.217.0.49/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:27:06.314426178+00:00 stderr F 2025-12-12T16:27:06.332886505+00:00 stderr F I1212 16:27:06.303565 26001 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:27:06.332886505+00:00 stderr F I1212 16:27:06.303792 26001 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:27:06.332886505+00:00 stderr F I1212 16:27:06.303803 26001 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:27:06.332886505+00:00 stderr F I1212 16:27:06.303810 26001 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:27:06.332886505+00:00 stderr F 
I1212 16:27:06.303896 26001 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:27:06.333582013+00:00 stderr F 2025-12-12T16:27:06Z [verbose] Add: openshift-operators:obo-prometheus-operator-86648f486b-wbj29:18744739-d26e-4056-a036-656151fcc824:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"ca918709ac3de2a","mac":"c6:e6:d9:23:17:e5"},{"name":"eth0","mac":"0a:58:0a:d9:00:2d","sandbox":"/var/run/netns/e5089e35-8fe8-4ab5-ac59-9e402a71c62d"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.45/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:27:06.335116342+00:00 stderr F I1212 16:27:06.333954 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-operators", Name:"obo-prometheus-operator-86648f486b-wbj29", UID:"18744739-d26e-4056-a036-656151fcc824", APIVersion:"v1", ResourceVersion:"41643", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.45/23] from ovn-kubernetes 2025-12-12T16:27:06.340248761+00:00 stderr F I1212 16:27:06.307348 26034 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:27:06.340248761+00:00 stderr F I1212 16:27:06.320781 26034 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:27:06.340248761+00:00 stderr F I1212 16:27:06.320821 26034 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:27:06.340248761+00:00 stderr F I1212 16:27:06.320843 26034 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:27:06.340248761+00:00 stderr F I1212 16:27:06.320851 26034 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:27:06.340569000+00:00 stderr F 2025-12-12T16:27:06Z [verbose] Add: openshift-operators:obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr:bc636fbb-cf50-4a1f-82f5-81db89bb0f5b:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"4f37588aa915265","mac":"a2:a0:36:68:10:4e"},{"name":"eth0","mac":"0a:58:0a:d9:00:2e","sandbox":"/var/run/netns/34445e48-0ebf-4fe7-8a25-84f7d6534fd1"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.46/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:27:06.340842906+00:00 stderr F I1212 16:27:06.340796 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-operators", Name:"obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr", UID:"bc636fbb-cf50-4a1f-82f5-81db89bb0f5b", APIVersion:"v1", ResourceVersion:"41672", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.46/23] from ovn-kubernetes 2025-12-12T16:27:06.376938690+00:00 stderr F 2025-12-12T16:27:06Z [verbose] ADD finished CNI request ContainerID:"ca918709ac3de2a4eabe8d4d0736ffb5efe6c11c978e10732b7e348cea2388a2" Netns:"/var/run/netns/e5089e35-8fe8-4ab5-ac59-9e402a71c62d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operators;K8S_POD_NAME=obo-prometheus-operator-86648f486b-wbj29;K8S_POD_INFRA_CONTAINER_ID=ca918709ac3de2a4eabe8d4d0736ffb5efe6c11c978e10732b7e348cea2388a2;K8S_POD_UID=18744739-d26e-4056-a036-656151fcc824" Path:"", result: 
"{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"c6:e6:d9:23:17:e5\",\"name\":\"ca918709ac3de2a\"},{\"mac\":\"0a:58:0a:d9:00:2d\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/e5089e35-8fe8-4ab5-ac59-9e402a71c62d\"}],\"ips\":[{\"address\":\"10.217.0.45/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:27:06.379375352+00:00 stderr F 2025-12-12T16:27:06Z [verbose] ADD finished CNI request ContainerID:"4f37588aa915265bb38384189d2bebd507e3439e01b1ab8101df19b76f529e46" Netns:"/var/run/netns/34445e48-0ebf-4fe7-8a25-84f7d6534fd1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operators;K8S_POD_NAME=obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr;K8S_POD_INFRA_CONTAINER_ID=4f37588aa915265bb38384189d2bebd507e3439e01b1ab8101df19b76f529e46;K8S_POD_UID=bc636fbb-cf50-4a1f-82f5-81db89bb0f5b" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"a2:a0:36:68:10:4e\",\"name\":\"4f37588aa915265\"},{\"mac\":\"0a:58:0a:d9:00:2e\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/34445e48-0ebf-4fe7-8a25-84f7d6534fd1\"}],\"ips\":[{\"address\":\"10.217.0.46/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:27:06.390117534+00:00 stderr F 2025-12-12T16:27:06Z [verbose] DEL starting CNI request ContainerID:"261a1835ba124316e408e51da506d5cb50fed33202cf3b038da80e6df8dcbac3" Netns:"/var/run/netns/3570c08d-63b8-4ff4-ae84-bebc7cfc1a12" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-8pl6d;K8S_POD_INFRA_CONTAINER_ID=261a1835ba124316e408e51da506d5cb50fed33202cf3b038da80e6df8dcbac3;K8S_POD_UID=3d7f1528-4228-46f7-8f31-311c3c561112" Path:"" 2025-12-12T16:27:06.390649127+00:00 stderr F 2025-12-12T16:27:06Z [verbose] Del: openshift-marketplace:certified-operators-8pl6d:3d7f1528-4228-46f7-8f31-311c3c561112:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:27:06.651535930+00:00 stderr P 2025-12-12T16:27:06Z [verbose] 2025-12-12T16:27:06.651735505+00:00 stderr P DEL finished CNI request ContainerID:"261a1835ba124316e408e51da506d5cb50fed33202cf3b038da80e6df8dcbac3" Netns:"/var/run/netns/3570c08d-63b8-4ff4-ae84-bebc7cfc1a12" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-8pl6d;K8S_POD_INFRA_CONTAINER_ID=261a1835ba124316e408e51da506d5cb50fed33202cf3b038da80e6df8dcbac3;K8S_POD_UID=3d7f1528-4228-46f7-8f31-311c3c561112" Path:"", result: "", err: 2025-12-12T16:27:06.651877709+00:00 stderr F 2025-12-12T16:27:09.416398074+00:00 stderr F 2025-12-12T16:27:09Z [verbose] DEL starting CNI request ContainerID:"2e8b42efd3171feed15bbc44b54f6ac59003e21923d0589f40a1a944cfdccf56" Netns:"/var/run/netns/50c19c8e-c51a-410c-b7d3-b2cac4be8bce" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5;K8S_POD_INFRA_CONTAINER_ID=2e8b42efd3171feed15bbc44b54f6ac59003e21923d0589f40a1a944cfdccf56;K8S_POD_UID=86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728" Path:"" 2025-12-12T16:27:09.417448991+00:00 stderr F 2025-12-12T16:27:09Z [verbose] Del: openshift-marketplace:1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5:86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728:ovn-kubernetes:eth0 
{"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:27:09.662627426+00:00 stderr F 2025-12-12T16:27:09Z [verbose] DEL finished CNI request ContainerID:"2e8b42efd3171feed15bbc44b54f6ac59003e21923d0589f40a1a944cfdccf56" Netns:"/var/run/netns/50c19c8e-c51a-410c-b7d3-b2cac4be8bce" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5;K8S_POD_INFRA_CONTAINER_ID=2e8b42efd3171feed15bbc44b54f6ac59003e21923d0589f40a1a944cfdccf56;K8S_POD_UID=86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728" Path:"", result: "", err: 2025-12-12T16:27:27.757673235+00:00 stderr P 2025-12-12T16:27:27Z [verbose] 2025-12-12T16:27:27.757726067+00:00 stderr P DEL starting CNI request ContainerID:"fe08d83e6afea017058d5fc6f57ddccb08368d775f104e9ac99e55142871b310" Netns:"/var/run/netns/fb237388-4cc3-4796-bc10-1a36fe6a2990" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-b4n58;K8S_POD_INFRA_CONTAINER_ID=fe08d83e6afea017058d5fc6f57ddccb08368d775f104e9ac99e55142871b310;K8S_POD_UID=5f56514c-f6b2-4f15-8a4a-615ab5442708" Path:"" 2025-12-12T16:27:27.757744657+00:00 stderr F 2025-12-12T16:27:27.758546307+00:00 stderr P 2025-12-12T16:27:27Z [verbose] 2025-12-12T16:27:27.758574948+00:00 stderr P Del: openshift-marketplace:redhat-operators-b4n58:5f56514c-f6b2-4f15-8a4a-615ab5442708:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:27:27.758592819+00:00 stderr F 2025-12-12T16:27:27.933456034+00:00 stderr F 2025-12-12T16:27:27Z [verbose] DEL finished CNI request ContainerID:"fe08d83e6afea017058d5fc6f57ddccb08368d775f104e9ac99e55142871b310" Netns:"/var/run/netns/fb237388-4cc3-4796-bc10-1a36fe6a2990" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-b4n58;K8S_POD_INFRA_CONTAINER_ID=fe08d83e6afea017058d5fc6f57ddccb08368d775f104e9ac99e55142871b310;K8S_POD_UID=5f56514c-f6b2-4f15-8a4a-615ab5442708" Path:"", result: "", err: 2025-12-12T16:27:29.603457950+00:00 stderr F 2025-12-12T16:27:29Z [verbose] DEL starting CNI request ContainerID:"4d802f5dbe85c769c5b4afa6aaa710f145332a5713a213a44b0344adeeb96222" Netns:"/var/run/netns/8d661401-00f6-4192-9cfa-40024df85217" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-image-registry;K8S_POD_NAME=image-registry-66587d64c8-jqtjf;K8S_POD_INFRA_CONTAINER_ID=4d802f5dbe85c769c5b4afa6aaa710f145332a5713a213a44b0344adeeb96222;K8S_POD_UID=162da780-4bd3-4acf-b114-06ae104fc8ad" Path:"" 2025-12-12T16:27:29.604280331+00:00 stderr F 2025-12-12T16:27:29Z [verbose] Del: openshift-image-registry:image-registry-66587d64c8-jqtjf:162da780-4bd3-4acf-b114-06ae104fc8ad:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:27:29.614864298+00:00 stderr F 2025-12-12T16:27:29Z [verbose] ADD starting CNI request 
ContainerID:"66b94b25dc46df9365e11bb4d6f85afcf13fc946133634b28e06a6aeafa41bd9" Netns:"/var/run/netns/e338f006-d765-4cc5-aae9-ebe2abd8738e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=cert-manager-operator;K8S_POD_NAME=cert-manager-operator-controller-manager-64c74584c4-djdmt;K8S_POD_INFRA_CONTAINER_ID=66b94b25dc46df9365e11bb4d6f85afcf13fc946133634b28e06a6aeafa41bd9;K8S_POD_UID=50e025ff-2065-4156-844d-68d8587d7b6c" Path:"" 2025-12-12T16:27:29.631651813+00:00 stderr F 2025-12-12T16:27:29Z [verbose] ADD starting CNI request ContainerID:"bbad89595bffa7c2b78f2f4506d008724735866d5bdc5fb821bbce670a2547db" Netns:"/var/run/netns/9f4917ff-2de5-4b23-8063-9baa5615a264" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-9wq8j;K8S_POD_INFRA_CONTAINER_ID=bbad89595bffa7c2b78f2f4506d008724735866d5bdc5fb821bbce670a2547db;K8S_POD_UID=098dcbcc-c98d-4de4-9c46-f40973d5ca17" Path:"" 2025-12-12T16:27:29.821151339+00:00 stderr F 2025-12-12T16:27:29Z [verbose] DEL finished CNI request ContainerID:"4d802f5dbe85c769c5b4afa6aaa710f145332a5713a213a44b0344adeeb96222" Netns:"/var/run/netns/8d661401-00f6-4192-9cfa-40024df85217" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-image-registry;K8S_POD_NAME=image-registry-66587d64c8-jqtjf;K8S_POD_INFRA_CONTAINER_ID=4d802f5dbe85c769c5b4afa6aaa710f145332a5713a213a44b0344adeeb96222;K8S_POD_UID=162da780-4bd3-4acf-b114-06ae104fc8ad" Path:"", result: "", err: 2025-12-12T16:27:29.860298740+00:00 stderr F I1212 16:27:29.850156 26830 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:27:29.860298740+00:00 stderr F I1212 16:27:29.850828 26830 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:27:29.860298740+00:00 stderr F I1212 16:27:29.850866 26830 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:27:29.860298740+00:00 stderr F I1212 16:27:29.850903 26830 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:27:29.860298740+00:00 stderr F I1212 16:27:29.850928 26830 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:27:29.860298740+00:00 stderr F 2025-12-12T16:27:29Z [verbose] Add: cert-manager-operator:cert-manager-operator-controller-manager-64c74584c4-djdmt:50e025ff-2065-4156-844d-68d8587d7b6c:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"66b94b25dc46df9","mac":"a6:5f:aa:d0:96:0f"},{"name":"eth0","mac":"0a:58:0a:d9:00:33","sandbox":"/var/run/netns/e338f006-d765-4cc5-aae9-ebe2abd8738e"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.51/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:27:29.860298740+00:00 stderr F I1212 16:27:29.859860 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"cert-manager-operator", Name:"cert-manager-operator-controller-manager-64c74584c4-djdmt", UID:"50e025ff-2065-4156-844d-68d8587d7b6c", APIVersion:"v1", ResourceVersion:"42440", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.51/23] from ovn-kubernetes 2025-12-12T16:27:29.892706190+00:00 stderr F 2025-12-12T16:27:29Z [verbose] ADD finished CNI request ContainerID:"66b94b25dc46df9365e11bb4d6f85afcf13fc946133634b28e06a6aeafa41bd9" Netns:"/var/run/netns/e338f006-d765-4cc5-aae9-ebe2abd8738e" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=cert-manager-operator;K8S_POD_NAME=cert-manager-operator-controller-manager-64c74584c4-djdmt;K8S_POD_INFRA_CONTAINER_ID=66b94b25dc46df9365e11bb4d6f85afcf13fc946133634b28e06a6aeafa41bd9;K8S_POD_UID=50e025ff-2065-4156-844d-68d8587d7b6c" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"a6:5f:aa:d0:96:0f\",\"name\":\"66b94b25dc46df9\"},{\"mac\":\"0a:58:0a:d9:00:33\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/e338f006-d765-4cc5-aae9-ebe2abd8738e\"}],\"ips\":[{\"address\":\"10.217.0.51/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:27:29.895745397+00:00 stderr F I1212 16:27:29.882447 26840 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:27:29.895745397+00:00 stderr F I1212 16:27:29.882669 26840 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:27:29.895745397+00:00 stderr F I1212 16:27:29.882679 26840 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:27:29.895745397+00:00 stderr F I1212 16:27:29.882686 26840 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:27:29.895745397+00:00 stderr F I1212 16:27:29.882693 26840 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:27:29.896068415+00:00 stderr F 2025-12-12T16:27:29Z [verbose] Add: openshift-marketplace:community-operators-9wq8j:098dcbcc-c98d-4de4-9c46-f40973d5ca17:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"bbad89595bffa7c","mac":"82:78:80:ea:e8:95"},{"name":"eth0","mac":"0a:58:0a:d9:00:34","sandbox":"/var/run/netns/9f4917ff-2de5-4b23-8063-9baa5615a264"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.52/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:27:29.896273671+00:00 stderr F I1212 16:27:29.896206 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"community-operators-9wq8j", UID:"098dcbcc-c98d-4de4-9c46-f40973d5ca17", APIVersion:"v1", ResourceVersion:"42450", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.52/23] from ovn-kubernetes 2025-12-12T16:27:29.917703963+00:00 stderr F 2025-12-12T16:27:29Z [verbose] ADD finished CNI request ContainerID:"bbad89595bffa7c2b78f2f4506d008724735866d5bdc5fb821bbce670a2547db" Netns:"/var/run/netns/9f4917ff-2de5-4b23-8063-9baa5615a264" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-9wq8j;K8S_POD_INFRA_CONTAINER_ID=bbad89595bffa7c2b78f2f4506d008724735866d5bdc5fb821bbce670a2547db;K8S_POD_UID=098dcbcc-c98d-4de4-9c46-f40973d5ca17" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"82:78:80:ea:e8:95\",\"name\":\"bbad89595bffa7c\"},{\"mac\":\"0a:58:0a:d9:00:34\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/9f4917ff-2de5-4b23-8063-9baa5615a264\"}],\"ips\":[{\"address\":\"10.217.0.52/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:27:31.841716937+00:00 stderr F 2025-12-12T16:27:31Z [verbose] ADD starting CNI request ContainerID:"4da9688ca213ab9e5cdb3054bb7b1306f907092097bc718be5cba29b736caff3" Netns:"/var/run/netns/921fefc7-d1af-4465-a650-5284ade74ee2" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=elasticsearch-es-default-0;K8S_POD_INFRA_CONTAINER_ID=4da9688ca213ab9e5cdb3054bb7b1306f907092097bc718be5cba29b736caff3;K8S_POD_UID=8b73b1a4-74b4-4b36-9c02-328f2cc9b99a" Path:"" 2025-12-12T16:27:32.027234062+00:00 stderr F I1212 16:27:32.012974 27125 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:27:32.027234062+00:00 stderr F I1212 16:27:32.013961 27125 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:27:32.027234062+00:00 stderr F I1212 16:27:32.014008 27125 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:27:32.027234062+00:00 stderr F I1212 16:27:32.014034 27125 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:27:32.027234062+00:00 stderr F I1212 16:27:32.014058 27125 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:27:32.027234062+00:00 stderr F 2025-12-12T16:27:32Z [verbose] Add: service-telemetry:elasticsearch-es-default-0:8b73b1a4-74b4-4b36-9c02-328f2cc9b99a:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"4da9688ca213ab9","mac":"ae:c5:18:57:77:0e"},{"name":"eth0","mac":"0a:58:0a:d9:00:35","sandbox":"/var/run/netns/921fefc7-d1af-4465-a650-5284ade74ee2"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.53/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:27:32.027234062+00:00 stderr F I1212 16:27:32.026162 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"service-telemetry", Name:"elasticsearch-es-default-0", UID:"8b73b1a4-74b4-4b36-9c02-328f2cc9b99a", APIVersion:"v1", ResourceVersion:"42598", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.53/23] from ovn-kubernetes 2025-12-12T16:27:32.059965250+00:00 stderr F 2025-12-12T16:27:32Z [verbose] ADD finished CNI request ContainerID:"4da9688ca213ab9e5cdb3054bb7b1306f907092097bc718be5cba29b736caff3" Netns:"/var/run/netns/921fefc7-d1af-4465-a650-5284ade74ee2" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=elasticsearch-es-default-0;K8S_POD_INFRA_CONTAINER_ID=4da9688ca213ab9e5cdb3054bb7b1306f907092097bc718be5cba29b736caff3;K8S_POD_UID=8b73b1a4-74b4-4b36-9c02-328f2cc9b99a" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"ae:c5:18:57:77:0e\",\"name\":\"4da9688ca213ab9\"},{\"mac\":\"0a:58:0a:d9:00:35\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/921fefc7-d1af-4465-a650-5284ade74ee2\"}],\"ips\":[{\"address\":\"10.217.0.53/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:27:55.679319407+00:00 stderr F 2025-12-12T16:27:55Z [verbose] DEL starting CNI request ContainerID:"bbad89595bffa7c2b78f2f4506d008724735866d5bdc5fb821bbce670a2547db" Netns:"/var/run/netns/9f4917ff-2de5-4b23-8063-9baa5615a264" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-9wq8j;K8S_POD_INFRA_CONTAINER_ID=bbad89595bffa7c2b78f2f4506d008724735866d5bdc5fb821bbce670a2547db;K8S_POD_UID=098dcbcc-c98d-4de4-9c46-f40973d5ca17" Path:"" 2025-12-12T16:27:55.679783988+00:00 stderr F 2025-12-12T16:27:55Z [verbose] Del: openshift-marketplace:community-operators-9wq8j:098dcbcc-c98d-4de4-9c46-f40973d5ca17:ovn-kubernetes:eth0 
{"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:27:55.859260181+00:00 stderr F 2025-12-12T16:27:55Z [verbose] DEL finished CNI request ContainerID:"bbad89595bffa7c2b78f2f4506d008724735866d5bdc5fb821bbce670a2547db" Netns:"/var/run/netns/9f4917ff-2de5-4b23-8063-9baa5615a264" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-9wq8j;K8S_POD_INFRA_CONTAINER_ID=bbad89595bffa7c2b78f2f4506d008724735866d5bdc5fb821bbce670a2547db;K8S_POD_UID=098dcbcc-c98d-4de4-9c46-f40973d5ca17" Path:"", result: "", err: 2025-12-12T16:27:59.945338744+00:00 stderr F 2025-12-12T16:27:59Z [verbose] ADD starting CNI request ContainerID:"4a7fc81ec19f101129a063dcd3ab5aa956613d70f452b2fe42d9679caa769ca9" Netns:"/var/run/netns/fed6c48b-8962-44bb-adaf-3bad100cd69f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=cert-manager;K8S_POD_NAME=cert-manager-858d87f86b-r7f8q;K8S_POD_INFRA_CONTAINER_ID=4a7fc81ec19f101129a063dcd3ab5aa956613d70f452b2fe42d9679caa769ca9;K8S_POD_UID=7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc" Path:"" 2025-12-12T16:28:00.125053662+00:00 stderr F 2025-12-12T16:28:00Z [verbose] ADD starting CNI request ContainerID:"ce8620fb1a8db50e75c63b530a6c65737c31aa69dd54e1d85c343c451b3e8abc" Netns:"/var/run/netns/399d4089-d827-4fd6-a29b-703d6c802d17" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=cert-manager;K8S_POD_NAME=cert-manager-cainjector-7dbf76d5c8-lv2hl;K8S_POD_INFRA_CONTAINER_ID=ce8620fb1a8db50e75c63b530a6c65737c31aa69dd54e1d85c343c451b3e8abc;K8S_POD_UID=7f3690b6-63d7-48cc-9508-e016e3476a99" Path:"" 2025-12-12T16:28:00.264493721+00:00 stderr F 2025-12-12T16:28:00Z [verbose] ADD starting CNI request ContainerID:"57f73a8550406dbbe8a60e3f5cb132d520c975894e5462ee359436abd49a1bbd" Netns:"/var/run/netns/5159ba3c-7f86-4274-9fdc-2471c2e194ff" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=cert-manager;K8S_POD_NAME=cert-manager-webhook-7894b5b9b4-2kmrt;K8S_POD_INFRA_CONTAINER_ID=57f73a8550406dbbe8a60e3f5cb132d520c975894e5462ee359436abd49a1bbd;K8S_POD_UID=c184b148-4467-4bd5-8204-6369360370ee" Path:"" 2025-12-12T16:28:00.337457868+00:00 stderr F I1212 16:28:00.329496 27656 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:28:00.337457868+00:00 stderr F I1212 16:28:00.330653 27656 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:28:00.337457868+00:00 stderr F I1212 16:28:00.330713 27656 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:28:00.337457868+00:00 stderr F I1212 16:28:00.330730 27656 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:28:00.337457868+00:00 stderr F I1212 16:28:00.330742 27656 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:28:00.338031573+00:00 stderr F 2025-12-12T16:28:00Z [verbose] Add: cert-manager:cert-manager-858d87f86b-r7f8q:7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc:ovn-kubernetes(ovn-kubernetes):eth0 
{"cniVersion":"0.4.0","interfaces":[{"name":"4a7fc81ec19f101","mac":"6a:b3:9c:31:7f:d4"},{"name":"eth0","mac":"0a:58:0a:d9:00:38","sandbox":"/var/run/netns/fed6c48b-8962-44bb-adaf-3bad100cd69f"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.56/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:28:00.338270969+00:00 stderr F I1212 16:28:00.338198 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"cert-manager", Name:"cert-manager-858d87f86b-r7f8q", UID:"7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc", APIVersion:"v1", ResourceVersion:"43517", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.56/23] from ovn-kubernetes 2025-12-12T16:28:00.344496536+00:00 stderr F I1212 16:28:00.332760 27730 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:28:00.344496536+00:00 stderr F I1212 16:28:00.333137 27730 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:28:00.344496536+00:00 stderr F I1212 16:28:00.333152 27730 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:28:00.344496536+00:00 stderr F I1212 16:28:00.333158 27730 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:28:00.344496536+00:00 stderr F I1212 16:28:00.333165 27730 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:28:00.344947478+00:00 stderr F 2025-12-12T16:28:00Z [verbose] Add: cert-manager:cert-manager-cainjector-7dbf76d5c8-lv2hl:7f3690b6-63d7-48cc-9508-e016e3476a99:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"ce8620fb1a8db50","mac":"b2:2c:85:bc:cf:34"},{"name":"eth0","mac":"0a:58:0a:d9:00:37","sandbox":"/var/run/netns/399d4089-d827-4fd6-a29b-703d6c802d17"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.55/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:28:00.345302176+00:00 stderr F I1212 16:28:00.345253 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"cert-manager", Name:"cert-manager-cainjector-7dbf76d5c8-lv2hl", UID:"7f3690b6-63d7-48cc-9508-e016e3476a99", APIVersion:"v1", ResourceVersion:"43185", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.55/23] from ovn-kubernetes 2025-12-12T16:28:00.356694745+00:00 stderr F 2025-12-12T16:28:00Z [verbose] ADD finished CNI request ContainerID:"4a7fc81ec19f101129a063dcd3ab5aa956613d70f452b2fe42d9679caa769ca9" Netns:"/var/run/netns/fed6c48b-8962-44bb-adaf-3bad100cd69f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=cert-manager;K8S_POD_NAME=cert-manager-858d87f86b-r7f8q;K8S_POD_INFRA_CONTAINER_ID=4a7fc81ec19f101129a063dcd3ab5aa956613d70f452b2fe42d9679caa769ca9;K8S_POD_UID=7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"6a:b3:9c:31:7f:d4\",\"name\":\"4a7fc81ec19f101\"},{\"mac\":\"0a:58:0a:d9:00:38\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/fed6c48b-8962-44bb-adaf-3bad100cd69f\"}],\"ips\":[{\"address\":\"10.217.0.56/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:28:00.371069859+00:00 stderr F 2025-12-12T16:28:00Z [verbose] ADD finished CNI request ContainerID:"ce8620fb1a8db50e75c63b530a6c65737c31aa69dd54e1d85c343c451b3e8abc" Netns:"/var/run/netns/399d4089-d827-4fd6-a29b-703d6c802d17" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=cert-manager;K8S_POD_NAME=cert-manager-cainjector-7dbf76d5c8-lv2hl;K8S_POD_INFRA_CONTAINER_ID=ce8620fb1a8db50e75c63b530a6c65737c31aa69dd54e1d85c343c451b3e8abc;K8S_POD_UID=7f3690b6-63d7-48cc-9508-e016e3476a99" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"b2:2c:85:bc:cf:34\",\"name\":\"ce8620fb1a8db50\"},{\"mac\":\"0a:58:0a:d9:00:37\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/399d4089-d827-4fd6-a29b-703d6c802d17\"}],\"ips\":[{\"address\":\"10.217.0.55/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:28:00.653499857+00:00 stderr F I1212 16:28:00.647240 27781 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:28:00.653499857+00:00 stderr F I1212 16:28:00.647896 27781 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:28:00.653499857+00:00 stderr F I1212 16:28:00.647913 27781 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:28:00.653499857+00:00 stderr F I1212 16:28:00.647921 27781 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:28:00.653499857+00:00 stderr F I1212 16:28:00.647927 27781 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:28:00.654187554+00:00 stderr F 2025-12-12T16:28:00Z [verbose] Add: cert-manager:cert-manager-webhook-7894b5b9b4-2kmrt:c184b148-4467-4bd5-8204-6369360370ee:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"57f73a8550406db","mac":"12:3a:fa:aa:68:81"},{"name":"eth0","mac":"0a:58:0a:d9:00:36","sandbox":"/var/run/netns/5159ba3c-7f86-4274-9fdc-2471c2e194ff"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.54/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:28:00.654446771+00:00 stderr F I1212 16:28:00.654383 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"cert-manager", Name:"cert-manager-webhook-7894b5b9b4-2kmrt", UID:"c184b148-4467-4bd5-8204-6369360370ee", APIVersion:"v1", ResourceVersion:"43093", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.54/23] from ovn-kubernetes 2025-12-12T16:28:00.669273546+00:00 stderr F 2025-12-12T16:28:00Z [verbose] ADD finished CNI request ContainerID:"57f73a8550406dbbe8a60e3f5cb132d520c975894e5462ee359436abd49a1bbd" Netns:"/var/run/netns/5159ba3c-7f86-4274-9fdc-2471c2e194ff" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=cert-manager;K8S_POD_NAME=cert-manager-webhook-7894b5b9b4-2kmrt;K8S_POD_INFRA_CONTAINER_ID=57f73a8550406dbbe8a60e3f5cb132d520c975894e5462ee359436abd49a1bbd;K8S_POD_UID=c184b148-4467-4bd5-8204-6369360370ee" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"12:3a:fa:aa:68:81\",\"name\":\"57f73a8550406db\"},{\"mac\":\"0a:58:0a:d9:00:36\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/5159ba3c-7f86-4274-9fdc-2471c2e194ff\"}],\"ips\":[{\"address\":\"10.217.0.54/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:28:18.584359051+00:00 stderr F 2025-12-12T16:28:18Z [verbose] ADD starting CNI request ContainerID:"1b850b36a6d06f6c415a15e0f884b2a0ab03b742b3c4517bf1f07e3c6aaca997" Netns:"/var/run/netns/69289e11-bf03-4a1b-bfb8-b84474eb9fd1" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-1-build;K8S_POD_INFRA_CONTAINER_ID=1b850b36a6d06f6c415a15e0f884b2a0ab03b742b3c4517bf1f07e3c6aaca997;K8S_POD_UID=48700ccb-8fc3-4b07-af36-f0ff8573dc6a" Path:"" 2025-12-12T16:28:18.804894202+00:00 stderr F I1212 16:28:18.794847 28436 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:28:18.804894202+00:00 stderr F I1212 16:28:18.795471 28436 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:28:18.804894202+00:00 stderr F I1212 16:28:18.795489 28436 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:28:18.804894202+00:00 stderr F I1212 16:28:18.795496 28436 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:28:18.804894202+00:00 stderr F I1212 16:28:18.795515 28436 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:28:18.805421375+00:00 stderr F 2025-12-12T16:28:18Z [verbose] Add: service-telemetry:service-telemetry-framework-index-1-build:48700ccb-8fc3-4b07-af36-f0ff8573dc6a:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"1b850b36a6d06f6","mac":"fe:4e:36:cf:1e:31"},{"name":"eth0","mac":"0a:58:0a:d9:00:39","sandbox":"/var/run/netns/69289e11-bf03-4a1b-bfb8-b84474eb9fd1"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.57/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:28:18.805796245+00:00 stderr F I1212 16:28:18.805715 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"service-telemetry", Name:"service-telemetry-framework-index-1-build", UID:"48700ccb-8fc3-4b07-af36-f0ff8573dc6a", APIVersion:"v1", ResourceVersion:"43660", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.57/23] from ovn-kubernetes 2025-12-12T16:28:18.834049950+00:00 stderr F 2025-12-12T16:28:18Z [verbose] ADD finished CNI request ContainerID:"1b850b36a6d06f6c415a15e0f884b2a0ab03b742b3c4517bf1f07e3c6aaca997" Netns:"/var/run/netns/69289e11-bf03-4a1b-bfb8-b84474eb9fd1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-1-build;K8S_POD_INFRA_CONTAINER_ID=1b850b36a6d06f6c415a15e0f884b2a0ab03b742b3c4517bf1f07e3c6aaca997;K8S_POD_UID=48700ccb-8fc3-4b07-af36-f0ff8573dc6a" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"fe:4e:36:cf:1e:31\",\"name\":\"1b850b36a6d06f6\"},{\"mac\":\"0a:58:0a:d9:00:39\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/69289e11-bf03-4a1b-bfb8-b84474eb9fd1\"}],\"ips\":[{\"address\":\"10.217.0.57/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:28:26.630624712+00:00 stderr F 2025-12-12T16:28:26Z [verbose] DEL starting CNI request ContainerID:"1b850b36a6d06f6c415a15e0f884b2a0ab03b742b3c4517bf1f07e3c6aaca997" Netns:"/var/run/netns/69289e11-bf03-4a1b-bfb8-b84474eb9fd1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-1-build;K8S_POD_INFRA_CONTAINER_ID=1b850b36a6d06f6c415a15e0f884b2a0ab03b742b3c4517bf1f07e3c6aaca997;K8S_POD_UID=48700ccb-8fc3-4b07-af36-f0ff8573dc6a" Path:"" 2025-12-12T16:28:26.631333900+00:00 stderr F 2025-12-12T16:28:26Z [verbose] Del: service-telemetry:service-telemetry-framework-index-1-build:48700ccb-8fc3-4b07-af36-f0ff8573dc6a:ovn-kubernetes:eth0 
{"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:28:26.813089690+00:00 stderr F 2025-12-12T16:28:26Z [verbose] DEL finished CNI request ContainerID:"1b850b36a6d06f6c415a15e0f884b2a0ab03b742b3c4517bf1f07e3c6aaca997" Netns:"/var/run/netns/69289e11-bf03-4a1b-bfb8-b84474eb9fd1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-1-build;K8S_POD_INFRA_CONTAINER_ID=1b850b36a6d06f6c415a15e0f884b2a0ab03b742b3c4517bf1f07e3c6aaca997;K8S_POD_UID=48700ccb-8fc3-4b07-af36-f0ff8573dc6a" Path:"", result: "", err: 2025-12-12T16:28:40.710817944+00:00 stderr F 2025-12-12T16:28:40Z [verbose] ADD starting CNI request ContainerID:"64676e9c945d07cb91caa2a63ef6e06fc4232117e85b1e468f1091c7327252b2" Netns:"/var/run/netns/8228b333-2c34-428d-a4b8-1ea2923fcabc" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-2-build;K8S_POD_INFRA_CONTAINER_ID=64676e9c945d07cb91caa2a63ef6e06fc4232117e85b1e468f1091c7327252b2;K8S_POD_UID=a0bdf470-1147-4c82-95ba-9d4b8c87f076" Path:"" 2025-12-12T16:28:41.085163449+00:00 stderr F I1212 16:28:41.075111 28974 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:28:41.085163449+00:00 stderr F I1212 16:28:41.075648 28974 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:28:41.085163449+00:00 stderr F I1212 16:28:41.075669 28974 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:28:41.085163449+00:00 stderr F I1212 16:28:41.075680 28974 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:28:41.085163449+00:00 stderr F I1212 16:28:41.075691 28974 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:28:41.085820215+00:00 stderr F 2025-12-12T16:28:41Z [verbose] Add: service-telemetry:service-telemetry-framework-index-2-build:a0bdf470-1147-4c82-95ba-9d4b8c87f076:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"64676e9c945d07c","mac":"9e:e8:78:ff:6d:ba"},{"name":"eth0","mac":"0a:58:0a:d9:00:3c","sandbox":"/var/run/netns/8228b333-2c34-428d-a4b8-1ea2923fcabc"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.60/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:28:41.086119433+00:00 stderr F I1212 16:28:41.086035 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"service-telemetry", Name:"service-telemetry-framework-index-2-build", UID:"a0bdf470-1147-4c82-95ba-9d4b8c87f076", APIVersion:"v1", ResourceVersion:"43754", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.60/23] from ovn-kubernetes 2025-12-12T16:28:41.105913184+00:00 stderr F 2025-12-12T16:28:41Z [verbose] ADD finished CNI request ContainerID:"64676e9c945d07cb91caa2a63ef6e06fc4232117e85b1e468f1091c7327252b2" Netns:"/var/run/netns/8228b333-2c34-428d-a4b8-1ea2923fcabc" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-2-build;K8S_POD_INFRA_CONTAINER_ID=64676e9c945d07cb91caa2a63ef6e06fc4232117e85b1e468f1091c7327252b2;K8S_POD_UID=a0bdf470-1147-4c82-95ba-9d4b8c87f076" Path:"", result: 
"{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"9e:e8:78:ff:6d:ba\",\"name\":\"64676e9c945d07c\"},{\"mac\":\"0a:58:0a:d9:00:3c\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/8228b333-2c34-428d-a4b8-1ea2923fcabc\"}],\"ips\":[{\"address\":\"10.217.0.60/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:28:59.608681502+00:00 stderr F 2025-12-12T16:28:59Z [verbose] DEL starting CNI request ContainerID:"64676e9c945d07cb91caa2a63ef6e06fc4232117e85b1e468f1091c7327252b2" Netns:"/var/run/netns/8228b333-2c34-428d-a4b8-1ea2923fcabc" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-2-build;K8S_POD_INFRA_CONTAINER_ID=64676e9c945d07cb91caa2a63ef6e06fc4232117e85b1e468f1091c7327252b2;K8S_POD_UID=a0bdf470-1147-4c82-95ba-9d4b8c87f076" Path:"" 2025-12-12T16:28:59.609575374+00:00 stderr F 2025-12-12T16:28:59Z [verbose] Del: service-telemetry:service-telemetry-framework-index-2-build:a0bdf470-1147-4c82-95ba-9d4b8c87f076:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:28:59.745396422+00:00 stderr F 2025-12-12T16:28:59Z [verbose] DEL finished CNI request ContainerID:"64676e9c945d07cb91caa2a63ef6e06fc4232117e85b1e468f1091c7327252b2" Netns:"/var/run/netns/8228b333-2c34-428d-a4b8-1ea2923fcabc" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-2-build;K8S_POD_INFRA_CONTAINER_ID=64676e9c945d07cb91caa2a63ef6e06fc4232117e85b1e468f1091c7327252b2;K8S_POD_UID=a0bdf470-1147-4c82-95ba-9d4b8c87f076" Path:"", result: "", err: 2025-12-12T16:29:03.667417215+00:00 stderr F 2025-12-12T16:29:03Z [verbose] ADD starting CNI request ContainerID:"22a3d8dbda1abf7ee9c885dfd7e1ec87a4a8e2d57b94763dda6f0c1b4996ba68" Netns:"/var/run/netns/7a97b3d3-c7d4-417a-bc94-5f2d0e56b810" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-3-build;K8S_POD_INFRA_CONTAINER_ID=22a3d8dbda1abf7ee9c885dfd7e1ec87a4a8e2d57b94763dda6f0c1b4996ba68;K8S_POD_UID=cc0b2c0b-41d6-47c9-9812-9f70b101293e" Path:"" 2025-12-12T16:29:03.821571337+00:00 stderr F I1212 16:29:03.816566 29376 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:29:03.821571337+00:00 stderr F I1212 16:29:03.816983 29376 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:29:03.821571337+00:00 stderr F I1212 16:29:03.817010 29376 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:29:03.821571337+00:00 stderr F I1212 16:29:03.817019 29376 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:29:03.821571337+00:00 stderr F I1212 16:29:03.817028 29376 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:29:03.822113011+00:00 stderr F 2025-12-12T16:29:03Z [verbose] Add: service-telemetry:service-telemetry-framework-index-3-build:cc0b2c0b-41d6-47c9-9812-9f70b101293e:ovn-kubernetes(ovn-kubernetes):eth0 
{"cniVersion":"0.4.0","interfaces":[{"name":"22a3d8dbda1abf7","mac":"76:44:f1:d1:d6:8e"},{"name":"eth0","mac":"0a:58:0a:d9:00:3d","sandbox":"/var/run/netns/7a97b3d3-c7d4-417a-bc94-5f2d0e56b810"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.61/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:29:03.822326096+00:00 stderr F I1212 16:29:03.822279 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"service-telemetry", Name:"service-telemetry-framework-index-3-build", UID:"cc0b2c0b-41d6-47c9-9812-9f70b101293e", APIVersion:"v1", ResourceVersion:"43869", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.61/23] from ovn-kubernetes 2025-12-12T16:29:03.851301109+00:00 stderr F 2025-12-12T16:29:03Z [verbose] ADD finished CNI request ContainerID:"22a3d8dbda1abf7ee9c885dfd7e1ec87a4a8e2d57b94763dda6f0c1b4996ba68" Netns:"/var/run/netns/7a97b3d3-c7d4-417a-bc94-5f2d0e56b810" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-3-build;K8S_POD_INFRA_CONTAINER_ID=22a3d8dbda1abf7ee9c885dfd7e1ec87a4a8e2d57b94763dda6f0c1b4996ba68;K8S_POD_UID=cc0b2c0b-41d6-47c9-9812-9f70b101293e" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"76:44:f1:d1:d6:8e\",\"name\":\"22a3d8dbda1abf7\"},{\"mac\":\"0a:58:0a:d9:00:3d\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/7a97b3d3-c7d4-417a-bc94-5f2d0e56b810\"}],\"ips\":[{\"address\":\"10.217.0.61/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:29:08.602044734+00:00 stderr F 2025-12-12T16:29:08Z [verbose] DEL starting CNI request ContainerID:"22a3d8dbda1abf7ee9c885dfd7e1ec87a4a8e2d57b94763dda6f0c1b4996ba68" Netns:"/var/run/netns/7a97b3d3-c7d4-417a-bc94-5f2d0e56b810" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-3-build;K8S_POD_INFRA_CONTAINER_ID=22a3d8dbda1abf7ee9c885dfd7e1ec87a4a8e2d57b94763dda6f0c1b4996ba68;K8S_POD_UID=cc0b2c0b-41d6-47c9-9812-9f70b101293e" Path:"" 2025-12-12T16:29:08.602730391+00:00 stderr F 2025-12-12T16:29:08Z [verbose] Del: service-telemetry:service-telemetry-framework-index-3-build:cc0b2c0b-41d6-47c9-9812-9f70b101293e:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:29:08.784271336+00:00 stderr F 2025-12-12T16:29:08Z [verbose] DEL finished CNI request ContainerID:"22a3d8dbda1abf7ee9c885dfd7e1ec87a4a8e2d57b94763dda6f0c1b4996ba68" Netns:"/var/run/netns/7a97b3d3-c7d4-417a-bc94-5f2d0e56b810" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-3-build;K8S_POD_INFRA_CONTAINER_ID=22a3d8dbda1abf7ee9c885dfd7e1ec87a4a8e2d57b94763dda6f0c1b4996ba68;K8S_POD_UID=cc0b2c0b-41d6-47c9-9812-9f70b101293e" Path:"", result: "", err: 2025-12-12T16:29:19.475415462+00:00 stderr F 2025-12-12T16:29:19Z [verbose] ADD starting CNI request ContainerID:"e72c026b7694679c3c7620432ab3882148147f8b41516626acda65a5bf9bd710" Netns:"/var/run/netns/cce66837-1c17-4774-975f-974d3c3bee60" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-4-build;K8S_POD_INFRA_CONTAINER_ID=e72c026b7694679c3c7620432ab3882148147f8b41516626acda65a5bf9bd710;K8S_POD_UID=df3b5ffb-d260-40e8-bd13-ce6656fca9e0" 
Path:"" 2025-12-12T16:29:19.628573943+00:00 stderr F I1212 16:29:19.622870 29670 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:29:19.628573943+00:00 stderr F I1212 16:29:19.623513 29670 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:29:19.628573943+00:00 stderr F I1212 16:29:19.623551 29670 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:29:19.628573943+00:00 stderr F I1212 16:29:19.623574 29670 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:29:19.628573943+00:00 stderr F I1212 16:29:19.623593 29670 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:29:19.629095527+00:00 stderr F 2025-12-12T16:29:19Z [verbose] Add: service-telemetry:service-telemetry-framework-index-4-build:df3b5ffb-d260-40e8-bd13-ce6656fca9e0:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"e72c026b7694679","mac":"16:0a:16:df:3c:ad"},{"name":"eth0","mac":"0a:58:0a:d9:00:3e","sandbox":"/var/run/netns/cce66837-1c17-4774-975f-974d3c3bee60"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.62/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:29:19.629305852+00:00 stderr F I1212 16:29:19.629258 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"service-telemetry", Name:"service-telemetry-framework-index-4-build", UID:"df3b5ffb-d260-40e8-bd13-ce6656fca9e0", APIVersion:"v1", ResourceVersion:"43923", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.62/23] from ovn-kubernetes 2025-12-12T16:29:19.650621329+00:00 stderr F 2025-12-12T16:29:19Z [verbose] ADD finished CNI request ContainerID:"e72c026b7694679c3c7620432ab3882148147f8b41516626acda65a5bf9bd710" Netns:"/var/run/netns/cce66837-1c17-4774-975f-974d3c3bee60" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-4-build;K8S_POD_INFRA_CONTAINER_ID=e72c026b7694679c3c7620432ab3882148147f8b41516626acda65a5bf9bd710;K8S_POD_UID=df3b5ffb-d260-40e8-bd13-ce6656fca9e0" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"16:0a:16:df:3c:ad\",\"name\":\"e72c026b7694679\"},{\"mac\":\"0a:58:0a:d9:00:3e\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/cce66837-1c17-4774-975f-974d3c3bee60\"}],\"ips\":[{\"address\":\"10.217.0.62/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:29:23.135318091+00:00 stderr F 2025-12-12T16:29:23Z [verbose] DEL starting CNI request ContainerID:"e72c026b7694679c3c7620432ab3882148147f8b41516626acda65a5bf9bd710" Netns:"/var/run/netns/cce66837-1c17-4774-975f-974d3c3bee60" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-4-build;K8S_POD_INFRA_CONTAINER_ID=e72c026b7694679c3c7620432ab3882148147f8b41516626acda65a5bf9bd710;K8S_POD_UID=df3b5ffb-d260-40e8-bd13-ce6656fca9e0" Path:"" 2025-12-12T16:29:23.136150622+00:00 stderr F 2025-12-12T16:29:23Z [verbose] Del: service-telemetry:service-telemetry-framework-index-4-build:df3b5ffb-d260-40e8-bd13-ce6656fca9e0:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 
2025-12-12T16:29:23.310125778+00:00 stderr F 2025-12-12T16:29:23Z [verbose] DEL finished CNI request ContainerID:"e72c026b7694679c3c7620432ab3882148147f8b41516626acda65a5bf9bd710" Netns:"/var/run/netns/cce66837-1c17-4774-975f-974d3c3bee60" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=service-telemetry-framework-index-4-build;K8S_POD_INFRA_CONTAINER_ID=e72c026b7694679c3c7620432ab3882148147f8b41516626acda65a5bf9bd710;K8S_POD_UID=df3b5ffb-d260-40e8-bd13-ce6656fca9e0" Path:"", result: "", err: 2025-12-12T16:29:23.622990235+00:00 stderr F 2025-12-12T16:29:23Z [verbose] ADD starting CNI request ContainerID:"05fb2c827c084b307ef59d3eee3d07f35dabc8eee4b06b1b1986e20b9cedeeb2" Netns:"/var/run/netns/fe210992-63c7-4688-8778-427ec0856e10" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=infrawatch-operators-cj72z;K8S_POD_INFRA_CONTAINER_ID=05fb2c827c084b307ef59d3eee3d07f35dabc8eee4b06b1b1986e20b9cedeeb2;K8S_POD_UID=896500d1-8185-4d67-9e0d-c837eba1a9d1" Path:"" 2025-12-12T16:29:23.997347933+00:00 stderr F I1212 16:29:23.991123 29856 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:29:23.997347933+00:00 stderr F I1212 16:29:23.991604 29856 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:29:23.997347933+00:00 stderr F I1212 16:29:23.991617 29856 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:29:23.997347933+00:00 stderr F I1212 16:29:23.991623 29856 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:29:23.997347933+00:00 stderr F I1212 16:29:23.991630 29856 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:29:23.997700212+00:00 stderr F 2025-12-12T16:29:23Z [verbose] Add: service-telemetry:infrawatch-operators-cj72z:896500d1-8185-4d67-9e0d-c837eba1a9d1:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"05fb2c827c084b3","mac":"86:4d:50:6a:5e:c5"},{"name":"eth0","mac":"0a:58:0a:d9:00:3f","sandbox":"/var/run/netns/fe210992-63c7-4688-8778-427ec0856e10"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.63/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:29:23.998153604+00:00 stderr F I1212 16:29:23.998071 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"service-telemetry", Name:"infrawatch-operators-cj72z", UID:"896500d1-8185-4d67-9e0d-c837eba1a9d1", APIVersion:"v1", ResourceVersion:"43962", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.63/23] from ovn-kubernetes 2025-12-12T16:29:24.011232803+00:00 stderr F 2025-12-12T16:29:24Z [verbose] ADD finished CNI request ContainerID:"05fb2c827c084b307ef59d3eee3d07f35dabc8eee4b06b1b1986e20b9cedeeb2" Netns:"/var/run/netns/fe210992-63c7-4688-8778-427ec0856e10" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=infrawatch-operators-cj72z;K8S_POD_INFRA_CONTAINER_ID=05fb2c827c084b307ef59d3eee3d07f35dabc8eee4b06b1b1986e20b9cedeeb2;K8S_POD_UID=896500d1-8185-4d67-9e0d-c837eba1a9d1" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"86:4d:50:6a:5e:c5\",\"name\":\"05fb2c827c084b3\"},{\"mac\":\"0a:58:0a:d9:00:3f\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/fe210992-63c7-4688-8778-427ec0856e10\"}],\"ips\":[{\"address\":\"10.217.0.63/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 
2025-12-12T16:29:27.869363070+00:00 stderr F 2025-12-12T16:29:27Z [verbose] DEL starting CNI request ContainerID:"05fb2c827c084b307ef59d3eee3d07f35dabc8eee4b06b1b1986e20b9cedeeb2" Netns:"/var/run/netns/fe210992-63c7-4688-8778-427ec0856e10" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=infrawatch-operators-cj72z;K8S_POD_INFRA_CONTAINER_ID=05fb2c827c084b307ef59d3eee3d07f35dabc8eee4b06b1b1986e20b9cedeeb2;K8S_POD_UID=896500d1-8185-4d67-9e0d-c837eba1a9d1" Path:"" 2025-12-12T16:29:27.870067827+00:00 stderr F 2025-12-12T16:29:27Z [verbose] Del: service-telemetry:infrawatch-operators-cj72z:896500d1-8185-4d67-9e0d-c837eba1a9d1:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:29:28.058025546+00:00 stderr F 2025-12-12T16:29:28Z [verbose] DEL finished CNI request ContainerID:"05fb2c827c084b307ef59d3eee3d07f35dabc8eee4b06b1b1986e20b9cedeeb2" Netns:"/var/run/netns/fe210992-63c7-4688-8778-427ec0856e10" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=infrawatch-operators-cj72z;K8S_POD_INFRA_CONTAINER_ID=05fb2c827c084b307ef59d3eee3d07f35dabc8eee4b06b1b1986e20b9cedeeb2;K8S_POD_UID=896500d1-8185-4d67-9e0d-c837eba1a9d1" Path:"", result: "", err: 2025-12-12T16:29:29.076887031+00:00 stderr F 2025-12-12T16:29:29Z [verbose] ADD starting CNI request ContainerID:"264c7342999e104f19f407c20d616e113a7b10528040fa13b52d8cb14847e428" Netns:"/var/run/netns/1f102c0e-b89f-4284-8a69-478a57d702fe" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=infrawatch-operators-cdpts;K8S_POD_INFRA_CONTAINER_ID=264c7342999e104f19f407c20d616e113a7b10528040fa13b52d8cb14847e428;K8S_POD_UID=eeed1a9b-f386-4d11-b730-03bcb44f9a55" Path:"" 2025-12-12T16:29:29.205276558+00:00 stderr F I1212 16:29:29.199613 30016 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:29:29.205276558+00:00 stderr F I1212 16:29:29.200099 30016 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:29:29.205276558+00:00 stderr F I1212 16:29:29.200117 30016 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:29:29.205276558+00:00 stderr F I1212 16:29:29.200126 30016 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:29:29.205276558+00:00 stderr F I1212 16:29:29.200133 30016 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:29:29.205848323+00:00 stderr F 2025-12-12T16:29:29Z [verbose] Add: service-telemetry:infrawatch-operators-cdpts:eeed1a9b-f386-4d11-b730-03bcb44f9a55:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"264c7342999e104","mac":"ce:5a:4c:8a:7a:7c"},{"name":"eth0","mac":"0a:58:0a:d9:00:41","sandbox":"/var/run/netns/1f102c0e-b89f-4284-8a69-478a57d702fe"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.65/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:29:29.206144930+00:00 stderr F I1212 16:29:29.206074 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"service-telemetry", Name:"infrawatch-operators-cdpts", UID:"eeed1a9b-f386-4d11-b730-03bcb44f9a55", APIVersion:"v1", ResourceVersion:"44023", FieldPath:""}): type: 
'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.65/23] from ovn-kubernetes 2025-12-12T16:29:29.218957783+00:00 stderr F 2025-12-12T16:29:29Z [verbose] ADD finished CNI request ContainerID:"264c7342999e104f19f407c20d616e113a7b10528040fa13b52d8cb14847e428" Netns:"/var/run/netns/1f102c0e-b89f-4284-8a69-478a57d702fe" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=infrawatch-operators-cdpts;K8S_POD_INFRA_CONTAINER_ID=264c7342999e104f19f407c20d616e113a7b10528040fa13b52d8cb14847e428;K8S_POD_UID=eeed1a9b-f386-4d11-b730-03bcb44f9a55" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"ce:5a:4c:8a:7a:7c\",\"name\":\"264c7342999e104\"},{\"mac\":\"0a:58:0a:d9:00:41\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/1f102c0e-b89f-4284-8a69-478a57d702fe\"}],\"ips\":[{\"address\":\"10.217.0.65/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:30:00.577695733+00:00 stderr F 2025-12-12T16:30:00Z [verbose] ADD starting CNI request ContainerID:"766e3243f51588112a816c6f02f6d7f3501538eef1dfb32bee7ccca7116521e4" Netns:"/var/run/netns/f537cdbb-5806-4230-bf36-bd70b094abfc" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=collect-profiles-29425950-g52jh;K8S_POD_INFRA_CONTAINER_ID=766e3243f51588112a816c6f02f6d7f3501538eef1dfb32bee7ccca7116521e4;K8S_POD_UID=ab30f5e0-5097-4413-bb3e-fe8ca350378f" Path:"" 2025-12-12T16:30:00.950000259+00:00 stderr F I1212 16:30:00.943750 30592 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:30:00.950000259+00:00 stderr F I1212 16:30:00.944290 30592 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:30:00.950000259+00:00 stderr F I1212 16:30:00.944305 30592 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:30:00.950000259+00:00 stderr F I1212 16:30:00.944311 30592 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:30:00.950000259+00:00 stderr F I1212 16:30:00.944317 30592 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:30:00.950718507+00:00 stderr F 2025-12-12T16:30:00Z [verbose] Add: openshift-operator-lifecycle-manager:collect-profiles-29425950-g52jh:ab30f5e0-5097-4413-bb3e-fe8ca350378f:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"766e3243f515881","mac":"ae:8b:9e:8a:35:4d"},{"name":"eth0","mac":"0a:58:0a:d9:00:42","sandbox":"/var/run/netns/f537cdbb-5806-4230-bf36-bd70b094abfc"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.66/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:30:00.950974064+00:00 stderr F I1212 16:30:00.950881 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-operator-lifecycle-manager", Name:"collect-profiles-29425950-g52jh", UID:"ab30f5e0-5097-4413-bb3e-fe8ca350378f", APIVersion:"v1", ResourceVersion:"44152", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.66/23] from ovn-kubernetes 2025-12-12T16:30:00.963807327+00:00 stderr F 2025-12-12T16:30:00Z [verbose] ADD finished CNI request ContainerID:"766e3243f51588112a816c6f02f6d7f3501538eef1dfb32bee7ccca7116521e4" Netns:"/var/run/netns/f537cdbb-5806-4230-bf36-bd70b094abfc" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=collect-profiles-29425950-g52jh;K8S_POD_INFRA_CONTAINER_ID=766e3243f51588112a816c6f02f6d7f3501538eef1dfb32bee7ccca7116521e4;K8S_POD_UID=ab30f5e0-5097-4413-bb3e-fe8ca350378f" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"ae:8b:9e:8a:35:4d\",\"name\":\"766e3243f515881\"},{\"mac\":\"0a:58:0a:d9:00:42\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/f537cdbb-5806-4230-bf36-bd70b094abfc\"}],\"ips\":[{\"address\":\"10.217.0.66/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:30:03.256413276+00:00 stderr F 2025-12-12T16:30:03Z [verbose] DEL starting CNI request ContainerID:"766e3243f51588112a816c6f02f6d7f3501538eef1dfb32bee7ccca7116521e4" Netns:"/var/run/netns/f537cdbb-5806-4230-bf36-bd70b094abfc" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=collect-profiles-29425950-g52jh;K8S_POD_INFRA_CONTAINER_ID=766e3243f51588112a816c6f02f6d7f3501538eef1dfb32bee7ccca7116521e4;K8S_POD_UID=ab30f5e0-5097-4413-bb3e-fe8ca350378f" Path:"" 2025-12-12T16:30:03.257130844+00:00 stderr F 2025-12-12T16:30:03Z [verbose] Del: openshift-operator-lifecycle-manager:collect-profiles-29425950-g52jh:ab30f5e0-5097-4413-bb3e-fe8ca350378f:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:30:03.425018106+00:00 stderr F 2025-12-12T16:30:03Z [verbose] DEL finished CNI request ContainerID:"766e3243f51588112a816c6f02f6d7f3501538eef1dfb32bee7ccca7116521e4" Netns:"/var/run/netns/f537cdbb-5806-4230-bf36-bd70b094abfc" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=collect-profiles-29425950-g52jh;K8S_POD_INFRA_CONTAINER_ID=766e3243f51588112a816c6f02f6d7f3501538eef1dfb32bee7ccca7116521e4;K8S_POD_UID=ab30f5e0-5097-4413-bb3e-fe8ca350378f" Path:"", result: "", err: 2025-12-12T16:34:26.086025458+00:00 stderr F 2025-12-12T16:34:26Z [verbose] ADD starting CNI request ContainerID:"32f2e1d4d60ac82efd45ae71478a261aa3a0041bccea7ebd07ee5c3e2380871a" Netns:"/var/run/netns/381dcd4c-149e-416e-b60b-5cbcd18e560c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=infrawatch-operators-6bs58;K8S_POD_INFRA_CONTAINER_ID=32f2e1d4d60ac82efd45ae71478a261aa3a0041bccea7ebd07ee5c3e2380871a;K8S_POD_UID=6510d065-e486-4274-a8ca-4c2cdb8dd1ae" Path:"" 2025-12-12T16:34:26.257990245+00:00 stderr F I1212 16:34:26.250802 34293 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:34:26.257990245+00:00 stderr F I1212 16:34:26.251640 34293 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:34:26.257990245+00:00 stderr F I1212 16:34:26.251667 34293 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:34:26.257990245+00:00 stderr F I1212 16:34:26.251677 34293 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:34:26.257990245+00:00 stderr F I1212 16:34:26.251703 34293 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:34:26.258689463+00:00 stderr F 2025-12-12T16:34:26Z [verbose] Add: 
service-telemetry:infrawatch-operators-6bs58:6510d065-e486-4274-a8ca-4c2cdb8dd1ae:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"32f2e1d4d60ac82","mac":"1e:fa:17:a1:8f:d7"},{"name":"eth0","mac":"0a:58:0a:d9:00:43","sandbox":"/var/run/netns/381dcd4c-149e-416e-b60b-5cbcd18e560c"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.67/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:34:26.258919238+00:00 stderr F I1212 16:34:26.258842 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"service-telemetry", Name:"infrawatch-operators-6bs58", UID:"6510d065-e486-4274-a8ca-4c2cdb8dd1ae", APIVersion:"v1", ResourceVersion:"44792", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.67/23] from ovn-kubernetes 2025-12-12T16:34:26.272612391+00:00 stderr F 2025-12-12T16:34:26Z [verbose] ADD finished CNI request ContainerID:"32f2e1d4d60ac82efd45ae71478a261aa3a0041bccea7ebd07ee5c3e2380871a" Netns:"/var/run/netns/381dcd4c-149e-416e-b60b-5cbcd18e560c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=service-telemetry;K8S_POD_NAME=infrawatch-operators-6bs58;K8S_POD_INFRA_CONTAINER_ID=32f2e1d4d60ac82efd45ae71478a261aa3a0041bccea7ebd07ee5c3e2380871a;K8S_POD_UID=6510d065-e486-4274-a8ca-4c2cdb8dd1ae" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"1e:fa:17:a1:8f:d7\",\"name\":\"32f2e1d4d60ac82\"},{\"mac\":\"0a:58:0a:d9:00:43\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/381dcd4c-149e-416e-b60b-5cbcd18e560c\"}],\"ips\":[{\"address\":\"10.217.0.67/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:37:14.111812070+00:00 stderr F 2025-12-12T16:37:14Z [verbose] ADD starting CNI request ContainerID:"d1d4d05b754478a44a46557114554ff8d33446539ed1f76bfa1f327c87d9adee" Netns:"/var/run/netns/2353c3f3-e36a-4aa5-a6db-a57c10ce6a78" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-h46w2;K8S_POD_INFRA_CONTAINER_ID=d1d4d05b754478a44a46557114554ff8d33446539ed1f76bfa1f327c87d9adee;K8S_POD_UID=29b869ed-f7d2-4b6d-851f-b5e4d95c08c2" Path:"" 2025-12-12T16:37:14.254254969+00:00 stderr F I1212 16:37:14.247537 36623 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:37:14.254254969+00:00 stderr F I1212 16:37:14.248007 36623 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:37:14.254254969+00:00 stderr F I1212 16:37:14.248015 36623 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:37:14.254254969+00:00 stderr F I1212 16:37:14.248022 36623 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:37:14.254254969+00:00 stderr F I1212 16:37:14.248028 36623 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:37:14.254599608+00:00 stderr F 2025-12-12T16:37:14Z [verbose] Add: openshift-marketplace:certified-operators-h46w2:29b869ed-f7d2-4b6d-851f-b5e4d95c08c2:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"d1d4d05b754478a","mac":"fa:3b:ae:70:da:98"},{"name":"eth0","mac":"0a:58:0a:d9:00:44","sandbox":"/var/run/netns/2353c3f3-e36a-4aa5-a6db-a57c10ce6a78"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.68/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:37:14.254810583+00:00 stderr F I1212 16:37:14.254747 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", 
Namespace:"openshift-marketplace", Name:"certified-operators-h46w2", UID:"29b869ed-f7d2-4b6d-851f-b5e4d95c08c2", APIVersion:"v1", ResourceVersion:"45213", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.68/23] from ovn-kubernetes 2025-12-12T16:37:14.268003644+00:00 stderr F 2025-12-12T16:37:14Z [verbose] ADD finished CNI request ContainerID:"d1d4d05b754478a44a46557114554ff8d33446539ed1f76bfa1f327c87d9adee" Netns:"/var/run/netns/2353c3f3-e36a-4aa5-a6db-a57c10ce6a78" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-h46w2;K8S_POD_INFRA_CONTAINER_ID=d1d4d05b754478a44a46557114554ff8d33446539ed1f76bfa1f327c87d9adee;K8S_POD_UID=29b869ed-f7d2-4b6d-851f-b5e4d95c08c2" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"fa:3b:ae:70:da:98\",\"name\":\"d1d4d05b754478a\"},{\"mac\":\"0a:58:0a:d9:00:44\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/2353c3f3-e36a-4aa5-a6db-a57c10ce6a78\"}],\"ips\":[{\"address\":\"10.217.0.68/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:37:23.484749929+00:00 stderr F 2025-12-12T16:37:23Z [verbose] ADD starting CNI request ContainerID:"970df140b69f5a985f67d38cf7877d73e620bd664c49cbc41174b5a595b00dcf" Netns:"/var/run/netns/7076d4fa-3a30-4825-a0db-02993a5b08ba" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-k5p4x;K8S_POD_INFRA_CONTAINER_ID=970df140b69f5a985f67d38cf7877d73e620bd664c49cbc41174b5a595b00dcf;K8S_POD_UID=6e1befc6-b980-4afa-ab59-48293a764532" Path:"" 2025-12-12T16:37:23.648986436+00:00 stderr F I1212 16:37:23.642558 36842 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:37:23.648986436+00:00 stderr F I1212 16:37:23.643158 36842 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:37:23.648986436+00:00 stderr F I1212 16:37:23.643171 36842 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:37:23.648986436+00:00 stderr F I1212 16:37:23.643196 36842 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:37:23.648986436+00:00 stderr F I1212 16:37:23.643207 36842 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:37:23.649607951+00:00 stderr F 2025-12-12T16:37:23Z [verbose] Add: openshift-marketplace:redhat-operators-k5p4x:6e1befc6-b980-4afa-ab59-48293a764532:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"970df140b69f5a9","mac":"62:a5:4c:7f:21:0e"},{"name":"eth0","mac":"0a:58:0a:d9:00:4a","sandbox":"/var/run/netns/7076d4fa-3a30-4825-a0db-02993a5b08ba"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.74/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:37:23.649843907+00:00 stderr F I1212 16:37:23.649802 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"redhat-operators-k5p4x", UID:"6e1befc6-b980-4afa-ab59-48293a764532", APIVersion:"v1", ResourceVersion:"45251", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.74/23] from ovn-kubernetes 2025-12-12T16:37:23.669840100+00:00 stderr F 2025-12-12T16:37:23Z [verbose] ADD finished CNI request ContainerID:"970df140b69f5a985f67d38cf7877d73e620bd664c49cbc41174b5a595b00dcf" Netns:"/var/run/netns/7076d4fa-3a30-4825-a0db-02993a5b08ba" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-k5p4x;K8S_POD_INFRA_CONTAINER_ID=970df140b69f5a985f67d38cf7877d73e620bd664c49cbc41174b5a595b00dcf;K8S_POD_UID=6e1befc6-b980-4afa-ab59-48293a764532" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"62:a5:4c:7f:21:0e\",\"name\":\"970df140b69f5a9\"},{\"mac\":\"0a:58:0a:d9:00:4a\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/7076d4fa-3a30-4825-a0db-02993a5b08ba\"}],\"ips\":[{\"address\":\"10.217.0.74/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:37:26.899681831+00:00 stderr F 2025-12-12T16:37:26Z [verbose] DEL starting CNI request ContainerID:"d1d4d05b754478a44a46557114554ff8d33446539ed1f76bfa1f327c87d9adee" Netns:"/var/run/netns/2353c3f3-e36a-4aa5-a6db-a57c10ce6a78" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-h46w2;K8S_POD_INFRA_CONTAINER_ID=d1d4d05b754478a44a46557114554ff8d33446539ed1f76bfa1f327c87d9adee;K8S_POD_UID=29b869ed-f7d2-4b6d-851f-b5e4d95c08c2" Path:"" 2025-12-12T16:37:26.900342798+00:00 stderr F 2025-12-12T16:37:26Z [verbose] Del: openshift-marketplace:certified-operators-h46w2:29b869ed-f7d2-4b6d-851f-b5e4d95c08c2:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:37:27.158282018+00:00 stderr F 2025-12-12T16:37:27Z [verbose] DEL finished CNI request ContainerID:"d1d4d05b754478a44a46557114554ff8d33446539ed1f76bfa1f327c87d9adee" Netns:"/var/run/netns/2353c3f3-e36a-4aa5-a6db-a57c10ce6a78" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-h46w2;K8S_POD_INFRA_CONTAINER_ID=d1d4d05b754478a44a46557114554ff8d33446539ed1f76bfa1f327c87d9adee;K8S_POD_UID=29b869ed-f7d2-4b6d-851f-b5e4d95c08c2" Path:"", result: "", err: 2025-12-12T16:37:39.703337509+00:00 stderr F 2025-12-12T16:37:39Z [verbose] DEL starting CNI request ContainerID:"970df140b69f5a985f67d38cf7877d73e620bd664c49cbc41174b5a595b00dcf" Netns:"/var/run/netns/7076d4fa-3a30-4825-a0db-02993a5b08ba" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-k5p4x;K8S_POD_INFRA_CONTAINER_ID=970df140b69f5a985f67d38cf7877d73e620bd664c49cbc41174b5a595b00dcf;K8S_POD_UID=6e1befc6-b980-4afa-ab59-48293a764532" Path:"" 2025-12-12T16:37:39.703337509+00:00 stderr F 2025-12-12T16:37:39Z [verbose] Del: openshift-marketplace:redhat-operators-k5p4x:6e1befc6-b980-4afa-ab59-48293a764532:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:37:39.867636687+00:00 stderr F 2025-12-12T16:37:39Z [verbose] DEL finished CNI request ContainerID:"970df140b69f5a985f67d38cf7877d73e620bd664c49cbc41174b5a595b00dcf" Netns:"/var/run/netns/7076d4fa-3a30-4825-a0db-02993a5b08ba" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-k5p4x;K8S_POD_INFRA_CONTAINER_ID=970df140b69f5a985f67d38cf7877d73e620bd664c49cbc41174b5a595b00dcf;K8S_POD_UID=6e1befc6-b980-4afa-ab59-48293a764532" Path:"", result: "", err: 2025-12-12T16:38:10.781736160+00:00 stderr 
F 2025-12-12T16:38:10Z [verbose] ADD starting CNI request ContainerID:"c4596c3be96f08edc7a76fc72bd094fbf2673d1da01776e82e1e95eb549bf0ce" Netns:"/var/run/netns/c018953c-a12e-4a48-b5b1-5b38149afdc5" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-4sccg;K8S_POD_INFRA_CONTAINER_ID=c4596c3be96f08edc7a76fc72bd094fbf2673d1da01776e82e1e95eb549bf0ce;K8S_POD_UID=4dcc42e3-b653-498b-8ca0-bcbf16d0b1de" Path:"" 2025-12-12T16:38:10.937685288+00:00 stderr F I1212 16:38:10.931322 37729 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:38:10.937685288+00:00 stderr F I1212 16:38:10.931831 37729 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:38:10.937685288+00:00 stderr F I1212 16:38:10.931852 37729 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:38:10.937685288+00:00 stderr F I1212 16:38:10.931861 37729 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:38:10.937685288+00:00 stderr F I1212 16:38:10.931869 37729 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:38:10.938111179+00:00 stderr F 2025-12-12T16:38:10Z [verbose] Add: openshift-marketplace:community-operators-4sccg:4dcc42e3-b653-498b-8ca0-bcbf16d0b1de:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"c4596c3be96f08e","mac":"a2:70:ac:e2:86:3c"},{"name":"eth0","mac":"0a:58:0a:d9:00:4c","sandbox":"/var/run/netns/c018953c-a12e-4a48-b5b1-5b38149afdc5"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.76/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:38:10.938394146+00:00 stderr F I1212 16:38:10.938319 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"community-operators-4sccg", UID:"4dcc42e3-b653-498b-8ca0-bcbf16d0b1de", APIVersion:"v1", ResourceVersion:"45402", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.76/23] from ovn-kubernetes 2025-12-12T16:38:10.956549282+00:00 stderr F 2025-12-12T16:38:10Z [verbose] ADD finished CNI request ContainerID:"c4596c3be96f08edc7a76fc72bd094fbf2673d1da01776e82e1e95eb549bf0ce" Netns:"/var/run/netns/c018953c-a12e-4a48-b5b1-5b38149afdc5" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-4sccg;K8S_POD_INFRA_CONTAINER_ID=c4596c3be96f08edc7a76fc72bd094fbf2673d1da01776e82e1e95eb549bf0ce;K8S_POD_UID=4dcc42e3-b653-498b-8ca0-bcbf16d0b1de" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"a2:70:ac:e2:86:3c\",\"name\":\"c4596c3be96f08e\"},{\"mac\":\"0a:58:0a:d9:00:4c\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/c018953c-a12e-4a48-b5b1-5b38149afdc5\"}],\"ips\":[{\"address\":\"10.217.0.76/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:38:27.118363620+00:00 stderr P 2025-12-12T16:38:27Z [verbose] 2025-12-12T16:38:27.118429952+00:00 stderr P DEL starting CNI request ContainerID:"c4596c3be96f08edc7a76fc72bd094fbf2673d1da01776e82e1e95eb549bf0ce" Netns:"/var/run/netns/c018953c-a12e-4a48-b5b1-5b38149afdc5" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-4sccg;K8S_POD_INFRA_CONTAINER_ID=c4596c3be96f08edc7a76fc72bd094fbf2673d1da01776e82e1e95eb549bf0ce;K8S_POD_UID=4dcc42e3-b653-498b-8ca0-bcbf16d0b1de" Path:"" 
2025-12-12T16:38:27.118450332+00:00 stderr F 2025-12-12T16:38:27.119142760+00:00 stderr P 2025-12-12T16:38:27Z [verbose] 2025-12-12T16:38:27.119164560+00:00 stderr P Del: openshift-marketplace:community-operators-4sccg:4dcc42e3-b653-498b-8ca0-bcbf16d0b1de:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:38:27.119201531+00:00 stderr F 2025-12-12T16:38:27.301373418+00:00 stderr P 2025-12-12T16:38:27Z [verbose] 2025-12-12T16:38:27.301435870+00:00 stderr P DEL finished CNI request ContainerID:"c4596c3be96f08edc7a76fc72bd094fbf2673d1da01776e82e1e95eb549bf0ce" Netns:"/var/run/netns/c018953c-a12e-4a48-b5b1-5b38149afdc5" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-4sccg;K8S_POD_INFRA_CONTAINER_ID=c4596c3be96f08edc7a76fc72bd094fbf2673d1da01776e82e1e95eb549bf0ce;K8S_POD_UID=4dcc42e3-b653-498b-8ca0-bcbf16d0b1de" Path:"", result: "", err: 2025-12-12T16:38:27.301455540+00:00 stderr F 2025-12-12T16:40:38.651363445+00:00 stderr F 2025-12-12T16:40:38Z [verbose] ADD starting CNI request ContainerID:"e61ff18407c4da69daabe52c3f069448d5b6b9470c5e8e023f00d37e559a8d95" Netns:"/var/run/netns/27ae0112-3440-465b-9836-745b63c3c0cf" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-must-gather-2sjxj;K8S_POD_NAME=must-gather-v4h5l;K8S_POD_INFRA_CONTAINER_ID=e61ff18407c4da69daabe52c3f069448d5b6b9470c5e8e023f00d37e559a8d95;K8S_POD_UID=e591e9a3-fc25-45d7-bb1b-84d59c92c39d" Path:"" 2025-12-12T16:40:39.005441912+00:00 stderr F I1212 16:40:38.995556 39973 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:40:39.005441912+00:00 stderr F I1212 16:40:38.996572 39973 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:40:39.005441912+00:00 stderr F I1212 16:40:38.996595 39973 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:40:39.005441912+00:00 stderr F I1212 16:40:38.996626 39973 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:40:39.005441912+00:00 stderr F I1212 16:40:38.996636 39973 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:40:39.006297283+00:00 stderr F 2025-12-12T16:40:39Z [verbose] Add: openshift-must-gather-2sjxj:must-gather-v4h5l:e591e9a3-fc25-45d7-bb1b-84d59c92c39d:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"e61ff18407c4da6","mac":"ea:05:b8:9c:5f:b8"},{"name":"eth0","mac":"0a:58:0a:d9:00:4d","sandbox":"/var/run/netns/27ae0112-3440-465b-9836-745b63c3c0cf"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.77/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:40:39.006534759+00:00 stderr F I1212 16:40:39.006472 23414 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-must-gather-2sjxj", Name:"must-gather-v4h5l", UID:"e591e9a3-fc25-45d7-bb1b-84d59c92c39d", APIVersion:"v1", ResourceVersion:"45788", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.77/23] from ovn-kubernetes 2025-12-12T16:40:39.021129086+00:00 stderr F 2025-12-12T16:40:39Z [verbose] ADD finished CNI request ContainerID:"e61ff18407c4da69daabe52c3f069448d5b6b9470c5e8e023f00d37e559a8d95" 
Netns:"/var/run/netns/27ae0112-3440-465b-9836-745b63c3c0cf" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-must-gather-2sjxj;K8S_POD_NAME=must-gather-v4h5l;K8S_POD_INFRA_CONTAINER_ID=e61ff18407c4da69daabe52c3f069448d5b6b9470c5e8e023f00d37e559a8d95;K8S_POD_UID=e591e9a3-fc25-45d7-bb1b-84d59c92c39d" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"ea:05:b8:9c:5f:b8\",\"name\":\"e61ff18407c4da6\"},{\"mac\":\"0a:58:0a:d9:00:4d\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/27ae0112-3440-465b-9836-745b63c3c0cf\"}],\"ips\":[{\"address\":\"10.217.0.77/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:43:04.028154126+00:00 stderr F 2025-12-12T16:43:04Z [verbose] DEL starting CNI request ContainerID:"e61ff18407c4da69daabe52c3f069448d5b6b9470c5e8e023f00d37e559a8d95" Netns:"/var/run/netns/27ae0112-3440-465b-9836-745b63c3c0cf" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-must-gather-2sjxj;K8S_POD_NAME=must-gather-v4h5l;K8S_POD_INFRA_CONTAINER_ID=e61ff18407c4da69daabe52c3f069448d5b6b9470c5e8e023f00d37e559a8d95;K8S_POD_UID=e591e9a3-fc25-45d7-bb1b-84d59c92c39d" Path:"" 2025-12-12T16:43:04.030444693+00:00 stderr F 2025-12-12T16:43:04Z [error] Multus: GetPod failed: pod not found during Multus GetPod, but continue to delete 2025-12-12T16:43:04.030444693+00:00 stderr F 2025-12-12T16:43:04Z [verbose] Del: openshift-must-gather-2sjxj:must-gather-v4h5l:unknownUID:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:43:04.180978706+00:00 stderr F 2025-12-12T16:43:04Z [verbose] DEL finished CNI request ContainerID:"e61ff18407c4da69daabe52c3f069448d5b6b9470c5e8e023f00d37e559a8d95" Netns:"/var/run/netns/27ae0112-3440-465b-9836-745b63c3c0cf" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-must-gather-2sjxj;K8S_POD_NAME=must-gather-v4h5l;K8S_POD_INFRA_CONTAINER_ID=e61ff18407c4da69daabe52c3f069448d5b6b9470c5e8e023f00d37e559a8d95;K8S_POD_UID=e591e9a3-fc25-45d7-bb1b-84d59c92c39d" Path:"", result: "", err:
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2/kube-multus/0.log
2025-12-12T16:16:23.591075845+00:00 stdout F 2025-12-12T16:16:23+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0c00b44c-8af6-4998-a555-eb6f827f55ff 2025-12-12T16:16:23.600324501+00:00 stdout F 2025-12-12T16:16:23+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0c00b44c-8af6-4998-a555-eb6f827f55ff to /host/opt/cni/bin/ 2025-12-12T16:16:23.697450702+00:00 stderr F 2025-12-12T16:16:23Z [verbose] multus-daemon started 2025-12-12T16:16:23.697450702+00:00 stderr F 2025-12-12T16:16:23Z [verbose] Readiness Indicator file check 2025-12-12T16:16:37.698721392+00:00 stderr F 2025-12-12T16:16:37Z [verbose] Readiness Indicator file check done!
2025-12-12T16:16:37.701022618+00:00 stderr F I1212 16:16:37.700942 6507 certificate_store.go:130] Loading cert/key pair from "/etc/cni/multus/certs/multus-client-current.pem". 2025-12-12T16:16:37.701812047+00:00 stderr F 2025-12-12T16:16:37Z [verbose] Waiting for certificate 2025-12-12T16:16:37.717240684+00:00 stderr F 2025-12-12T16:16:37Z [error] failed to list pods with new certs: pods is forbidden: User "system:node:crc" cannot list resource "pods" in API group "" at the cluster scope: can only list/watch pods with spec.nodeName field selector 2025-12-12T16:16:38.702902728+00:00 stderr F I1212 16:16:38.702812 6507 certificate_store.go:130] Loading cert/key pair from "/etc/cni/multus/certs/multus-client-current.pem". 2025-12-12T16:16:38.703606225+00:00 stderr P 2025-12-12T16:16:38Z [verbose] 2025-12-12T16:16:38.703743208+00:00 stderr F Certificate found! 2025-12-12T16:16:38.704408774+00:00 stderr F 2025-12-12T16:16:38Z [verbose] server configured with chroot: /hostroot 2025-12-12T16:16:38.704408774+00:00 stderr F 2025-12-12T16:16:38Z [verbose] Filtering pod watch for node "crc" 2025-12-12T16:16:38.806666881+00:00 stderr F 2025-12-12T16:16:38Z [verbose] API readiness check 2025-12-12T16:16:38.807452500+00:00 stderr F 2025-12-12T16:16:38Z [verbose] API readiness check done! 2025-12-12T16:16:38.807794898+00:00 stderr F 2025-12-12T16:16:38Z [verbose] Generated MultusCNI config: {"binDir":"/var/lib/cni/bin","cniVersion":"0.3.1","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","namespaceIsolation":true,"globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","type":"multus-shim","auxiliaryCNIChainName":"vendor-cni-chain","daemonSocketDir":"/run/multus/socket"} 2025-12-12T16:16:38.808016464+00:00 stderr F 2025-12-12T16:16:38Z [verbose] started to watch file /host/run/multus/cni/net.d/10-ovn-kubernetes.conf 2025-12-12T16:16:40.565802299+00:00 stderr F 2025-12-12T16:16:40Z [verbose] ADD starting CNI request ContainerID:"46d4077c2585bacb39949d15d21428f3e181029f32d32ad523532640b0b77944" Netns:"/var/run/netns/feaaab91-ceb7-4986-9e9c-30e59cc9f616" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-machine-api;K8S_POD_NAME=machine-api-operator-755bb95488-dmjfw;K8S_POD_INFRA_CONTAINER_ID=46d4077c2585bacb39949d15d21428f3e181029f32d32ad523532640b0b77944;K8S_POD_UID=0abafdd2-351e-4f65-9dea-5578d313b760" Path:"" 2025-12-12T16:16:40.568700690+00:00 stderr F 2025-12-12T16:16:40Z [verbose] ADD starting CNI request ContainerID:"18ae7415275d436b3001de65256fb1f46f23bff514599b79402add982df4c117" Netns:"/var/run/netns/95f428cf-4c1d-4043-a818-ab7cbc93961a" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-oauth-apiserver;K8S_POD_NAME=apiserver-8596bd845d-njgb5;K8S_POD_INFRA_CONTAINER_ID=18ae7415275d436b3001de65256fb1f46f23bff514599b79402add982df4c117;K8S_POD_UID=1bfafc57-4718-4d71-9f69-52b321379a27" Path:"" 2025-12-12T16:16:40.575532307+00:00 stderr F 2025-12-12T16:16:40Z [verbose] ADD starting CNI request ContainerID:"feea72f891d532e7dfe73c08b63e823fe57b57ede311144b739a5b42f15b970e" Netns:"/var/run/netns/00ba7519-5af3-4aae-9ff9-6c82a03fb866" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-apiserver-operator;K8S_POD_NAME=openshift-apiserver-operator-846cbfc458-zf8cv;K8S_POD_INFRA_CONTAINER_ID=feea72f891d532e7dfe73c08b63e823fe57b57ede311144b739a5b42f15b970e;K8S_POD_UID=e0a1decf-4248-4f48-ba06-e9ec8fdbbea8" Path:"" 2025-12-12T16:16:40.822593568+00:00 
stderr F I1212 16:16:40.816911 7603 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:40.822593568+00:00 stderr F I1212 16:16:40.817643 7603 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:40.822593568+00:00 stderr F I1212 16:16:40.817661 7603 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:40.822593568+00:00 stderr F I1212 16:16:40.817668 7603 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:40.822593568+00:00 stderr F I1212 16:16:40.817674 7603 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:40.823694105+00:00 stderr F 2025-12-12T16:16:40Z [verbose] Add: openshift-machine-api:machine-api-operator-755bb95488-dmjfw:0abafdd2-351e-4f65-9dea-5578d313b760:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"46d4077c2585bac","mac":"1a:a3:d1:15:87:bd"},{"name":"eth0","mac":"0a:58:0a:d9:00:05","sandbox":"/var/run/netns/feaaab91-ceb7-4986-9e9c-30e59cc9f616"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.5/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:40.824895195+00:00 stderr F I1212 16:16:40.824640 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-machine-api", Name:"machine-api-operator-755bb95488-dmjfw", UID:"0abafdd2-351e-4f65-9dea-5578d313b760", APIVersion:"v1", ResourceVersion:"36813", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.5/23] from ovn-kubernetes 2025-12-12T16:16:40.847856875+00:00 stderr F 2025-12-12T16:16:40Z [verbose] ADD finished CNI request ContainerID:"46d4077c2585bacb39949d15d21428f3e181029f32d32ad523532640b0b77944" Netns:"/var/run/netns/feaaab91-ceb7-4986-9e9c-30e59cc9f616" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-machine-api;K8S_POD_NAME=machine-api-operator-755bb95488-dmjfw;K8S_POD_INFRA_CONTAINER_ID=46d4077c2585bacb39949d15d21428f3e181029f32d32ad523532640b0b77944;K8S_POD_UID=0abafdd2-351e-4f65-9dea-5578d313b760" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"1a:a3:d1:15:87:bd\",\"name\":\"46d4077c2585bac\"},{\"mac\":\"0a:58:0a:d9:00:05\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/feaaab91-ceb7-4986-9e9c-30e59cc9f616\"}],\"ips\":[{\"address\":\"10.217.0.5/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:40.851962315+00:00 stderr F I1212 16:16:40.841380 7604 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:40.851962315+00:00 stderr F I1212 16:16:40.841495 7604 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:40.851962315+00:00 stderr F I1212 16:16:40.841503 7604 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:40.851962315+00:00 stderr F I1212 16:16:40.841509 7604 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:40.851962315+00:00 stderr F I1212 16:16:40.841515 7604 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:40.851962315+00:00 stderr F 2025-12-12T16:16:40Z [verbose] Add: openshift-oauth-apiserver:apiserver-8596bd845d-njgb5:1bfafc57-4718-4d71-9f69-52b321379a27:ovn-kubernetes(ovn-kubernetes):eth0 
{"cniVersion":"0.4.0","interfaces":[{"name":"18ae7415275d436","mac":"6e:91:2a:b4:ea:a6"},{"name":"eth0","mac":"0a:58:0a:d9:00:06","sandbox":"/var/run/netns/95f428cf-4c1d-4043-a818-ab7cbc93961a"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.6/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:40.851962315+00:00 stderr F I1212 16:16:40.851888 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-oauth-apiserver", Name:"apiserver-8596bd845d-njgb5", UID:"1bfafc57-4718-4d71-9f69-52b321379a27", APIVersion:"v1", ResourceVersion:"36815", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.6/23] from ovn-kubernetes 2025-12-12T16:16:40.869625607+00:00 stderr F 2025-12-12T16:16:40Z [verbose] ADD finished CNI request ContainerID:"18ae7415275d436b3001de65256fb1f46f23bff514599b79402add982df4c117" Netns:"/var/run/netns/95f428cf-4c1d-4043-a818-ab7cbc93961a" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-oauth-apiserver;K8S_POD_NAME=apiserver-8596bd845d-njgb5;K8S_POD_INFRA_CONTAINER_ID=18ae7415275d436b3001de65256fb1f46f23bff514599b79402add982df4c117;K8S_POD_UID=1bfafc57-4718-4d71-9f69-52b321379a27" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"6e:91:2a:b4:ea:a6\",\"name\":\"18ae7415275d436\"},{\"mac\":\"0a:58:0a:d9:00:06\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/95f428cf-4c1d-4043-a818-ab7cbc93961a\"}],\"ips\":[{\"address\":\"10.217.0.6/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:40.875403848+00:00 stderr F I1212 16:16:40.868318 7602 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:40.875403848+00:00 stderr F I1212 16:16:40.868503 7602 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:40.875403848+00:00 stderr F I1212 16:16:40.868519 7602 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:40.875403848+00:00 stderr F I1212 16:16:40.868562 7602 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:40.875403848+00:00 stderr F I1212 16:16:40.868579 7602 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:40.875842938+00:00 stderr F 2025-12-12T16:16:40Z [verbose] Add: openshift-apiserver-operator:openshift-apiserver-operator-846cbfc458-zf8cv:e0a1decf-4248-4f48-ba06-e9ec8fdbbea8:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"feea72f891d532e","mac":"5e:42:fe:34:d1:e6"},{"name":"eth0","mac":"0a:58:0a:d9:00:1e","sandbox":"/var/run/netns/00ba7519-5af3-4aae-9ff9-6c82a03fb866"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.30/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:40.876017103+00:00 stderr F I1212 16:16:40.875957 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-apiserver-operator", Name:"openshift-apiserver-operator-846cbfc458-zf8cv", UID:"e0a1decf-4248-4f48-ba06-e9ec8fdbbea8", APIVersion:"v1", ResourceVersion:"36814", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.30/23] from ovn-kubernetes 2025-12-12T16:16:40.892897635+00:00 stderr F 2025-12-12T16:16:40Z [verbose] ADD finished CNI request ContainerID:"feea72f891d532e7dfe73c08b63e823fe57b57ede311144b739a5b42f15b970e" Netns:"/var/run/netns/00ba7519-5af3-4aae-9ff9-6c82a03fb866" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-apiserver-operator;K8S_POD_NAME=openshift-apiserver-operator-846cbfc458-zf8cv;K8S_POD_INFRA_CONTAINER_ID=feea72f891d532e7dfe73c08b63e823fe57b57ede311144b739a5b42f15b970e;K8S_POD_UID=e0a1decf-4248-4f48-ba06-e9ec8fdbbea8" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"5e:42:fe:34:d1:e6\",\"name\":\"feea72f891d532e\"},{\"mac\":\"0a:58:0a:d9:00:1e\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/00ba7519-5af3-4aae-9ff9-6c82a03fb866\"}],\"ips\":[{\"address\":\"10.217.0.30/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:42.376609178+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"fe12aa686f8f130f2ed0db07a57b150e66a6ef1f7c1242cf968402245bac1b07" Netns:"/var/run/netns/bb273ee9-e71c-4d44-94cf-f1c3b9a37afa" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-776cdc94d6-zksq4;K8S_POD_INFRA_CONTAINER_ID=fe12aa686f8f130f2ed0db07a57b150e66a6ef1f7c1242cf968402245bac1b07;K8S_POD_UID=a78c6a97-054e-484e-aae2-a33bd3bb7b40" Path:"" 2025-12-12T16:16:42.399923077+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"5260b3857fe9178b42ba78b26a810de66780669b2c78a7cae29a736661bc1aa5" Netns:"/var/run/netns/82d60f51-b9db-42f3-bd96-b0e5fc9aa682" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication-operator;K8S_POD_NAME=authentication-operator-7f5c659b84-6t92c;K8S_POD_INFRA_CONTAINER_ID=5260b3857fe9178b42ba78b26a810de66780669b2c78a7cae29a736661bc1aa5;K8S_POD_UID=d55f43e2-46df-4460-b17f-0daa75b89154" Path:"" 2025-12-12T16:16:42.462268989+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"d25a5167e83c106fd6aae82bd4f1881d7b1012c90d8673c0eb50d806ecfe8a9d" Netns:"/var/run/netns/a4551d8c-1979-4b4f-a759-a09da6eab559" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-cluster-samples-operator;K8S_POD_NAME=cluster-samples-operator-6b564684c8-fzlkp;K8S_POD_INFRA_CONTAINER_ID=d25a5167e83c106fd6aae82bd4f1881d7b1012c90d8673c0eb50d806ecfe8a9d;K8S_POD_UID=2a282672-c872-405b-9325-f8f48865334c" Path:"" 2025-12-12T16:16:42.490531519+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"33a249d7e78465e9a718be39e7a906df97782cbb66486425e35a61af822326a2" Netns:"/var/run/netns/390eb047-aa35-4b40-87ed-2244e1485ff3" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-console;K8S_POD_NAME=console-64d44f6ddf-zhgm9;K8S_POD_INFRA_CONTAINER_ID=33a249d7e78465e9a718be39e7a906df97782cbb66486425e35a61af822326a2;K8S_POD_UID=4651322b-9aec-4667-afa3-1602ad5176fe" Path:"" 2025-12-12T16:16:42.502551633+00:00 stderr P 2025-12-12T16:16:42Z [verbose] 2025-12-12T16:16:42.502623675+00:00 stderr P ADD starting CNI request ContainerID:"e5e60228e9d988aefb88921e0968711cfd881db58c97c8a8c5b23da573180a35" Netns:"/var/run/netns/1f9e8cc8-97c2-4e25-8df1-d5aa5fc129d6" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-console-operator;K8S_POD_NAME=console-operator-67c89758df-5tw72;K8S_POD_INFRA_CONTAINER_ID=e5e60228e9d988aefb88921e0968711cfd881db58c97c8a8c5b23da573180a35;K8S_POD_UID=65efae24-6623-454c-b665-e5e407e86269" Path:"" 2025-12-12T16:16:42.502642205+00:00 stderr F 2025-12-12T16:16:42.531630263+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"a586e36317c1dad58a3f250eb491cacfc3c9a9f2c0593e3b418803d4fd07f2f5" 
Netns:"/var/run/netns/62b54ba6-6a48-42a7-a753-01518adfd02e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-image-registry;K8S_POD_NAME=cluster-image-registry-operator-86c45576b9-sfm9v;K8S_POD_INFRA_CONTAINER_ID=a586e36317c1dad58a3f250eb491cacfc3c9a9f2c0593e3b418803d4fd07f2f5;K8S_POD_UID=5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9" Path:"" 2025-12-12T16:16:42.536153133+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"7fc07748f28ad23d569f851a2e2338c4bb871689212066814cea4580cd9faf67" Netns:"/var/run/netns/a96d9320-2f76-42a6-b315-045d9bab631b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-controller-manager-operator;K8S_POD_NAME=kube-controller-manager-operator-69d5f845f8-nsdgk;K8S_POD_INFRA_CONTAINER_ID=7fc07748f28ad23d569f851a2e2338c4bb871689212066814cea4580cd9faf67;K8S_POD_UID=4c111429-5512-4d9c-898b-d3ec0bdb5d08" Path:"" 2025-12-12T16:16:42.547654074+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"755f26522b7a517535508aa0c7585634c4261c35ec0bddd08f3d85a3886e6e64" Netns:"/var/run/netns/a1a1a555-4810-45e1-b46b-f8bf70f2b2a7" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver-operator;K8S_POD_NAME=kube-apiserver-operator-575994946d-wff8v;K8S_POD_INFRA_CONTAINER_ID=755f26522b7a517535508aa0c7585634c4261c35ec0bddd08f3d85a3886e6e64;K8S_POD_UID=22a6a238-12c9-43ae-afbc-f9595d46e727" Path:"" 2025-12-12T16:16:42.550474823+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"cdd4b26e97241fbc52121884f8e472181831c434a65673b7d8859d2c2b10af54" Netns:"/var/run/netns/e0e586a1-128f-49db-b67a-6feecf2bc116" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-service-ca-operator;K8S_POD_NAME=service-ca-operator-5b9c976747-9wbcx;K8S_POD_INFRA_CONTAINER_ID=cdd4b26e97241fbc52121884f8e472181831c434a65673b7d8859d2c2b10af54;K8S_POD_UID=8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7" Path:"" 2025-12-12T16:16:42.568448372+00:00 stderr F I1212 16:16:42.558564 7867 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:42.568448372+00:00 stderr F I1212 16:16:42.558990 7867 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:42.568448372+00:00 stderr F I1212 16:16:42.559000 7867 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:42.568448372+00:00 stderr F I1212 16:16:42.559008 7867 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:42.568448372+00:00 stderr F I1212 16:16:42.559016 7867 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:42.568787260+00:00 stderr F 2025-12-12T16:16:42Z [verbose] Add: openshift-route-controller-manager:route-controller-manager-776cdc94d6-zksq4:a78c6a97-054e-484e-aae2-a33bd3bb7b40:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"fe12aa686f8f130","mac":"d2:60:8b:bf:55:84"},{"name":"eth0","mac":"0a:58:0a:d9:00:07","sandbox":"/var/run/netns/bb273ee9-e71c-4d44-94cf-f1c3b9a37afa"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.7/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:42.569023736+00:00 stderr F I1212 16:16:42.568961 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-route-controller-manager", Name:"route-controller-manager-776cdc94d6-zksq4", UID:"a78c6a97-054e-484e-aae2-a33bd3bb7b40", APIVersion:"v1", 
ResourceVersion:"36710", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.7/23] from ovn-kubernetes 2025-12-12T16:16:42.589068135+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD finished CNI request ContainerID:"fe12aa686f8f130f2ed0db07a57b150e66a6ef1f7c1242cf968402245bac1b07" Netns:"/var/run/netns/bb273ee9-e71c-4d44-94cf-f1c3b9a37afa" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-776cdc94d6-zksq4;K8S_POD_INFRA_CONTAINER_ID=fe12aa686f8f130f2ed0db07a57b150e66a6ef1f7c1242cf968402245bac1b07;K8S_POD_UID=a78c6a97-054e-484e-aae2-a33bd3bb7b40" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"d2:60:8b:bf:55:84\",\"name\":\"fe12aa686f8f130\"},{\"mac\":\"0a:58:0a:d9:00:07\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/bb273ee9-e71c-4d44-94cf-f1c3b9a37afa\"}],\"ips\":[{\"address\":\"10.217.0.7/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:42.604815960+00:00 stderr F I1212 16:16:42.599686 7887 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:42.604815960+00:00 stderr F I1212 16:16:42.599855 7887 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:42.604815960+00:00 stderr F I1212 16:16:42.599870 7887 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:42.604815960+00:00 stderr F I1212 16:16:42.599877 7887 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:42.604815960+00:00 stderr F I1212 16:16:42.599883 7887 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:42.605279921+00:00 stderr F 2025-12-12T16:16:42Z [verbose] Add: openshift-authentication-operator:authentication-operator-7f5c659b84-6t92c:d55f43e2-46df-4460-b17f-0daa75b89154:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"5260b3857fe9178","mac":"c2:75:61:6f:b6:eb"},{"name":"eth0","mac":"0a:58:0a:d9:00:08","sandbox":"/var/run/netns/82d60f51-b9db-42f3-bd96-b0e5fc9aa682"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.8/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:42.605622749+00:00 stderr F I1212 16:16:42.605415 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-authentication-operator", Name:"authentication-operator-7f5c659b84-6t92c", UID:"d55f43e2-46df-4460-b17f-0daa75b89154", APIVersion:"v1", ResourceVersion:"36711", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.8/23] from ovn-kubernetes 2025-12-12T16:16:42.621208780+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD finished CNI request ContainerID:"5260b3857fe9178b42ba78b26a810de66780669b2c78a7cae29a736661bc1aa5" Netns:"/var/run/netns/82d60f51-b9db-42f3-bd96-b0e5fc9aa682" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication-operator;K8S_POD_NAME=authentication-operator-7f5c659b84-6t92c;K8S_POD_INFRA_CONTAINER_ID=5260b3857fe9178b42ba78b26a810de66780669b2c78a7cae29a736661bc1aa5;K8S_POD_UID=d55f43e2-46df-4460-b17f-0daa75b89154" Path:"", result: 
"{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"c2:75:61:6f:b6:eb\",\"name\":\"5260b3857fe9178\"},{\"mac\":\"0a:58:0a:d9:00:08\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/82d60f51-b9db-42f3-bd96-b0e5fc9aa682\"}],\"ips\":[{\"address\":\"10.217.0.8/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:42.646371634+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"328df9b4f48f0adc7c6483781e32bef2bbf38c7a3bc72162f9752fc54e642716" Netns:"/var/run/netns/e78fe4df-e35f-4fe8-84a4-6fd0e3ebe069" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=collect-profiles-29425935-7hkrm;K8S_POD_INFRA_CONTAINER_ID=328df9b4f48f0adc7c6483781e32bef2bbf38c7a3bc72162f9752fc54e642716;K8S_POD_UID=19e81fea-065e-43b5-8e56-49bfcfa342f7" Path:"" 2025-12-12T16:16:42.646371634+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"2e91da059032f204e0635056eca162922cc8d96e36eddcee276e26db40504fa7" Netns:"/var/run/netns/835890d7-7d5a-4ef2-ad32-13639a53c31f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager-operator;K8S_POD_NAME=openshift-controller-manager-operator-686468bdd5-xknw6;K8S_POD_INFRA_CONTAINER_ID=2e91da059032f204e0635056eca162922cc8d96e36eddcee276e26db40504fa7;K8S_POD_UID=9cc5b0f4-dc96-4a65-8404-f3d36ad70787" Path:"" 2025-12-12T16:16:42.776492441+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"2bf714089818fd6477a262dc7b43a76fa700b53d570bf643af2f365afa9909f2" Netns:"/var/run/netns/3974ea76-7613-460e-bb4d-be275a41269d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-65b6cccf98-flnsl;K8S_POD_INFRA_CONTAINER_ID=2bf714089818fd6477a262dc7b43a76fa700b53d570bf643af2f365afa9909f2;K8S_POD_UID=d259a06e-3949-41b6-a067-7c01441da4b1" Path:"" 2025-12-12T16:16:42.779151326+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"d2cde16dded540f6a71d463b56d9b3fbe9cdcaa1c96d46e5cd1e32779c9eb5af" Netns:"/var/run/netns/bb49c6c7-7fc5-498d-bf3b-7ed17b20566c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-machine-config-operator;K8S_POD_NAME=machine-config-operator-67c9d58cbb-bg744;K8S_POD_INFRA_CONTAINER_ID=d2cde16dded540f6a71d463b56d9b3fbe9cdcaa1c96d46e5cd1e32779c9eb5af;K8S_POD_UID=1999cfc6-e5a0-4ddb-883d-71f861b286a8" Path:"" 2025-12-12T16:16:42.809145818+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"63d4f7893d2a6e51680e692730931a8e2db49032b3b5feb5b320f7d42af3e4ba" Netns:"/var/run/netns/60c60576-fd2a-4668-b2a8-885a179c5e34" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-66458b6674-brfdj;K8S_POD_INFRA_CONTAINER_ID=63d4f7893d2a6e51680e692730931a8e2db49032b3b5feb5b320f7d42af3e4ba;K8S_POD_UID=e13eeec0-72dd-418b-9180-87ca0d56870d" Path:"" 2025-12-12T16:16:42.809145818+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"8d1ab54b80fb5cd41339903f0b79bcc9051bd0ddd510fc12c03b27b312424770" Netns:"/var/run/netns/3163793a-d062-4f3c-a3af-8759f4a804fa" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-service-ca;K8S_POD_NAME=service-ca-74545575db-gsm6t;K8S_POD_INFRA_CONTAINER_ID=8d1ab54b80fb5cd41339903f0b79bcc9051bd0ddd510fc12c03b27b312424770;K8S_POD_UID=6baa2db5-b688-47dd-8d81-7dadbbbd3759" Path:"" 2025-12-12T16:16:42.816023976+00:00 stderr F 2025-12-12T16:16:42Z 
[verbose] ADD starting CNI request ContainerID:"ee8088f28b3197cc5469945413422c389a49ba5ba0a440f3aa89c9ac372e7839" Netns:"/var/run/netns/dd757d13-cde8-40ee-97ce-773ddab76a40" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-machine-api;K8S_POD_NAME=control-plane-machine-set-operator-75ffdb6fcd-m8gw7;K8S_POD_INFRA_CONTAINER_ID=ee8088f28b3197cc5469945413422c389a49ba5ba0a440f3aa89c9ac372e7839;K8S_POD_UID=9c49153e-af72-4d2f-8184-fa7ba43a5a3e" Path:"" 2025-12-12T16:16:42.866361665+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"7de9f74eaf8718433f9098110c5587849d2104fa8fe5544832d4f1d0b4185212" Netns:"/var/run/netns/337a9e0f-e3ff-40f9-a4e2-d1e9f169bb37" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-ingress-canary;K8S_POD_NAME=ingress-canary-tqcqf;K8S_POD_INFRA_CONTAINER_ID=7de9f74eaf8718433f9098110c5587849d2104fa8fe5544832d4f1d0b4185212;K8S_POD_UID=47102097-389c-44ce-a25f-6b8d25a70e1d" Path:"" 2025-12-12T16:16:42.944447621+00:00 stderr F 2025-12-12T16:16:42Z [verbose] ADD starting CNI request ContainerID:"821a1e5c7483c039e70cdf4bd3be662cd8906606bd0abbb72d4d89a53f25635c" Netns:"/var/run/netns/9a3edcb5-2cf5-47c6-9a48-e7376a15b588" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=olm-operator-5cdf44d969-kcw92;K8S_POD_INFRA_CONTAINER_ID=821a1e5c7483c039e70cdf4bd3be662cd8906606bd0abbb72d4d89a53f25635c;K8S_POD_UID=124ec2f9-0e23-47da-b25f-66a13947465e" Path:"" 2025-12-12T16:16:43.044131995+00:00 stderr F I1212 16:16:43.037171 8025 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.044131995+00:00 stderr F I1212 16:16:43.037480 8025 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.044131995+00:00 stderr F I1212 16:16:43.037511 8025 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.044131995+00:00 stderr F I1212 16:16:43.037533 8025 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.044131995+00:00 stderr F I1212 16:16:43.037547 8025 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.044663348+00:00 stderr P 2025-12-12T16:16:43Z [verbose] 2025-12-12T16:16:43.045130720+00:00 stderr P Add: openshift-kube-controller-manager-operator:kube-controller-manager-operator-69d5f845f8-nsdgk:4c111429-5512-4d9c-898b-d3ec0bdb5d08:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"7fc07748f28ad23","mac":"06:7c:0d:b0:72:db"},{"name":"eth0","mac":"0a:58:0a:d9:00:0e","sandbox":"/var/run/netns/a96d9320-2f76-42a6-b315-045d9bab631b"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.14/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.045161380+00:00 stderr F 2025-12-12T16:16:43.045578350+00:00 stderr F I1212 16:16:43.045504 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-controller-manager-operator", Name:"kube-controller-manager-operator-69d5f845f8-nsdgk", UID:"4c111429-5512-4d9c-898b-d3ec0bdb5d08", APIVersion:"v1", ResourceVersion:"36732", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.14/23] from ovn-kubernetes 2025-12-12T16:16:43.054999670+00:00 stderr F I1212 16:16:43.036870 7922 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.054999670+00:00 stderr F I1212 
16:16:43.037503 7922 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.054999670+00:00 stderr F I1212 16:16:43.037526 7922 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.054999670+00:00 stderr F I1212 16:16:43.037539 7922 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.054999670+00:00 stderr F I1212 16:16:43.037550 7922 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.055866112+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-cluster-samples-operator:cluster-samples-operator-6b564684c8-fzlkp:2a282672-c872-405b-9325-f8f48865334c:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"d25a5167e83c106","mac":"82:32:93:42:d8:5d"},{"name":"eth0","mac":"0a:58:0a:d9:00:0a","sandbox":"/var/run/netns/a4551d8c-1979-4b4f-a759-a09da6eab559"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.10/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.055866112+00:00 stderr F I1212 16:16:43.055640 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-cluster-samples-operator", Name:"cluster-samples-operator-6b564684c8-fzlkp", UID:"2a282672-c872-405b-9325-f8f48865334c", APIVersion:"v1", ResourceVersion:"36723", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.10/23] from ovn-kubernetes 2025-12-12T16:16:43.056262931+00:00 stderr F I1212 16:16:43.047267 8016 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.056262931+00:00 stderr F I1212 16:16:43.047377 8016 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.056262931+00:00 stderr F I1212 16:16:43.047390 8016 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.056262931+00:00 stderr F I1212 16:16:43.047396 8016 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.056262931+00:00 stderr F I1212 16:16:43.047402 8016 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.056452296+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-image-registry:cluster-image-registry-operator-86c45576b9-sfm9v:5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"a586e36317c1dad","mac":"82:35:e6:d9:c5:1e"},{"name":"eth0","mac":"0a:58:0a:d9:00:0d","sandbox":"/var/run/netns/62b54ba6-6a48-42a7-a753-01518adfd02e"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.13/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.056761723+00:00 stderr F I1212 16:16:43.056699 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-image-registry", Name:"cluster-image-registry-operator-86c45576b9-sfm9v", UID:"5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9", APIVersion:"v1", ResourceVersion:"36857", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.13/23] from ovn-kubernetes 2025-12-12T16:16:43.059465989+00:00 stderr F I1212 16:16:43.037011 7958 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.059465989+00:00 stderr F I1212 16:16:43.037832 7958 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 
2025-12-12T16:16:43.059465989+00:00 stderr F I1212 16:16:43.037843 7958 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.059465989+00:00 stderr F I1212 16:16:43.037851 7958 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.059465989+00:00 stderr F I1212 16:16:43.037858 7958 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.059945731+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-console:console-64d44f6ddf-zhgm9:4651322b-9aec-4667-afa3-1602ad5176fe:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"33a249d7e78465e","mac":"c6:2c:c5:d5:3d:aa"},{"name":"eth0","mac":"0a:58:0a:d9:00:0b","sandbox":"/var/run/netns/390eb047-aa35-4b40-87ed-2244e1485ff3"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.11/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.060463474+00:00 stderr F I1212 16:16:43.060352 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-console", Name:"console-64d44f6ddf-zhgm9", UID:"4651322b-9aec-4667-afa3-1602ad5176fe", APIVersion:"v1", ResourceVersion:"36717", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.11/23] from ovn-kubernetes 2025-12-12T16:16:43.065531618+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"7fc07748f28ad23d569f851a2e2338c4bb871689212066814cea4580cd9faf67" Netns:"/var/run/netns/a96d9320-2f76-42a6-b315-045d9bab631b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-controller-manager-operator;K8S_POD_NAME=kube-controller-manager-operator-69d5f845f8-nsdgk;K8S_POD_INFRA_CONTAINER_ID=7fc07748f28ad23d569f851a2e2338c4bb871689212066814cea4580cd9faf67;K8S_POD_UID=4c111429-5512-4d9c-898b-d3ec0bdb5d08" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"06:7c:0d:b0:72:db\",\"name\":\"7fc07748f28ad23\"},{\"mac\":\"0a:58:0a:d9:00:0e\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/a96d9320-2f76-42a6-b315-045d9bab631b\"}],\"ips\":[{\"address\":\"10.217.0.14/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.077729825+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"d25a5167e83c106fd6aae82bd4f1881d7b1012c90d8673c0eb50d806ecfe8a9d" Netns:"/var/run/netns/a4551d8c-1979-4b4f-a759-a09da6eab559" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-cluster-samples-operator;K8S_POD_NAME=cluster-samples-operator-6b564684c8-fzlkp;K8S_POD_INFRA_CONTAINER_ID=d25a5167e83c106fd6aae82bd4f1881d7b1012c90d8673c0eb50d806ecfe8a9d;K8S_POD_UID=2a282672-c872-405b-9325-f8f48865334c" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"82:32:93:42:d8:5d\",\"name\":\"d25a5167e83c106\"},{\"mac\":\"0a:58:0a:d9:00:0a\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/a4551d8c-1979-4b4f-a759-a09da6eab559\"}],\"ips\":[{\"address\":\"10.217.0.10/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.084934851+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"a586e36317c1dad58a3f250eb491cacfc3c9a9f2c0593e3b418803d4fd07f2f5" Netns:"/var/run/netns/62b54ba6-6a48-42a7-a753-01518adfd02e" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-image-registry;K8S_POD_NAME=cluster-image-registry-operator-86c45576b9-sfm9v;K8S_POD_INFRA_CONTAINER_ID=a586e36317c1dad58a3f250eb491cacfc3c9a9f2c0593e3b418803d4fd07f2f5;K8S_POD_UID=5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"82:35:e6:d9:c5:1e\",\"name\":\"a586e36317c1dad\"},{\"mac\":\"0a:58:0a:d9:00:0d\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/62b54ba6-6a48-42a7-a753-01518adfd02e\"}],\"ips\":[{\"address\":\"10.217.0.13/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.084995743+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"33a249d7e78465e9a718be39e7a906df97782cbb66486425e35a61af822326a2" Netns:"/var/run/netns/390eb047-aa35-4b40-87ed-2244e1485ff3" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-console;K8S_POD_NAME=console-64d44f6ddf-zhgm9;K8S_POD_INFRA_CONTAINER_ID=33a249d7e78465e9a718be39e7a906df97782cbb66486425e35a61af822326a2;K8S_POD_UID=4651322b-9aec-4667-afa3-1602ad5176fe" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"c6:2c:c5:d5:3d:aa\",\"name\":\"33a249d7e78465e\"},{\"mac\":\"0a:58:0a:d9:00:0b\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/390eb047-aa35-4b40-87ed-2244e1485ff3\"}],\"ips\":[{\"address\":\"10.217.0.11/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.151039205+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"80eb6b504f4b6d215a1fdd56503837348aea4e832c71cca42b9c33074674fdba" Netns:"/var/run/netns/9b75a81c-54ca-4204-bf8f-6df7be735241" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-547dbd544d-xpvsb;K8S_POD_INFRA_CONTAINER_ID=80eb6b504f4b6d215a1fdd56503837348aea4e832c71cca42b9c33074674fdba;K8S_POD_UID=1de41ef3-7896-4e9c-8201-8174bc4468c4" Path:"" 2025-12-12T16:16:43.218045061+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"46360fc83f1cc220589b13a8c27ffd0f5770b5b67075b08286b1c9ec648960cd" Netns:"/var/run/netns/d4fa20e8-c6d3-41c0-9d9c-67a3f80ced61" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-ingress-operator;K8S_POD_NAME=ingress-operator-6b9cb4dbcf-5twrv;K8S_POD_INFRA_CONTAINER_ID=46360fc83f1cc220589b13a8c27ffd0f5770b5b67075b08286b1c9ec648960cd;K8S_POD_UID=338f89a1-1c2f-4e37-9572-c5b13d682ca9" Path:"" 2025-12-12T16:16:43.264528496+00:00 stderr F I1212 16:16:43.244730 7974 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.264528496+00:00 stderr F I1212 16:16:43.244872 7974 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.264528496+00:00 stderr F I1212 16:16:43.244881 7974 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.264528496+00:00 stderr F I1212 16:16:43.244887 7974 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.264528496+00:00 stderr F I1212 16:16:43.244894 7974 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.265107860+00:00 stderr P 2025-12-12T16:16:43Z [verbose] 2025-12-12T16:16:43.265142491+00:00 stderr P Add: openshift-console-operator:console-operator-67c89758df-5tw72:65efae24-6623-454c-b665-e5e407e86269:ovn-kubernetes(ovn-kubernetes):eth0 
{"cniVersion":"0.4.0","interfaces":[{"name":"e5e60228e9d988a","mac":"ba:2c:02:b8:7f:ec"},{"name":"eth0","mac":"0a:58:0a:d9:00:0c","sandbox":"/var/run/netns/1f9e8cc8-97c2-4e25-8df1-d5aa5fc129d6"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.12/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.265163031+00:00 stderr F 2025-12-12T16:16:43.266006222+00:00 stderr F I1212 16:16:43.265958 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-console-operator", Name:"console-operator-67c89758df-5tw72", UID:"65efae24-6623-454c-b665-e5e407e86269", APIVersion:"v1", ResourceVersion:"36719", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.12/23] from ovn-kubernetes 2025-12-12T16:16:43.268361930+00:00 stderr F I1212 16:16:43.237261 8042 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.268361930+00:00 stderr F I1212 16:16:43.237585 8042 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.268361930+00:00 stderr F I1212 16:16:43.237592 8042 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.268361930+00:00 stderr F I1212 16:16:43.237606 8042 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.268361930+00:00 stderr F I1212 16:16:43.237611 8042 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.268361930+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-kube-apiserver-operator:kube-apiserver-operator-575994946d-wff8v:22a6a238-12c9-43ae-afbc-f9595d46e727:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"755f26522b7a517","mac":"5a:21:81:b5:31:61"},{"name":"eth0","mac":"0a:58:0a:d9:00:14","sandbox":"/var/run/netns/a1a1a555-4810-45e1-b46b-f8bf70f2b2a7"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.20/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.272294736+00:00 stderr F I1212 16:16:43.268525 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-apiserver-operator", Name:"kube-apiserver-operator-575994946d-wff8v", UID:"22a6a238-12c9-43ae-afbc-f9595d46e727", APIVersion:"v1", ResourceVersion:"36730", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.20/23] from ovn-kubernetes 2025-12-12T16:16:43.288316397+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"fdca212b909c7f735bc64f2733588572386b0252f8ee3ee0ccb2ee9c6af3fae7" Netns:"/var/run/netns/fe01bcb7-c970-4c2a-ad34-195f57f74d32" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-console;K8S_POD_NAME=downloads-747b44746d-sm46g;K8S_POD_INFRA_CONTAINER_ID=fdca212b909c7f735bc64f2733588572386b0252f8ee3ee0ccb2ee9c6af3fae7;K8S_POD_UID=f967d508-b683-4df4-9be0-3a7fb5afa7bb" Path:"" 2025-12-12T16:16:43.291986806+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"e5e60228e9d988aefb88921e0968711cfd881db58c97c8a8c5b23da573180a35" Netns:"/var/run/netns/1f9e8cc8-97c2-4e25-8df1-d5aa5fc129d6" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-console-operator;K8S_POD_NAME=console-operator-67c89758df-5tw72;K8S_POD_INFRA_CONTAINER_ID=e5e60228e9d988aefb88921e0968711cfd881db58c97c8a8c5b23da573180a35;K8S_POD_UID=65efae24-6623-454c-b665-e5e407e86269" Path:"", result: 
"{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"ba:2c:02:b8:7f:ec\",\"name\":\"e5e60228e9d988a\"},{\"mac\":\"0a:58:0a:d9:00:0c\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/1f9e8cc8-97c2-4e25-8df1-d5aa5fc129d6\"}],\"ips\":[{\"address\":\"10.217.0.12/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.303136088+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"755f26522b7a517535508aa0c7585634c4261c35ec0bddd08f3d85a3886e6e64" Netns:"/var/run/netns/a1a1a555-4810-45e1-b46b-f8bf70f2b2a7" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver-operator;K8S_POD_NAME=kube-apiserver-operator-575994946d-wff8v;K8S_POD_INFRA_CONTAINER_ID=755f26522b7a517535508aa0c7585634c4261c35ec0bddd08f3d85a3886e6e64;K8S_POD_UID=22a6a238-12c9-43ae-afbc-f9595d46e727" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"5a:21:81:b5:31:61\",\"name\":\"755f26522b7a517\"},{\"mac\":\"0a:58:0a:d9:00:14\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/a1a1a555-4810-45e1-b46b-f8bf70f2b2a7\"}],\"ips\":[{\"address\":\"10.217.0.20/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.398018385+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"d0489ccdfa99a6b99c7ac3a0e870ca5488db2ca428180aa64741de104ff8555e" Netns:"/var/run/netns/bdb7ad24-ca88-4d88-bdf6-75139ebea202" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-storage-version-migrator-operator;K8S_POD_NAME=kube-storage-version-migrator-operator-565b79b866-krgxf;K8S_POD_INFRA_CONTAINER_ID=d0489ccdfa99a6b99c7ac3a0e870ca5488db2ca428180aa64741de104ff8555e;K8S_POD_UID=dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7" Path:"" 2025-12-12T16:16:43.439299143+00:00 stderr F I1212 16:16:43.426446 8047 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.439299143+00:00 stderr F I1212 16:16:43.426830 8047 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.439299143+00:00 stderr F I1212 16:16:43.426839 8047 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.439299143+00:00 stderr F I1212 16:16:43.426846 8047 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.439299143+00:00 stderr F I1212 16:16:43.426851 8047 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.439299143+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-service-ca-operator:service-ca-operator-5b9c976747-9wbcx:8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"cdd4b26e97241fb","mac":"d6:95:74:03:43:49"},{"name":"eth0","mac":"0a:58:0a:d9:00:19","sandbox":"/var/run/netns/e0e586a1-128f-49db-b67a-6feecf2bc116"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.25/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.439299143+00:00 stderr F I1212 16:16:43.437773 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-service-ca-operator", Name:"service-ca-operator-5b9c976747-9wbcx", UID:"8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7", APIVersion:"v1", ResourceVersion:"36742", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.25/23] from ovn-kubernetes 2025-12-12T16:16:43.442856610+00:00 stderr P 2025-12-12T16:16:43Z [verbose] 
2025-12-12T16:16:43.442893081+00:00 stderr P ADD starting CNI request ContainerID:"0702298b930d9df41b5167fe6b3a06e9d2f7dd988ab81b6dc47a3b0d81221bce" Netns:"/var/run/netns/84768e52-fe57-4c71-81de-46deb9472742" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-etcd-operator;K8S_POD_NAME=etcd-operator-69b85846b6-mrrt5;K8S_POD_INFRA_CONTAINER_ID=0702298b930d9df41b5167fe6b3a06e9d2f7dd988ab81b6dc47a3b0d81221bce;K8S_POD_UID=a6c070b2-83ee-4c73-9201-3ab5dcc9aeca" Path:"" 2025-12-12T16:16:43.442911871+00:00 stderr F 2025-12-12T16:16:43.453848428+00:00 stderr P 2025-12-12T16:16:43Z [verbose] 2025-12-12T16:16:43.453891789+00:00 stderr P ADD starting CNI request ContainerID:"c49e08c7880c95445bad61bbd80bc3fa3e3679d96d95853d151678fb7b42e5db" Netns:"/var/run/netns/cb9db6af-231a-4c8f-9ef7-83ea90d4c5f9" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-multus;K8S_POD_NAME=multus-admission-controller-69db94689b-xks9x;K8S_POD_INFRA_CONTAINER_ID=c49e08c7880c95445bad61bbd80bc3fa3e3679d96d95853d151678fb7b42e5db;K8S_POD_UID=be106c32-9849-49fd-9e4a-4b5b9c16920a" Path:"" 2025-12-12T16:16:43.453909860+00:00 stderr F 2025-12-12T16:16:43.471243413+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"cdd4b26e97241fbc52121884f8e472181831c434a65673b7d8859d2c2b10af54" Netns:"/var/run/netns/e0e586a1-128f-49db-b67a-6feecf2bc116" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-service-ca-operator;K8S_POD_NAME=service-ca-operator-5b9c976747-9wbcx;K8S_POD_INFRA_CONTAINER_ID=cdd4b26e97241fbc52121884f8e472181831c434a65673b7d8859d2c2b10af54;K8S_POD_UID=8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"d6:95:74:03:43:49\",\"name\":\"cdd4b26e97241fb\"},{\"mac\":\"0a:58:0a:d9:00:19\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/e0e586a1-128f-49db-b67a-6feecf2bc116\"}],\"ips\":[{\"address\":\"10.217.0.25/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.474230336+00:00 stderr F I1212 16:16:43.463060 8249 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.474230336+00:00 stderr F I1212 16:16:43.463303 8249 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.474230336+00:00 stderr F I1212 16:16:43.463310 8249 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.474230336+00:00 stderr F I1212 16:16:43.463316 8249 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.474230336+00:00 stderr F I1212 16:16:43.463321 8249 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.474635096+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-service-ca:service-ca-74545575db-gsm6t:6baa2db5-b688-47dd-8d81-7dadbbbd3759:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"8d1ab54b80fb5cd","mac":"ee:bc:0e:b7:d0:29"},{"name":"eth0","mac":"0a:58:0a:d9:00:28","sandbox":"/var/run/netns/3163793a-d062-4f3c-a3af-8759f4a804fa"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.40/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.476236875+00:00 stderr F I1212 16:16:43.474950 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-service-ca", Name:"service-ca-74545575db-gsm6t", UID:"6baa2db5-b688-47dd-8d81-7dadbbbd3759", APIVersion:"v1", 
ResourceVersion:"36766", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.40/23] from ovn-kubernetes 2025-12-12T16:16:43.501521672+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"8d1ab54b80fb5cd41339903f0b79bcc9051bd0ddd510fc12c03b27b312424770" Netns:"/var/run/netns/3163793a-d062-4f3c-a3af-8759f4a804fa" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-service-ca;K8S_POD_NAME=service-ca-74545575db-gsm6t;K8S_POD_INFRA_CONTAINER_ID=8d1ab54b80fb5cd41339903f0b79bcc9051bd0ddd510fc12c03b27b312424770;K8S_POD_UID=6baa2db5-b688-47dd-8d81-7dadbbbd3759" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"ee:bc:0e:b7:d0:29\",\"name\":\"8d1ab54b80fb5cd\"},{\"mac\":\"0a:58:0a:d9:00:28\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/3163793a-d062-4f3c-a3af-8759f4a804fa\"}],\"ips\":[{\"address\":\"10.217.0.40/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.552881036+00:00 stderr F I1212 16:16:43.543998 8194 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.552881036+00:00 stderr F I1212 16:16:43.544134 8194 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.552881036+00:00 stderr F I1212 16:16:43.544146 8194 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.552881036+00:00 stderr F I1212 16:16:43.544152 8194 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.552881036+00:00 stderr F I1212 16:16:43.544159 8194 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.553326567+00:00 stderr P 2025-12-12T16:16:43Z [verbose] 2025-12-12T16:16:43.553352297+00:00 stderr P Add: openshift-machine-config-operator:machine-config-operator-67c9d58cbb-bg744:1999cfc6-e5a0-4ddb-883d-71f861b286a8:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"d2cde16dded540f","mac":"4a:09:a6:54:b0:b1"},{"name":"eth0","mac":"0a:58:0a:d9:00:21","sandbox":"/var/run/netns/bb49c6c7-7fc5-498d-bf3b-7ed17b20566c"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.33/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.553370188+00:00 stderr F 2025-12-12T16:16:43.553845329+00:00 stderr F I1212 16:16:43.553766 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-machine-config-operator", Name:"machine-config-operator-67c9d58cbb-bg744", UID:"1999cfc6-e5a0-4ddb-883d-71f861b286a8", APIVersion:"v1", ResourceVersion:"36757", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.33/23] from ovn-kubernetes 2025-12-12T16:16:43.570452825+00:00 stderr F I1212 16:16:43.534004 8129 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.570452825+00:00 stderr F I1212 16:16:43.534119 8129 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.570452825+00:00 stderr F I1212 16:16:43.534127 8129 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.570452825+00:00 stderr F I1212 16:16:43.534134 8129 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.570452825+00:00 stderr F I1212 16:16:43.534140 8129 envvar.go:172] "Feature gate default state" 
feature="WatchListClient" enabled=false 2025-12-12T16:16:43.571694125+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-operator-lifecycle-manager:collect-profiles-29425935-7hkrm:19e81fea-065e-43b5-8e56-49bfcfa342f7:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"328df9b4f48f0ad","mac":"ee:55:68:e8:ac:40"},{"name":"eth0","mac":"0a:58:0a:d9:00:20","sandbox":"/var/run/netns/e78fe4df-e35f-4fe8-84a4-6fd0e3ebe069"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.32/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.571694125+00:00 stderr F I1212 16:16:43.571436 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-operator-lifecycle-manager", Name:"collect-profiles-29425935-7hkrm", UID:"19e81fea-065e-43b5-8e56-49bfcfa342f7", APIVersion:"v1", ResourceVersion:"36749", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.32/23] from ovn-kubernetes 2025-12-12T16:16:43.573348876+00:00 stderr F I1212 16:16:43.557783 8130 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.573348876+00:00 stderr F I1212 16:16:43.557858 8130 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.573348876+00:00 stderr F I1212 16:16:43.557865 8130 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.573348876+00:00 stderr F I1212 16:16:43.557871 8130 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.573348876+00:00 stderr F I1212 16:16:43.557877 8130 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.573348876+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-controller-manager-operator:openshift-controller-manager-operator-686468bdd5-xknw6:9cc5b0f4-dc96-4a65-8404-f3d36ad70787:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"2e91da059032f20","mac":"42:c7:fb:b4:17:72"},{"name":"eth0","mac":"0a:58:0a:d9:00:1f","sandbox":"/var/run/netns/835890d7-7d5a-4ef2-ad32-13639a53c31f"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.31/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.573348876+00:00 stderr F I1212 16:16:43.572591 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator-686468bdd5-xknw6", UID:"9cc5b0f4-dc96-4a65-8404-f3d36ad70787", APIVersion:"v1", ResourceVersion:"36759", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.31/23] from ovn-kubernetes 2025-12-12T16:16:43.593459417+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"d2cde16dded540f6a71d463b56d9b3fbe9cdcaa1c96d46e5cd1e32779c9eb5af" Netns:"/var/run/netns/bb49c6c7-7fc5-498d-bf3b-7ed17b20566c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-machine-config-operator;K8S_POD_NAME=machine-config-operator-67c9d58cbb-bg744;K8S_POD_INFRA_CONTAINER_ID=d2cde16dded540f6a71d463b56d9b3fbe9cdcaa1c96d46e5cd1e32779c9eb5af;K8S_POD_UID=1999cfc6-e5a0-4ddb-883d-71f861b286a8" Path:"", result: 
"{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"4a:09:a6:54:b0:b1\",\"name\":\"d2cde16dded540f\"},{\"mac\":\"0a:58:0a:d9:00:21\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/bb49c6c7-7fc5-498d-bf3b-7ed17b20566c\"}],\"ips\":[{\"address\":\"10.217.0.33/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.593627311+00:00 stderr F I1212 16:16:43.557553 8193 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.593627311+00:00 stderr F I1212 16:16:43.557747 8193 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.593627311+00:00 stderr F I1212 16:16:43.557757 8193 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.593627311+00:00 stderr F I1212 16:16:43.557764 8193 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.593627311+00:00 stderr F I1212 16:16:43.557771 8193 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.594636475+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-controller-manager:controller-manager-65b6cccf98-flnsl:d259a06e-3949-41b6-a067-7c01441da4b1:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"2bf714089818fd6","mac":"be:88:db:96:53:e1"},{"name":"eth0","mac":"0a:58:0a:d9:00:0f","sandbox":"/var/run/netns/3974ea76-7613-460e-bb4d-be275a41269d"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.15/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.594842840+00:00 stderr F I1212 16:16:43.594774 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-controller-manager", Name:"controller-manager-65b6cccf98-flnsl", UID:"d259a06e-3949-41b6-a067-7c01441da4b1", APIVersion:"v1", ResourceVersion:"36739", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.15/23] from ovn-kubernetes 2025-12-12T16:16:43.611153589+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"328df9b4f48f0adc7c6483781e32bef2bbf38c7a3bc72162f9752fc54e642716" Netns:"/var/run/netns/e78fe4df-e35f-4fe8-84a4-6fd0e3ebe069" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=collect-profiles-29425935-7hkrm;K8S_POD_INFRA_CONTAINER_ID=328df9b4f48f0adc7c6483781e32bef2bbf38c7a3bc72162f9752fc54e642716;K8S_POD_UID=19e81fea-065e-43b5-8e56-49bfcfa342f7" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"ee:55:68:e8:ac:40\",\"name\":\"328df9b4f48f0ad\"},{\"mac\":\"0a:58:0a:d9:00:20\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/e78fe4df-e35f-4fe8-84a4-6fd0e3ebe069\"}],\"ips\":[{\"address\":\"10.217.0.32/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.615454864+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"2e91da059032f204e0635056eca162922cc8d96e36eddcee276e26db40504fa7" Netns:"/var/run/netns/835890d7-7d5a-4ef2-ad32-13639a53c31f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager-operator;K8S_POD_NAME=openshift-controller-manager-operator-686468bdd5-xknw6;K8S_POD_INFRA_CONTAINER_ID=2e91da059032f204e0635056eca162922cc8d96e36eddcee276e26db40504fa7;K8S_POD_UID=9cc5b0f4-dc96-4a65-8404-f3d36ad70787" Path:"", result: 
"{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"42:c7:fb:b4:17:72\",\"name\":\"2e91da059032f20\"},{\"mac\":\"0a:58:0a:d9:00:1f\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/835890d7-7d5a-4ef2-ad32-13639a53c31f\"}],\"ips\":[{\"address\":\"10.217.0.31/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.620542188+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"2bf714089818fd6477a262dc7b43a76fa700b53d570bf643af2f365afa9909f2" Netns:"/var/run/netns/3974ea76-7613-460e-bb4d-be275a41269d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-65b6cccf98-flnsl;K8S_POD_INFRA_CONTAINER_ID=2bf714089818fd6477a262dc7b43a76fa700b53d570bf643af2f365afa9909f2;K8S_POD_UID=d259a06e-3949-41b6-a067-7c01441da4b1" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"be:88:db:96:53:e1\",\"name\":\"2bf714089818fd6\"},{\"mac\":\"0a:58:0a:d9:00:0f\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/3974ea76-7613-460e-bb4d-be275a41269d\"}],\"ips\":[{\"address\":\"10.217.0.15/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.638608199+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"09a0809ecd406bcde0a1ea4cede12fbba5d473a8969cda641566b6406f205a3a" Netns:"/var/run/netns/a04d18d5-afde-4abe-9947-38eab7cb8853" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-dns-operator;K8S_POD_NAME=dns-operator-799b87ffcd-2w9hn;K8S_POD_INFRA_CONTAINER_ID=09a0809ecd406bcde0a1ea4cede12fbba5d473a8969cda641566b6406f205a3a;K8S_POD_UID=e1875478-2fa5-47f4-9c0a-13afc9166e8e" Path:"" 2025-12-12T16:16:43.644486142+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"e2067efe5898138e478d288454b72bfd1c053e7c41955ea58a683e8f80ed626f" Netns:"/var/run/netns/ff87e156-9e6d-409f-abbc-76c902de0950" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-machine-config-operator;K8S_POD_NAME=machine-config-controller-f9cdd68f7-ndnxt;K8S_POD_INFRA_CONTAINER_ID=e2067efe5898138e478d288454b72bfd1c053e7c41955ea58a683e8f80ed626f;K8S_POD_UID=097ff9f3-52cb-4063-a6a1-0c8178adccc9" Path:"" 2025-12-12T16:16:43.668652202+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"60ac5b8dfa3a85aae95f0de2721afb6ca7a3cce575e7ecfe2560293af9d7574f" Netns:"/var/run/netns/b1402ac9-62a1-47db-a893-03b0fb43528a" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-scheduler-operator;K8S_POD_NAME=openshift-kube-scheduler-operator-54f497555d-dcs9d;K8S_POD_INFRA_CONTAINER_ID=60ac5b8dfa3a85aae95f0de2721afb6ca7a3cce575e7ecfe2560293af9d7574f;K8S_POD_UID=60d98f7f-99e4-4bb4-a7b6-48de2ff6071c" Path:"" 2025-12-12T16:16:43.668652202+00:00 stderr F I1212 16:16:43.657049 8298 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.668652202+00:00 stderr F I1212 16:16:43.657367 8298 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.668652202+00:00 stderr F I1212 16:16:43.657388 8298 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.668652202+00:00 stderr F I1212 16:16:43.657396 8298 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.668652202+00:00 stderr F I1212 16:16:43.657404 8298 envvar.go:172] "Feature gate default state" feature="WatchListClient" 
enabled=false 2025-12-12T16:16:43.668899548+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-ingress-canary:ingress-canary-tqcqf:47102097-389c-44ce-a25f-6b8d25a70e1d:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"7de9f74eaf87184","mac":"f6:59:3e:76:5f:60"},{"name":"eth0","mac":"0a:58:0a:d9:00:29","sandbox":"/var/run/netns/337a9e0f-e3ff-40f9-a4e2-d1e9f169bb37"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.41/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.669040432+00:00 stderr F I1212 16:16:43.668956 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-ingress-canary", Name:"ingress-canary-tqcqf", UID:"47102097-389c-44ce-a25f-6b8d25a70e1d", APIVersion:"v1", ResourceVersion:"36778", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.41/23] from ovn-kubernetes 2025-12-12T16:16:43.686688373+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"7de9f74eaf8718433f9098110c5587849d2104fa8fe5544832d4f1d0b4185212" Netns:"/var/run/netns/337a9e0f-e3ff-40f9-a4e2-d1e9f169bb37" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-ingress-canary;K8S_POD_NAME=ingress-canary-tqcqf;K8S_POD_INFRA_CONTAINER_ID=7de9f74eaf8718433f9098110c5587849d2104fa8fe5544832d4f1d0b4185212;K8S_POD_UID=47102097-389c-44ce-a25f-6b8d25a70e1d" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"f6:59:3e:76:5f:60\",\"name\":\"7de9f74eaf87184\"},{\"mac\":\"0a:58:0a:d9:00:29\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/337a9e0f-e3ff-40f9-a4e2-d1e9f169bb37\"}],\"ips\":[{\"address\":\"10.217.0.41/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.708455244+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"1b8886f3552073404488fbfde536fb57d7c18b1397ebdad914354193a33ea0ce" Netns:"/var/run/netns/99f9b8a7-1726-4814-9df6-fdafe8c77d6b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=packageserver-7d4fc7d867-lfwgk;K8S_POD_INFRA_CONTAINER_ID=1b8886f3552073404488fbfde536fb57d7c18b1397ebdad914354193a33ea0ce;K8S_POD_UID=6e354e82-d648-4680-b0c8-e901bfcfbd5f" Path:"" 2025-12-12T16:16:43.733999528+00:00 stderr F I1212 16:16:43.723449 8619 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.733999528+00:00 stderr F I1212 16:16:43.723568 8619 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.733999528+00:00 stderr F I1212 16:16:43.723585 8619 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.733999528+00:00 stderr F I1212 16:16:43.723595 8619 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.733999528+00:00 stderr F I1212 16:16:43.723608 8619 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.737398801+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-multus:multus-admission-controller-69db94689b-xks9x:be106c32-9849-49fd-9e4a-4b5b9c16920a:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"c49e08c7880c954","mac":"2a:9b:31:d4:be:7c"},{"name":"eth0","mac":"0a:58:0a:d9:00:26","sandbox":"/var/run/netns/cb9db6af-231a-4c8f-9ef7-83ea90d4c5f9"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.38/23","gateway":"10.217.0.1"}],"dns":{}} 
2025-12-12T16:16:43.737879033+00:00 stderr F I1212 16:16:43.737823 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-multus", Name:"multus-admission-controller-69db94689b-xks9x", UID:"be106c32-9849-49fd-9e4a-4b5b9c16920a", APIVersion:"v1", ResourceVersion:"36751", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.38/23] from ovn-kubernetes 2025-12-12T16:16:43.755229586+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"c49e08c7880c95445bad61bbd80bc3fa3e3679d96d95853d151678fb7b42e5db" Netns:"/var/run/netns/cb9db6af-231a-4c8f-9ef7-83ea90d4c5f9" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-multus;K8S_POD_NAME=multus-admission-controller-69db94689b-xks9x;K8S_POD_INFRA_CONTAINER_ID=c49e08c7880c95445bad61bbd80bc3fa3e3679d96d95853d151678fb7b42e5db;K8S_POD_UID=be106c32-9849-49fd-9e4a-4b5b9c16920a" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"2a:9b:31:d4:be:7c\",\"name\":\"c49e08c7880c954\"},{\"mac\":\"0a:58:0a:d9:00:26\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/cb9db6af-231a-4c8f-9ef7-83ea90d4c5f9\"}],\"ips\":[{\"address\":\"10.217.0.38/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.769712260+00:00 stderr F I1212 16:16:43.750752 8438 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.769712260+00:00 stderr F I1212 16:16:43.751050 8438 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.769712260+00:00 stderr F I1212 16:16:43.751058 8438 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.769712260+00:00 stderr F I1212 16:16:43.751068 8438 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.769712260+00:00 stderr F I1212 16:16:43.751075 8438 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.770106569+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-marketplace:marketplace-operator-547dbd544d-xpvsb:1de41ef3-7896-4e9c-8201-8174bc4468c4:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"80eb6b504f4b6d2","mac":"ce:4d:e9:ff:8a:39"},{"name":"eth0","mac":"0a:58:0a:d9:00:1c","sandbox":"/var/run/netns/9b75a81c-54ca-4204-bf8f-6df7be735241"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.28/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.770959410+00:00 stderr F I1212 16:16:43.770891 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"marketplace-operator-547dbd544d-xpvsb", UID:"1de41ef3-7896-4e9c-8201-8174bc4468c4", APIVersion:"v1", ResourceVersion:"36747", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.28/23] from ovn-kubernetes 2025-12-12T16:16:43.782212925+00:00 stderr F I1212 16:16:43.774067 8248 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.782212925+00:00 stderr F I1212 16:16:43.774250 8248 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.782212925+00:00 stderr F I1212 16:16:43.774259 8248 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.782212925+00:00 stderr F I1212 16:16:43.774265 8248 envvar.go:172] "Feature gate default state" 
feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.782212925+00:00 stderr F I1212 16:16:43.774271 8248 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.784739767+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-machine-api:control-plane-machine-set-operator-75ffdb6fcd-m8gw7:9c49153e-af72-4d2f-8184-fa7ba43a5a3e:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"ee8088f28b3197c","mac":"c6:71:df:c9:7d:f9"},{"name":"eth0","mac":"0a:58:0a:d9:00:17","sandbox":"/var/run/netns/dd757d13-cde8-40ee-97ce-773ddab76a40"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.23/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.785004713+00:00 stderr F I1212 16:16:43.784962 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-machine-api", Name:"control-plane-machine-set-operator-75ffdb6fcd-m8gw7", UID:"9c49153e-af72-4d2f-8184-fa7ba43a5a3e", APIVersion:"v1", ResourceVersion:"36741", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.23/23] from ovn-kubernetes 2025-12-12T16:16:43.789786280+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"80eb6b504f4b6d215a1fdd56503837348aea4e832c71cca42b9c33074674fdba" Netns:"/var/run/netns/9b75a81c-54ca-4204-bf8f-6df7be735241" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-547dbd544d-xpvsb;K8S_POD_INFRA_CONTAINER_ID=80eb6b504f4b6d215a1fdd56503837348aea4e832c71cca42b9c33074674fdba;K8S_POD_UID=1de41ef3-7896-4e9c-8201-8174bc4468c4" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"ce:4d:e9:ff:8a:39\",\"name\":\"80eb6b504f4b6d2\"},{\"mac\":\"0a:58:0a:d9:00:1c\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/9b75a81c-54ca-4204-bf8f-6df7be735241\"}],\"ips\":[{\"address\":\"10.217.0.28/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.807233046+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"ee8088f28b3197cc5469945413422c389a49ba5ba0a440f3aa89c9ac372e7839" Netns:"/var/run/netns/dd757d13-cde8-40ee-97ce-773ddab76a40" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-machine-api;K8S_POD_NAME=control-plane-machine-set-operator-75ffdb6fcd-m8gw7;K8S_POD_INFRA_CONTAINER_ID=ee8088f28b3197cc5469945413422c389a49ba5ba0a440f3aa89c9ac372e7839;K8S_POD_UID=9c49153e-af72-4d2f-8184-fa7ba43a5a3e" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"c6:71:df:c9:7d:f9\",\"name\":\"ee8088f28b3197c\"},{\"mac\":\"0a:58:0a:d9:00:17\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/dd757d13-cde8-40ee-97ce-773ddab76a40\"}],\"ips\":[{\"address\":\"10.217.0.23/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.807233046+00:00 stderr F I1212 16:16:43.772391 8243 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.807233046+00:00 stderr F I1212 16:16:43.772522 8243 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.807233046+00:00 stderr F I1212 16:16:43.772534 8243 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.807233046+00:00 stderr F I1212 16:16:43.772541 8243 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.807233046+00:00 stderr F I1212 
16:16:43.772547 8243 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.807233046+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-authentication:oauth-openshift-66458b6674-brfdj:e13eeec0-72dd-418b-9180-87ca0d56870d:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"63d4f7893d2a6e5","mac":"e2:8d:e1:fd:b8:86"},{"name":"eth0","mac":"0a:58:0a:d9:00:09","sandbox":"/var/run/netns/60c60576-fd2a-4668-b2a8-885a179c5e34"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.9/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.807233046+00:00 stderr F I1212 16:16:43.805322 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-authentication", Name:"oauth-openshift-66458b6674-brfdj", UID:"e13eeec0-72dd-418b-9180-87ca0d56870d", APIVersion:"v1", ResourceVersion:"36728", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.9/23] from ovn-kubernetes 2025-12-12T16:16:43.816836110+00:00 stderr F I1212 16:16:43.807997 8414 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.816836110+00:00 stderr F I1212 16:16:43.808117 8414 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.816836110+00:00 stderr F I1212 16:16:43.808124 8414 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.816836110+00:00 stderr F I1212 16:16:43.808129 8414 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.816836110+00:00 stderr F I1212 16:16:43.808135 8414 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.817267491+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-operator-lifecycle-manager:olm-operator-5cdf44d969-kcw92:124ec2f9-0e23-47da-b25f-66a13947465e:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"821a1e5c7483c03","mac":"16:f1:0f:f7:66:38"},{"name":"eth0","mac":"0a:58:0a:d9:00:15","sandbox":"/var/run/netns/9a3edcb5-2cf5-47c6-9a48-e7376a15b588"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.21/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.817730932+00:00 stderr F I1212 16:16:43.817679 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-operator-lifecycle-manager", Name:"olm-operator-5cdf44d969-kcw92", UID:"124ec2f9-0e23-47da-b25f-66a13947465e", APIVersion:"v1", ResourceVersion:"36731", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.21/23] from ovn-kubernetes 2025-12-12T16:16:43.823625146+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"63d4f7893d2a6e51680e692730931a8e2db49032b3b5feb5b320f7d42af3e4ba" Netns:"/var/run/netns/60c60576-fd2a-4668-b2a8-885a179c5e34" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-66458b6674-brfdj;K8S_POD_INFRA_CONTAINER_ID=63d4f7893d2a6e51680e692730931a8e2db49032b3b5feb5b320f7d42af3e4ba;K8S_POD_UID=e13eeec0-72dd-418b-9180-87ca0d56870d" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"e2:8d:e1:fd:b8:86\",\"name\":\"63d4f7893d2a6e5\"},{\"mac\":\"0a:58:0a:d9:00:09\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/60c60576-fd2a-4668-b2a8-885a179c5e34\"}],\"ips\":[{\"address\":\"10.217.0.9/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", 
err: 2025-12-12T16:16:43.829935180+00:00 stderr P 2025-12-12T16:16:43Z [verbose] 2025-12-12T16:16:43.829955000+00:00 stderr F ADD finished CNI request ContainerID:"821a1e5c7483c039e70cdf4bd3be662cd8906606bd0abbb72d4d89a53f25635c" Netns:"/var/run/netns/9a3edcb5-2cf5-47c6-9a48-e7376a15b588" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=olm-operator-5cdf44d969-kcw92;K8S_POD_INFRA_CONTAINER_ID=821a1e5c7483c039e70cdf4bd3be662cd8906606bd0abbb72d4d89a53f25635c;K8S_POD_UID=124ec2f9-0e23-47da-b25f-66a13947465e" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"16:f1:0f:f7:66:38\",\"name\":\"821a1e5c7483c03\"},{\"mac\":\"0a:58:0a:d9:00:15\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/9a3edcb5-2cf5-47c6-9a48-e7376a15b588\"}],\"ips\":[{\"address\":\"10.217.0.21/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.853641079+00:00 stderr F I1212 16:16:43.830964 8484 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.853641079+00:00 stderr F I1212 16:16:43.831105 8484 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.853641079+00:00 stderr F I1212 16:16:43.831115 8484 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.853641079+00:00 stderr F I1212 16:16:43.831123 8484 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.853641079+00:00 stderr F I1212 16:16:43.831130 8484 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.853641079+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: openshift-ingress-operator:ingress-operator-6b9cb4dbcf-5twrv:338f89a1-1c2f-4e37-9572-c5b13d682ca9:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"46360fc83f1cc22","mac":"76:71:7b:70:9e:2d"},{"name":"eth0","mac":"0a:58:0a:d9:00:1d","sandbox":"/var/run/netns/d4fa20e8-c6d3-41c0-9d9c-67a3f80ced61"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.29/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.853641079+00:00 stderr F I1212 16:16:43.851036 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-ingress-operator", Name:"ingress-operator-6b9cb4dbcf-5twrv", UID:"338f89a1-1c2f-4e37-9572-c5b13d682ca9", APIVersion:"v1", ResourceVersion:"36755", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.29/23] from ovn-kubernetes 2025-12-12T16:16:43.864585566+00:00 stderr F I1212 16:16:43.850472 8526 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:43.864585566+00:00 stderr F I1212 16:16:43.850865 8526 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:43.864585566+00:00 stderr F I1212 16:16:43.850873 8526 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:43.864585566+00:00 stderr F I1212 16:16:43.850879 8526 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:43.864585566+00:00 stderr F I1212 16:16:43.850885 8526 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:43.864841662+00:00 stderr F 2025-12-12T16:16:43Z [verbose] Add: 
openshift-console:downloads-747b44746d-sm46g:f967d508-b683-4df4-9be0-3a7fb5afa7bb:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"fdca212b909c7f7","mac":"f6:c5:e5:a8:0d:df"},{"name":"eth0","mac":"0a:58:0a:d9:00:12","sandbox":"/var/run/netns/fe01bcb7-c970-4c2a-ad34-195f57f74d32"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.18/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:43.871417353+00:00 stderr F I1212 16:16:43.870879 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-console", Name:"downloads-747b44746d-sm46g", UID:"f967d508-b683-4df4-9be0-3a7fb5afa7bb", APIVersion:"v1", ResourceVersion:"36736", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.18/23] from ovn-kubernetes 2025-12-12T16:16:43.871417353+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"22cad0b592117b6ba03b75b60b8f5302b6ad18f85483bc9c43e7174f7395c192" Netns:"/var/run/netns/74634fee-9e79-4fa0-948c-f2fcef0f4511" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-storage-version-migrator;K8S_POD_NAME=migrator-866fcbc849-6mhsj;K8S_POD_INFRA_CONTAINER_ID=22cad0b592117b6ba03b75b60b8f5302b6ad18f85483bc9c43e7174f7395c192;K8S_POD_UID=2403b973-68b3-4a15-a444-7e271aea91c1" Path:"" 2025-12-12T16:16:43.878192718+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"46360fc83f1cc220589b13a8c27ffd0f5770b5b67075b08286b1c9ec648960cd" Netns:"/var/run/netns/d4fa20e8-c6d3-41c0-9d9c-67a3f80ced61" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-ingress-operator;K8S_POD_NAME=ingress-operator-6b9cb4dbcf-5twrv;K8S_POD_INFRA_CONTAINER_ID=46360fc83f1cc220589b13a8c27ffd0f5770b5b67075b08286b1c9ec648960cd;K8S_POD_UID=338f89a1-1c2f-4e37-9572-c5b13d682ca9" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"76:71:7b:70:9e:2d\",\"name\":\"46360fc83f1cc22\"},{\"mac\":\"0a:58:0a:d9:00:1d\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/d4fa20e8-c6d3-41c0-9d9c-67a3f80ced61\"}],\"ips\":[{\"address\":\"10.217.0.29/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.888374497+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"7d7a7ab3b09b90e07fa8f45335ab5037ab6ebd27daf1316e35a98496a30a938f" Netns:"/var/run/netns/b6d65604-689b-4dcc-aec0-9b38ecf245f0" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=hostpath-provisioner;K8S_POD_NAME=csi-hostpathplugin-59hhc;K8S_POD_INFRA_CONTAINER_ID=7d7a7ab3b09b90e07fa8f45335ab5037ab6ebd27daf1316e35a98496a30a938f;K8S_POD_UID=e0adb788-edae-4099-900e-8af998a81f87" Path:"" 2025-12-12T16:16:43.901239911+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD finished CNI request ContainerID:"fdca212b909c7f735bc64f2733588572386b0252f8ee3ee0ccb2ee9c6af3fae7" Netns:"/var/run/netns/fe01bcb7-c970-4c2a-ad34-195f57f74d32" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-console;K8S_POD_NAME=downloads-747b44746d-sm46g;K8S_POD_INFRA_CONTAINER_ID=fdca212b909c7f735bc64f2733588572386b0252f8ee3ee0ccb2ee9c6af3fae7;K8S_POD_UID=f967d508-b683-4df4-9be0-3a7fb5afa7bb" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"f6:c5:e5:a8:0d:df\",\"name\":\"fdca212b909c7f7\"},{\"mac\":\"0a:58:0a:d9:00:12\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/fe01bcb7-c970-4c2a-ad34-195f57f74d32\"}],\"ips\":[{\"address\":\"10.217.0.18/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:43.914439173+00:00 
stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"6a1b62d609b3e5420485ba99a5b2f09e53a6d758231e9a8c18d91b6f411c606a" Netns:"/var/run/netns/c37f5da0-8436-48f8-aae7-e9b3947f0faf" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-dns;K8S_POD_NAME=dns-default-rl44g;K8S_POD_INFRA_CONTAINER_ID=6a1b62d609b3e5420485ba99a5b2f09e53a6d758231e9a8c18d91b6f411c606a;K8S_POD_UID=9dc06dad-6486-4dd5-9456-40ce964abc7f" Path:"" 2025-12-12T16:16:43.970891291+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"9151190c168a5118b524699b9ef9f7265e6266487898dc6b740d348f2d538032" Netns:"/var/run/netns/8250334f-385b-4e19-89d9-e0bcfb0784bc" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-config-operator;K8S_POD_NAME=openshift-config-operator-5777786469-49zmj;K8S_POD_INFRA_CONTAINER_ID=9151190c168a5118b524699b9ef9f7265e6266487898dc6b740d348f2d538032;K8S_POD_UID=eb351b5c-811a-4e79-ace2-5d78737aef4c" Path:"" 2025-12-12T16:16:43.988646065+00:00 stderr F 2025-12-12T16:16:43Z [verbose] ADD starting CNI request ContainerID:"158cfb1d84690be2e4cd14b1137db76e8e77421e894f66629f65967812dc332a" Netns:"/var/run/netns/da578760-57ba-46cf-a756-c1decd3d1ffd" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=catalog-operator-75ff9f647d-4v9cj;K8S_POD_INFRA_CONTAINER_ID=158cfb1d84690be2e4cd14b1137db76e8e77421e894f66629f65967812dc332a;K8S_POD_UID=5a94df8d-2607-41a1-b1f9-21016895dcd6" Path:"" 2025-12-12T16:16:44.010255362+00:00 stderr F I1212 16:16:44.001122 8615 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.010255362+00:00 stderr F I1212 16:16:44.001438 8615 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.010255362+00:00 stderr F I1212 16:16:44.001445 8615 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:44.010255362+00:00 stderr F I1212 16:16:44.001450 8615 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.010255362+00:00 stderr F I1212 16:16:44.001456 8615 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.010255362+00:00 stderr F 2025-12-12T16:16:44Z [verbose] Add: openshift-etcd-operator:etcd-operator-69b85846b6-mrrt5:a6c070b2-83ee-4c73-9201-3ab5dcc9aeca:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"0702298b930d9df","mac":"7e:3c:9d:4b:11:c8"},{"name":"eth0","mac":"0a:58:0a:d9:00:27","sandbox":"/var/run/netns/84768e52-fe57-4c71-81de-46deb9472742"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.39/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:44.010255362+00:00 stderr F I1212 16:16:44.010198 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-etcd-operator", Name:"etcd-operator-69b85846b6-mrrt5", UID:"a6c070b2-83ee-4c73-9201-3ab5dcc9aeca", APIVersion:"v1", ResourceVersion:"36761", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.39/23] from ovn-kubernetes 2025-12-12T16:16:44.017529740+00:00 stderr F I1212 16:16:44.008086 8595 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.017529740+00:00 stderr F I1212 16:16:44.008258 8595 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.017529740+00:00 stderr F I1212 
16:16:44.008269 8595 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.017529740+00:00 stderr F I1212 16:16:44.008277 8595 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:44.017529740+00:00 stderr F I1212 16:16:44.008284 8595 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.017808937+00:00 stderr F 2025-12-12T16:16:44Z [verbose] Add: openshift-kube-storage-version-migrator-operator:kube-storage-version-migrator-operator-565b79b866-krgxf:dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"d0489ccdfa99a6b","mac":"6a:8f:72:42:53:97"},{"name":"eth0","mac":"0a:58:0a:d9:00:11","sandbox":"/var/run/netns/bdb7ad24-ca88-4d88-bdf6-75139ebea202"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.17/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:44.018030452+00:00 stderr F I1212 16:16:44.017990 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-storage-version-migrator-operator", Name:"kube-storage-version-migrator-operator-565b79b866-krgxf", UID:"dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7", APIVersion:"v1", ResourceVersion:"36737", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.17/23] from ovn-kubernetes 2025-12-12T16:16:44.037704143+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD finished CNI request ContainerID:"0702298b930d9df41b5167fe6b3a06e9d2f7dd988ab81b6dc47a3b0d81221bce" Netns:"/var/run/netns/84768e52-fe57-4c71-81de-46deb9472742" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-etcd-operator;K8S_POD_NAME=etcd-operator-69b85846b6-mrrt5;K8S_POD_INFRA_CONTAINER_ID=0702298b930d9df41b5167fe6b3a06e9d2f7dd988ab81b6dc47a3b0d81221bce;K8S_POD_UID=a6c070b2-83ee-4c73-9201-3ab5dcc9aeca" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"7e:3c:9d:4b:11:c8\",\"name\":\"0702298b930d9df\"},{\"mac\":\"0a:58:0a:d9:00:27\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/84768e52-fe57-4c71-81de-46deb9472742\"}],\"ips\":[{\"address\":\"10.217.0.39/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:44.044975970+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD finished CNI request ContainerID:"d0489ccdfa99a6b99c7ac3a0e870ca5488db2ca428180aa64741de104ff8555e" Netns:"/var/run/netns/bdb7ad24-ca88-4d88-bdf6-75139ebea202" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-storage-version-migrator-operator;K8S_POD_NAME=kube-storage-version-migrator-operator-565b79b866-krgxf;K8S_POD_INFRA_CONTAINER_ID=d0489ccdfa99a6b99c7ac3a0e870ca5488db2ca428180aa64741de104ff8555e;K8S_POD_UID=dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"6a:8f:72:42:53:97\",\"name\":\"d0489ccdfa99a6b\"},{\"mac\":\"0a:58:0a:d9:00:11\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/bdb7ad24-ca88-4d88-bdf6-75139ebea202\"}],\"ips\":[{\"address\":\"10.217.0.17/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:44.196534480+00:00 stderr F I1212 16:16:44.190077 8724 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.196534480+00:00 stderr F I1212 16:16:44.190408 8724 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:44.196534480+00:00 stderr F I1212 16:16:44.190426 
8724 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.196534480+00:00 stderr F I1212 16:16:44.190435 8724 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.196534480+00:00 stderr F I1212 16:16:44.190442 8724 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.197779781+00:00 stderr F 2025-12-12T16:16:44Z [verbose] Add: openshift-dns-operator:dns-operator-799b87ffcd-2w9hn:e1875478-2fa5-47f4-9c0a-13afc9166e8e:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"09a0809ecd406bc","mac":"2a:ad:17:5d:78:6e"},{"name":"eth0","mac":"0a:58:0a:d9:00:18","sandbox":"/var/run/netns/a04d18d5-afde-4abe-9947-38eab7cb8853"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.24/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:44.197779781+00:00 stderr F I1212 16:16:44.197259 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-dns-operator", Name:"dns-operator-799b87ffcd-2w9hn", UID:"e1875478-2fa5-47f4-9c0a-13afc9166e8e", APIVersion:"v1", ResourceVersion:"36743", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.24/23] from ovn-kubernetes 2025-12-12T16:16:44.211702581+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD finished CNI request ContainerID:"09a0809ecd406bcde0a1ea4cede12fbba5d473a8969cda641566b6406f205a3a" Netns:"/var/run/netns/a04d18d5-afde-4abe-9947-38eab7cb8853" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-dns-operator;K8S_POD_NAME=dns-operator-799b87ffcd-2w9hn;K8S_POD_INFRA_CONTAINER_ID=09a0809ecd406bcde0a1ea4cede12fbba5d473a8969cda641566b6406f205a3a;K8S_POD_UID=e1875478-2fa5-47f4-9c0a-13afc9166e8e" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"2a:ad:17:5d:78:6e\",\"name\":\"09a0809ecd406bc\"},{\"mac\":\"0a:58:0a:d9:00:18\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/a04d18d5-afde-4abe-9947-38eab7cb8853\"}],\"ips\":[{\"address\":\"10.217.0.24/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:44.216091358+00:00 stderr F I1212 16:16:44.206290 8730 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:44.216091358+00:00 stderr F I1212 16:16:44.206580 8730 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.216091358+00:00 stderr F I1212 16:16:44.206596 8730 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.216091358+00:00 stderr F I1212 16:16:44.206602 8730 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.216091358+00:00 stderr F I1212 16:16:44.206609 8730 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.216479147+00:00 stderr F 2025-12-12T16:16:44Z [verbose] Add: openshift-machine-config-operator:machine-config-controller-f9cdd68f7-ndnxt:097ff9f3-52cb-4063-a6a1-0c8178adccc9:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"e2067efe5898138","mac":"9e:18:ea:9c:3e:d3"},{"name":"eth0","mac":"0a:58:0a:d9:00:1b","sandbox":"/var/run/netns/ff87e156-9e6d-409f-abbc-76c902de0950"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.27/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:44.218510037+00:00 stderr F I1212 16:16:44.216890 6507 event.go:364] 
Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-machine-config-operator", Name:"machine-config-controller-f9cdd68f7-ndnxt", UID:"097ff9f3-52cb-4063-a6a1-0c8178adccc9", APIVersion:"v1", ResourceVersion:"36748", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.27/23] from ovn-kubernetes 2025-12-12T16:16:44.237516881+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD finished CNI request ContainerID:"e2067efe5898138e478d288454b72bfd1c053e7c41955ea58a683e8f80ed626f" Netns:"/var/run/netns/ff87e156-9e6d-409f-abbc-76c902de0950" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-machine-config-operator;K8S_POD_NAME=machine-config-controller-f9cdd68f7-ndnxt;K8S_POD_INFRA_CONTAINER_ID=e2067efe5898138e478d288454b72bfd1c053e7c41955ea58a683e8f80ed626f;K8S_POD_UID=097ff9f3-52cb-4063-a6a1-0c8178adccc9" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"9e:18:ea:9c:3e:d3\",\"name\":\"e2067efe5898138\"},{\"mac\":\"0a:58:0a:d9:00:1b\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/ff87e156-9e6d-409f-abbc-76c902de0950\"}],\"ips\":[{\"address\":\"10.217.0.27/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:44.262976543+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD starting CNI request ContainerID:"c8a5496c4a2472a0beb924c848fd9bb5edee60a3fd69c13df19d2b01d3a9ec7a" Netns:"/var/run/netns/b1cf10f9-daeb-489d-8724-db46eda0e4e1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-apiserver;K8S_POD_NAME=apiserver-9ddfb9f55-sg8rq;K8S_POD_INFRA_CONTAINER_ID=c8a5496c4a2472a0beb924c848fd9bb5edee60a3fd69c13df19d2b01d3a9ec7a;K8S_POD_UID=693e66ed-f826-4819-a47d-f32faf9dab96" Path:"" 2025-12-12T16:16:44.271775117+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD starting CNI request ContainerID:"b300a9516c529128a4338eca3e85a4bc7c3e16956b5d84d622809c31a00f651b" Netns:"/var/run/netns/d15b219f-1af2-40a0-a4f5-2dd5cf3fcd1f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=package-server-manager-77f986bd66-mjzlp;K8S_POD_INFRA_CONTAINER_ID=b300a9516c529128a4338eca3e85a4bc7c3e16956b5d84d622809c31a00f651b;K8S_POD_UID=00c7f3b3-f4dd-4d19-9739-512a35f436f5" Path:"" 2025-12-12T16:16:44.332821388+00:00 stderr F I1212 16:16:44.323785 8752 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.332821388+00:00 stderr F I1212 16:16:44.324100 8752 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.332821388+00:00 stderr F I1212 16:16:44.324110 8752 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.332821388+00:00 stderr F I1212 16:16:44.324118 8752 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.332821388+00:00 stderr F I1212 16:16:44.324125 8752 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:44.333232848+00:00 stderr F 2025-12-12T16:16:44Z [verbose] Add: openshift-kube-scheduler-operator:openshift-kube-scheduler-operator-54f497555d-dcs9d:60d98f7f-99e4-4bb4-a7b6-48de2ff6071c:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"60ac5b8dfa3a85a","mac":"a2:a7:51:79:b8:aa"},{"name":"eth0","mac":"0a:58:0a:d9:00:22","sandbox":"/var/run/netns/b1402ac9-62a1-47db-a893-03b0fb43528a"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.34/23","gateway":"10.217.0.1"}],"dns":{}} 
2025-12-12T16:16:44.333760581+00:00 stderr F I1212 16:16:44.333553 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-scheduler-operator", Name:"openshift-kube-scheduler-operator-54f497555d-dcs9d", UID:"60d98f7f-99e4-4bb4-a7b6-48de2ff6071c", APIVersion:"v1", ResourceVersion:"36756", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.34/23] from ovn-kubernetes 2025-12-12T16:16:44.341654263+00:00 stderr F I1212 16:16:44.327726 8785 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.341654263+00:00 stderr F I1212 16:16:44.327856 8785 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.341654263+00:00 stderr F I1212 16:16:44.327866 8785 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.341654263+00:00 stderr F I1212 16:16:44.327873 8785 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.341654263+00:00 stderr F I1212 16:16:44.327881 8785 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:44.341654263+00:00 stderr F 2025-12-12T16:16:44Z [verbose] Add: openshift-operator-lifecycle-manager:packageserver-7d4fc7d867-lfwgk:6e354e82-d648-4680-b0c8-e901bfcfbd5f:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"1b8886f35520734","mac":"d2:f3:a8:0e:04:1f"},{"name":"eth0","mac":"0a:58:0a:d9:00:25","sandbox":"/var/run/netns/99f9b8a7-1726-4814-9df6-fdafe8c77d6b"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.37/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:44.341654263+00:00 stderr F I1212 16:16:44.340835 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-operator-lifecycle-manager", Name:"packageserver-7d4fc7d867-lfwgk", UID:"6e354e82-d648-4680-b0c8-e901bfcfbd5f", APIVersion:"v1", ResourceVersion:"36753", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.37/23] from ovn-kubernetes 2025-12-12T16:16:44.359275454+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD finished CNI request ContainerID:"60ac5b8dfa3a85aae95f0de2721afb6ca7a3cce575e7ecfe2560293af9d7574f" Netns:"/var/run/netns/b1402ac9-62a1-47db-a893-03b0fb43528a" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-scheduler-operator;K8S_POD_NAME=openshift-kube-scheduler-operator-54f497555d-dcs9d;K8S_POD_INFRA_CONTAINER_ID=60ac5b8dfa3a85aae95f0de2721afb6ca7a3cce575e7ecfe2560293af9d7574f;K8S_POD_UID=60d98f7f-99e4-4bb4-a7b6-48de2ff6071c" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"a2:a7:51:79:b8:aa\",\"name\":\"60ac5b8dfa3a85a\"},{\"mac\":\"0a:58:0a:d9:00:22\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/b1402ac9-62a1-47db-a893-03b0fb43528a\"}],\"ips\":[{\"address\":\"10.217.0.34/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:44.359511799+00:00 stderr P 2025-12-12T16:16:44Z [verbose] 2025-12-12T16:16:44.359546300+00:00 stderr P ADD finished CNI request ContainerID:"1b8886f3552073404488fbfde536fb57d7c18b1397ebdad914354193a33ea0ce" Netns:"/var/run/netns/99f9b8a7-1726-4814-9df6-fdafe8c77d6b" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=packageserver-7d4fc7d867-lfwgk;K8S_POD_INFRA_CONTAINER_ID=1b8886f3552073404488fbfde536fb57d7c18b1397ebdad914354193a33ea0ce;K8S_POD_UID=6e354e82-d648-4680-b0c8-e901bfcfbd5f" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"d2:f3:a8:0e:04:1f\",\"name\":\"1b8886f35520734\"},{\"mac\":\"0a:58:0a:d9:00:25\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/99f9b8a7-1726-4814-9df6-fdafe8c77d6b\"}],\"ips\":[{\"address\":\"10.217.0.37/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:44.359569161+00:00 stderr F 2025-12-12T16:16:44.456163809+00:00 stderr F I1212 16:16:44.448369 8925 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.456163809+00:00 stderr F I1212 16:16:44.448639 8925 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.456163809+00:00 stderr F I1212 16:16:44.448651 8925 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.456163809+00:00 stderr F I1212 16:16:44.448660 8925 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:44.456163809+00:00 stderr F I1212 16:16:44.448669 8925 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.458065666+00:00 stderr F 2025-12-12T16:16:44Z [verbose] Add: openshift-config-operator:openshift-config-operator-5777786469-49zmj:eb351b5c-811a-4e79-ace2-5d78737aef4c:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"9151190c168a511","mac":"52:0f:ba:c5:9c:5a"},{"name":"eth0","mac":"0a:58:0a:d9:00:24","sandbox":"/var/run/netns/8250334f-385b-4e19-89d9-e0bcfb0784bc"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.36/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:44.458065666+00:00 stderr F I1212 16:16:44.457284 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-config-operator", Name:"openshift-config-operator-5777786469-49zmj", UID:"eb351b5c-811a-4e79-ace2-5d78737aef4c", APIVersion:"v1", ResourceVersion:"36752", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.36/23] from ovn-kubernetes 2025-12-12T16:16:44.474072916+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD finished CNI request ContainerID:"9151190c168a5118b524699b9ef9f7265e6266487898dc6b740d348f2d538032" Netns:"/var/run/netns/8250334f-385b-4e19-89d9-e0bcfb0784bc" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-config-operator;K8S_POD_NAME=openshift-config-operator-5777786469-49zmj;K8S_POD_INFRA_CONTAINER_ID=9151190c168a5118b524699b9ef9f7265e6266487898dc6b740d348f2d538032;K8S_POD_UID=eb351b5c-811a-4e79-ace2-5d78737aef4c" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"52:0f:ba:c5:9c:5a\",\"name\":\"9151190c168a511\"},{\"mac\":\"0a:58:0a:d9:00:24\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/8250334f-385b-4e19-89d9-e0bcfb0784bc\"}],\"ips\":[{\"address\":\"10.217.0.36/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:44.501279621+00:00 stderr F I1212 16:16:44.491986 8941 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.501279621+00:00 stderr F I1212 16:16:44.492103 8941 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 
2025-12-12T16:16:44.501279621+00:00 stderr F I1212 16:16:44.492111 8941 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.501279621+00:00 stderr F I1212 16:16:44.492117 8941 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.501279621+00:00 stderr F I1212 16:16:44.492123 8941 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.501279621+00:00 stderr F 2025-12-12T16:16:44Z [verbose] Add: openshift-operator-lifecycle-manager:catalog-operator-75ff9f647d-4v9cj:5a94df8d-2607-41a1-b1f9-21016895dcd6:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"158cfb1d84690be","mac":"96:c0:c9:89:8e:97"},{"name":"eth0","mac":"0a:58:0a:d9:00:10","sandbox":"/var/run/netns/da578760-57ba-46cf-a756-c1decd3d1ffd"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.16/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:44.502428509+00:00 stderr F I1212 16:16:44.501117 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-operator-lifecycle-manager", Name:"catalog-operator-75ff9f647d-4v9cj", UID:"5a94df8d-2607-41a1-b1f9-21016895dcd6", APIVersion:"v1", ResourceVersion:"36926", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.16/23] from ovn-kubernetes 2025-12-12T16:16:44.513925849+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD finished CNI request ContainerID:"158cfb1d84690be2e4cd14b1137db76e8e77421e894f66629f65967812dc332a" Netns:"/var/run/netns/da578760-57ba-46cf-a756-c1decd3d1ffd" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=catalog-operator-75ff9f647d-4v9cj;K8S_POD_INFRA_CONTAINER_ID=158cfb1d84690be2e4cd14b1137db76e8e77421e894f66629f65967812dc332a;K8S_POD_UID=5a94df8d-2607-41a1-b1f9-21016895dcd6" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"96:c0:c9:89:8e:97\",\"name\":\"158cfb1d84690be\"},{\"mac\":\"0a:58:0a:d9:00:10\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/da578760-57ba-46cf-a756-c1decd3d1ffd\"}],\"ips\":[{\"address\":\"10.217.0.16/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:44.564811162+00:00 stderr F I1212 16:16:44.545302 8865 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:44.564811162+00:00 stderr F I1212 16:16:44.545412 8865 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.564811162+00:00 stderr F I1212 16:16:44.545420 8865 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.564811162+00:00 stderr F I1212 16:16:44.545426 8865 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.564811162+00:00 stderr F I1212 16:16:44.545433 8865 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.564811162+00:00 stderr F 2025-12-12T16:16:44Z [verbose] Add: hostpath-provisioner:csi-hostpathplugin-59hhc:e0adb788-edae-4099-900e-8af998a81f87:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"7d7a7ab3b09b90e","mac":"7a:93:31:72:bd:c9"},{"name":"eth0","mac":"0a:58:0a:d9:00:2a","sandbox":"/var/run/netns/b6d65604-689b-4dcc-aec0-9b38ecf245f0"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.42/23","gateway":"10.217.0.1"}],"dns":{}} 
2025-12-12T16:16:44.564811162+00:00 stderr F I1212 16:16:44.556932 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"hostpath-provisioner", Name:"csi-hostpathplugin-59hhc", UID:"e0adb788-edae-4099-900e-8af998a81f87", APIVersion:"v1", ResourceVersion:"36782", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.42/23] from ovn-kubernetes 2025-12-12T16:16:44.568223195+00:00 stderr F I1212 16:16:44.554737 8886 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.568223195+00:00 stderr F I1212 16:16:44.554954 8886 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.568223195+00:00 stderr F I1212 16:16:44.554962 8886 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.568223195+00:00 stderr F I1212 16:16:44.554968 8886 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.568223195+00:00 stderr F I1212 16:16:44.554975 8886 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:44.568683696+00:00 stderr F 2025-12-12T16:16:44Z [verbose] Add: openshift-dns:dns-default-rl44g:9dc06dad-6486-4dd5-9456-40ce964abc7f:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"6a1b62d609b3e54","mac":"22:d0:9c:1e:4d:42"},{"name":"eth0","mac":"0a:58:0a:d9:00:2b","sandbox":"/var/run/netns/c37f5da0-8436-48f8-aae7-e9b3947f0faf"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.43/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:44.569086236+00:00 stderr F I1212 16:16:44.569004 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-dns", Name:"dns-default-rl44g", UID:"9dc06dad-6486-4dd5-9456-40ce964abc7f", APIVersion:"v1", ResourceVersion:"36783", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.43/23] from ovn-kubernetes 2025-12-12T16:16:44.570122061+00:00 stderr F I1212 16:16:44.558145 8848 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.570122061+00:00 stderr F I1212 16:16:44.558268 8848 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.570122061+00:00 stderr F I1212 16:16:44.558290 8848 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.570122061+00:00 stderr F I1212 16:16:44.558299 8848 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:44.570122061+00:00 stderr F I1212 16:16:44.558305 8848 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.592566899+00:00 stderr F 2025-12-12T16:16:44Z [verbose] Add: openshift-kube-storage-version-migrator:migrator-866fcbc849-6mhsj:2403b973-68b3-4a15-a444-7e271aea91c1:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"22cad0b592117b6","mac":"02:7b:73:c2:b7:c5"},{"name":"eth0","mac":"0a:58:0a:d9:00:1a","sandbox":"/var/run/netns/74634fee-9e79-4fa0-948c-f2fcef0f4511"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.26/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:44.592566899+00:00 stderr F I1212 16:16:44.572036 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-storage-version-migrator", Name:"migrator-866fcbc849-6mhsj", 
UID:"2403b973-68b3-4a15-a444-7e271aea91c1", APIVersion:"v1", ResourceVersion:"36746", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.26/23] from ovn-kubernetes 2025-12-12T16:16:44.592566899+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD finished CNI request ContainerID:"7d7a7ab3b09b90e07fa8f45335ab5037ab6ebd27daf1316e35a98496a30a938f" Netns:"/var/run/netns/b6d65604-689b-4dcc-aec0-9b38ecf245f0" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=hostpath-provisioner;K8S_POD_NAME=csi-hostpathplugin-59hhc;K8S_POD_INFRA_CONTAINER_ID=7d7a7ab3b09b90e07fa8f45335ab5037ab6ebd27daf1316e35a98496a30a938f;K8S_POD_UID=e0adb788-edae-4099-900e-8af998a81f87" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"7a:93:31:72:bd:c9\",\"name\":\"7d7a7ab3b09b90e\"},{\"mac\":\"0a:58:0a:d9:00:2a\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/b6d65604-689b-4dcc-aec0-9b38ecf245f0\"}],\"ips\":[{\"address\":\"10.217.0.42/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:44.592566899+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD finished CNI request ContainerID:"22cad0b592117b6ba03b75b60b8f5302b6ad18f85483bc9c43e7174f7395c192" Netns:"/var/run/netns/74634fee-9e79-4fa0-948c-f2fcef0f4511" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-storage-version-migrator;K8S_POD_NAME=migrator-866fcbc849-6mhsj;K8S_POD_INFRA_CONTAINER_ID=22cad0b592117b6ba03b75b60b8f5302b6ad18f85483bc9c43e7174f7395c192;K8S_POD_UID=2403b973-68b3-4a15-a444-7e271aea91c1" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"02:7b:73:c2:b7:c5\",\"name\":\"22cad0b592117b6\"},{\"mac\":\"0a:58:0a:d9:00:1a\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/74634fee-9e79-4fa0-948c-f2fcef0f4511\"}],\"ips\":[{\"address\":\"10.217.0.26/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:44.592566899+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD finished CNI request ContainerID:"6a1b62d609b3e5420485ba99a5b2f09e53a6d758231e9a8c18d91b6f411c606a" Netns:"/var/run/netns/c37f5da0-8436-48f8-aae7-e9b3947f0faf" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-dns;K8S_POD_NAME=dns-default-rl44g;K8S_POD_INFRA_CONTAINER_ID=6a1b62d609b3e5420485ba99a5b2f09e53a6d758231e9a8c18d91b6f411c606a;K8S_POD_UID=9dc06dad-6486-4dd5-9456-40ce964abc7f" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"22:d0:9c:1e:4d:42\",\"name\":\"6a1b62d609b3e54\"},{\"mac\":\"0a:58:0a:d9:00:2b\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/c37f5da0-8436-48f8-aae7-e9b3947f0faf\"}],\"ips\":[{\"address\":\"10.217.0.43/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:44.600402461+00:00 stderr F I1212 16:16:44.589992 9074 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:44.600402461+00:00 stderr F I1212 16:16:44.590287 9074 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.600402461+00:00 stderr F I1212 16:16:44.590294 9074 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.600402461+00:00 stderr F I1212 16:16:44.590300 9074 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.600402461+00:00 stderr F I1212 16:16:44.590305 9074 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.600402461+00:00 stderr 
F 2025-12-12T16:16:44Z [verbose] Add: openshift-operator-lifecycle-manager:package-server-manager-77f986bd66-mjzlp:00c7f3b3-f4dd-4d19-9739-512a35f436f5:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"b300a9516c52912","mac":"42:85:3d:d9:dc:15"},{"name":"eth0","mac":"0a:58:0a:d9:00:13","sandbox":"/var/run/netns/d15b219f-1af2-40a0-a4f5-2dd5cf3fcd1f"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.19/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:44.600402461+00:00 stderr F I1212 16:16:44.599523 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-operator-lifecycle-manager", Name:"package-server-manager-77f986bd66-mjzlp", UID:"00c7f3b3-f4dd-4d19-9739-512a35f436f5", APIVersion:"v1", ResourceVersion:"36733", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.19/23] from ovn-kubernetes 2025-12-12T16:16:44.608312204+00:00 stderr F I1212 16:16:44.593703 9067 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:44.608312204+00:00 stderr F I1212 16:16:44.593828 9067 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:44.608312204+00:00 stderr F I1212 16:16:44.593835 9067 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:44.608312204+00:00 stderr F I1212 16:16:44.593841 9067 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:44.608312204+00:00 stderr F I1212 16:16:44.593847 9067 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:44.608312204+00:00 stderr F 2025-12-12T16:16:44Z [verbose] Add: openshift-apiserver:apiserver-9ddfb9f55-sg8rq:693e66ed-f826-4819-a47d-f32faf9dab96:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"c8a5496c4a2472a","mac":"de:04:57:35:f1:f7"},{"name":"eth0","mac":"0a:58:0a:d9:00:16","sandbox":"/var/run/netns/b1cf10f9-daeb-489d-8724-db46eda0e4e1"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.22/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:44.608312204+00:00 stderr F I1212 16:16:44.607651 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-apiserver", Name:"apiserver-9ddfb9f55-sg8rq", UID:"693e66ed-f826-4819-a47d-f32faf9dab96", APIVersion:"v1", ResourceVersion:"36933", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.22/23] from ovn-kubernetes 2025-12-12T16:16:44.629283836+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD finished CNI request ContainerID:"b300a9516c529128a4338eca3e85a4bc7c3e16956b5d84d622809c31a00f651b" Netns:"/var/run/netns/d15b219f-1af2-40a0-a4f5-2dd5cf3fcd1f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=package-server-manager-77f986bd66-mjzlp;K8S_POD_INFRA_CONTAINER_ID=b300a9516c529128a4338eca3e85a4bc7c3e16956b5d84d622809c31a00f651b;K8S_POD_UID=00c7f3b3-f4dd-4d19-9739-512a35f436f5" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"42:85:3d:d9:dc:15\",\"name\":\"b300a9516c52912\"},{\"mac\":\"0a:58:0a:d9:00:13\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/d15b219f-1af2-40a0-a4f5-2dd5cf3fcd1f\"}],\"ips\":[{\"address\":\"10.217.0.19/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:44.652063292+00:00 stderr F 2025-12-12T16:16:44Z [verbose] ADD finished CNI request 
ContainerID:"c8a5496c4a2472a0beb924c848fd9bb5edee60a3fd69c13df19d2b01d3a9ec7a" Netns:"/var/run/netns/b1cf10f9-daeb-489d-8724-db46eda0e4e1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-apiserver;K8S_POD_NAME=apiserver-9ddfb9f55-sg8rq;K8S_POD_INFRA_CONTAINER_ID=c8a5496c4a2472a0beb924c848fd9bb5edee60a3fd69c13df19d2b01d3a9ec7a;K8S_POD_UID=693e66ed-f826-4819-a47d-f32faf9dab96" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"de:04:57:35:f1:f7\",\"name\":\"c8a5496c4a2472a\"},{\"mac\":\"0a:58:0a:d9:00:16\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/b1cf10f9-daeb-489d-8724-db46eda0e4e1\"}],\"ips\":[{\"address\":\"10.217.0.22/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:49.832587240+00:00 stderr F 2025-12-12T16:16:49Z [verbose] ADD starting CNI request ContainerID:"70771a8a130e6322df73890d22e5b58e9c784d9164e5ed9740d937291a171571" Netns:"/var/run/netns/b0d35401-5792-4682-8d69-7f39cf91cd6e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-pvzzz;K8S_POD_INFRA_CONTAINER_ID=70771a8a130e6322df73890d22e5b58e9c784d9164e5ed9740d937291a171571;K8S_POD_UID=f1a12a40-8493-41e1-84b7-312fc948fca8" Path:"" 2025-12-12T16:16:49.920385193+00:00 stderr F 2025-12-12T16:16:49Z [verbose] ADD starting CNI request ContainerID:"5cc1da989e963af873e82696b122995145445095ec336e5b958ae3ddef9bfffd" Netns:"/var/run/netns/b41854f2-a31a-48b8-9915-6efa69ec94e8" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-2gt6h;K8S_POD_INFRA_CONTAINER_ID=5cc1da989e963af873e82696b122995145445095ec336e5b958ae3ddef9bfffd;K8S_POD_UID=3686d912-c8e4-413f-b036-f206a4e826a2" Path:"" 2025-12-12T16:16:50.095454348+00:00 stderr F I1212 16:16:50.085735 9974 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:50.095454348+00:00 stderr F I1212 16:16:50.086230 9974 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:50.095454348+00:00 stderr F I1212 16:16:50.086241 9974 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:50.095454348+00:00 stderr F I1212 16:16:50.086248 9974 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:50.095454348+00:00 stderr F I1212 16:16:50.086254 9974 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:50.095966990+00:00 stderr F 2025-12-12T16:16:50Z [verbose] Add: openshift-marketplace:certified-operators-pvzzz:f1a12a40-8493-41e1-84b7-312fc948fca8:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"70771a8a130e632","mac":"42:3f:7f:4b:41:8e"},{"name":"eth0","mac":"0a:58:0a:d9:00:2c","sandbox":"/var/run/netns/b0d35401-5792-4682-8d69-7f39cf91cd6e"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.44/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:50.096364990+00:00 stderr F I1212 16:16:50.096298 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"certified-operators-pvzzz", UID:"f1a12a40-8493-41e1-84b7-312fc948fca8", APIVersion:"v1", ResourceVersion:"37512", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.44/23] from ovn-kubernetes 2025-12-12T16:16:50.116586994+00:00 stderr F 2025-12-12T16:16:50Z [verbose] ADD finished CNI request 
ContainerID:"70771a8a130e6322df73890d22e5b58e9c784d9164e5ed9740d937291a171571" Netns:"/var/run/netns/b0d35401-5792-4682-8d69-7f39cf91cd6e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-pvzzz;K8S_POD_INFRA_CONTAINER_ID=70771a8a130e6322df73890d22e5b58e9c784d9164e5ed9740d937291a171571;K8S_POD_UID=f1a12a40-8493-41e1-84b7-312fc948fca8" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"42:3f:7f:4b:41:8e\",\"name\":\"70771a8a130e632\"},{\"mac\":\"0a:58:0a:d9:00:2c\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/b0d35401-5792-4682-8d69-7f39cf91cd6e\"}],\"ips\":[{\"address\":\"10.217.0.44/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:50.182952184+00:00 stderr F 2025-12-12T16:16:50Z [verbose] ADD starting CNI request ContainerID:"ff8c45863778a48a425a28a9a87918b0efc06a9a71abddaf0a58cf0518f7b451" Netns:"/var/run/netns/2f0a4a9c-7a74-4fd3-8443-f40eda9809c4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-kxjp8;K8S_POD_INFRA_CONTAINER_ID=ff8c45863778a48a425a28a9a87918b0efc06a9a71abddaf0a58cf0518f7b451;K8S_POD_UID=5319f16c-f39a-4bd6-836a-cb336099dbc2" Path:"" 2025-12-12T16:16:50.396990749+00:00 stderr F I1212 16:16:50.383361 9996 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:50.396990749+00:00 stderr F I1212 16:16:50.384071 9996 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:50.396990749+00:00 stderr F I1212 16:16:50.384083 9996 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:50.396990749+00:00 stderr F I1212 16:16:50.384091 9996 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:50.396990749+00:00 stderr F I1212 16:16:50.384099 9996 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:50.397800599+00:00 stderr F 2025-12-12T16:16:50Z [verbose] Add: openshift-marketplace:community-operators-2gt6h:3686d912-c8e4-413f-b036-f206a4e826a2:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"5cc1da989e963af","mac":"d2:5e:a7:9f:ff:87"},{"name":"eth0","mac":"0a:58:0a:d9:00:2d","sandbox":"/var/run/netns/b41854f2-a31a-48b8-9915-6efa69ec94e8"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.45/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:50.398368653+00:00 stderr F I1212 16:16:50.398324 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"community-operators-2gt6h", UID:"3686d912-c8e4-413f-b036-f206a4e826a2", APIVersion:"v1", ResourceVersion:"37545", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.45/23] from ovn-kubernetes 2025-12-12T16:16:50.416845214+00:00 stderr P 2025-12-12T16:16:50Z [verbose] 2025-12-12T16:16:50.416948077+00:00 stderr P ADD starting CNI request ContainerID:"36bd50d659f1abd49597b7cae2eaed8aebe612ec36c3f9fbc5758f96ffbde8ed" Netns:"/var/run/netns/e337e284-954e-4a1e-a479-14d6b90526f1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-p7s65;K8S_POD_INFRA_CONTAINER_ID=36bd50d659f1abd49597b7cae2eaed8aebe612ec36c3f9fbc5758f96ffbde8ed;K8S_POD_UID=5957e518-15e6-4acf-9e45-4985b7713fc8" Path:"" 2025-12-12T16:16:50.416984528+00:00 stderr F 2025-12-12T16:16:50.434039634+00:00 stderr F 
2025-12-12T16:16:50Z [verbose] ADD finished CNI request ContainerID:"5cc1da989e963af873e82696b122995145445095ec336e5b958ae3ddef9bfffd" Netns:"/var/run/netns/b41854f2-a31a-48b8-9915-6efa69ec94e8" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-2gt6h;K8S_POD_INFRA_CONTAINER_ID=5cc1da989e963af873e82696b122995145445095ec336e5b958ae3ddef9bfffd;K8S_POD_UID=3686d912-c8e4-413f-b036-f206a4e826a2" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"d2:5e:a7:9f:ff:87\",\"name\":\"5cc1da989e963af\"},{\"mac\":\"0a:58:0a:d9:00:2d\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/b41854f2-a31a-48b8-9915-6efa69ec94e8\"}],\"ips\":[{\"address\":\"10.217.0.45/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:50.436978636+00:00 stderr F I1212 16:16:50.403586 10061 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:50.436978636+00:00 stderr F I1212 16:16:50.403830 10061 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:50.436978636+00:00 stderr F I1212 16:16:50.403841 10061 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:50.436978636+00:00 stderr F I1212 16:16:50.403849 10061 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:50.436978636+00:00 stderr F I1212 16:16:50.403856 10061 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:50.440097202+00:00 stderr F 2025-12-12T16:16:50Z [verbose] Add: openshift-marketplace:certified-operators-kxjp8:5319f16c-f39a-4bd6-836a-cb336099dbc2:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"ff8c45863778a48","mac":"82:f5:9b:31:93:98"},{"name":"eth0","mac":"0a:58:0a:d9:00:2e","sandbox":"/var/run/netns/2f0a4a9c-7a74-4fd3-8443-f40eda9809c4"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.46/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:50.440097202+00:00 stderr F I1212 16:16:50.437728 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"certified-operators-kxjp8", UID:"5319f16c-f39a-4bd6-836a-cb336099dbc2", APIVersion:"v1", ResourceVersion:"37559", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.46/23] from ovn-kubernetes 2025-12-12T16:16:50.549632346+00:00 stderr P 2025-12-12T16:16:50Z [verbose] 2025-12-12T16:16:50.549734869+00:00 stderr P ADD finished CNI request ContainerID:"ff8c45863778a48a425a28a9a87918b0efc06a9a71abddaf0a58cf0518f7b451" Netns:"/var/run/netns/2f0a4a9c-7a74-4fd3-8443-f40eda9809c4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-kxjp8;K8S_POD_INFRA_CONTAINER_ID=ff8c45863778a48a425a28a9a87918b0efc06a9a71abddaf0a58cf0518f7b451;K8S_POD_UID=5319f16c-f39a-4bd6-836a-cb336099dbc2" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"82:f5:9b:31:93:98\",\"name\":\"ff8c45863778a48\"},{\"mac\":\"0a:58:0a:d9:00:2e\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/2f0a4a9c-7a74-4fd3-8443-f40eda9809c4\"}],\"ips\":[{\"address\":\"10.217.0.46/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:50.549781200+00:00 stderr F 2025-12-12T16:16:50.800371288+00:00 stderr F I1212 16:16:50.794705 10099 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" 
enabled=false 2025-12-12T16:16:50.800371288+00:00 stderr F I1212 16:16:50.795345 10099 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:50.800371288+00:00 stderr F I1212 16:16:50.795377 10099 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:50.800371288+00:00 stderr F I1212 16:16:50.795397 10099 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:50.800371288+00:00 stderr F I1212 16:16:50.795415 10099 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:50.800919331+00:00 stderr P 2025-12-12T16:16:50Z [verbose] 2025-12-12T16:16:50.800943002+00:00 stderr P Add: openshift-marketplace:community-operators-p7s65:5957e518-15e6-4acf-9e45-4985b7713fc8:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"36bd50d659f1abd","mac":"8a:ce:76:74:e7:82"},{"name":"eth0","mac":"0a:58:0a:d9:00:2f","sandbox":"/var/run/netns/e337e284-954e-4a1e-a479-14d6b90526f1"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.47/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:50.800960842+00:00 stderr F 2025-12-12T16:16:50.801377442+00:00 stderr F I1212 16:16:50.801324 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"community-operators-p7s65", UID:"5957e518-15e6-4acf-9e45-4985b7713fc8", APIVersion:"v1", ResourceVersion:"37567", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.47/23] from ovn-kubernetes 2025-12-12T16:16:50.816974653+00:00 stderr P 2025-12-12T16:16:50Z [verbose] 2025-12-12T16:16:50.817089436+00:00 stderr P ADD finished CNI request ContainerID:"36bd50d659f1abd49597b7cae2eaed8aebe612ec36c3f9fbc5758f96ffbde8ed" Netns:"/var/run/netns/e337e284-954e-4a1e-a479-14d6b90526f1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-p7s65;K8S_POD_INFRA_CONTAINER_ID=36bd50d659f1abd49597b7cae2eaed8aebe612ec36c3f9fbc5758f96ffbde8ed;K8S_POD_UID=5957e518-15e6-4acf-9e45-4985b7713fc8" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"8a:ce:76:74:e7:82\",\"name\":\"36bd50d659f1abd\"},{\"mac\":\"0a:58:0a:d9:00:2f\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/e337e284-954e-4a1e-a479-14d6b90526f1\"}],\"ips\":[{\"address\":\"10.217.0.47/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:50.817112136+00:00 stderr F 2025-12-12T16:16:51.927719801+00:00 stderr P 2025-12-12T16:16:51Z [verbose] 2025-12-12T16:16:51.927809624+00:00 stderr P ADD starting CNI request ContainerID:"f1da0765a97fe218a374080c0f1f06e2731cd63af36a922455361a4960727e20" Netns:"/var/run/netns/a6b27e17-1c48-4213-837f-159d82f2ff20" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-marketplace-s7x92;K8S_POD_INFRA_CONTAINER_ID=f1da0765a97fe218a374080c0f1f06e2731cd63af36a922455361a4960727e20;K8S_POD_UID=1aaf652b-1019-4193-839d-875d12cc1e27" Path:"" 2025-12-12T16:16:51.927827874+00:00 stderr F 2025-12-12T16:16:52.104620000+00:00 stderr F I1212 16:16:52.096010 10223 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:52.104620000+00:00 stderr F I1212 16:16:52.096393 10223 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:52.104620000+00:00 stderr F I1212 16:16:52.096410 10223 envvar.go:172] 
"Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:52.104620000+00:00 stderr F I1212 16:16:52.096416 10223 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:52.104620000+00:00 stderr F I1212 16:16:52.096422 10223 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:52.107245524+00:00 stderr F 2025-12-12T16:16:52Z [verbose] Add: openshift-marketplace:redhat-marketplace-s7x92:1aaf652b-1019-4193-839d-875d12cc1e27:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"f1da0765a97fe21","mac":"0a:cc:a1:5d:de:c3"},{"name":"eth0","mac":"0a:58:0a:d9:00:30","sandbox":"/var/run/netns/a6b27e17-1c48-4213-837f-159d82f2ff20"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.48/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:52.107245524+00:00 stderr F I1212 16:16:52.105823 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"redhat-marketplace-s7x92", UID:"1aaf652b-1019-4193-839d-875d12cc1e27", APIVersion:"v1", ResourceVersion:"37633", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.48/23] from ovn-kubernetes 2025-12-12T16:16:52.126320590+00:00 stderr P 2025-12-12T16:16:52Z [verbose] 2025-12-12T16:16:52.126487514+00:00 stderr P ADD finished CNI request ContainerID:"f1da0765a97fe218a374080c0f1f06e2731cd63af36a922455361a4960727e20" Netns:"/var/run/netns/a6b27e17-1c48-4213-837f-159d82f2ff20" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-marketplace-s7x92;K8S_POD_INFRA_CONTAINER_ID=f1da0765a97fe218a374080c0f1f06e2731cd63af36a922455361a4960727e20;K8S_POD_UID=1aaf652b-1019-4193-839d-875d12cc1e27" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"0a:cc:a1:5d:de:c3\",\"name\":\"f1da0765a97fe21\"},{\"mac\":\"0a:58:0a:d9:00:30\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/a6b27e17-1c48-4213-837f-159d82f2ff20\"}],\"ips\":[{\"address\":\"10.217.0.48/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:52.126508865+00:00 stderr F 2025-12-12T16:16:52.331830258+00:00 stderr P 2025-12-12T16:16:52Z [verbose] 2025-12-12T16:16:52.331919830+00:00 stderr P ADD starting CNI request ContainerID:"6f2c7e4ee8005058653be608254682e6f8ccf99963c0cc49075bb88e3c4fee94" Netns:"/var/run/netns/004de1b2-071e-42b4-bf95-47e3937a2607" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-marketplace-mgp9n;K8S_POD_INFRA_CONTAINER_ID=6f2c7e4ee8005058653be608254682e6f8ccf99963c0cc49075bb88e3c4fee94;K8S_POD_UID=86909e43-e62d-4532-8232-aa3ca0de5d28" Path:"" 2025-12-12T16:16:52.332164446+00:00 stderr F 2025-12-12T16:16:52.502048673+00:00 stderr F I1212 16:16:52.492900 10266 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:52.502048673+00:00 stderr F I1212 16:16:52.493517 10266 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:52.502048673+00:00 stderr F I1212 16:16:52.493531 10266 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:52.502048673+00:00 stderr F I1212 16:16:52.493539 10266 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:52.502048673+00:00 stderr F I1212 16:16:52.493552 10266 envvar.go:172] "Feature gate default state" 
feature="InOrderInformers" enabled=true 2025-12-12T16:16:52.502680729+00:00 stderr P 2025-12-12T16:16:52Z [verbose] 2025-12-12T16:16:52.502904684+00:00 stderr P Add: openshift-marketplace:redhat-marketplace-mgp9n:86909e43-e62d-4532-8232-aa3ca0de5d28:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"6f2c7e4ee800505","mac":"e2:62:8e:56:36:2e"},{"name":"eth0","mac":"0a:58:0a:d9:00:31","sandbox":"/var/run/netns/004de1b2-071e-42b4-bf95-47e3937a2607"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.49/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:52.502933175+00:00 stderr F 2025-12-12T16:16:52.505803975+00:00 stderr F I1212 16:16:52.503510 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"redhat-marketplace-mgp9n", UID:"86909e43-e62d-4532-8232-aa3ca0de5d28", APIVersion:"v1", ResourceVersion:"37667", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.49/23] from ovn-kubernetes 2025-12-12T16:16:52.527278229+00:00 stderr P 2025-12-12T16:16:52Z [verbose] 2025-12-12T16:16:52.528422117+00:00 stderr F ADD finished CNI request ContainerID:"6f2c7e4ee8005058653be608254682e6f8ccf99963c0cc49075bb88e3c4fee94" Netns:"/var/run/netns/004de1b2-071e-42b4-bf95-47e3937a2607" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-marketplace-mgp9n;K8S_POD_INFRA_CONTAINER_ID=6f2c7e4ee8005058653be608254682e6f8ccf99963c0cc49075bb88e3c4fee94;K8S_POD_UID=86909e43-e62d-4532-8232-aa3ca0de5d28" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"e2:62:8e:56:36:2e\",\"name\":\"6f2c7e4ee800505\"},{\"mac\":\"0a:58:0a:d9:00:31\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/004de1b2-071e-42b4-bf95-47e3937a2607\"}],\"ips\":[{\"address\":\"10.217.0.49/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:52.639683594+00:00 stderr F 2025-12-12T16:16:52Z [verbose] ADD starting CNI request ContainerID:"ef76e0aa0ff828ddf012582e32a39ad73fae468c8e2f7f3b7834e520001cf401" Netns:"/var/run/netns/20784d2b-953c-4bb2-b7dd-18baa07e5659" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-scheduler;K8S_POD_NAME=revision-pruner-6-crc;K8S_POD_INFRA_CONTAINER_ID=ef76e0aa0ff828ddf012582e32a39ad73fae468c8e2f7f3b7834e520001cf401;K8S_POD_UID=6e33370d-b952-4a48-a6cb-73e765546903" Path:"" 2025-12-12T16:16:52.850384757+00:00 stderr F 2025-12-12T16:16:52Z [verbose] DEL starting CNI request ContainerID:"328df9b4f48f0adc7c6483781e32bef2bbf38c7a3bc72162f9752fc54e642716" Netns:"/var/run/netns/e78fe4df-e35f-4fe8-84a4-6fd0e3ebe069" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=collect-profiles-29425935-7hkrm;K8S_POD_INFRA_CONTAINER_ID=328df9b4f48f0adc7c6483781e32bef2bbf38c7a3bc72162f9752fc54e642716;K8S_POD_UID=19e81fea-065e-43b5-8e56-49bfcfa342f7" Path:"" 2025-12-12T16:16:52.851006043+00:00 stderr F 2025-12-12T16:16:52Z [verbose] Del: openshift-operator-lifecycle-manager:collect-profiles-29425935-7hkrm:19e81fea-065e-43b5-8e56-49bfcfa342f7:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:16:53.041714168+00:00 stderr F I1212 16:16:52.983795 10323 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 
2025-12-12T16:16:53.041714168+00:00 stderr F I1212 16:16:52.991672 10323 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:53.041714168+00:00 stderr F I1212 16:16:52.991839 10323 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:53.041714168+00:00 stderr F I1212 16:16:52.991867 10323 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:53.041714168+00:00 stderr F I1212 16:16:52.991892 10323 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:53.042601529+00:00 stderr F 2025-12-12T16:16:53Z [verbose] Add: openshift-kube-scheduler:revision-pruner-6-crc:6e33370d-b952-4a48-a6cb-73e765546903:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"ef76e0aa0ff828d","mac":"aa:69:ee:68:bd:ae"},{"name":"eth0","mac":"0a:58:0a:d9:00:32","sandbox":"/var/run/netns/20784d2b-953c-4bb2-b7dd-18baa07e5659"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.50/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:53.044127617+00:00 stderr F I1212 16:16:53.042651 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-scheduler", Name:"revision-pruner-6-crc", UID:"6e33370d-b952-4a48-a6cb-73e765546903", APIVersion:"v1", ResourceVersion:"37681", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.50/23] from ovn-kubernetes 2025-12-12T16:16:53.066001070+00:00 stderr F 2025-12-12T16:16:53Z [verbose] ADD finished CNI request ContainerID:"ef76e0aa0ff828ddf012582e32a39ad73fae468c8e2f7f3b7834e520001cf401" Netns:"/var/run/netns/20784d2b-953c-4bb2-b7dd-18baa07e5659" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-scheduler;K8S_POD_NAME=revision-pruner-6-crc;K8S_POD_INFRA_CONTAINER_ID=ef76e0aa0ff828ddf012582e32a39ad73fae468c8e2f7f3b7834e520001cf401;K8S_POD_UID=6e33370d-b952-4a48-a6cb-73e765546903" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"aa:69:ee:68:bd:ae\",\"name\":\"ef76e0aa0ff828d\"},{\"mac\":\"0a:58:0a:d9:00:32\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/20784d2b-953c-4bb2-b7dd-18baa07e5659\"}],\"ips\":[{\"address\":\"10.217.0.50/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:53.124952690+00:00 stderr F 2025-12-12T16:16:53Z [verbose] DEL finished CNI request ContainerID:"328df9b4f48f0adc7c6483781e32bef2bbf38c7a3bc72162f9752fc54e642716" Netns:"/var/run/netns/e78fe4df-e35f-4fe8-84a4-6fd0e3ebe069" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=collect-profiles-29425935-7hkrm;K8S_POD_INFRA_CONTAINER_ID=328df9b4f48f0adc7c6483781e32bef2bbf38c7a3bc72162f9752fc54e642716;K8S_POD_UID=19e81fea-065e-43b5-8e56-49bfcfa342f7" Path:"", result: "", err: 2025-12-12T16:16:53.149539840+00:00 stderr F 2025-12-12T16:16:53Z [verbose] ADD starting CNI request ContainerID:"ac163b2bf1c1d578b9037f0b59dae7dd262bb9d00e98558c9f328edeb8dabdb0" Netns:"/var/run/netns/a0382a0c-e1d9-4346-bb26-fc803e64a6dd" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-9ndfc;K8S_POD_INFRA_CONTAINER_ID=ac163b2bf1c1d578b9037f0b59dae7dd262bb9d00e98558c9f328edeb8dabdb0;K8S_POD_UID=573d2658-6034-4715-a9ad-a7828b324fd5" Path:"" 2025-12-12T16:16:53.474022192+00:00 stderr F I1212 16:16:53.461688 10407 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" 
enabled=false 2025-12-12T16:16:53.474022192+00:00 stderr F I1212 16:16:53.462308 10407 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:53.474022192+00:00 stderr F I1212 16:16:53.462343 10407 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:53.474022192+00:00 stderr F I1212 16:16:53.462364 10407 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:53.474022192+00:00 stderr F I1212 16:16:53.462382 10407 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:53.474022192+00:00 stderr F 2025-12-12T16:16:53Z [verbose] Add: openshift-marketplace:redhat-operators-9ndfc:573d2658-6034-4715-a9ad-a7828b324fd5:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"ac163b2bf1c1d57","mac":"52:6c:45:c0:c7:44"},{"name":"eth0","mac":"0a:58:0a:d9:00:33","sandbox":"/var/run/netns/a0382a0c-e1d9-4346-bb26-fc803e64a6dd"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.51/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:53.474232727+00:00 stderr F I1212 16:16:53.474199 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"redhat-operators-9ndfc", UID:"573d2658-6034-4715-a9ad-a7828b324fd5", APIVersion:"v1", ResourceVersion:"37698", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.51/23] from ovn-kubernetes 2025-12-12T16:16:53.497572407+00:00 stderr F 2025-12-12T16:16:53Z [verbose] ADD finished CNI request ContainerID:"ac163b2bf1c1d578b9037f0b59dae7dd262bb9d00e98558c9f328edeb8dabdb0" Netns:"/var/run/netns/a0382a0c-e1d9-4346-bb26-fc803e64a6dd" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-9ndfc;K8S_POD_INFRA_CONTAINER_ID=ac163b2bf1c1d578b9037f0b59dae7dd262bb9d00e98558c9f328edeb8dabdb0;K8S_POD_UID=573d2658-6034-4715-a9ad-a7828b324fd5" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"52:6c:45:c0:c7:44\",\"name\":\"ac163b2bf1c1d57\"},{\"mac\":\"0a:58:0a:d9:00:33\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/a0382a0c-e1d9-4346-bb26-fc803e64a6dd\"}],\"ips\":[{\"address\":\"10.217.0.51/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:53.524852033+00:00 stderr F 2025-12-12T16:16:53Z [verbose] ADD starting CNI request ContainerID:"4e9f04b1e852fa9141933d1eca7d926563f8ad649e9315eff76350ec836adf3d" Netns:"/var/run/netns/b5243c69-84a2-41be-99ed-a94a39b1c756" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-2blsm;K8S_POD_INFRA_CONTAINER_ID=4e9f04b1e852fa9141933d1eca7d926563f8ad649e9315eff76350ec836adf3d;K8S_POD_UID=fb3b2430-d128-4d2d-9518-6be0ca0ddc6f" Path:"" 2025-12-12T16:16:53.782962655+00:00 stderr F I1212 16:16:53.774265 10467 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:53.782962655+00:00 stderr F I1212 16:16:53.774750 10467 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:53.782962655+00:00 stderr F I1212 16:16:53.774758 10467 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:53.782962655+00:00 stderr F I1212 16:16:53.774764 10467 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:53.782962655+00:00 stderr F I1212 
16:16:53.774769 10467 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:53.783244082+00:00 stderr F 2025-12-12T16:16:53Z [verbose] ADD starting CNI request ContainerID:"24759bb2246b4ec47d790729a7b754f9ac9ba3507bc3ec20b520d87ac9c1c2f7" Netns:"/var/run/netns/78334b6c-47d3-43f1-9d3e-165f6c26e086" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver;K8S_POD_NAME=revision-pruner-11-crc;K8S_POD_INFRA_CONTAINER_ID=24759bb2246b4ec47d790729a7b754f9ac9ba3507bc3ec20b520d87ac9c1c2f7;K8S_POD_UID=0ad9be1e-b38d-4280-8a67-505c4461c55d" Path:"" 2025-12-12T16:16:53.783890098+00:00 stderr F 2025-12-12T16:16:53Z [verbose] Add: openshift-marketplace:redhat-operators-2blsm:fb3b2430-d128-4d2d-9518-6be0ca0ddc6f:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"4e9f04b1e852fa9","mac":"92:b1:53:59:22:29"},{"name":"eth0","mac":"0a:58:0a:d9:00:34","sandbox":"/var/run/netns/b5243c69-84a2-41be-99ed-a94a39b1c756"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.52/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:53.784461582+00:00 stderr F I1212 16:16:53.784346 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"redhat-operators-2blsm", UID:"fb3b2430-d128-4d2d-9518-6be0ca0ddc6f", APIVersion:"v1", ResourceVersion:"37716", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.52/23] from ovn-kubernetes 2025-12-12T16:16:53.805732401+00:00 stderr F 2025-12-12T16:16:53Z [verbose] ADD finished CNI request ContainerID:"4e9f04b1e852fa9141933d1eca7d926563f8ad649e9315eff76350ec836adf3d" Netns:"/var/run/netns/b5243c69-84a2-41be-99ed-a94a39b1c756" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-2blsm;K8S_POD_INFRA_CONTAINER_ID=4e9f04b1e852fa9141933d1eca7d926563f8ad649e9315eff76350ec836adf3d;K8S_POD_UID=fb3b2430-d128-4d2d-9518-6be0ca0ddc6f" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"92:b1:53:59:22:29\",\"name\":\"4e9f04b1e852fa9\"},{\"mac\":\"0a:58:0a:d9:00:34\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/b5243c69-84a2-41be-99ed-a94a39b1c756\"}],\"ips\":[{\"address\":\"10.217.0.52/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:54.176232967+00:00 stderr F 2025-12-12T16:16:54Z [verbose] ADD starting CNI request ContainerID:"4d802f5dbe85c769c5b4afa6aaa710f145332a5713a213a44b0344adeeb96222" Netns:"/var/run/netns/8d661401-00f6-4192-9cfa-40024df85217" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-image-registry;K8S_POD_NAME=image-registry-66587d64c8-jqtjf;K8S_POD_INFRA_CONTAINER_ID=4d802f5dbe85c769c5b4afa6aaa710f145332a5713a213a44b0344adeeb96222;K8S_POD_UID=162da780-4bd3-4acf-b114-06ae104fc8ad" Path:"" 2025-12-12T16:16:54.315363753+00:00 stderr F I1212 16:16:54.298101 10508 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:54.315363753+00:00 stderr F I1212 16:16:54.299591 10508 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:54.315363753+00:00 stderr F I1212 16:16:54.299603 10508 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:54.315363753+00:00 stderr F I1212 16:16:54.299612 10508 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:54.315363753+00:00 stderr F I1212 16:16:54.299621 10508 
envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:54.315363753+00:00 stderr F 2025-12-12T16:16:54Z [verbose] Add: openshift-kube-apiserver:revision-pruner-11-crc:0ad9be1e-b38d-4280-8a67-505c4461c55d:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"24759bb2246b4ec","mac":"da:a2:68:83:7b:27"},{"name":"eth0","mac":"0a:58:0a:d9:00:35","sandbox":"/var/run/netns/78334b6c-47d3-43f1-9d3e-165f6c26e086"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.53/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:54.315363753+00:00 stderr F I1212 16:16:54.311459 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-apiserver", Name:"revision-pruner-11-crc", UID:"0ad9be1e-b38d-4280-8a67-505c4461c55d", APIVersion:"v1", ResourceVersion:"37732", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.53/23] from ovn-kubernetes 2025-12-12T16:16:54.405258978+00:00 stderr F 2025-12-12T16:16:54Z [verbose] ADD finished CNI request ContainerID:"24759bb2246b4ec47d790729a7b754f9ac9ba3507bc3ec20b520d87ac9c1c2f7" Netns:"/var/run/netns/78334b6c-47d3-43f1-9d3e-165f6c26e086" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver;K8S_POD_NAME=revision-pruner-11-crc;K8S_POD_INFRA_CONTAINER_ID=24759bb2246b4ec47d790729a7b754f9ac9ba3507bc3ec20b520d87ac9c1c2f7;K8S_POD_UID=0ad9be1e-b38d-4280-8a67-505c4461c55d" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"da:a2:68:83:7b:27\",\"name\":\"24759bb2246b4ec\"},{\"mac\":\"0a:58:0a:d9:00:35\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/78334b6c-47d3-43f1-9d3e-165f6c26e086\"}],\"ips\":[{\"address\":\"10.217.0.53/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:54.584509274+00:00 stderr F I1212 16:16:54.551840 10574 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:54.584509274+00:00 stderr F I1212 16:16:54.552449 10574 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:54.584509274+00:00 stderr F I1212 16:16:54.552457 10574 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:54.584509274+00:00 stderr F I1212 16:16:54.552463 10574 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:54.584509274+00:00 stderr F I1212 16:16:54.552469 10574 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:54.585231882+00:00 stderr F 2025-12-12T16:16:54Z [verbose] Add: openshift-image-registry:image-registry-66587d64c8-jqtjf:162da780-4bd3-4acf-b114-06ae104fc8ad:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"4d802f5dbe85c76","mac":"ba:69:fc:57:55:53"},{"name":"eth0","mac":"0a:58:0a:d9:00:23","sandbox":"/var/run/netns/8d661401-00f6-4192-9cfa-40024df85217"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.35/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:54.585720744+00:00 stderr F I1212 16:16:54.585416 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-image-registry", Name:"image-registry-66587d64c8-jqtjf", UID:"162da780-4bd3-4acf-b114-06ae104fc8ad", APIVersion:"v1", ResourceVersion:"36980", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.35/23] from ovn-kubernetes 2025-12-12T16:16:54.622977743+00:00 stderr F 
2025-12-12T16:16:54Z [verbose] ADD finished CNI request ContainerID:"4d802f5dbe85c769c5b4afa6aaa710f145332a5713a213a44b0344adeeb96222" Netns:"/var/run/netns/8d661401-00f6-4192-9cfa-40024df85217" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-image-registry;K8S_POD_NAME=image-registry-66587d64c8-jqtjf;K8S_POD_INFRA_CONTAINER_ID=4d802f5dbe85c769c5b4afa6aaa710f145332a5713a213a44b0344adeeb96222;K8S_POD_UID=162da780-4bd3-4acf-b114-06ae104fc8ad" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"ba:69:fc:57:55:53\",\"name\":\"4d802f5dbe85c76\"},{\"mac\":\"0a:58:0a:d9:00:23\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/8d661401-00f6-4192-9cfa-40024df85217\"}],\"ips\":[{\"address\":\"10.217.0.35/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:54.880695746+00:00 stderr F 2025-12-12T16:16:54Z [verbose] ADD starting CNI request ContainerID:"f1d3e6758829eb05a7b38e13aecaad83049b28eb2bb658e628e89dbc8458f4c8" Netns:"/var/run/netns/a3be0d26-4dcb-46c9-b347-fbc4b2590d76" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-network-diagnostics;K8S_POD_NAME=network-check-target-fhkjl;K8S_POD_INFRA_CONTAINER_ID=f1d3e6758829eb05a7b38e13aecaad83049b28eb2bb658e628e89dbc8458f4c8;K8S_POD_UID=17b87002-b798-480a-8e17-83053d698239" Path:"" 2025-12-12T16:16:54.896085771+00:00 stderr F 2025-12-12T16:16:54Z [verbose] ADD starting CNI request ContainerID:"46d88d467caf27dc4b86e7ddcbe6d4e9acb4ad8dee93e430fe19e83b37470960" Netns:"/var/run/netns/e7240bfa-25e3-4def-93b3-56e95e347430" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-network-diagnostics;K8S_POD_NAME=network-check-source-5bb8f5cd97-xdvz5;K8S_POD_INFRA_CONTAINER_ID=46d88d467caf27dc4b86e7ddcbe6d4e9acb4ad8dee93e430fe19e83b37470960;K8S_POD_UID=f863fff9-286a-45fa-b8f0-8a86994b8440" Path:"" 2025-12-12T16:16:54.931745372+00:00 stderr F 2025-12-12T16:16:54Z [verbose] ADD starting CNI request ContainerID:"acb628d8928528762ad899b9ca2ae3961510926ca5dadc7a016a7f22008b5399" Netns:"/var/run/netns/16a230dd-eb6c-47e2-ad8a-54d4b943665c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-network-console;K8S_POD_NAME=networking-console-plugin-5ff7774fd9-nljh6;K8S_POD_INFRA_CONTAINER_ID=acb628d8928528762ad899b9ca2ae3961510926ca5dadc7a016a7f22008b5399;K8S_POD_UID=6a9ae5f6-97bd-46ac-bafa-ca1b4452a141" Path:"" 2025-12-12T16:16:55.071894944+00:00 stderr F 2025-12-12T16:16:55Z [verbose] ADD starting CNI request ContainerID:"54bc15a964e93b2b97abe7832b8620ad0b73ee6a55fc6aac574f17bf4ef514c3" Netns:"/var/run/netns/853ccbf3-72a9-4a06-90c6-098fc2732e36" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-multus;K8S_POD_NAME=network-metrics-daemon-jhhcn;K8S_POD_INFRA_CONTAINER_ID=54bc15a964e93b2b97abe7832b8620ad0b73ee6a55fc6aac574f17bf4ef514c3;K8S_POD_UID=4e8bbb2d-9d91-4541-a2d2-891ab81dd883" Path:"" 2025-12-12T16:16:55.347117413+00:00 stderr F I1212 16:16:55.333474 10646 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:55.347117413+00:00 stderr F I1212 16:16:55.333973 10646 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:55.347117413+00:00 stderr F I1212 16:16:55.333993 10646 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:55.347117413+00:00 stderr F I1212 16:16:55.333999 10646 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:55.347117413+00:00 
stderr F I1212 16:16:55.334006 10646 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:55.347117413+00:00 stderr F 2025-12-12T16:16:55Z [verbose] Add: openshift-network-diagnostics:network-check-target-fhkjl:17b87002-b798-480a-8e17-83053d698239:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"f1d3e6758829eb0","mac":"26:e1:ce:98:09:e4"},{"name":"eth0","mac":"0a:58:0a:d9:00:04","sandbox":"/var/run/netns/a3be0d26-4dcb-46c9-b347-fbc4b2590d76"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.4/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:55.352217227+00:00 stderr F I1212 16:16:55.347254 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-network-diagnostics", Name:"network-check-target-fhkjl", UID:"17b87002-b798-480a-8e17-83053d698239", APIVersion:"v1", ResourceVersion:"36433", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.4/23] from ovn-kubernetes 2025-12-12T16:16:55.380148579+00:00 stderr F 2025-12-12T16:16:55Z [verbose] ADD finished CNI request ContainerID:"f1d3e6758829eb05a7b38e13aecaad83049b28eb2bb658e628e89dbc8458f4c8" Netns:"/var/run/netns/a3be0d26-4dcb-46c9-b347-fbc4b2590d76" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-network-diagnostics;K8S_POD_NAME=network-check-target-fhkjl;K8S_POD_INFRA_CONTAINER_ID=f1d3e6758829eb05a7b38e13aecaad83049b28eb2bb658e628e89dbc8458f4c8;K8S_POD_UID=17b87002-b798-480a-8e17-83053d698239" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"26:e1:ce:98:09:e4\",\"name\":\"f1d3e6758829eb0\"},{\"mac\":\"0a:58:0a:d9:00:04\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/a3be0d26-4dcb-46c9-b347-fbc4b2590d76\"}],\"ips\":[{\"address\":\"10.217.0.4/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:55.427866194+00:00 stderr F I1212 16:16:55.412378 10679 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:55.427866194+00:00 stderr F I1212 16:16:55.412509 10679 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:55.427866194+00:00 stderr F I1212 16:16:55.412517 10679 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:55.427866194+00:00 stderr F I1212 16:16:55.412524 10679 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:55.427866194+00:00 stderr F I1212 16:16:55.412530 10679 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:55.428683744+00:00 stderr F 2025-12-12T16:16:55Z [verbose] Add: openshift-network-console:networking-console-plugin-5ff7774fd9-nljh6:6a9ae5f6-97bd-46ac-bafa-ca1b4452a141:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"acb628d89285287","mac":"56:b9:0b:86:a1:6a"},{"name":"eth0","mac":"0a:58:0a:d9:00:3b","sandbox":"/var/run/netns/16a230dd-eb6c-47e2-ad8a-54d4b943665c"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.59/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:55.429091894+00:00 stderr F I1212 16:16:55.429048 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-network-console", Name:"networking-console-plugin-5ff7774fd9-nljh6", UID:"6a9ae5f6-97bd-46ac-bafa-ca1b4452a141", APIVersion:"v1", ResourceVersion:"36445", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 
[10.217.0.59/23] from ovn-kubernetes 2025-12-12T16:16:55.456369630+00:00 stderr F 2025-12-12T16:16:55Z [verbose] ADD finished CNI request ContainerID:"acb628d8928528762ad899b9ca2ae3961510926ca5dadc7a016a7f22008b5399" Netns:"/var/run/netns/16a230dd-eb6c-47e2-ad8a-54d4b943665c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-network-console;K8S_POD_NAME=networking-console-plugin-5ff7774fd9-nljh6;K8S_POD_INFRA_CONTAINER_ID=acb628d8928528762ad899b9ca2ae3961510926ca5dadc7a016a7f22008b5399;K8S_POD_UID=6a9ae5f6-97bd-46ac-bafa-ca1b4452a141" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"56:b9:0b:86:a1:6a\",\"name\":\"acb628d89285287\"},{\"mac\":\"0a:58:0a:d9:00:3b\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/16a230dd-eb6c-47e2-ad8a-54d4b943665c\"}],\"ips\":[{\"address\":\"10.217.0.59/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:55.525264362+00:00 stderr F I1212 16:16:55.517235 10710 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:55.525264362+00:00 stderr F I1212 16:16:55.517387 10710 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:55.525264362+00:00 stderr F I1212 16:16:55.517395 10710 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:55.525264362+00:00 stderr F I1212 16:16:55.517400 10710 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:55.525264362+00:00 stderr F I1212 16:16:55.517406 10710 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:55.526474572+00:00 stderr F 2025-12-12T16:16:55Z [verbose] Add: openshift-multus:network-metrics-daemon-jhhcn:4e8bbb2d-9d91-4541-a2d2-891ab81dd883:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"54bc15a964e93b2","mac":"ea:fc:11:eb:e0:f0"},{"name":"eth0","mac":"0a:58:0a:d9:00:03","sandbox":"/var/run/netns/853ccbf3-72a9-4a06-90c6-098fc2732e36"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.3/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:55.527359543+00:00 stderr F I1212 16:16:55.527302 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-multus", Name:"network-metrics-daemon-jhhcn", UID:"4e8bbb2d-9d91-4541-a2d2-891ab81dd883", APIVersion:"v1", ResourceVersion:"36623", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.3/23] from ovn-kubernetes 2025-12-12T16:16:55.546427519+00:00 stderr F 2025-12-12T16:16:55Z [verbose] ADD finished CNI request ContainerID:"54bc15a964e93b2b97abe7832b8620ad0b73ee6a55fc6aac574f17bf4ef514c3" Netns:"/var/run/netns/853ccbf3-72a9-4a06-90c6-098fc2732e36" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-multus;K8S_POD_NAME=network-metrics-daemon-jhhcn;K8S_POD_INFRA_CONTAINER_ID=54bc15a964e93b2b97abe7832b8620ad0b73ee6a55fc6aac574f17bf4ef514c3;K8S_POD_UID=4e8bbb2d-9d91-4541-a2d2-891ab81dd883" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"ea:fc:11:eb:e0:f0\",\"name\":\"54bc15a964e93b2\"},{\"mac\":\"0a:58:0a:d9:00:03\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/853ccbf3-72a9-4a06-90c6-098fc2732e36\"}],\"ips\":[{\"address\":\"10.217.0.3/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:55.667252249+00:00 stderr F I1212 16:16:55.654980 10659 envvar.go:172] "Feature gate default state" feature="WatchListClient" 
enabled=false 2025-12-12T16:16:55.667252249+00:00 stderr F I1212 16:16:55.655372 10659 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:55.667252249+00:00 stderr F I1212 16:16:55.655380 10659 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:55.667252249+00:00 stderr F I1212 16:16:55.655387 10659 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:55.667252249+00:00 stderr F I1212 16:16:55.655394 10659 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:16:55.668472849+00:00 stderr F 2025-12-12T16:16:55Z [verbose] Add: openshift-network-diagnostics:network-check-source-5bb8f5cd97-xdvz5:f863fff9-286a-45fa-b8f0-8a86994b8440:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"46d88d467caf27d","mac":"0a:6c:be:4a:ae:67"},{"name":"eth0","mac":"0a:58:0a:d9:00:3a","sandbox":"/var/run/netns/e7240bfa-25e3-4def-93b3-56e95e347430"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.58/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:16:55.668472849+00:00 stderr F I1212 16:16:55.668090 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-network-diagnostics", Name:"network-check-source-5bb8f5cd97-xdvz5", UID:"f863fff9-286a-45fa-b8f0-8a86994b8440", APIVersion:"v1", ResourceVersion:"36411", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.58/23] from ovn-kubernetes 2025-12-12T16:16:55.742812354+00:00 stderr F 2025-12-12T16:16:55Z [verbose] ADD finished CNI request ContainerID:"46d88d467caf27dc4b86e7ddcbe6d4e9acb4ad8dee93e430fe19e83b37470960" Netns:"/var/run/netns/e7240bfa-25e3-4def-93b3-56e95e347430" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-network-diagnostics;K8S_POD_NAME=network-check-source-5bb8f5cd97-xdvz5;K8S_POD_INFRA_CONTAINER_ID=46d88d467caf27dc4b86e7ddcbe6d4e9acb4ad8dee93e430fe19e83b37470960;K8S_POD_UID=f863fff9-286a-45fa-b8f0-8a86994b8440" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"0a:6c:be:4a:ae:67\",\"name\":\"46d88d467caf27d\"},{\"mac\":\"0a:58:0a:d9:00:3a\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/e7240bfa-25e3-4def-93b3-56e95e347430\"}],\"ips\":[{\"address\":\"10.217.0.58/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:16:56.365329292+00:00 stderr F 2025-12-12T16:16:56Z [verbose] DEL starting CNI request ContainerID:"ef76e0aa0ff828ddf012582e32a39ad73fae468c8e2f7f3b7834e520001cf401" Netns:"/var/run/netns/20784d2b-953c-4bb2-b7dd-18baa07e5659" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-scheduler;K8S_POD_NAME=revision-pruner-6-crc;K8S_POD_INFRA_CONTAINER_ID=ef76e0aa0ff828ddf012582e32a39ad73fae468c8e2f7f3b7834e520001cf401;K8S_POD_UID=6e33370d-b952-4a48-a6cb-73e765546903" Path:"" 2025-12-12T16:16:56.365329292+00:00 stderr F 2025-12-12T16:16:56Z [verbose] Del: openshift-kube-scheduler:revision-pruner-6-crc:6e33370d-b952-4a48-a6cb-73e765546903:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:16:56.629280675+00:00 stderr F 2025-12-12T16:16:56Z [verbose] DEL finished CNI request ContainerID:"ef76e0aa0ff828ddf012582e32a39ad73fae468c8e2f7f3b7834e520001cf401" 
Netns:"/var/run/netns/20784d2b-953c-4bb2-b7dd-18baa07e5659" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-scheduler;K8S_POD_NAME=revision-pruner-6-crc;K8S_POD_INFRA_CONTAINER_ID=ef76e0aa0ff828ddf012582e32a39ad73fae468c8e2f7f3b7834e520001cf401;K8S_POD_UID=6e33370d-b952-4a48-a6cb-73e765546903" Path:"", result: "", err: 2025-12-12T16:16:57.370460101+00:00 stderr F 2025-12-12T16:16:57Z [verbose] DEL starting CNI request ContainerID:"24759bb2246b4ec47d790729a7b754f9ac9ba3507bc3ec20b520d87ac9c1c2f7" Netns:"/var/run/netns/78334b6c-47d3-43f1-9d3e-165f6c26e086" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver;K8S_POD_NAME=revision-pruner-11-crc;K8S_POD_INFRA_CONTAINER_ID=24759bb2246b4ec47d790729a7b754f9ac9ba3507bc3ec20b520d87ac9c1c2f7;K8S_POD_UID=0ad9be1e-b38d-4280-8a67-505c4461c55d" Path:"" 2025-12-12T16:16:57.370758548+00:00 stderr F 2025-12-12T16:16:57Z [verbose] Del: openshift-kube-apiserver:revision-pruner-11-crc:0ad9be1e-b38d-4280-8a67-505c4461c55d:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:16:57.542023519+00:00 stderr F 2025-12-12T16:16:57Z [verbose] DEL finished CNI request ContainerID:"24759bb2246b4ec47d790729a7b754f9ac9ba3507bc3ec20b520d87ac9c1c2f7" Netns:"/var/run/netns/78334b6c-47d3-43f1-9d3e-165f6c26e086" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver;K8S_POD_NAME=revision-pruner-11-crc;K8S_POD_INFRA_CONTAINER_ID=24759bb2246b4ec47d790729a7b754f9ac9ba3507bc3ec20b520d87ac9c1c2f7;K8S_POD_UID=0ad9be1e-b38d-4280-8a67-505c4461c55d" Path:"", result: "", err: 2025-12-12T16:17:30.578408363+00:00 stderr F 2025-12-12T16:17:30Z [verbose] DEL starting CNI request ContainerID:"2bf714089818fd6477a262dc7b43a76fa700b53d570bf643af2f365afa9909f2" Netns:"/var/run/netns/3974ea76-7613-460e-bb4d-be275a41269d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-65b6cccf98-flnsl;K8S_POD_INFRA_CONTAINER_ID=2bf714089818fd6477a262dc7b43a76fa700b53d570bf643af2f365afa9909f2;K8S_POD_UID=d259a06e-3949-41b6-a067-7c01441da4b1" Path:"" 2025-12-12T16:17:30.579481673+00:00 stderr F 2025-12-12T16:17:30Z [verbose] Del: openshift-controller-manager:controller-manager-65b6cccf98-flnsl:d259a06e-3949-41b6-a067-7c01441da4b1:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:17:30.590846160+00:00 stderr F 2025-12-12T16:17:30Z [verbose] DEL starting CNI request ContainerID:"fe12aa686f8f130f2ed0db07a57b150e66a6ef1f7c1242cf968402245bac1b07" Netns:"/var/run/netns/bb273ee9-e71c-4d44-94cf-f1c3b9a37afa" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-776cdc94d6-zksq4;K8S_POD_INFRA_CONTAINER_ID=fe12aa686f8f130f2ed0db07a57b150e66a6ef1f7c1242cf968402245bac1b07;K8S_POD_UID=a78c6a97-054e-484e-aae2-a33bd3bb7b40" Path:"" 2025-12-12T16:17:30.591062796+00:00 stderr F 2025-12-12T16:17:30Z [verbose] Del: openshift-route-controller-manager:route-controller-manager-776cdc94d6-zksq4:a78c6a97-054e-484e-aae2-a33bd3bb7b40:ovn-kubernetes:eth0 
{"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:17:30.769430543+00:00 stderr F 2025-12-12T16:17:30Z [verbose] ADD starting CNI request ContainerID:"225a20de8d07f41a40e55510e4ee7645069a3a5efa08475a72cc5ac4c3d86702" Netns:"/var/run/netns/13b38fef-9d41-40c8-aeb9-89f07be5445f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver;K8S_POD_NAME=revision-pruner-12-crc;K8S_POD_INFRA_CONTAINER_ID=225a20de8d07f41a40e55510e4ee7645069a3a5efa08475a72cc5ac4c3d86702;K8S_POD_UID=24732491-f54a-410e-a29e-c8fb26fd9cde" Path:"" 2025-12-12T16:17:30.801484503+00:00 stderr F 2025-12-12T16:17:30Z [verbose] DEL finished CNI request ContainerID:"2bf714089818fd6477a262dc7b43a76fa700b53d570bf643af2f365afa9909f2" Netns:"/var/run/netns/3974ea76-7613-460e-bb4d-be275a41269d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-65b6cccf98-flnsl;K8S_POD_INFRA_CONTAINER_ID=2bf714089818fd6477a262dc7b43a76fa700b53d570bf643af2f365afa9909f2;K8S_POD_UID=d259a06e-3949-41b6-a067-7c01441da4b1" Path:"", result: "", err: 2025-12-12T16:17:30.838111734+00:00 stderr F 2025-12-12T16:17:30Z [verbose] DEL finished CNI request ContainerID:"fe12aa686f8f130f2ed0db07a57b150e66a6ef1f7c1242cf968402245bac1b07" Netns:"/var/run/netns/bb273ee9-e71c-4d44-94cf-f1c3b9a37afa" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-776cdc94d6-zksq4;K8S_POD_INFRA_CONTAINER_ID=fe12aa686f8f130f2ed0db07a57b150e66a6ef1f7c1242cf968402245bac1b07;K8S_POD_UID=a78c6a97-054e-484e-aae2-a33bd3bb7b40" Path:"", result: "", err: 2025-12-12T16:17:31.048240523+00:00 stderr F I1212 16:17:31.036636 11598 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:17:31.048240523+00:00 stderr F I1212 16:17:31.037386 11598 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:17:31.048240523+00:00 stderr F I1212 16:17:31.037407 11598 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:17:31.048240523+00:00 stderr F I1212 16:17:31.037414 11598 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:17:31.048240523+00:00 stderr F I1212 16:17:31.037419 11598 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:17:31.049211151+00:00 stderr F 2025-12-12T16:17:31Z [verbose] Add: openshift-kube-apiserver:revision-pruner-12-crc:24732491-f54a-410e-a29e-c8fb26fd9cde:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"225a20de8d07f41","mac":"d2:30:3f:c8:3a:20"},{"name":"eth0","mac":"0a:58:0a:d9:00:36","sandbox":"/var/run/netns/13b38fef-9d41-40c8-aeb9-89f07be5445f"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.54/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:17:31.049211151+00:00 stderr F I1212 16:17:31.048910 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-apiserver", Name:"revision-pruner-12-crc", UID:"24732491-f54a-410e-a29e-c8fb26fd9cde", APIVersion:"v1", ResourceVersion:"38561", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.54/23] from ovn-kubernetes 
2025-12-12T16:17:31.069233195+00:00 stderr F 2025-12-12T16:17:31Z [verbose] ADD finished CNI request ContainerID:"225a20de8d07f41a40e55510e4ee7645069a3a5efa08475a72cc5ac4c3d86702" Netns:"/var/run/netns/13b38fef-9d41-40c8-aeb9-89f07be5445f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver;K8S_POD_NAME=revision-pruner-12-crc;K8S_POD_INFRA_CONTAINER_ID=225a20de8d07f41a40e55510e4ee7645069a3a5efa08475a72cc5ac4c3d86702;K8S_POD_UID=24732491-f54a-410e-a29e-c8fb26fd9cde" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"d2:30:3f:c8:3a:20\",\"name\":\"225a20de8d07f41\"},{\"mac\":\"0a:58:0a:d9:00:36\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/13b38fef-9d41-40c8-aeb9-89f07be5445f\"}],\"ips\":[{\"address\":\"10.217.0.54/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:17:31.303715573+00:00 stderr F 2025-12-12T16:17:31Z [verbose] ADD starting CNI request ContainerID:"a91e75a5dac6930aac28aa81157a93d650d81215f1bbe01d548fac770f1d603f" Netns:"/var/run/netns/a9e6a432-90f2-4eb2-a837-6d9f411e3f3b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-69f958c846-qd8rg;K8S_POD_INFRA_CONTAINER_ID=a91e75a5dac6930aac28aa81157a93d650d81215f1bbe01d548fac770f1d603f;K8S_POD_UID=94e12db4-0aff-472b-9bb0-82451f7e2e17" Path:"" 2025-12-12T16:17:31.518067643+00:00 stderr F 2025-12-12T16:17:31Z [verbose] ADD starting CNI request ContainerID:"8e06db9851a81391ddff393260eef28cf7e0fe05ed2c6b8c6e0a25403f2c97d7" Netns:"/var/run/netns/cbfde274-2671-40f2-a1ab-8dcc916fe15f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-f4599bd79-7rg9b;K8S_POD_INFRA_CONTAINER_ID=8e06db9851a81391ddff393260eef28cf7e0fe05ed2c6b8c6e0a25403f2c97d7;K8S_POD_UID=e6c91f7f-5413-4050-bfac-93d5daa7e99f" Path:"" 2025-12-12T16:17:31.554001924+00:00 stderr F I1212 16:17:31.535774 11788 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:17:31.554001924+00:00 stderr F I1212 16:17:31.536307 11788 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:17:31.554001924+00:00 stderr F I1212 16:17:31.536346 11788 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:17:31.554001924+00:00 stderr F I1212 16:17:31.536367 11788 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:17:31.554001924+00:00 stderr F I1212 16:17:31.536387 11788 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:17:31.554001924+00:00 stderr F 2025-12-12T16:17:31Z [verbose] Add: openshift-controller-manager:controller-manager-69f958c846-qd8rg:94e12db4-0aff-472b-9bb0-82451f7e2e17:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"a91e75a5dac6930","mac":"46:ac:74:28:ea:b4"},{"name":"eth0","mac":"0a:58:0a:d9:00:37","sandbox":"/var/run/netns/a9e6a432-90f2-4eb2-a837-6d9f411e3f3b"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.55/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:17:31.556596338+00:00 stderr F I1212 16:17:31.554086 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-controller-manager", Name:"controller-manager-69f958c846-qd8rg", UID:"94e12db4-0aff-472b-9bb0-82451f7e2e17", APIVersion:"v1", ResourceVersion:"38604", FieldPath:""}): type: 'Normal' reason: 
'AddedInterface' Add eth0 [10.217.0.55/23] from ovn-kubernetes 2025-12-12T16:17:31.581880624+00:00 stderr F 2025-12-12T16:17:31Z [verbose] ADD finished CNI request ContainerID:"a91e75a5dac6930aac28aa81157a93d650d81215f1bbe01d548fac770f1d603f" Netns:"/var/run/netns/a9e6a432-90f2-4eb2-a837-6d9f411e3f3b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-69f958c846-qd8rg;K8S_POD_INFRA_CONTAINER_ID=a91e75a5dac6930aac28aa81157a93d650d81215f1bbe01d548fac770f1d603f;K8S_POD_UID=94e12db4-0aff-472b-9bb0-82451f7e2e17" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"46:ac:74:28:ea:b4\",\"name\":\"a91e75a5dac6930\"},{\"mac\":\"0a:58:0a:d9:00:37\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/a9e6a432-90f2-4eb2-a837-6d9f411e3f3b\"}],\"ips\":[{\"address\":\"10.217.0.55/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:17:31.770018231+00:00 stderr F I1212 16:17:31.755676 11837 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:17:31.770018231+00:00 stderr F I1212 16:17:31.756054 11837 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:17:31.770018231+00:00 stderr F I1212 16:17:31.756062 11837 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:17:31.770018231+00:00 stderr F I1212 16:17:31.756069 11837 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:17:31.770018231+00:00 stderr F I1212 16:17:31.756075 11837 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:17:31.780657397+00:00 stderr F 2025-12-12T16:17:31Z [verbose] Add: openshift-route-controller-manager:route-controller-manager-f4599bd79-7rg9b:e6c91f7f-5413-4050-bfac-93d5daa7e99f:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"8e06db9851a8139","mac":"86:0d:2c:21:2c:af"},{"name":"eth0","mac":"0a:58:0a:d9:00:38","sandbox":"/var/run/netns/cbfde274-2671-40f2-a1ab-8dcc916fe15f"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.56/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:17:31.781086959+00:00 stderr F I1212 16:17:31.781029 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-route-controller-manager", Name:"route-controller-manager-f4599bd79-7rg9b", UID:"e6c91f7f-5413-4050-bfac-93d5daa7e99f", APIVersion:"v1", ResourceVersion:"38606", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.56/23] from ovn-kubernetes 2025-12-12T16:17:31.811583304+00:00 stderr F 2025-12-12T16:17:31Z [verbose] ADD finished CNI request ContainerID:"8e06db9851a81391ddff393260eef28cf7e0fe05ed2c6b8c6e0a25403f2c97d7" Netns:"/var/run/netns/cbfde274-2671-40f2-a1ab-8dcc916fe15f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-f4599bd79-7rg9b;K8S_POD_INFRA_CONTAINER_ID=8e06db9851a81391ddff393260eef28cf7e0fe05ed2c6b8c6e0a25403f2c97d7;K8S_POD_UID=e6c91f7f-5413-4050-bfac-93d5daa7e99f" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"86:0d:2c:21:2c:af\",\"name\":\"8e06db9851a8139\"},{\"mac\":\"0a:58:0a:d9:00:38\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/cbfde274-2671-40f2-a1ab-8dcc916fe15f\"}],\"ips\":[{\"address\":\"10.217.0.56/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 
2025-12-12T16:17:34.957730240+00:00 stderr F 2025-12-12T16:17:34Z [verbose] DEL starting CNI request ContainerID:"225a20de8d07f41a40e55510e4ee7645069a3a5efa08475a72cc5ac4c3d86702" Netns:"/var/run/netns/13b38fef-9d41-40c8-aeb9-89f07be5445f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver;K8S_POD_NAME=revision-pruner-12-crc;K8S_POD_INFRA_CONTAINER_ID=225a20de8d07f41a40e55510e4ee7645069a3a5efa08475a72cc5ac4c3d86702;K8S_POD_UID=24732491-f54a-410e-a29e-c8fb26fd9cde" Path:"" 2025-12-12T16:17:34.957730240+00:00 stderr F 2025-12-12T16:17:34Z [verbose] Del: openshift-kube-apiserver:revision-pruner-12-crc:24732491-f54a-410e-a29e-c8fb26fd9cde:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:17:35.193018700+00:00 stderr F 2025-12-12T16:17:35Z [verbose] DEL finished CNI request ContainerID:"225a20de8d07f41a40e55510e4ee7645069a3a5efa08475a72cc5ac4c3d86702" Netns:"/var/run/netns/13b38fef-9d41-40c8-aeb9-89f07be5445f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver;K8S_POD_NAME=revision-pruner-12-crc;K8S_POD_INFRA_CONTAINER_ID=225a20de8d07f41a40e55510e4ee7645069a3a5efa08475a72cc5ac4c3d86702;K8S_POD_UID=24732491-f54a-410e-a29e-c8fb26fd9cde" Path:"", result: "", err: 2025-12-12T16:17:35.268255419+00:00 stderr F 2025-12-12T16:17:35Z [verbose] ADD starting CNI request ContainerID:"e9a0bf2b155dc14ff07a59baf202683f9cd8e1f0c8d1a97324c66ce16b92ed3d" Netns:"/var/run/netns/27839b15-789c-400c-95ff-4957ec849f4f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver;K8S_POD_NAME=installer-12-crc;K8S_POD_INFRA_CONTAINER_ID=e9a0bf2b155dc14ff07a59baf202683f9cd8e1f0c8d1a97324c66ce16b92ed3d;K8S_POD_UID=214aeed8-f6a2-4251-b4d0-c81fd217c7c2" Path:"" 2025-12-12T16:17:35.489615980+00:00 stderr F I1212 16:17:35.470980 12270 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:17:35.489615980+00:00 stderr F I1212 16:17:35.471630 12270 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:17:35.489615980+00:00 stderr F I1212 16:17:35.471641 12270 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:17:35.489615980+00:00 stderr F I1212 16:17:35.471673 12270 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:17:35.489615980+00:00 stderr F I1212 16:17:35.471680 12270 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:17:35.489693592+00:00 stderr F 2025-12-12T16:17:35Z [verbose] Add: openshift-kube-apiserver:installer-12-crc:214aeed8-f6a2-4251-b4d0-c81fd217c7c2:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"e9a0bf2b155dc14","mac":"62:cb:05:82:c3:7b"},{"name":"eth0","mac":"0a:58:0a:d9:00:39","sandbox":"/var/run/netns/27839b15-789c-400c-95ff-4957ec849f4f"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.57/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:17:35.492148963+00:00 stderr F I1212 16:17:35.489952 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-apiserver", Name:"installer-12-crc", UID:"214aeed8-f6a2-4251-b4d0-c81fd217c7c2", APIVersion:"v1", ResourceVersion:"38725", FieldPath:""}): type: 'Normal' 
reason: 'AddedInterface' Add eth0 [10.217.0.57/23] from ovn-kubernetes 2025-12-12T16:17:35.511268741+00:00 stderr F 2025-12-12T16:17:35Z [verbose] ADD finished CNI request ContainerID:"e9a0bf2b155dc14ff07a59baf202683f9cd8e1f0c8d1a97324c66ce16b92ed3d" Netns:"/var/run/netns/27839b15-789c-400c-95ff-4957ec849f4f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver;K8S_POD_NAME=installer-12-crc;K8S_POD_INFRA_CONTAINER_ID=e9a0bf2b155dc14ff07a59baf202683f9cd8e1f0c8d1a97324c66ce16b92ed3d;K8S_POD_UID=214aeed8-f6a2-4251-b4d0-c81fd217c7c2" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"62:cb:05:82:c3:7b\",\"name\":\"e9a0bf2b155dc14\"},{\"mac\":\"0a:58:0a:d9:00:39\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/27839b15-789c-400c-95ff-4957ec849f4f\"}],\"ips\":[{\"address\":\"10.217.0.57/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:17:45.934001708+00:00 stderr F 2025-12-12T16:17:45Z [verbose] DEL starting CNI request ContainerID:"ff8c45863778a48a425a28a9a87918b0efc06a9a71abddaf0a58cf0518f7b451" Netns:"/var/run/netns/2f0a4a9c-7a74-4fd3-8443-f40eda9809c4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-kxjp8;K8S_POD_INFRA_CONTAINER_ID=ff8c45863778a48a425a28a9a87918b0efc06a9a71abddaf0a58cf0518f7b451;K8S_POD_UID=5319f16c-f39a-4bd6-836a-cb336099dbc2" Path:"" 2025-12-12T16:17:45.935351101+00:00 stderr F 2025-12-12T16:17:45Z [verbose] Del: openshift-marketplace:certified-operators-kxjp8:5319f16c-f39a-4bd6-836a-cb336099dbc2:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:17:46.198527068+00:00 stderr F 2025-12-12T16:17:46Z [verbose] DEL finished CNI request ContainerID:"ff8c45863778a48a425a28a9a87918b0efc06a9a71abddaf0a58cf0518f7b451" Netns:"/var/run/netns/2f0a4a9c-7a74-4fd3-8443-f40eda9809c4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-kxjp8;K8S_POD_INFRA_CONTAINER_ID=ff8c45863778a48a425a28a9a87918b0efc06a9a71abddaf0a58cf0518f7b451;K8S_POD_UID=5319f16c-f39a-4bd6-836a-cb336099dbc2" Path:"", result: "", err: 2025-12-12T16:17:46.563574793+00:00 stderr F 2025-12-12T16:17:46Z [verbose] DEL starting CNI request ContainerID:"36bd50d659f1abd49597b7cae2eaed8aebe612ec36c3f9fbc5758f96ffbde8ed" Netns:"/var/run/netns/e337e284-954e-4a1e-a479-14d6b90526f1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-p7s65;K8S_POD_INFRA_CONTAINER_ID=36bd50d659f1abd49597b7cae2eaed8aebe612ec36c3f9fbc5758f96ffbde8ed;K8S_POD_UID=5957e518-15e6-4acf-9e45-4985b7713fc8" Path:"" 2025-12-12T16:17:46.565360477+00:00 stderr F 2025-12-12T16:17:46Z [verbose] Del: openshift-marketplace:community-operators-p7s65:5957e518-15e6-4acf-9e45-4985b7713fc8:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:17:46.593803340+00:00 stderr F 2025-12-12T16:17:46Z [verbose] DEL starting CNI request ContainerID:"6f2c7e4ee8005058653be608254682e6f8ccf99963c0cc49075bb88e3c4fee94" 
Netns:"/var/run/netns/004de1b2-071e-42b4-bf95-47e3937a2607" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-marketplace-mgp9n;K8S_POD_INFRA_CONTAINER_ID=6f2c7e4ee8005058653be608254682e6f8ccf99963c0cc49075bb88e3c4fee94;K8S_POD_UID=86909e43-e62d-4532-8232-aa3ca0de5d28" Path:"" 2025-12-12T16:17:46.593803340+00:00 stderr F 2025-12-12T16:17:46Z [verbose] Del: openshift-marketplace:redhat-marketplace-mgp9n:86909e43-e62d-4532-8232-aa3ca0de5d28:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:17:46.852009473+00:00 stderr F 2025-12-12T16:17:46Z [verbose] DEL finished CNI request ContainerID:"36bd50d659f1abd49597b7cae2eaed8aebe612ec36c3f9fbc5758f96ffbde8ed" Netns:"/var/run/netns/e337e284-954e-4a1e-a479-14d6b90526f1" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-p7s65;K8S_POD_INFRA_CONTAINER_ID=36bd50d659f1abd49597b7cae2eaed8aebe612ec36c3f9fbc5758f96ffbde8ed;K8S_POD_UID=5957e518-15e6-4acf-9e45-4985b7713fc8" Path:"", result: "", err: 2025-12-12T16:17:46.907025793+00:00 stderr F 2025-12-12T16:17:46Z [verbose] DEL finished CNI request ContainerID:"6f2c7e4ee8005058653be608254682e6f8ccf99963c0cc49075bb88e3c4fee94" Netns:"/var/run/netns/004de1b2-071e-42b4-bf95-47e3937a2607" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-marketplace-mgp9n;K8S_POD_INFRA_CONTAINER_ID=6f2c7e4ee8005058653be608254682e6f8ccf99963c0cc49075bb88e3c4fee94;K8S_POD_UID=86909e43-e62d-4532-8232-aa3ca0de5d28" Path:"", result: "", err: 2025-12-12T16:17:47.613552130+00:00 stderr F 2025-12-12T16:17:47Z [verbose] DEL starting CNI request ContainerID:"8e06db9851a81391ddff393260eef28cf7e0fe05ed2c6b8c6e0a25403f2c97d7" Netns:"/var/run/netns/cbfde274-2671-40f2-a1ab-8dcc916fe15f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-f4599bd79-7rg9b;K8S_POD_INFRA_CONTAINER_ID=8e06db9851a81391ddff393260eef28cf7e0fe05ed2c6b8c6e0a25403f2c97d7;K8S_POD_UID=e6c91f7f-5413-4050-bfac-93d5daa7e99f" Path:"" 2025-12-12T16:17:47.614049853+00:00 stderr F 2025-12-12T16:17:47Z [verbose] Del: openshift-route-controller-manager:route-controller-manager-f4599bd79-7rg9b:e6c91f7f-5413-4050-bfac-93d5daa7e99f:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:17:47.616589015+00:00 stderr F 2025-12-12T16:17:47Z [verbose] DEL starting CNI request ContainerID:"a91e75a5dac6930aac28aa81157a93d650d81215f1bbe01d548fac770f1d603f" Netns:"/var/run/netns/a9e6a432-90f2-4eb2-a837-6d9f411e3f3b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-69f958c846-qd8rg;K8S_POD_INFRA_CONTAINER_ID=a91e75a5dac6930aac28aa81157a93d650d81215f1bbe01d548fac770f1d603f;K8S_POD_UID=94e12db4-0aff-472b-9bb0-82451f7e2e17" Path:"" 2025-12-12T16:17:47.616798420+00:00 stderr F 2025-12-12T16:17:47Z [verbose] Del: openshift-controller-manager:controller-manager-69f958c846-qd8rg:94e12db4-0aff-472b-9bb0-82451f7e2e17:ovn-kubernetes:eth0 
{"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:17:47.628017928+00:00 stderr F 2025-12-12T16:17:47Z [verbose] DEL starting CNI request ContainerID:"4e9f04b1e852fa9141933d1eca7d926563f8ad649e9315eff76350ec836adf3d" Netns:"/var/run/netns/b5243c69-84a2-41be-99ed-a94a39b1c756" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-2blsm;K8S_POD_INFRA_CONTAINER_ID=4e9f04b1e852fa9141933d1eca7d926563f8ad649e9315eff76350ec836adf3d;K8S_POD_UID=fb3b2430-d128-4d2d-9518-6be0ca0ddc6f" Path:"" 2025-12-12T16:17:47.628309155+00:00 stderr F 2025-12-12T16:17:47Z [verbose] Del: openshift-marketplace:redhat-operators-2blsm:fb3b2430-d128-4d2d-9518-6be0ca0ddc6f:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:17:47.971649553+00:00 stderr F 2025-12-12T16:17:47Z [verbose] DEL finished CNI request ContainerID:"8e06db9851a81391ddff393260eef28cf7e0fe05ed2c6b8c6e0a25403f2c97d7" Netns:"/var/run/netns/cbfde274-2671-40f2-a1ab-8dcc916fe15f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-f4599bd79-7rg9b;K8S_POD_INFRA_CONTAINER_ID=8e06db9851a81391ddff393260eef28cf7e0fe05ed2c6b8c6e0a25403f2c97d7;K8S_POD_UID=e6c91f7f-5413-4050-bfac-93d5daa7e99f" Path:"", result: "", err: 2025-12-12T16:17:47.974406792+00:00 stderr F 2025-12-12T16:17:47Z [verbose] DEL finished CNI request ContainerID:"4e9f04b1e852fa9141933d1eca7d926563f8ad649e9315eff76350ec836adf3d" Netns:"/var/run/netns/b5243c69-84a2-41be-99ed-a94a39b1c756" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-2blsm;K8S_POD_INFRA_CONTAINER_ID=4e9f04b1e852fa9141933d1eca7d926563f8ad649e9315eff76350ec836adf3d;K8S_POD_UID=fb3b2430-d128-4d2d-9518-6be0ca0ddc6f" Path:"", result: "", err: 2025-12-12T16:17:48.196059401+00:00 stderr F 2025-12-12T16:17:48Z [verbose] DEL finished CNI request ContainerID:"a91e75a5dac6930aac28aa81157a93d650d81215f1bbe01d548fac770f1d603f" Netns:"/var/run/netns/a9e6a432-90f2-4eb2-a837-6d9f411e3f3b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-69f958c846-qd8rg;K8S_POD_INFRA_CONTAINER_ID=a91e75a5dac6930aac28aa81157a93d650d81215f1bbe01d548fac770f1d603f;K8S_POD_UID=94e12db4-0aff-472b-9bb0-82451f7e2e17" Path:"", result: "", err: 2025-12-12T16:17:48.601934026+00:00 stderr F 2025-12-12T16:17:48Z [verbose] ADD starting CNI request ContainerID:"609e288f6150383689e6e4701e91eb57b7f4ba8565dc180e325d258aabd97881" Netns:"/var/run/netns/b9d4315c-b0c1-4912-8453-5574a067031d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-6b47f77689-5r77s;K8S_POD_INFRA_CONTAINER_ID=609e288f6150383689e6e4701e91eb57b7f4ba8565dc180e325d258aabd97881;K8S_POD_UID=1f01c145-aa38-45ce-bd88-2ec20e5b6b01" Path:"" 2025-12-12T16:17:48.751649418+00:00 stderr F I1212 16:17:48.746047 13145 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:17:48.751649418+00:00 stderr F I1212 
16:17:48.746604 13145 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:17:48.751649418+00:00 stderr F I1212 16:17:48.746618 13145 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:17:48.751649418+00:00 stderr F I1212 16:17:48.746626 13145 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:17:48.751649418+00:00 stderr F I1212 16:17:48.746632 13145 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:17:48.751976126+00:00 stderr F 2025-12-12T16:17:48Z [verbose] Add: openshift-route-controller-manager:route-controller-manager-6b47f77689-5r77s:1f01c145-aa38-45ce-bd88-2ec20e5b6b01:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"609e288f6150383","mac":"72:bf:cc:fe:a3:0f"},{"name":"eth0","mac":"0a:58:0a:d9:00:3c","sandbox":"/var/run/netns/b9d4315c-b0c1-4912-8453-5574a067031d"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.60/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:17:48.752348075+00:00 stderr F I1212 16:17:48.752316 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-route-controller-manager", Name:"route-controller-manager-6b47f77689-5r77s", UID:"1f01c145-aa38-45ce-bd88-2ec20e5b6b01", APIVersion:"v1", ResourceVersion:"38882", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.60/23] from ovn-kubernetes 2025-12-12T16:17:48.771621101+00:00 stderr F 2025-12-12T16:17:48Z [verbose] ADD finished CNI request ContainerID:"609e288f6150383689e6e4701e91eb57b7f4ba8565dc180e325d258aabd97881" Netns:"/var/run/netns/b9d4315c-b0c1-4912-8453-5574a067031d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-6b47f77689-5r77s;K8S_POD_INFRA_CONTAINER_ID=609e288f6150383689e6e4701e91eb57b7f4ba8565dc180e325d258aabd97881;K8S_POD_UID=1f01c145-aa38-45ce-bd88-2ec20e5b6b01" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"72:bf:cc:fe:a3:0f\",\"name\":\"609e288f6150383\"},{\"mac\":\"0a:58:0a:d9:00:3c\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/b9d4315c-b0c1-4912-8453-5574a067031d\"}],\"ips\":[{\"address\":\"10.217.0.60/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:17:49.311372977+00:00 stderr F 2025-12-12T16:17:49Z [verbose] ADD starting CNI request ContainerID:"5f7de056136ddaf6c387370bd5cf72cf4ec9d929b91af02b0dcd7c0aceeb020b" Netns:"/var/run/netns/a553ccb1-7a61-4c09-ad96-465e78e20067" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6445bd5bb7-qhd4b;K8S_POD_INFRA_CONTAINER_ID=5f7de056136ddaf6c387370bd5cf72cf4ec9d929b91af02b0dcd7c0aceeb020b;K8S_POD_UID=0ce08791-98bd-44a9-8d91-e27aefc67c18" Path:"" 2025-12-12T16:17:49.474337396+00:00 stderr F I1212 16:17:49.461243 13216 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:17:49.474337396+00:00 stderr F I1212 16:17:49.461960 13216 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:17:49.474337396+00:00 stderr F I1212 16:17:49.461976 13216 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:17:49.474337396+00:00 stderr F I1212 16:17:49.461982 13216 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 
2025-12-12T16:17:49.474337396+00:00 stderr F I1212 16:17:49.461988 13216 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:17:49.474337396+00:00 stderr F 2025-12-12T16:17:49Z [verbose] Add: openshift-controller-manager:controller-manager-6445bd5bb7-qhd4b:0ce08791-98bd-44a9-8d91-e27aefc67c18:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"5f7de056136ddaf","mac":"52:6b:b0:24:67:8b"},{"name":"eth0","mac":"0a:58:0a:d9:00:3d","sandbox":"/var/run/netns/a553ccb1-7a61-4c09-ad96-465e78e20067"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.61/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:17:49.474510380+00:00 stderr F I1212 16:17:49.474413 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-controller-manager", Name:"controller-manager-6445bd5bb7-qhd4b", UID:"0ce08791-98bd-44a9-8d91-e27aefc67c18", APIVersion:"v1", ResourceVersion:"38897", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.61/23] from ovn-kubernetes 2025-12-12T16:17:49.496306819+00:00 stderr F 2025-12-12T16:17:49Z [verbose] ADD finished CNI request ContainerID:"5f7de056136ddaf6c387370bd5cf72cf4ec9d929b91af02b0dcd7c0aceeb020b" Netns:"/var/run/netns/a553ccb1-7a61-4c09-ad96-465e78e20067" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6445bd5bb7-qhd4b;K8S_POD_INFRA_CONTAINER_ID=5f7de056136ddaf6c387370bd5cf72cf4ec9d929b91af02b0dcd7c0aceeb020b;K8S_POD_UID=0ce08791-98bd-44a9-8d91-e27aefc67c18" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"52:6b:b0:24:67:8b\",\"name\":\"5f7de056136ddaf\"},{\"mac\":\"0a:58:0a:d9:00:3d\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/a553ccb1-7a61-4c09-ad96-465e78e20067\"}],\"ips\":[{\"address\":\"10.217.0.61/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:18:07.110696893+00:00 stderr F 2025-12-12T16:18:07Z [verbose] DEL starting CNI request ContainerID:"5f7de056136ddaf6c387370bd5cf72cf4ec9d929b91af02b0dcd7c0aceeb020b" Netns:"/var/run/netns/a553ccb1-7a61-4c09-ad96-465e78e20067" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6445bd5bb7-qhd4b;K8S_POD_INFRA_CONTAINER_ID=5f7de056136ddaf6c387370bd5cf72cf4ec9d929b91af02b0dcd7c0aceeb020b;K8S_POD_UID=0ce08791-98bd-44a9-8d91-e27aefc67c18" Path:"" 2025-12-12T16:18:07.111482562+00:00 stderr F 2025-12-12T16:18:07Z [verbose] Del: openshift-controller-manager:controller-manager-6445bd5bb7-qhd4b:0ce08791-98bd-44a9-8d91-e27aefc67c18:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:18:07.114311132+00:00 stderr F 2025-12-12T16:18:07Z [verbose] DEL starting CNI request ContainerID:"609e288f6150383689e6e4701e91eb57b7f4ba8565dc180e325d258aabd97881" Netns:"/var/run/netns/b9d4315c-b0c1-4912-8453-5574a067031d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-6b47f77689-5r77s;K8S_POD_INFRA_CONTAINER_ID=609e288f6150383689e6e4701e91eb57b7f4ba8565dc180e325d258aabd97881;K8S_POD_UID=1f01c145-aa38-45ce-bd88-2ec20e5b6b01" Path:"" 2025-12-12T16:18:07.114491177+00:00 stderr F 2025-12-12T16:18:07Z [verbose] Del: 
openshift-route-controller-manager:route-controller-manager-6b47f77689-5r77s:1f01c145-aa38-45ce-bd88-2ec20e5b6b01:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:18:07.401723228+00:00 stderr F 2025-12-12T16:18:07Z [verbose] DEL finished CNI request ContainerID:"609e288f6150383689e6e4701e91eb57b7f4ba8565dc180e325d258aabd97881" Netns:"/var/run/netns/b9d4315c-b0c1-4912-8453-5574a067031d" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-6b47f77689-5r77s;K8S_POD_INFRA_CONTAINER_ID=609e288f6150383689e6e4701e91eb57b7f4ba8565dc180e325d258aabd97881;K8S_POD_UID=1f01c145-aa38-45ce-bd88-2ec20e5b6b01" Path:"", result: "", err: 2025-12-12T16:18:07.678598493+00:00 stderr F 2025-12-12T16:18:07Z [verbose] DEL finished CNI request ContainerID:"5f7de056136ddaf6c387370bd5cf72cf4ec9d929b91af02b0dcd7c0aceeb020b" Netns:"/var/run/netns/a553ccb1-7a61-4c09-ad96-465e78e20067" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6445bd5bb7-qhd4b;K8S_POD_INFRA_CONTAINER_ID=5f7de056136ddaf6c387370bd5cf72cf4ec9d929b91af02b0dcd7c0aceeb020b;K8S_POD_UID=0ce08791-98bd-44a9-8d91-e27aefc67c18" Path:"", result: "", err: 2025-12-12T16:18:07.845905049+00:00 stderr F 2025-12-12T16:18:07Z [verbose] ADD starting CNI request ContainerID:"2d3dc2744fa5b4ed8734404b5f41bcb8d9a837bba2ffffb3ba8c6a4da8a52f1e" Netns:"/var/run/netns/670304b3-e042-4a71-8181-579dd4a038c5" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-67bd47cff9-br6nz;K8S_POD_INFRA_CONTAINER_ID=2d3dc2744fa5b4ed8734404b5f41bcb8d9a837bba2ffffb3ba8c6a4da8a52f1e;K8S_POD_UID=a3af7089-05b2-4dcb-947b-3dd784d92815" Path:"" 2025-12-12T16:18:08.162376452+00:00 stderr F 2025-12-12T16:18:08Z [verbose] ADD starting CNI request ContainerID:"0f2c183e8f515b2f190e8930f24422ca27de87c62982b51512617516d3516532" Netns:"/var/run/netns/907494a8-5725-49e2-b13a-19c4c7fbd83e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-7fffb5779-6br5z;K8S_POD_INFRA_CONTAINER_ID=0f2c183e8f515b2f190e8930f24422ca27de87c62982b51512617516d3516532;K8S_POD_UID=b2944f3c-2b29-4f86-8a67-59975d09aa88" Path:"" 2025-12-12T16:18:08.186801106+00:00 stderr F I1212 16:18:08.181710 13672 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:18:08.186801106+00:00 stderr F I1212 16:18:08.182044 13672 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:18:08.186801106+00:00 stderr F I1212 16:18:08.182059 13672 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:18:08.186801106+00:00 stderr F I1212 16:18:08.182069 13672 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:18:08.186801106+00:00 stderr F I1212 16:18:08.182078 13672 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:18:08.187284888+00:00 stderr F 2025-12-12T16:18:08Z [verbose] Add: 
openshift-route-controller-manager:route-controller-manager-67bd47cff9-br6nz:a3af7089-05b2-4dcb-947b-3dd784d92815:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"2d3dc2744fa5b4e","mac":"66:2f:c6:d2:82:f9"},{"name":"eth0","mac":"0a:58:0a:d9:00:3e","sandbox":"/var/run/netns/670304b3-e042-4a71-8181-579dd4a038c5"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.62/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:18:08.187473023+00:00 stderr F I1212 16:18:08.187398 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-route-controller-manager", Name:"route-controller-manager-67bd47cff9-br6nz", UID:"a3af7089-05b2-4dcb-947b-3dd784d92815", APIVersion:"v1", ResourceVersion:"39071", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.62/23] from ovn-kubernetes 2025-12-12T16:18:08.204244197+00:00 stderr F 2025-12-12T16:18:08Z [verbose] ADD finished CNI request ContainerID:"2d3dc2744fa5b4ed8734404b5f41bcb8d9a837bba2ffffb3ba8c6a4da8a52f1e" Netns:"/var/run/netns/670304b3-e042-4a71-8181-579dd4a038c5" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-67bd47cff9-br6nz;K8S_POD_INFRA_CONTAINER_ID=2d3dc2744fa5b4ed8734404b5f41bcb8d9a837bba2ffffb3ba8c6a4da8a52f1e;K8S_POD_UID=a3af7089-05b2-4dcb-947b-3dd784d92815" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"66:2f:c6:d2:82:f9\",\"name\":\"2d3dc2744fa5b4e\"},{\"mac\":\"0a:58:0a:d9:00:3e\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/670304b3-e042-4a71-8181-579dd4a038c5\"}],\"ips\":[{\"address\":\"10.217.0.62/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:18:08.514939019+00:00 stderr F I1212 16:18:08.509273 13725 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:18:08.514939019+00:00 stderr F I1212 16:18:08.509589 13725 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:18:08.514939019+00:00 stderr F I1212 16:18:08.509600 13725 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:18:08.514939019+00:00 stderr F I1212 16:18:08.509613 13725 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:18:08.514939019+00:00 stderr F I1212 16:18:08.509621 13725 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:18:08.515306458+00:00 stderr F 2025-12-12T16:18:08Z [verbose] Add: openshift-controller-manager:controller-manager-7fffb5779-6br5z:b2944f3c-2b29-4f86-8a67-59975d09aa88:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"0f2c183e8f515b2","mac":"f2:64:f5:25:92:50"},{"name":"eth0","mac":"0a:58:0a:d9:00:3f","sandbox":"/var/run/netns/907494a8-5725-49e2-b13a-19c4c7fbd83e"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.63/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:18:08.515507443+00:00 stderr F I1212 16:18:08.515420 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-controller-manager", Name:"controller-manager-7fffb5779-6br5z", UID:"b2944f3c-2b29-4f86-8a67-59975d09aa88", APIVersion:"v1", ResourceVersion:"39080", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.63/23] from ovn-kubernetes 2025-12-12T16:18:08.535656481+00:00 stderr F 2025-12-12T16:18:08Z [verbose] ADD finished CNI request 
ContainerID:"0f2c183e8f515b2f190e8930f24422ca27de87c62982b51512617516d3516532" Netns:"/var/run/netns/907494a8-5725-49e2-b13a-19c4c7fbd83e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-7fffb5779-6br5z;K8S_POD_INFRA_CONTAINER_ID=0f2c183e8f515b2f190e8930f24422ca27de87c62982b51512617516d3516532;K8S_POD_UID=b2944f3c-2b29-4f86-8a67-59975d09aa88" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"f2:64:f5:25:92:50\",\"name\":\"0f2c183e8f515b2\"},{\"mac\":\"0a:58:0a:d9:00:3f\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/907494a8-5725-49e2-b13a-19c4c7fbd83e\"}],\"ips\":[{\"address\":\"10.217.0.63/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:18:15.317625940+00:00 stderr F 2025-12-12T16:18:15Z [verbose] DEL starting CNI request ContainerID:"e9a0bf2b155dc14ff07a59baf202683f9cd8e1f0c8d1a97324c66ce16b92ed3d" Netns:"/var/run/netns/27839b15-789c-400c-95ff-4957ec849f4f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver;K8S_POD_NAME=installer-12-crc;K8S_POD_INFRA_CONTAINER_ID=e9a0bf2b155dc14ff07a59baf202683f9cd8e1f0c8d1a97324c66ce16b92ed3d;K8S_POD_UID=214aeed8-f6a2-4251-b4d0-c81fd217c7c2" Path:"" 2025-12-12T16:18:15.318549752+00:00 stderr F 2025-12-12T16:18:15Z [verbose] Del: openshift-kube-apiserver:installer-12-crc:214aeed8-f6a2-4251-b4d0-c81fd217c7c2:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:18:15.513598394+00:00 stderr F 2025-12-12T16:18:15Z [verbose] DEL finished CNI request ContainerID:"e9a0bf2b155dc14ff07a59baf202683f9cd8e1f0c8d1a97324c66ce16b92ed3d" Netns:"/var/run/netns/27839b15-789c-400c-95ff-4957ec849f4f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-kube-apiserver;K8S_POD_NAME=installer-12-crc;K8S_POD_INFRA_CONTAINER_ID=e9a0bf2b155dc14ff07a59baf202683f9cd8e1f0c8d1a97324c66ce16b92ed3d;K8S_POD_UID=214aeed8-f6a2-4251-b4d0-c81fd217c7c2" Path:"", result: "", err: 2025-12-12T16:18:26.538861548+00:00 stderr F 2025-12-12T16:18:26Z [verbose] DEL starting CNI request ContainerID:"63d4f7893d2a6e51680e692730931a8e2db49032b3b5feb5b320f7d42af3e4ba" Netns:"/var/run/netns/60c60576-fd2a-4668-b2a8-885a179c5e34" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-66458b6674-brfdj;K8S_POD_INFRA_CONTAINER_ID=63d4f7893d2a6e51680e692730931a8e2db49032b3b5feb5b320f7d42af3e4ba;K8S_POD_UID=e13eeec0-72dd-418b-9180-87ca0d56870d" Path:"" 2025-12-12T16:18:26.539638728+00:00 stderr F 2025-12-12T16:18:26Z [verbose] Del: openshift-authentication:oauth-openshift-66458b6674-brfdj:e13eeec0-72dd-418b-9180-87ca0d56870d:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:18:26.787986678+00:00 stderr F 2025-12-12T16:18:26Z [verbose] DEL finished CNI request ContainerID:"63d4f7893d2a6e51680e692730931a8e2db49032b3b5feb5b320f7d42af3e4ba" Netns:"/var/run/netns/60c60576-fd2a-4668-b2a8-885a179c5e34" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-66458b6674-brfdj;K8S_POD_INFRA_CONTAINER_ID=63d4f7893d2a6e51680e692730931a8e2db49032b3b5feb5b320f7d42af3e4ba;K8S_POD_UID=e13eeec0-72dd-418b-9180-87ca0d56870d" Path:"", result: "", err: 2025-12-12T16:18:52.430304826+00:00 stderr F 2025-12-12T16:18:52Z [verbose] ADD starting CNI request ContainerID:"8e6e27ebbeb78e69b2b8b28991eb52250199f5ef450238666cf895de621d609a" Netns:"/var/run/netns/79cf75b5-82c2-413b-b768-9fe606923849" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-6567f5ffdb-jrpfr;K8S_POD_INFRA_CONTAINER_ID=8e6e27ebbeb78e69b2b8b28991eb52250199f5ef450238666cf895de621d609a;K8S_POD_UID=5b0a332f-52bd-409b-b5c0-f2723c617bed" Path:"" 2025-12-12T16:18:58.765618071+00:00 stderr F I1212 16:18:58.755063 14915 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:18:58.765618071+00:00 stderr F I1212 16:18:58.756457 14915 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:18:58.765618071+00:00 stderr F I1212 16:18:58.756504 14915 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:18:58.765618071+00:00 stderr F I1212 16:18:58.756512 14915 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:18:58.765618071+00:00 stderr F I1212 16:18:58.756519 14915 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:18:58.765618071+00:00 stderr F 2025-12-12T16:18:58Z [verbose] Add: openshift-authentication:oauth-openshift-6567f5ffdb-jrpfr:5b0a332f-52bd-409b-b5c0-f2723c617bed:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"8e6e27ebbeb78e6","mac":"aa:a9:2d:3c:6c:a0"},{"name":"eth0","mac":"0a:58:0a:d9:00:40","sandbox":"/var/run/netns/79cf75b5-82c2-413b-b768-9fe606923849"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.64/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:18:58.765618071+00:00 stderr F I1212 16:18:58.765376 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-authentication", Name:"oauth-openshift-6567f5ffdb-jrpfr", UID:"5b0a332f-52bd-409b-b5c0-f2723c617bed", APIVersion:"v1", ResourceVersion:"39207", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.64/23] from ovn-kubernetes 2025-12-12T16:18:58.783982545+00:00 stderr F 2025-12-12T16:18:58Z [verbose] ADD finished CNI request ContainerID:"8e6e27ebbeb78e69b2b8b28991eb52250199f5ef450238666cf895de621d609a" Netns:"/var/run/netns/79cf75b5-82c2-413b-b768-9fe606923849" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-6567f5ffdb-jrpfr;K8S_POD_INFRA_CONTAINER_ID=8e6e27ebbeb78e69b2b8b28991eb52250199f5ef450238666cf895de621d609a;K8S_POD_UID=5b0a332f-52bd-409b-b5c0-f2723c617bed" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"aa:a9:2d:3c:6c:a0\",\"name\":\"8e6e27ebbeb78e6\"},{\"mac\":\"0a:58:0a:d9:00:40\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/79cf75b5-82c2-413b-b768-9fe606923849\"}],\"ips\":[{\"address\":\"10.217.0.64/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:19:06.733019297+00:00 stderr F 2025-12-12T16:19:06Z [verbose] DEL starting CNI request ContainerID:"2d3dc2744fa5b4ed8734404b5f41bcb8d9a837bba2ffffb3ba8c6a4da8a52f1e" 
Netns:"/var/run/netns/670304b3-e042-4a71-8181-579dd4a038c5" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-67bd47cff9-br6nz;K8S_POD_INFRA_CONTAINER_ID=2d3dc2744fa5b4ed8734404b5f41bcb8d9a837bba2ffffb3ba8c6a4da8a52f1e;K8S_POD_UID=a3af7089-05b2-4dcb-947b-3dd784d92815" Path:"" 2025-12-12T16:19:06.733902219+00:00 stderr F 2025-12-12T16:19:06Z [verbose] Del: openshift-route-controller-manager:route-controller-manager-67bd47cff9-br6nz:a3af7089-05b2-4dcb-947b-3dd784d92815:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:19:06.735659772+00:00 stderr F 2025-12-12T16:19:06Z [verbose] DEL starting CNI request ContainerID:"0f2c183e8f515b2f190e8930f24422ca27de87c62982b51512617516d3516532" Netns:"/var/run/netns/907494a8-5725-49e2-b13a-19c4c7fbd83e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-7fffb5779-6br5z;K8S_POD_INFRA_CONTAINER_ID=0f2c183e8f515b2f190e8930f24422ca27de87c62982b51512617516d3516532;K8S_POD_UID=b2944f3c-2b29-4f86-8a67-59975d09aa88" Path:"" 2025-12-12T16:19:06.735820476+00:00 stderr F 2025-12-12T16:19:06Z [verbose] Del: openshift-controller-manager:controller-manager-7fffb5779-6br5z:b2944f3c-2b29-4f86-8a67-59975d09aa88:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:19:06.966087469+00:00 stderr F 2025-12-12T16:19:06Z [verbose] DEL finished CNI request ContainerID:"2d3dc2744fa5b4ed8734404b5f41bcb8d9a837bba2ffffb3ba8c6a4da8a52f1e" Netns:"/var/run/netns/670304b3-e042-4a71-8181-579dd4a038c5" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-67bd47cff9-br6nz;K8S_POD_INFRA_CONTAINER_ID=2d3dc2744fa5b4ed8734404b5f41bcb8d9a837bba2ffffb3ba8c6a4da8a52f1e;K8S_POD_UID=a3af7089-05b2-4dcb-947b-3dd784d92815" Path:"", result: "", err: 2025-12-12T16:19:07.016595738+00:00 stderr P 2025-12-12T16:19:07Z [verbose] 2025-12-12T16:19:07.016642529+00:00 stderr P DEL finished CNI request ContainerID:"0f2c183e8f515b2f190e8930f24422ca27de87c62982b51512617516d3516532" Netns:"/var/run/netns/907494a8-5725-49e2-b13a-19c4c7fbd83e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-7fffb5779-6br5z;K8S_POD_INFRA_CONTAINER_ID=0f2c183e8f515b2f190e8930f24422ca27de87c62982b51512617516d3516532;K8S_POD_UID=b2944f3c-2b29-4f86-8a67-59975d09aa88" Path:"", result: "", err: 2025-12-12T16:19:07.016661219+00:00 stderr F 2025-12-12T16:19:07.666898475+00:00 stderr F 2025-12-12T16:19:07Z [verbose] ADD starting CNI request ContainerID:"cea2f31a5364e0661dd115d55f51b171cc11167d004f5da71c03a7ca7d10a457" Netns:"/var/run/netns/887076ef-8e25-49db-a7d8-5e350808b823" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-8fdcdbb66-vvkdl;K8S_POD_INFRA_CONTAINER_ID=cea2f31a5364e0661dd115d55f51b171cc11167d004f5da71c03a7ca7d10a457;K8S_POD_UID=7e21d17f-ba99-44c0-9127-7a65e5d9bdca" Path:"" 2025-12-12T16:19:07.679684761+00:00 stderr 
F 2025-12-12T16:19:07Z [verbose] ADD starting CNI request ContainerID:"3be55320e701051abeba36d445f32eebf452dd08a6e9fafb9975dd9edab245e4" Netns:"/var/run/netns/eaa4cfbb-518c-4a69-93c8-54257ecf0f3e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-7b9f779b68-xk96c;K8S_POD_INFRA_CONTAINER_ID=3be55320e701051abeba36d445f32eebf452dd08a6e9fafb9975dd9edab245e4;K8S_POD_UID=c0bc7bd1-3831-4f42-b4fe-d83030ae43bb" Path:"" 2025-12-12T16:19:07.892711438+00:00 stderr F I1212 16:19:07.888379 15337 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:19:07.892711438+00:00 stderr F I1212 16:19:07.888778 15337 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:19:07.892711438+00:00 stderr F I1212 16:19:07.888792 15337 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:19:07.892711438+00:00 stderr F I1212 16:19:07.888800 15337 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:19:07.892711438+00:00 stderr F I1212 16:19:07.888828 15337 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:19:07.893480227+00:00 stderr P 2025-12-12T16:19:07Z [verbose] 2025-12-12T16:19:07.894287677+00:00 stderr P Add: openshift-controller-manager:controller-manager-7b9f779b68-xk96c:c0bc7bd1-3831-4f42-b4fe-d83030ae43bb:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"3be55320e701051","mac":"5a:6f:d5:14:05:b0"},{"name":"eth0","mac":"0a:58:0a:d9:00:42","sandbox":"/var/run/netns/eaa4cfbb-518c-4a69-93c8-54257ecf0f3e"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.66/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:19:07.894336498+00:00 stderr F 2025-12-12T16:19:07.894881031+00:00 stderr F I1212 16:19:07.894851 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-controller-manager", Name:"controller-manager-7b9f779b68-xk96c", UID:"c0bc7bd1-3831-4f42-b4fe-d83030ae43bb", APIVersion:"v1", ResourceVersion:"39354", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.66/23] from ovn-kubernetes 2025-12-12T16:19:07.908072997+00:00 stderr F 2025-12-12T16:19:07Z [verbose] ADD finished CNI request ContainerID:"3be55320e701051abeba36d445f32eebf452dd08a6e9fafb9975dd9edab245e4" Netns:"/var/run/netns/eaa4cfbb-518c-4a69-93c8-54257ecf0f3e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-7b9f779b68-xk96c;K8S_POD_INFRA_CONTAINER_ID=3be55320e701051abeba36d445f32eebf452dd08a6e9fafb9975dd9edab245e4;K8S_POD_UID=c0bc7bd1-3831-4f42-b4fe-d83030ae43bb" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"5a:6f:d5:14:05:b0\",\"name\":\"3be55320e701051\"},{\"mac\":\"0a:58:0a:d9:00:42\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/eaa4cfbb-518c-4a69-93c8-54257ecf0f3e\"}],\"ips\":[{\"address\":\"10.217.0.66/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:19:08.030647038+00:00 stderr F I1212 16:19:08.025374 15331 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:19:08.030647038+00:00 stderr F I1212 16:19:08.025761 15331 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:19:08.030647038+00:00 stderr F I1212 16:19:08.025797 15331 envvar.go:172] "Feature 
gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:19:08.030647038+00:00 stderr F I1212 16:19:08.025835 15331 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:19:08.030647038+00:00 stderr F I1212 16:19:08.025847 15331 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:19:08.031131300+00:00 stderr F 2025-12-12T16:19:08Z [verbose] Add: openshift-route-controller-manager:route-controller-manager-8fdcdbb66-vvkdl:7e21d17f-ba99-44c0-9127-7a65e5d9bdca:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"cea2f31a5364e06","mac":"6e:db:21:db:fe:ee"},{"name":"eth0","mac":"0a:58:0a:d9:00:41","sandbox":"/var/run/netns/887076ef-8e25-49db-a7d8-5e350808b823"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.65/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:19:08.031403076+00:00 stderr F I1212 16:19:08.031365 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-route-controller-manager", Name:"route-controller-manager-8fdcdbb66-vvkdl", UID:"7e21d17f-ba99-44c0-9127-7a65e5d9bdca", APIVersion:"v1", ResourceVersion:"39353", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.65/23] from ovn-kubernetes 2025-12-12T16:19:08.047341020+00:00 stderr F 2025-12-12T16:19:08Z [verbose] ADD finished CNI request ContainerID:"cea2f31a5364e0661dd115d55f51b171cc11167d004f5da71c03a7ca7d10a457" Netns:"/var/run/netns/887076ef-8e25-49db-a7d8-5e350808b823" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-8fdcdbb66-vvkdl;K8S_POD_INFRA_CONTAINER_ID=cea2f31a5364e0661dd115d55f51b171cc11167d004f5da71c03a7ca7d10a457;K8S_POD_UID=7e21d17f-ba99-44c0-9127-7a65e5d9bdca" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"6e:db:21:db:fe:ee\",\"name\":\"cea2f31a5364e06\"},{\"mac\":\"0a:58:0a:d9:00:41\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/887076ef-8e25-49db-a7d8-5e350808b823\"}],\"ips\":[{\"address\":\"10.217.0.65/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:19:26.536569562+00:00 stderr P 2025-12-12T16:19:26Z [verbose] 2025-12-12T16:19:26.536689735+00:00 stderr P DEL starting CNI request ContainerID:"3be55320e701051abeba36d445f32eebf452dd08a6e9fafb9975dd9edab245e4" Netns:"/var/run/netns/eaa4cfbb-518c-4a69-93c8-54257ecf0f3e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-7b9f779b68-xk96c;K8S_POD_INFRA_CONTAINER_ID=3be55320e701051abeba36d445f32eebf452dd08a6e9fafb9975dd9edab245e4;K8S_POD_UID=c0bc7bd1-3831-4f42-b4fe-d83030ae43bb" Path:"" 2025-12-12T16:19:26.536714726+00:00 stderr F 2025-12-12T16:19:26.538076370+00:00 stderr P 2025-12-12T16:19:26Z [verbose] 2025-12-12T16:19:26.538113331+00:00 stderr P Del: openshift-controller-manager:controller-manager-7b9f779b68-xk96c:c0bc7bd1-3831-4f42-b4fe-d83030ae43bb:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:19:26.538134691+00:00 stderr F 2025-12-12T16:19:26.564051552+00:00 stderr F 2025-12-12T16:19:26Z [verbose] DEL starting CNI request ContainerID:"cea2f31a5364e0661dd115d55f51b171cc11167d004f5da71c03a7ca7d10a457" 
Netns:"/var/run/netns/887076ef-8e25-49db-a7d8-5e350808b823" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-8fdcdbb66-vvkdl;K8S_POD_INFRA_CONTAINER_ID=cea2f31a5364e0661dd115d55f51b171cc11167d004f5da71c03a7ca7d10a457;K8S_POD_UID=7e21d17f-ba99-44c0-9127-7a65e5d9bdca" Path:"" 2025-12-12T16:19:26.564278498+00:00 stderr F 2025-12-12T16:19:26Z [verbose] Del: openshift-route-controller-manager:route-controller-manager-8fdcdbb66-vvkdl:7e21d17f-ba99-44c0-9127-7a65e5d9bdca:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:19:26.903291799+00:00 stderr F 2025-12-12T16:19:26Z [verbose] DEL finished CNI request ContainerID:"cea2f31a5364e0661dd115d55f51b171cc11167d004f5da71c03a7ca7d10a457" Netns:"/var/run/netns/887076ef-8e25-49db-a7d8-5e350808b823" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-8fdcdbb66-vvkdl;K8S_POD_INFRA_CONTAINER_ID=cea2f31a5364e0661dd115d55f51b171cc11167d004f5da71c03a7ca7d10a457;K8S_POD_UID=7e21d17f-ba99-44c0-9127-7a65e5d9bdca" Path:"", result: "", err: 2025-12-12T16:19:27.148295280+00:00 stderr F 2025-12-12T16:19:27Z [verbose] DEL finished CNI request ContainerID:"3be55320e701051abeba36d445f32eebf452dd08a6e9fafb9975dd9edab245e4" Netns:"/var/run/netns/eaa4cfbb-518c-4a69-93c8-54257ecf0f3e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-7b9f779b68-xk96c;K8S_POD_INFRA_CONTAINER_ID=3be55320e701051abeba36d445f32eebf452dd08a6e9fafb9975dd9edab245e4;K8S_POD_UID=c0bc7bd1-3831-4f42-b4fe-d83030ae43bb" Path:"", result: "", err: 2025-12-12T16:19:27.429931362+00:00 stderr F 2025-12-12T16:19:27Z [verbose] ADD starting CNI request ContainerID:"a617fc7065f1a27b47bb99a0229d4625e224ad99323bbfe378c7893aeb2e13f9" Netns:"/var/run/netns/b40e6d75-139a-4e52-aa6e-672bfa12dd6b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-bf6bf5794-d5zzt;K8S_POD_INFRA_CONTAINER_ID=a617fc7065f1a27b47bb99a0229d4625e224ad99323bbfe378c7893aeb2e13f9;K8S_POD_UID=1fad0dc5-4596-4305-9545-f2525bf2a5f6" Path:"" 2025-12-12T16:19:27.783759646+00:00 stderr F I1212 16:19:27.776966 16042 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:19:27.783759646+00:00 stderr F I1212 16:19:27.777748 16042 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:19:27.783759646+00:00 stderr F I1212 16:19:27.777807 16042 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:19:27.783759646+00:00 stderr F I1212 16:19:27.777844 16042 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:19:27.783759646+00:00 stderr F I1212 16:19:27.777865 16042 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:19:27.784622727+00:00 stderr F 2025-12-12T16:19:27Z [verbose] Add: openshift-route-controller-manager:route-controller-manager-bf6bf5794-d5zzt:1fad0dc5-4596-4305-9545-f2525bf2a5f6:ovn-kubernetes(ovn-kubernetes):eth0 
{"cniVersion":"0.4.0","interfaces":[{"name":"a617fc7065f1a27","mac":"66:f6:06:8a:a6:d9"},{"name":"eth0","mac":"0a:58:0a:d9:00:43","sandbox":"/var/run/netns/b40e6d75-139a-4e52-aa6e-672bfa12dd6b"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.67/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:19:27.784979846+00:00 stderr F I1212 16:19:27.784899 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-route-controller-manager", Name:"route-controller-manager-bf6bf5794-d5zzt", UID:"1fad0dc5-4596-4305-9545-f2525bf2a5f6", APIVersion:"v1", ResourceVersion:"39515", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.67/23] from ovn-kubernetes 2025-12-12T16:19:27.798846754+00:00 stderr F 2025-12-12T16:19:27Z [verbose] ADD finished CNI request ContainerID:"a617fc7065f1a27b47bb99a0229d4625e224ad99323bbfe378c7893aeb2e13f9" Netns:"/var/run/netns/b40e6d75-139a-4e52-aa6e-672bfa12dd6b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-bf6bf5794-d5zzt;K8S_POD_INFRA_CONTAINER_ID=a617fc7065f1a27b47bb99a0229d4625e224ad99323bbfe378c7893aeb2e13f9;K8S_POD_UID=1fad0dc5-4596-4305-9545-f2525bf2a5f6" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"66:f6:06:8a:a6:d9\",\"name\":\"a617fc7065f1a27\"},{\"mac\":\"0a:58:0a:d9:00:43\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/b40e6d75-139a-4e52-aa6e-672bfa12dd6b\"}],\"ips\":[{\"address\":\"10.217.0.67/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:19:27.897463951+00:00 stderr F 2025-12-12T16:19:27Z [verbose] ADD starting CNI request ContainerID:"9053e2f67cbba91f751ed0b628915367d15c8f735f68236bc3df7299c22cf5ce" Netns:"/var/run/netns/ea413e6e-16cf-4812-924c-7d52e0aca8cd" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-79d797b698-v4v6j;K8S_POD_INFRA_CONTAINER_ID=9053e2f67cbba91f751ed0b628915367d15c8f735f68236bc3df7299c22cf5ce;K8S_POD_UID=f239ae24-879a-4441-8fdf-e35f8be83d86" Path:"" 2025-12-12T16:19:28.244915844+00:00 stderr F I1212 16:19:28.238115 16095 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:19:28.244915844+00:00 stderr F I1212 16:19:28.238656 16095 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:19:28.244915844+00:00 stderr F I1212 16:19:28.238674 16095 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:19:28.244915844+00:00 stderr F I1212 16:19:28.238680 16095 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:19:28.244915844+00:00 stderr F I1212 16:19:28.238686 16095 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:19:28.245516910+00:00 stderr F 2025-12-12T16:19:28Z [verbose] Add: openshift-controller-manager:controller-manager-79d797b698-v4v6j:f239ae24-879a-4441-8fdf-e35f8be83d86:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"9053e2f67cbba91","mac":"4a:e8:c1:48:ab:f4"},{"name":"eth0","mac":"0a:58:0a:d9:00:44","sandbox":"/var/run/netns/ea413e6e-16cf-4812-924c-7d52e0aca8cd"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.68/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:19:28.245799127+00:00 stderr F I1212 16:19:28.245736 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", 
Namespace:"openshift-controller-manager", Name:"controller-manager-79d797b698-v4v6j", UID:"f239ae24-879a-4441-8fdf-e35f8be83d86", APIVersion:"v1", ResourceVersion:"39521", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.68/23] from ovn-kubernetes 2025-12-12T16:19:28.258712371+00:00 stderr F 2025-12-12T16:19:28Z [verbose] ADD finished CNI request ContainerID:"9053e2f67cbba91f751ed0b628915367d15c8f735f68236bc3df7299c22cf5ce" Netns:"/var/run/netns/ea413e6e-16cf-4812-924c-7d52e0aca8cd" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-79d797b698-v4v6j;K8S_POD_INFRA_CONTAINER_ID=9053e2f67cbba91f751ed0b628915367d15c8f735f68236bc3df7299c22cf5ce;K8S_POD_UID=f239ae24-879a-4441-8fdf-e35f8be83d86" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"4a:e8:c1:48:ab:f4\",\"name\":\"9053e2f67cbba91\"},{\"mac\":\"0a:58:0a:d9:00:44\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/ea413e6e-16cf-4812-924c-7d52e0aca8cd\"}],\"ips\":[{\"address\":\"10.217.0.68/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:19:46.539069643+00:00 stderr F 2025-12-12T16:19:46Z [verbose] DEL starting CNI request ContainerID:"9053e2f67cbba91f751ed0b628915367d15c8f735f68236bc3df7299c22cf5ce" Netns:"/var/run/netns/ea413e6e-16cf-4812-924c-7d52e0aca8cd" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-79d797b698-v4v6j;K8S_POD_INFRA_CONTAINER_ID=9053e2f67cbba91f751ed0b628915367d15c8f735f68236bc3df7299c22cf5ce;K8S_POD_UID=f239ae24-879a-4441-8fdf-e35f8be83d86" Path:"" 2025-12-12T16:19:46.539751970+00:00 stderr F 2025-12-12T16:19:46Z [verbose] Del: openshift-controller-manager:controller-manager-79d797b698-v4v6j:f239ae24-879a-4441-8fdf-e35f8be83d86:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:19:47.046659397+00:00 stderr F 2025-12-12T16:19:47Z [verbose] DEL finished CNI request ContainerID:"9053e2f67cbba91f751ed0b628915367d15c8f735f68236bc3df7299c22cf5ce" Netns:"/var/run/netns/ea413e6e-16cf-4812-924c-7d52e0aca8cd" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-79d797b698-v4v6j;K8S_POD_INFRA_CONTAINER_ID=9053e2f67cbba91f751ed0b628915367d15c8f735f68236bc3df7299c22cf5ce;K8S_POD_UID=f239ae24-879a-4441-8fdf-e35f8be83d86" Path:"", result: "", err: 2025-12-12T16:19:47.477670919+00:00 stderr F 2025-12-12T16:19:47Z [verbose] ADD starting CNI request ContainerID:"2442fefb3ec630c1459249d541dea75bdd7b8cce13dfc98f86cd71a04f5a5896" Netns:"/var/run/netns/830ac5f7-87fd-4f1a-9a48-7206a15ffaad" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-7b9f779b68-rhrzf;K8S_POD_INFRA_CONTAINER_ID=2442fefb3ec630c1459249d541dea75bdd7b8cce13dfc98f86cd71a04f5a5896;K8S_POD_UID=7313ab95-a89a-4df9-a791-1d048a6beba9" Path:"" 2025-12-12T16:19:47.840667953+00:00 stderr F I1212 16:19:47.835075 16554 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:19:47.840667953+00:00 stderr F I1212 16:19:47.835743 16554 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:19:47.840667953+00:00 stderr F I1212 
16:19:47.835756 16554 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:19:47.840667953+00:00 stderr F I1212 16:19:47.835766 16554 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:19:47.840667953+00:00 stderr F I1212 16:19:47.835776 16554 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:19:47.841220697+00:00 stderr F 2025-12-12T16:19:47Z [verbose] Add: openshift-controller-manager:controller-manager-7b9f779b68-rhrzf:7313ab95-a89a-4df9-a791-1d048a6beba9:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"2442fefb3ec630c","mac":"46:bf:c6:ea:9e:1e"},{"name":"eth0","mac":"0a:58:0a:d9:00:45","sandbox":"/var/run/netns/830ac5f7-87fd-4f1a-9a48-7206a15ffaad"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.69/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:19:47.841533785+00:00 stderr F I1212 16:19:47.841421 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-controller-manager", Name:"controller-manager-7b9f779b68-rhrzf", UID:"7313ab95-a89a-4df9-a791-1d048a6beba9", APIVersion:"v1", ResourceVersion:"39643", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.69/23] from ovn-kubernetes 2025-12-12T16:19:47.861978308+00:00 stderr F 2025-12-12T16:19:47Z [verbose] ADD finished CNI request ContainerID:"2442fefb3ec630c1459249d541dea75bdd7b8cce13dfc98f86cd71a04f5a5896" Netns:"/var/run/netns/830ac5f7-87fd-4f1a-9a48-7206a15ffaad" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-7b9f779b68-rhrzf;K8S_POD_INFRA_CONTAINER_ID=2442fefb3ec630c1459249d541dea75bdd7b8cce13dfc98f86cd71a04f5a5896;K8S_POD_UID=7313ab95-a89a-4df9-a791-1d048a6beba9" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"46:bf:c6:ea:9e:1e\",\"name\":\"2442fefb3ec630c\"},{\"mac\":\"0a:58:0a:d9:00:45\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/830ac5f7-87fd-4f1a-9a48-7206a15ffaad\"}],\"ips\":[{\"address\":\"10.217.0.69/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:20:36.078486086+00:00 stderr F 2025-12-12T16:20:36Z [verbose] DEL starting CNI request ContainerID:"ac163b2bf1c1d578b9037f0b59dae7dd262bb9d00e98558c9f328edeb8dabdb0" Netns:"/var/run/netns/a0382a0c-e1d9-4346-bb26-fc803e64a6dd" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-9ndfc;K8S_POD_INFRA_CONTAINER_ID=ac163b2bf1c1d578b9037f0b59dae7dd262bb9d00e98558c9f328edeb8dabdb0;K8S_POD_UID=573d2658-6034-4715-a9ad-a7828b324fd5" Path:"" 2025-12-12T16:20:36.078992440+00:00 stderr F 2025-12-12T16:20:36Z [verbose] Del: openshift-marketplace:redhat-operators-9ndfc:573d2658-6034-4715-a9ad-a7828b324fd5:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:20:36.293699731+00:00 stderr F 2025-12-12T16:20:36Z [verbose] DEL finished CNI request ContainerID:"ac163b2bf1c1d578b9037f0b59dae7dd262bb9d00e98558c9f328edeb8dabdb0" Netns:"/var/run/netns/a0382a0c-e1d9-4346-bb26-fc803e64a6dd" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-9ndfc;K8S_POD_INFRA_CONTAINER_ID=ac163b2bf1c1d578b9037f0b59dae7dd262bb9d00e98558c9f328edeb8dabdb0;K8S_POD_UID=573d2658-6034-4715-a9ad-a7828b324fd5" Path:"", result: "", err: 2025-12-12T16:20:36.368980186+00:00 stderr F 2025-12-12T16:20:36Z [verbose] ADD starting CNI request ContainerID:"1ebc495b03b4eaae43e1bfdbf980668073684b96795435a1d924a5270ea75ef3" Netns:"/var/run/netns/2c154a48-93f0-4aa6-aaa7-8b2564aeea44" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-547dbd544d-4vhrb;K8S_POD_INFRA_CONTAINER_ID=1ebc495b03b4eaae43e1bfdbf980668073684b96795435a1d924a5270ea75ef3;K8S_POD_UID=9a0e237f-ebef-42b0-ad96-926e15307914" Path:"" 2025-12-12T16:20:36.585074244+00:00 stderr F 2025-12-12T16:20:36Z [verbose] DEL starting CNI request ContainerID:"5cc1da989e963af873e82696b122995145445095ec336e5b958ae3ddef9bfffd" Netns:"/var/run/netns/b41854f2-a31a-48b8-9915-6efa69ec94e8" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-2gt6h;K8S_POD_INFRA_CONTAINER_ID=5cc1da989e963af873e82696b122995145445095ec336e5b958ae3ddef9bfffd;K8S_POD_UID=3686d912-c8e4-413f-b036-f206a4e826a2" Path:"" 2025-12-12T16:20:36.585074244+00:00 stderr F 2025-12-12T16:20:36Z [verbose] Del: openshift-marketplace:community-operators-2gt6h:3686d912-c8e4-413f-b036-f206a4e826a2:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:20:36.586757368+00:00 stderr F 2025-12-12T16:20:36Z [verbose] DEL starting CNI request ContainerID:"f1da0765a97fe218a374080c0f1f06e2731cd63af36a922455361a4960727e20" Netns:"/var/run/netns/a6b27e17-1c48-4213-837f-159d82f2ff20" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-marketplace-s7x92;K8S_POD_INFRA_CONTAINER_ID=f1da0765a97fe218a374080c0f1f06e2731cd63af36a922455361a4960727e20;K8S_POD_UID=1aaf652b-1019-4193-839d-875d12cc1e27" Path:"" 2025-12-12T16:20:36.586976584+00:00 stderr F 2025-12-12T16:20:36Z [verbose] Del: openshift-marketplace:redhat-marketplace-s7x92:1aaf652b-1019-4193-839d-875d12cc1e27:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:20:36.587387415+00:00 stderr F 2025-12-12T16:20:36Z [verbose] DEL starting CNI request ContainerID:"80eb6b504f4b6d215a1fdd56503837348aea4e832c71cca42b9c33074674fdba" Netns:"/var/run/netns/9b75a81c-54ca-4204-bf8f-6df7be735241" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-547dbd544d-xpvsb;K8S_POD_INFRA_CONTAINER_ID=80eb6b504f4b6d215a1fdd56503837348aea4e832c71cca42b9c33074674fdba;K8S_POD_UID=1de41ef3-7896-4e9c-8201-8174bc4468c4" Path:"" 2025-12-12T16:20:36.587526008+00:00 stderr F 2025-12-12T16:20:36Z [verbose] Del: openshift-marketplace:marketplace-operator-547dbd544d-xpvsb:1de41ef3-7896-4e9c-8201-8174bc4468c4:ovn-kubernetes:eth0 
{"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:20:36.589279014+00:00 stderr F 2025-12-12T16:20:36Z [verbose] DEL starting CNI request ContainerID:"70771a8a130e6322df73890d22e5b58e9c784d9164e5ed9740d937291a171571" Netns:"/var/run/netns/b0d35401-5792-4682-8d69-7f39cf91cd6e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-pvzzz;K8S_POD_INFRA_CONTAINER_ID=70771a8a130e6322df73890d22e5b58e9c784d9164e5ed9740d937291a171571;K8S_POD_UID=f1a12a40-8493-41e1-84b7-312fc948fca8" Path:"" 2025-12-12T16:20:36.589576002+00:00 stderr F 2025-12-12T16:20:36Z [verbose] Del: openshift-marketplace:certified-operators-pvzzz:f1a12a40-8493-41e1-84b7-312fc948fca8:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:20:36.724593464+00:00 stderr F I1212 16:20:36.717215 17788 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:20:36.724593464+00:00 stderr F I1212 16:20:36.717679 17788 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:20:36.724593464+00:00 stderr F I1212 16:20:36.717695 17788 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:20:36.724593464+00:00 stderr F I1212 16:20:36.717711 17788 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:20:36.724593464+00:00 stderr F I1212 16:20:36.717724 17788 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:20:36.725024645+00:00 stderr F 2025-12-12T16:20:36Z [verbose] Add: openshift-marketplace:marketplace-operator-547dbd544d-4vhrb:9a0e237f-ebef-42b0-ad96-926e15307914:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"1ebc495b03b4eaa","mac":"e6:7e:d9:69:6f:11"},{"name":"eth0","mac":"0a:58:0a:d9:00:46","sandbox":"/var/run/netns/2c154a48-93f0-4aa6-aaa7-8b2564aeea44"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.70/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:20:36.725214700+00:00 stderr F I1212 16:20:36.725154 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"marketplace-operator-547dbd544d-4vhrb", UID:"9a0e237f-ebef-42b0-ad96-926e15307914", APIVersion:"v1", ResourceVersion:"39820", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.70/23] from ovn-kubernetes 2025-12-12T16:20:36.741008944+00:00 stderr F 2025-12-12T16:20:36Z [verbose] ADD finished CNI request ContainerID:"1ebc495b03b4eaae43e1bfdbf980668073684b96795435a1d924a5270ea75ef3" Netns:"/var/run/netns/2c154a48-93f0-4aa6-aaa7-8b2564aeea44" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-547dbd544d-4vhrb;K8S_POD_INFRA_CONTAINER_ID=1ebc495b03b4eaae43e1bfdbf980668073684b96795435a1d924a5270ea75ef3;K8S_POD_UID=9a0e237f-ebef-42b0-ad96-926e15307914" Path:"", result: 
"{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"e6:7e:d9:69:6f:11\",\"name\":\"1ebc495b03b4eaa\"},{\"mac\":\"0a:58:0a:d9:00:46\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/2c154a48-93f0-4aa6-aaa7-8b2564aeea44\"}],\"ips\":[{\"address\":\"10.217.0.70/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:20:36.764940822+00:00 stderr F 2025-12-12T16:20:36Z [verbose] DEL finished CNI request ContainerID:"5cc1da989e963af873e82696b122995145445095ec336e5b958ae3ddef9bfffd" Netns:"/var/run/netns/b41854f2-a31a-48b8-9915-6efa69ec94e8" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-2gt6h;K8S_POD_INFRA_CONTAINER_ID=5cc1da989e963af873e82696b122995145445095ec336e5b958ae3ddef9bfffd;K8S_POD_UID=3686d912-c8e4-413f-b036-f206a4e826a2" Path:"", result: "", err: 2025-12-12T16:20:36.970379701+00:00 stderr F 2025-12-12T16:20:36Z [verbose] DEL finished CNI request ContainerID:"f1da0765a97fe218a374080c0f1f06e2731cd63af36a922455361a4960727e20" Netns:"/var/run/netns/a6b27e17-1c48-4213-837f-159d82f2ff20" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-marketplace-s7x92;K8S_POD_INFRA_CONTAINER_ID=f1da0765a97fe218a374080c0f1f06e2731cd63af36a922455361a4960727e20;K8S_POD_UID=1aaf652b-1019-4193-839d-875d12cc1e27" Path:"", result: "", err: 2025-12-12T16:20:36.977113787+00:00 stderr F 2025-12-12T16:20:36Z [verbose] DEL finished CNI request ContainerID:"70771a8a130e6322df73890d22e5b58e9c784d9164e5ed9740d937291a171571" Netns:"/var/run/netns/b0d35401-5792-4682-8d69-7f39cf91cd6e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-pvzzz;K8S_POD_INFRA_CONTAINER_ID=70771a8a130e6322df73890d22e5b58e9c784d9164e5ed9740d937291a171571;K8S_POD_UID=f1a12a40-8493-41e1-84b7-312fc948fca8" Path:"", result: "", err: 2025-12-12T16:20:36.991851104+00:00 stderr F 2025-12-12T16:20:36Z [verbose] DEL finished CNI request ContainerID:"80eb6b504f4b6d215a1fdd56503837348aea4e832c71cca42b9c33074674fdba" Netns:"/var/run/netns/9b75a81c-54ca-4204-bf8f-6df7be735241" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-547dbd544d-xpvsb;K8S_POD_INFRA_CONTAINER_ID=80eb6b504f4b6d215a1fdd56503837348aea4e832c71cca42b9c33074674fdba;K8S_POD_UID=1de41ef3-7896-4e9c-8201-8174bc4468c4" Path:"", result: "", err: 2025-12-12T16:20:38.787254076+00:00 stderr F 2025-12-12T16:20:38Z [verbose] ADD starting CNI request ContainerID:"08d678588a91fedaa50b05baac41cdac2d5c0355efa6380e596d1d26d3cd8ee4" Netns:"/var/run/netns/b8bca2c1-9dc8-414b-911f-d5060670ee18" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-wqdb8;K8S_POD_INFRA_CONTAINER_ID=08d678588a91fedaa50b05baac41cdac2d5c0355efa6380e596d1d26d3cd8ee4;K8S_POD_UID=c82ddae8-4dc3-4d48-96b1-cd9613cc32c3" Path:"" 2025-12-12T16:20:39.137468882+00:00 stderr F I1212 16:20:39.131846 18068 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:20:39.137468882+00:00 stderr F I1212 16:20:39.132354 18068 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:20:39.137468882+00:00 stderr F I1212 16:20:39.132373 18068 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:20:39.137468882+00:00 stderr F I1212 16:20:39.132382 18068 envvar.go:172] "Feature gate default state" 
feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:20:39.137468882+00:00 stderr F I1212 16:20:39.132394 18068 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:20:39.137868033+00:00 stderr F 2025-12-12T16:20:39Z [verbose] Add: openshift-marketplace:redhat-operators-wqdb8:c82ddae8-4dc3-4d48-96b1-cd9613cc32c3:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"08d678588a91fed","mac":"96:74:20:e5:ac:d8"},{"name":"eth0","mac":"0a:58:0a:d9:00:47","sandbox":"/var/run/netns/b8bca2c1-9dc8-414b-911f-d5060670ee18"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.71/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:20:39.138080198+00:00 stderr F I1212 16:20:39.138030 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"redhat-operators-wqdb8", UID:"c82ddae8-4dc3-4d48-96b1-cd9613cc32c3", APIVersion:"v1", ResourceVersion:"39870", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.71/23] from ovn-kubernetes 2025-12-12T16:20:39.153504643+00:00 stderr F 2025-12-12T16:20:39Z [verbose] ADD finished CNI request ContainerID:"08d678588a91fedaa50b05baac41cdac2d5c0355efa6380e596d1d26d3cd8ee4" Netns:"/var/run/netns/b8bca2c1-9dc8-414b-911f-d5060670ee18" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-operators-wqdb8;K8S_POD_INFRA_CONTAINER_ID=08d678588a91fedaa50b05baac41cdac2d5c0355efa6380e596d1d26d3cd8ee4;K8S_POD_UID=c82ddae8-4dc3-4d48-96b1-cd9613cc32c3" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"96:74:20:e5:ac:d8\",\"name\":\"08d678588a91fed\"},{\"mac\":\"0a:58:0a:d9:00:47\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/b8bca2c1-9dc8-414b-911f-d5060670ee18\"}],\"ips\":[{\"address\":\"10.217.0.71/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:20:39.770999170+00:00 stderr F 2025-12-12T16:20:39Z [verbose] ADD starting CNI request ContainerID:"7773d79f7edba7c2fade19500e032cb8eda2fddefa0dffa30bcd136741f76b43" Netns:"/var/run/netns/b501df5a-cec1-4125-a157-1dda57fd5f93" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-psnw2;K8S_POD_INFRA_CONTAINER_ID=7773d79f7edba7c2fade19500e032cb8eda2fddefa0dffa30bcd136741f76b43;K8S_POD_UID=2d107578-4c5d-4271-a1a7-660aadfab0d1" Path:"" 2025-12-12T16:20:40.097061572+00:00 stderr F I1212 16:20:40.090981 18150 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:20:40.097061572+00:00 stderr F I1212 16:20:40.091467 18150 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:20:40.097061572+00:00 stderr F I1212 16:20:40.091488 18150 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:20:40.097061572+00:00 stderr F I1212 16:20:40.091494 18150 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:20:40.097061572+00:00 stderr F I1212 16:20:40.091500 18150 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:20:40.097493514+00:00 stderr F 2025-12-12T16:20:40Z [verbose] Add: openshift-marketplace:certified-operators-psnw2:2d107578-4c5d-4271-a1a7-660aadfab0d1:ovn-kubernetes(ovn-kubernetes):eth0 
{"cniVersion":"0.4.0","interfaces":[{"name":"7773d79f7edba7c","mac":"fe:83:58:be:d6:d3"},{"name":"eth0","mac":"0a:58:0a:d9:00:48","sandbox":"/var/run/netns/b501df5a-cec1-4125-a157-1dda57fd5f93"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.72/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:20:40.097741980+00:00 stderr F I1212 16:20:40.097674 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"certified-operators-psnw2", UID:"2d107578-4c5d-4271-a1a7-660aadfab0d1", APIVersion:"v1", ResourceVersion:"39882", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.72/23] from ovn-kubernetes 2025-12-12T16:20:40.112966320+00:00 stderr F 2025-12-12T16:20:40Z [verbose] ADD finished CNI request ContainerID:"7773d79f7edba7c2fade19500e032cb8eda2fddefa0dffa30bcd136741f76b43" Netns:"/var/run/netns/b501df5a-cec1-4125-a157-1dda57fd5f93" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=certified-operators-psnw2;K8S_POD_INFRA_CONTAINER_ID=7773d79f7edba7c2fade19500e032cb8eda2fddefa0dffa30bcd136741f76b43;K8S_POD_UID=2d107578-4c5d-4271-a1a7-660aadfab0d1" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"fe:83:58:be:d6:d3\",\"name\":\"7773d79f7edba7c\"},{\"mac\":\"0a:58:0a:d9:00:48\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/b501df5a-cec1-4125-a157-1dda57fd5f93\"}],\"ips\":[{\"address\":\"10.217.0.72/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:20:41.167536361+00:00 stderr F 2025-12-12T16:20:41Z [verbose] ADD starting CNI request ContainerID:"b765b5c8351a8fb93680585f3e7f0cfc2b1c43781870e36adc047f36c6ef9bf0" Netns:"/var/run/netns/2ec6428b-757b-41a7-8144-dc7f8b8c6d3f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-6jgv5;K8S_POD_INFRA_CONTAINER_ID=b765b5c8351a8fb93680585f3e7f0cfc2b1c43781870e36adc047f36c6ef9bf0;K8S_POD_UID=0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8" Path:"" 2025-12-12T16:20:41.505771103+00:00 stderr F I1212 16:20:41.496775 18258 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:20:41.505771103+00:00 stderr F I1212 16:20:41.497703 18258 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:20:41.505771103+00:00 stderr F I1212 16:20:41.497718 18258 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:20:41.505771103+00:00 stderr F I1212 16:20:41.497733 18258 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:20:41.505771103+00:00 stderr F I1212 16:20:41.497740 18258 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:20:41.506320197+00:00 stderr F 2025-12-12T16:20:41Z [verbose] Add: openshift-marketplace:community-operators-6jgv5:0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"b765b5c8351a8fb","mac":"c2:00:b2:fe:62:7f"},{"name":"eth0","mac":"0a:58:0a:d9:00:49","sandbox":"/var/run/netns/2ec6428b-757b-41a7-8144-dc7f8b8c6d3f"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.73/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:20:41.506542343+00:00 stderr F I1212 16:20:41.506484 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"community-operators-6jgv5", UID:"0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8", 
APIVersion:"v1", ResourceVersion:"39903", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.73/23] from ovn-kubernetes 2025-12-12T16:20:41.522467791+00:00 stderr F 2025-12-12T16:20:41Z [verbose] ADD finished CNI request ContainerID:"b765b5c8351a8fb93680585f3e7f0cfc2b1c43781870e36adc047f36c6ef9bf0" Netns:"/var/run/netns/2ec6428b-757b-41a7-8144-dc7f8b8c6d3f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=community-operators-6jgv5;K8S_POD_INFRA_CONTAINER_ID=b765b5c8351a8fb93680585f3e7f0cfc2b1c43781870e36adc047f36c6ef9bf0;K8S_POD_UID=0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"c2:00:b2:fe:62:7f\",\"name\":\"b765b5c8351a8fb\"},{\"mac\":\"0a:58:0a:d9:00:49\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/2ec6428b-757b-41a7-8144-dc7f8b8c6d3f\"}],\"ips\":[{\"address\":\"10.217.0.73/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:20:42.191259292+00:00 stderr F 2025-12-12T16:20:42Z [verbose] ADD starting CNI request ContainerID:"8bea72c104dd5234c4cd3783a470a5fd8615adb812871de1a18d6c25aed0610e" Netns:"/var/run/netns/f4daef72-d6c9-455b-b870-17f85a9da346" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-marketplace-jkgqd;K8S_POD_INFRA_CONTAINER_ID=8bea72c104dd5234c4cd3783a470a5fd8615adb812871de1a18d6c25aed0610e;K8S_POD_UID=5307a6d2-3f00-4ebd-8c7b-e101e24f4dd4" Path:"" 2025-12-12T16:20:42.554627753+00:00 stderr F I1212 16:20:42.547720 18375 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:20:42.554627753+00:00 stderr F I1212 16:20:42.548580 18375 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:20:42.554627753+00:00 stderr F I1212 16:20:42.548669 18375 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:20:42.554627753+00:00 stderr F I1212 16:20:42.548704 18375 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:20:42.554627753+00:00 stderr F I1212 16:20:42.548735 18375 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:20:42.555661860+00:00 stderr F 2025-12-12T16:20:42Z [verbose] Add: openshift-marketplace:redhat-marketplace-jkgqd:5307a6d2-3f00-4ebd-8c7b-e101e24f4dd4:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"8bea72c104dd523","mac":"06:75:52:71:0f:f9"},{"name":"eth0","mac":"0a:58:0a:d9:00:4a","sandbox":"/var/run/netns/f4daef72-d6c9-455b-b870-17f85a9da346"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.74/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:20:42.555936348+00:00 stderr F I1212 16:20:42.555875 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-marketplace", Name:"redhat-marketplace-jkgqd", UID:"5307a6d2-3f00-4ebd-8c7b-e101e24f4dd4", APIVersion:"v1", ResourceVersion:"39923", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.74/23] from ovn-kubernetes 2025-12-12T16:20:42.570866549+00:00 stderr F 2025-12-12T16:20:42Z [verbose] ADD finished CNI request ContainerID:"8bea72c104dd5234c4cd3783a470a5fd8615adb812871de1a18d6c25aed0610e" Netns:"/var/run/netns/f4daef72-d6c9-455b-b870-17f85a9da346" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=redhat-marketplace-jkgqd;K8S_POD_INFRA_CONTAINER_ID=8bea72c104dd5234c4cd3783a470a5fd8615adb812871de1a18d6c25aed0610e;K8S_POD_UID=5307a6d2-3f00-4ebd-8c7b-e101e24f4dd4" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"06:75:52:71:0f:f9\",\"name\":\"8bea72c104dd523\"},{\"mac\":\"0a:58:0a:d9:00:4a\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/f4daef72-d6c9-455b-b870-17f85a9da346\"}],\"ips\":[{\"address\":\"10.217.0.74/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:20:47.431664447+00:00 stderr F 2025-12-12T16:20:47Z [verbose] DEL starting CNI request ContainerID:"a617fc7065f1a27b47bb99a0229d4625e224ad99323bbfe378c7893aeb2e13f9" Netns:"/var/run/netns/b40e6d75-139a-4e52-aa6e-672bfa12dd6b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-bf6bf5794-d5zzt;K8S_POD_INFRA_CONTAINER_ID=a617fc7065f1a27b47bb99a0229d4625e224ad99323bbfe378c7893aeb2e13f9;K8S_POD_UID=1fad0dc5-4596-4305-9545-f2525bf2a5f6" Path:"" 2025-12-12T16:20:47.432685864+00:00 stderr F 2025-12-12T16:20:47Z [verbose] Del: openshift-route-controller-manager:route-controller-manager-bf6bf5794-d5zzt:1fad0dc5-4596-4305-9545-f2525bf2a5f6:ovn-kubernetes:eth0 {"cniVersion":"0.4.0","name":"ovn-kubernetes","type":"ovn-k8s-cni-overlay","ipam":{},"dns":{},"logFile":"/var/log/ovn-kubernetes/ovn-k8s-cni-overlay.log","logLevel":"4","logfile-maxsize":100,"logfile-maxbackups":5,"logfile-maxage":0,"runtimeConfig":{}} 2025-12-12T16:20:47.684389136+00:00 stderr F 2025-12-12T16:20:47Z [verbose] DEL finished CNI request ContainerID:"a617fc7065f1a27b47bb99a0229d4625e224ad99323bbfe378c7893aeb2e13f9" Netns:"/var/run/netns/b40e6d75-139a-4e52-aa6e-672bfa12dd6b" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-bf6bf5794-d5zzt;K8S_POD_INFRA_CONTAINER_ID=a617fc7065f1a27b47bb99a0229d4625e224ad99323bbfe378c7893aeb2e13f9;K8S_POD_UID=1fad0dc5-4596-4305-9545-f2525bf2a5f6" Path:"", result: "", err: 2025-12-12T16:20:48.196642742+00:00 stderr F 2025-12-12T16:20:48Z [verbose] ADD starting CNI request ContainerID:"67ac35c04ae5d5bd39d34e7ec55083a9d5fce60d8261f2f593679d0ef3030a1f" Netns:"/var/run/netns/0f433365-4756-470b-bea4-8c1f1f2c2967" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-8fdcdbb66-mzfqh;K8S_POD_INFRA_CONTAINER_ID=67ac35c04ae5d5bd39d34e7ec55083a9d5fce60d8261f2f593679d0ef3030a1f;K8S_POD_UID=952b1cf6-a983-4b00-bca6-24b95d6bff57" Path:"" 2025-12-12T16:20:48.553984945+00:00 stderr F I1212 16:20:48.548129 18947 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:20:48.553984945+00:00 stderr F I1212 16:20:48.548693 18947 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:20:48.553984945+00:00 stderr F I1212 16:20:48.548712 18947 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:20:48.553984945+00:00 stderr F I1212 16:20:48.548718 18947 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:20:48.553984945+00:00 stderr F I1212 16:20:48.548724 18947 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:20:48.554733645+00:00 stderr F 2025-12-12T16:20:48Z [verbose] Add: 
openshift-route-controller-manager:route-controller-manager-8fdcdbb66-mzfqh:952b1cf6-a983-4b00-bca6-24b95d6bff57:ovn-kubernetes(ovn-kubernetes):eth0 {"cniVersion":"0.4.0","interfaces":[{"name":"67ac35c04ae5d5b","mac":"8e:a6:70:d9:df:c2"},{"name":"eth0","mac":"0a:58:0a:d9:00:4b","sandbox":"/var/run/netns/0f433365-4756-470b-bea4-8c1f1f2c2967"}],"ips":[{"version":"4","interface":1,"address":"10.217.0.75/23","gateway":"10.217.0.1"}],"dns":{}} 2025-12-12T16:20:48.555110975+00:00 stderr F I1212 16:20:48.555063 6507 event.go:364] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-route-controller-manager", Name:"route-controller-manager-8fdcdbb66-mzfqh", UID:"952b1cf6-a983-4b00-bca6-24b95d6bff57", APIVersion:"v1", ResourceVersion:"40020", FieldPath:""}): type: 'Normal' reason: 'AddedInterface' Add eth0 [10.217.0.75/23] from ovn-kubernetes 2025-12-12T16:20:48.571878375+00:00 stderr F 2025-12-12T16:20:48Z [verbose] ADD finished CNI request ContainerID:"67ac35c04ae5d5bd39d34e7ec55083a9d5fce60d8261f2f593679d0ef3030a1f" Netns:"/var/run/netns/0f433365-4756-470b-bea4-8c1f1f2c2967" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-route-controller-manager;K8S_POD_NAME=route-controller-manager-8fdcdbb66-mzfqh;K8S_POD_INFRA_CONTAINER_ID=67ac35c04ae5d5bd39d34e7ec55083a9d5fce60d8261f2f593679d0ef3030a1f;K8S_POD_UID=952b1cf6-a983-4b00-bca6-24b95d6bff57" Path:"", result: "{\"Result\":{\"cniVersion\":\"1.1.0\",\"interfaces\":[{\"mac\":\"8e:a6:70:d9:df:c2\",\"name\":\"67ac35c04ae5d5b\"},{\"mac\":\"0a:58:0a:d9:00:4b\",\"name\":\"eth0\",\"sandbox\":\"/var/run/netns/0f433365-4756-470b-bea4-8c1f1f2c2967\"}],\"ips\":[{\"address\":\"10.217.0.75/23\",\"gateway\":\"10.217.0.1\",\"interface\":1}]}}", err: 2025-12-12T16:25:27.141523381+00:00 stderr F 2025-12-12T16:25:27Z [verbose] readiness indicator file is gone. 
restart multus-daemon ././@LongLink0000644000000000000000000000031200000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043043033075 5ustar zuulzuul././@LongLink0000644000000000000000000000031700000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/pull/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043062033076 5ustar zuulzuul././@LongLink0000644000000000000000000000032400000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/pull/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000644000175000017500000000502215117043043033076 0ustar zuulzuul2025-12-12T16:26:45.470163861+00:00 stdout F skipping a dir without errors: / 2025-12-12T16:26:45.470163861+00:00 stdout F skipping a dir without errors: /bundle 2025-12-12T16:26:45.470446828+00:00 stdout F skipping all files in the dir: /dev 2025-12-12T16:26:45.470446828+00:00 stdout F skipping a dir without errors: /etc 2025-12-12T16:26:45.470446828+00:00 stdout F skipping a dir without errors: /manifests 2025-12-12T16:26:45.470745915+00:00 stdout F skipping a dir without errors: /metadata 2025-12-12T16:26:45.470911779+00:00 stdout F skipping all files in the dir: /proc 2025-12-12T16:26:45.470958231+00:00 stdout F skipping a dir without errors: /root 2025-12-12T16:26:45.470965851+00:00 stdout F skipping a dir without errors: /root/buildinfo 2025-12-12T16:26:45.471006752+00:00 stdout F skipping a dir without errors: /run 2025-12-12T16:26:45.471043543+00:00 stdout F skipping a dir without errors: /run/secrets 2025-12-12T16:26:45.471050903+00:00 stdout F skipping a dir without errors: /run/secrets/rhsm 2025-12-12T16:26:45.471077584+00:00 stdout F skipping a dir without errors: /run/secrets/rhsm/ca 2025-12-12T16:26:45.471126325+00:00 stdout F skipping a dir without errors: /run/secrets/rhsm/syspurpose 2025-12-12T16:26:45.471134085+00:00 stdout F skipping all files in the dir: /sys 2025-12-12T16:26:45.471172286+00:00 stdout F skipping a dir without errors: /tests 2025-12-12T16:26:45.471233668+00:00 stdout F skipping a dir without errors: /tests/scorecard 2025-12-12T16:26:45.471304009+00:00 stdout F skipping a dir without errors: /usr 2025-12-12T16:26:45.471313550+00:00 stdout F skipping a dir without errors: /usr/share 2025-12-12T16:26:45.471350151+00:00 stdout F skipping a dir without errors: /usr/share/buildinfo 2025-12-12T16:26:45.471394992+00:00 stdout F skipping a dir without errors: /util 2025-12-12T16:26:45.471425012+00:00 stdout F skipping a dir without errors: /var 2025-12-12T16:26:45.471439013+00:00 stdout F skipping a dir without errors: /var/run 2025-12-12T16:26:45.471462643+00:00 stdout F skipping a dir without errors: /var/run/secrets 2025-12-12T16:26:45.471494494+00:00 stdout F skipping a dir without errors: 
/var/run/secrets/kubernetes.io 2025-12-12T16:26:45.471520105+00:00 stdout F skipping a dir without errors: /var/run/secrets/kubernetes.io/serviceaccount 2025-12-12T16:26:45.471542735+00:00 stdout F skipping a dir without errors: /var/run/secrets/kubernetes.io/serviceaccount/..2025_12_12_16_26_43.2292268698 2025-12-12T16:26:45.471599867+00:00 stdout F &{metadata/annotations.yaml manifests/} ././@LongLink0000644000000000000000000000031700000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/util/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043062033076 5ustar zuulzuul././@LongLink0000644000000000000000000000032400000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/util/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000644000175000017500000000010715117043043033075 0ustar zuulzuul2025-12-12T16:26:43.783769316+00:00 stdout F '/bin/cpb' -> '/util/cpb' ././@LongLink0000644000000000000000000000032200000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/extract/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015117043062033076 5ustar zuulzuul././@LongLink0000644000000000000000000000032700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/extract/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000644000175000017500000001224515117043043033103 0ustar zuulzuul2025-12-12T16:26:46.207083638+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Using in-cluster kube client config" 2025-12-12T16:26:46.225925364+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/cluster-observability-operator.clusterserviceversion.yaml 2025-12-12T16:26:46.227040672+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/monitoring.rhobs_alertmanagerconfigs.yaml 2025-12-12T16:26:46.239677951+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/monitoring.rhobs_alertmanagers.yaml 2025-12-12T16:26:46.250445623+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/monitoring.rhobs_monitoringstacks.yaml 2025-12-12T16:26:46.252793783+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/monitoring.rhobs_podmonitors.yaml 2025-12-12T16:26:46.254510856+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/monitoring.rhobs_probes.yaml 2025-12-12T16:26:46.258402764+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" 
file=/bundle/manifests/monitoring.rhobs_prometheusagents.yaml 2025-12-12T16:26:46.274084821+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/monitoring.rhobs_prometheuses.yaml 2025-12-12T16:26:46.288522625+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/monitoring.rhobs_prometheusrules.yaml 2025-12-12T16:26:46.289007878+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/monitoring.rhobs_scrapeconfigs.yaml 2025-12-12T16:26:46.299122043+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/monitoring.rhobs_servicemonitors.yaml 2025-12-12T16:26:46.301333839+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/monitoring.rhobs_thanosqueriers.yaml 2025-12-12T16:26:46.301333839+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/monitoring.rhobs_thanosrulers.yaml 2025-12-12T16:26:46.313585458+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/obo-prometheus-operator-admission-webhook_policy_v1_poddisruptionbudget.yaml 2025-12-12T16:26:46.315377024+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/obo-prometheus-operator-admission-webhook_v1_service.yaml 2025-12-12T16:26:46.315727583+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/obo-prometheus-operator_v1_service.yaml 2025-12-12T16:26:46.316462971+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/observability-operator_monitoring.coreos.com_v1_prometheusrule.yaml 2025-12-12T16:26:46.316710077+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/observability-operator_rbac.authorization.k8s.io_v1_rolebinding.yaml 2025-12-12T16:26:46.316903942+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/observability-operator_v1_service.yaml 2025-12-12T16:26:46.317089687+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/observability.openshift.io_observabilityinstallers.yaml 2025-12-12T16:26:46.317633371+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/observability.openshift.io_uiplugins.yaml 2025-12-12T16:26:46.318612035+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/perses.dev_perses.yaml 2025-12-12T16:26:46.322584136+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/perses.dev_persesdashboards.yaml 2025-12-12T16:26:46.323167531+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/perses.dev_persesdatasources.yaml 2025-12-12T16:26:46.323670383+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/perses_v1_serviceaccount.yaml 2025-12-12T16:26:46.323905289+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/persesdashboard-editor-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:26:46.324135745+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/persesdashboard-viewer-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:26:46.324383651+00:00 
stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/persesdatasource-editor-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:26:46.325498219+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/manifests/persesdatasource-viewer-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:26:46.325837628+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="Reading file" file=/bundle/metadata/annotations.yaml ././@LongLink0000644000000000000000000000032100000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-686468bdd5-xknw6_9cc5b0f4-dc96-4a65-8404-f3d36ad70787/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-m0000755000175000017500000000000015117043043033063 5ustar zuulzuul././@LongLink0000644000000000000000000000036700000000000011611 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-686468bdd5-xknw6_9cc5b0f4-dc96-4a65-8404-f3d36ad70787/openshift-controller-manager-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-m0000755000175000017500000000000015117043062033064 5ustar zuulzuul././@LongLink0000644000000000000000000000037400000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-686468bdd5-xknw6_9cc5b0f4-dc96-4a65-8404-f3d36ad70787/openshift-controller-manager-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-m0000644000175000017500000113532115117043043033073 0ustar zuulzuul2025-12-12T16:16:45.197587221+00:00 stderr F I1212 16:16:45.178934 1 cmd.go:253] Using service-serving-cert provided certificates 2025-12-12T16:16:45.197587221+00:00 stderr F I1212 16:16:45.180071 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 2025-12-12T16:16:45.197587221+00:00 stderr F I1212 16:16:45.193612 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:45.252282616+00:00 stderr F I1212 16:16:45.252220 1 builder.go:304] openshift-controller-manager-operator version 4.20.0-202510211040.p2.gaa455c0.assembly.stream.el9-aa455c0-aa455c043152123595c2b4f72e02279aad9dd48a 2025-12-12T16:16:46.155882126+00:00 stderr F I1212 16:16:46.147310 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:46.155882126+00:00 stderr F W1212 16:16:46.153775 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:46.155882126+00:00 stderr F W1212 16:16:46.153792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:46.155882126+00:00 stderr F W1212 16:16:46.153798 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:46.155882126+00:00 stderr F W1212 16:16:46.153802 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 
2025-12-12T16:16:46.155882126+00:00 stderr F W1212 16:16:46.153805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:46.155882126+00:00 stderr F W1212 16:16:46.153808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 2025-12-12T16:16:46.155882126+00:00 stderr F I1212 16:16:46.155171 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:46.163858761+00:00 stderr F I1212 16:16:46.160481 1 leaderelection.go:257] attempting to acquire leader lease openshift-controller-manager-operator/openshift-controller-manager-operator-lock... 2025-12-12T16:16:46.183407038+00:00 stderr F I1212 16:16:46.181404 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:16:46.186456622+00:00 stderr F I1212 16:16:46.186422 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:46.186524074+00:00 stderr F I1212 16:16:46.186504 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:46.190124902+00:00 stderr F I1212 16:16:46.190074 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:46.190804158+00:00 stderr F I1212 16:16:46.190602 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:46.190971802+00:00 stderr F I1212 16:16:46.190958 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:46.191004553+00:00 stderr F I1212 16:16:46.190995 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:46.191037354+00:00 stderr F I1212 16:16:46.191028 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:46.191060725+00:00 stderr F I1212 16:16:46.191052 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:46.198251800+00:00 stderr F I1212 16:16:46.195690 1 leaderelection.go:271] successfully acquired lease openshift-controller-manager-operator/openshift-controller-manager-operator-lock 2025-12-12T16:16:46.201334155+00:00 stderr F I1212 16:16:46.201206 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator-lock", UID:"045eab69-2a73-449e-aa06-9200f90ad227", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"37199", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' openshift-controller-manager-operator-686468bdd5-xknw6_76efa8f7-4603-4204-b79b-f7dc678272e0 became leader 2025-12-12T16:16:46.222337818+00:00 stderr F I1212 16:16:46.222112 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:46.263531284+00:00 stderr F I1212 16:16:46.245965 1 starter.go:117] FeatureGates initialized: knownFeatureGates=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter 
ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:16:46.271357895+00:00 stderr F I1212 16:16:46.248233 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", 
"AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:46.290366139+00:00 stderr F I1212 16:16:46.289563 1 base_controller.go:76] Waiting for caches to sync for ImagePullSecretCleanupController 2025-12-12T16:16:46.290366139+00:00 stderr F I1212 16:16:46.290076 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:46.300250070+00:00 stderr F I1212 16:16:46.291662 1 base_controller.go:76] Waiting for caches to sync for OpenshiftControllerManagerStaticResources-StaticResources 2025-12-12T16:16:46.300250070+00:00 stderr F I1212 16:16:46.291687 1 operator.go:145] Starting OpenShiftControllerManagerOperator 2025-12-12T16:16:46.300250070+00:00 stderr F I1212 16:16:46.299981 1 base_controller.go:76] Waiting for caches to sync for openshift-controller-manager 2025-12-12T16:16:46.300250070+00:00 stderr F I1212 16:16:46.299987 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer 2025-12-12T16:16:46.300250070+00:00 stderr F I1212 16:16:46.300028 1 base_controller.go:76] Waiting for caches to sync for ConfigObserver 2025-12-12T16:16:46.300250070+00:00 stderr F I1212 16:16:46.300054 1 base_controller.go:76] Waiting for caches to sync for UserCAObservationController 2025-12-12T16:16:46.301059950+00:00 stderr F I1212 16:16:46.301020 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:46.301059950+00:00 stderr F I1212 16:16:46.301033 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:46.301684746+00:00 stderr F I1212 16:16:46.301618 1 base_controller.go:76] Waiting for caches to sync for StatusSyncer_openshift-controller-manager 2025-12-12T16:16:46.491889939+00:00 stderr F I1212 16:16:46.491821 1 base_controller.go:82] Caches are 
synced for OpenshiftControllerManagerStaticResources-StaticResources 2025-12-12T16:16:46.491953341+00:00 stderr F I1212 16:16:46.491941 1 base_controller.go:119] Starting #1 worker of OpenshiftControllerManagerStaticResources-StaticResources controller ... 2025-12-12T16:16:46.523399259+00:00 stderr F I1212 16:16:46.502390 1 base_controller.go:82] Caches are synced for openshift-controller-manager 2025-12-12T16:16:46.523399259+00:00 stderr F I1212 16:16:46.502422 1 base_controller.go:119] Starting #1 worker of openshift-controller-manager controller ... 2025-12-12T16:16:46.523399259+00:00 stderr F I1212 16:16:46.502598 1 base_controller.go:82] Caches are synced for StatusSyncer_openshift-controller-manager 2025-12-12T16:16:46.523399259+00:00 stderr F I1212 16:16:46.502605 1 base_controller.go:119] Starting #1 worker of StatusSyncer_openshift-controller-manager controller ... 2025-12-12T16:16:46.523399259+00:00 stderr F I1212 16:16:46.502619 1 base_controller.go:82] Caches are synced for LoggingSyncer 2025-12-12T16:16:46.523399259+00:00 stderr F I1212 16:16:46.502623 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ... 2025-12-12T16:16:46.523399259+00:00 stderr F I1212 16:16:46.502653 1 base_controller.go:82] Caches are synced for ConfigObserver 2025-12-12T16:16:46.523399259+00:00 stderr F I1212 16:16:46.502657 1 base_controller.go:119] Starting #1 worker of ConfigObserver controller ... 2025-12-12T16:16:46.523399259+00:00 stderr F I1212 16:16:46.502671 1 base_controller.go:82] Caches are synced for UserCAObservationController 2025-12-12T16:16:46.523399259+00:00 stderr F I1212 16:16:46.502674 1 base_controller.go:119] Starting #1 worker of UserCAObservationController controller ... 2025-12-12T16:16:46.818383670+00:00 stderr F I1212 16:16:46.811932 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:46.892000298+00:00 stderr F I1212 16:16:46.890377 1 base_controller.go:82] Caches are synced for ImagePullSecretCleanupController 2025-12-12T16:16:46.892000298+00:00 stderr F I1212 16:16:46.891079 1 base_controller.go:119] Starting #1 worker of ImagePullSecretCleanupController controller ... 
2025-12-12T16:16:47.074994766+00:00 stderr F I1212 16:16:47.071566 1 status_controller.go:229] clusteroperator/openshift-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:51:57Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-03T09:40:45Z","message":"Progressing: deployment/controller-manager: no available replica found","reason":"_DesiredStateNotYetAchieved","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-11-03T09:40:45Z","message":"Available: no openshift controller manager deployment pods available on any node","reason":"_NoPodsAvailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:01Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:57Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:47.110349199+00:00 stderr F I1212 16:16:47.099425 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-controller-manager changed: Progressing message changed from "Progressing: deployment/controller-manager: no available replica found\nRouteControllerManagerProgressing: deployment/route-controller-manager: no available replica found" to "Progressing: deployment/controller-manager: no available replica found",Available message changed from "Available: no openshift controller manager deployment pods available on any node\nRouteControllerManagerAvailable: no route controller manager deployment pods available on any node" to "Available: no openshift controller manager deployment pods available on any node" 2025-12-12T16:16:50.308242873+00:00 stderr F I1212 16:16:50.305738 1 status_controller.go:229] clusteroperator/openshift-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:51:57Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:01Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:57Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:50.322850859+00:00 stderr F I1212 16:16:50.322093 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-controller-manager changed: Progressing changed from True to False ("All is well"),Available changed from False to True ("All is well") 2025-12-12T16:16:51.096580289+00:00 stderr F I1212 16:16:51.095686 1 status_controller.go:229] clusteroperator/openshift-controller-manager diff 
{"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:51:57Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:16:50Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:16:50Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:01Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:57Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:51.102390791+00:00 stderr F E1212 16:16:51.102314 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_openshift-controller-manager reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"openshift-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:16:55.913513811+00:00 stderr F I1212 16:16:55.905551 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.905502856 +0000 UTC))" 2025-12-12T16:16:55.913513811+00:00 stderr F I1212 16:16:55.912787 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.912719302 +0000 UTC))" 2025-12-12T16:16:55.913513811+00:00 stderr F I1212 16:16:55.912825 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.912806044 +0000 UTC))" 2025-12-12T16:16:55.913513811+00:00 stderr F I1212 16:16:55.912849 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.912833685 +0000 UTC))" 2025-12-12T16:16:55.913513811+00:00 stderr F I1212 16:16:55.912871 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.912856715 +0000 UTC))" 2025-12-12T16:16:55.913513811+00:00 stderr F I1212 16:16:55.912901 1 tlsconfig.go:181] "Loaded client CA" index=5 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.912880556 +0000 UTC))" 2025-12-12T16:16:55.913513811+00:00 stderr F I1212 16:16:55.912923 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.912906206 +0000 UTC))" 2025-12-12T16:16:55.913513811+00:00 stderr F I1212 16:16:55.912940 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.912928527 +0000 UTC))" 2025-12-12T16:16:55.913513811+00:00 stderr F I1212 16:16:55.912970 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.912946247 +0000 UTC))" 2025-12-12T16:16:55.913513811+00:00 stderr F I1212 16:16:55.912996 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.912980218 +0000 UTC))" 2025-12-12T16:16:55.913513811+00:00 stderr F I1212 16:16:55.913399 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-controller-manager-operator.svc\" [serving] validServingFor=[metrics.openshift-controller-manager-operator.svc,metrics.openshift-controller-manager-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:16:55.913377498 +0000 UTC))" 2025-12-12T16:16:55.915201502+00:00 stderr F I1212 16:16:55.913588 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556206\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2028-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:16:55.913570093 +0000 UTC))" 2025-12-12T16:17:06.355320230+00:00 stderr P I1212 16:17:06.354313 1 core.go:352] ConfigMap 
"openshift-controller-manager/client-ca" changes: {"data":{"ca-bundle.crt":"-----BEGIN CERTIFICATE-----\nMIIDMDCCAhigAwIBAgIIIzF/30wVgUkwDQYJKoZIhvcNAQELBQAwNjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSAwHgYDVQQDExdhZG1pbi1rdWJlY29uZmlnLXNpZ25lcjAe\nFw0yNTExMDIwNzM0MDdaFw0zNTEwMzEwNzM0MDdaMDYxEjAQBgNVBAsTCW9wZW5z\naGlmdDEgMB4GA1UEAxMXYWRtaW4ta3ViZWNvbmZpZy1zaWduZXIwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDEaFvaxE/Ah0Q+4T67KuL6N5MoncMcfqtm\njKd8txx/b3t8o2WCAMF0IKDNMDDobraupmimcAQwOWen0WJzp3DqjVAIKabrG/DZ\nXqsx3xVHxhSvFOKEFQbiFu6HL0FvXs1bsMkm5YAcM/voHkGHefR+5YEgpgTuhZ6a\n9muG9cxUjlZ/BmMP3UwsgmRfxQ7TG3Ixf/mp++cLxi114b8ld8S4XtVuG//82BzB\nvk3J6+7tnRjli/AHSm0fx7ZvgRPY1b1IGSvGUMc6Qrc+nim/Ufd017TeFlkwKIRP\nPnUGuz0S/5Rz9XMoWJ/OHi/vB0eQs3pyqHBDPgTYCt1NZUO9nN7tAgMBAAGjQjBA\nMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBT9Y6Mp\nr1Yg0NUI8hFWKTlX1CJd6zANBgkqhkiG9w0BAQsFAAOCAQEAWD6f4r0mm7D7+uVn\nCs3w3zq0JIVBUtBTXYuSbgpnpmSVALRCSnRyguNx37SHU2qXVF7+hsaWYuO/B6iR\nZ5uZ6fkLEEFI5YN7q1HBEPBRaBFtJ7fSOBl9iSsHI11DX53+wRhJR319P3fZ18eq\nGwTdUHTy+L9ec1NjaJvOz2eJEVB3O2A9ySh+Rhdv75mFqTbNvxyf5fjw7OHDd5ti\nWPCT1UzyXUXpE8ET6HA59gQO3Ix/VPzZTpNWX1FAXDYpYFkK1t9Ifzjdqf3/P+uP\nvwMtUNixJg8RYhfRNZ4RbfULWU9Y0DpadRVX5WppGBTRNAAgmNGBYPPR7HuxVGx1\nReJ2Bg==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIIHuh6I9HTH+QwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM1WhcNMjYx\nMDI4MDgxNzM2WjAmMSQwIgYDVQQDDBtrdWJlLWNzci1zaWduZXJfQDE3NjIwNzE0\nNTUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb91UVWMxGg+Q7+o+5\ntt2z/Oq+7Mc1SNGJ4oF0Y99WM1Y7tIo5NOKWiIXE16rGuzY4+D9O53fh14ngzJm3\nWh3GjCGvr8/2W7J1BblXwuKwDuRl+OfJvyPtETUeME42Y9V6XP/B60iaaEYhm5t7\nTDf7TmmjEpqeWix43KqHnpcW9Zr8tM+tHLrGwcHnb+z3LvGkQA7mbXsaHiuryCVx\nudxQYNKWgtAkw3OOtuVyJ2gGD7iYVni1jg7nc9ZhQOYBoYRbAw3zh36CY50dZA89\nhDKYsVVourd9xfAdwSmrcgtsVo1X0ucCpEEUYKEz3/udgk+Dgf2hy3flIMcg9kcI\nS8xzAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0G\nA1UdDgQWBBSGMLijow+n1t5asMgXimhNhoMUvDAfBgNVHSMEGDAWgBQwDwrhoAwS\n6q2GA5CnoIQZVVim5jANBgkqhkiG9w0BAQsFAAOCAQEAhDT7ncsDiLG3QyVtffPp\nTjWscZowuHbVwUjwpZt5OxQFonSJxNCnqrj4MlIqqxraH5nnsrW5FqWyWWeMmXpz\nbFkiVhPFCVGjmRN9V1LXjHDc4lufe3ixq+MvgtU1YL/WJBmUxxw5dPifTT15IApJ\n6stLJ0QtHBNeEIIroUFpDB+O7OJYZ85ed6o6gT4n/v9nxBaLsZNpO2TzaWfI0Bst\nFEoPsfPKgBvwg9+2GijlP/VyKmP2gFdFm25PWeROU1VZzPrEhaOliO+/YXHt32YU\nJkxTF/smrLzxRRbb507cuvWEilzud93YbHmAhAj1h0CpDdzjxYDr5zRYJSP7BV+6\nUA==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDiTCCAnGgAwIBAgIIOsA0z3vOTSgwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM0WhcNMjcx\nMDIzMDgxNzM1WjBSMVAwTgYDVQQDDEdvcGVuc2hpZnQta3ViZS1jb250cm9sbGVy\nLW1hbmFnZXItb3BlcmF0b3JfY3NyLXNpZ25lci1zaWduZXJAMTc2MjA3MTQ1NTCC\nASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPY8aATxrjWpRKfnFU7BgmD1\nkNEst9RVY3fzGsARc5tF3x+zhl9BbsoJ9NAncCj+KnkAIYk4pJMU6BHIWIakpvqV\nLsnlPM45VU8ocwOWb5Z7g88YdqHGRWeZqZt/rPXmH/846iVGDstB0YQWgKKKK97X\nvjKMsq9ALSVj8gRWai7B7MVP/bZ4FgeqsYq6zIH9XKdPO8ev20qffrob4nmLGHdJ\nikAliwIy6nkVYrATKOS8t56votMD7xuFQ8rM6uQ0YVejA5rE/Tmq4/NsFpfFfkCP\nMU0vO2XwqCBV81XKD00SmW9MXIxbTq5lU9YjjE1sDFREtN4uZL4nEDa2+wnvIxEC\nAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O\nBBYEFDAPCuGgDBLqrYYDkKeghBlVWKbmMB8GA1UdIwQYMBaAFDAPCuGgDBLqrYYD\nkKeghBlVWKbmMA0GCSqGSIb3DQEBCwUAA4IBAQDS31/o3/4/9DBLFVcLvIwOQCK1\n0ZwJUa/8iK5GaMlMH6cNfvVZBrb4SwTVx0WXI5wrIXrlvYc+PtXL0MJeyIJmMpoU\nRyQAJZsh8cckeQjghV2Pf7wMfEbHudKTp8uoQDUBntkfNhJa4pPxmNWuhOrlvdB5\nEF/6IGviKAdSy0jcNpscvD3W0oSpCYRW0Ki/25LaFvIqP2Xy/cNJlWhzJWqZbK6k\nR9I4knhvIv/JYmppOVXw1rEvP+8Pn8UF2oSfFXcN5W+j4YIIhrAUnjAbaflpyX8k\nAUEKdtgVNNe7RlW9nQrhO8GqFJItitBbNtVuSkw9XIlQ0gc40E7mgB7Mjnbu\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDQjCCAiqgAwIBAgIICN23rtJ7PrYwDQYJKoZIhvcNAQELBQAwPzESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSkwJwYDVQQDEyBrdWJlLWFwaXNlcnZlci10by1rdWJlbGV0\nLXNpZ25lcjAeFw0yNTExMDIwNzM0MTFaFw0yNjExMDIwNzM0MTFaMD8xEjAQBgNV\nBAsTCW9wZW5zaGlmdDEpMCcGA1UEAxMga3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxl\ndC1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDzRlKEKNEy\nCyd+PPBkdzQZd3BeUb0b2qi1h8fbUiSNENrOpafKxxAHcr3a4KWB6sKhi28r14mF\nxcJ2Yb92/jLpkS15p629AUrGXxKuL8QtkBsY3dH0CqKMBedO6oxodva9Avc+3DMI\nvvYBJFy+4on/0JbM54fduvDmcEmhBtgRItK3Z87VbhemVPj7uDi9EV381uRMlmq4\ncgtD5mfS1yeRu0ut5IIr7/PN1G+93slLGQkHveqWlsFrDYd8Qm5PqirRBYy+18RC\nmEuNirFX3yPrEGwMvRlJyFia0RKuK69bFL2vduI5Wu7h/6VKP0/vEpEYqI6bJYoV\nbUjA2vqrV/1VAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTAD\nAQH/MB0GA1UdDgQWBBRNj9dsSIhfwKZ491Q/XZYt2lRWLzANBgkqhkiG9w0BAQsF\nAAOCAQEANmt7a5N+zch83D5+YeC4VQoFvpkHPKEYYT0EM/ykjch3453RmQWQEwkj\nVvstV1U16YpnEI61l1s347RHi64SwtlwV6tiNCpopDF2u3Bb+eiJqrlIC69EFnZE\n1426AVmZZq3sWu3eKB0HgT5u6B1rErSTl3c4hK4SiDsWWlVktBSN0BS4cD+urSAF\nc673/wLKCjq2I+9i3Wv2K7Ton3w5oaETE7lgQyImbKOVhJhFrPGu9fKXaeWlyXGY\nj7tz68vNTvecRynKrmzUJ9BBMfAXTrCowitzjBjanFitgXK4DnQMkb+8lv2Txb/n\nkB7RzcFDyIVd3g5XWBujR3fkQFWsNQ==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDlTCCAn2gAwIBAgIIJ6CFEe7+79cwDQYJKoZIhvcNAQELBQAwWDFWMFQGA1UE\nAwxNb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtYXBpc2Vy\ndmVyLXRvLWt1YmVsZXQtc2lnbmVyQDE3NjU1NTYyMDkwHhcNMjUxMjEyMTYxNjQ4\nWhcNMjYxMjEyMTYxNjQ5WjBYMVYwVAYDVQQDDE1vcGVuc2hpZnQta3ViZS1hcGlz\nZXJ2ZXItb3BlcmF0b3Jfa3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxldC1zaWduZXJA\nMTc2NTU1NjIwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFdeL5w\n1lJs3DvHYhLuNjrkSOLA4aXRwP85WxI5EZ4JKmSU1IxWlKucnS96ghUWuJudciUT\nGeDw1fYjUOvv+YnMhcM5avzLMmc/4JwRwmPsBNqeS6NSZsJYasHBQqT5lihBnZmU\nxRCSGpOAJqL/tWMsC3MxHG41AQunmbpk4RSZdPjfiJ3U8Gty9rnppq9GfZ9n+LxL\niIkodGqPe95J2csyNuLpDmOlhA5x7/miPLT7Wtp/hN/s2DSgpKQEWIHEsEVlfU+q\nH+GO/W1yWO+dDthJ2/yGn4ZQVe2riuw5uUcxjY27wkobhZ25/9brhqAKxweioGLf\nB32mvwrIarjuWKsCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQF\nMAMBAf8wHQYDVR0OBBYEFAl1BlYxjAq7rEsUpr186nCtx13/MB8GA1UdIwQYMBaA\nFAl1BlYxjAq7rEsUpr186nCtx13/MA0GCSqGSIb3DQEBCwUAA4IBAQCu3dv4kIwz\nu5DkLCS1z/I9QcxrEZmJs72LUWpri/eyvFxK5LrC4bU0d3LZXdjQUxLhrk1A8qD6\ne7QoqzRf9QdNRUiU3ilpwtANd6NwmIzd2PrcalBdoglrtDpBz5VAZ0j26YJAcpyn\nXbMpaDDOgz247mp4Ts1wHVjQa0H0bFAtuKbab+R+EFWCVc77MSRXVDiQndg/gtdZ\nEekPhH4kNhFT2c74uf32ICivfylG79g0sGNUVO0SPLup4psWmGf2pxI+HGftIT8p\ni0idY7Ij5JlIyQ8ypGbD7AqA1bRwcT6o8Al2iQw349XO694SJUNs10QGwuVKdHqL\nJC64gPwOl26J\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDNDCCAhygAwIBAgIILvKlXd2YBKIwDQYJKoZIhvcNAQELBQAwODESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVy\nMB4XDTI1MTEwMjA3MzQxMFoXDTI2MTEwMjA3MzQxMFowODESMBAGA1UECxMJb3Bl\nbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyMIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsOnFEdxnqqE7l+5NxN82TBBycC2F\n5cUboCcUVr211lubqmjCusnJzzz+6rKpO8uFqqGeu4C7rGpudOaK52IZhG0WP8b7\n0raQhnM4DV8t2SHDV4GhFUiuNE+b4FJrZ6jiljQo2g9ZeeCZgdmmBrIFHBXDFzEc\nA1RPScqOtvBbbH064Zd267gOmVPJnWmxDXo6X/RGYCm1YUS6FQ2WWpl707ComvgZ\nAvWGSA4H1sZirMQ+ug3bctkLb+SiXUzf+tLnGIHPeqDNfMrNUhxBl6dDhlMbUIzY\nrVxgDD8y3i7eg5i5HG8yntl8epgs2gn47wfavfjLqATBlciJPZY6Qv3BFQIDAQAB\no0IwQDAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\nWi6wmZ50AVdwxaiO3WjTvp82+sEwDQYJKoZIhvcNAQELBQADggEBAA4siMWwOGL5\nCiCxbiJsuKMbCLlFR6JaD43FWnUCZDoe7np7W76Oh4py4Zht7XlZKotcXrrfRYVO\ndVht66PCbSujy375p/B6c3isG4h/1cNSGDm1uhAkHXGZ88S2wSjKT5YJ/HUAkvyj\nadQgZeO7Q60YBSDE/67Ldq1zqvBrMF2k8pF49p1AdAtf4OSDzIaGGPUQJTFExA0E\n03xMlOPNhYZ8MgFT2XE6nRT74lCAfK9krAsZLtFuAtp/14t013PD0FqTTQRUmuSj\nO6pJKDTH8tZ3ieXxSzRR+j4p5hkHaehgQVyUbwiw8WVXkd6NcWR6yQcSeqIsTSCD\neMDdTmmKyuo=\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhzCCAm+gAwIBAgIID5n6gpWYc8YwDQYJKoZIhvcNAQELBQAwUTFPME0GA1UE\nAwxGb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtY29udHJv\nbC1wbGFuZS1zaWduZXJAMTc2NTU1NjIwOTAeFw0yNTEyMTIxNjE2NDhaFw0yNjAy\nMTAxNjE2NDlaMFExTzBNBgNVBAMMRm9wZW5zaGlmdC1rdWJlLWFwaXNlcnZlci1v\ncGVyYXRvcl9rdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyQDE3NjU1NTYyMDkwggEi\nMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtQLJD7hc4OcHS2GPLdn9xsKkM\nAqHIcDr2UgXYWTVShY348nGviBJGCBWyKqQPpM6u4zIbS4xHMeQ3fnVCRvti8Ggf\n7zKigxyoxwcx+f4z9L5fCO6RbScmwmaYchePaAMT/7sNBt8NIKyFfep/bExW2UPw\nrb0qUqAbmBse+Azrl0V+UHNexaG3VmsOEf 2025-12-12T16:17:06.355471393+00:00 stderr F 
CJORAVgIo8SNIAG8jHe6+r3BtnwTsC\n59znIuoxDrBL9cq82ZZGQ++jVx4AE6JM0Lj/UBPVsJ06+X/829a1PiHuqlY2oN/p\nm+xCLSQgi59nrDifJLQXNBjahDW8ccWnbkvkUPjO8/OzS2nJb6uXihez5m2xAgMB\nAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBR9PxK41FQkWasZMK1vVKm6UNhzHzAfBgNVHSMEGDAWgBR9PxK41FQkWasZMK1v\nVKm6UNhzHzANBgkqhkiG9w0BAQsFAAOCAQEAfHCzxMKk00cOsThQkkL2trlY5tl2\n9wXEd/62Fh8EoOhNmCIpyPYLWMDnD2GB2BS7J5S+zcqby2+7s8Etub0gpvbN2Ocq\nHss3f+WcAFm7t9hiQrJ4gPYSkwEQGCwJ3ueGIEmPyyrTQPTmzNYudSdXt1WSrTpO\nO1sKWdQro5M0V4U9Z6MWGnG4nIZljqHWgVkZXkluh6Rvshoen8rhUNa6VV3aMHcZ\n94dtvZRSye9RsOZwZygsG/HU2+GcnKKYvqkIo8FZVAYTyu3rlOlT9dmmZpxwukb6\nADjin/tgzt7r0FiU+Z9uYqI3SoFog9pv+mlqpuf3zLm+Q3DKwBpxaxn5nQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDSDCCAjCgAwIBAgIIV//3Qv2OxM4wDQYJKoZIhvcNAQELBQAwQjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSwwKgYDVQQDEyNrdWJlbGV0LWJvb3RzdHJhcC1rdWJlY29u\nZmlnLXNpZ25lcjAeFw0yNTExMDIwNzM0MDhaFw0zNTEwMzEwNzM0MDhaMEIxEjAQ\nBgNVBAsTCW9wZW5zaGlmdDEsMCoGA1UEAxMja3ViZWxldC1ib290c3RyYXAta3Vi\nZWNvbmZpZy1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc\nZBn9WZ5wLPck6+g3f4p0NBGyE6LaXnWavXx9m0sVdTVQooknndmKufeYkXGZ5Lb+\nfbMAp/6swgSJ1DjdBj06rCqJEZfdZl3uZoD/Th4ha2Phl12bXaNYLiuOu5BOZ3UW\n08y1Wab9Y9zc0o4Z71pHH4o9TH3QPNT6BqAz4kkgD6t1r/R7E7lrZbx+7e+0JBAW\nRgufaFOX1AYU5B4+pSM21eJY7oP1P9I4DMeeJW39opCCHAuUQgHpOV1YPtRqEPJ4\n9matas8qm5qIMIPbGEGFckSJqgny9YCfHaLezJtZMIHJgz5LW4H91gQCGvfSbLtH\nYxYO/PcTXQCnDYNwqf29AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBSCg7Y8QkzypoNAqYVPNK5YWMVIXzANBgkqhkiG\n9w0BAQsFAAOCAQEAsP1NR5ZgC7F5FvoU2CXla4FufdeYTk4bjjNoFJMqnvI5gcY6\nJDxP1Rl2YBHCpRxngtBFxw3Xoe8UnBKzZWsS5wLUYRloprhGVBSLM0vqJJOvP7M0\njt3SLuB7h0dG2GO9yQ4y10+xVWxP5Os9wcbQcRgTQKL3gHmCq4aQN1cqJSxyJ/ut\nlbfYlM/xBcfLMY5Leeas6y2FPCFIEONh1U9FJZlF3YkhPp+XD7aePtC4tJqsokMc\nP80IwPn54aDT9akRPsOteB6C+xSAz2TlfWaJ/l/x9yXK+HJrRhMartqyN511SeEd\nDpNcMW9qPTjJzBj+N3f0ZfvbTmhVSvV65ZEtAw==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhTCCAm2gAwIBAgIIfksp5LDEMMgwDQYJKoZIhvcNAQELBQAwUDFOMEwGA1UE\nAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX25vZGUtc3lzdGVt\nLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MB4XDTI1MTEwMjA3NTEyN1oXDTI4MTEw\nMTA3NTEyOFowUDFOMEwGA1UEAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9w\nZXJhdG9yX25vZGUtc3lzdGVtLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA402A+B9GaYmzBVLJEyrGvytCY0Vb\nvvIKHW2kxl9IFN3+LNZHW/mKeJfz/hBTm2bs+6uRCDlMSyONDlVUWVsuE+q0+F42\n0n3VyxWRSrDZ2ur5oNxmoBSsHRM+PxccQ6X3JTZyO397LHNOzxAs/Es+St8A8sbY\nGLc1lNqeOLvwAOT5d2PrFlYCAfXYs/UVIaio846jidKKN1f8Z6W5pgdAHuTXbyBQ\nLDQh6s43TBPhww1KszmcwURjEBDCT6KlhsM/quMd9XlMU0ZEAMf6XxsqvW8ia8C8\nF+RNAaGkwmiS4qZ+hJ4KIUnWM84j+bsyNBqlHFKi1e7LsKRyjnQ288FqIQIDAQAB\no2MwYTAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\n9O+dX0K7dzD13bshuR70sHbUAeAwHwYDVR0jBBgwFoAU9O+dX0K7dzD13bshuR70\nsHbUAeAwDQYJKoZIhvcNAQELBQADggEBADPRGSn1U/YwBUkpU7vTzsxqLaVJW21o\n6hV/W2IjjGNGqp6c2kSH/3ZGSEjNwIJqKRFpC2gmTPgAqnC4nDosOHx5F5HXTmrU\n1l2Ivcm1Ep+t/zBgNHjBi3yommx8n2iTTdakpQaq7/u1s0I4UiRqXydjoGXp7H1C\naAsmRlK8ovgEAWzItjeMBzy65wqiStPBK+XAIddqznHCxrRyH5xk3HcnyMG4GDWl\nrogdK8yTGCuZVCvGfe9Hwm8tyYrxDRNvRLTc0ssTonAwnR/7IzaVVc9Pp0svCynJ\n6VX3FGhgWwDVWeajj8yrXeR42az/Rr1TAAOZtJMW+4hIkaU0/+msvgw=\n-----END 
CERTIFICATE-----\n"},"metadata":{"creationTimestamp":"2025-11-02T07:51:50Z","managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ca-bundle.crt":{}},"f:metadata":{"f:annotations":{".":{},"f:openshift.io/owning-component":{}}}},"manager":"cluster-kube-apiserver-operator","operation":"Update","time":"2025-12-12T16:16:55Z"}],"resourceVersion":null,"uid":"e7bc0b2c-2af6-488e-bf6f-25875798350a"}} 2025-12-12T16:17:06.355921164+00:00 stderr F I1212 16:17:06.355873 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/client-ca -n openshift-controller-manager: 2025-12-12T16:17:06.355921164+00:00 stderr F cause by changes in data.ca-bundle.crt 2025-12-12T16:17:06.378418674+00:00 stderr P I1212 16:17:06.378310 1 core.go:352] ConfigMap "openshift-route-controller-manager/client-ca" changes: {"data":{"ca-bundle.crt":"-----BEGIN CERTIFICATE-----\nMIIDMDCCAhigAwIBAgIIIzF/30wVgUkwDQYJKoZIhvcNAQELBQAwNjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSAwHgYDVQQDExdhZG1pbi1rdWJlY29uZmlnLXNpZ25lcjAe\nFw0yNTExMDIwNzM0MDdaFw0zNTEwMzEwNzM0MDdaMDYxEjAQBgNVBAsTCW9wZW5z\naGlmdDEgMB4GA1UEAxMXYWRtaW4ta3ViZWNvbmZpZy1zaWduZXIwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDEaFvaxE/Ah0Q+4T67KuL6N5MoncMcfqtm\njKd8txx/b3t8o2WCAMF0IKDNMDDobraupmimcAQwOWen0WJzp3DqjVAIKabrG/DZ\nXqsx3xVHxhSvFOKEFQbiFu6HL0FvXs1bsMkm5YAcM/voHkGHefR+5YEgpgTuhZ6a\n9muG9cxUjlZ/BmMP3UwsgmRfxQ7TG3Ixf/mp++cLxi114b8ld8S4XtVuG//82BzB\nvk3J6+7tnRjli/AHSm0fx7ZvgRPY1b1IGSvGUMc6Qrc+nim/Ufd017TeFlkwKIRP\nPnUGuz0S/5Rz9XMoWJ/OHi/vB0eQs3pyqHBDPgTYCt1NZUO9nN7tAgMBAAGjQjBA\nMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBT9Y6Mp\nr1Yg0NUI8hFWKTlX1CJd6zANBgkqhkiG9w0BAQsFAAOCAQEAWD6f4r0mm7D7+uVn\nCs3w3zq0JIVBUtBTXYuSbgpnpmSVALRCSnRyguNx37SHU2qXVF7+hsaWYuO/B6iR\nZ5uZ6fkLEEFI5YN7q1HBEPBRaBFtJ7fSOBl9iSsHI11DX53+wRhJR319P3fZ18eq\nGwTdUHTy+L9ec1NjaJvOz2eJEVB3O2A9ySh+Rhdv75mFqTbNvxyf5fjw7OHDd5ti\nWPCT1UzyXUXpE8ET6HA59gQO3Ix/VPzZTpNWX1FAXDYpYFkK1t9Ifzjdqf3/P+uP\nvwMtUNixJg8RYhfRNZ4RbfULWU9Y0DpadRVX5WppGBTRNAAgmNGBYPPR7HuxVGx1\nReJ2Bg==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIIHuh6I9HTH+QwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM1WhcNMjYx\nMDI4MDgxNzM2WjAmMSQwIgYDVQQDDBtrdWJlLWNzci1zaWduZXJfQDE3NjIwNzE0\nNTUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb91UVWMxGg+Q7+o+5\ntt2z/Oq+7Mc1SNGJ4oF0Y99WM1Y7tIo5NOKWiIXE16rGuzY4+D9O53fh14ngzJm3\nWh3GjCGvr8/2W7J1BblXwuKwDuRl+OfJvyPtETUeME42Y9V6XP/B60iaaEYhm5t7\nTDf7TmmjEpqeWix43KqHnpcW9Zr8tM+tHLrGwcHnb+z3LvGkQA7mbXsaHiuryCVx\nudxQYNKWgtAkw3OOtuVyJ2gGD7iYVni1jg7nc9ZhQOYBoYRbAw3zh36CY50dZA89\nhDKYsVVourd9xfAdwSmrcgtsVo1X0ucCpEEUYKEz3/udgk+Dgf2hy3flIMcg9kcI\nS8xzAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0G\nA1UdDgQWBBSGMLijow+n1t5asMgXimhNhoMUvDAfBgNVHSMEGDAWgBQwDwrhoAwS\n6q2GA5CnoIQZVVim5jANBgkqhkiG9w0BAQsFAAOCAQEAhDT7ncsDiLG3QyVtffPp\nTjWscZowuHbVwUjwpZt5OxQFonSJxNCnqrj4MlIqqxraH5nnsrW5FqWyWWeMmXpz\nbFkiVhPFCVGjmRN9V1LXjHDc4lufe3ixq+MvgtU1YL/WJBmUxxw5dPifTT15IApJ\n6stLJ0QtHBNeEIIroUFpDB+O7OJYZ85ed6o6gT4n/v9nxBaLsZNpO2TzaWfI0Bst\nFEoPsfPKgBvwg9+2GijlP/VyKmP2gFdFm25PWeROU1VZzPrEhaOliO+/YXHt32YU\nJkxTF/smrLzxRRbb507cuvWEilzud93YbHmAhAj1h0CpDdzjxYDr5zRYJSP7BV+6\nUA==\n-----END 
CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDiTCCAnGgAwIBAgIIOsA0z3vOTSgwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM0WhcNMjcx\nMDIzMDgxNzM1WjBSMVAwTgYDVQQDDEdvcGVuc2hpZnQta3ViZS1jb250cm9sbGVy\nLW1hbmFnZXItb3BlcmF0b3JfY3NyLXNpZ25lci1zaWduZXJAMTc2MjA3MTQ1NTCC\nASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPY8aATxrjWpRKfnFU7BgmD1\nkNEst9RVY3fzGsARc5tF3x+zhl9BbsoJ9NAncCj+KnkAIYk4pJMU6BHIWIakpvqV\nLsnlPM45VU8ocwOWb5Z7g88YdqHGRWeZqZt/rPXmH/846iVGDstB0YQWgKKKK97X\nvjKMsq9ALSVj8gRWai7B7MVP/bZ4FgeqsYq6zIH9XKdPO8ev20qffrob4nmLGHdJ\nikAliwIy6nkVYrATKOS8t56votMD7xuFQ8rM6uQ0YVejA5rE/Tmq4/NsFpfFfkCP\nMU0vO2XwqCBV81XKD00SmW9MXIxbTq5lU9YjjE1sDFREtN4uZL4nEDa2+wnvIxEC\nAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O\nBBYEFDAPCuGgDBLqrYYDkKeghBlVWKbmMB8GA1UdIwQYMBaAFDAPCuGgDBLqrYYD\nkKeghBlVWKbmMA0GCSqGSIb3DQEBCwUAA4IBAQDS31/o3/4/9DBLFVcLvIwOQCK1\n0ZwJUa/8iK5GaMlMH6cNfvVZBrb4SwTVx0WXI5wrIXrlvYc+PtXL0MJeyIJmMpoU\nRyQAJZsh8cckeQjghV2Pf7wMfEbHudKTp8uoQDUBntkfNhJa4pPxmNWuhOrlvdB5\nEF/6IGviKAdSy0jcNpscvD3W0oSpCYRW0Ki/25LaFvIqP2Xy/cNJlWhzJWqZbK6k\nR9I4knhvIv/JYmppOVXw1rEvP+8Pn8UF2oSfFXcN5W+j4YIIhrAUnjAbaflpyX8k\nAUEKdtgVNNe7RlW9nQrhO8GqFJItitBbNtVuSkw9XIlQ0gc40E7mgB7Mjnbu\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDQjCCAiqgAwIBAgIICN23rtJ7PrYwDQYJKoZIhvcNAQELBQAwPzESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSkwJwYDVQQDEyBrdWJlLWFwaXNlcnZlci10by1rdWJlbGV0\nLXNpZ25lcjAeFw0yNTExMDIwNzM0MTFaFw0yNjExMDIwNzM0MTFaMD8xEjAQBgNV\nBAsTCW9wZW5zaGlmdDEpMCcGA1UEAxMga3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxl\ndC1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDzRlKEKNEy\nCyd+PPBkdzQZd3BeUb0b2qi1h8fbUiSNENrOpafKxxAHcr3a4KWB6sKhi28r14mF\nxcJ2Yb92/jLpkS15p629AUrGXxKuL8QtkBsY3dH0CqKMBedO6oxodva9Avc+3DMI\nvvYBJFy+4on/0JbM54fduvDmcEmhBtgRItK3Z87VbhemVPj7uDi9EV381uRMlmq4\ncgtD5mfS1yeRu0ut5IIr7/PN1G+93slLGQkHveqWlsFrDYd8Qm5PqirRBYy+18RC\nmEuNirFX3yPrEGwMvRlJyFia0RKuK69bFL2vduI5Wu7h/6VKP0/vEpEYqI6bJYoV\nbUjA2vqrV/1VAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTAD\nAQH/MB0GA1UdDgQWBBRNj9dsSIhfwKZ491Q/XZYt2lRWLzANBgkqhkiG9w0BAQsF\nAAOCAQEANmt7a5N+zch83D5+YeC4VQoFvpkHPKEYYT0EM/ykjch3453RmQWQEwkj\nVvstV1U16YpnEI61l1s347RHi64SwtlwV6tiNCpopDF2u3Bb+eiJqrlIC69EFnZE\n1426AVmZZq3sWu3eKB0HgT5u6B1rErSTl3c4hK4SiDsWWlVktBSN0BS4cD+urSAF\nc673/wLKCjq2I+9i3Wv2K7Ton3w5oaETE7lgQyImbKOVhJhFrPGu9fKXaeWlyXGY\nj7tz68vNTvecRynKrmzUJ9BBMfAXTrCowitzjBjanFitgXK4DnQMkb+8lv2Txb/n\nkB7RzcFDyIVd3g5XWBujR3fkQFWsNQ==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDlTCCAn2gAwIBAgIIJ6CFEe7+79cwDQYJKoZIhvcNAQELBQAwWDFWMFQGA1UE\nAwxNb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtYXBpc2Vy\ndmVyLXRvLWt1YmVsZXQtc2lnbmVyQDE3NjU1NTYyMDkwHhcNMjUxMjEyMTYxNjQ4\nWhcNMjYxMjEyMTYxNjQ5WjBYMVYwVAYDVQQDDE1vcGVuc2hpZnQta3ViZS1hcGlz\nZXJ2ZXItb3BlcmF0b3Jfa3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxldC1zaWduZXJA\nMTc2NTU1NjIwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFdeL5w\n1lJs3DvHYhLuNjrkSOLA4aXRwP85WxI5EZ4JKmSU1IxWlKucnS96ghUWuJudciUT\nGeDw1fYjUOvv+YnMhcM5avzLMmc/4JwRwmPsBNqeS6NSZsJYasHBQqT5lihBnZmU\nxRCSGpOAJqL/tWMsC3MxHG41AQunmbpk4RSZdPjfiJ3U8Gty9rnppq9GfZ9n+LxL\niIkodGqPe95J2csyNuLpDmOlhA5x7/miPLT7Wtp/hN/s2DSgpKQEWIHEsEVlfU+q\nH+GO/W1yWO+dDthJ2/yGn4ZQVe2riuw5uUcxjY27wkobhZ25/9brhqAKxweioGLf\nB32mvwrIarjuWKsCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQF\nMAMBAf8wHQYDVR0OBBYEFAl1BlYxjAq7rEsUpr186nCtx13/MB8GA1UdIwQYMBaA\nFAl1BlYxjAq7rEsUpr186nCtx13/MA0GCSqGSIb3DQEBCwUAA4IBAQCu3dv4kIwz\nu5DkLCS1z/I9QcxrEZmJs72LUWpri/eyvFxK5LrC4bU0d3LZXdjQUxLhrk1A8qD6\ne7QoqzRf9QdNRUiU3ilpwtANd6NwmIzd2PrcalBdoglrtDpBz5VAZ0j26YJAcpyn\nXbMpaDDOgz247mp4Ts1wHVjQa0H0bFAtuKbab+R+EFWCVc77MSRXVDiQndg/gtdZ\nEekPhH4kNhFT2c74uf32ICivfylG79g0sGNUVO0SPLup4psWmGf2pxI+HGftIT8p\ni0idY7Ij5JlIyQ8ypGbD7AqA1bRwcT6o8Al2iQw349XO694SJUNs10QGwuVKdHqL\nJC64gPwOl26J\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDNDCCAhygAwIBAgIILvKlXd2YBKIwDQYJKoZIhvcNAQELBQAwODESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVy\nMB4XDTI1MTEwMjA3MzQxMFoXDTI2MTEwMjA3MzQxMFowODESMBAGA1UECxMJb3Bl\nbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyMIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsOnFEdxnqqE7l+5NxN82TBBycC2F\n5cUboCcUVr211lubqmjCusnJzzz+6rKpO8uFqqGeu4C7rGpudOaK52IZhG0WP8b7\n0raQhnM4DV8t2SHDV4GhFUiuNE+b4FJrZ6jiljQo2g9ZeeCZgdmmBrIFHBXDFzEc\nA1RPScqOtvBbbH064Zd267gOmVPJnWmxDXo6X/RGYCm1YUS6FQ2WWpl707ComvgZ\nAvWGSA4H1sZirMQ+ug3bctkLb+SiXUzf+tLnGIHPeqDNfMrNUhxBl6dDhlMbUIzY\nrVxgDD8y3i7eg5i5HG8yntl8epgs2gn47wfavfjLqATBlciJPZY6Qv3BFQIDAQAB\no0IwQDAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\nWi6wmZ50AVdwxaiO3WjTvp82+sEwDQYJKoZIhvcNAQELBQADggEBAA4siMWwOGL5\nCiCxbiJsuKMbCLlFR6JaD43FWnUCZDoe7np7W76Oh4py4Zht7XlZKotcXrrfRYVO\ndVht66PCbSujy375p/B6c3isG4h/1cNSGDm1uhAkHXGZ88S2wSjKT5YJ/HUAkvyj\nadQgZeO7Q60YBSDE/67Ldq1zqvBrMF2k8pF49p1AdAtf4OSDzIaGGPUQJTFExA0E\n03xMlOPNhYZ8MgFT2XE6nRT74lCAfK9krAsZLtFuAtp/14t013PD0FqTTQRUmuSj\nO6pJKDTH8tZ3ieXxSzRR+j4p5hkHaehgQVyUbwiw8WVXkd6NcWR6yQcSeqIsTSCD\neMDdTmmKyuo=\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhzCCAm+gAwIBAgIID5n6gpWYc8YwDQYJKoZIhvcNAQELBQAwUTFPME0GA1UE\nAwxGb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtY29udHJv\nbC1wbGFuZS1zaWduZXJAMTc2NTU1NjIwOTAeFw0yNTEyMTIxNjE2NDhaFw0yNjAy\nMTAxNjE2NDlaMFExTzBNBgNVBAMMRm9wZW5zaGlmdC1rdWJlLWFwaXNlcnZlci1v\ncGVyYXRvcl9rdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyQDE3NjU1NTYyMDkwggEi\nMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtQLJD7hc4OcHS2GPLdn9xsKkM\nAqHIcDr2UgXYWTVShY348nGviBJGCBWyKqQPpM6u4zIbS4xHMeQ3fnVCRvti8Ggf\n7zKigxyoxwcx+f4z9L5fCO6RbScmwmaYchePaAMT/7sNBt8NIKyFfep/bExW2UPw\nrb0qUqAbmBse+Azrl0V+UHNexaG3 2025-12-12T16:17:06.378537836+00:00 stderr F 
VmsOEfCJORAVgIo8SNIAG8jHe6+r3BtnwTsC\n59znIuoxDrBL9cq82ZZGQ++jVx4AE6JM0Lj/UBPVsJ06+X/829a1PiHuqlY2oN/p\nm+xCLSQgi59nrDifJLQXNBjahDW8ccWnbkvkUPjO8/OzS2nJb6uXihez5m2xAgMB\nAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBR9PxK41FQkWasZMK1vVKm6UNhzHzAfBgNVHSMEGDAWgBR9PxK41FQkWasZMK1v\nVKm6UNhzHzANBgkqhkiG9w0BAQsFAAOCAQEAfHCzxMKk00cOsThQkkL2trlY5tl2\n9wXEd/62Fh8EoOhNmCIpyPYLWMDnD2GB2BS7J5S+zcqby2+7s8Etub0gpvbN2Ocq\nHss3f+WcAFm7t9hiQrJ4gPYSkwEQGCwJ3ueGIEmPyyrTQPTmzNYudSdXt1WSrTpO\nO1sKWdQro5M0V4U9Z6MWGnG4nIZljqHWgVkZXkluh6Rvshoen8rhUNa6VV3aMHcZ\n94dtvZRSye9RsOZwZygsG/HU2+GcnKKYvqkIo8FZVAYTyu3rlOlT9dmmZpxwukb6\nADjin/tgzt7r0FiU+Z9uYqI3SoFog9pv+mlqpuf3zLm+Q3DKwBpxaxn5nQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDSDCCAjCgAwIBAgIIV//3Qv2OxM4wDQYJKoZIhvcNAQELBQAwQjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSwwKgYDVQQDEyNrdWJlbGV0LWJvb3RzdHJhcC1rdWJlY29u\nZmlnLXNpZ25lcjAeFw0yNTExMDIwNzM0MDhaFw0zNTEwMzEwNzM0MDhaMEIxEjAQ\nBgNVBAsTCW9wZW5zaGlmdDEsMCoGA1UEAxMja3ViZWxldC1ib290c3RyYXAta3Vi\nZWNvbmZpZy1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc\nZBn9WZ5wLPck6+g3f4p0NBGyE6LaXnWavXx9m0sVdTVQooknndmKufeYkXGZ5Lb+\nfbMAp/6swgSJ1DjdBj06rCqJEZfdZl3uZoD/Th4ha2Phl12bXaNYLiuOu5BOZ3UW\n08y1Wab9Y9zc0o4Z71pHH4o9TH3QPNT6BqAz4kkgD6t1r/R7E7lrZbx+7e+0JBAW\nRgufaFOX1AYU5B4+pSM21eJY7oP1P9I4DMeeJW39opCCHAuUQgHpOV1YPtRqEPJ4\n9matas8qm5qIMIPbGEGFckSJqgny9YCfHaLezJtZMIHJgz5LW4H91gQCGvfSbLtH\nYxYO/PcTXQCnDYNwqf29AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBSCg7Y8QkzypoNAqYVPNK5YWMVIXzANBgkqhkiG\n9w0BAQsFAAOCAQEAsP1NR5ZgC7F5FvoU2CXla4FufdeYTk4bjjNoFJMqnvI5gcY6\nJDxP1Rl2YBHCpRxngtBFxw3Xoe8UnBKzZWsS5wLUYRloprhGVBSLM0vqJJOvP7M0\njt3SLuB7h0dG2GO9yQ4y10+xVWxP5Os9wcbQcRgTQKL3gHmCq4aQN1cqJSxyJ/ut\nlbfYlM/xBcfLMY5Leeas6y2FPCFIEONh1U9FJZlF3YkhPp+XD7aePtC4tJqsokMc\nP80IwPn54aDT9akRPsOteB6C+xSAz2TlfWaJ/l/x9yXK+HJrRhMartqyN511SeEd\nDpNcMW9qPTjJzBj+N3f0ZfvbTmhVSvV65ZEtAw==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhTCCAm2gAwIBAgIIfksp5LDEMMgwDQYJKoZIhvcNAQELBQAwUDFOMEwGA1UE\nAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX25vZGUtc3lzdGVt\nLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MB4XDTI1MTEwMjA3NTEyN1oXDTI4MTEw\nMTA3NTEyOFowUDFOMEwGA1UEAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9w\nZXJhdG9yX25vZGUtc3lzdGVtLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA402A+B9GaYmzBVLJEyrGvytCY0Vb\nvvIKHW2kxl9IFN3+LNZHW/mKeJfz/hBTm2bs+6uRCDlMSyONDlVUWVsuE+q0+F42\n0n3VyxWRSrDZ2ur5oNxmoBSsHRM+PxccQ6X3JTZyO397LHNOzxAs/Es+St8A8sbY\nGLc1lNqeOLvwAOT5d2PrFlYCAfXYs/UVIaio846jidKKN1f8Z6W5pgdAHuTXbyBQ\nLDQh6s43TBPhww1KszmcwURjEBDCT6KlhsM/quMd9XlMU0ZEAMf6XxsqvW8ia8C8\nF+RNAaGkwmiS4qZ+hJ4KIUnWM84j+bsyNBqlHFKi1e7LsKRyjnQ288FqIQIDAQAB\no2MwYTAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\n9O+dX0K7dzD13bshuR70sHbUAeAwHwYDVR0jBBgwFoAU9O+dX0K7dzD13bshuR70\nsHbUAeAwDQYJKoZIhvcNAQELBQADggEBADPRGSn1U/YwBUkpU7vTzsxqLaVJW21o\n6hV/W2IjjGNGqp6c2kSH/3ZGSEjNwIJqKRFpC2gmTPgAqnC4nDosOHx5F5HXTmrU\n1l2Ivcm1Ep+t/zBgNHjBi3yommx8n2iTTdakpQaq7/u1s0I4UiRqXydjoGXp7H1C\naAsmRlK8ovgEAWzItjeMBzy65wqiStPBK+XAIddqznHCxrRyH5xk3HcnyMG4GDWl\nrogdK8yTGCuZVCvGfe9Hwm8tyYrxDRNvRLTc0ssTonAwnR/7IzaVVc9Pp0svCynJ\n6VX3FGhgWwDVWeajj8yrXeR42az/Rr1TAAOZtJMW+4hIkaU0/+msvgw=\n-----END 
CERTIFICATE-----\n"},"metadata":{"creationTimestamp":"2025-11-02T07:51:50Z","managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ca-bundle.crt":{}},"f:metadata":{"f:annotations":{".":{},"f:openshift.io/owning-component":{}}}},"manager":"cluster-kube-apiserver-operator","operation":"Update","time":"2025-12-12T16:16:55Z"}],"resourceVersion":null,"uid":"e7bc0b2c-2af6-488e-bf6f-25875798350a"}} 2025-12-12T16:17:06.379021258+00:00 stderr F I1212 16:17:06.378969 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/client-ca -n openshift-route-controller-manager: 2025-12-12T16:17:06.379021258+00:00 stderr F cause by changes in data.ca-bundle.crt 2025-12-12T16:17:06.384758208+00:00 stderr F I1212 16:17:06.384322 1 apps.go:155] Deployment "openshift-controller-manager/controller-manager" changes: {"metadata":{"annotations":{"operator.openshift.io/spec-hash":"b99e2b39cebb3a9ed7c377ee2895b2959d12d7fd10fb13b4f3665b051f465dcd"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/client-ca":"38339"}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["openshift-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9506bdcf97d5200cf2cf4cdf110aebafdd141a24f6589bf1e1cfe27bb7fc1ed2","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/etc/pki/ca-trust/extracted/pem","name":"proxy-ca-bundles"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"configMap":{"items":[{"key":"ca-bundle.crt","path":"tls-ca-bundle.pem"}],"name":"openshift-global-ca"},"name":"proxy-ca-bundles"},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:17:06.393312357+00:00 stderr F I1212 16:17:06.393270 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:17:06.393890621+00:00 stderr F I1212 16:17:06.393824 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", 
FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/controller-manager -n openshift-controller-manager because it changed 2025-12-12T16:17:06.398964915+00:00 stderr F I1212 16:17:06.398884 1 apps.go:155] Deployment "openshift-route-controller-manager/route-controller-manager" changes: {"metadata":{"annotations":{"operator.openshift.io/spec-hash":"7af42e374cfd1d44c82aa1307ab00d1025062ce21dc0ac35212f1978b4c3cb27"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/client-ca":"38341"}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["route-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7a726c68cebc9b08edd734a8bae5150ae5950f7734fe9b9c2a6e0d06f21cc095","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"route-controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:17:06.405744661+00:00 stderr F I1212 16:17:06.405705 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:17:06.406001907+00:00 stderr F I1212 16:17:06.405949 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/route-controller-manager -n openshift-route-controller-manager because it changed 2025-12-12T16:17:06.423033143+00:00 stderr F I1212 16:17:06.422963 1 status_controller.go:229] clusteroperator/openshift-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:51:57Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:17:06Z","message":"Progressing: deployment/controller-manager: observed generation is 15, desired generation is 16\nRouteControllerManagerProgressing: deployment/route-controller-manager: observed generation is 13, desired generation is 14","reason":"RouteControllerManager_DesiredStateNotYetAchieved::_DesiredStateNotYetAchieved","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"All is 
well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:01Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:57Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:06.433345855+00:00 stderr F I1212 16:17:06.431465 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-controller-manager changed: Progressing changed from False to True ("Progressing: deployment/controller-manager: observed generation is 15, desired generation is 16\nRouteControllerManagerProgressing: deployment/route-controller-manager: observed generation is 13, desired generation is 14") 2025-12-12T16:17:26.335899821+00:00 stderr F I1212 16:17:26.334996 1 core.go:352] ConfigMap "openshift-controller-manager/config" changes: {"apiVersion":"v1","data":{"openshift-controller-manager.client-ca.configmap":"3nWptw=="},"kind":"ConfigMap","metadata":{"creationTimestamp":null,"managedFields":null,"resourceVersion":null,"uid":null}} 2025-12-12T16:17:26.336202750+00:00 stderr F I1212 16:17:26.336090 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/config -n openshift-controller-manager: 2025-12-12T16:17:26.336202750+00:00 stderr F cause by changes in data.openshift-controller-manager.client-ca.configmap 2025-12-12T16:17:26.351500669+00:00 stderr F I1212 16:17:26.351443 1 core.go:352] ConfigMap "openshift-route-controller-manager/config" changes: {"apiVersion":"v1","data":{"openshift-route-controller-manager.client-ca.configmap":"3nWptw=="},"kind":"ConfigMap","metadata":{"creationTimestamp":null,"managedFields":null,"resourceVersion":null,"uid":null}} 2025-12-12T16:17:26.352064615+00:00 stderr F I1212 16:17:26.352033 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/config -n openshift-route-controller-manager: 2025-12-12T16:17:26.352064615+00:00 stderr F cause by changes in data.openshift-route-controller-manager.client-ca.configmap 2025-12-12T16:17:26.363562255+00:00 stderr F I1212 16:17:26.363481 1 apps.go:155] Deployment "openshift-controller-manager/controller-manager" changes: 
{"metadata":{"annotations":{"operator.openshift.io/spec-hash":"1b895a377a3fa2fa67e9ca210bff915e23bcc815e7e140eb51fb62e4946fa678"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/config":"38503"}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["openshift-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9506bdcf97d5200cf2cf4cdf110aebafdd141a24f6589bf1e1cfe27bb7fc1ed2","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/etc/pki/ca-trust/extracted/pem","name":"proxy-ca-bundles"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"configMap":{"items":[{"key":"ca-bundle.crt","path":"tls-ca-bundle.pem"}],"name":"openshift-global-ca"},"name":"proxy-ca-bundles"},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:17:26.373871991+00:00 stderr F I1212 16:17:26.373804 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:17:26.375904059+00:00 stderr F I1212 16:17:26.375856 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/controller-manager -n openshift-controller-manager because it changed 2025-12-12T16:17:26.379426230+00:00 stderr F I1212 16:17:26.379363 1 apps.go:155] Deployment "openshift-route-controller-manager/route-controller-manager" changes: 
{"metadata":{"annotations":{"operator.openshift.io/spec-hash":"f7af6f7d16c5d3ac9bfed1d4788e659984e53f07ff44b1448d1a0e94961f112f"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/config":"38505"}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["route-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7a726c68cebc9b08edd734a8bae5150ae5950f7734fe9b9c2a6e0d06f21cc095","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"route-controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:17:26.389151079+00:00 stderr F I1212 16:17:26.387975 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:17:26.389151079+00:00 stderr F I1212 16:17:26.388959 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/route-controller-manager -n openshift-route-controller-manager because it changed 2025-12-12T16:17:26.416495794+00:00 stderr F I1212 16:17:26.416414 1 status_controller.go:229] clusteroperator/openshift-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:51:57Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:17:06Z","message":"Progressing: deployment/controller-manager: observed generation is 16, desired generation is 17\nProgressing: deployment/controller-manager: no available replica found\nRouteControllerManagerProgressing: deployment/route-controller-manager: observed generation is 14, desired generation is 15\nRouteControllerManagerProgressing: deployment/route-controller-manager: no available replica found","reason":"RouteControllerManager_DesiredStateNotYetAchieved::_DesiredStateNotYetAchieved","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:26Z","message":"Available: no openshift controller manager deployment pods available on any node\nRouteControllerManagerAvailable: no route controller manager deployment pods available on any 
node","reason":"RouteControllerManager_NoPodsAvailable::_NoPodsAvailable","status":"False","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:01Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:57Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:26.427290053+00:00 stderr F I1212 16:17:26.425794 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-controller-manager changed: Progressing message changed from "Progressing: deployment/controller-manager: observed generation is 15, desired generation is 16\nRouteControllerManagerProgressing: deployment/route-controller-manager: observed generation is 13, desired generation is 14" to "Progressing: deployment/controller-manager: observed generation is 16, desired generation is 17\nProgressing: deployment/controller-manager: no available replica found\nRouteControllerManagerProgressing: deployment/route-controller-manager: observed generation is 14, desired generation is 15\nRouteControllerManagerProgressing: deployment/route-controller-manager: no available replica found",Available changed from True to False ("Available: no openshift controller manager deployment pods available on any node\nRouteControllerManagerAvailable: no route controller manager deployment pods available on any node") 2025-12-12T16:17:46.321469217+00:00 stderr F I1212 16:17:46.318516 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.318468483 +0000 UTC))" 2025-12-12T16:17:46.321469217+00:00 stderr F I1212 16:17:46.318747 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.318724989 +0000 UTC))" 2025-12-12T16:17:46.321469217+00:00 stderr F I1212 16:17:46.318772 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.31875584 +0000 UTC))" 2025-12-12T16:17:46.321469217+00:00 stderr F I1212 16:17:46.318789 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.318778121 +0000 UTC))" 
2025-12-12T16:17:46.321469217+00:00 stderr F I1212 16:17:46.318808 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.318795061 +0000 UTC))" 2025-12-12T16:17:46.321469217+00:00 stderr F I1212 16:17:46.318829 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.318815992 +0000 UTC))" 2025-12-12T16:17:46.321469217+00:00 stderr F I1212 16:17:46.318852 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.318837292 +0000 UTC))" 2025-12-12T16:17:46.321469217+00:00 stderr F I1212 16:17:46.318875 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.318859103 +0000 UTC))" 2025-12-12T16:17:46.321469217+00:00 stderr F I1212 16:17:46.318898 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.318883933 +0000 UTC))" 2025-12-12T16:17:46.321469217+00:00 stderr F I1212 16:17:46.318923 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.318910524 +0000 UTC))" 2025-12-12T16:17:46.321469217+00:00 stderr F I1212 16:17:46.318949 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.318934045 +0000 UTC))" 2025-12-12T16:17:46.321469217+00:00 
stderr F I1212 16:17:46.319207 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-controller-manager-operator.svc\" [serving] validServingFor=[metrics.openshift-controller-manager-operator.svc,metrics.openshift-controller-manager-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:11 +0000 UTC to 2027-11-02 07:52:12 +0000 UTC (now=2025-12-12 16:17:46.31916748 +0000 UTC))" 2025-12-12T16:17:46.337054543+00:00 stderr F I1212 16:17:46.333044 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556206\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2028-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:17:46.332979352 +0000 UTC))" 2025-12-12T16:17:46.381779538+00:00 stderr P I1212 16:17:46.381430 1 core.go:352] ConfigMap "openshift-controller-manager/client-ca" changes: {"data":{"ca-bundle.crt":"-----BEGIN CERTIFICATE-----\nMIIDMDCCAhigAwIBAgIIIzF/30wVgUkwDQYJKoZIhvcNAQELBQAwNjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSAwHgYDVQQDExdhZG1pbi1rdWJlY29uZmlnLXNpZ25lcjAe\nFw0yNTExMDIwNzM0MDdaFw0zNTEwMzEwNzM0MDdaMDYxEjAQBgNVBAsTCW9wZW5z\naGlmdDEgMB4GA1UEAxMXYWRtaW4ta3ViZWNvbmZpZy1zaWduZXIwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDEaFvaxE/Ah0Q+4T67KuL6N5MoncMcfqtm\njKd8txx/b3t8o2WCAMF0IKDNMDDobraupmimcAQwOWen0WJzp3DqjVAIKabrG/DZ\nXqsx3xVHxhSvFOKEFQbiFu6HL0FvXs1bsMkm5YAcM/voHkGHefR+5YEgpgTuhZ6a\n9muG9cxUjlZ/BmMP3UwsgmRfxQ7TG3Ixf/mp++cLxi114b8ld8S4XtVuG//82BzB\nvk3J6+7tnRjli/AHSm0fx7ZvgRPY1b1IGSvGUMc6Qrc+nim/Ufd017TeFlkwKIRP\nPnUGuz0S/5Rz9XMoWJ/OHi/vB0eQs3pyqHBDPgTYCt1NZUO9nN7tAgMBAAGjQjBA\nMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBT9Y6Mp\nr1Yg0NUI8hFWKTlX1CJd6zANBgkqhkiG9w0BAQsFAAOCAQEAWD6f4r0mm7D7+uVn\nCs3w3zq0JIVBUtBTXYuSbgpnpmSVALRCSnRyguNx37SHU2qXVF7+hsaWYuO/B6iR\nZ5uZ6fkLEEFI5YN7q1HBEPBRaBFtJ7fSOBl9iSsHI11DX53+wRhJR319P3fZ18eq\nGwTdUHTy+L9ec1NjaJvOz2eJEVB3O2A9ySh+Rhdv75mFqTbNvxyf5fjw7OHDd5ti\nWPCT1UzyXUXpE8ET6HA59gQO3Ix/VPzZTpNWX1FAXDYpYFkK1t9Ifzjdqf3/P+uP\nvwMtUNixJg8RYhfRNZ4RbfULWU9Y0DpadRVX5WppGBTRNAAgmNGBYPPR7HuxVGx1\nReJ2Bg==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIIHuh6I9HTH+QwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM1WhcNMjYx\nMDI4MDgxNzM2WjAmMSQwIgYDVQQDDBtrdWJlLWNzci1zaWduZXJfQDE3NjIwNzE0\nNTUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb91UVWMxGg+Q7+o+5\ntt2z/Oq+7Mc1SNGJ4oF0Y99WM1Y7tIo5NOKWiIXE16rGuzY4+D9O53fh14ngzJm3\nWh3GjCGvr8/2W7J1BblXwuKwDuRl+OfJvyPtETUeME42Y9V6XP/B60iaaEYhm5t7\nTDf7TmmjEpqeWix43KqHnpcW9Zr8tM+tHLrGwcHnb+z3LvGkQA7mbXsaHiuryCVx\nudxQYNKWgtAkw3OOtuVyJ2gGD7iYVni1jg7nc9ZhQOYBoYRbAw3zh36CY50dZA89\nhDKYsVVourd9xfAdwSmrcgtsVo1X0ucCpEEUYKEz3/udgk+Dgf2hy3flIMcg9kcI\nS8xzAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0G\nA1UdDgQWBBSGMLijow+n1t5asMgXimhNhoMUvDAfBgNVHSMEGDAWgBQwDwrhoAwS\n6q2GA5CnoIQZVVim5jANBgkqhkiG9w0BAQsFAAOCAQEAhDT7ncsDiLG3QyVtffPp\nTjWscZowuHbVwUjwpZt5OxQFonSJxNCnqrj4MlIqqxraH5nnsrW5FqWyWWeMmXpz\nbFkiVhPFCVGjmRN9V1LXjHDc4lufe3ixq+MvgtU1YL/WJBmUxxw5dPifTT15IApJ\n6stLJ0QtHBNeEIIroUFpDB+O7OJYZ85ed6o6gT4n/v9nxBaLsZNpO2TzaWfI0Bst\nFEoPsfPKgBvwg9+2GijlP/VyKmP2gFdFm25PWeROU1VZzPrEhaOliO+/YXHt32YU\nJkxTF/smrLzxRRbb507cuvWEilzud93YbHmAhAj1h0CpDdzjxYDr5zRYJSP7BV+6\nUA==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDiTCCAnGgAwIBAgIIOsA0z3vOTSgwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM0WhcNMjcx\nMDIzMDgxNzM1WjBSMVAwTgYDVQQDDEdvcGVuc2hpZnQta3ViZS1jb250cm9sbGVy\nLW1hbmFnZXItb3BlcmF0b3JfY3NyLXNpZ25lci1zaWduZXJAMTc2MjA3MTQ1NTCC\nASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPY8aATxrjWpRKfnFU7BgmD1\nkNEst9RVY3fzGsARc5tF3x+zhl9BbsoJ9NAncCj+KnkAIYk4pJMU6BHIWIakpvqV\nLsnlPM45VU8ocwOWb5Z7g88YdqHGRWeZqZt/rPXmH/846iVGDstB0YQWgKKKK97X\nvjKMsq9ALSVj8gRWai7B7MVP/bZ4FgeqsYq6zIH9XKdPO8ev20qffrob4nmLGHdJ\nikAliwIy6nkVYrATKOS8t56votMD7xuFQ8rM6uQ0YVejA5rE/Tmq4/NsFpfFfkCP\nMU0vO2XwqCBV81XKD00SmW9MXIxbTq5lU9YjjE1sDFREtN4uZL4nEDa2+wnvIxEC\nAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O\nBBYEFDAPCuGgDBLqrYYDkKeghBlVWKbmMB8GA1UdIwQYMBaAFDAPCuGgDBLqrYYD\nkKeghBlVWKbmMA0GCSqGSIb3DQEBCwUAA4IBAQDS31/o3/4/9DBLFVcLvIwOQCK1\n0ZwJUa/8iK5GaMlMH6cNfvVZBrb4SwTVx0WXI5wrIXrlvYc+PtXL0MJeyIJmMpoU\nRyQAJZsh8cckeQjghV2Pf7wMfEbHudKTp8uoQDUBntkfNhJa4pPxmNWuhOrlvdB5\nEF/6IGviKAdSy0jcNpscvD3W0oSpCYRW0Ki/25LaFvIqP2Xy/cNJlWhzJWqZbK6k\nR9I4knhvIv/JYmppOVXw1rEvP+8Pn8UF2oSfFXcN5W+j4YIIhrAUnjAbaflpyX8k\nAUEKdtgVNNe7RlW9nQrhO8GqFJItitBbNtVuSkw9XIlQ0gc40E7mgB7Mjnbu\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDQjCCAiqgAwIBAgIICN23rtJ7PrYwDQYJKoZIhvcNAQELBQAwPzESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSkwJwYDVQQDEyBrdWJlLWFwaXNlcnZlci10by1rdWJlbGV0\nLXNpZ25lcjAeFw0yNTExMDIwNzM0MTFaFw0yNjExMDIwNzM0MTFaMD8xEjAQBgNV\nBAsTCW9wZW5zaGlmdDEpMCcGA1UEAxMga3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxl\ndC1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDzRlKEKNEy\nCyd+PPBkdzQZd3BeUb0b2qi1h8fbUiSNENrOpafKxxAHcr3a4KWB6sKhi28r14mF\nxcJ2Yb92/jLpkS15p629AUrGXxKuL8QtkBsY3dH0CqKMBedO6oxodva9Avc+3DMI\nvvYBJFy+4on/0JbM54fduvDmcEmhBtgRItK3Z87VbhemVPj7uDi9EV381uRMlmq4\ncgtD5mfS1yeRu0ut5IIr7/PN1G+93slLGQkHveqWlsFrDYd8Qm5PqirRBYy+18RC\nmEuNirFX3yPrEGwMvRlJyFia0RKuK69bFL2vduI5Wu7h/6VKP0/vEpEYqI6bJYoV\nbUjA2vqrV/1VAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTAD\nAQH/MB0GA1UdDgQWBBRNj9dsSIhfwKZ491Q/XZYt2lRWLzANBgkqhkiG9w0BAQsF\nAAOCAQEANmt7a5N+zch83D5+YeC4VQoFvpkHPKEYYT0EM/ykjch3453RmQWQEwkj\nVvstV1U16YpnEI61l1s347RHi64SwtlwV6tiNCpopDF2u3Bb+eiJqrlIC69EFnZE\n1426AVmZZq3sWu3eKB0HgT5u6B1rErSTl3c4hK4SiDsWWlVktBSN0BS4cD+urSAF\nc673/wLKCjq2I+9i3Wv2K7Ton3w5oaETE7lgQyImbKOVhJhFrPGu9fKXaeWlyXGY\nj7tz68vNTvecRynKrmzUJ9BBMfAXTrCowitzjBjanFitgXK4DnQMkb+8lv2Txb/n\nkB7RzcFDyIVd3g5XWBujR3fkQFWsNQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDlTCCAn2gAwIBAgIIJ6CFEe7+79cwDQYJKoZIhvcNAQELBQAwWDFWMFQGA1UE\nAwxNb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtYXBpc2Vy\ndmVyLXRvLWt1YmVsZXQtc2lnbmVyQDE3NjU1NTYyMDkwHhcNMjUxMjEyMTYxNjQ4\nWhcNMjYxMjEyMTYxNjQ5WjBYMVYwVAYDVQQDDE1vcGVuc2hpZnQta3ViZS1hcGlz\nZXJ2ZXItb3BlcmF0b3Jfa3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxldC1zaWduZXJA\nMTc2NTU1NjIwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFdeL5w\n1lJs3DvHYhLuNjrkSOLA4aXRwP85WxI5EZ4JKmSU1IxWlKucnS96ghUWuJudciUT\nGeDw1fYjUOvv+YnMhcM5avzLMmc/4JwRwmPsBNqeS6NSZsJYasHBQqT5lihBnZmU\nxRCSGpOAJqL/tWMsC3MxHG41AQunmbpk4RSZdPjfiJ3U8Gty9rnppq9GfZ9n+LxL\niIkodGqPe95J2csyNuLpDmOlhA5x7/miPLT7Wtp/hN/s2DSgpKQEWIHEsEVlfU+q\nH+GO/W1yWO+dDthJ2/yGn4ZQVe2riuw5uUcxjY27wkobhZ25/9brhqAKxweioGLf\nB32mvwrIarjuWKsCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQF\nMAMBAf8wHQYDVR0OBBYEFAl1BlYxjAq7rEsUpr186nCtx13/MB8GA1UdIwQYMBaA\nFAl1BlYxjAq7rEsUpr186nCtx13/MA0GCSqGSIb3DQEBCwUAA4IBAQCu3dv4kIwz\nu5DkLCS1z/I9QcxrEZmJs72LUWpri/eyvFxK5LrC4bU0d3LZXdjQUxLhrk1A8qD6\ne7QoqzRf9QdNRUiU3ilpwtANd6NwmIzd2PrcalBdoglrtDpBz5VAZ0j26YJAcpyn\nXbMpaDDOgz247mp4Ts1wHVjQa0H0bFAtuKbab+R+EFWCVc77MSRXVDiQndg/gtdZ\nEekPhH4kNhFT2c74uf32ICivfylG79g0sGNUVO0SPLup4psWmGf2pxI+HGftIT8p\ni0idY7Ij5JlIyQ8ypGbD7AqA1bRwcT6o8Al2iQw349XO694SJUNs10QGwuVKdHqL\nJC64gPwOl26J\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDNDCCAhygAwIBAgIILvKlXd2YBKIwDQYJKoZIhvcNAQELBQAwODESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVy\nMB4XDTI1MTEwMjA3MzQxMFoXDTI2MTEwMjA3MzQxMFowODESMBAGA1UECxMJb3Bl\nbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyMIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsOnFEdxnqqE7l+5NxN82TBBycC2F\n5cUboCcUVr211lubqmjCusnJzzz+6rKpO8uFqqGeu4C7rGpudOaK52IZhG0WP8b7\n0raQhnM4DV8t2SHDV4GhFUiuNE+b4FJrZ6jiljQo2g9ZeeCZgdmmBrIFHBXDFzEc\nA1RPScqOtvBbbH064Zd267gOmVPJnWmxDXo6X/RGYCm1YUS6FQ2WWpl707ComvgZ\nAvWGSA4H1sZirMQ+ug3bctkLb+SiXUzf+tLnGIHPeqDNfMrNUhxBl6dDhlMbUIzY\nrVxgDD8y3i7eg5i5HG8yntl8epgs2gn47wfavfjLqATBlciJPZY6Qv3BFQIDAQAB\no0IwQDAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\nWi6wmZ50AVdwxaiO3WjTvp82+sEwDQYJKoZIhvcNAQELBQADggEBAA4siMWwOGL5\nCiCxbiJsuKMbCLlFR6JaD43FWnUCZDoe7np7W76Oh4py4Zht7XlZKotcXrrfRYVO\ndVht66PCbSujy375p/B6c3isG4h/1cNSGDm1uhAkHXGZ88S2wSjKT5YJ/HUAkvyj\nadQgZeO7Q60YBSDE/67Ldq1zqvBrMF2k8pF49p1AdAtf4OSDzIaGGPUQJTFExA0E\n03xMlOPNhYZ8MgFT2XE6nRT74lCAfK9krAsZLtFuAtp/14t013PD0FqTTQRUmuSj\nO6pJKDTH8tZ3ieXxSzRR+j4p5hkHaehgQVyUbwiw8WVXkd6NcWR6yQcSeqIsTSCD\neMDdTmmKyuo=\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhzCCAm+gAwIBAgIID5n6gpWYc8YwDQYJKoZIhvcNAQELBQAwUTFPME0GA1UE\nAwxGb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtY29udHJv\nbC1wbGFuZS1zaWduZXJAMTc2NTU1NjIwOTAeFw0yNTEyMTIxNjE2NDhaFw0yNjAy\nMTAxNjE2NDlaMFExTzBNBgNVBAMMRm9wZW5zaGlmdC1rdWJlLWFwaXNlcnZlci1v\ncGVyYXRvcl9rdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyQDE3NjU1NTYyMDkwggEi\nMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtQLJD7hc4OcHS2GPLdn9xsKkM\nAqHIcDr2UgXYWTVShY348nGviBJGCBWyKqQPpM6u4zIbS4xHMeQ3fnVCRvti8Ggf\n7zKigxyoxwcx+f4z9L5fCO6RbScmwmaYchePaAMT/7sNBt8NIKyFfep/bExW2UPw\nrb0qUqAbmBse+Azrl0V+UHNexaG3VmsOEf 2025-12-12T16:17:46.381854730+00:00 stderr F CJORAVgIo8SNIAG8jHe6+r3BtnwTsC\n59znIuoxDrBL9cq82ZZGQ++jVx4AE6JM0Lj/UBPVsJ06+X/829a1PiHuqlY2oN/p\nm+xCLSQgi59nrDifJLQXNBjahDW8ccWnbkvkUPjO8/OzS2nJb6uXihez5m2xAgMB\nAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBR9PxK41FQkWasZMK1vVKm6UNhzHzAfBgNVHSMEGDAWgBR9PxK41FQkWasZMK1v\nVKm6UNhzHzANBgkqhkiG9w0BAQsFAAOCAQEAfHCzxMKk00cOsThQkkL2trlY5tl2\n9wXEd/62Fh8EoOhNmCIpyPYLWMDnD2GB2BS7J5S+zcqby2+7s8Etub0gpvbN2Ocq\nHss3f+WcAFm7t9hiQrJ4gPYSkwEQGCwJ3ueGIEmPyyrTQPTmzNYudSdXt1WSrTpO\nO1sKWdQro5M0V4U9Z6MWGnG4nIZljqHWgVkZXkluh6Rvshoen8rhUNa6VV3aMHcZ\n94dtvZRSye9RsOZwZygsG/HU2+GcnKKYvqkIo8FZVAYTyu3rlOlT9dmmZpxwukb6\nADjin/tgzt7r0FiU+Z9uYqI3SoFog9pv+mlqpuf3zLm+Q3DKwBpxaxn5nQ==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIFWzCCA0OgAwIBAgIUQLy5hoffN9SGXMo67f/i/5XEwyEwDQYJKoZIhvcNAQEL\nBQAwPTESMBAGA1UECwwJb3BlbnNoaWZ0MScwJQYDVQQDDB5hZG1pbi1rdWJlY29u\nZmlnLXNpZ25lci1jdXN0b20wHhcNMjUxMjEyMTYxNzQyWhcNMzUxMjEwMTYxNzQy\nWjA9MRIwEAYDVQQLDAlvcGVuc2hpZnQxJzAlBgNVBAMMHmFkbWluLWt1YmVjb25m\naWctc2lnbmVyLWN1c3RvbTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB\nAJRtnlRieMPhCvStW7Uo/SGT7vUyHxVn8BYwaDaVxTUKO0kZRGBZ0gHPv8jnIZ0u\n9xNf88H2LEduWCso2Q+si1eaGR24oL/nxabCArbO4hHFtwsF4LFbA2v4iknUaW5d\nQWb4bl072Tx3O0Wly+wiVv7H93PVCyuLWmBeH80F+Cwt80s3RbtDthOHKjYK/a5W\naFRjDBzvmoWoK+/GxYjovUmgZCpLvElE9a2TmH1hExyn1ST15L8ESyah0e0vSZFW\n2odSnLavfmUw2HbS4lpy0peAGSa3R52E+rvF16qGuAGHIja09i/0GN/TM/aTrFRu\noyDcrAWgxNwU/ungVw8jX1+ReTC0ZkohS78CfvE5jhM6AcRCbMisGAFibd7VHbA3\nUot5wB7w/fAs7YPatGUdRNVYE4vQxfTYTRktYiSLSsPOEuHaqA6lBHLklssMGGAt\nPrFydiB76a4MaZrHgBNLVUpVT5txScHOmqTrU4m/Ix+SxgARUfhnN4QizhWNMhi1\nKxmHi9OQye+tTxpBLBbvagZFoQOkjMfHPABQkaORgdBwKn73V2psrwZpoZh5jDWZ\ny9RB3N/K+vsQvCmhjnX4rKHhcX/tLVUDcvN1XNLbPFJUoVpFqtSZb6yaQlHze+zL\nNSw9SUfOHN5+VuDW9O0ho/cDIV2w1e05pPZe7+SiSwEZAgMBAAGjUzBRMB0GA1Ud\nDgQWBBSLpZUyMU6+pBaqwQ1fQgx26OkbpjAfBgNVHSMEGDAWgBSLpZUyMU6+pBaq\nwQ1fQgx26OkbpjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4ICAQB8\nS/trc5KgMr4wUI8dKDSGKElBdqbMWIY0xPt1ANYj36eoA9Fj8k2qX+vxqSRw5NIU\nPuLASD2jk5vM7Chn29XLnEO0uZlFrdX6ywCGHqU00X6AKPBOaflmdiUo5/GvY0ll\nK6Rk+Y+jueATGDVseDzArN6DW22ZJ0M/7uwmJjW4VRmK0gPmz0PXBGrNXuR/e00n\nrXGvq3CqUw+g9lnR1JI6DSRk3iu5SgK4syO+B6P7Z7Cxuj2YDyYJEv4uTPeti0E9\nVn0UH3h0/GzDFu/ghy4bEhb/CX74FcoBx0geoZu3YYhSjL4gmRLnTwmL5Yg4d0R5\nKsFg3Ht5VBh757nG+aMBJb+aheFuu0z62XOQlpJC4+qx1e4cYU+jInKsnZYSptT7\nMmtGFjud4bmlSyDgJ0s+gfu+7PBeoowJi9sQSxlA4HFStbc/9Qh8eUxeZG1nGIYa\n+ACyCDzlVAt2+EOB8l+mxTFoBD1UcYG7E+2wepOpkz92zUqS5vlQOeZg1KlfHOcz\nAzrBCHw3+xYQN62WaNX97Beewz2y05I/CvSZfIW0U4CReywukh1V+66m5WWvA2Qo\nqXKlRZjy/rcLHXG557FT6EnQDoh2R+FEko6O1tJ4urkAD1/E1fY1U7qdkr9uvMlo\nvOrzaWzKi110hAolc+gWzOnIcG3nf1y7EMIVbBbHMw==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDSDCCAjCgAwIBAgIIV//3Qv2OxM4wDQYJKoZIhvcNAQELBQAwQjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSwwKgYDVQQDEyNrdWJlbGV0LWJvb3RzdHJhcC1rdWJlY29u\nZmlnLXNpZ25lcjAeFw0yNTExMDIwNzM0MDhaFw0zNTEwMzEwNzM0MDhaMEIxEjAQ\nBgNVBAsTCW9wZW5zaGlmdDEsMCoGA1UEAxMja3ViZWxldC1ib290c3RyYXAta3Vi\nZWNvbmZpZy1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc\nZBn9WZ5wLPck6+g3f4p0NBGyE6LaXnWavXx9m0sVdTVQooknndmKufeYkXGZ5Lb+\nfbMAp/6swgSJ1DjdBj06rCqJEZfdZl3uZoD/Th4ha2Phl12bXaNYLiuOu5BOZ3UW\n08y1Wab9Y9zc0o4Z71pHH4o9TH3QPNT6BqAz4kkgD6t1r/R7E7lrZbx+7e+0JBAW\nRgufaFOX1AYU5B4+pSM21eJY7oP1P9I4DMeeJW39opCCHAuUQgHpOV1YPtRqEPJ4\n9matas8qm5qIMIPbGEGFckSJqgny9YCfHaLezJtZMIHJgz5LW4H91gQCGvfSbLtH\nYxYO/PcTXQCnDYNwqf29AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBSCg7Y8QkzypoNAqYVPNK5YWMVIXzANBgkqhkiG\n9w0BAQsFAAOCAQEAsP1NR5ZgC7F5FvoU2CXla4FufdeYTk4bjjNoFJMqnvI5gcY6\nJDxP1Rl2YBHCpRxngtBFxw3Xoe8UnBKzZWsS5wLUYRloprhGVBSLM0vqJJOvP7M0\njt3SLuB7h0dG2GO9yQ4y10+xVWxP5Os9wcbQcRgTQKL3gHmCq4aQN1cqJSxyJ/ut\nlbfYlM/xBcfLMY5Leeas6y2FPCFIEONh1U9FJZlF3YkhPp+XD7aePtC4tJqsokMc\nP80IwPn54aDT9akRPsOteB6C+xSAz2TlfWaJ/l/x9yXK+HJrRhMartqyN511SeEd\nDpNcMW9qPTjJzBj+N3f0ZfvbTmhVSvV65ZEtAw==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDhTCCAm2gAwIBAgIIfksp5LDEMMgwDQYJKoZIhvcNAQELBQAwUDFOMEwGA1UE\nAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX25vZGUtc3lzdGVt\nLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MB4XDTI1MTEwMjA3NTEyN1oXDTI4MTEw\nMTA3NTEyOFowUDFOMEwGA1UEAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9w\nZXJhdG9yX25vZGUtc3lzdGVtLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA402A+B9GaYmzBVLJEyrGvytCY0Vb\nvvIKHW2kxl9IFN3+LNZHW/mKeJfz/hBTm2bs+6uRCDlMSyONDlVUWVsuE+q0+F42\n0n3VyxWRSrDZ2ur5oNxmoBSsHRM+PxccQ6X3JTZyO397LHNOzxAs/Es+St8A8sbY\nGLc1lNqeOLvwAOT5d2PrFlYCAfXYs/UVIaio846jidKKN1f8Z6W5pgdAHuTXbyBQ\nLDQh6s43TBPhww1KszmcwURjEBDCT6KlhsM/quMd9XlMU0ZEAMf6XxsqvW8ia8C8\nF+RNAaGkwmiS4qZ+hJ4KIUnWM84j+bsyNBqlHFKi1e7LsKRyjnQ288FqIQIDAQAB\no2MwYTAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\n9O+dX0K7dzD13bshuR70sHbUAeAwHwYDVR0jBBgwFoAU9O+dX0K7dzD13bshuR70\nsHbUAeAwDQYJKoZIhvcNAQELBQADggEBADPRGSn1U/YwBUkpU7vTzsxqLaVJW21o\n6hV/W2IjjGNGqp6c2kSH/3ZGSEjNwIJqKRFpC2gmTPgAqnC4nDosOHx5F5HXTmrU\n1l2Ivcm1Ep+t/zBgNHjBi3yommx8n2iTTdakpQaq7/u1s0I4UiRqXydjoGXp7H1C\naAsmRlK8ovgEAWzItjeMBzy65wqiStPBK+XAIddqznHCxrRyH5xk3HcnyMG4GDWl\nrogdK8yTGCuZVCvGfe9Hwm8tyYrxDRNvRLTc0ssTonAwnR/7IzaVVc9Pp0svCynJ\n6VX3FGhgWwDVWeajj8yrXeR42az/Rr1TAAOZtJMW+4hIkaU0/+msvgw=\n-----END CERTIFICATE-----\n"},"metadata":{"creationTimestamp":"2025-11-02T07:51:50Z","managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ca-bundle.crt":{}},"f:metadata":{"f:annotations":{".":{},"f:openshift.io/owning-component":{}}}},"manager":"cluster-kube-apiserver-operator","operation":"Update","time":"2025-12-12T16:17:46Z"}],"resourceVersion":null,"uid":"e7bc0b2c-2af6-488e-bf6f-25875798350a"}} 2025-12-12T16:17:46.382356673+00:00 stderr F I1212 16:17:46.382235 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/client-ca -n openshift-controller-manager: 2025-12-12T16:17:46.382356673+00:00 stderr F cause by changes in data.ca-bundle.crt 2025-12-12T16:17:46.415312587+00:00 stderr P I1212 16:17:46.412819 1 core.go:352] ConfigMap "openshift-route-controller-manager/client-ca" changes: {"data":{"ca-bundle.crt":"-----BEGIN 
CERTIFICATE-----\nMIIDMDCCAhigAwIBAgIIIzF/30wVgUkwDQYJKoZIhvcNAQELBQAwNjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSAwHgYDVQQDExdhZG1pbi1rdWJlY29uZmlnLXNpZ25lcjAe\nFw0yNTExMDIwNzM0MDdaFw0zNTEwMzEwNzM0MDdaMDYxEjAQBgNVBAsTCW9wZW5z\naGlmdDEgMB4GA1UEAxMXYWRtaW4ta3ViZWNvbmZpZy1zaWduZXIwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDEaFvaxE/Ah0Q+4T67KuL6N5MoncMcfqtm\njKd8txx/b3t8o2WCAMF0IKDNMDDobraupmimcAQwOWen0WJzp3DqjVAIKabrG/DZ\nXqsx3xVHxhSvFOKEFQbiFu6HL0FvXs1bsMkm5YAcM/voHkGHefR+5YEgpgTuhZ6a\n9muG9cxUjlZ/BmMP3UwsgmRfxQ7TG3Ixf/mp++cLxi114b8ld8S4XtVuG//82BzB\nvk3J6+7tnRjli/AHSm0fx7ZvgRPY1b1IGSvGUMc6Qrc+nim/Ufd017TeFlkwKIRP\nPnUGuz0S/5Rz9XMoWJ/OHi/vB0eQs3pyqHBDPgTYCt1NZUO9nN7tAgMBAAGjQjBA\nMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBT9Y6Mp\nr1Yg0NUI8hFWKTlX1CJd6zANBgkqhkiG9w0BAQsFAAOCAQEAWD6f4r0mm7D7+uVn\nCs3w3zq0JIVBUtBTXYuSbgpnpmSVALRCSnRyguNx37SHU2qXVF7+hsaWYuO/B6iR\nZ5uZ6fkLEEFI5YN7q1HBEPBRaBFtJ7fSOBl9iSsHI11DX53+wRhJR319P3fZ18eq\nGwTdUHTy+L9ec1NjaJvOz2eJEVB3O2A9ySh+Rhdv75mFqTbNvxyf5fjw7OHDd5ti\nWPCT1UzyXUXpE8ET6HA59gQO3Ix/VPzZTpNWX1FAXDYpYFkK1t9Ifzjdqf3/P+uP\nvwMtUNixJg8RYhfRNZ4RbfULWU9Y0DpadRVX5WppGBTRNAAgmNGBYPPR7HuxVGx1\nReJ2Bg==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIIHuh6I9HTH+QwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM1WhcNMjYx\nMDI4MDgxNzM2WjAmMSQwIgYDVQQDDBtrdWJlLWNzci1zaWduZXJfQDE3NjIwNzE0\nNTUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb91UVWMxGg+Q7+o+5\ntt2z/Oq+7Mc1SNGJ4oF0Y99WM1Y7tIo5NOKWiIXE16rGuzY4+D9O53fh14ngzJm3\nWh3GjCGvr8/2W7J1BblXwuKwDuRl+OfJvyPtETUeME42Y9V6XP/B60iaaEYhm5t7\nTDf7TmmjEpqeWix43KqHnpcW9Zr8tM+tHLrGwcHnb+z3LvGkQA7mbXsaHiuryCVx\nudxQYNKWgtAkw3OOtuVyJ2gGD7iYVni1jg7nc9ZhQOYBoYRbAw3zh36CY50dZA89\nhDKYsVVourd9xfAdwSmrcgtsVo1X0ucCpEEUYKEz3/udgk+Dgf2hy3flIMcg9kcI\nS8xzAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0G\nA1UdDgQWBBSGMLijow+n1t5asMgXimhNhoMUvDAfBgNVHSMEGDAWgBQwDwrhoAwS\n6q2GA5CnoIQZVVim5jANBgkqhkiG9w0BAQsFAAOCAQEAhDT7ncsDiLG3QyVtffPp\nTjWscZowuHbVwUjwpZt5OxQFonSJxNCnqrj4MlIqqxraH5nnsrW5FqWyWWeMmXpz\nbFkiVhPFCVGjmRN9V1LXjHDc4lufe3ixq+MvgtU1YL/WJBmUxxw5dPifTT15IApJ\n6stLJ0QtHBNeEIIroUFpDB+O7OJYZ85ed6o6gT4n/v9nxBaLsZNpO2TzaWfI0Bst\nFEoPsfPKgBvwg9+2GijlP/VyKmP2gFdFm25PWeROU1VZzPrEhaOliO+/YXHt32YU\nJkxTF/smrLzxRRbb507cuvWEilzud93YbHmAhAj1h0CpDdzjxYDr5zRYJSP7BV+6\nUA==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDiTCCAnGgAwIBAgIIOsA0z3vOTSgwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM0WhcNMjcx\nMDIzMDgxNzM1WjBSMVAwTgYDVQQDDEdvcGVuc2hpZnQta3ViZS1jb250cm9sbGVy\nLW1hbmFnZXItb3BlcmF0b3JfY3NyLXNpZ25lci1zaWduZXJAMTc2MjA3MTQ1NTCC\nASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPY8aATxrjWpRKfnFU7BgmD1\nkNEst9RVY3fzGsARc5tF3x+zhl9BbsoJ9NAncCj+KnkAIYk4pJMU6BHIWIakpvqV\nLsnlPM45VU8ocwOWb5Z7g88YdqHGRWeZqZt/rPXmH/846iVGDstB0YQWgKKKK97X\nvjKMsq9ALSVj8gRWai7B7MVP/bZ4FgeqsYq6zIH9XKdPO8ev20qffrob4nmLGHdJ\nikAliwIy6nkVYrATKOS8t56votMD7xuFQ8rM6uQ0YVejA5rE/Tmq4/NsFpfFfkCP\nMU0vO2XwqCBV81XKD00SmW9MXIxbTq5lU9YjjE1sDFREtN4uZL4nEDa2+wnvIxEC\nAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O\nBBYEFDAPCuGgDBLqrYYDkKeghBlVWKbmMB8GA1UdIwQYMBaAFDAPCuGgDBLqrYYD\nkKeghBlVWKbmMA0GCSqGSIb3DQEBCwUAA4IBAQDS31/o3/4/9DBLFVcLvIwOQCK1\n0ZwJUa/8iK5GaMlMH6cNfvVZBrb4SwTVx0WXI5wrIXrlvYc+PtXL0MJeyIJmMpoU\nRyQAJZsh8cckeQjghV2Pf7wMfEbHudKTp8uoQDUBntkfNhJa4pPxmNWuhOrlvdB5\nEF/6IGviKAdSy0jcNpscvD3W0oSpCYRW0Ki/25LaFvIqP2Xy/cNJlWhzJWqZbK6k\nR9I4knhvIv/JYmppOVXw1rEvP+8Pn8UF2oSfFXcN5W+j4YIIhrAUnjAbaflpyX8k\nAUEKdtgVNNe7RlW9nQrhO8GqFJItitBbNtVuSkw9XIlQ0gc40E7mgB7Mjnbu\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDQjCCAiqgAwIBAgIICN23rtJ7PrYwDQYJKoZIhvcNAQELBQAwPzESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSkwJwYDVQQDEyBrdWJlLWFwaXNlcnZlci10by1rdWJlbGV0\nLXNpZ25lcjAeFw0yNTExMDIwNzM0MTFaFw0yNjExMDIwNzM0MTFaMD8xEjAQBgNV\nBAsTCW9wZW5zaGlmdDEpMCcGA1UEAxMga3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxl\ndC1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDzRlKEKNEy\nCyd+PPBkdzQZd3BeUb0b2qi1h8fbUiSNENrOpafKxxAHcr3a4KWB6sKhi28r14mF\nxcJ2Yb92/jLpkS15p629AUrGXxKuL8QtkBsY3dH0CqKMBedO6oxodva9Avc+3DMI\nvvYBJFy+4on/0JbM54fduvDmcEmhBtgRItK3Z87VbhemVPj7uDi9EV381uRMlmq4\ncgtD5mfS1yeRu0ut5IIr7/PN1G+93slLGQkHveqWlsFrDYd8Qm5PqirRBYy+18RC\nmEuNirFX3yPrEGwMvRlJyFia0RKuK69bFL2vduI5Wu7h/6VKP0/vEpEYqI6bJYoV\nbUjA2vqrV/1VAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTAD\nAQH/MB0GA1UdDgQWBBRNj9dsSIhfwKZ491Q/XZYt2lRWLzANBgkqhkiG9w0BAQsF\nAAOCAQEANmt7a5N+zch83D5+YeC4VQoFvpkHPKEYYT0EM/ykjch3453RmQWQEwkj\nVvstV1U16YpnEI61l1s347RHi64SwtlwV6tiNCpopDF2u3Bb+eiJqrlIC69EFnZE\n1426AVmZZq3sWu3eKB0HgT5u6B1rErSTl3c4hK4SiDsWWlVktBSN0BS4cD+urSAF\nc673/wLKCjq2I+9i3Wv2K7Ton3w5oaETE7lgQyImbKOVhJhFrPGu9fKXaeWlyXGY\nj7tz68vNTvecRynKrmzUJ9BBMfAXTrCowitzjBjanFitgXK4DnQMkb+8lv2Txb/n\nkB7RzcFDyIVd3g5XWBujR3fkQFWsNQ==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDlTCCAn2gAwIBAgIIJ6CFEe7+79cwDQYJKoZIhvcNAQELBQAwWDFWMFQGA1UE\nAwxNb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtYXBpc2Vy\ndmVyLXRvLWt1YmVsZXQtc2lnbmVyQDE3NjU1NTYyMDkwHhcNMjUxMjEyMTYxNjQ4\nWhcNMjYxMjEyMTYxNjQ5WjBYMVYwVAYDVQQDDE1vcGVuc2hpZnQta3ViZS1hcGlz\nZXJ2ZXItb3BlcmF0b3Jfa3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxldC1zaWduZXJA\nMTc2NTU1NjIwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFdeL5w\n1lJs3DvHYhLuNjrkSOLA4aXRwP85WxI5EZ4JKmSU1IxWlKucnS96ghUWuJudciUT\nGeDw1fYjUOvv+YnMhcM5avzLMmc/4JwRwmPsBNqeS6NSZsJYasHBQqT5lihBnZmU\nxRCSGpOAJqL/tWMsC3MxHG41AQunmbpk4RSZdPjfiJ3U8Gty9rnppq9GfZ9n+LxL\niIkodGqPe95J2csyNuLpDmOlhA5x7/miPLT7Wtp/hN/s2DSgpKQEWIHEsEVlfU+q\nH+GO/W1yWO+dDthJ2/yGn4ZQVe2riuw5uUcxjY27wkobhZ25/9brhqAKxweioGLf\nB32mvwrIarjuWKsCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQF\nMAMBAf8wHQYDVR0OBBYEFAl1BlYxjAq7rEsUpr186nCtx13/MB8GA1UdIwQYMBaA\nFAl1BlYxjAq7rEsUpr186nCtx13/MA0GCSqGSIb3DQEBCwUAA4IBAQCu3dv4kIwz\nu5DkLCS1z/I9QcxrEZmJs72LUWpri/eyvFxK5LrC4bU0d3LZXdjQUxLhrk1A8qD6\ne7QoqzRf9QdNRUiU3ilpwtANd6NwmIzd2PrcalBdoglrtDpBz5VAZ0j26YJAcpyn\nXbMpaDDOgz247mp4Ts1wHVjQa0H0bFAtuKbab+R+EFWCVc77MSRXVDiQndg/gtdZ\nEekPhH4kNhFT2c74uf32ICivfylG79g0sGNUVO0SPLup4psWmGf2pxI+HGftIT8p\ni0idY7Ij5JlIyQ8ypGbD7AqA1bRwcT6o8Al2iQw349XO694SJUNs10QGwuVKdHqL\nJC64gPwOl26J\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDNDCCAhygAwIBAgIILvKlXd2YBKIwDQYJKoZIhvcNAQELBQAwODESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVy\nMB4XDTI1MTEwMjA3MzQxMFoXDTI2MTEwMjA3MzQxMFowODESMBAGA1UECxMJb3Bl\nbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyMIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsOnFEdxnqqE7l+5NxN82TBBycC2F\n5cUboCcUVr211lubqmjCusnJzzz+6rKpO8uFqqGeu4C7rGpudOaK52IZhG0WP8b7\n0raQhnM4DV8t2SHDV4GhFUiuNE+b4FJrZ6jiljQo2g9ZeeCZgdmmBrIFHBXDFzEc\nA1RPScqOtvBbbH064Zd267gOmVPJnWmxDXo6X/RGYCm1YUS6FQ2WWpl707ComvgZ\nAvWGSA4H1sZirMQ+ug3bctkLb+SiXUzf+tLnGIHPeqDNfMrNUhxBl6dDhlMbUIzY\nrVxgDD8y3i7eg5i5HG8yntl8epgs2gn47wfavfjLqATBlciJPZY6Qv3BFQIDAQAB\no0IwQDAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\nWi6wmZ50AVdwxaiO3WjTvp82+sEwDQYJKoZIhvcNAQELBQADggEBAA4siMWwOGL5\nCiCxbiJsuKMbCLlFR6JaD43FWnUCZDoe7np7W76Oh4py4Zht7XlZKotcXrrfRYVO\ndVht66PCbSujy375p/B6c3isG4h/1cNSGDm1uhAkHXGZ88S2wSjKT5YJ/HUAkvyj\nadQgZeO7Q60YBSDE/67Ldq1zqvBrMF2k8pF49p1AdAtf4OSDzIaGGPUQJTFExA0E\n03xMlOPNhYZ8MgFT2XE6nRT74lCAfK9krAsZLtFuAtp/14t013PD0FqTTQRUmuSj\nO6pJKDTH8tZ3ieXxSzRR+j4p5hkHaehgQVyUbwiw8WVXkd6NcWR6yQcSeqIsTSCD\neMDdTmmKyuo=\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhzCCAm+gAwIBAgIID5n6gpWYc8YwDQYJKoZIhvcNAQELBQAwUTFPME0GA1UE\nAwxGb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtY29udHJv\nbC1wbGFuZS1zaWduZXJAMTc2NTU1NjIwOTAeFw0yNTEyMTIxNjE2NDhaFw0yNjAy\nMTAxNjE2NDlaMFExTzBNBgNVBAMMRm9wZW5zaGlmdC1rdWJlLWFwaXNlcnZlci1v\ncGVyYXRvcl9rdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyQDE3NjU1NTYyMDkwggEi\nMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtQLJD7hc4OcHS2GPLdn9xsKkM\nAqHIcDr2UgXYWTVShY348nGviBJGCBWyKqQPpM6u4zIbS4xHMeQ3fnVCRvti8Ggf\n7zKigxyoxwcx+f4z9L5fCO6RbScmwmaYchePaAMT/7sNBt8NIKyFfep/bExW2UPw\nrb0qUqAbmBse+Azrl0V+UHNexaG3 2025-12-12T16:17:46.415380699+00:00 stderr F 
VmsOEfCJORAVgIo8SNIAG8jHe6+r3BtnwTsC\n59znIuoxDrBL9cq82ZZGQ++jVx4AE6JM0Lj/UBPVsJ06+X/829a1PiHuqlY2oN/p\nm+xCLSQgi59nrDifJLQXNBjahDW8ccWnbkvkUPjO8/OzS2nJb6uXihez5m2xAgMB\nAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBR9PxK41FQkWasZMK1vVKm6UNhzHzAfBgNVHSMEGDAWgBR9PxK41FQkWasZMK1v\nVKm6UNhzHzANBgkqhkiG9w0BAQsFAAOCAQEAfHCzxMKk00cOsThQkkL2trlY5tl2\n9wXEd/62Fh8EoOhNmCIpyPYLWMDnD2GB2BS7J5S+zcqby2+7s8Etub0gpvbN2Ocq\nHss3f+WcAFm7t9hiQrJ4gPYSkwEQGCwJ3ueGIEmPyyrTQPTmzNYudSdXt1WSrTpO\nO1sKWdQro5M0V4U9Z6MWGnG4nIZljqHWgVkZXkluh6Rvshoen8rhUNa6VV3aMHcZ\n94dtvZRSye9RsOZwZygsG/HU2+GcnKKYvqkIo8FZVAYTyu3rlOlT9dmmZpxwukb6\nADjin/tgzt7r0FiU+Z9uYqI3SoFog9pv+mlqpuf3zLm+Q3DKwBpxaxn5nQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIFWzCCA0OgAwIBAgIUQLy5hoffN9SGXMo67f/i/5XEwyEwDQYJKoZIhvcNAQEL\nBQAwPTESMBAGA1UECwwJb3BlbnNoaWZ0MScwJQYDVQQDDB5hZG1pbi1rdWJlY29u\nZmlnLXNpZ25lci1jdXN0b20wHhcNMjUxMjEyMTYxNzQyWhcNMzUxMjEwMTYxNzQy\nWjA9MRIwEAYDVQQLDAlvcGVuc2hpZnQxJzAlBgNVBAMMHmFkbWluLWt1YmVjb25m\naWctc2lnbmVyLWN1c3RvbTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB\nAJRtnlRieMPhCvStW7Uo/SGT7vUyHxVn8BYwaDaVxTUKO0kZRGBZ0gHPv8jnIZ0u\n9xNf88H2LEduWCso2Q+si1eaGR24oL/nxabCArbO4hHFtwsF4LFbA2v4iknUaW5d\nQWb4bl072Tx3O0Wly+wiVv7H93PVCyuLWmBeH80F+Cwt80s3RbtDthOHKjYK/a5W\naFRjDBzvmoWoK+/GxYjovUmgZCpLvElE9a2TmH1hExyn1ST15L8ESyah0e0vSZFW\n2odSnLavfmUw2HbS4lpy0peAGSa3R52E+rvF16qGuAGHIja09i/0GN/TM/aTrFRu\noyDcrAWgxNwU/ungVw8jX1+ReTC0ZkohS78CfvE5jhM6AcRCbMisGAFibd7VHbA3\nUot5wB7w/fAs7YPatGUdRNVYE4vQxfTYTRktYiSLSsPOEuHaqA6lBHLklssMGGAt\nPrFydiB76a4MaZrHgBNLVUpVT5txScHOmqTrU4m/Ix+SxgARUfhnN4QizhWNMhi1\nKxmHi9OQye+tTxpBLBbvagZFoQOkjMfHPABQkaORgdBwKn73V2psrwZpoZh5jDWZ\ny9RB3N/K+vsQvCmhjnX4rKHhcX/tLVUDcvN1XNLbPFJUoVpFqtSZb6yaQlHze+zL\nNSw9SUfOHN5+VuDW9O0ho/cDIV2w1e05pPZe7+SiSwEZAgMBAAGjUzBRMB0GA1Ud\nDgQWBBSLpZUyMU6+pBaqwQ1fQgx26OkbpjAfBgNVHSMEGDAWgBSLpZUyMU6+pBaq\nwQ1fQgx26OkbpjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4ICAQB8\nS/trc5KgMr4wUI8dKDSGKElBdqbMWIY0xPt1ANYj36eoA9Fj8k2qX+vxqSRw5NIU\nPuLASD2jk5vM7Chn29XLnEO0uZlFrdX6ywCGHqU00X6AKPBOaflmdiUo5/GvY0ll\nK6Rk+Y+jueATGDVseDzArN6DW22ZJ0M/7uwmJjW4VRmK0gPmz0PXBGrNXuR/e00n\nrXGvq3CqUw+g9lnR1JI6DSRk3iu5SgK4syO+B6P7Z7Cxuj2YDyYJEv4uTPeti0E9\nVn0UH3h0/GzDFu/ghy4bEhb/CX74FcoBx0geoZu3YYhSjL4gmRLnTwmL5Yg4d0R5\nKsFg3Ht5VBh757nG+aMBJb+aheFuu0z62XOQlpJC4+qx1e4cYU+jInKsnZYSptT7\nMmtGFjud4bmlSyDgJ0s+gfu+7PBeoowJi9sQSxlA4HFStbc/9Qh8eUxeZG1nGIYa\n+ACyCDzlVAt2+EOB8l+mxTFoBD1UcYG7E+2wepOpkz92zUqS5vlQOeZg1KlfHOcz\nAzrBCHw3+xYQN62WaNX97Beewz2y05I/CvSZfIW0U4CReywukh1V+66m5WWvA2Qo\nqXKlRZjy/rcLHXG557FT6EnQDoh2R+FEko6O1tJ4urkAD1/E1fY1U7qdkr9uvMlo\nvOrzaWzKi110hAolc+gWzOnIcG3nf1y7EMIVbBbHMw==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDSDCCAjCgAwIBAgIIV//3Qv2OxM4wDQYJKoZIhvcNAQELBQAwQjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSwwKgYDVQQDEyNrdWJlbGV0LWJvb3RzdHJhcC1rdWJlY29u\nZmlnLXNpZ25lcjAeFw0yNTExMDIwNzM0MDhaFw0zNTEwMzEwNzM0MDhaMEIxEjAQ\nBgNVBAsTCW9wZW5zaGlmdDEsMCoGA1UEAxMja3ViZWxldC1ib290c3RyYXAta3Vi\nZWNvbmZpZy1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc\nZBn9WZ5wLPck6+g3f4p0NBGyE6LaXnWavXx9m0sVdTVQooknndmKufeYkXGZ5Lb+\nfbMAp/6swgSJ1DjdBj06rCqJEZfdZl3uZoD/Th4ha2Phl12bXaNYLiuOu5BOZ3UW\n08y1Wab9Y9zc0o4Z71pHH4o9TH3QPNT6BqAz4kkgD6t1r/R7E7lrZbx+7e+0JBAW\nRgufaFOX1AYU5B4+pSM21eJY7oP1P9I4DMeeJW39opCCHAuUQgHpOV1YPtRqEPJ4\n9matas8qm5qIMIPbGEGFckSJqgny9YCfHaLezJtZMIHJgz5LW4H91gQCGvfSbLtH\nYxYO/PcTXQCnDYNwqf29AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBSCg7Y8QkzypoNAqYVPNK5YWMVIXzANBgkqhkiG\n9w0BAQsFAAOCAQEAsP1NR5ZgC7F5FvoU2CXla4FufdeYTk4bjjNoFJMqnvI5gcY6\nJDxP1Rl2YBHCpRxngtBFxw3Xoe8UnBKzZWsS5wLUYRloprhGVBSLM0vqJJOvP7M0\njt3SLuB7h0dG2GO9yQ4y10+xVWxP5Os9wcbQcRgTQKL3gHmCq4aQN1cqJSxyJ/ut\nlbfYlM/xBcfLMY5Leeas6y2FPCFIEONh1U9FJZlF3YkhPp+XD7aePtC4tJqsokMc\nP80IwPn54aDT9akRPsOteB6C+xSAz2TlfWaJ/l/x9yXK+HJrRhMartqyN511SeEd\nDpNcMW9qPTjJzBj+N3f0ZfvbTmhVSvV65ZEtAw==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhTCCAm2gAwIBAgIIfksp5LDEMMgwDQYJKoZIhvcNAQELBQAwUDFOMEwGA1UE\nAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX25vZGUtc3lzdGVt\nLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MB4XDTI1MTEwMjA3NTEyN1oXDTI4MTEw\nMTA3NTEyOFowUDFOMEwGA1UEAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9w\nZXJhdG9yX25vZGUtc3lzdGVtLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA402A+B9GaYmzBVLJEyrGvytCY0Vb\nvvIKHW2kxl9IFN3+LNZHW/mKeJfz/hBTm2bs+6uRCDlMSyONDlVUWVsuE+q0+F42\n0n3VyxWRSrDZ2ur5oNxmoBSsHRM+PxccQ6X3JTZyO397LHNOzxAs/Es+St8A8sbY\nGLc1lNqeOLvwAOT5d2PrFlYCAfXYs/UVIaio846jidKKN1f8Z6W5pgdAHuTXbyBQ\nLDQh6s43TBPhww1KszmcwURjEBDCT6KlhsM/quMd9XlMU0ZEAMf6XxsqvW8ia8C8\nF+RNAaGkwmiS4qZ+hJ4KIUnWM84j+bsyNBqlHFKi1e7LsKRyjnQ288FqIQIDAQAB\no2MwYTAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\n9O+dX0K7dzD13bshuR70sHbUAeAwHwYDVR0jBBgwFoAU9O+dX0K7dzD13bshuR70\nsHbUAeAwDQYJKoZIhvcNAQELBQADggEBADPRGSn1U/YwBUkpU7vTzsxqLaVJW21o\n6hV/W2IjjGNGqp6c2kSH/3ZGSEjNwIJqKRFpC2gmTPgAqnC4nDosOHx5F5HXTmrU\n1l2Ivcm1Ep+t/zBgNHjBi3yommx8n2iTTdakpQaq7/u1s0I4UiRqXydjoGXp7H1C\naAsmRlK8ovgEAWzItjeMBzy65wqiStPBK+XAIddqznHCxrRyH5xk3HcnyMG4GDWl\nrogdK8yTGCuZVCvGfe9Hwm8tyYrxDRNvRLTc0ssTonAwnR/7IzaVVc9Pp0svCynJ\n6VX3FGhgWwDVWeajj8yrXeR42az/Rr1TAAOZtJMW+4hIkaU0/+msvgw=\n-----END CERTIFICATE-----\n"},"metadata":{"creationTimestamp":"2025-11-02T07:51:50Z","managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ca-bundle.crt":{}},"f:metadata":{"f:annotations":{".":{},"f:openshift.io/owning-component":{}}}},"manager":"cluster-kube-apiserver-operator","operation":"Update","time":"2025-12-12T16:17:46Z"}],"resourceVersion":null,"uid":"e7bc0b2c-2af6-488e-bf6f-25875798350a"}} 2025-12-12T16:17:46.415380699+00:00 stderr F I1212 16:17:46.414012 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/client-ca -n openshift-route-controller-manager: 2025-12-12T16:17:46.415380699+00:00 stderr F cause by changes in data.ca-bundle.crt 2025-12-12T16:17:46.418309301+00:00 stderr F I1212 16:17:46.418236 1 apps.go:155] Deployment 
"openshift-controller-manager/controller-manager" changes: {"metadata":{"annotations":{"operator.openshift.io/spec-hash":"f398e3a6ef0bf949e20a31f18dacb2cf27b0c055ebef4248b45ca89a9177fb19"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/client-ca":"38798"}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["openshift-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9506bdcf97d5200cf2cf4cdf110aebafdd141a24f6589bf1e1cfe27bb7fc1ed2","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/etc/pki/ca-trust/extracted/pem","name":"proxy-ca-bundles"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"configMap":{"items":[{"key":"ca-bundle.crt","path":"tls-ca-bundle.pem"}],"name":"openshift-global-ca"},"name":"proxy-ca-bundles"},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:17:46.435158418+00:00 stderr F I1212 16:17:46.431226 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:17:46.435158418+00:00 stderr F I1212 16:17:46.432441 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/controller-manager -n openshift-controller-manager because it changed 2025-12-12T16:17:46.439033894+00:00 stderr F I1212 16:17:46.438984 1 apps.go:155] Deployment "openshift-route-controller-manager/route-controller-manager" changes: 
{"metadata":{"annotations":{"operator.openshift.io/spec-hash":"ad5c1f749b778cf4566115406d0fa2737ed9bf3c458d5b0c8aa4dff6d0eb9b1c"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/client-ca":"38800"}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["route-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7a726c68cebc9b08edd734a8bae5150ae5950f7734fe9b9c2a6e0d06f21cc095","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"route-controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:17:46.447224946+00:00 stderr F I1212 16:17:46.447158 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:17:46.447832151+00:00 stderr F I1212 16:17:46.447792 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/route-controller-manager -n openshift-route-controller-manager because it changed 2025-12-12T16:17:46.482199881+00:00 stderr F I1212 16:17:46.480590 1 status_controller.go:229] clusteroperator/openshift-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:51:57Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:17:06Z","message":"Progressing: deployment/controller-manager: observed generation is 17, desired generation is 18\nRouteControllerManagerProgressing: deployment/route-controller-manager: observed generation is 15, desired generation is 16","reason":"RouteControllerManager_DesiredStateNotYetAchieved::_DesiredStateNotYetAchieved","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:46Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:01Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:57Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:17:46.514654803+00:00 stderr F I1212 16:17:46.507281 
1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-controller-manager changed: Progressing message changed from "Progressing: deployment/controller-manager: observed generation is 16, desired generation is 17\nProgressing: deployment/controller-manager: no available replica found\nRouteControllerManagerProgressing: deployment/route-controller-manager: observed generation is 14, desired generation is 15\nRouteControllerManagerProgressing: deployment/route-controller-manager: no available replica found" to "Progressing: deployment/controller-manager: observed generation is 17, desired generation is 18\nRouteControllerManagerProgressing: deployment/route-controller-manager: observed generation is 15, desired generation is 16",Available changed from False to True ("All is well") 2025-12-12T16:18:06.339649510+00:00 stderr F I1212 16:18:06.338857 1 core.go:352] ConfigMap "openshift-controller-manager/config" changes: {"apiVersion":"v1","data":{"openshift-controller-manager.client-ca.configmap":"cyMWKQ=="},"kind":"ConfigMap","metadata":{"creationTimestamp":null,"managedFields":null,"resourceVersion":null,"uid":null}} 2025-12-12T16:18:06.340058520+00:00 stderr F I1212 16:18:06.339964 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/config -n openshift-controller-manager: 2025-12-12T16:18:06.340058520+00:00 stderr F cause by changes in data.openshift-controller-manager.client-ca.configmap 2025-12-12T16:18:06.361118161+00:00 stderr P I1212 16:18:06.361018 1 core.go:352] ConfigMap "openshift-controller-manager/client-ca" changes: {"data":{"ca-bundle.crt":"-----BEGIN 
CERTIFICATE-----\nMIIFWzCCA0OgAwIBAgIUQLy5hoffN9SGXMo67f/i/5XEwyEwDQYJKoZIhvcNAQEL\nBQAwPTESMBAGA1UECwwJb3BlbnNoaWZ0MScwJQYDVQQDDB5hZG1pbi1rdWJlY29u\nZmlnLXNpZ25lci1jdXN0b20wHhcNMjUxMjEyMTYxNzQyWhcNMzUxMjEwMTYxNzQy\nWjA9MRIwEAYDVQQLDAlvcGVuc2hpZnQxJzAlBgNVBAMMHmFkbWluLWt1YmVjb25m\naWctc2lnbmVyLWN1c3RvbTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB\nAJRtnlRieMPhCvStW7Uo/SGT7vUyHxVn8BYwaDaVxTUKO0kZRGBZ0gHPv8jnIZ0u\n9xNf88H2LEduWCso2Q+si1eaGR24oL/nxabCArbO4hHFtwsF4LFbA2v4iknUaW5d\nQWb4bl072Tx3O0Wly+wiVv7H93PVCyuLWmBeH80F+Cwt80s3RbtDthOHKjYK/a5W\naFRjDBzvmoWoK+/GxYjovUmgZCpLvElE9a2TmH1hExyn1ST15L8ESyah0e0vSZFW\n2odSnLavfmUw2HbS4lpy0peAGSa3R52E+rvF16qGuAGHIja09i/0GN/TM/aTrFRu\noyDcrAWgxNwU/ungVw8jX1+ReTC0ZkohS78CfvE5jhM6AcRCbMisGAFibd7VHbA3\nUot5wB7w/fAs7YPatGUdRNVYE4vQxfTYTRktYiSLSsPOEuHaqA6lBHLklssMGGAt\nPrFydiB76a4MaZrHgBNLVUpVT5txScHOmqTrU4m/Ix+SxgARUfhnN4QizhWNMhi1\nKxmHi9OQye+tTxpBLBbvagZFoQOkjMfHPABQkaORgdBwKn73V2psrwZpoZh5jDWZ\ny9RB3N/K+vsQvCmhjnX4rKHhcX/tLVUDcvN1XNLbPFJUoVpFqtSZb6yaQlHze+zL\nNSw9SUfOHN5+VuDW9O0ho/cDIV2w1e05pPZe7+SiSwEZAgMBAAGjUzBRMB0GA1Ud\nDgQWBBSLpZUyMU6+pBaqwQ1fQgx26OkbpjAfBgNVHSMEGDAWgBSLpZUyMU6+pBaq\nwQ1fQgx26OkbpjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4ICAQB8\nS/trc5KgMr4wUI8dKDSGKElBdqbMWIY0xPt1ANYj36eoA9Fj8k2qX+vxqSRw5NIU\nPuLASD2jk5vM7Chn29XLnEO0uZlFrdX6ywCGHqU00X6AKPBOaflmdiUo5/GvY0ll\nK6Rk+Y+jueATGDVseDzArN6DW22ZJ0M/7uwmJjW4VRmK0gPmz0PXBGrNXuR/e00n\nrXGvq3CqUw+g9lnR1JI6DSRk3iu5SgK4syO+B6P7Z7Cxuj2YDyYJEv4uTPeti0E9\nVn0UH3h0/GzDFu/ghy4bEhb/CX74FcoBx0geoZu3YYhSjL4gmRLnTwmL5Yg4d0R5\nKsFg3Ht5VBh757nG+aMBJb+aheFuu0z62XOQlpJC4+qx1e4cYU+jInKsnZYSptT7\nMmtGFjud4bmlSyDgJ0s+gfu+7PBeoowJi9sQSxlA4HFStbc/9Qh8eUxeZG1nGIYa\n+ACyCDzlVAt2+EOB8l+mxTFoBD1UcYG7E+2wepOpkz92zUqS5vlQOeZg1KlfHOcz\nAzrBCHw3+xYQN62WaNX97Beewz2y05I/CvSZfIW0U4CReywukh1V+66m5WWvA2Qo\nqXKlRZjy/rcLHXG557FT6EnQDoh2R+FEko6O1tJ4urkAD1/E1fY1U7qdkr9uvMlo\nvOrzaWzKi110hAolc+gWzOnIcG3nf1y7EMIVbBbHMw==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIIHuh6I9HTH+QwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM1WhcNMjYx\nMDI4MDgxNzM2WjAmMSQwIgYDVQQDDBtrdWJlLWNzci1zaWduZXJfQDE3NjIwNzE0\nNTUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb91UVWMxGg+Q7+o+5\ntt2z/Oq+7Mc1SNGJ4oF0Y99WM1Y7tIo5NOKWiIXE16rGuzY4+D9O53fh14ngzJm3\nWh3GjCGvr8/2W7J1BblXwuKwDuRl+OfJvyPtETUeME42Y9V6XP/B60iaaEYhm5t7\nTDf7TmmjEpqeWix43KqHnpcW9Zr8tM+tHLrGwcHnb+z3LvGkQA7mbXsaHiuryCVx\nudxQYNKWgtAkw3OOtuVyJ2gGD7iYVni1jg7nc9ZhQOYBoYRbAw3zh36CY50dZA89\nhDKYsVVourd9xfAdwSmrcgtsVo1X0ucCpEEUYKEz3/udgk+Dgf2hy3flIMcg9kcI\nS8xzAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0G\nA1UdDgQWBBSGMLijow+n1t5asMgXimhNhoMUvDAfBgNVHSMEGDAWgBQwDwrhoAwS\n6q2GA5CnoIQZVVim5jANBgkqhkiG9w0BAQsFAAOCAQEAhDT7ncsDiLG3QyVtffPp\nTjWscZowuHbVwUjwpZt5OxQFonSJxNCnqrj4MlIqqxraH5nnsrW5FqWyWWeMmXpz\nbFkiVhPFCVGjmRN9V1LXjHDc4lufe3ixq+MvgtU1YL/WJBmUxxw5dPifTT15IApJ\n6stLJ0QtHBNeEIIroUFpDB+O7OJYZ85ed6o6gT4n/v9nxBaLsZNpO2TzaWfI0Bst\nFEoPsfPKgBvwg9+2GijlP/VyKmP2gFdFm25PWeROU1VZzPrEhaOliO+/YXHt32YU\nJkxTF/smrLzxRRbb507cuvWEilzud93YbHmAhAj1h0CpDdzjxYDr5zRYJSP7BV+6\nUA==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDiTCCAnGgAwIBAgIIOsA0z3vOTSgwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM0WhcNMjcx\nMDIzMDgxNzM1WjBSMVAwTgYDVQQDDEdvcGVuc2hpZnQta3ViZS1jb250cm9sbGVy\nLW1hbmFnZXItb3BlcmF0b3JfY3NyLXNpZ25lci1zaWduZXJAMTc2MjA3MTQ1NTCC\nASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPY8aATxrjWpRKfnFU7BgmD1\nkNEst9RVY3fzGsARc5tF3x+zhl9BbsoJ9NAncCj+KnkAIYk4pJMU6BHIWIakpvqV\nLsnlPM45VU8ocwOWb5Z7g88YdqHGRWeZqZt/rPXmH/846iVGDstB0YQWgKKKK97X\nvjKMsq9ALSVj8gRWai7B7MVP/bZ4FgeqsYq6zIH9XKdPO8ev20qffrob4nmLGHdJ\nikAliwIy6nkVYrATKOS8t56votMD7xuFQ8rM6uQ0YVejA5rE/Tmq4/NsFpfFfkCP\nMU0vO2XwqCBV81XKD00SmW9MXIxbTq5lU9YjjE1sDFREtN4uZL4nEDa2+wnvIxEC\nAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O\nBBYEFDAPCuGgDBLqrYYDkKeghBlVWKbmMB8GA1UdIwQYMBaAFDAPCuGgDBLqrYYD\nkKeghBlVWKbmMA0GCSqGSIb3DQEBCwUAA4IBAQDS31/o3/4/9DBLFVcLvIwOQCK1\n0ZwJUa/8iK5GaMlMH6cNfvVZBrb4SwTVx0WXI5wrIXrlvYc+PtXL0MJeyIJmMpoU\nRyQAJZsh8cckeQjghV2Pf7wMfEbHudKTp8uoQDUBntkfNhJa4pPxmNWuhOrlvdB5\nEF/6IGviKAdSy0jcNpscvD3W0oSpCYRW0Ki/25LaFvIqP2Xy/cNJlWhzJWqZbK6k\nR9I4knhvIv/JYmppOVXw1rEvP+8Pn8UF2oSfFXcN5W+j4YIIhrAUnjAbaflpyX8k\nAUEKdtgVNNe7RlW9nQrhO8GqFJItitBbNtVuSkw9XIlQ0gc40E7mgB7Mjnbu\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDQjCCAiqgAwIBAgIICN23rtJ7PrYwDQYJKoZIhvcNAQELBQAwPzESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSkwJwYDVQQDEyBrdWJlLWFwaXNlcnZlci10by1rdWJlbGV0\nLXNpZ25lcjAeFw0yNTExMDIwNzM0MTFaFw0yNjExMDIwNzM0MTFaMD8xEjAQBgNV\nBAsTCW9wZW5zaGlmdDEpMCcGA1UEAxMga3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxl\ndC1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDzRlKEKNEy\nCyd+PPBkdzQZd3BeUb0b2qi1h8fbUiSNENrOpafKxxAHcr3a4KWB6sKhi28r14mF\nxcJ2Yb92/jLpkS15p629AUrGXxKuL8QtkBsY3dH0CqKMBedO6oxodva9Avc+3DMI\nvvYBJFy+4on/0JbM54fduvDmcEmhBtgRItK3Z87VbhemVPj7uDi9EV381uRMlmq4\ncgtD5mfS1yeRu0ut5IIr7/PN1G+93slLGQkHveqWlsFrDYd8Qm5PqirRBYy+18RC\nmEuNirFX3yPrEGwMvRlJyFia0RKuK69bFL2vduI5Wu7h/6VKP0/vEpEYqI6bJYoV\nbUjA2vqrV/1VAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTAD\nAQH/MB0GA1UdDgQWBBRNj9dsSIhfwKZ491Q/XZYt2lRWLzANBgkqhkiG9w0BAQsF\nAAOCAQEANmt7a5N+zch83D5+YeC4VQoFvpkHPKEYYT0EM/ykjch3453RmQWQEwkj\nVvstV1U16YpnEI61l1s347RHi64SwtlwV6tiNCpopDF2u3Bb+eiJqrlIC69EFnZE\n1426AVmZZq3sWu3eKB0HgT5u6B1rErSTl3c4hK4SiDsWWlVktBSN0BS4cD+urSAF\nc673/wLKCjq2I+9i3Wv2K7Ton3w5oaETE7lgQyImbKOVhJhFrPGu9fKXaeWlyXGY\nj7tz68vNTvecRynKrmzUJ9BBMfAXTrCowitzjBjanFitgXK4DnQMkb+8lv2Txb/n\nkB7RzcFDyIVd3g5XWBujR3fkQFWsNQ==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDlTCCAn2gAwIBAgIIJ6CFEe7+79cwDQYJKoZIhvcNAQELBQAwWDFWMFQGA1UE\nAwxNb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtYXBpc2Vy\ndmVyLXRvLWt1YmVsZXQtc2lnbmVyQDE3NjU1NTYyMDkwHhcNMjUxMjEyMTYxNjQ4\nWhcNMjYxMjEyMTYxNjQ5WjBYMVYwVAYDVQQDDE1vcGVuc2hpZnQta3ViZS1hcGlz\nZXJ2ZXItb3BlcmF0b3Jfa3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxldC1zaWduZXJA\nMTc2NTU1NjIwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFdeL5w\n1lJs3DvHYhLuNjrkSOLA4aXRwP85WxI5EZ4JKmSU1IxWlKucnS96ghUWuJudciUT\nGeDw1fYjUOvv+YnMhcM5avzLMmc/4JwRwmPsBNqeS6NSZsJYasHBQqT5lihBnZmU\nxRCSGpOAJqL/tWMsC3MxHG41AQunmbpk4RSZdPjfiJ3U8Gty9rnppq9GfZ9n+LxL\niIkodGqPe95J2csyNuLpDmOlhA5x7/miPLT7Wtp/hN/s2DSgpKQEWIHEsEVlfU+q\nH+GO/W1yWO+dDthJ2/yGn4ZQVe2riuw5uUcxjY27wkobhZ25/9brhqAKxweioGLf\nB32mvwrIarjuWKsCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQF\nMAMBAf8wHQYDVR0OBBYEFAl1BlYxjAq7rEsUpr186nCtx13/MB8GA1UdIwQYMBaA\nFAl1BlYxjAq7rEsUpr186nCtx13/MA0GCSqGSIb3DQEBCwUAA4IBAQCu3dv4kIwz\nu5DkLCS1z/I9QcxrEZmJs72LUWpri/eyvFxK5LrC4bU0d3LZXdjQUxLhrk1A8qD6\ne7QoqzRf9QdNRUiU3ilpwtANd6NwmIzd2PrcalBdoglrtDpBz5VAZ0j26YJAcpyn\nXbMpaDDOgz247mp4Ts1wHVjQa0H0bFAtuKbab+R+EFWCVc77MSRXVDiQndg/gtdZ\nEekPhH4kNhFT2c74uf32ICivfylG79g0sGNUVO0SPLup4psWmGf2pxI+HGftIT8p\ni0idY7Ij5JlIyQ8ypGbD7AqA1bRwcT6o8Al2iQw349XO694SJUNs10QGwuVKdHqL\nJC64gPwOl26J\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDNDCCAhygAwIBAgIILvKlXd2YBKIwDQYJKoZIhvcNAQELBQAwODESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVy\nMB4XDTI1MTEwMjA3MzQxMFoXDTI2MTEwMjA3MzQxMFowODESMBAGA1UECxMJb3Bl\nbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyMIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsOnFEdxnqqE7l+5NxN82TBBycC2F\n5cUboCcUVr211lubqmjCusnJzzz+6rKpO8uFqqGeu4C7rGpudOaK52IZhG0WP8b7\n0raQhnM4DV8t2SHDV4GhFUiuNE+b4FJrZ6jiljQo2g9ZeeCZgdmmBrIFHBXDFzEc\nA1RPScqOtvBbbH064Zd267gOmVPJnWmxDXo6X/RGYCm1YUS6FQ2WWpl707ComvgZ\nAvWGSA4H1sZirMQ+ug3bctkLb+SiXUzf+tLnGIHPeqDNfMrNUhxBl6dDhlMbUIzY\nrVxgDD8y3i7eg5i5HG8yntl8epgs2gn47wfavfjLqATBlciJPZY6Qv3BFQIDAQAB\no0IwQDAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\nWi6wmZ50AVdwxaiO3WjTvp82+sEwDQYJKoZIhvcNAQELBQADggEBAA4siMWwOGL5\nCiCxbiJsuKMbCLlFR6JaD43FWnUCZDoe7np7W76Oh4py4Zht7XlZKotcXrrfRYVO\ndVht66PCbSujy375p/B6c3isG4h/1cNSGDm1uhAkHXGZ88S2wSjKT5YJ/HUAkvyj\nadQgZeO7Q60YBSDE/67Ldq1zqvBrMF2k8pF49p1AdAtf4OSDzIaGGPUQJTFExA0E\n03 2025-12-12T16:18:06.361210343+00:00 stderr F xMlOPNhYZ8MgFT2XE6nRT74lCAfK9krAsZLtFuAtp/14t013PD0FqTTQRUmuSj\nO6pJKDTH8tZ3ieXxSzRR+j4p5hkHaehgQVyUbwiw8WVXkd6NcWR6yQcSeqIsTSCD\neMDdTmmKyuo=\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDhzCCAm+gAwIBAgIID5n6gpWYc8YwDQYJKoZIhvcNAQELBQAwUTFPME0GA1UE\nAwxGb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtY29udHJv\nbC1wbGFuZS1zaWduZXJAMTc2NTU1NjIwOTAeFw0yNTEyMTIxNjE2NDhaFw0yNjAy\nMTAxNjE2NDlaMFExTzBNBgNVBAMMRm9wZW5zaGlmdC1rdWJlLWFwaXNlcnZlci1v\ncGVyYXRvcl9rdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyQDE3NjU1NTYyMDkwggEi\nMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtQLJD7hc4OcHS2GPLdn9xsKkM\nAqHIcDr2UgXYWTVShY348nGviBJGCBWyKqQPpM6u4zIbS4xHMeQ3fnVCRvti8Ggf\n7zKigxyoxwcx+f4z9L5fCO6RbScmwmaYchePaAMT/7sNBt8NIKyFfep/bExW2UPw\nrb0qUqAbmBse+Azrl0V+UHNexaG3VmsOEfCJORAVgIo8SNIAG8jHe6+r3BtnwTsC\n59znIuoxDrBL9cq82ZZGQ++jVx4AE6JM0Lj/UBPVsJ06+X/829a1PiHuqlY2oN/p\nm+xCLSQgi59nrDifJLQXNBjahDW8ccWnbkvkUPjO8/OzS2nJb6uXihez5m2xAgMB\nAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBR9PxK41FQkWasZMK1vVKm6UNhzHzAfBgNVHSMEGDAWgBR9PxK41FQkWasZMK1v\nVKm6UNhzHzANBgkqhkiG9w0BAQsFAAOCAQEAfHCzxMKk00cOsThQkkL2trlY5tl2\n9wXEd/62Fh8EoOhNmCIpyPYLWMDnD2GB2BS7J5S+zcqby2+7s8Etub0gpvbN2Ocq\nHss3f+WcAFm7t9hiQrJ4gPYSkwEQGCwJ3ueGIEmPyyrTQPTmzNYudSdXt1WSrTpO\nO1sKWdQro5M0V4U9Z6MWGnG4nIZljqHWgVkZXkluh6Rvshoen8rhUNa6VV3aMHcZ\n94dtvZRSye9RsOZwZygsG/HU2+GcnKKYvqkIo8FZVAYTyu3rlOlT9dmmZpxwukb6\nADjin/tgzt7r0FiU+Z9uYqI3SoFog9pv+mlqpuf3zLm+Q3DKwBpxaxn5nQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDSDCCAjCgAwIBAgIIV//3Qv2OxM4wDQYJKoZIhvcNAQELBQAwQjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSwwKgYDVQQDEyNrdWJlbGV0LWJvb3RzdHJhcC1rdWJlY29u\nZmlnLXNpZ25lcjAeFw0yNTExMDIwNzM0MDhaFw0zNTEwMzEwNzM0MDhaMEIxEjAQ\nBgNVBAsTCW9wZW5zaGlmdDEsMCoGA1UEAxMja3ViZWxldC1ib290c3RyYXAta3Vi\nZWNvbmZpZy1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc\nZBn9WZ5wLPck6+g3f4p0NBGyE6LaXnWavXx9m0sVdTVQooknndmKufeYkXGZ5Lb+\nfbMAp/6swgSJ1DjdBj06rCqJEZfdZl3uZoD/Th4ha2Phl12bXaNYLiuOu5BOZ3UW\n08y1Wab9Y9zc0o4Z71pHH4o9TH3QPNT6BqAz4kkgD6t1r/R7E7lrZbx+7e+0JBAW\nRgufaFOX1AYU5B4+pSM21eJY7oP1P9I4DMeeJW39opCCHAuUQgHpOV1YPtRqEPJ4\n9matas8qm5qIMIPbGEGFckSJqgny9YCfHaLezJtZMIHJgz5LW4H91gQCGvfSbLtH\nYxYO/PcTXQCnDYNwqf29AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBSCg7Y8QkzypoNAqYVPNK5YWMVIXzANBgkqhkiG\n9w0BAQsFAAOCAQEAsP1NR5ZgC7F5FvoU2CXla4FufdeYTk4bjjNoFJMqnvI5gcY6\nJDxP1Rl2YBHCpRxngtBFxw3Xoe8UnBKzZWsS5wLUYRloprhGVBSLM0vqJJOvP7M0\njt3SLuB7h0dG2GO9yQ4y10+xVWxP5Os9wcbQcRgTQKL3gHmCq4aQN1cqJSxyJ/ut\nlbfYlM/xBcfLMY5Leeas6y2FPCFIEONh1U9FJZlF3YkhPp+XD7aePtC4tJqsokMc\nP80IwPn54aDT9akRPsOteB6C+xSAz2TlfWaJ/l/x9yXK+HJrRhMartqyN511SeEd\nDpNcMW9qPTjJzBj+N3f0ZfvbTmhVSvV65ZEtAw==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDhTCCAm2gAwIBAgIIfksp5LDEMMgwDQYJKoZIhvcNAQELBQAwUDFOMEwGA1UE\nAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX25vZGUtc3lzdGVt\nLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MB4XDTI1MTEwMjA3NTEyN1oXDTI4MTEw\nMTA3NTEyOFowUDFOMEwGA1UEAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9w\nZXJhdG9yX25vZGUtc3lzdGVtLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA402A+B9GaYmzBVLJEyrGvytCY0Vb\nvvIKHW2kxl9IFN3+LNZHW/mKeJfz/hBTm2bs+6uRCDlMSyONDlVUWVsuE+q0+F42\n0n3VyxWRSrDZ2ur5oNxmoBSsHRM+PxccQ6X3JTZyO397LHNOzxAs/Es+St8A8sbY\nGLc1lNqeOLvwAOT5d2PrFlYCAfXYs/UVIaio846jidKKN1f8Z6W5pgdAHuTXbyBQ\nLDQh6s43TBPhww1KszmcwURjEBDCT6KlhsM/quMd9XlMU0ZEAMf6XxsqvW8ia8C8\nF+RNAaGkwmiS4qZ+hJ4KIUnWM84j+bsyNBqlHFKi1e7LsKRyjnQ288FqIQIDAQAB\no2MwYTAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\n9O+dX0K7dzD13bshuR70sHbUAeAwHwYDVR0jBBgwFoAU9O+dX0K7dzD13bshuR70\nsHbUAeAwDQYJKoZIhvcNAQELBQADggEBADPRGSn1U/YwBUkpU7vTzsxqLaVJW21o\n6hV/W2IjjGNGqp6c2kSH/3ZGSEjNwIJqKRFpC2gmTPgAqnC4nDosOHx5F5HXTmrU\n1l2Ivcm1Ep+t/zBgNHjBi3yommx8n2iTTdakpQaq7/u1s0I4UiRqXydjoGXp7H1C\naAsmRlK8ovgEAWzItjeMBzy65wqiStPBK+XAIddqznHCxrRyH5xk3HcnyMG4GDWl\nrogdK8yTGCuZVCvGfe9Hwm8tyYrxDRNvRLTc0ssTonAwnR/7IzaVVc9Pp0svCynJ\n6VX3FGhgWwDVWeajj8yrXeR42az/Rr1TAAOZtJMW+4hIkaU0/+msvgw=\n-----END CERTIFICATE-----\n"},"metadata":{"creationTimestamp":"2025-11-02T07:51:50Z","managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ca-bundle.crt":{}},"f:metadata":{"f:annotations":{".":{},"f:openshift.io/owning-component":{}}}},"manager":"cluster-kube-apiserver-operator","operation":"Update","time":"2025-12-12T16:17:51Z"}],"resourceVersion":null,"uid":"e7bc0b2c-2af6-488e-bf6f-25875798350a"}} 2025-12-12T16:18:06.361933841+00:00 stderr F I1212 16:18:06.361885 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/client-ca -n openshift-controller-manager: 2025-12-12T16:18:06.361933841+00:00 stderr F cause by changes in data.ca-bundle.crt 2025-12-12T16:18:06.381740371+00:00 stderr F I1212 16:18:06.381664 1 core.go:352] ConfigMap "openshift-route-controller-manager/config" changes: {"apiVersion":"v1","data":{"openshift-route-controller-manager.client-ca.configmap":"cyMWKQ=="},"kind":"ConfigMap","metadata":{"creationTimestamp":null,"managedFields":null,"resourceVersion":null,"uid":null}} 2025-12-12T16:18:06.382152031+00:00 stderr F I1212 16:18:06.382104 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/config -n openshift-route-controller-manager: 2025-12-12T16:18:06.382152031+00:00 stderr F cause by changes in data.openshift-route-controller-manager.client-ca.configmap 2025-12-12T16:18:06.394643480+00:00 stderr P I1212 16:18:06.394405 1 core.go:352] ConfigMap "openshift-route-controller-manager/client-ca" changes: {"data":{"ca-bundle.crt":"-----BEGIN 
CERTIFICATE-----\nMIIFWzCCA0OgAwIBAgIUQLy5hoffN9SGXMo67f/i/5XEwyEwDQYJKoZIhvcNAQEL\nBQAwPTESMBAGA1UECwwJb3BlbnNoaWZ0MScwJQYDVQQDDB5hZG1pbi1rdWJlY29u\nZmlnLXNpZ25lci1jdXN0b20wHhcNMjUxMjEyMTYxNzQyWhcNMzUxMjEwMTYxNzQy\nWjA9MRIwEAYDVQQLDAlvcGVuc2hpZnQxJzAlBgNVBAMMHmFkbWluLWt1YmVjb25m\naWctc2lnbmVyLWN1c3RvbTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB\nAJRtnlRieMPhCvStW7Uo/SGT7vUyHxVn8BYwaDaVxTUKO0kZRGBZ0gHPv8jnIZ0u\n9xNf88H2LEduWCso2Q+si1eaGR24oL/nxabCArbO4hHFtwsF4LFbA2v4iknUaW5d\nQWb4bl072Tx3O0Wly+wiVv7H93PVCyuLWmBeH80F+Cwt80s3RbtDthOHKjYK/a5W\naFRjDBzvmoWoK+/GxYjovUmgZCpLvElE9a2TmH1hExyn1ST15L8ESyah0e0vSZFW\n2odSnLavfmUw2HbS4lpy0peAGSa3R52E+rvF16qGuAGHIja09i/0GN/TM/aTrFRu\noyDcrAWgxNwU/ungVw8jX1+ReTC0ZkohS78CfvE5jhM6AcRCbMisGAFibd7VHbA3\nUot5wB7w/fAs7YPatGUdRNVYE4vQxfTYTRktYiSLSsPOEuHaqA6lBHLklssMGGAt\nPrFydiB76a4MaZrHgBNLVUpVT5txScHOmqTrU4m/Ix+SxgARUfhnN4QizhWNMhi1\nKxmHi9OQye+tTxpBLBbvagZFoQOkjMfHPABQkaORgdBwKn73V2psrwZpoZh5jDWZ\ny9RB3N/K+vsQvCmhjnX4rKHhcX/tLVUDcvN1XNLbPFJUoVpFqtSZb6yaQlHze+zL\nNSw9SUfOHN5+VuDW9O0ho/cDIV2w1e05pPZe7+SiSwEZAgMBAAGjUzBRMB0GA1Ud\nDgQWBBSLpZUyMU6+pBaqwQ1fQgx26OkbpjAfBgNVHSMEGDAWgBSLpZUyMU6+pBaq\nwQ1fQgx26OkbpjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4ICAQB8\nS/trc5KgMr4wUI8dKDSGKElBdqbMWIY0xPt1ANYj36eoA9Fj8k2qX+vxqSRw5NIU\nPuLASD2jk5vM7Chn29XLnEO0uZlFrdX6ywCGHqU00X6AKPBOaflmdiUo5/GvY0ll\nK6Rk+Y+jueATGDVseDzArN6DW22ZJ0M/7uwmJjW4VRmK0gPmz0PXBGrNXuR/e00n\nrXGvq3CqUw+g9lnR1JI6DSRk3iu5SgK4syO+B6P7Z7Cxuj2YDyYJEv4uTPeti0E9\nVn0UH3h0/GzDFu/ghy4bEhb/CX74FcoBx0geoZu3YYhSjL4gmRLnTwmL5Yg4d0R5\nKsFg3Ht5VBh757nG+aMBJb+aheFuu0z62XOQlpJC4+qx1e4cYU+jInKsnZYSptT7\nMmtGFjud4bmlSyDgJ0s+gfu+7PBeoowJi9sQSxlA4HFStbc/9Qh8eUxeZG1nGIYa\n+ACyCDzlVAt2+EOB8l+mxTFoBD1UcYG7E+2wepOpkz92zUqS5vlQOeZg1KlfHOcz\nAzrBCHw3+xYQN62WaNX97Beewz2y05I/CvSZfIW0U4CReywukh1V+66m5WWvA2Qo\nqXKlRZjy/rcLHXG557FT6EnQDoh2R+FEko6O1tJ4urkAD1/E1fY1U7qdkr9uvMlo\nvOrzaWzKi110hAolc+gWzOnIcG3nf1y7EMIVbBbHMw==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIIHuh6I9HTH+QwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM1WhcNMjYx\nMDI4MDgxNzM2WjAmMSQwIgYDVQQDDBtrdWJlLWNzci1zaWduZXJfQDE3NjIwNzE0\nNTUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb91UVWMxGg+Q7+o+5\ntt2z/Oq+7Mc1SNGJ4oF0Y99WM1Y7tIo5NOKWiIXE16rGuzY4+D9O53fh14ngzJm3\nWh3GjCGvr8/2W7J1BblXwuKwDuRl+OfJvyPtETUeME42Y9V6XP/B60iaaEYhm5t7\nTDf7TmmjEpqeWix43KqHnpcW9Zr8tM+tHLrGwcHnb+z3LvGkQA7mbXsaHiuryCVx\nudxQYNKWgtAkw3OOtuVyJ2gGD7iYVni1jg7nc9ZhQOYBoYRbAw3zh36CY50dZA89\nhDKYsVVourd9xfAdwSmrcgtsVo1X0ucCpEEUYKEz3/udgk+Dgf2hy3flIMcg9kcI\nS8xzAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0G\nA1UdDgQWBBSGMLijow+n1t5asMgXimhNhoMUvDAfBgNVHSMEGDAWgBQwDwrhoAwS\n6q2GA5CnoIQZVVim5jANBgkqhkiG9w0BAQsFAAOCAQEAhDT7ncsDiLG3QyVtffPp\nTjWscZowuHbVwUjwpZt5OxQFonSJxNCnqrj4MlIqqxraH5nnsrW5FqWyWWeMmXpz\nbFkiVhPFCVGjmRN9V1LXjHDc4lufe3ixq+MvgtU1YL/WJBmUxxw5dPifTT15IApJ\n6stLJ0QtHBNeEIIroUFpDB+O7OJYZ85ed6o6gT4n/v9nxBaLsZNpO2TzaWfI0Bst\nFEoPsfPKgBvwg9+2GijlP/VyKmP2gFdFm25PWeROU1VZzPrEhaOliO+/YXHt32YU\nJkxTF/smrLzxRRbb507cuvWEilzud93YbHmAhAj1h0CpDdzjxYDr5zRYJSP7BV+6\nUA==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDiTCCAnGgAwIBAgIIOsA0z3vOTSgwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM0WhcNMjcx\nMDIzMDgxNzM1WjBSMVAwTgYDVQQDDEdvcGVuc2hpZnQta3ViZS1jb250cm9sbGVy\nLW1hbmFnZXItb3BlcmF0b3JfY3NyLXNpZ25lci1zaWduZXJAMTc2MjA3MTQ1NTCC\nASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPY8aATxrjWpRKfnFU7BgmD1\nkNEst9RVY3fzGsARc5tF3x+zhl9BbsoJ9NAncCj+KnkAIYk4pJMU6BHIWIakpvqV\nLsnlPM45VU8ocwOWb5Z7g88YdqHGRWeZqZt/rPXmH/846iVGDstB0YQWgKKKK97X\nvjKMsq9ALSVj8gRWai7B7MVP/bZ4FgeqsYq6zIH9XKdPO8ev20qffrob4nmLGHdJ\nikAliwIy6nkVYrATKOS8t56votMD7xuFQ8rM6uQ0YVejA5rE/Tmq4/NsFpfFfkCP\nMU0vO2XwqCBV81XKD00SmW9MXIxbTq5lU9YjjE1sDFREtN4uZL4nEDa2+wnvIxEC\nAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O\nBBYEFDAPCuGgDBLqrYYDkKeghBlVWKbmMB8GA1UdIwQYMBaAFDAPCuGgDBLqrYYD\nkKeghBlVWKbmMA0GCSqGSIb3DQEBCwUAA4IBAQDS31/o3/4/9DBLFVcLvIwOQCK1\n0ZwJUa/8iK5GaMlMH6cNfvVZBrb4SwTVx0WXI5wrIXrlvYc+PtXL0MJeyIJmMpoU\nRyQAJZsh8cckeQjghV2Pf7wMfEbHudKTp8uoQDUBntkfNhJa4pPxmNWuhOrlvdB5\nEF/6IGviKAdSy0jcNpscvD3W0oSpCYRW0Ki/25LaFvIqP2Xy/cNJlWhzJWqZbK6k\nR9I4knhvIv/JYmppOVXw1rEvP+8Pn8UF2oSfFXcN5W+j4YIIhrAUnjAbaflpyX8k\nAUEKdtgVNNe7RlW9nQrhO8GqFJItitBbNtVuSkw9XIlQ0gc40E7mgB7Mjnbu\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDQjCCAiqgAwIBAgIICN23rtJ7PrYwDQYJKoZIhvcNAQELBQAwPzESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSkwJwYDVQQDEyBrdWJlLWFwaXNlcnZlci10by1rdWJlbGV0\nLXNpZ25lcjAeFw0yNTExMDIwNzM0MTFaFw0yNjExMDIwNzM0MTFaMD8xEjAQBgNV\nBAsTCW9wZW5zaGlmdDEpMCcGA1UEAxMga3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxl\ndC1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDzRlKEKNEy\nCyd+PPBkdzQZd3BeUb0b2qi1h8fbUiSNENrOpafKxxAHcr3a4KWB6sKhi28r14mF\nxcJ2Yb92/jLpkS15p629AUrGXxKuL8QtkBsY3dH0CqKMBedO6oxodva9Avc+3DMI\nvvYBJFy+4on/0JbM54fduvDmcEmhBtgRItK3Z87VbhemVPj7uDi9EV381uRMlmq4\ncgtD5mfS1yeRu0ut5IIr7/PN1G+93slLGQkHveqWlsFrDYd8Qm5PqirRBYy+18RC\nmEuNirFX3yPrEGwMvRlJyFia0RKuK69bFL2vduI5Wu7h/6VKP0/vEpEYqI6bJYoV\nbUjA2vqrV/1VAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTAD\nAQH/MB0GA1UdDgQWBBRNj9dsSIhfwKZ491Q/XZYt2lRWLzANBgkqhkiG9w0BAQsF\nAAOCAQEANmt7a5N+zch83D5+YeC4VQoFvpkHPKEYYT0EM/ykjch3453RmQWQEwkj\nVvstV1U16YpnEI61l1s347RHi64SwtlwV6tiNCpopDF2u3Bb+eiJqrlIC69EFnZE\n1426AVmZZq3sWu3eKB0HgT5u6B1rErSTl3c4hK4SiDsWWlVktBSN0BS4cD+urSAF\nc673/wLKCjq2I+9i3Wv2K7Ton3w5oaETE7lgQyImbKOVhJhFrPGu9fKXaeWlyXGY\nj7tz68vNTvecRynKrmzUJ9BBMfAXTrCowitzjBjanFitgXK4DnQMkb+8lv2Txb/n\nkB7RzcFDyIVd3g5XWBujR3fkQFWsNQ==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDlTCCAn2gAwIBAgIIJ6CFEe7+79cwDQYJKoZIhvcNAQELBQAwWDFWMFQGA1UE\nAwxNb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtYXBpc2Vy\ndmVyLXRvLWt1YmVsZXQtc2lnbmVyQDE3NjU1NTYyMDkwHhcNMjUxMjEyMTYxNjQ4\nWhcNMjYxMjEyMTYxNjQ5WjBYMVYwVAYDVQQDDE1vcGVuc2hpZnQta3ViZS1hcGlz\nZXJ2ZXItb3BlcmF0b3Jfa3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxldC1zaWduZXJA\nMTc2NTU1NjIwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFdeL5w\n1lJs3DvHYhLuNjrkSOLA4aXRwP85WxI5EZ4JKmSU1IxWlKucnS96ghUWuJudciUT\nGeDw1fYjUOvv+YnMhcM5avzLMmc/4JwRwmPsBNqeS6NSZsJYasHBQqT5lihBnZmU\nxRCSGpOAJqL/tWMsC3MxHG41AQunmbpk4RSZdPjfiJ3U8Gty9rnppq9GfZ9n+LxL\niIkodGqPe95J2csyNuLpDmOlhA5x7/miPLT7Wtp/hN/s2DSgpKQEWIHEsEVlfU+q\nH+GO/W1yWO+dDthJ2/yGn4ZQVe2riuw5uUcxjY27wkobhZ25/9brhqAKxweioGLf\nB32mvwrIarjuWKsCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQF\nMAMBAf8wHQYDVR0OBBYEFAl1BlYxjAq7rEsUpr186nCtx13/MB8GA1UdIwQYMBaA\nFAl1BlYxjAq7rEsUpr186nCtx13/MA0GCSqGSIb3DQEBCwUAA4IBAQCu3dv4kIwz\nu5DkLCS1z/I9QcxrEZmJs72LUWpri/eyvFxK5LrC4bU0d3LZXdjQUxLhrk1A8qD6\ne7QoqzRf9QdNRUiU3ilpwtANd6NwmIzd2PrcalBdoglrtDpBz5VAZ0j26YJAcpyn\nXbMpaDDOgz247mp4Ts1wHVjQa0H0bFAtuKbab+R+EFWCVc77MSRXVDiQndg/gtdZ\nEekPhH4kNhFT2c74uf32ICivfylG79g0sGNUVO0SPLup4psWmGf2pxI+HGftIT8p\ni0idY7Ij5JlIyQ8ypGbD7AqA1bRwcT6o8Al2iQw349XO694SJUNs10QGwuVKdHqL\nJC64gPwOl26J\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDNDCCAhygAwIBAgIILvKlXd2YBKIwDQYJKoZIhvcNAQELBQAwODESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVy\nMB4XDTI1MTEwMjA3MzQxMFoXDTI2MTEwMjA3MzQxMFowODESMBAGA1UECxMJb3Bl\nbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyMIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsOnFEdxnqqE7l+5NxN82TBBycC2F\n5cUboCcUVr211lubqmjCusnJzzz+6rKpO8uFqqGeu4C7rGpudOaK52IZhG0WP8b7\n0raQhnM4DV8t2SHDV4GhFUiuNE+b4FJrZ6jiljQo2g9ZeeCZgdmmBrIFHBXDFzEc\nA1RPScqOtvBbbH064Zd267gOmVPJnWmxDXo6X/RGYCm1YUS6FQ2WWpl707ComvgZ\nAvWGSA4H1sZirMQ+ug3bctkLb+SiXUzf+tLnGIHPeqDNfMrNUhxBl6dDhlMbUIzY\nrVxgDD8y3i7eg5i5HG8yntl8epgs2gn47wfavfjLqATBlciJPZY6Qv3BFQIDAQAB\no0IwQDAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\nWi6wmZ50AVdwxaiO3WjTvp82+sEwDQYJKoZIhvcNAQELBQADggEBAA4siMWwOGL5\nCiCxbiJsuKMbCLlFR6JaD43FWnUCZDoe7np7W76Oh4py4Zht7XlZKotcXrrfRYVO\ndVht66PCbSujy375p/B6c3isG4h/1cNSGDm1uhAkHXGZ88S2wSjKT5YJ/HUAkvyj\nadQgZeO7Q60YBSDE/67Ldq1zqvBrMF2k8pF49p1AdAtf4OSDzIaGGPUQJTFExA 2025-12-12T16:18:06.394759013+00:00 stderr F 0E\n03xMlOPNhYZ8MgFT2XE6nRT74lCAfK9krAsZLtFuAtp/14t013PD0FqTTQRUmuSj\nO6pJKDTH8tZ3ieXxSzRR+j4p5hkHaehgQVyUbwiw8WVXkd6NcWR6yQcSeqIsTSCD\neMDdTmmKyuo=\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDhzCCAm+gAwIBAgIID5n6gpWYc8YwDQYJKoZIhvcNAQELBQAwUTFPME0GA1UE\nAwxGb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtY29udHJv\nbC1wbGFuZS1zaWduZXJAMTc2NTU1NjIwOTAeFw0yNTEyMTIxNjE2NDhaFw0yNjAy\nMTAxNjE2NDlaMFExTzBNBgNVBAMMRm9wZW5zaGlmdC1rdWJlLWFwaXNlcnZlci1v\ncGVyYXRvcl9rdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyQDE3NjU1NTYyMDkwggEi\nMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtQLJD7hc4OcHS2GPLdn9xsKkM\nAqHIcDr2UgXYWTVShY348nGviBJGCBWyKqQPpM6u4zIbS4xHMeQ3fnVCRvti8Ggf\n7zKigxyoxwcx+f4z9L5fCO6RbScmwmaYchePaAMT/7sNBt8NIKyFfep/bExW2UPw\nrb0qUqAbmBse+Azrl0V+UHNexaG3VmsOEfCJORAVgIo8SNIAG8jHe6+r3BtnwTsC\n59znIuoxDrBL9cq82ZZGQ++jVx4AE6JM0Lj/UBPVsJ06+X/829a1PiHuqlY2oN/p\nm+xCLSQgi59nrDifJLQXNBjahDW8ccWnbkvkUPjO8/OzS2nJb6uXihez5m2xAgMB\nAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBR9PxK41FQkWasZMK1vVKm6UNhzHzAfBgNVHSMEGDAWgBR9PxK41FQkWasZMK1v\nVKm6UNhzHzANBgkqhkiG9w0BAQsFAAOCAQEAfHCzxMKk00cOsThQkkL2trlY5tl2\n9wXEd/62Fh8EoOhNmCIpyPYLWMDnD2GB2BS7J5S+zcqby2+7s8Etub0gpvbN2Ocq\nHss3f+WcAFm7t9hiQrJ4gPYSkwEQGCwJ3ueGIEmPyyrTQPTmzNYudSdXt1WSrTpO\nO1sKWdQro5M0V4U9Z6MWGnG4nIZljqHWgVkZXkluh6Rvshoen8rhUNa6VV3aMHcZ\n94dtvZRSye9RsOZwZygsG/HU2+GcnKKYvqkIo8FZVAYTyu3rlOlT9dmmZpxwukb6\nADjin/tgzt7r0FiU+Z9uYqI3SoFog9pv+mlqpuf3zLm+Q3DKwBpxaxn5nQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDSDCCAjCgAwIBAgIIV//3Qv2OxM4wDQYJKoZIhvcNAQELBQAwQjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSwwKgYDVQQDEyNrdWJlbGV0LWJvb3RzdHJhcC1rdWJlY29u\nZmlnLXNpZ25lcjAeFw0yNTExMDIwNzM0MDhaFw0zNTEwMzEwNzM0MDhaMEIxEjAQ\nBgNVBAsTCW9wZW5zaGlmdDEsMCoGA1UEAxMja3ViZWxldC1ib290c3RyYXAta3Vi\nZWNvbmZpZy1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc\nZBn9WZ5wLPck6+g3f4p0NBGyE6LaXnWavXx9m0sVdTVQooknndmKufeYkXGZ5Lb+\nfbMAp/6swgSJ1DjdBj06rCqJEZfdZl3uZoD/Th4ha2Phl12bXaNYLiuOu5BOZ3UW\n08y1Wab9Y9zc0o4Z71pHH4o9TH3QPNT6BqAz4kkgD6t1r/R7E7lrZbx+7e+0JBAW\nRgufaFOX1AYU5B4+pSM21eJY7oP1P9I4DMeeJW39opCCHAuUQgHpOV1YPtRqEPJ4\n9matas8qm5qIMIPbGEGFckSJqgny9YCfHaLezJtZMIHJgz5LW4H91gQCGvfSbLtH\nYxYO/PcTXQCnDYNwqf29AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBSCg7Y8QkzypoNAqYVPNK5YWMVIXzANBgkqhkiG\n9w0BAQsFAAOCAQEAsP1NR5ZgC7F5FvoU2CXla4FufdeYTk4bjjNoFJMqnvI5gcY6\nJDxP1Rl2YBHCpRxngtBFxw3Xoe8UnBKzZWsS5wLUYRloprhGVBSLM0vqJJOvP7M0\njt3SLuB7h0dG2GO9yQ4y10+xVWxP5Os9wcbQcRgTQKL3gHmCq4aQN1cqJSxyJ/ut\nlbfYlM/xBcfLMY5Leeas6y2FPCFIEONh1U9FJZlF3YkhPp+XD7aePtC4tJqsokMc\nP80IwPn54aDT9akRPsOteB6C+xSAz2TlfWaJ/l/x9yXK+HJrRhMartqyN511SeEd\nDpNcMW9qPTjJzBj+N3f0ZfvbTmhVSvV65ZEtAw==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDhTCCAm2gAwIBAgIIfksp5LDEMMgwDQYJKoZIhvcNAQELBQAwUDFOMEwGA1UE\nAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX25vZGUtc3lzdGVt\nLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MB4XDTI1MTEwMjA3NTEyN1oXDTI4MTEw\nMTA3NTEyOFowUDFOMEwGA1UEAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9w\nZXJhdG9yX25vZGUtc3lzdGVtLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA402A+B9GaYmzBVLJEyrGvytCY0Vb\nvvIKHW2kxl9IFN3+LNZHW/mKeJfz/hBTm2bs+6uRCDlMSyONDlVUWVsuE+q0+F42\n0n3VyxWRSrDZ2ur5oNxmoBSsHRM+PxccQ6X3JTZyO397LHNOzxAs/Es+St8A8sbY\nGLc1lNqeOLvwAOT5d2PrFlYCAfXYs/UVIaio846jidKKN1f8Z6W5pgdAHuTXbyBQ\nLDQh6s43TBPhww1KszmcwURjEBDCT6KlhsM/quMd9XlMU0ZEAMf6XxsqvW8ia8C8\nF+RNAaGkwmiS4qZ+hJ4KIUnWM84j+bsyNBqlHFKi1e7LsKRyjnQ288FqIQIDAQAB\no2MwYTAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\n9O+dX0K7dzD13bshuR70sHbUAeAwHwYDVR0jBBgwFoAU9O+dX0K7dzD13bshuR70\nsHbUAeAwDQYJKoZIhvcNAQELBQADggEBADPRGSn1U/YwBUkpU7vTzsxqLaVJW21o\n6hV/W2IjjGNGqp6c2kSH/3ZGSEjNwIJqKRFpC2gmTPgAqnC4nDosOHx5F5HXTmrU\n1l2Ivcm1Ep+t/zBgNHjBi3yommx8n2iTTdakpQaq7/u1s0I4UiRqXydjoGXp7H1C\naAsmRlK8ovgEAWzItjeMBzy65wqiStPBK+XAIddqznHCxrRyH5xk3HcnyMG4GDWl\nrogdK8yTGCuZVCvGfe9Hwm8tyYrxDRNvRLTc0ssTonAwnR/7IzaVVc9Pp0svCynJ\n6VX3FGhgWwDVWeajj8yrXeR42az/Rr1TAAOZtJMW+4hIkaU0/+msvgw=\n-----END CERTIFICATE-----\n"},"metadata":{"creationTimestamp":"2025-11-02T07:51:50Z","managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ca-bundle.crt":{}},"f:metadata":{"f:annotations":{".":{},"f:openshift.io/owning-component":{}}}},"manager":"cluster-kube-apiserver-operator","operation":"Update","time":"2025-12-12T16:17:51Z"}],"resourceVersion":null,"uid":"e7bc0b2c-2af6-488e-bf6f-25875798350a"}} 2025-12-12T16:18:06.394829424+00:00 stderr F I1212 16:18:06.394778 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/client-ca -n openshift-route-controller-manager: 2025-12-12T16:18:06.394829424+00:00 stderr F cause by changes in data.ca-bundle.crt 2025-12-12T16:18:06.406941154+00:00 stderr F I1212 16:18:06.405795 1 apps.go:155] Deployment "openshift-controller-manager/controller-manager" changes: 
{"metadata":{"annotations":{"operator.openshift.io/spec-hash":"5a5c897cc69e45a451d6e871da91fe0b03fa822d4f8b157c2c35abc309aac06e"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/client-ca":"39003","configmaps/config":"39001"}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["openshift-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9506bdcf97d5200cf2cf4cdf110aebafdd141a24f6589bf1e1cfe27bb7fc1ed2","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/etc/pki/ca-trust/extracted/pem","name":"proxy-ca-bundles"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"configMap":{"items":[{"key":"ca-bundle.crt","path":"tls-ca-bundle.pem"}],"name":"openshift-global-ca"},"name":"proxy-ca-bundles"},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:18:06.418151281+00:00 stderr F I1212 16:18:06.418056 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:18:06.419814022+00:00 stderr F I1212 16:18:06.419773 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/controller-manager -n openshift-controller-manager because it changed 2025-12-12T16:18:06.425907993+00:00 stderr F I1212 16:18:06.425825 1 apps.go:155] Deployment "openshift-route-controller-manager/route-controller-manager" changes: 
{"metadata":{"annotations":{"operator.openshift.io/spec-hash":"2e03889ea8698c408339c5743c417ba829e430cbc6dd0546af4bd0616b1211b0"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/client-ca":"39007","configmaps/config":"39005"}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["route-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7a726c68cebc9b08edd734a8bae5150ae5950f7734fe9b9c2a6e0d06f21cc095","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"route-controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:18:06.435631473+00:00 stderr F I1212 16:18:06.434253 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:18:06.435631473+00:00 stderr F I1212 16:18:06.434975 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/route-controller-manager -n openshift-route-controller-manager because it changed 2025-12-12T16:18:06.456371966+00:00 stderr F I1212 16:18:06.455773 1 status_controller.go:229] clusteroperator/openshift-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:51:57Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:17:06Z","message":"Progressing: deployment/controller-manager: observed generation is 18, desired generation is 19\nRouteControllerManagerProgressing: deployment/route-controller-manager: observed generation is 16, desired generation is 17","reason":"RouteControllerManager_DesiredStateNotYetAchieved::_DesiredStateNotYetAchieved","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:46Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:01Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:57Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:18:06.475635562+00:00 
stderr F I1212 16:18:06.475293 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-controller-manager changed: Progressing message changed from "Progressing: deployment/controller-manager: observed generation is 17, desired generation is 18\nRouteControllerManagerProgressing: deployment/route-controller-manager: observed generation is 15, desired generation is 16" to "Progressing: deployment/controller-manager: observed generation is 18, desired generation is 19\nRouteControllerManagerProgressing: deployment/route-controller-manager: observed generation is 16, desired generation is 17" 2025-12-12T16:18:26.303716805+00:00 stderr F E1212 16:18:26.302749 1 operator.go:174] "Unhandled Error" err="key failed with : Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.212923726+00:00 stderr F E1212 16:18:46.212297 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-controller-manager-operator/leases/openshift-controller-manager-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:46.213333246+00:00 stderr F E1212 16:18:46.213281 1 leaderelection.go:436] error retrieving resource lock openshift-controller-manager-operator/openshift-controller-manager-operator-lock: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-controller-manager-operator/leases/openshift-controller-manager-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.302646334+00:00 stderr F E1212 16:18:46.302568 1 operator.go:174] "Unhandled Error" err="key failed with : Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.507818537+00:00 stderr F E1212 16:18:46.507759 1 base_controller.go:279] "Unhandled Error" err="openshift-controller-manager reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.507995741+00:00 stderr F E1212 16:18:46.507957 1 base_controller.go:279] "Unhandled Error" err="ConfigObserver reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.515172209+00:00 stderr F E1212 16:18:46.515108 1 base_controller.go:279] "Unhandled Error" err="openshift-controller-manager reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.515234840+00:00 stderr F E1212 16:18:46.515174 1 base_controller.go:279] "Unhandled Error" err="ConfigObserver reconciliation failed: unable to get operator configuration: Get 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.527124454+00:00 stderr F E1212 16:18:46.527044 1 base_controller.go:279] "Unhandled Error" err="openshift-controller-manager reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.527501723+00:00 stderr F E1212 16:18:46.527445 1 base_controller.go:279] "Unhandled Error" err="ConfigObserver reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.530730173+00:00 stderr P E1212 16:18:46.530679 1 base_controller.go:279] "Unhandled Error" err="OpenshiftControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/openshift-controller-manager/informer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/informer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"assets/openshift-controller-manager/route-controller-manager-leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/services/route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: 
connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/services/controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/ 2025-12-12T16:18:46.530765734+00:00 stderr F apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrolebinding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-rolebinding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.549481407+00:00 stderr F E1212 16:18:46.549414 1 base_controller.go:279] "Unhandled Error" err="openshift-controller-manager reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.549481407+00:00 stderr F E1212 16:18:46.549436 1 base_controller.go:279] "Unhandled Error" err="ConfigObserver reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.709242387+00:00 stderr P E1212 16:18:46.709147 1 base_controller.go:279] "Unhandled Error" err="OpenshiftControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/openshift-controller-manager/informer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/informer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection 
refused, \"assets/openshift-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/services/route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrole.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/services/controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/ 2025-12-12T16:18:46.709310628+00:00 stderr F apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:deployer\": 
dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-rolebinding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.909120418+00:00 stderr F E1212 16:18:46.909010 1 base_controller.go:279] "Unhandled Error" err="openshift-controller-manager reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.108470567+00:00 stderr F E1212 16:18:47.108401 1 base_controller.go:279] "Unhandled Error" err="ConfigObserver reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.309240991+00:00 stderr P E1212 16:18:47.308919 1 base_controller.go:279] "Unhandled Error" err="OpenshiftControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/openshift-controller-manager/informer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/informer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrolebinding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-role.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/services/route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/services/controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/prometheus-k8s\": dial tcp 
10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/ 2025-12-12T16:18:47.309317492+00:00 stderr F apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-rolebinding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:47.508921526+00:00 stderr F E1212 16:18:47.508860 1 base_controller.go:279] "Unhandled Error" err="openshift-controller-manager reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.708802668+00:00 stderr F E1212 16:18:47.708733 1 base_controller.go:279] "Unhandled Error" err="ConfigObserver reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.908941226+00:00 stderr P E1212 16:18:47.908844 1 base_controller.go:279] "Unhandled Error" err="OpenshiftControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/openshift-controller-manager/informer-clusterrole.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/informer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"assets/openshift-controller-manager/route-controller-manager-separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/services/route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: 
connection refused, \"assets/openshift-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/services/controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/ 2025-12-12T16:18:47.909010278+00:00 stderr F apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-rolebinding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:48.108715755+00:00 stderr F E1212 16:18:48.108632 1 base_controller.go:279] "Unhandled Error" err="openshift-controller-manager reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": 
dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.308931985+00:00 stderr F E1212 16:18:48.308367 1 base_controller.go:279] "Unhandled Error" err="ConfigObserver reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.510128899+00:00 stderr P E1212 16:18:48.509745 1 base_controller.go:279] "Unhandled Error" err="OpenshiftControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/openshift-controller-manager/informer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/informer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-rolebinding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/services/route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-role.yaml\" (string): 
Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/services/controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/ 2025-12-12T16:18:48.510282673+00:00 stderr F apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-rolebinding.yaml\" 
(string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:48.708505213+00:00 stderr F E1212 16:18:48.708424 1 base_controller.go:279] "Unhandled Error" err="openshift-controller-manager reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.908103798+00:00 stderr F E1212 16:18:48.908024 1 base_controller.go:279] "Unhandled Error" err="ConfigObserver reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.108221776+00:00 stderr P E1212 16:18:49.108140 1 base_controller.go:279] "Unhandled Error" err="OpenshiftControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/openshift-controller-manager/informer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/informer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"assets/openshift-controller-manager/route-controller-manager-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/services/route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/services/controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/ 2025-12-12T16:18:49.108321818+00:00 stderr F apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrole.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-rolebinding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.351242034+00:00 stderr F E1212 16:18:49.351168 1 base_controller.go:279] "Unhandled Error" err="openshift-controller-manager reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.550893170+00:00 stderr F E1212 16:18:49.550832 1 base_controller.go:279] "Unhandled Error" err="ConfigObserver reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.709248535+00:00 stderr P E1212 16:18:49.709103 1 base_controller.go:279] "Unhandled Error" err="OpenshiftControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/openshift-controller-manager/informer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/informer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"assets/openshift-controller-manager/leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-svc.yaml\" (string): Get 
\"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/services/route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/services/controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/ 2025-12-12T16:18:49.709335957+00:00 stderr F apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrolebinding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-rolebinding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:50.423011991+00:00 stderr P E1212 16:18:50.421883 1 base_controller.go:279] "Unhandled Error" err="OpenshiftControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/openshift-controller-manager/informer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/informer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-role.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-rolebinding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/services/route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/services/controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/ 2025-12-12T16:18:50.423109894+00:00 stderr F 
apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-rolebinding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:50.634104530+00:00 stderr F E1212 16:18:50.634022 1 base_controller.go:279] "Unhandled Error" err="openshift-controller-manager reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.834334580+00:00 stderr F E1212 16:18:50.834269 1 base_controller.go:279] "Unhandled Error" err="ConfigObserver reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.160992165+00:00 stderr P E1212 16:18:51.160911 1 base_controller.go:279] "Unhandled Error" err="OpenshiftControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/openshift-controller-manager/informer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/informer-clusterrolebinding.yaml\" 
(string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: 
connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/services/route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/services/controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"assets/openshift-controller-manager/servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/ 2025-12-12T16:18:51.161055967+00:00 stderr F apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-rolebinding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:52.480624811+00:00 stderr P E1212 16:18:52.480050 1 base_controller.go:279] "Unhandled Error" err="OpenshiftControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/openshift-controller-manager/informer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/informer-clusterrolebinding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: 
connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/services/route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/services/controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"assets/openshift-controller-manager/servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/ 2025-12-12T16:18:52.480683872+00:00 stderr F apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-rolebinding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:53.197717219+00:00 stderr F E1212 16:18:53.197531 1 base_controller.go:279] "Unhandled Error" err="openshift-controller-manager reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.397799876+00:00 stderr F E1212 16:18:53.397729 1 base_controller.go:279] "Unhandled Error" err="ConfigObserver reconciliation failed: unable to get operator 
configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.073291228+00:00 stderr P E1212 16:18:55.073206 1 base_controller.go:279] "Unhandled Error" err="OpenshiftControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/openshift-controller-manager/informer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/informer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-leader-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/system:openshift:leader-locking-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ns.yaml\" 
(string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-separate-sa-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-route-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-route-controller-manager/services/route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-tokenreview-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:tokenreview-openshift-route-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/route-controller-manager-ingress-to-route-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:ingress-to-route-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/roles/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/separate-sa-rolebinding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-infra/rolebindings/system:openshift:sa-creating-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-controller-manager/services/controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/servicemonitor-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-controller-manager/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/ 2025-12-12T16:18:55.073352319+00:00 stderr F apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/buildconfigstatus-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:update-buildconfig-status\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/deployer-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:deployer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/image-trigger-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:openshift-controller-manager:image-trigger-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/openshift-controller-manager/old-leader-rolebinding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-openshift-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, unable to get operator configuration: Get 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/openshiftcontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:19:06.338128564+00:00 stderr F I1212 16:19:06.337354 1 core.go:352] ConfigMap "openshift-controller-manager/config" changes: {"apiVersion":"v1","data":{"openshift-controller-manager.client-ca.configmap":"UePdZw=="},"kind":"ConfigMap","metadata":{"creationTimestamp":null,"managedFields":null,"resourceVersion":null,"uid":null}} 2025-12-12T16:19:06.338420831+00:00 stderr F I1212 16:19:06.338343 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/config -n openshift-controller-manager: 2025-12-12T16:19:06.338420831+00:00 stderr F cause by changes in data.openshift-controller-manager.client-ca.configmap 2025-12-12T16:19:06.367259694+00:00 stderr F I1212 16:19:06.364654 1 core.go:352] ConfigMap "openshift-route-controller-manager/config" changes: {"apiVersion":"v1","data":{"openshift-route-controller-manager.client-ca.configmap":"UePdZw=="},"kind":"ConfigMap","metadata":{"creationTimestamp":null,"managedFields":null,"resourceVersion":null,"uid":null}} 2025-12-12T16:19:06.367259694+00:00 stderr F I1212 16:19:06.365103 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/config -n openshift-route-controller-manager: 2025-12-12T16:19:06.367259694+00:00 stderr F cause by changes in data.openshift-route-controller-manager.client-ca.configmap 2025-12-12T16:19:06.383994858+00:00 stderr F I1212 16:19:06.381272 1 apps.go:155] Deployment "openshift-controller-manager/controller-manager" changes: 
{"metadata":{"annotations":{"operator.openshift.io/spec-hash":"9990b01407e5c494575f11e085c6db4ae529950efc395327c862e3f638972a84"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/config":"39282"}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["openshift-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9506bdcf97d5200cf2cf4cdf110aebafdd141a24f6589bf1e1cfe27bb7fc1ed2","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/etc/pki/ca-trust/extracted/pem","name":"proxy-ca-bundles"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"configMap":{"items":[{"key":"ca-bundle.crt","path":"tls-ca-bundle.pem"}],"name":"openshift-global-ca"},"name":"proxy-ca-bundles"},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:19:06.396246021+00:00 stderr F I1212 16:19:06.394533 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:19:06.396246021+00:00 stderr F I1212 16:19:06.395832 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/controller-manager -n openshift-controller-manager because it changed 2025-12-12T16:19:06.400296851+00:00 stderr F I1212 16:19:06.400211 1 apps.go:155] Deployment "openshift-route-controller-manager/route-controller-manager" changes: 
{"metadata":{"annotations":{"operator.openshift.io/spec-hash":"ce7dc34102648b8da91c1619d5f2dbd89e87ea3f5720128c30e323bda0dafdde"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/config":"39284"}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["route-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7a726c68cebc9b08edd734a8bae5150ae5950f7734fe9b9c2a6e0d06f21cc095","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"route-controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:19:06.417342782+00:00 stderr F I1212 16:19:06.415788 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:19:06.417342782+00:00 stderr F I1212 16:19:06.416384 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/route-controller-manager -n openshift-route-controller-manager because it changed 2025-12-12T16:19:24.248094866+00:00 stderr F I1212 16:19:24.247577 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:26.328818046+00:00 stderr F I1212 16:19:26.328298 1 core.go:352] ConfigMap "openshift-controller-manager/config" changes: {"apiVersion":"v1","data":{"openshift-controller-manager.client-ca.configmap":"UePdZw=="},"kind":"ConfigMap","metadata":{"creationTimestamp":null,"managedFields":null,"resourceVersion":null,"uid":null}} 2025-12-12T16:19:26.329010831+00:00 stderr F I1212 16:19:26.328975 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ConfigMapUpdateFailed' Failed to update ConfigMap/config -n openshift-controller-manager: Operation cannot be fulfilled on configmaps "config": the object has been modified; please apply your changes to the latest version and try again 
2025-12-12T16:19:26.344882079+00:00 stderr F I1212 16:19:26.344804 1 core.go:352] ConfigMap "openshift-route-controller-manager/config" changes: {"apiVersion":"v1","data":{"openshift-route-controller-manager.client-ca.configmap":"UePdZw=="},"kind":"ConfigMap","metadata":{"creationTimestamp":null,"managedFields":null,"resourceVersion":null,"uid":null}} 2025-12-12T16:19:26.345062764+00:00 stderr F I1212 16:19:26.345008 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ConfigMapUpdateFailed' Failed to update ConfigMap/config -n openshift-route-controller-manager: Operation cannot be fulfilled on configmaps "config": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:26.353574617+00:00 stderr F I1212 16:19:26.353508 1 apps.go:155] Deployment "openshift-controller-manager/controller-manager" changes: {"metadata":{"annotations":{"operator.openshift.io/spec-hash":"b60297b4154e637c1cea7a10a2700f6d1202f86f379d13e97bab066bb0d41f29"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/config":null}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["openshift-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9506bdcf97d5200cf2cf4cdf110aebafdd141a24f6589bf1e1cfe27bb7fc1ed2","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/etc/pki/ca-trust/extracted/pem","name":"proxy-ca-bundles"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"configMap":{"items":[{"key":"ca-bundle.crt","path":"tls-ca-bundle.pem"}],"name":"openshift-global-ca"},"name":"proxy-ca-bundles"},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:19:26.360091401+00:00 stderr F I1212 16:19:26.360034 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:19:26.360859920+00:00 stderr F I1212 16:19:26.360823 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): 
type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/controller-manager -n openshift-controller-manager because it changed 2025-12-12T16:19:26.365691481+00:00 stderr F I1212 16:19:26.365229 1 apps.go:155] Deployment "openshift-route-controller-manager/route-controller-manager" changes: {"metadata":{"annotations":{"operator.openshift.io/spec-hash":"5987c0c98e5277f467510eeef2969eca1ed0a1ac2564ffcf8c6001c5fd8756e8"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/config":null}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["route-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7a726c68cebc9b08edd734a8bae5150ae5950f7734fe9b9c2a6e0d06f21cc095","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"route-controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:19:26.375458637+00:00 stderr F I1212 16:19:26.375390 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:19:26.375836546+00:00 stderr F I1212 16:19:26.375764 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/route-controller-manager -n openshift-route-controller-manager because it changed 2025-12-12T16:19:31.659500987+00:00 stderr F I1212 16:19:31.658934 1 reflector.go:430] "Caches populated" type="*v1.ClusterRoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:34.262286548+00:00 stderr F I1212 16:19:34.259943 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:34.738387192+00:00 stderr F I1212 16:19:34.738298 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:35.921596071+00:00 stderr F I1212 16:19:35.921511 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:40.087911328+00:00 stderr F I1212 16:19:40.087104 1 
reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:40.947482120+00:00 stderr F I1212 16:19:40.945625 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:41.381635710+00:00 stderr F I1212 16:19:41.381572 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:42.186262253+00:00 stderr F I1212 16:19:42.186196 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:46.334999779+00:00 stderr F I1212 16:19:46.334462 1 core.go:352] ConfigMap "openshift-route-controller-manager/config" changes: {"apiVersion":"v1","data":{"openshift-route-controller-manager.client-ca.configmap":"UePdZw=="},"kind":"ConfigMap","metadata":{"creationTimestamp":null,"managedFields":null,"resourceVersion":null,"uid":null}} 2025-12-12T16:19:46.335261635+00:00 stderr F I1212 16:19:46.335165 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ConfigMapUpdateFailed' Failed to update ConfigMap/config -n openshift-route-controller-manager: Operation cannot be fulfilled on configmaps "config": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:46.344102167+00:00 stderr F I1212 16:19:46.344027 1 apps.go:155] Deployment "openshift-controller-manager/controller-manager" changes: 
{"metadata":{"annotations":{"operator.openshift.io/spec-hash":"9990b01407e5c494575f11e085c6db4ae529950efc395327c862e3f638972a84"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/config":"39282"}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["openshift-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9506bdcf97d5200cf2cf4cdf110aebafdd141a24f6589bf1e1cfe27bb7fc1ed2","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/etc/pki/ca-trust/extracted/pem","name":"proxy-ca-bundles"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"configMap":{"items":[{"key":"ca-bundle.crt","path":"tls-ca-bundle.pem"}],"name":"openshift-global-ca"},"name":"proxy-ca-bundles"},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:19:46.351823261+00:00 stderr F I1212 16:19:46.351757 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:19:46.352409856+00:00 stderr F I1212 16:19:46.352127 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/controller-manager -n openshift-controller-manager because it changed 2025-12-12T16:19:46.396049852+00:00 stderr F I1212 16:19:46.395917 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:49.354792529+00:00 stderr F I1212 16:19:49.354277 1 reflector.go:430] "Caches populated" type="*v1.ClusterRole" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:50.720981302+00:00 stderr F I1212 16:19:50.720883 1 reflector.go:430] "Caches populated" type="*v1.Image" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:50.754777030+00:00 stderr F I1212 16:19:50.754725 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:50.940209336+00:00 stderr F I1212 16:19:50.940132 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 
2025-12-12T16:19:53.779379791+00:00 stderr F I1212 16:19:53.779302 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:56.651799861+00:00 stderr F I1212 16:19:56.650585 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:58.994951984+00:00 stderr F I1212 16:19:58.994868 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:00.310300398+00:00 stderr F I1212 16:20:00.309777 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:01.903519161+00:00 stderr F I1212 16:20:01.903451 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:05.065271626+00:00 stderr F I1212 16:20:05.064079 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:05.482594004+00:00 stderr F I1212 16:20:05.482512 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:05.811785710+00:00 stderr F I1212 16:20:05.811704 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:05.956869852+00:00 stderr F I1212 16:20:05.956758 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:06.395788722+00:00 stderr F I1212 16:20:06.395743 1 core.go:352] ConfigMap "openshift-route-controller-manager/config" changes: {"apiVersion":"v1","data":{"openshift-route-controller-manager.client-ca.configmap":"UePdZw=="},"kind":"ConfigMap","metadata":{"creationTimestamp":null,"managedFields":null,"resourceVersion":null,"uid":null}} 2025-12-12T16:20:06.396089220+00:00 stderr F I1212 16:20:06.396023 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ConfigMapUpdateFailed' Failed to update ConfigMap/config -n openshift-route-controller-manager: Operation cannot be fulfilled on configmaps "config": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:06.578315255+00:00 stderr F I1212 16:20:06.578195 1 reflector.go:430] "Caches populated" type="*v1.Build" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:06.736794114+00:00 stderr F I1212 16:20:06.736687 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:07.003355287+00:00 stderr F I1212 16:20:07.003282 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:07.327683350+00:00 stderr F I1212 16:20:07.327609 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:07.583510774+00:00 stderr F I1212 16:20:07.583434 1 reflector.go:430] "Caches populated" 
type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:08.736629677+00:00 stderr F I1212 16:20:08.736528 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:10.494012790+00:00 stderr F I1212 16:20:10.493948 1 reflector.go:430] "Caches populated" type="*v1.OpenShiftControllerManager" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:10.500041181+00:00 stderr F I1212 16:20:10.499966 1 status_controller.go:229] clusteroperator/openshift-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:51:57Z","message":"RouteControllerManagerDegraded: \"route-controller-manager\" \"configmap\": Operation cannot be fulfilled on configmaps \"config\": the object has been modified; please apply your changes to the latest version and try again\nRouteControllerManagerDegraded: ","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:10Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:46Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:01Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:57Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:10.511503349+00:00 stderr F I1212 16:20:10.511420 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-controller-manager changed: Degraded message changed from "All is well" to "RouteControllerManagerDegraded: \"route-controller-manager\" \"configmap\": Operation cannot be fulfilled on configmaps \"config\": the object has been modified; please apply your changes to the latest version and try again\nRouteControllerManagerDegraded: ",Progressing changed from True to False ("All is well") 2025-12-12T16:20:10.876836112+00:00 stderr F I1212 16:20:10.876707 1 reflector.go:430] "Caches populated" type="*v1.Deployment" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:12.550814173+00:00 stderr F I1212 16:20:12.550524 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:13.249987058+00:00 stderr F I1212 16:20:13.249607 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:16.659395361+00:00 stderr F I1212 16:20:16.659322 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:16.793566540+00:00 stderr F I1212 16:20:16.793493 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:17.853570384+00:00 stderr F I1212 16:20:17.853142 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:19.691383418+00:00 stderr F I1212 
16:20:19.691305 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:22.132307064+00:00 stderr F I1212 16:20:22.130992 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:23.384942935+00:00 stderr F I1212 16:20:23.384867 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:23.602703233+00:00 stderr F I1212 16:20:23.602576 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:25.277419101+00:00 stderr F I1212 16:20:25.277363 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:26.342540054+00:00 stderr F I1212 16:20:26.342442 1 core.go:352] ConfigMap "openshift-route-controller-manager/config" changes: {"apiVersion":"v1","data":{"openshift-route-controller-manager.client-ca.configmap":"UePdZw=="},"kind":"ConfigMap","metadata":{"creationTimestamp":null,"managedFields":null,"resourceVersion":null,"uid":null}} 2025-12-12T16:20:26.342758140+00:00 stderr F I1212 16:20:26.342689 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ConfigMapUpdateFailed' Failed to update ConfigMap/config -n openshift-route-controller-manager: Operation cannot be fulfilled on configmaps "config": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:26.386430656+00:00 stderr F I1212 16:20:26.386299 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:26.602136232+00:00 stderr F I1212 16:20:26.602021 1 reflector.go:430] "Caches populated" type="*v1.Network" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:26.721158981+00:00 stderr F I1212 16:20:26.721100 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:29.882006035+00:00 stderr F I1212 16:20:29.881015 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:29.888686330+00:00 stderr F I1212 16:20:29.887750 1 status_controller.go:229] clusteroperator/openshift-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:51:57Z","message":"RouteControllerManagerDegraded: \"route-controller-manager\" \"configmap\": Operation cannot be fulfilled on configmaps \"config\": the object has been modified; please apply your changes to the latest version and try again\nRouteControllerManagerDegraded: ","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:29Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:46Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:01Z","message":"All is 
well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:57Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:29.896690160+00:00 stderr F E1212 16:20:29.896578 1 base_controller.go:279] "Unhandled Error" err="StatusSyncer_openshift-controller-manager reconciliation failed: Operation cannot be fulfilled on clusteroperators.config.openshift.io \"openshift-controller-manager\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:20:46.360219243+00:00 stderr F I1212 16:20:46.359117 1 apps.go:155] Deployment "openshift-route-controller-manager/route-controller-manager" changes: {"metadata":{"annotations":{"operator.openshift.io/spec-hash":"ce7dc34102648b8da91c1619d5f2dbd89e87ea3f5720128c30e323bda0dafdde"}},"spec":{"progressDeadlineSeconds":null,"revisionHistoryLimit":null,"template":{"metadata":{"annotations":{"configmaps/config":"39284"}},"spec":{"containers":[{"args":["--config=/var/run/configmaps/config/config.yaml","-v=2"],"command":["route-controller-manager","start"],"env":[{"name":"POD_NAME","valueFrom":{"fieldRef":{"fieldPath":"metadata.name"}}}],"image":"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7a726c68cebc9b08edd734a8bae5150ae5950f7734fe9b9c2a6e0d06f21cc095","imagePullPolicy":"IfNotPresent","livenessProbe":{"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"},"initialDelaySeconds":30},"name":"route-controller-manager","ports":[{"containerPort":8443}],"readinessProbe":{"failureThreshold":10,"httpGet":{"path":"healthz","port":8443,"scheme":"HTTPS"}},"resources":{"requests":{"cpu":"100m","memory":"100Mi"}},"securityContext":{"allowPrivilegeEscalation":false,"capabilities":{"drop":["ALL"]},"readOnlyRootFilesystem":true},"terminationMessagePolicy":"FallbackToLogsOnError","volumeMounts":[{"mountPath":"/var/run/configmaps/config","name":"config"},{"mountPath":"/var/run/configmaps/client-ca","name":"client-ca"},{"mountPath":"/var/run/secrets/serving-cert","name":"serving-cert"},{"mountPath":"/tmp","name":"tmp"}]}],"dnsPolicy":null,"restartPolicy":null,"schedulerName":null,"serviceAccount":null,"terminationGracePeriodSeconds":null,"volumes":[{"configMap":{"name":"config"},"name":"config"},{"configMap":{"name":"client-ca"},"name":"client-ca"},{"name":"serving-cert","secret":{"secretName":"serving-cert"}},{"emptyDir":{},"name":"tmp"}]}}}} 2025-12-12T16:20:46.367723180+00:00 stderr F I1212 16:20:46.367242 1 warnings.go:110] "Warning: spec.template.spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:20:46.369301051+00:00 stderr F I1212 16:20:46.369238 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'DeploymentUpdated' Updated Deployment.apps/route-controller-manager -n openshift-route-controller-manager because it changed 2025-12-12T16:20:46.387543219+00:00 stderr F I1212 16:20:46.387490 1 status_controller.go:229] clusteroperator/openshift-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:51:57Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:20:46Z","message":"RouteControllerManagerProgressing: 
deployment/route-controller-manager: observed generation is 19, desired generation is 20","reason":"RouteControllerManager_DesiredStateNotYetAchieved","status":"True","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:46Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:01Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:57Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:46.398395834+00:00 stderr F I1212 16:20:46.398328 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-controller-manager changed: Degraded message changed from "RouteControllerManagerDegraded: \"route-controller-manager\" \"configmap\": Operation cannot be fulfilled on configmaps \"config\": the object has been modified; please apply your changes to the latest version and try again\nRouteControllerManagerDegraded: " to "All is well",Progressing changed from False to True ("RouteControllerManagerProgressing: deployment/route-controller-manager: observed generation is 19, desired generation is 20") 2025-12-12T16:21:06.354532258+00:00 stderr F I1212 16:21:06.354052 1 status_controller.go:229] clusteroperator/openshift-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:51:57Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:21:06Z","message":"All is well","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-12-12T16:17:46Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:01Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:57Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:21:06.362088316+00:00 stderr F I1212 16:21:06.360155 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-controller-manager-operator", Name:"openshift-controller-manager-operator", UID:"5effb0d2-94d8-48b7-8c69-e538f7848429", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/openshift-controller-manager changed: Progressing changed from True to False ("All is well")
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_perses-operator-68bdb49cbf-nqtp8_f38bca5c-15f3-4d63-9c03-a33ec7a5f22b/perses-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_pe0000755000175000017500000000000015117043063033152 5ustar zuulzuul././@LongLink0000644000000000000000000000027600000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_perses-operator-68bdb49cbf-nqtp8_f38bca5c-15f3-4d63-9c03-a33ec7a5f22b/perses-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_pe0000644000175000017500000000653615117043044033165 0ustar zuulzuul2025-12-12T16:27:30.299683509+00:00 stderr F 2025-12-12T16:27:30Z INFO setup starting manager 2025-12-12T16:27:30.300043839+00:00 stderr F 2025-12-12T16:27:30Z INFO controller-runtime.metrics Starting metrics server 2025-12-12T16:27:30.300140001+00:00 stderr F 2025-12-12T16:27:30Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8082", "secure": false} 2025-12-12T16:27:30.300221923+00:00 stderr F 2025-12-12T16:27:30Z INFO starting server {"name": "pprof", "addr": "127.0.0.1:8083"} 2025-12-12T16:27:30.300261484+00:00 stderr F 2025-12-12T16:27:30Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-12-12T16:27:30.300500420+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "perses", "controllerGroup": "perses.dev", "controllerKind": "Perses", "source": "kind source: *v1.Service"} 2025-12-12T16:27:30.301741772+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "persesdatasource", "controllerGroup": "perses.dev", "controllerKind": "PersesDatasource", "source": "kind source: *v1alpha1.PersesDatasource"} 2025-12-12T16:27:30.301741772+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "persesdashboard", "controllerGroup": "perses.dev", "controllerKind": "PersesDashboard", "source": "kind source: *v1alpha1.PersesDashboard"} 2025-12-12T16:27:30.301741772+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "perses", "controllerGroup": "perses.dev", "controllerKind": "Perses", "source": "kind source: *v1.Deployment"} 2025-12-12T16:27:30.301763292+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "perses", "controllerGroup": "perses.dev", "controllerKind": "Perses", "source": "kind source: *v1alpha1.Perses"} 2025-12-12T16:27:30.301823574+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "perses", "controllerGroup": "perses.dev", "controllerKind": "Perses", "source": "kind source: *v1.StatefulSet"} 2025-12-12T16:27:30.301823574+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting EventSource {"controller": "perses", "controllerGroup": "perses.dev", "controllerKind": "Perses", "source": "kind source: *v1.ConfigMap"} 2025-12-12T16:27:30.727281151+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting Controller {"controller": "persesdashboard", "controllerGroup": "perses.dev", "controllerKind": "PersesDashboard"} 2025-12-12T16:27:30.727281151+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting workers {"controller": "persesdashboard", "controllerGroup": "perses.dev", "controllerKind": "PersesDashboard", "worker count": 1} 2025-12-12T16:27:30.727281151+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting Controller {"controller": 
"persesdatasource", "controllerGroup": "perses.dev", "controllerKind": "PersesDatasource"} 2025-12-12T16:27:30.727281151+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting workers {"controller": "persesdatasource", "controllerGroup": "perses.dev", "controllerKind": "PersesDatasource", "worker count": 1} 2025-12-12T16:27:30.729316203+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting Controller {"controller": "perses", "controllerGroup": "perses.dev", "controllerKind": "Perses"} 2025-12-12T16:27:30.729316203+00:00 stderr F 2025-12-12T16:27:30Z INFO Starting workers {"controller": "perses", "controllerGroup": "perses.dev", "controllerKind": "Perses", "worker count": 1} ././@LongLink0000644000000000000000000000025100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator0000755000175000017500000000000015117043043033063 5ustar zuulzuul././@LongLink0000644000000000000000000000026600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/dns-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator0000755000175000017500000000000015117043062033064 5ustar zuulzuul././@LongLink0000644000000000000000000000027300000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/dns-operator/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator0000644000175000017500000003347515117043043033101 0ustar zuulzuul2025-12-12T16:16:47.573307132+00:00 stderr F I1212 16:16:47.570542 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:47.665453431+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="FeatureGates initializedknownFeatures[AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk AzureWorkloadIdentity BootImageSkewEnforcement BootcNodeManagement BuildCSIVolumes CPMSMachineNamePrefix ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration ConsolePluginContentSecurityPolicy DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageModeStatusReporting ImageStreamImportMode ImageVolume IngressControllerDynamicConfigurationManager IngressControllerLBSubnetsAWS InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider KMSv1 MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController MachineConfigNodes ManagedBootImages ManagedBootImagesAWS ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MetricsCollectionProfiles 
MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PinnedImages PreconfiguredUDNAddresses ProcMountType RouteAdvertisements RouteExternalCertificate SELinuxMount ServiceAccountTokenNodeBinding SetEIPForNLBIngressController ShortCertRotation SignatureStores SigstoreImageVerification SigstoreImageVerificationPKI StoragePerformantSecurityPolicy TranslateStreamCloseWebsocketRequests UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VSphereMultiDisk VSphereMultiNetworks VolumeAttributesClass VolumeGroupSnapshot]" 2025-12-12T16:16:47.665453431+00:00 stderr F I1212 16:16:47.665278 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-dns-operator", Name:"dns-operator", UID:"", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", 
"NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:47.674280067+00:00 stderr F 2025-12-12T16:16:47Z INFO controller-runtime.metrics Starting metrics server 2025-12-12T16:16:47.674280067+00:00 stderr F 2025-12-12T16:16:47Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": "127.0.0.1:60000", "secure": false} 2025-12-12T16:16:47.674602625+00:00 stderr F 2025-12-12T16:16:47Z INFO Starting EventSource {"controller": "dns_controller", "source": "kind source: *v1.DNS"} 2025-12-12T16:16:47.674637696+00:00 stderr F 2025-12-12T16:16:47Z INFO Starting EventSource {"controller": "dns_controller", "source": "kind source: *v1.DaemonSet"} 2025-12-12T16:16:47.674661166+00:00 stderr F 2025-12-12T16:16:47Z INFO Starting EventSource {"controller": "dns_controller", "source": "kind source: *v1.Service"} 2025-12-12T16:16:47.674683607+00:00 stderr F 2025-12-12T16:16:47Z INFO Starting EventSource {"controller": "dns_controller", "source": "kind source: *v1.ConfigMap"} 2025-12-12T16:16:47.674707797+00:00 stderr F 2025-12-12T16:16:47Z INFO Starting EventSource {"controller": "dns_controller", "source": "kind source: *v1.ConfigMap"} 2025-12-12T16:16:47.674732498+00:00 stderr F 2025-12-12T16:16:47Z INFO Starting EventSource {"controller": "dns_controller", "source": "kind source: *v1.Node"} 2025-12-12T16:16:47.674756118+00:00 stderr F 2025-12-12T16:16:47Z INFO Starting Controller {"controller": "dns_controller"} 2025-12-12T16:16:47.675541518+00:00 stderr F 2025-12-12T16:16:47Z INFO Starting EventSource {"controller": "status_controller", "source": "kind source: *v1.DNS"} 2025-12-12T16:16:47.675541518+00:00 stderr F 2025-12-12T16:16:47Z INFO Starting EventSource {"controller": "status_controller", "source": "kind source: *v1.DaemonSet"} 2025-12-12T16:16:47.675561418+00:00 stderr F 2025-12-12T16:16:47Z INFO Starting EventSource {"controller": "status_controller", "source": "kind source: *v1.ClusterOperator"} 2025-12-12T16:16:47.675561418+00:00 stderr F 2025-12-12T16:16:47Z INFO Starting Controller {"controller": "status_controller"} 2025-12-12T16:16:48.164889915+00:00 stderr F 2025-12-12T16:16:48Z INFO Starting workers {"controller": "status_controller", "worker count": 1} 2025-12-12T16:16:48.249615873+00:00 stderr F 2025-12-12T16:16:48Z INFO Starting workers {"controller": "dns_controller", "worker count": 1} 2025-12-12T16:16:48.249615873+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="reconciling request: /default" 2025-12-12T16:16:48.622656191+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="updated DNS default status: old: v1.DNSStatus{ClusterIP:\"10.217.4.10\", ClusterDomain:\"cluster.local\", Conditions:[]v1.OperatorCondition{v1.OperatorCondition{Type:\"Degraded\", Status:\"True\", LastTransitionTime:time.Date(2025, time.November, 3, 9, 40, 47, 0, time.Local), Reason:\"NoDNSPodsDesired\", Message:\"No DNS pods are desired; this could mean all nodes are tainted or unschedulable.\"}, v1.OperatorCondition{Type:\"Progressing\", Status:\"True\", LastTransitionTime:time.Date(2025, time.November, 3, 9, 40, 48, 0, time.Local), Reason:\"Reconciling\", 
Message:\"Have 0 available node-resolver pods, want 1.\"}, v1.OperatorCondition{Type:\"Available\", Status:\"False\", LastTransitionTime:time.Date(2025, time.November, 3, 9, 40, 47, 0, time.Local), Reason:\"NoDaemonSetPods\", Message:\"The DNS daemonset has no pods available.\"}, v1.OperatorCondition{Type:\"Upgradeable\", Status:\"True\", LastTransitionTime:time.Date(2025, time.November, 2, 7, 55, 11, 0, time.Local), Reason:\"AsExpected\", Message:\"DNS Operator can be upgraded\"}}}, new: v1.DNSStatus{ClusterIP:\"10.217.4.10\", ClusterDomain:\"cluster.local\", Conditions:[]v1.OperatorCondition{v1.OperatorCondition{Type:\"Degraded\", Status:\"True\", LastTransitionTime:time.Date(2025, time.November, 3, 9, 40, 47, 0, time.Local), Reason:\"NoDNSPodsAvailable\", Message:\"No DNS pods are available.\"}, v1.OperatorCondition{Type:\"Progressing\", Status:\"True\", LastTransitionTime:time.Date(2025, time.November, 3, 9, 40, 48, 0, time.Local), Reason:\"Reconciling\", Message:\"Have 0 available DNS pods, want 1.\"}, v1.OperatorCondition{Type:\"Available\", Status:\"False\", LastTransitionTime:time.Date(2025, time.November, 3, 9, 40, 47, 0, time.Local), Reason:\"NoDaemonSetPods\", Message:\"The DNS daemonset has no pods available.\"}, v1.OperatorCondition{Type:\"Upgradeable\", Status:\"True\", LastTransitionTime:time.Date(2025, time.November, 2, 7, 55, 11, 0, time.Local), Reason:\"AsExpected\", Message:\"DNS Operator can be upgraded\"}}}" 2025-12-12T16:16:48.634771537+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="reconciling request: /default" 2025-12-12T16:16:57.775618142+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="reconciling request: /default" 2025-12-12T16:17:06.783685128+00:00 stderr F time="2025-12-12T16:17:06Z" level=info msg="reconciling request: /default" 2025-12-12T16:17:06.845430675+00:00 stderr F time="2025-12-12T16:17:06Z" level=info msg="updated DNS default status: old: v1.DNSStatus{ClusterIP:\"10.217.4.10\", ClusterDomain:\"cluster.local\", Conditions:[]v1.OperatorCondition{v1.OperatorCondition{Type:\"Degraded\", Status:\"True\", LastTransitionTime:time.Date(2025, time.November, 3, 9, 40, 47, 0, time.Local), Reason:\"NoDNSPodsAvailable\", Message:\"No DNS pods are available.\"}, v1.OperatorCondition{Type:\"Progressing\", Status:\"True\", LastTransitionTime:time.Date(2025, time.November, 3, 9, 40, 48, 0, time.Local), Reason:\"Reconciling\", Message:\"Have 0 available DNS pods, want 1.\"}, v1.OperatorCondition{Type:\"Available\", Status:\"False\", LastTransitionTime:time.Date(2025, time.November, 3, 9, 40, 47, 0, time.Local), Reason:\"NoDaemonSetPods\", Message:\"The DNS daemonset has no pods available.\"}, v1.OperatorCondition{Type:\"Upgradeable\", Status:\"True\", LastTransitionTime:time.Date(2025, time.November, 2, 7, 55, 11, 0, time.Local), Reason:\"AsExpected\", Message:\"DNS Operator can be upgraded\"}}}, new: v1.DNSStatus{ClusterIP:\"10.217.4.10\", ClusterDomain:\"cluster.local\", Conditions:[]v1.OperatorCondition{v1.OperatorCondition{Type:\"Degraded\", Status:\"False\", LastTransitionTime:time.Date(2025, time.December, 12, 16, 17, 6, 0, time.Local), Reason:\"AsExpected\", Message:\"Enough DNS pods are available, and the DNS service has a cluster IP address.\"}, v1.OperatorCondition{Type:\"Progressing\", Status:\"False\", LastTransitionTime:time.Date(2025, time.December, 12, 16, 17, 6, 0, time.Local), Reason:\"AsExpected\", Message:\"All DNS and node-resolver pods are available, and the DNS service has a cluster IP address.\"}, 
v1.OperatorCondition{Type:\"Available\", Status:\"True\", LastTransitionTime:time.Date(2025, time.December, 12, 16, 17, 6, 0, time.Local), Reason:\"AsExpected\", Message:\"The DNS daemonset has available pods, and the DNS service has a cluster IP address.\"}, v1.OperatorCondition{Type:\"Upgradeable\", Status:\"True\", LastTransitionTime:time.Date(2025, time.November, 2, 7, 55, 11, 0, time.Local), Reason:\"AsExpected\", Message:\"DNS Operator can be upgraded\"}}}" 2025-12-12T16:17:06.847337252+00:00 stderr F time="2025-12-12T16:17:06Z" level=info msg="reconciling request: /default" 2025-12-12T16:17:44.885849475+00:00 stderr F time="2025-12-12T16:17:44Z" level=info msg="reconciling request: /default" 2025-12-12T16:17:47.101895201+00:00 stderr F time="2025-12-12T16:17:47Z" level=info msg="reconciling request: /default" 2025-12-12T16:18:47.871416498+00:00 stderr F time="2025-12-12T16:18:47Z" level=error msg="failed to ensure default dns Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/dnses/default\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:19:31.206309559+00:00 stderr F time="2025-12-12T16:19:31Z" level=info msg="reconciling request: /default" 2025-12-12T16:19:31.237041200+00:00 stderr F time="2025-12-12T16:19:31Z" level=info msg="reconciling request: /default" 2025-12-12T16:19:46.607687006+00:00 stderr F time="2025-12-12T16:19:46Z" level=info msg="reconciling request: /default" 2025-12-12T16:19:57.014606471+00:00 stderr F time="2025-12-12T16:19:57Z" level=info msg="reconciling request: /default" 2025-12-12T16:20:16.350119396+00:00 stderr F time="2025-12-12T16:20:16Z" level=info msg="reconciling request: /default" 2025-12-12T16:20:20.434088466+00:00 stderr F time="2025-12-12T16:20:20Z" level=info msg="reconciling request: /default" 2025-12-12T16:20:20.497987750+00:00 stderr F time="2025-12-12T16:20:20Z" level=info msg="reconciling request: /default" ././@LongLink0000644000000000000000000000027100000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator0000755000175000017500000000000015117043062033064 5ustar zuulzuul././@LongLink0000644000000000000000000000027600000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/kube-rbac-proxy/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator0000644000175000017500000000202015117043043033057 0ustar zuulzuul2025-12-12T16:16:48.373242642+00:00 stderr F W1212 16:16:48.363268 1 deprecated.go:66] 2025-12-12T16:16:48.373242642+00:00 stderr F ==== Removed Flag Warning ====================== 2025-12-12T16:16:48.373242642+00:00 stderr F 2025-12-12T16:16:48.373242642+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more. 
2025-12-12T16:16:48.373242642+00:00 stderr F 2025-12-12T16:16:48.373242642+00:00 stderr F =============================================== 2025-12-12T16:16:48.373242642+00:00 stderr F 2025-12-12T16:16:48.373242642+00:00 stderr F I1212 16:16:48.368509 1 kube-rbac-proxy.go:235] Valid token audiences: 2025-12-12T16:16:48.381733399+00:00 stderr F I1212 16:16:48.379656 1 kube-rbac-proxy.go:349] Reading certificate files 2025-12-12T16:16:48.381733399+00:00 stderr F I1212 16:16:48.380134 1 kube-rbac-proxy.go:397] Starting TCP socket on :9393 2025-12-12T16:16:48.381733399+00:00 stderr F I1212 16:16:48.380554 1 kube-rbac-proxy.go:404] Listening securely on :9393
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-5b9c976747-9wbcx_8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7/service-ca-operator/0.log
2025-12-12T16:16:45.130287397+00:00 stderr F I1212 16:16:45.128508 1 cmd.go:253] Using service-serving-cert provided certificates 2025-12-12T16:16:45.130287397+00:00 stderr F I1212 16:16:45.128990 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}.
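(Illustrative command, not captured output: the leader-election settings logged above correspond to the "service-ca-operator-lock" Lease in the openshift-service-ca-operator namespace, as named in the LeaderElection event and lease URL later in this log. Assuming access to the cluster that produced these logs, the current holder and renew time of that lease could be checked with:

  oc get lease service-ca-operator-lock -n openshift-service-ca-operator -o yaml
)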
2025-12-12T16:16:45.130287397+00:00 stderr F I1212 16:16:45.129890 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:45.130287397+00:00 stderr F I1212 16:16:45.129996 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:16:45.130287397+00:00 stderr F I1212 16:16:45.130009 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:16:45.130287397+00:00 stderr F I1212 16:16:45.130020 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:16:45.130287397+00:00 stderr F I1212 16:16:45.130025 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:16:45.250709477+00:00 stderr F I1212 16:16:45.243696 1 builder.go:304] service-ca-operator version - 2025-12-12T16:16:45.250709477+00:00 stderr F I1212 16:16:45.249999 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:46.008230761+00:00 stderr F I1212 16:16:46.005299 1 requestheader_controller.go:255] Loaded a new request header values for RequestHeaderAuthRequestController 2025-12-12T16:16:46.021506865+00:00 stderr F I1212 16:16:46.020718 1 maxinflight.go:139] "Initialized nonMutatingChan" len=400 2025-12-12T16:16:46.021506865+00:00 stderr F I1212 16:16:46.020753 1 maxinflight.go:145] "Initialized mutatingChan" len=200 2025-12-12T16:16:46.021506865+00:00 stderr F I1212 16:16:46.020776 1 maxinflight.go:116] "Set denominator for readonly requests" limit=400 2025-12-12T16:16:46.021506865+00:00 stderr F I1212 16:16:46.020781 1 maxinflight.go:120] "Set denominator for mutating requests" limit=200 2025-12-12T16:16:46.035732402+00:00 stderr F I1212 16:16:46.033744 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:46.035732402+00:00 stderr F W1212 16:16:46.033789 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:46.035732402+00:00 stderr F W1212 16:16:46.033795 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:46.035732402+00:00 stderr F W1212 16:16:46.033800 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:46.035732402+00:00 stderr F W1212 16:16:46.033804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:46.035732402+00:00 stderr F W1212 16:16:46.033807 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:46.035732402+00:00 stderr F W1212 16:16:46.033811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 2025-12-12T16:16:46.040223752+00:00 stderr F I1212 16:16:46.039622 1 genericapiserver.go:535] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete 2025-12-12T16:16:46.043242786+00:00 stderr F I1212 16:16:46.042716 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:46.044207499+00:00 stderr F I1212 16:16:46.043906 1 leaderelection.go:257] attempting to acquire leader lease openshift-service-ca-operator/service-ca-operator-lock... 
2025-12-12T16:16:46.048234668+00:00 stderr F I1212 16:16:46.047890 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-service-ca-operator.svc\" [serving] validServingFor=[metrics.openshift-service-ca-operator.svc,metrics.openshift-service-ca-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:16:46.047861709 +0000 UTC))" 2025-12-12T16:16:46.048234668+00:00 stderr F I1212 16:16:46.048206 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:46.052223175+00:00 stderr F I1212 16:16:46.048368 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:46.052223175+00:00 stderr F I1212 16:16:46.048417 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:46.052223175+00:00 stderr F I1212 16:16:46.050362 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file 2025-12-12T16:16:46.052223175+00:00 stderr F I1212 16:16:46.050553 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556205\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2026-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:16:46.048039173 +0000 UTC))" 2025-12-12T16:16:46.052223175+00:00 stderr F I1212 16:16:46.050578 1 secure_serving.go:213] Serving securely on [::]:8443 2025-12-12T16:16:46.052223175+00:00 stderr F I1212 16:16:46.050600 1 genericapiserver.go:685] [graceful-termination] waiting for shutdown to be initiated 2025-12-12T16:16:46.052223175+00:00 stderr F I1212 16:16:46.050622 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:46.052223175+00:00 stderr F I1212 16:16:46.050745 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:46.052223175+00:00 stderr F I1212 16:16:46.050899 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController 2025-12-12T16:16:46.052223175+00:00 stderr F I1212 16:16:46.050925 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 2025-12-12T16:16:46.053790883+00:00 stderr F I1212 16:16:46.053680 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.054207443+00:00 stderr F I1212 16:16:46.053813 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.055225968+00:00 stderr F I1212 16:16:46.054626 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.067320794+00:00 stderr F I1212 16:16:46.063547 1 leaderelection.go:271] successfully acquired lease openshift-service-ca-operator/service-ca-operator-lock 2025-12-12T16:16:46.067320794+00:00 stderr F I1212 16:16:46.064308 1 
event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-service-ca-operator", Name:"service-ca-operator-lock", UID:"87292836-4d30-4ad5-a0e3-7d0e1620558a", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"37186", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' service-ca-operator-5b9c976747-9wbcx_1746b679-ed69-4d95-b821-b6262ce7c2f1 became leader 2025-12-12T16:16:46.079541832+00:00 stderr F I1212 16:16:46.079268 1 starter.go:111] Fetching FeatureGates 2025-12-12T16:16:46.080293900+00:00 stderr F I1212 16:16:46.079709 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:46.087752362+00:00 stderr F I1212 16:16:46.087571 1 reflector.go:376] Caches populated for *v1.FeatureGate from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.089519286+00:00 stderr F I1212 16:16:46.088524 1 starter.go:160] Setting signing certificate lifetime to 18960h0m0s, minimum trust duration to 9480h0m0s 2025-12-12T16:16:46.089519286+00:00 stderr F I1212 16:16:46.088994 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-service-ca-operator", Name:"service-ca-operator", UID:"1703c560-9cd5-4273-a6b7-22510bce9318", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", 
"NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:46.089519286+00:00 stderr F I1212 16:16:46.089053 1 base_controller.go:76] Waiting for caches to sync for resource-sync 2025-12-12T16:16:46.090041848+00:00 stderr F I1212 16:16:46.089752 1 base_controller.go:76] Waiting for caches to sync for ServiceCAOperator 2025-12-12T16:16:46.090041848+00:00 stderr F I1212 16:16:46.089769 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer 2025-12-12T16:16:46.090041848+00:00 stderr F I1212 16:16:46.089867 1 base_controller.go:76] Waiting for caches to sync for StatusSyncer_service-ca 2025-12-12T16:16:46.130213409+00:00 stderr F I1212 16:16:46.130121 1 reflector.go:376] Caches populated for *v1.ClusterVersion from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.136704198+00:00 stderr F I1212 16:16:46.131926 1 reflector.go:376] Caches populated for *v1.Namespace from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.136704198+00:00 stderr F I1212 16:16:46.133657 1 reflector.go:376] Caches populated for *v1.ServiceAccount from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.136704198+00:00 stderr F I1212 16:16:46.134445 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.136704198+00:00 stderr F I1212 16:16:46.134642 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.136704198+00:00 stderr F I1212 16:16:46.134909 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.136704198+00:00 stderr F I1212 16:16:46.135540 1 reflector.go:376] Caches populated for *v1.Infrastructure from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.136704198+00:00 stderr F I1212 16:16:46.135794 1 reflector.go:376] Caches populated for *v1.Secret from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.140307596+00:00 stderr F I1212 16:16:46.139814 1 reflector.go:376] Caches populated for *v1.ClusterOperator from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.140307596+00:00 stderr F I1212 16:16:46.140054 1 reflector.go:376] Caches populated for *v1.Secret from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.148416884+00:00 stderr F I1212 16:16:46.134639 1 reflector.go:376] Caches populated for *v1.Deployment from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.154054141+00:00 stderr F I1212 16:16:46.153331 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.164159238+00:00 stderr F I1212 16:16:46.162862 1 reflector.go:376] Caches populated for *v1.ServiceCA from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.164159238+00:00 stderr F I1212 16:16:46.163091 1 reflector.go:376] Caches populated for 
*v1.Secret from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.164159238+00:00 stderr F I1212 16:16:46.163776 1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 2025-12-12T16:16:46.164159238+00:00 stderr F I1212 16:16:46.163811 1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file 2025-12-12T16:16:46.164159238+00:00 stderr F I1212 16:16:46.164110 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:46.164081656 +0000 UTC))" 2025-12-12T16:16:46.164159238+00:00 stderr F I1212 16:16:46.164130 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:46.164118947 +0000 UTC))" 2025-12-12T16:16:46.164207159+00:00 stderr F I1212 16:16:46.164143 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:46.164135227 +0000 UTC))" 2025-12-12T16:16:46.164207159+00:00 stderr F I1212 16:16:46.164173 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:46.164163258 +0000 UTC))" 2025-12-12T16:16:46.164230400+00:00 stderr F I1212 16:16:46.164211 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:46.164196099 +0000 UTC))" 2025-12-12T16:16:46.164237470+00:00 stderr F I1212 16:16:46.164229 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:46.164218449 +0000 UTC))" 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.164245 1 tlsconfig.go:181] "Loaded client CA" index=6 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:46.16423386 +0000 UTC))" 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.164461 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-service-ca-operator.svc\" [serving] validServingFor=[metrics.openshift-service-ca-operator.svc,metrics.openshift-service-ca-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:16:46.164447785 +0000 UTC))" 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.164602 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556205\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2026-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:16:46.164590848 +0000 UTC))" 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.164614 1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.164793 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:46.164779873 +0000 UTC))" 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.164807 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:46.164799364 +0000 UTC))" 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.164819 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:46.164811354 +0000 UTC))" 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.164833 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:46.164824184 +0000 UTC))" 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.164846 1 tlsconfig.go:181] "Loaded client CA" index=4 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:46.164837004 +0000 UTC))" 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.164860 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:46.164851855 +0000 UTC))" 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.164873 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:46.164865065 +0000 UTC))" 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.164886 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:46.164877245 +0000 UTC))" 2025-12-12T16:16:46.165245304+00:00 stderr F I1212 16:16:46.165060 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-service-ca-operator.svc\" [serving] validServingFor=[metrics.openshift-service-ca-operator.svc,metrics.openshift-service-ca-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:16:46.165033679 +0000 UTC))" 2025-12-12T16:16:46.165287995+00:00 stderr F I1212 16:16:46.165272 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556205\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2026-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:16:46.165259305 +0000 UTC))" 2025-12-12T16:16:46.177254888+00:00 stderr F I1212 16:16:46.176164 1 reflector.go:376] Caches populated for *v1.Secret from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.195819061+00:00 stderr F I1212 16:16:46.195536 1 base_controller.go:82] Caches are synced for StatusSyncer_service-ca 2025-12-12T16:16:46.195819061+00:00 stderr F I1212 16:16:46.195571 1 base_controller.go:119] Starting #1 worker of StatusSyncer_service-ca controller ... 
2025-12-12T16:16:46.195819061+00:00 stderr F I1212 16:16:46.195598 1 base_controller.go:82] Caches are synced for ServiceCAOperator 2025-12-12T16:16:46.195819061+00:00 stderr F I1212 16:16:46.195621 1 base_controller.go:119] Starting #1 worker of ServiceCAOperator controller ... 2025-12-12T16:16:46.198227050+00:00 stderr F I1212 16:16:46.195601 1 base_controller.go:82] Caches are synced for LoggingSyncer 2025-12-12T16:16:46.198227050+00:00 stderr F I1212 16:16:46.196044 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ... 2025-12-12T16:16:46.365537474+00:00 stderr F I1212 16:16:46.362046 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.509354706+00:00 stderr F I1212 16:16:46.508397 1 reflector.go:376] Caches populated for *v1.Secret from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:16:46.592057235+00:00 stderr F I1212 16:16:46.591981 1 base_controller.go:82] Caches are synced for resource-sync 2025-12-12T16:16:46.592057235+00:00 stderr F I1212 16:16:46.592029 1 base_controller.go:119] Starting #1 worker of resource-sync controller ... 2025-12-12T16:16:47.343290336+00:00 stderr F I1212 16:16:47.342252 1 status_controller.go:229] clusteroperator/service-ca diff {"status":{"conditions":[{"lastTransitionTime":"2025-11-02T07:52:03Z","message":"All is well","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-12-12T16:16:47Z","message":"Progressing: All service-ca-operator deployments updated","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:52:05Z","message":"All is well","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:52:05Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:52:02Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:47.362939526+00:00 stderr F I1212 16:16:47.361793 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-service-ca-operator", Name:"service-ca-operator", UID:"1703c560-9cd5-4273-a6b7-22510bce9318", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/service-ca changed: Progressing changed from True to False ("Progressing: All service-ca-operator deployments updated") 2025-12-12T16:16:55.909237457+00:00 stderr F I1212 16:16:55.907397 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.907354271 +0000 UTC))" 2025-12-12T16:16:55.909237457+00:00 stderr F I1212 16:16:55.908034 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.908017887 +0000 UTC))" 2025-12-12T16:16:55.909237457+00:00 stderr F I1212 16:16:55.908054 1 tlsconfig.go:181] "Loaded client CA" index=2 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.908043028 +0000 UTC))" 2025-12-12T16:16:55.909237457+00:00 stderr F I1212 16:16:55.908069 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.908060108 +0000 UTC))" 2025-12-12T16:16:55.909237457+00:00 stderr F I1212 16:16:55.908085 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.908073618 +0000 UTC))" 2025-12-12T16:16:55.909237457+00:00 stderr F I1212 16:16:55.908101 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.908090539 +0000 UTC))" 2025-12-12T16:16:55.909237457+00:00 stderr F I1212 16:16:55.908135 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.90812442 +0000 UTC))" 2025-12-12T16:16:55.909237457+00:00 stderr F I1212 16:16:55.908149 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.90814092 +0000 UTC))" 2025-12-12T16:16:55.909237457+00:00 stderr F I1212 16:16:55.908165 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.90815415 +0000 UTC))" 2025-12-12T16:16:55.909237457+00:00 stderr F I1212 16:16:55.908203 1 tlsconfig.go:181] "Loaded client CA" index=9 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.908171361 +0000 UTC))" 2025-12-12T16:16:55.909237457+00:00 stderr F I1212 16:16:55.908415 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-service-ca-operator.svc\" [serving] validServingFor=[metrics.openshift-service-ca-operator.svc,metrics.openshift-service-ca-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:16:55.908401106 +0000 UTC))" 2025-12-12T16:16:55.909237457+00:00 stderr F I1212 16:16:55.908575 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556205\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2026-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:16:55.90855965 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.315721 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.315669424 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.316190 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.316157756 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.316213 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.316201677 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.316229 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.316220157 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.316244 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 
certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.316233778 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.316261 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.316249728 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.316280 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.316267019 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.316300 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.316286069 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.316319 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.31630632 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.316337 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.31632768 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.316358 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.316345071 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.316614 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-service-ca-operator.svc\" [serving] 
validServingFor=[metrics.openshift-service-ca-operator.svc,metrics.openshift-service-ca-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:24 +0000 UTC to 2027-11-02 07:52:25 +0000 UTC (now=2025-12-12 16:17:46.316592217 +0000 UTC))" 2025-12-12T16:17:46.317819007+00:00 stderr F I1212 16:17:46.316824 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556205\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2026-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:17:46.316809252 +0000 UTC))" 2025-12-12T16:18:46.084711236+00:00 stderr F E1212 16:18:46.084118 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-service-ca-operator/leases/service-ca-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:46.085579648+00:00 stderr F E1212 16:18:46.085540 1 leaderelection.go:436] error retrieving resource lock openshift-service-ca-operator/service-ca-operator-lock: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-service-ca-operator/leases/service-ca-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.595473714+00:00 stderr F E1212 16:18:46.595420 1 base_controller.go:279] "Unhandled Error" err="resource-sync reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/servicecas/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.603202345+00:00 stderr F E1212 16:18:46.603158 1 base_controller.go:279] "Unhandled Error" err="resource-sync reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/servicecas/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.615714294+00:00 stderr F E1212 16:18:46.615673 1 base_controller.go:279] "Unhandled Error" err="resource-sync reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/servicecas/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.638389485+00:00 stderr F E1212 16:18:46.638331 1 base_controller.go:279] "Unhandled Error" err="resource-sync reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/servicecas/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.685641153+00:00 stderr F E1212 16:18:46.684949 1 base_controller.go:279] "Unhandled Error" err="resource-sync reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/servicecas/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.768323647+00:00 stderr F E1212 16:18:46.768243 1 base_controller.go:279] "Unhandled Error" err="resource-sync reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/servicecas/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:46.931137013+00:00 stderr F E1212 16:18:46.931023 1 base_controller.go:279] "Unhandled Error" err="resource-sync reconciliation failed: unable to 
get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/servicecas/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.253841501+00:00 stderr F E1212 16:18:47.253793 1 base_controller.go:279] "Unhandled Error" err="resource-sync reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/servicecas/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.900293972+00:00 stderr F E1212 16:18:47.900248 1 base_controller.go:279] "Unhandled Error" err="resource-sync reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/servicecas/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.183394914+00:00 stderr F E1212 16:18:49.182870 1 base_controller.go:279] "Unhandled Error" err="resource-sync reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/servicecas/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.745844144+00:00 stderr F E1212 16:18:51.745772 1 base_controller.go:279] "Unhandled Error" err="resource-sync reconciliation failed: unable to get operator configuration: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/servicecas/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:19:29.836950928+00:00 stderr F I1212 16:19:29.835933 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:19:58.060085021+00:00 stderr F I1212 16:19:58.059594 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:19:59.561034766+00:00 stderr F I1212 16:19:59.560950 1 reflector.go:376] Caches populated for *v1.ServiceCA from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:00.716873527+00:00 stderr F I1212 16:20:00.715824 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:04.156498838+00:00 stderr F I1212 16:20:04.156363 1 reflector.go:376] Caches populated for *v1.ServiceAccount from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:04.861127491+00:00 stderr F I1212 16:20:04.860500 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:06.568924779+00:00 stderr F I1212 16:20:06.568856 1 reflector.go:376] Caches populated for *v1.Secret from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:07.563301826+00:00 stderr F I1212 16:20:07.563224 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:08.696217322+00:00 stderr F I1212 16:20:08.696127 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:10.677888897+00:00 stderr F I1212 16:20:10.677783 1 reflector.go:376] Caches populated for *v1.ClusterOperator from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:12.781501115+00:00 stderr F I1212 16:20:12.781028 1 reflector.go:376] Caches populated for *v1.Deployment from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:14.932563204+00:00 stderr F I1212 16:20:14.931970 1 reflector.go:376] 
Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:15.190498360+00:00 stderr F I1212 16:20:15.190420 1 reflector.go:376] Caches populated for *v1.Namespace from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:15.397346504+00:00 stderr F I1212 16:20:15.396778 1 reflector.go:376] Caches populated for *v1.Secret from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:16.085880141+00:00 stderr F I1212 16:20:16.085796 1 reflector.go:376] Caches populated for *v1.Secret from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:16.941789712+00:00 stderr F I1212 16:20:16.941713 1 reflector.go:376] Caches populated for *v1.Secret from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:17.577651076+00:00 stderr F I1212 16:20:17.577585 1 reflector.go:376] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:20.890728360+00:00 stderr F I1212 16:20:20.889880 1 reflector.go:376] Caches populated for *v1.Infrastructure from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:24.782249018+00:00 stderr F I1212 16:20:24.782193 1 reflector.go:376] Caches populated for *v1.FeatureGate from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:33.388683844+00:00 stderr F I1212 16:20:33.387951 1 reflector.go:376] Caches populated for *v1.ClusterVersion from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 2025-12-12T16:20:33.910778038+00:00 stderr F I1212 16:20:33.910719 1 reflector.go:376] Caches populated for *v1.Secret from k8s.io/client-go@v0.32.2/tools/cache/reflector.go:251 ././@LongLink0000644000000000000000000000025200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl_7f3690b6-63d7-48cc-9508-e016e3476a99/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-mana0000755000175000017500000000000015117043043032745 5ustar zuulzuul././@LongLink0000644000000000000000000000030200000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl_7f3690b6-63d7-48cc-9508-e016e3476a99/cert-manager-cainjector/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-mana0000755000175000017500000000000015117043062032746 5ustar zuulzuul././@LongLink0000644000000000000000000000030700000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl_7f3690b6-63d7-48cc-9508-e016e3476a99/cert-manager-cainjector/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-mana0000644000175000017500000003215315117043043032753 0ustar zuulzuul2025-12-12T16:28:08.240264007+00:00 stderr F I1212 16:28:08.238068 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:28:08.240264007+00:00 stderr F I1212 16:28:08.238248 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:28:08.240264007+00:00 stderr F I1212 16:28:08.238276 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:28:08.240264007+00:00 stderr F I1212 16:28:08.238282 1 
envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:28:08.277528380+00:00 stderr F I1212 16:28:08.276597 1 setup.go:119] "Registering a reconciler for injectable" logger="cert-manager" kind="mutatingwebhookconfiguration" 2025-12-12T16:28:08.279561801+00:00 stderr F I1212 16:28:08.279518 1 setup.go:119] "Registering a reconciler for injectable" logger="cert-manager" kind="validatingwebhookconfiguration" 2025-12-12T16:28:08.279631603+00:00 stderr F I1212 16:28:08.279575 1 setup.go:119] "Registering a reconciler for injectable" logger="cert-manager" kind="apiservice" 2025-12-12T16:28:08.284071796+00:00 stderr F I1212 16:28:08.284041 1 setup.go:119] "Registering a reconciler for injectable" logger="cert-manager" kind="customresourcedefinition" 2025-12-12T16:28:08.284227709+00:00 stderr F I1212 16:28:08.284158 1 server.go:208] "Starting metrics server" logger="cert-manager.controller-runtime.metrics" 2025-12-12T16:28:08.284650700+00:00 stderr F I1212 16:28:08.284616 1 server.go:247] "Serving metrics server" logger="cert-manager.controller-runtime.metrics" bindAddress="0.0.0.0:9402" secure=false 2025-12-12T16:28:08.326812057+00:00 stderr F I1212 16:28:08.325870 1 reflector.go:376] Caches populated for *v1.MutatingWebhookConfiguration from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.327386932+00:00 stderr F I1212 16:28:08.326702 1 reflector.go:376] Caches populated for *v1.ValidatingWebhookConfiguration from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:08.348284381+00:00 stderr F I1212 16:28:08.346602 1 reflector.go:376] Caches populated for *v1.APIService from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:09.073224278+00:00 stderr F I1212 16:28:09.073117 1 reflector.go:376] Caches populated for *v1.CustomResourceDefinition from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:09.086518875+00:00 stderr F I1212 16:28:09.085580 1 leaderelection.go:257] attempting to acquire leader lease kube-system/cert-manager-cainjector-leader-election... 
2025-12-12T16:28:09.095005659+00:00 stderr F I1212 16:28:09.094893 1 leaderelection.go:271] successfully acquired lease kube-system/cert-manager-cainjector-leader-election 2025-12-12T16:28:09.095544103+00:00 stderr F I1212 16:28:09.095359 1 recorder.go:104] "cert-manager-cainjector-7dbf76d5c8-lv2hl_17b67056-f971-432e-9b81-049cd10bd3b8 became leader" logger="cert-manager.events" type="Normal" object={"kind":"Lease","namespace":"kube-system","name":"cert-manager-cainjector-leader-election","uid":"23769597-39b3-4f23-a993-b9a3200c2b87","apiVersion":"coordination.k8s.io/v1","resourceVersion":"43619"} reason="LeaderElection" 2025-12-12T16:28:09.095712127+00:00 stderr F I1212 16:28:09.095673 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="validatingwebhookconfiguration" controllerGroup="admissionregistration.k8s.io" controllerKind="ValidatingWebhookConfiguration" source="kind source: *v1.ValidatingWebhookConfiguration" 2025-12-12T16:28:09.095727378+00:00 stderr F I1212 16:28:09.095709 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="validatingwebhookconfiguration" controllerGroup="admissionregistration.k8s.io" controllerKind="ValidatingWebhookConfiguration" source="kind source: *v1.PartialObjectMetadata" 2025-12-12T16:28:09.095736368+00:00 stderr F I1212 16:28:09.095724 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="validatingwebhookconfiguration" controllerGroup="admissionregistration.k8s.io" controllerKind="ValidatingWebhookConfiguration" source="kind source: *v1.PartialObjectMetadata" 2025-12-12T16:28:09.095779139+00:00 stderr F I1212 16:28:09.095738 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="validatingwebhookconfiguration" controllerGroup="admissionregistration.k8s.io" controllerKind="ValidatingWebhookConfiguration" source="kind source: *v1.Certificate" 2025-12-12T16:28:09.095779139+00:00 stderr F I1212 16:28:09.095750 1 controller.go:183] "Starting Controller" logger="cert-manager" controller="validatingwebhookconfiguration" controllerGroup="admissionregistration.k8s.io" controllerKind="ValidatingWebhookConfiguration" 2025-12-12T16:28:09.096039945+00:00 stderr F I1212 16:28:09.095680 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="customresourcedefinition" controllerGroup="apiextensions.k8s.io" controllerKind="CustomResourceDefinition" source="kind source: *v1.CustomResourceDefinition" 2025-12-12T16:28:09.096039945+00:00 stderr F I1212 16:28:09.095905 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="customresourcedefinition" controllerGroup="apiextensions.k8s.io" controllerKind="CustomResourceDefinition" source="kind source: *v1.PartialObjectMetadata" 2025-12-12T16:28:09.096039945+00:00 stderr F I1212 16:28:09.095920 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="customresourcedefinition" controllerGroup="apiextensions.k8s.io" controllerKind="CustomResourceDefinition" source="kind source: *v1.PartialObjectMetadata" 2025-12-12T16:28:09.096039945+00:00 stderr F I1212 16:28:09.095929 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="customresourcedefinition" controllerGroup="apiextensions.k8s.io" controllerKind="CustomResourceDefinition" source="kind source: *v1.Certificate" 2025-12-12T16:28:09.096039945+00:00 stderr F I1212 16:28:09.095942 1 controller.go:183] "Starting Controller" logger="cert-manager" controller="customresourcedefinition" 
controllerGroup="apiextensions.k8s.io" controllerKind="CustomResourceDefinition" 2025-12-12T16:28:09.100512039+00:00 stderr F I1212 16:28:09.095667 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="mutatingwebhookconfiguration" controllerGroup="admissionregistration.k8s.io" controllerKind="MutatingWebhookConfiguration" source="kind source: *v1.MutatingWebhookConfiguration" 2025-12-12T16:28:09.100676843+00:00 stderr F I1212 16:28:09.100637 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="mutatingwebhookconfiguration" controllerGroup="admissionregistration.k8s.io" controllerKind="MutatingWebhookConfiguration" source="kind source: *v1.PartialObjectMetadata" 2025-12-12T16:28:09.100731994+00:00 stderr F I1212 16:28:09.100713 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="mutatingwebhookconfiguration" controllerGroup="admissionregistration.k8s.io" controllerKind="MutatingWebhookConfiguration" source="kind source: *v1.PartialObjectMetadata" 2025-12-12T16:28:09.100774125+00:00 stderr F I1212 16:28:09.100759 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="mutatingwebhookconfiguration" controllerGroup="admissionregistration.k8s.io" controllerKind="MutatingWebhookConfiguration" source="kind source: *v1.Certificate" 2025-12-12T16:28:09.100826297+00:00 stderr F I1212 16:28:09.100808 1 controller.go:183] "Starting Controller" logger="cert-manager" controller="mutatingwebhookconfiguration" controllerGroup="admissionregistration.k8s.io" controllerKind="MutatingWebhookConfiguration" 2025-12-12T16:28:09.106552672+00:00 stderr F I1212 16:28:09.105844 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="apiservice" controllerGroup="apiregistration.k8s.io" controllerKind="APIService" source="kind source: *v1.APIService" 2025-12-12T16:28:09.106552672+00:00 stderr F I1212 16:28:09.106334 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="apiservice" controllerGroup="apiregistration.k8s.io" controllerKind="APIService" source="kind source: *v1.PartialObjectMetadata" 2025-12-12T16:28:09.106552672+00:00 stderr F I1212 16:28:09.106345 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="apiservice" controllerGroup="apiregistration.k8s.io" controllerKind="APIService" source="kind source: *v1.PartialObjectMetadata" 2025-12-12T16:28:09.106552672+00:00 stderr F I1212 16:28:09.106353 1 controller.go:175] "Starting EventSource" logger="cert-manager" controller="apiservice" controllerGroup="apiregistration.k8s.io" controllerKind="APIService" source="kind source: *v1.Certificate" 2025-12-12T16:28:09.106552672+00:00 stderr F I1212 16:28:09.106360 1 controller.go:183] "Starting Controller" logger="cert-manager" controller="apiservice" controllerGroup="apiregistration.k8s.io" controllerKind="APIService" 2025-12-12T16:28:09.113409425+00:00 stderr F I1212 16:28:09.112125 1 reflector.go:376] Caches populated for *v1.Certificate from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:09.131157494+00:00 stderr F I1212 16:28:09.131078 1 reflector.go:376] Caches populated for *v1.PartialObjectMetadata from pkg/mod/k8s.io/client-go@v0.32.0/tools/cache/reflector.go:251 2025-12-12T16:28:09.210355299+00:00 stderr F I1212 16:28:09.210241 1 controller.go:217] "Starting workers" logger="cert-manager" controller="mutatingwebhookconfiguration" controllerGroup="admissionregistration.k8s.io" 
controllerKind="MutatingWebhookConfiguration" worker count=1 2025-12-12T16:28:09.215683723+00:00 stderr F I1212 16:28:09.213564 1 controller.go:217] "Starting workers" logger="cert-manager" controller="customresourcedefinition" controllerGroup="apiextensions.k8s.io" controllerKind="CustomResourceDefinition" worker count=1 2025-12-12T16:28:09.215683723+00:00 stderr F I1212 16:28:09.213591 1 controller.go:217] "Starting workers" logger="cert-manager" controller="validatingwebhookconfiguration" controllerGroup="admissionregistration.k8s.io" controllerKind="ValidatingWebhookConfiguration" worker count=1 2025-12-12T16:28:09.215683723+00:00 stderr F I1212 16:28:09.214991 1 controller.go:217] "Starting workers" logger="cert-manager" controller="apiservice" controllerGroup="apiregistration.k8s.io" controllerKind="APIService" worker count=1 2025-12-12T16:28:09.219314755+00:00 stderr F I1212 16:28:09.219256 1 reconciler.go:141] "Updated object" logger="cert-manager" kind="mutatingwebhookconfiguration" kind="mutatingwebhookconfiguration" name="cert-manager-webhook" 2025-12-12T16:28:09.225048201+00:00 stderr F I1212 16:28:09.224900 1 reconciler.go:141] "Updated object" logger="cert-manager" kind="validatingwebhookconfiguration" kind="validatingwebhookconfiguration" name="cert-manager-webhook" 2025-12-12T16:28:09.230111389+00:00 stderr F I1212 16:28:09.229990 1 reconciler.go:141] "Updated object" logger="cert-manager" kind="mutatingwebhookconfiguration" kind="mutatingwebhookconfiguration" name="cert-manager-webhook" 2025-12-12T16:28:09.232621482+00:00 stderr F I1212 16:28:09.232575 1 reconciler.go:141] "Updated object" logger="cert-manager" kind="validatingwebhookconfiguration" kind="validatingwebhookconfiguration" name="cert-manager-webhook" 2025-12-12T16:28:09.339152458+00:00 stderr F I1212 16:28:09.339059 1 reconciler.go:141] "Updated object" logger="cert-manager" kind="customresourcedefinition" kind="customresourcedefinition" name="issuers.cert-manager.io" 2025-12-12T16:28:09.358224091+00:00 stderr F I1212 16:28:09.358147 1 reconciler.go:141] "Updated object" logger="cert-manager" kind="customresourcedefinition" kind="customresourcedefinition" name="certificaterequests.cert-manager.io" 2025-12-12T16:28:09.385252545+00:00 stderr F I1212 16:28:09.385135 1 reconciler.go:141] "Updated object" logger="cert-manager" kind="customresourcedefinition" kind="customresourcedefinition" name="certificates.cert-manager.io" 2025-12-12T16:28:09.481379667+00:00 stderr F I1212 16:28:09.481292 1 reconciler.go:141] "Updated object" logger="cert-manager" kind="customresourcedefinition" kind="customresourcedefinition" name="challenges.acme.cert-manager.io" 2025-12-12T16:28:09.495943496+00:00 stderr F I1212 16:28:09.495841 1 reconciler.go:141] "Updated object" logger="cert-manager" kind="customresourcedefinition" kind="customresourcedefinition" name="orders.acme.cert-manager.io" 2025-12-12T16:28:09.629857915+00:00 stderr F I1212 16:28:09.629739 1 reconciler.go:141] "Updated object" logger="cert-manager" kind="customresourcedefinition" kind="customresourcedefinition" name="clusterissuers.cert-manager.io" 2025-12-12T16:28:09.749650017+00:00 stderr F I1212 16:28:09.749543 1 reconciler.go:141] "Updated object" logger="cert-manager" kind="customresourcedefinition" kind="customresourcedefinition" name="issuers.cert-manager.io" ././@LongLink0000644000000000000000000000030300000000000011577 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr_bc636fbb-cf50-4a1f-82f5-81db89bb0f5b/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_ob0000755000175000017500000000000015117043044033145 5ustar zuulzuul././@LongLink0000644000000000000000000000035100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr_bc636fbb-cf50-4a1f-82f5-81db89bb0f5b/prometheus-operator-admission-webhook/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_ob0000755000175000017500000000000015117043063033146 5ustar zuulzuul././@LongLink0000644000000000000000000000035600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr_bc636fbb-cf50-4a1f-82f5-81db89bb0f5b/prometheus-operator-admission-webhook/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_ob0000644000175000017500000000204415117043044033147 0ustar zuulzuul2025-12-12T16:27:30.404586064+00:00 stdout F ts=2025-12-12T16:27:30.403881497Z level=info caller=/workspace/internal/goruntime/cpu.go:27 msg="Updating GOMAXPROCS=1: using minimum allowed GOMAXPROCS" 2025-12-12T16:27:30.410475943+00:00 stdout F ts=2025-12-12T16:27:30.41034474Z level=warn caller=/workspace/pkg/server/server.go:158 msg="server TLS client verification disabled" client_ca_file=/etc/tls/private/tls-ca.crt err="stat /etc/tls/private/tls-ca.crt: no such file or directory" 2025-12-12T16:27:30.412635698+00:00 stdout F ts=2025-12-12T16:27:30.412540326Z level=info caller=/workspace/pkg/server/server.go:295 msg="starting secure server" address=[::]:8443 http2=false 2025-12-12T16:27:30.413032388+00:00 stderr F I1212 16:27:30.412737 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:27:30.427407862+00:00 stderr F I1212 16:27:30.427356 1 dynamic_serving_content.go:135] "Starting controller" name="servingCert::/tmp/k8s-webhook-server/serving-certs/tls.crt::/tmp/k8s-webhook-server/serving-certs/tls.key" ././@LongLink0000644000000000000000000000026300000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015117043043033065 5ustar zuulzuul././@LongLink0000644000000000000000000000030300000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015117043062033066 5ustar zuulzuul././@LongLink0000644000000000000000000000031000000000000011575 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/kube-rbac-proxy/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000644000175000017500000000223715117043043033073 0ustar zuulzuul2025-12-12T16:25:27.769920043+00:00 stdout F 2025-12-12T16:25:27+00:00 INFO: ovn-control-plane-metrics-certs mounted, starting kube-rbac-proxy 2025-12-12T16:25:27.806394340+00:00 stderr F W1212 16:25:27.806264 1 deprecated.go:66] 2025-12-12T16:25:27.806394340+00:00 stderr F ==== Removed Flag Warning ====================== 2025-12-12T16:25:27.806394340+00:00 stderr F 2025-12-12T16:25:27.806394340+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more. 2025-12-12T16:25:27.806394340+00:00 stderr F 2025-12-12T16:25:27.806394340+00:00 stderr F =============================================== 2025-12-12T16:25:27.806394340+00:00 stderr F 2025-12-12T16:25:27.806964445+00:00 stderr F I1212 16:25:27.806935 1 kube-rbac-proxy.go:235] Valid token audiences: 2025-12-12T16:25:27.808275958+00:00 stderr F I1212 16:25:27.808218 1 kube-rbac-proxy.go:349] Reading certificate files 2025-12-12T16:25:27.808771040+00:00 stderr F I1212 16:25:27.808699 1 kube-rbac-proxy.go:397] Starting TCP socket on :9108 2025-12-12T16:25:27.810593966+00:00 stderr F I1212 16:25:27.810356 1 kube-rbac-proxy.go:404] Listening securely on :9108 ././@LongLink0000644000000000000000000000031300000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/ovnkube-cluster-manager/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015117043062033066 5ustar zuulzuul././@LongLink0000644000000000000000000000032000000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/ovnkube-cluster-manager/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000644000175000017500000011675515117043043033106 0ustar zuulzuul2025-12-12T16:25:27.893464311+00:00 stderr F + [[ -f /env/_master ]] 2025-12-12T16:25:27.893464311+00:00 stderr F + ovn_v4_join_subnet_opt= 2025-12-12T16:25:27.893464311+00:00 stderr F + [[ '' != '' ]] 2025-12-12T16:25:27.893464311+00:00 stderr F + ovn_v6_join_subnet_opt= 2025-12-12T16:25:27.893464311+00:00 stderr F + [[ '' != '' ]] 2025-12-12T16:25:27.893464311+00:00 stderr F + ovn_v4_transit_switch_subnet_opt= 2025-12-12T16:25:27.893577144+00:00 stderr F + [[ '' != '' ]] 2025-12-12T16:25:27.893577144+00:00 stderr F + ovn_v6_transit_switch_subnet_opt= 2025-12-12T16:25:27.893577144+00:00 stderr F + [[ '' != '' ]] 2025-12-12T16:25:27.893577144+00:00 stderr F + dns_name_resolver_enabled_flag= 2025-12-12T16:25:27.893577144+00:00 stderr F + [[ false == \t\r\u\e ]] 2025-12-12T16:25:27.893577144+00:00 stderr F + persistent_ips_enabled_flag=--enable-persistent-ips 2025-12-12T16:25:27.893577144+00:00 stderr F + network_segmentation_enabled_flag= 2025-12-12T16:25:27.893577144+00:00 stderr F + multi_network_enabled_flag= 2025-12-12T16:25:27.893577144+00:00 stderr F + [[ true == \t\r\u\e ]] 2025-12-12T16:25:27.893577144+00:00 stderr F 
+ multi_network_enabled_flag=--enable-multi-network 2025-12-12T16:25:27.893577144+00:00 stderr F + [[ true == \t\r\u\e ]] 2025-12-12T16:25:27.893577144+00:00 stderr F + [[ true != \t\r\u\e ]] 2025-12-12T16:25:27.893577144+00:00 stderr F + network_segmentation_enabled_flag=--enable-network-segmentation 2025-12-12T16:25:27.893577144+00:00 stderr F + route_advertisements_enable_flag= 2025-12-12T16:25:27.893577144+00:00 stderr F + [[ false == \t\r\u\e ]] 2025-12-12T16:25:27.893577144+00:00 stderr F + preconfigured_udn_addresses_enable_flag= 2025-12-12T16:25:27.893589694+00:00 stderr F + [[ false == \t\r\u\e ]] 2025-12-12T16:25:27.893596635+00:00 stderr F + multi_network_policy_enabled_flag= 2025-12-12T16:25:27.893603525+00:00 stderr F + [[ false == \t\r\u\e ]] 2025-12-12T16:25:27.893610375+00:00 stderr F + admin_network_policy_enabled_flag= 2025-12-12T16:25:27.893646196+00:00 stderr F + [[ true == \t\r\u\e ]] 2025-12-12T16:25:27.893646196+00:00 stderr F + admin_network_policy_enabled_flag=--enable-admin-network-policy 2025-12-12T16:25:27.893672317+00:00 stderr F + '[' local == shared ']' 2025-12-12T16:25:27.893697957+00:00 stderr F + '[' local == local ']' 2025-12-12T16:25:27.893697957+00:00 stderr F + gateway_mode_flags='--gateway-mode local' 2025-12-12T16:25:27.894417155+00:00 stderr F ++ date '+%m%d %H:%M:%S.%N' 2025-12-12T16:25:27.897028121+00:00 stderr F + echo 'I1212 16:25:27.896613951 - ovnkube-control-plane - start ovnkube --init-cluster-manager crc' 2025-12-12T16:25:27.897046001+00:00 stdout F I1212 16:25:27.896613951 - ovnkube-control-plane - start ovnkube --init-cluster-manager crc 2025-12-12T16:25:27.897164364+00:00 stderr F + exec /usr/bin/ovnkube --enable-interconnect --init-cluster-manager crc --config-file=/run/ovnkube-config/ovnkube.conf --loglevel 4 --metrics-bind-address 127.0.0.1:29108 --metrics-enable-pprof --metrics-enable-config-duration --enable-persistent-ips --enable-multi-network --enable-network-segmentation --gateway-mode local --enable-egress-ip=true --enable-egress-firewall=true --enable-egress-qos=true --enable-egress-service=true --enable-multicast --enable-multi-external-gateway=true --enable-admin-network-policy 2025-12-12T16:25:27.956707123+00:00 stderr F I1212 16:25:27.956601 1 config.go:2357] Parsed config file /run/ovnkube-config/ovnkube.conf 2025-12-12T16:25:27.956893077+00:00 stderr F I1212 16:25:27.956753 1 config.go:2358] Parsed config: {Default:{MTU:1400 RoutableMTU:0 ConntrackZone:64000 HostMasqConntrackZone:0 OVNMasqConntrackZone:0 HostNodePortConntrackZone:0 ReassemblyConntrackZone:0 EncapType:geneve EncapIP: EffectiveEncapIP: EncapPort:6081 InactivityProbe:100000 OpenFlowProbe:0 OfctrlWaitBeforeClear:0 MonitorAll:true OVSDBTxnTimeout:1m40s LFlowCacheEnable:true LFlowCacheLimit:0 LFlowCacheLimitKb:1048576 RawClusterSubnets:10.217.0.0/22/23 ClusterSubnets:[] EnableUDPAggregation:true Zone:global RawUDNAllowedDefaultServices:default/kubernetes,openshift-dns/dns-default UDNAllowedDefaultServices:[]} Logging:{File: CNIFile: LibovsdbFile:/var/log/ovnkube/libovsdb.log Level:4 LogFileMaxSize:100 LogFileMaxBackups:5 LogFileMaxAge:0 ACLLoggingRateLimit:20} Monitoring:{RawNetFlowTargets: RawSFlowTargets: RawIPFIXTargets: NetFlowTargets:[] SFlowTargets:[] IPFIXTargets:[]} IPFIX:{Sampling:400 CacheActiveTimeout:60 CacheMaxFlows:0} CNI:{ConfDir:/etc/cni/net.d Plugin:ovn-k8s-cni-overlay} OVNKubernetesFeature:{EnableAdminNetworkPolicy:false EnableEgressIP:false EgressIPReachabiltyTotalTimeout:1 EnableEgressFirewall:false EnableEgressQoS:false 
EnableEgressService:false EgressIPNodeHealthCheckPort:9107 EnableMultiNetwork:false EnableNetworkSegmentation:true EnablePreconfiguredUDNAddresses:false EnableRouteAdvertisements:false EnableMultiNetworkPolicy:false EnableStatelessNetPol:false EnableInterconnect:false EnableMultiExternalGateway:false EnablePersistentIPs:false EnableDNSNameResolver:false EnableServiceTemplateSupport:false EnableObservability:false EnableNetworkQoS:false AdvertisedUDNIsolationMode:strict} Kubernetes:{BootstrapKubeconfig: CertDir: CertDuration:10m0s Kubeconfig: CACert: CAData:[] APIServer:https://api-int.crc.testing:6443 Token: TokenFile: CompatServiceCIDR: RawServiceCIDRs:10.217.4.0/23 ServiceCIDRs:[] OVNConfigNamespace:openshift-ovn-kubernetes OVNEmptyLbEvents:false PodIP: RawNoHostSubnetNodes: NoHostSubnetNodes: HostNetworkNamespace:openshift-host-network DisableRequestedChassis:false PlatformType:None HealthzBindAddress:0.0.0.0:10256 CompatMetricsBindAddress: CompatOVNMetricsBindAddress: CompatMetricsEnablePprof:false DNSServiceNamespace:openshift-dns DNSServiceName:dns-default} Metrics:{BindAddress: OVNMetricsBindAddress: ExportOVSMetrics:false EnablePprof:false NodeServerPrivKey: NodeServerCert: EnableConfigDuration:false EnableScaleMetrics:false} OvnNorth:{Address: PrivKey: Cert: CACert: CertCommonName: Scheme: ElectionTimer:0 northbound:false exec:} OvnSouth:{Address: PrivKey: Cert: CACert: CertCommonName: Scheme: ElectionTimer:0 northbound:false exec:} Gateway:{Mode:local Interface: GatewayAcceleratedInterface: EgressGWInterface: NextHop: VLANID:0 NodeportEnable:true DisableSNATMultipleGWs:false V4JoinSubnet:100.64.0.0/16 V6JoinSubnet:fd98::/64 V4MasqueradeSubnet:169.254.169.0/29 V6MasqueradeSubnet:fd69::/125 MasqueradeIPs:{V4OVNMasqueradeIP:169.254.169.1 V6OVNMasqueradeIP:fd69::1 V4HostMasqueradeIP:169.254.169.2 V6HostMasqueradeIP:fd69::2 V4HostETPLocalMasqueradeIP:169.254.169.3 V6HostETPLocalMasqueradeIP:fd69::3 V4DummyNextHopMasqueradeIP:169.254.169.4 V6DummyNextHopMasqueradeIP:fd69::4 V4OVNServiceHairpinMasqueradeIP:169.254.169.5 V6OVNServiceHairpinMasqueradeIP:fd69::5} DisablePacketMTUCheck:false RouterSubnet: SingleNode:false DisableForwarding:false AllowNoUplink:false EphemeralPortRange:} MasterHA:{ElectionLeaseDuration:137 ElectionRenewDeadline:107 ElectionRetryPeriod:26} ClusterMgrHA:{ElectionLeaseDuration:137 ElectionRenewDeadline:107 ElectionRetryPeriod:26} HybridOverlay:{Enabled:false RawClusterSubnets: ClusterSubnets:[] VXLANPort:4789} OvnKubeNode:{Mode:full DPResourceDeviceIdsMap:map[] MgmtPortNetdev: MgmtPortDPResourceName:} ClusterManager:{V4TransitSwitchSubnet:100.88.0.0/16 V6TransitSwitchSubnet:fd97::/64}} 2025-12-12T16:25:27.959506003+00:00 stderr F I1212 16:25:27.959483 1 leaderelection.go:257] attempting to acquire leader lease openshift-ovn-kubernetes/ovn-kubernetes-master... 
2025-12-12T16:25:27.959629676+00:00 stderr F I1212 16:25:27.959484 1 metrics.go:525] Starting metrics server at address "127.0.0.1:29108" 2025-12-12T16:25:27.971519515+00:00 stderr F I1212 16:25:27.971482 1 leaderelection.go:271] successfully acquired lease openshift-ovn-kubernetes/ovn-kubernetes-master 2025-12-12T16:25:27.971835123+00:00 stderr F I1212 16:25:27.971783 1 ovnkube.go:397] Won leader election; in active mode 2025-12-12T16:25:27.971835123+00:00 stderr F I1212 16:25:27.971772 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-ovn-kubernetes", Name:"ovn-kubernetes-master", UID:"65256ce8-7ee0-4070-a47d-3af5324eb879", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"40775", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' ovnkube-control-plane-97c9b6c48-w5wsh became leader 2025-12-12T16:25:28.057080178+00:00 stderr F I1212 16:25:28.056977 1 secondary_network_cluster_manager.go:38] Creating secondary network cluster manager 2025-12-12T16:25:28.057080178+00:00 stderr F I1212 16:25:28.057048 1 egressservice_cluster.go:98] Setting up event handlers for Egress Services 2025-12-12T16:25:28.057584711+00:00 stderr F I1212 16:25:28.057543 1 clustermanager.go:169] Starting the cluster manager 2025-12-12T16:25:28.057584711+00:00 stderr F I1212 16:25:28.057563 1 factory.go:531] Starting watch factory 2025-12-12T16:25:28.058031892+00:00 stderr F I1212 16:25:28.057913 1 reflector.go:357] "Starting reflector" type="*v1.EndpointSlice" resyncPeriod="0s" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.058031892+00:00 stderr F I1212 16:25:28.057966 1 reflector.go:403] "Listing and watching" type="*v1.EndpointSlice" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.058031892+00:00 stderr F I1212 16:25:28.057977 1 reflector.go:357] "Starting reflector" type="*v1.Node" resyncPeriod="0s" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.058031892+00:00 stderr F I1212 16:25:28.057990 1 reflector.go:357] "Starting reflector" type="*v1.Pod" resyncPeriod="0s" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.058076953+00:00 stderr F I1212 16:25:28.058025 1 reflector.go:403] "Listing and watching" type="*v1.Pod" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.058076953+00:00 stderr F I1212 16:25:28.058057 1 reflector.go:403] "Listing and watching" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.058393131+00:00 stderr F I1212 16:25:28.058309 1 reflector.go:357] "Starting reflector" type="*v1.Namespace" resyncPeriod="0s" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.058502684+00:00 stderr F I1212 16:25:28.057921 1 reflector.go:357] "Starting reflector" type="*v1.Service" resyncPeriod="0s" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.058502684+00:00 stderr F I1212 16:25:28.058467 1 reflector.go:403] "Listing and watching" type="*v1.Namespace" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.058513764+00:00 stderr F I1212 16:25:28.058494 1 reflector.go:403] "Listing and watching" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.062115625+00:00 stderr F I1212 16:25:28.062075 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.063171252+00:00 stderr F I1212 16:25:28.063088 1 reflector.go:430] "Caches populated" 
type="*v1.EndpointSlice" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.063769027+00:00 stderr F I1212 16:25:28.063727 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.067010098+00:00 stderr F I1212 16:25:28.066933 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.077387079+00:00 stderr F I1212 16:25:28.077286 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:25:28.163786263+00:00 stderr F I1212 16:25:28.163637 1 factory.go:1890] *v1.Service informer cache synced successfully 2025-12-12T16:25:28.163786263+00:00 stderr F I1212 16:25:28.163725 1 factory.go:1890] *v1.EndpointSlice informer cache synced successfully 2025-12-12T16:25:28.163786263+00:00 stderr F I1212 16:25:28.163731 1 factory.go:1890] *v1.Node informer cache synced successfully 2025-12-12T16:25:28.163786263+00:00 stderr F I1212 16:25:28.163736 1 factory.go:1890] *v1.Pod informer cache synced successfully 2025-12-12T16:25:28.163786263+00:00 stderr F I1212 16:25:28.163740 1 factory.go:1890] *v1.Namespace informer cache synced successfully 2025-12-12T16:25:28.163966888+00:00 stderr F I1212 16:25:28.163927 1 reflector.go:357] "Starting reflector" type="*v1.EgressIP" resyncPeriod="0s" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.163966888+00:00 stderr F I1212 16:25:28.163952 1 reflector.go:403] "Listing and watching" type="*v1.EgressIP" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.166376398+00:00 stderr F I1212 16:25:28.166337 1 reflector.go:430] "Caches populated" type="*v1.EgressIP" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.169100477+00:00 stderr F I1212 16:25:28.169056 1 factory.go:1890] *v1.EgressIP informer cache synced successfully 2025-12-12T16:25:28.169194459+00:00 stderr F I1212 16:25:28.169146 1 reflector.go:357] "Starting reflector" type="*v1.EgressFirewall" resyncPeriod="0s" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.169194459+00:00 stderr F I1212 16:25:28.169168 1 reflector.go:403] "Listing and watching" type="*v1.EgressFirewall" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.170797440+00:00 stderr F I1212 16:25:28.170738 1 reflector.go:430] "Caches populated" type="*v1.EgressFirewall" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.175129919+00:00 stderr F I1212 16:25:28.175054 1 factory.go:1890] *v1.EgressFirewall informer cache synced successfully 2025-12-12T16:25:28.175320473+00:00 stderr F I1212 16:25:28.175272 1 reflector.go:357] "Starting reflector" type="*v1.EgressQoS" resyncPeriod="0s" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.175320473+00:00 stderr F I1212 16:25:28.175290 1 reflector.go:403] "Listing and watching" 
type="*v1.EgressQoS" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.176645997+00:00 stderr F I1212 16:25:28.176599 1 reflector.go:430] "Caches populated" type="*v1.EgressQoS" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.180598236+00:00 stderr F I1212 16:25:28.180557 1 factory.go:1890] *v1.EgressQoS informer cache synced successfully 2025-12-12T16:25:28.180698829+00:00 stderr F I1212 16:25:28.180669 1 reflector.go:357] "Starting reflector" type="*v1.EgressService" resyncPeriod="0s" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.180698829+00:00 stderr F I1212 16:25:28.180690 1 reflector.go:403] "Listing and watching" type="*v1.EgressService" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.183922230+00:00 stderr F I1212 16:25:28.183834 1 reflector.go:430] "Caches populated" type="*v1.EgressService" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.186567886+00:00 stderr F I1212 16:25:28.186464 1 factory.go:1890] *v1.EgressService informer cache synced successfully 2025-12-12T16:25:28.186657269+00:00 stderr F I1212 16:25:28.186587 1 reflector.go:357] "Starting reflector" type="*v1.AdminPolicyBasedExternalRoute" resyncPeriod="0s" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.186657269+00:00 stderr F I1212 16:25:28.186602 1 reflector.go:403] "Listing and watching" type="*v1.AdminPolicyBasedExternalRoute" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.188131636+00:00 stderr F I1212 16:25:28.188077 1 reflector.go:430] "Caches populated" type="*v1.AdminPolicyBasedExternalRoute" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.191988393+00:00 stderr F I1212 16:25:28.191932 1 factory.go:1890] *v1.AdminPolicyBasedExternalRoute informer cache synced successfully 2025-12-12T16:25:28.192076535+00:00 stderr F I1212 16:25:28.192035 1 reflector.go:357] "Starting reflector" type="*v1alpha1.IPAMClaim" resyncPeriod="0s" reflector="github.com/k8snetworkplumbingwg/ipamclaims/pkg/crd/ipamclaims/v1alpha1/apis/informers/externalversions/factory.go:141" 2025-12-12T16:25:28.192076535+00:00 stderr F I1212 16:25:28.192058 1 reflector.go:403] "Listing and watching" type="*v1alpha1.IPAMClaim" reflector="github.com/k8snetworkplumbingwg/ipamclaims/pkg/crd/ipamclaims/v1alpha1/apis/informers/externalversions/factory.go:141" 2025-12-12T16:25:28.193967613+00:00 stderr F I1212 16:25:28.193921 1 reflector.go:430] "Caches populated" type="*v1alpha1.IPAMClaim" reflector="github.com/k8snetworkplumbingwg/ipamclaims/pkg/crd/ipamclaims/v1alpha1/apis/informers/externalversions/factory.go:141" 2025-12-12T16:25:28.197595664+00:00 stderr F I1212 16:25:28.197548 1 factory.go:1890] *v1alpha1.IPAMClaim informer cache synced successfully 2025-12-12T16:25:28.197800959+00:00 stderr F I1212 
16:25:28.197767 1 reflector.go:357] "Starting reflector" type="*v1.NetworkAttachmentDefinition" resyncPeriod="0s" reflector="github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117" 2025-12-12T16:25:28.197800959+00:00 stderr F I1212 16:25:28.197785 1 reflector.go:403] "Listing and watching" type="*v1.NetworkAttachmentDefinition" reflector="github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117" 2025-12-12T16:25:28.199313967+00:00 stderr F I1212 16:25:28.199259 1 reflector.go:430] "Caches populated" type="*v1.NetworkAttachmentDefinition" reflector="github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117" 2025-12-12T16:25:28.203276217+00:00 stderr F I1212 16:25:28.203215 1 factory.go:1890] *v1.NetworkAttachmentDefinition informer cache synced successfully 2025-12-12T16:25:28.203327858+00:00 stderr F I1212 16:25:28.203303 1 reflector.go:357] "Starting reflector" type="*v1.UserDefinedNetwork" resyncPeriod="0s" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.203327858+00:00 stderr F I1212 16:25:28.203317 1 reflector.go:403] "Listing and watching" type="*v1.UserDefinedNetwork" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.203597225+00:00 stderr F I1212 16:25:28.203516 1 reflector.go:357] "Starting reflector" type="*v1.ClusterUserDefinedNetwork" resyncPeriod="0s" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.203610055+00:00 stderr F I1212 16:25:28.203590 1 reflector.go:403] "Listing and watching" type="*v1.ClusterUserDefinedNetwork" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.205265457+00:00 stderr F I1212 16:25:28.205207 1 reflector.go:430] "Caches populated" type="*v1.UserDefinedNetwork" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.205722948+00:00 stderr F I1212 16:25:28.205641 1 reflector.go:430] "Caches populated" type="*v1.ClusterUserDefinedNetwork" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140" 2025-12-12T16:25:28.208896888+00:00 stderr F I1212 16:25:28.208809 1 factory.go:1890] *v1.ClusterUserDefinedNetwork informer cache synced successfully 2025-12-12T16:25:28.208896888+00:00 stderr F I1212 16:25:28.208827 1 factory.go:1890] *v1.UserDefinedNetwork informer cache synced successfully 2025-12-12T16:25:28.208896888+00:00 stderr F I1212 16:25:28.208836 1 factory.go:636] Watch Factory start up complete, took: 151.274477ms 2025-12-12T16:25:28.208948330+00:00 stderr F I1212 16:25:28.208895 1 controller.go:133] Adding controller [clustermanager-nad-controller NAD controller] event handlers 2025-12-12T16:25:28.208948330+00:00 stderr F I1212 16:25:28.208922 1 shared_informer.go:350] "Waiting for caches to sync" controller="[clustermanager-nad-controller NAD controller]" 2025-12-12T16:25:28.208948330+00:00 stderr F I1212 16:25:28.208929 1 
shared_informer.go:357] "Caches are synced" controller="[clustermanager-nad-controller NAD controller]" 2025-12-12T16:25:28.208948330+00:00 stderr F I1212 16:25:28.208941 1 controller.go:157] Starting controller [clustermanager-nad-controller NAD controller] with 1 workers 2025-12-12T16:25:28.209000341+00:00 stderr F I1212 16:25:28.208980 1 network_controller.go:246] [clustermanager-nad-controller network controller]: syncing all networks 2025-12-12T16:25:28.209000341+00:00 stderr F I1212 16:25:28.208989 1 network_controller.go:257] [clustermanager-nad-controller network controller]: finished syncing all networks. Time taken: 17.951µs 2025-12-12T16:25:28.209000341+00:00 stderr F I1212 16:25:28.208996 1 controller.go:157] Starting controller [clustermanager-nad-controller network controller] with 1 workers 2025-12-12T16:25:28.209009911+00:00 stderr F I1212 16:25:28.209005 1 nad_controller.go:162] [clustermanager-nad-controller NAD controller]: started 2025-12-12T16:25:28.209028362+00:00 stderr F I1212 16:25:28.209018 1 network_cluster_controller.go:377] Initializing cluster manager network controller "default" ... 2025-12-12T16:25:28.209067473+00:00 stderr F I1212 16:25:28.209051 1 network_cluster_controller.go:383] Cluster manager network controller "default" initialized. Took: 41.851µs 2025-12-12T16:25:28.209067473+00:00 stderr F I1212 16:25:28.209058 1 network_cluster_controller.go:387] Cluster manager network controller "default" starting node watcher... 2025-12-12T16:25:28.209274898+00:00 stderr F I1212 16:25:28.209214 1 network_cluster_controller.go:392] Cluster manager network controller "default" completed watch nodes. Took: 152.814µs 2025-12-12T16:25:28.209274898+00:00 stderr F I1212 16:25:28.209238 1 zone_cluster_controller.go:217] Node crc has the id 2 set 2025-12-12T16:25:28.209365660+00:00 stderr F I1212 16:25:28.209334 1 kube.go:133] Setting annotations map[k8s.ovn.org/node-id:2 k8s.ovn.org/node-transit-switch-port-ifaddr:{"ipv4":"100.88.0.2/16"}] on node crc 2025-12-12T16:25:28.222506751+00:00 stderr F W1212 16:25:28.222454 1 egressip_healthcheck.go:169] Health checking using insecure connection 2025-12-12T16:25:29.223751453+00:00 stderr F W1212 16:25:29.223627 1 egressip_healthcheck.go:188] Could not connect to crc (10.217.0.2:9107): context deadline exceeded 2025-12-12T16:25:29.223830535+00:00 stderr F I1212 16:25:29.223752 1 egressip_controller.go:436] EgressIP node reachability enabled and using gRPC port 9107 2025-12-12T16:25:29.223830535+00:00 stderr F I1212 16:25:29.223759 1 egressservice_cluster.go:174] Starting Egress Services Controller 2025-12-12T16:25:29.223830535+00:00 stderr F I1212 16:25:29.223782 1 shared_informer.go:350] "Waiting for caches to sync" controller="egressservices" 2025-12-12T16:25:29.223830535+00:00 stderr F I1212 16:25:29.223793 1 shared_informer.go:357] "Caches are synced" controller="egressservices" 2025-12-12T16:25:29.223830535+00:00 stderr F I1212 16:25:29.223799 1 shared_informer.go:350] "Waiting for caches to sync" controller="egressservices_services" 2025-12-12T16:25:29.223830535+00:00 stderr F I1212 16:25:29.223804 1 shared_informer.go:357] "Caches are synced" controller="egressservices_services" 2025-12-12T16:25:29.223830535+00:00 stderr F I1212 16:25:29.223810 1 shared_informer.go:350] "Waiting for caches to sync" controller="egressservices_endpointslices" 2025-12-12T16:25:29.223830535+00:00 stderr F I1212 16:25:29.223815 1 shared_informer.go:357] "Caches are synced" controller="egressservices_endpointslices" 
2025-12-12T16:25:29.223830535+00:00 stderr F I1212 16:25:29.223820 1 shared_informer.go:350] "Waiting for caches to sync" controller="egressservices_nodes" 2025-12-12T16:25:29.223830535+00:00 stderr F I1212 16:25:29.223825 1 shared_informer.go:357] "Caches are synced" controller="egressservices_nodes" 2025-12-12T16:25:29.223871066+00:00 stderr F I1212 16:25:29.223829 1 egressservice_cluster.go:191] Repairing Egress Services 2025-12-12T16:25:29.223919008+00:00 stderr F I1212 16:25:29.223893 1 kube.go:272] Setting labels map[] on node crc 2025-12-12T16:25:29.237547741+00:00 stderr F I1212 16:25:29.237454 1 endpointslice_mirror_controller.go:155] Starting the EndpointSlice mirror controller 2025-12-12T16:25:29.237547741+00:00 stderr F I1212 16:25:29.237487 1 endpointslice_mirror_controller.go:156] Repairing EndpointSlice mirrors 2025-12-12T16:25:29.237741315+00:00 stderr F I1212 16:25:29.237692 1 status_manager.go:221] Starting StatusManager with typed managers: map[adminpolicybasedexternalroutes:0xc004b9a740 egressfirewalls:0xc004b9ab40 egressqoses:0xc004b9af40] 2025-12-12T16:25:29.237773316+00:00 stderr F I1212 16:25:29.237754 1 controller.go:133] Adding controller zone_tracker event handlers 2025-12-12T16:25:29.237831078+00:00 stderr F I1212 16:25:29.237801 1 shared_informer.go:350] "Waiting for caches to sync" controller="zone_tracker" 2025-12-12T16:25:29.237831078+00:00 stderr F I1212 16:25:29.237821 1 shared_informer.go:357] "Caches are synced" controller="zone_tracker" 2025-12-12T16:25:29.237864138+00:00 stderr F I1212 16:25:29.237838 1 status_manager.go:245] StatusManager got zones update: map[crc:{}] 2025-12-12T16:25:29.237864138+00:00 stderr F I1212 16:25:29.237856 1 controller.go:289] Controller adminpolicybasedexternalroutes_statusmanager: full reconcile 2025-12-12T16:25:29.237873899+00:00 stderr F I1212 16:25:29.237867 1 controller.go:289] Controller egressfirewalls_statusmanager: full reconcile 2025-12-12T16:25:29.237881309+00:00 stderr F I1212 16:25:29.237876 1 controller.go:289] Controller egressqoses_statusmanager: full reconcile 2025-12-12T16:25:29.237890419+00:00 stderr F I1212 16:25:29.237883 1 status_manager.go:245] StatusManager got zones update: map[crc:{}] 2025-12-12T16:25:29.237897769+00:00 stderr F I1212 16:25:29.237890 1 controller.go:289] Controller egressqoses_statusmanager: full reconcile 2025-12-12T16:25:29.237897769+00:00 stderr F I1212 16:25:29.237895 1 controller.go:289] Controller adminpolicybasedexternalroutes_statusmanager: full reconcile 2025-12-12T16:25:29.237905240+00:00 stderr F I1212 16:25:29.237899 1 controller.go:289] Controller egressfirewalls_statusmanager: full reconcile 2025-12-12T16:25:29.237912400+00:00 stderr F I1212 16:25:29.237906 1 controller.go:157] Starting controller zone_tracker with 1 workers 2025-12-12T16:25:29.237943690+00:00 stderr F I1212 16:25:29.237921 1 controller.go:133] Adding controller adminpolicybasedexternalroutes_statusmanager event handlers 2025-12-12T16:25:29.237943690+00:00 stderr F I1212 16:25:29.237936 1 shared_informer.go:350] "Waiting for caches to sync" controller="adminpolicybasedexternalroutes_statusmanager" 2025-12-12T16:25:29.237951351+00:00 stderr F I1212 16:25:29.237943 1 shared_informer.go:357] "Caches are synced" controller="adminpolicybasedexternalroutes_statusmanager" 2025-12-12T16:25:29.237958541+00:00 stderr F I1212 16:25:29.237950 1 controller.go:157] Starting controller adminpolicybasedexternalroutes_statusmanager with 1 workers 2025-12-12T16:25:29.237989582+00:00 stderr F I1212 16:25:29.237964 1 
controller.go:133] Adding controller egressfirewalls_statusmanager event handlers 2025-12-12T16:25:29.237989582+00:00 stderr F I1212 16:25:29.237982 1 shared_informer.go:350] "Waiting for caches to sync" controller="egressfirewalls_statusmanager" 2025-12-12T16:25:29.237997562+00:00 stderr F I1212 16:25:29.237989 1 shared_informer.go:357] "Caches are synced" controller="egressfirewalls_statusmanager" 2025-12-12T16:25:29.238004902+00:00 stderr F I1212 16:25:29.237997 1 controller.go:157] Starting controller egressfirewalls_statusmanager with 1 workers 2025-12-12T16:25:29.238032673+00:00 stderr F I1212 16:25:29.238010 1 controller.go:133] Adding controller egressqoses_statusmanager event handlers 2025-12-12T16:25:29.238032673+00:00 stderr F I1212 16:25:29.238026 1 shared_informer.go:350] "Waiting for caches to sync" controller="egressqoses_statusmanager" 2025-12-12T16:25:29.238040473+00:00 stderr F I1212 16:25:29.238033 1 shared_informer.go:357] "Caches are synced" controller="egressqoses_statusmanager" 2025-12-12T16:25:29.238047813+00:00 stderr F I1212 16:25:29.238040 1 controller.go:157] Starting controller egressqoses_statusmanager with 1 workers 2025-12-12T16:25:29.238054953+00:00 stderr F I1212 16:25:29.238049 1 controller.go:145] Starting user-defined network controllers 2025-12-12T16:25:29.238086864+00:00 stderr F I1212 16:25:29.238068 1 controller.go:133] Adding controller udn-namespace-controller event handlers 2025-12-12T16:25:29.238157816+00:00 stderr F I1212 16:25:29.238130 1 shared_informer.go:350] "Waiting for caches to sync" controller="udn-namespace-controller" 2025-12-12T16:25:29.238157816+00:00 stderr F I1212 16:25:29.238145 1 shared_informer.go:357] "Caches are synced" controller="udn-namespace-controller" 2025-12-12T16:25:29.238285779+00:00 stderr F I1212 16:25:29.238216 1 egressservice_cluster_node.go:167] Processing sync for Egress Service node crc 2025-12-12T16:25:29.238285779+00:00 stderr F I1212 16:25:29.238236 1 egressservice_cluster_node.go:170] Finished syncing Egress Service node crc: 22.461µs 2025-12-12T16:25:29.238434833+00:00 stderr F I1212 16:25:29.238406 1 controller.go:133] Adding controller cluster-user-defined-network-controller event handlers 2025-12-12T16:25:29.238434833+00:00 stderr F I1212 16:25:29.238429 1 shared_informer.go:350] "Waiting for caches to sync" controller="cluster-user-defined-network-controller" 2025-12-12T16:25:29.238443583+00:00 stderr F I1212 16:25:29.238438 1 shared_informer.go:357] "Caches are synced" controller="cluster-user-defined-network-controller" 2025-12-12T16:25:29.238464444+00:00 stderr F I1212 16:25:29.238451 1 controller.go:133] Adding controller user-defined-network-controller event handlers 2025-12-12T16:25:29.238471894+00:00 stderr F I1212 16:25:29.238465 1 shared_informer.go:350] "Waiting for caches to sync" controller="user-defined-network-controller" 2025-12-12T16:25:29.238479214+00:00 stderr F I1212 16:25:29.238472 1 shared_informer.go:357] "Caches are synced" controller="user-defined-network-controller" 2025-12-12T16:25:29.238505815+00:00 stderr F I1212 16:25:29.238488 1 controller.go:133] Adding controller udn-nad-controller event handlers 2025-12-12T16:25:29.238518945+00:00 stderr F I1212 16:25:29.238505 1 shared_informer.go:350] "Waiting for caches to sync" controller="udn-nad-controller" 2025-12-12T16:25:29.238518945+00:00 stderr F I1212 16:25:29.238512 1 shared_informer.go:357] "Caches are synced" controller="udn-nad-controller" 2025-12-12T16:25:29.238526765+00:00 stderr F I1212 16:25:29.238521 1 
controller.go:157] Starting controller cluster-user-defined-network-controller with 1 workers 2025-12-12T16:25:29.238534165+00:00 stderr F I1212 16:25:29.238529 1 controller.go:157] Starting controller user-defined-network-controller with 1 workers 2025-12-12T16:25:29.238545446+00:00 stderr F I1212 16:25:29.238538 1 controller.go:157] Starting controller udn-nad-controller with 1 workers 2025-12-12T16:25:29.238552896+00:00 stderr F I1212 16:25:29.238546 1 controller.go:157] Starting controller udn-namespace-controller with 1 workers 2025-12-12T16:30:30.066030835+00:00 stderr F I1212 16:30:30.065934 1 reflector.go:946] "Watch close" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Namespace" totalItems=21 2025-12-12T16:30:31.196640413+00:00 stderr F I1212 16:30:31.196535 1 reflector.go:946] "Watch close" reflector="github.com/k8snetworkplumbingwg/ipamclaims/pkg/crd/ipamclaims/v1alpha1/apis/informers/externalversions/factory.go:141" type="*v1alpha1.IPAMClaim" totalItems=6 2025-12-12T16:30:33.201231949+00:00 stderr F I1212 16:30:33.201112 1 reflector.go:946] "Watch close" reflector="github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117" type="*v1.NetworkAttachmentDefinition" totalItems=6 2025-12-12T16:30:49.169770228+00:00 stderr F I1212 16:30:49.169028 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140" type="*v1.EgressIP" totalItems=7 2025-12-12T16:33:52.179017243+00:00 stderr F I1212 16:33:52.178928 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140" type="*v1.EgressQoS" totalItems=9 2025-12-12T16:34:10.206840164+00:00 stderr F I1212 16:34:10.206384 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140" type="*v1.UserDefinedNetwork" totalItems=9 2025-12-12T16:34:14.207505963+00:00 stderr F I1212 16:34:14.207402 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140" type="*v1.ClusterUserDefinedNetwork" totalItems=10 2025-12-12T16:34:17.173209637+00:00 stderr F I1212 16:34:17.173083 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140" type="*v1.EgressFirewall" totalItems=10 2025-12-12T16:34:36.064689607+00:00 stderr F I1212 16:34:36.064589 1 reflector.go:946] "Watch close" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.EndpointSlice" totalItems=46 2025-12-12T16:34:42.069762217+00:00 stderr F I1212 16:34:42.069666 1 reflector.go:946] "Watch close" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Service" totalItems=37 2025-12-12T16:34:53.065143220+00:00 stderr F I1212 16:34:53.065018 1 reflector.go:946] "Watch close" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Node" totalItems=21 2025-12-12T16:35:15.186394832+00:00 stderr F I1212 16:35:15.186255 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140" type="*v1.EgressService" totalItems=10 2025-12-12T16:35:25.080124474+00:00 stderr F 
I1212 16:35:25.079944 1 reflector.go:946] "Watch close" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Pod" totalItems=287 2025-12-12T16:35:25.190546079+00:00 stderr F I1212 16:35:25.190383 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140" type="*v1.AdminPolicyBasedExternalRoute" totalItems=11 2025-12-12T16:36:01.203707998+00:00 stderr F I1212 16:36:01.203611 1 reflector.go:946] "Watch close" reflector="github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117" type="*v1.NetworkAttachmentDefinition" totalItems=6 2025-12-12T16:36:38.173614803+00:00 stderr F I1212 16:36:38.173545 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140" type="*v1.EgressIP" totalItems=6 2025-12-12T16:37:11.069685925+00:00 stderr F I1212 16:37:11.069589 1 reflector.go:946] "Watch close" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Namespace" totalItems=8 2025-12-12T16:38:16.201238247+00:00 stderr F I1212 16:38:16.199926 1 reflector.go:946] "Watch close" reflector="github.com/k8snetworkplumbingwg/ipamclaims/pkg/crd/ipamclaims/v1alpha1/apis/informers/externalversions/factory.go:141" type="*v1alpha1.IPAMClaim" totalItems=9 2025-12-12T16:39:47.182703968+00:00 stderr F I1212 16:39:47.182585 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140" type="*v1.EgressQoS" totalItems=6 2025-12-12T16:40:28.209060229+00:00 stderr F I1212 16:40:28.208949 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140" type="*v1.UserDefinedNetwork" totalItems=7 2025-12-12T16:40:51.195297948+00:00 stderr F I1212 16:40:51.195142 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140" type="*v1.AdminPolicyBasedExternalRoute" totalItems=7 2025-12-12T16:41:53.176378795+00:00 stderr F I1212 16:41:53.175785 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140" type="*v1.EgressFirewall" totalItems=9 2025-12-12T16:42:05.067860934+00:00 stderr F I1212 16:42:05.067776 1 reflector.go:946] "Watch close" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Node" totalItems=18 2025-12-12T16:42:19.066601378+00:00 stderr F I1212 16:42:19.066468 1 reflector.go:946] "Watch close" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.EndpointSlice" totalItems=8 2025-12-12T16:42:56.207169222+00:00 stderr F I1212 16:42:56.207068 1 reflector.go:946] "Watch close" reflector="github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117" type="*v1.NetworkAttachmentDefinition" totalItems=7 2025-12-12T16:43:01.209715812+00:00 stderr F I1212 16:43:01.209643 1 reflector.go:946] "Watch close" reflector="github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140" type="*v1.ClusterUserDefinedNetwork" totalItems=9 
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-75ff9f647d-4v9cj_5a94df8d-2607-41a1-b1f9-21016895dcd6/catalog-operator/0.log
2025-12-12T16:16:47.349243121+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="log level info" 2025-12-12T16:16:47.351265491+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="TLS keys set, using https for metrics" 2025-12-12T16:16:47.351265491+00:00 stderr F W1212 16:16:47.350956 1 client_config.go:667] Neither --kubeconfig nor --master was specified. Using the inClusterConfig. This might not work. 2025-12-12T16:16:47.357349659+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="Using in-cluster kube client config" 2025-12-12T16:16:47.395062560+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="Using in-cluster kube client config" 2025-12-12T16:16:47.396948296+00:00 stderr F W1212 16:16:47.396693 1 client_config.go:667] Neither --kubeconfig nor --master was specified. Using the inClusterConfig. This might not work. 2025-12-12T16:16:47.531730997+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="skipping irrelevant gvr" gvr="rbac.authorization.k8s.io/v1, Resource=clusterroles" 2025-12-12T16:16:47.531730997+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="skipping irrelevant gvr" gvr="rbac.authorization.k8s.io/v1, Resource=clusterrolebindings" 2025-12-12T16:16:47.531766717+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="skipping irrelevant gvr" gvr="apps/v1, Resource=deployments" 2025-12-12T16:16:47.793948018+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="detected ability to filter informers" canFilter=true 2025-12-12T16:16:47.819322118+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="registering owner reference fixer" gvr="/v1, Resource=services" 2025-12-12T16:16:47.852295323+00:00 stderr F W1212 16:16:47.851915 1 client_config.go:667] Neither --kubeconfig nor --master was specified. Using the inClusterConfig. This might not work. 2025-12-12T16:16:47.863534757+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="connection established. cluster-version: v1.33.5" 2025-12-12T16:16:47.863534757+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="operator ready" 2025-12-12T16:16:47.863534757+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="starting informers..." 
2025-12-12T16:16:47.863534757+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="informers started" 2025-12-12T16:16:47.863534757+00:00 stderr F time="2025-12-12T16:16:47Z" level=info msg="waiting for caches to sync..." 2025-12-12T16:16:48.062472424+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="starting workers..." 2025-12-12T16:16:48.069806503+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=lCyvD namespace=default 2025-12-12T16:16:48.069806503+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=lCyvD namespace=default 2025-12-12T16:16:48.069806503+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="connection established. cluster-version: v1.33.5" 2025-12-12T16:16:48.069806503+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="operator ready" 2025-12-12T16:16:48.069806503+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="starting informers..." 2025-12-12T16:16:48.069806503+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="informers started" 2025-12-12T16:16:48.069806503+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="waiting for caches to sync..." 2025-12-12T16:16:48.069806503+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=UqtcA namespace=hostpath-provisioner 2025-12-12T16:16:48.069806503+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=UqtcA namespace=hostpath-provisioner 2025-12-12T16:16:48.075918693+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="No subscriptions were found in namespace default" id=lCyvD namespace=default 2025-12-12T16:16:48.075991004+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=oDbZ+ namespace=kube-node-lease 2025-12-12T16:16:48.076014525+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=oDbZ+ namespace=kube-node-lease 2025-12-12T16:16:48.076232570+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="No subscriptions were found in namespace hostpath-provisioner" id=UqtcA namespace=hostpath-provisioner 2025-12-12T16:16:48.076282711+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=ShxeD namespace=kube-public 2025-12-12T16:16:48.076307372+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=ShxeD namespace=kube-public 2025-12-12T16:16:48.082670087+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=PUH+U 2025-12-12T16:16:48.082722869+00:00 stderr F time="2025-12-12T16:16:48Z" level=error msg="registry service not healthy: one or more required resources are missing" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=PUH+U isCurrentServiceAccountNil=false isServiceNil=false numCurrentPods=0 2025-12-12T16:16:48.089293939+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=aYQ0R 2025-12-12T16:16:48.089293939+00:00 stderr F time="2025-12-12T16:16:48Z" level=error msg="registry service not healthy: one or more required resources are missing" catalogsource.name=community-operators 
catalogsource.namespace=openshift-marketplace id=aYQ0R isCurrentServiceAccountNil=false isServiceNil=false numCurrentPods=0 2025-12-12T16:16:48.169800125+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="starting workers..." 2025-12-12T16:16:48.188875720+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="No subscriptions were found in namespace kube-public" id=ShxeD namespace=kube-public 2025-12-12T16:16:48.188875720+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=XfJhJ namespace=kube-system 2025-12-12T16:16:48.188875720+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=XfJhJ namespace=kube-system 2025-12-12T16:16:48.188875720+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="No subscriptions were found in namespace kube-node-lease" id=oDbZ+ namespace=kube-node-lease 2025-12-12T16:16:48.188875720+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=C8iLm namespace=openshift 2025-12-12T16:16:48.188875720+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=C8iLm namespace=openshift 2025-12-12T16:16:48.285240833+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="No subscriptions were found in namespace openshift" id=C8iLm namespace=openshift 2025-12-12T16:16:48.285240833+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=hMems namespace=openshift-apiserver 2025-12-12T16:16:48.285240833+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=hMems namespace=openshift-apiserver 2025-12-12T16:16:48.285240833+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="No subscriptions were found in namespace kube-system" id=XfJhJ namespace=kube-system 2025-12-12T16:16:48.285240833+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=ciL0T namespace=openshift-apiserver-operator 2025-12-12T16:16:48.285240833+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=ciL0T namespace=openshift-apiserver-operator 2025-12-12T16:16:48.291066575+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="No subscriptions were found in namespace openshift-apiserver" id=hMems namespace=openshift-apiserver 2025-12-12T16:16:48.291066575+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=V8b5C namespace=openshift-authentication 2025-12-12T16:16:48.291066575+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=V8b5C namespace=openshift-authentication 2025-12-12T16:16:48.382743504+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="No subscriptions were found in namespace openshift-apiserver-operator" id=ciL0T namespace=openshift-apiserver-operator 2025-12-12T16:16:48.382743504+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=r4jtn namespace=openshift-authentication-operator 2025-12-12T16:16:48.382743504+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=r4jtn namespace=openshift-authentication-operator 2025-12-12T16:16:48.503245306+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="No subscriptions were found in namespace openshift-authentication" id=V8b5C namespace=openshift-authentication 2025-12-12T16:16:48.503245306+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=D3NIN 
namespace=openshift-cloud-network-config-controller 2025-12-12T16:16:48.503245306+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=D3NIN namespace=openshift-cloud-network-config-controller 2025-12-12T16:16:48.671807981+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="No subscriptions were found in namespace openshift-authentication-operator" id=r4jtn namespace=openshift-authentication-operator 2025-12-12T16:16:48.671861512+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=BCCMQ namespace=openshift-cloud-platform-infra 2025-12-12T16:16:48.671861512+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=BCCMQ namespace=openshift-cloud-platform-infra 2025-12-12T16:16:48.868732539+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="No subscriptions were found in namespace openshift-cloud-network-config-controller" id=D3NIN namespace=openshift-cloud-network-config-controller 2025-12-12T16:16:48.868732539+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="resolving sources" id=NH7L+ namespace=openshift-cluster-machine-approver 2025-12-12T16:16:48.868732539+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="checking if subscriptions need update" id=NH7L+ namespace=openshift-cluster-machine-approver 2025-12-12T16:16:48.876642122+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=PUH+U 2025-12-12T16:16:48.876642122+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="registry pods invalid, need to overwrite" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=PUH+U 2025-12-12T16:16:48.876642122+00:00 stderr F time="2025-12-12T16:16:48Z" level=info msg="creating desired pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=PUH+U pod.name= pod.namespace=openshift-marketplace 2025-12-12T16:16:49.075462416+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="No subscriptions were found in namespace openshift-cloud-platform-infra" id=BCCMQ namespace=openshift-cloud-platform-infra 2025-12-12T16:16:49.075462416+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="resolving sources" id=TR14c namespace=openshift-cluster-samples-operator 2025-12-12T16:16:49.075462416+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="checking if subscriptions need update" id=TR14c namespace=openshift-cluster-samples-operator 2025-12-12T16:16:49.075462416+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=aYQ0R 2025-12-12T16:16:49.075462416+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="registry pods invalid, need to overwrite" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=aYQ0R 2025-12-12T16:16:49.075462416+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="creating desired pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=aYQ0R pod.name= pod.namespace=openshift-marketplace 2025-12-12T16:16:49.270692562+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="No subscriptions were found in namespace 
openshift-cluster-machine-approver" id=NH7L+ namespace=openshift-cluster-machine-approver 2025-12-12T16:16:49.270692562+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="resolving sources" id=njgG5 namespace=openshift-cluster-storage-operator 2025-12-12T16:16:49.270732963+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="checking if subscriptions need update" id=njgG5 namespace=openshift-cluster-storage-operator 2025-12-12T16:16:49.292294330+00:00 stderr F I1212 16:16:49.291379 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:16:49.292294330+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="catalog update required at 2025-12-12 16:16:49.291558332 +0000 UTC m=+2.407316315" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=PUH+U 2025-12-12T16:16:49.486239024+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="No subscriptions were found in namespace openshift-cluster-samples-operator" id=TR14c namespace=openshift-cluster-samples-operator 2025-12-12T16:16:49.486239024+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="resolving sources" id=wJglN namespace=openshift-cluster-version 2025-12-12T16:16:49.486239024+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="checking if subscriptions need update" id=wJglN namespace=openshift-cluster-version 2025-12-12T16:16:49.489731179+00:00 stderr F I1212 16:16:49.489321 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:16:49.489731179+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="catalog update required at 2025-12-12 16:16:49.489526114 +0000 UTC m=+2.605284097" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=aYQ0R 2025-12-12T16:16:49.685848147+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="No subscriptions were found in namespace openshift-cluster-storage-operator" id=njgG5 namespace=openshift-cluster-storage-operator 2025-12-12T16:16:49.685848147+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="resolving sources" id=Jxc+S namespace=openshift-config 2025-12-12T16:16:49.685848147+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="checking if subscriptions need update" id=Jxc+S namespace=openshift-config 2025-12-12T16:16:49.692438738+00:00 stderr F I1212 16:16:49.692314 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:16:49.693680858+00:00 stderr F time="2025-12-12T16:16:49Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=PUH+U 2025-12-12T16:16:49.693680858+00:00 stderr F time="2025-12-12T16:16:49Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=PUH+U 2025-12-12T16:16:49.693680858+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="requeueing registry server for catalog 
update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=PUH+U 2025-12-12T16:16:49.876221135+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="No subscriptions were found in namespace openshift-cluster-version" id=wJglN namespace=openshift-cluster-version 2025-12-12T16:16:49.876221135+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="resolving sources" id=PVcG1 namespace=openshift-config-managed 2025-12-12T16:16:49.876221135+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="checking if subscriptions need update" id=PVcG1 namespace=openshift-config-managed 2025-12-12T16:16:49.885480671+00:00 stderr F I1212 16:16:49.884388 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:16:49.885480671+00:00 stderr F time="2025-12-12T16:16:49Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=aYQ0R 2025-12-12T16:16:49.885480671+00:00 stderr F time="2025-12-12T16:16:49Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=aYQ0R 2025-12-12T16:16:49.885480671+00:00 stderr F time="2025-12-12T16:16:49Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=aYQ0R 2025-12-12T16:16:50.071236786+00:00 stderr F time="2025-12-12T16:16:50Z" level=info msg="No subscriptions were found in namespace openshift-config" id=Jxc+S namespace=openshift-config 2025-12-12T16:16:50.071236786+00:00 stderr F time="2025-12-12T16:16:50Z" level=info msg="resolving sources" id=8PGX4 namespace=openshift-config-operator 2025-12-12T16:16:50.071236786+00:00 stderr F time="2025-12-12T16:16:50Z" level=info msg="checking if subscriptions need update" id=8PGX4 namespace=openshift-config-operator 2025-12-12T16:16:50.293228026+00:00 stderr F time="2025-12-12T16:16:50Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=9soeb 2025-12-12T16:16:50.293228026+00:00 stderr F time="2025-12-12T16:16:50Z" level=error msg="registry service not healthy: one or more required resources are missing" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=9soeb isCurrentServiceAccountNil=false isServiceNil=false numCurrentPods=0 2025-12-12T16:16:50.486228488+00:00 stderr F time="2025-12-12T16:16:50Z" level=info msg="No subscriptions were found in namespace openshift-config-managed" id=PVcG1 namespace=openshift-config-managed 2025-12-12T16:16:50.486228488+00:00 stderr F time="2025-12-12T16:16:50Z" level=info msg="resolving sources" id=clnPH namespace=openshift-console 2025-12-12T16:16:50.486228488+00:00 stderr F time="2025-12-12T16:16:50Z" level=info msg="checking if subscriptions need update" id=clnPH namespace=openshift-console 2025-12-12T16:16:50.871538125+00:00 
stderr F time="2025-12-12T16:16:50Z" level=info msg="No subscriptions were found in namespace openshift-config-operator" id=8PGX4 namespace=openshift-config-operator 2025-12-12T16:16:50.871538125+00:00 stderr F time="2025-12-12T16:16:50Z" level=info msg="resolving sources" id=Ja4Be namespace=openshift-console-operator 2025-12-12T16:16:50.871591416+00:00 stderr F time="2025-12-12T16:16:50Z" level=info msg="checking if subscriptions need update" id=Ja4Be namespace=openshift-console-operator 2025-12-12T16:16:51.070270667+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="No subscriptions were found in namespace openshift-console" id=clnPH namespace=openshift-console 2025-12-12T16:16:51.070270667+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="resolving sources" id=54fV0 namespace=openshift-console-user-settings 2025-12-12T16:16:51.070270667+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="checking if subscriptions need update" id=54fV0 namespace=openshift-console-user-settings 2025-12-12T16:16:51.074063400+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=9soeb 2025-12-12T16:16:51.074063400+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="registry pods invalid, need to overwrite" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=9soeb 2025-12-12T16:16:51.074138692+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="creating desired pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=9soeb pod.name= pod.namespace=openshift-marketplace 2025-12-12T16:16:51.271245254+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="No subscriptions were found in namespace openshift-console-operator" id=Ja4Be namespace=openshift-console-operator 2025-12-12T16:16:51.271288995+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="resolving sources" id=3/uis namespace=openshift-controller-manager 2025-12-12T16:16:51.271288995+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="checking if subscriptions need update" id=3/uis namespace=openshift-controller-manager 2025-12-12T16:16:51.274413021+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=2HPmE 2025-12-12T16:16:51.274413021+00:00 stderr F time="2025-12-12T16:16:51Z" level=error msg="registry service not healthy: one or more required resources are missing" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=2HPmE isCurrentServiceAccountNil=false isServiceNil=false numCurrentPods=0 2025-12-12T16:16:51.474591458+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="No subscriptions were found in namespace openshift-console-user-settings" id=54fV0 namespace=openshift-console-user-settings 2025-12-12T16:16:51.474591458+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="resolving sources" id=LwW4h namespace=openshift-controller-manager-operator 2025-12-12T16:16:51.474591458+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="checking if subscriptions need update" id=LwW4h namespace=openshift-controller-manager-operator 2025-12-12T16:16:51.483129887+00:00 stderr F I1212 16:16:51.483061 1 warnings.go:110] "Warning: 
spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:16:51.483394543+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="catalog update required at 2025-12-12 16:16:51.483284881 +0000 UTC m=+4.599042864" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=9soeb 2025-12-12T16:16:51.680971287+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="No subscriptions were found in namespace openshift-controller-manager" id=3/uis namespace=openshift-controller-manager 2025-12-12T16:16:51.680971287+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="resolving sources" id=OrOhz namespace=openshift-dns 2025-12-12T16:16:51.680971287+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="checking if subscriptions need update" id=OrOhz namespace=openshift-dns 2025-12-12T16:16:51.868748292+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="No subscriptions were found in namespace openshift-controller-manager-operator" id=LwW4h namespace=openshift-controller-manager-operator 2025-12-12T16:16:51.868748292+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="resolving sources" id=Wflxa namespace=openshift-dns-operator 2025-12-12T16:16:51.868748292+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="checking if subscriptions need update" id=Wflxa namespace=openshift-dns-operator 2025-12-12T16:16:51.894593443+00:00 stderr F I1212 16:16:51.893408 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:16:51.894593443+00:00 stderr F time="2025-12-12T16:16:51Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=9soeb 2025-12-12T16:16:51.894593443+00:00 stderr F time="2025-12-12T16:16:51Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=9soeb 2025-12-12T16:16:51.894593443+00:00 stderr F time="2025-12-12T16:16:51Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=9soeb 2025-12-12T16:16:52.070899207+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="No subscriptions were found in namespace openshift-dns" id=OrOhz namespace=openshift-dns 2025-12-12T16:16:52.071008470+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="resolving sources" id=GH8N/ namespace=openshift-etcd 2025-12-12T16:16:52.071036850+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="checking if subscriptions need update" id=GH8N/ namespace=openshift-etcd 2025-12-12T16:16:52.271930915+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="No subscriptions were found in namespace openshift-dns-operator" id=Wflxa namespace=openshift-dns-operator 2025-12-12T16:16:52.272025757+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="resolving sources" id=7wZ8T namespace=openshift-etcd-operator 2025-12-12T16:16:52.272049058+00:00 stderr F 
time="2025-12-12T16:16:52Z" level=info msg="checking if subscriptions need update" id=7wZ8T namespace=openshift-etcd-operator 2025-12-12T16:16:52.275898432+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=2HPmE 2025-12-12T16:16:52.275946493+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="registry pods invalid, need to overwrite" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=2HPmE 2025-12-12T16:16:52.276054876+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="creating desired pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=2HPmE pod.name= pod.namespace=openshift-marketplace 2025-12-12T16:16:52.483321286+00:00 stderr F I1212 16:16:52.482733 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:16:52.483321286+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="catalog update required at 2025-12-12 16:16:52.482822274 +0000 UTC m=+5.598580257" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=2HPmE 2025-12-12T16:16:52.671498110+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="No subscriptions were found in namespace openshift-etcd" id=GH8N/ namespace=openshift-etcd 2025-12-12T16:16:52.671498110+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="resolving sources" id=NczJN namespace=openshift-host-network 2025-12-12T16:16:52.671571552+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="checking if subscriptions need update" id=NczJN namespace=openshift-host-network 2025-12-12T16:16:52.882305637+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="No subscriptions were found in namespace openshift-etcd-operator" id=7wZ8T namespace=openshift-etcd-operator 2025-12-12T16:16:52.882305637+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="resolving sources" id=KZHUo namespace=openshift-image-registry 2025-12-12T16:16:52.882305637+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="checking if subscriptions need update" id=KZHUo namespace=openshift-image-registry 2025-12-12T16:16:52.895198632+00:00 stderr F I1212 16:16:52.889875 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:16:52.895198632+00:00 stderr F time="2025-12-12T16:16:52Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=2HPmE 2025-12-12T16:16:52.895198632+00:00 stderr F time="2025-12-12T16:16:52Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=2HPmE 2025-12-12T16:16:52.895198632+00:00 stderr F time="2025-12-12T16:16:52Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators 
catalogsource.namespace=openshift-marketplace id=2HPmE 2025-12-12T16:16:53.072978291+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="No subscriptions were found in namespace openshift-host-network" id=NczJN namespace=openshift-host-network 2025-12-12T16:16:53.072978291+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="resolving sources" id=MUBEk namespace=openshift-infra 2025-12-12T16:16:53.072978291+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="checking if subscriptions need update" id=MUBEk namespace=openshift-infra 2025-12-12T16:16:53.077215154+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=vPp+Q 2025-12-12T16:16:53.077215154+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=vPp+Q 2025-12-12T16:16:53.299043350+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="No subscriptions were found in namespace openshift-image-registry" id=KZHUo namespace=openshift-image-registry 2025-12-12T16:16:53.299043350+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="resolving sources" id=ANeJJ namespace=openshift-ingress 2025-12-12T16:16:53.299043350+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="checking if subscriptions need update" id=ANeJJ namespace=openshift-ingress 2025-12-12T16:16:53.676122286+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="No subscriptions were found in namespace openshift-infra" id=MUBEk namespace=openshift-infra 2025-12-12T16:16:53.676165028+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="resolving sources" id=t85e4 namespace=openshift-ingress-canary 2025-12-12T16:16:53.676165028+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="checking if subscriptions need update" id=t85e4 namespace=openshift-ingress-canary 2025-12-12T16:16:53.676232169+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=vPp+Q 2025-12-12T16:16:53.676240469+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=vPp+Q 2025-12-12T16:16:53.676470605+00:00 stderr F time="2025-12-12T16:16:53Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=vPp+Q 2025-12-12T16:16:53.676470605+00:00 stderr F time="2025-12-12T16:16:53Z" level=error msg="error ensuring registry server: ensure update pod error is not of type 
UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=vPp+Q 2025-12-12T16:16:53.676470605+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=vPp+Q 2025-12-12T16:16:53.679727675+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=certified-operators state.State=CONNECTING" 2025-12-12T16:16:53.694740651+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=certified-operators state.State=TRANSIENT_FAILURE" 2025-12-12T16:16:53.873655609+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="No subscriptions were found in namespace openshift-ingress" id=ANeJJ namespace=openshift-ingress 2025-12-12T16:16:53.873655609+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="resolving sources" id=YxTSE namespace=openshift-ingress-operator 2025-12-12T16:16:53.873655609+00:00 stderr F time="2025-12-12T16:16:53Z" level=info msg="checking if subscriptions need update" id=YxTSE namespace=openshift-ingress-operator 2025-12-12T16:16:54.080026558+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=xJW0o 2025-12-12T16:16:54.080026558+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=xJW0o 2025-12-12T16:16:54.080154481+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="No subscriptions were found in namespace openshift-ingress-canary" id=t85e4 namespace=openshift-ingress-canary 2025-12-12T16:16:54.080239153+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="resolving sources" id=LuXgu namespace=openshift-kni-infra 2025-12-12T16:16:54.080239153+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="checking if subscriptions need update" id=LuXgu namespace=openshift-kni-infra 2025-12-12T16:16:54.479740717+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="No subscriptions were found in namespace openshift-ingress-operator" id=YxTSE namespace=openshift-ingress-operator 2025-12-12T16:16:54.479740717+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="resolving sources" id=sK/nP namespace=openshift-kube-apiserver 2025-12-12T16:16:54.479740717+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="checking if subscriptions need update" id=sK/nP namespace=openshift-kube-apiserver 2025-12-12T16:16:54.675995638+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="No subscriptions were found in namespace openshift-kni-infra" id=LuXgu namespace=openshift-kni-infra 2025-12-12T16:16:54.675995638+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="resolving sources" id=AbQ8M namespace=openshift-kube-apiserver-operator 2025-12-12T16:16:54.675995638+00:00 
stderr F time="2025-12-12T16:16:54Z" level=info msg="checking if subscriptions need update" id=AbQ8M namespace=openshift-kube-apiserver-operator 2025-12-12T16:16:54.881056804+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="No subscriptions were found in namespace openshift-kube-apiserver" id=sK/nP namespace=openshift-kube-apiserver 2025-12-12T16:16:54.881056804+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="resolving sources" id=jls5k namespace=openshift-kube-controller-manager 2025-12-12T16:16:54.881056804+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="checking if subscriptions need update" id=jls5k namespace=openshift-kube-controller-manager 2025-12-12T16:16:54.881336021+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=jqsdU 2025-12-12T16:16:54.881359282+00:00 stderr F time="2025-12-12T16:16:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=jqsdU 2025-12-12T16:16:55.072229682+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="No subscriptions were found in namespace openshift-kube-apiserver-operator" id=AbQ8M namespace=openshift-kube-apiserver-operator 2025-12-12T16:16:55.072229682+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="resolving sources" id=/DWBy namespace=openshift-kube-controller-manager-operator 2025-12-12T16:16:55.072229682+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="checking if subscriptions need update" id=/DWBy namespace=openshift-kube-controller-manager-operator 2025-12-12T16:16:55.080451352+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=xJW0o 2025-12-12T16:16:55.080451352+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=xJW0o 2025-12-12T16:16:55.080497164+00:00 stderr F time="2025-12-12T16:16:55Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=xJW0o 2025-12-12T16:16:55.080497164+00:00 stderr F time="2025-12-12T16:16:55Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=xJW0o 2025-12-12T16:16:55.080523204+00:00 stderr F time="2025-12-12T16:16:55Z" 
level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=xJW0o 2025-12-12T16:16:55.080937874+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=community-operators state.State=CONNECTING" 2025-12-12T16:16:55.271371104+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="No subscriptions were found in namespace openshift-kube-controller-manager" id=jls5k namespace=openshift-kube-controller-manager 2025-12-12T16:16:55.271371104+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="resolving sources" id=/M9kU namespace=openshift-kube-scheduler 2025-12-12T16:16:55.271371104+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="checking if subscriptions need update" id=/M9kU namespace=openshift-kube-scheduler 2025-12-12T16:16:55.481101894+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="No subscriptions were found in namespace openshift-kube-controller-manager-operator" id=/DWBy namespace=openshift-kube-controller-manager-operator 2025-12-12T16:16:55.481101894+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="resolving sources" id=b7sll namespace=openshift-kube-scheduler-operator 2025-12-12T16:16:55.481101894+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="checking if subscriptions need update" id=b7sll namespace=openshift-kube-scheduler-operator 2025-12-12T16:16:55.597498096+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=community-operators state.State=TRANSIENT_FAILURE" 2025-12-12T16:16:55.698231975+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=jqsdU 2025-12-12T16:16:55.698231975+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=jqsdU 2025-12-12T16:16:55.698231975+00:00 stderr F time="2025-12-12T16:16:55Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=jqsdU 2025-12-12T16:16:55.698231975+00:00 stderr F time="2025-12-12T16:16:55Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=jqsdU 2025-12-12T16:16:55.698231975+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=jqsdU 2025-12-12T16:16:55.699200309+00:00 stderr F time="2025-12-12T16:16:55Z" level=info 
msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-marketplace state.State=CONNECTING" 2025-12-12T16:16:55.744319910+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-marketplace state.State=TRANSIENT_FAILURE" 2025-12-12T16:16:55.881721965+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="No subscriptions were found in namespace openshift-kube-scheduler" id=/M9kU namespace=openshift-kube-scheduler 2025-12-12T16:16:55.881721965+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="resolving sources" id=lMd8L namespace=openshift-kube-storage-version-migrator 2025-12-12T16:16:55.881721965+00:00 stderr F time="2025-12-12T16:16:55Z" level=info msg="checking if subscriptions need update" id=lMd8L namespace=openshift-kube-storage-version-migrator 2025-12-12T16:16:56.073504997+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="No subscriptions were found in namespace openshift-kube-scheduler-operator" id=b7sll namespace=openshift-kube-scheduler-operator 2025-12-12T16:16:56.073504997+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="resolving sources" id=EhC7/ namespace=openshift-kube-storage-version-migrator-operator 2025-12-12T16:16:56.073504997+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="checking if subscriptions need update" id=EhC7/ namespace=openshift-kube-storage-version-migrator-operator 2025-12-12T16:16:56.077921165+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=2eT6N 2025-12-12T16:16:56.077921165+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=2eT6N 2025-12-12T16:16:56.470208193+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="No subscriptions were found in namespace openshift-kube-storage-version-migrator" id=lMd8L namespace=openshift-kube-storage-version-migrator 2025-12-12T16:16:56.470208193+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="resolving sources" id=1Fc3/ namespace=openshift-machine-api 2025-12-12T16:16:56.470208193+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="checking if subscriptions need update" id=1Fc3/ namespace=openshift-machine-api 2025-12-12T16:16:56.672346877+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="No subscriptions were found in namespace openshift-kube-storage-version-migrator-operator" id=EhC7/ namespace=openshift-kube-storage-version-migrator-operator 2025-12-12T16:16:56.672346877+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="resolving sources" id=sJz+H namespace=openshift-machine-config-operator 2025-12-12T16:16:56.672346877+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="checking if subscriptions need update" id=sJz+H namespace=openshift-machine-config-operator 2025-12-12T16:16:56.871418747+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="No subscriptions were found in namespace openshift-machine-api" id=1Fc3/ namespace=openshift-machine-api 2025-12-12T16:16:56.871462198+00:00 stderr F time="2025-12-12T16:16:56Z" 
level=info msg="resolving sources" id=9mUZt namespace=openshift-marketplace 2025-12-12T16:16:56.871462198+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="checking if subscriptions need update" id=9mUZt namespace=openshift-marketplace 2025-12-12T16:16:56.874953433+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=Ndx9A 2025-12-12T16:16:56.874953433+00:00 stderr F time="2025-12-12T16:16:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=Ndx9A 2025-12-12T16:16:57.074730281+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="No subscriptions were found in namespace openshift-machine-config-operator" id=sJz+H namespace=openshift-machine-config-operator 2025-12-12T16:16:57.074730281+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="resolving sources" id=swN/q namespace=openshift-monitoring 2025-12-12T16:16:57.074730281+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="checking if subscriptions need update" id=swN/q namespace=openshift-monitoring 2025-12-12T16:16:57.074730281+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=2eT6N 2025-12-12T16:16:57.074730281+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=2eT6N 2025-12-12T16:16:57.074730281+00:00 stderr F time="2025-12-12T16:16:57Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=2eT6N 2025-12-12T16:16:57.074730281+00:00 stderr F time="2025-12-12T16:16:57Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=2eT6N 2025-12-12T16:16:57.074730281+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=2eT6N 2025-12-12T16:16:57.075262004+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-operators state.State=CONNECTING" 2025-12-12T16:16:57.082380717+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="state.Key.Namespace=openshift-marketplace 
state.Key.Name=redhat-operators state.State=TRANSIENT_FAILURE" 2025-12-12T16:16:57.270069050+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="No subscriptions were found in namespace openshift-marketplace" id=9mUZt namespace=openshift-marketplace 2025-12-12T16:16:57.270069050+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="resolving sources" id=BO5Ft namespace=openshift-multus 2025-12-12T16:16:57.270069050+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="checking if subscriptions need update" id=BO5Ft namespace=openshift-multus 2025-12-12T16:16:57.469495159+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="No subscriptions were found in namespace openshift-monitoring" id=swN/q namespace=openshift-monitoring 2025-12-12T16:16:57.469495159+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="resolving sources" id=HBQn3 namespace=openshift-network-console 2025-12-12T16:16:57.469543530+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="checking if subscriptions need update" id=HBQn3 namespace=openshift-network-console 2025-12-12T16:16:57.690156796+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=Ndx9A 2025-12-12T16:16:57.690156796+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=Ndx9A 2025-12-12T16:16:57.690156796+00:00 stderr F time="2025-12-12T16:16:57Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=Ndx9A 2025-12-12T16:16:58.486971820+00:00 stderr F time="2025-12-12T16:16:57Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=Ndx9A 2025-12-12T16:16:58.486971820+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=Ndx9A 2025-12-12T16:16:58.487684927+00:00 stderr F time="2025-12-12T16:16:57Z" level=info msg="No subscriptions were found in namespace openshift-multus" id=BO5Ft namespace=openshift-multus 2025-12-12T16:16:58.487684927+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="resolving sources" id=tBvzw namespace=openshift-network-diagnostics 2025-12-12T16:16:58.487684927+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="checking if subscriptions need update" id=tBvzw namespace=openshift-network-diagnostics 2025-12-12T16:16:58.487769039+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="evaluating current pod" catalogsource.name=community-operators 
catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=Fu+j8 2025-12-12T16:16:58.487769039+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=Fu+j8 2025-12-12T16:16:58.488116698+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="No subscriptions were found in namespace openshift-network-console" id=HBQn3 namespace=openshift-network-console 2025-12-12T16:16:58.488129168+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="resolving sources" id=qrrdJ namespace=openshift-network-node-identity 2025-12-12T16:16:58.488129168+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="checking if subscriptions need update" id=qrrdJ namespace=openshift-network-node-identity 2025-12-12T16:16:58.498909181+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="No subscriptions were found in namespace openshift-network-diagnostics" id=tBvzw namespace=openshift-network-diagnostics 2025-12-12T16:16:58.498909181+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="resolving sources" id=DEmNe namespace=openshift-network-operator 2025-12-12T16:16:58.498909181+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="checking if subscriptions need update" id=DEmNe namespace=openshift-network-operator 2025-12-12T16:16:58.669019364+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="No subscriptions were found in namespace openshift-network-node-identity" id=qrrdJ namespace=openshift-network-node-identity 2025-12-12T16:16:58.669144958+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="resolving sources" id=W8ftq namespace=openshift-node 2025-12-12T16:16:58.669224950+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="checking if subscriptions need update" id=W8ftq namespace=openshift-node 2025-12-12T16:16:58.870716979+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="No subscriptions were found in namespace openshift-network-operator" id=DEmNe namespace=openshift-network-operator 2025-12-12T16:16:58.870716979+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="resolving sources" id=uHSuZ namespace=openshift-nutanix-infra 2025-12-12T16:16:58.870764630+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="checking if subscriptions need update" id=uHSuZ namespace=openshift-nutanix-infra 2025-12-12T16:16:58.874029200+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=Fu+j8 2025-12-12T16:16:58.874029200+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=Fu+j8 2025-12-12T16:16:58.874106332+00:00 stderr F time="2025-12-12T16:16:58Z" level=error msg="error ensuring registry server: could not ensure update pod" 
catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=Fu+j8 2025-12-12T16:16:58.874106332+00:00 stderr F time="2025-12-12T16:16:58Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=Fu+j8 2025-12-12T16:16:58.874106332+00:00 stderr F time="2025-12-12T16:16:58Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=Fu+j8 2025-12-12T16:16:59.073038538+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="No subscriptions were found in namespace openshift-node" id=W8ftq namespace=openshift-node 2025-12-12T16:16:59.073100000+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="resolving sources" id=hAtUb namespace=openshift-oauth-apiserver 2025-12-12T16:16:59.073100000+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="checking if subscriptions need update" id=hAtUb namespace=openshift-oauth-apiserver 2025-12-12T16:16:59.074827872+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=cmJ6j 2025-12-12T16:16:59.074827872+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=cmJ6j 2025-12-12T16:16:59.272883887+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="No subscriptions were found in namespace openshift-nutanix-infra" id=uHSuZ namespace=openshift-nutanix-infra 2025-12-12T16:16:59.272883887+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="resolving sources" id=vAgNN namespace=openshift-openstack-infra 2025-12-12T16:16:59.272883887+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="checking if subscriptions need update" id=vAgNN namespace=openshift-openstack-infra 2025-12-12T16:16:59.673850497+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="No subscriptions were found in namespace openshift-oauth-apiserver" id=hAtUb namespace=openshift-oauth-apiserver 2025-12-12T16:16:59.673895508+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="resolving sources" id=WW94F namespace=openshift-operator-lifecycle-manager 2025-12-12T16:16:59.673895508+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="checking if subscriptions need update" id=WW94F namespace=openshift-operator-lifecycle-manager 2025-12-12T16:16:59.675734163+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=cmJ6j 2025-12-12T16:16:59.675734163+00:00 stderr 
F time="2025-12-12T16:16:59Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=cmJ6j 2025-12-12T16:16:59.675734163+00:00 stderr F time="2025-12-12T16:16:59Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=cmJ6j 2025-12-12T16:16:59.675734163+00:00 stderr F time="2025-12-12T16:16:59Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=cmJ6j 2025-12-12T16:16:59.675734163+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=cmJ6j 2025-12-12T16:16:59.872117488+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="No subscriptions were found in namespace openshift-openstack-infra" id=vAgNN namespace=openshift-openstack-infra 2025-12-12T16:16:59.872117488+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="resolving sources" id=hskl6 namespace=openshift-operators 2025-12-12T16:16:59.872162429+00:00 stderr F time="2025-12-12T16:16:59Z" level=info msg="checking if subscriptions need update" id=hskl6 namespace=openshift-operators 2025-12-12T16:17:00.071261680+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="No subscriptions were found in namespace openshift-operator-lifecycle-manager" id=WW94F namespace=openshift-operator-lifecycle-manager 2025-12-12T16:17:00.071261680+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="resolving sources" id=g3za5 namespace=openshift-ovirt-infra 2025-12-12T16:17:00.071261680+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="checking if subscriptions need update" id=g3za5 namespace=openshift-ovirt-infra 2025-12-12T16:17:00.073458093+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=WEOlF 2025-12-12T16:17:00.073514985+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=WEOlF 2025-12-12T16:17:00.471445329+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="No subscriptions were found in namespace openshift-operators" id=hskl6 namespace=openshift-operators 2025-12-12T16:17:00.471445329+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="resolving sources" id=3TZ8K namespace=openshift-ovn-kubernetes 2025-12-12T16:17:00.471445329+00:00 stderr F time="2025-12-12T16:17:00Z" 
level=info msg="checking if subscriptions need update" id=3TZ8K namespace=openshift-ovn-kubernetes 2025-12-12T16:17:00.670201701+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="No subscriptions were found in namespace openshift-ovirt-infra" id=g3za5 namespace=openshift-ovirt-infra 2025-12-12T16:17:00.670285223+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="resolving sources" id=CouNj namespace=openshift-route-controller-manager 2025-12-12T16:17:00.670285223+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="checking if subscriptions need update" id=CouNj namespace=openshift-route-controller-manager 2025-12-12T16:17:00.870605614+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="No subscriptions were found in namespace openshift-ovn-kubernetes" id=3TZ8K namespace=openshift-ovn-kubernetes 2025-12-12T16:17:00.870605614+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="resolving sources" id=/Pg90 namespace=openshift-service-ca 2025-12-12T16:17:00.870659945+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="checking if subscriptions need update" id=/Pg90 namespace=openshift-service-ca 2025-12-12T16:17:00.873604227+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=GC+67 2025-12-12T16:17:00.873604227+00:00 stderr F time="2025-12-12T16:17:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=GC+67 2025-12-12T16:17:01.070144316+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="No subscriptions were found in namespace openshift-route-controller-manager" id=CouNj namespace=openshift-route-controller-manager 2025-12-12T16:17:01.070144316+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="resolving sources" id=bwQiM namespace=openshift-service-ca-operator 2025-12-12T16:17:01.070144316+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="checking if subscriptions need update" id=bwQiM namespace=openshift-service-ca-operator 2025-12-12T16:17:01.073275512+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=WEOlF 2025-12-12T16:17:01.073275512+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=WEOlF 2025-12-12T16:17:01.073353544+00:00 stderr F time="2025-12-12T16:17:01Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=WEOlF 2025-12-12T16:17:01.073364144+00:00 stderr F 
time="2025-12-12T16:17:01Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=WEOlF 2025-12-12T16:17:01.073364144+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=WEOlF 2025-12-12T16:17:01.269563575+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="No subscriptions were found in namespace openshift-service-ca" id=/Pg90 namespace=openshift-service-ca 2025-12-12T16:17:01.269619256+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="resolving sources" id=uu7xD namespace=openshift-user-workload-monitoring 2025-12-12T16:17:01.269619256+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="checking if subscriptions need update" id=uu7xD namespace=openshift-user-workload-monitoring 2025-12-12T16:17:01.470439709+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="No subscriptions were found in namespace openshift-service-ca-operator" id=bwQiM namespace=openshift-service-ca-operator 2025-12-12T16:17:01.470439709+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="resolving sources" id=pHmAg namespace=openshift-vsphere-infra 2025-12-12T16:17:01.470439709+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="checking if subscriptions need update" id=pHmAg namespace=openshift-vsphere-infra 2025-12-12T16:17:01.674558042+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=GC+67 2025-12-12T16:17:01.674558042+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=GC+67 2025-12-12T16:17:01.674558042+00:00 stderr F time="2025-12-12T16:17:01Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=GC+67 2025-12-12T16:17:01.674558042+00:00 stderr F time="2025-12-12T16:17:01Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=GC+67 2025-12-12T16:17:01.674558042+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=GC+67 2025-12-12T16:17:01.869795069+00:00 stderr F time="2025-12-12T16:17:01Z" level=info msg="No subscriptions were found 
in namespace openshift-user-workload-monitoring" id=uu7xD namespace=openshift-user-workload-monitoring 2025-12-12T16:17:02.079325864+00:00 stderr F time="2025-12-12T16:17:02Z" level=info msg="No subscriptions were found in namespace openshift-vsphere-infra" id=pHmAg namespace=openshift-vsphere-infra 2025-12-12T16:17:02.274975011+00:00 stderr F time="2025-12-12T16:17:02Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=DXzHQ 2025-12-12T16:17:02.275051793+00:00 stderr F time="2025-12-12T16:17:02Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=DXzHQ 2025-12-12T16:17:02.476518742+00:00 stderr F time="2025-12-12T16:17:02Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=2j65y 2025-12-12T16:17:02.476518742+00:00 stderr F time="2025-12-12T16:17:02Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=2j65y 2025-12-12T16:17:03.474056106+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=DXzHQ 2025-12-12T16:17:03.474056106+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=DXzHQ 2025-12-12T16:17:03.474117797+00:00 stderr F time="2025-12-12T16:17:03Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=DXzHQ 2025-12-12T16:17:03.474117797+00:00 stderr F time="2025-12-12T16:17:03Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=DXzHQ 2025-12-12T16:17:03.474117797+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=DXzHQ 2025-12-12T16:17:03.674764395+00:00 stderr F 
time="2025-12-12T16:17:03Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=2j65y 2025-12-12T16:17:03.674764395+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=2j65y 2025-12-12T16:17:03.674794046+00:00 stderr F time="2025-12-12T16:17:03Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=2j65y 2025-12-12T16:17:03.674806296+00:00 stderr F time="2025-12-12T16:17:03Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=2j65y 2025-12-12T16:17:03.674832817+00:00 stderr F time="2025-12-12T16:17:03Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=2j65y 2025-12-12T16:17:04.073969192+00:00 stderr F time="2025-12-12T16:17:04Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=gpxwZ 2025-12-12T16:17:04.073969192+00:00 stderr F time="2025-12-12T16:17:04Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=gpxwZ 2025-12-12T16:17:04.676855891+00:00 stderr F time="2025-12-12T16:17:04Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=gpxwZ 2025-12-12T16:17:04.676971724+00:00 stderr F time="2025-12-12T16:17:04Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=gpxwZ 2025-12-12T16:17:04.677099867+00:00 stderr F time="2025-12-12T16:17:04Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=gpxwZ 2025-12-12T16:17:04.677134178+00:00 stderr F time="2025-12-12T16:17:04Z" 
level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=gpxwZ 2025-12-12T16:17:04.677167028+00:00 stderr F time="2025-12-12T16:17:04Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=gpxwZ 2025-12-12T16:17:19.701201388+00:00 stderr F time="2025-12-12T16:17:19Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=22+KK 2025-12-12T16:17:19.701201388+00:00 stderr F time="2025-12-12T16:17:19Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=22+KK 2025-12-12T16:17:19.711019330+00:00 stderr F time="2025-12-12T16:17:19Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=22+KK 2025-12-12T16:17:19.711019330+00:00 stderr F time="2025-12-12T16:17:19Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=22+KK 2025-12-12T16:17:19.711161884+00:00 stderr F time="2025-12-12T16:17:19Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=22+KK 2025-12-12T16:17:19.711161884+00:00 stderr F time="2025-12-12T16:17:19Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=22+KK 2025-12-12T16:17:19.711161884+00:00 stderr F time="2025-12-12T16:17:19Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=22+KK 2025-12-12T16:17:19.891992972+00:00 stderr F time="2025-12-12T16:17:19Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=u4LW2 2025-12-12T16:17:19.891992972+00:00 stderr F time="2025-12-12T16:17:19Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching 
hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=u4LW2 2025-12-12T16:17:19.901690510+00:00 stderr F time="2025-12-12T16:17:19Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=u4LW2 2025-12-12T16:17:19.901690510+00:00 stderr F time="2025-12-12T16:17:19Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=u4LW2 2025-12-12T16:17:19.901736872+00:00 stderr F time="2025-12-12T16:17:19Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=u4LW2 2025-12-12T16:17:19.901780163+00:00 stderr F time="2025-12-12T16:17:19Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=u4LW2 2025-12-12T16:17:19.901780163+00:00 stderr F time="2025-12-12T16:17:19Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=u4LW2 2025-12-12T16:17:21.903976246+00:00 stderr F time="2025-12-12T16:17:21Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=CB4sg 2025-12-12T16:17:21.903976246+00:00 stderr F time="2025-12-12T16:17:21Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=CB4sg 2025-12-12T16:17:21.916380662+00:00 stderr F time="2025-12-12T16:17:21Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=CB4sg 2025-12-12T16:17:21.916380662+00:00 stderr F time="2025-12-12T16:17:21Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=CB4sg 2025-12-12T16:17:21.916550207+00:00 stderr F time="2025-12-12T16:17:21Z" level=error msg="error ensuring registry server: could not ensure update 
pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=CB4sg 2025-12-12T16:17:21.916550207+00:00 stderr F time="2025-12-12T16:17:21Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=CB4sg 2025-12-12T16:17:21.916584258+00:00 stderr F time="2025-12-12T16:17:21Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=CB4sg 2025-12-12T16:17:22.900342223+00:00 stderr F time="2025-12-12T16:17:22Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=ZsGCy 2025-12-12T16:17:22.900342223+00:00 stderr F time="2025-12-12T16:17:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=ZsGCy 2025-12-12T16:17:22.913985034+00:00 stderr F time="2025-12-12T16:17:22Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=ZsGCy 2025-12-12T16:17:22.913985034+00:00 stderr F time="2025-12-12T16:17:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=ZsGCy 2025-12-12T16:17:22.914131389+00:00 stderr F time="2025-12-12T16:17:22Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=ZsGCy 2025-12-12T16:17:22.914131389+00:00 stderr F time="2025-12-12T16:17:22Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=ZsGCy 2025-12-12T16:17:22.914172830+00:00 stderr F time="2025-12-12T16:17:22Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=ZsGCy 2025-12-12T16:17:31.677415524+00:00 stderr F time="2025-12-12T16:17:31Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true 
current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=E77Om 2025-12-12T16:17:31.677415524+00:00 stderr F time="2025-12-12T16:17:31Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=E77Om 2025-12-12T16:17:31.704031808+00:00 stderr F time="2025-12-12T16:17:31Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=E77Om 2025-12-12T16:17:31.704031808+00:00 stderr F time="2025-12-12T16:17:31Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=E77Om 2025-12-12T16:17:31.704031808+00:00 stderr F time="2025-12-12T16:17:31Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=E77Om 2025-12-12T16:17:31.704031808+00:00 stderr F time="2025-12-12T16:17:31Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=E77Om 2025-12-12T16:17:31.704070119+00:00 stderr F time="2025-12-12T16:17:31Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=E77Om 2025-12-12T16:17:31.704358898+00:00 stderr F time="2025-12-12T16:17:31Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=nFUtI 2025-12-12T16:17:31.704358898+00:00 stderr F time="2025-12-12T16:17:31Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=nFUtI 2025-12-12T16:17:31.717225077+00:00 stderr F time="2025-12-12T16:17:31Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=nFUtI 2025-12-12T16:17:31.717225077+00:00 stderr F time="2025-12-12T16:17:31Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true 
current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=nFUtI 2025-12-12T16:17:31.717225077+00:00 stderr F time="2025-12-12T16:17:31Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=nFUtI 2025-12-12T16:17:31.717225077+00:00 stderr F time="2025-12-12T16:17:31Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=nFUtI 2025-12-12T16:17:31.717225077+00:00 stderr F time="2025-12-12T16:17:31Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=nFUtI 2025-12-12T16:17:32.272650753+00:00 stderr F time="2025-12-12T16:17:32Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=mirbD 2025-12-12T16:17:32.272752195+00:00 stderr F time="2025-12-12T16:17:32Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=mirbD 2025-12-12T16:17:32.695910425+00:00 stderr F time="2025-12-12T16:17:32Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=5jjRU 2025-12-12T16:17:32.695910425+00:00 stderr F time="2025-12-12T16:17:32Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=5jjRU 2025-12-12T16:17:33.269330557+00:00 stderr F time="2025-12-12T16:17:33Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=mirbD 2025-12-12T16:17:33.269330557+00:00 stderr F time="2025-12-12T16:17:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=mirbD 2025-12-12T16:17:33.269420030+00:00 stderr F time="2025-12-12T16:17:33Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: 
update pod redhat-operators-2blsm has not yet reported ready" id=mirbD 2025-12-12T16:17:33.269428740+00:00 stderr F time="2025-12-12T16:17:33Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=mirbD 2025-12-12T16:17:33.269451771+00:00 stderr F time="2025-12-12T16:17:33Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=mirbD 2025-12-12T16:17:33.875717685+00:00 stderr F time="2025-12-12T16:17:33Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=5jjRU 2025-12-12T16:17:33.875717685+00:00 stderr F time="2025-12-12T16:17:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=5jjRU 2025-12-12T16:17:33.875769377+00:00 stderr F time="2025-12-12T16:17:33Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=5jjRU 2025-12-12T16:17:33.875769377+00:00 stderr F time="2025-12-12T16:17:33Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=5jjRU 2025-12-12T16:17:33.875793528+00:00 stderr F time="2025-12-12T16:17:33Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=5jjRU 2025-12-12T16:17:34.091989630+00:00 stderr F time="2025-12-12T16:17:34Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=AaZSL 2025-12-12T16:17:34.091989630+00:00 stderr F time="2025-12-12T16:17:34Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=AaZSL 2025-12-12T16:17:34.674322628+00:00 stderr F time="2025-12-12T16:17:34Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=lVRPV 
2025-12-12T16:17:34.674322628+00:00 stderr F time="2025-12-12T16:17:34Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=lVRPV 2025-12-12T16:17:35.273585032+00:00 stderr F time="2025-12-12T16:17:35Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=AaZSL 2025-12-12T16:17:35.273585032+00:00 stderr F time="2025-12-12T16:17:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=AaZSL 2025-12-12T16:17:35.273647194+00:00 stderr F time="2025-12-12T16:17:35Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=AaZSL 2025-12-12T16:17:35.273647194+00:00 stderr F time="2025-12-12T16:17:35Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=AaZSL 2025-12-12T16:17:35.273655814+00:00 stderr F time="2025-12-12T16:17:35Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=AaZSL 2025-12-12T16:17:35.872239418+00:00 stderr F time="2025-12-12T16:17:35Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=lVRPV 2025-12-12T16:17:35.872239418+00:00 stderr F time="2025-12-12T16:17:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=lVRPV 2025-12-12T16:17:35.872310410+00:00 stderr F time="2025-12-12T16:17:35Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=lVRPV 2025-12-12T16:17:35.872310410+00:00 stderr F time="2025-12-12T16:17:35Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for 
update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=lVRPV 2025-12-12T16:17:35.872310410+00:00 stderr F time="2025-12-12T16:17:35Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=lVRPV 2025-12-12T16:17:36.071156694+00:00 stderr F time="2025-12-12T16:17:36Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=OEVsJ 2025-12-12T16:17:36.071156694+00:00 stderr F time="2025-12-12T16:17:36Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=OEVsJ 2025-12-12T16:17:36.670857370+00:00 stderr F time="2025-12-12T16:17:36Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=Kl8UB 2025-12-12T16:17:36.670857370+00:00 stderr F time="2025-12-12T16:17:36Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=Kl8UB 2025-12-12T16:17:37.272477791+00:00 stderr F time="2025-12-12T16:17:37Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=OEVsJ 2025-12-12T16:17:37.272477791+00:00 stderr F time="2025-12-12T16:17:37Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=OEVsJ 2025-12-12T16:17:37.272944155+00:00 stderr F time="2025-12-12T16:17:37Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=OEVsJ 2025-12-12T16:17:37.272944155+00:00 stderr F time="2025-12-12T16:17:37Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-2blsm has not yet reported ready" id=OEVsJ 2025-12-12T16:17:37.272944155+00:00 stderr F time="2025-12-12T16:17:37Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=OEVsJ 2025-12-12T16:17:37.877428498+00:00 stderr F 
time="2025-12-12T16:17:37Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=Kl8UB 2025-12-12T16:17:37.877428498+00:00 stderr F time="2025-12-12T16:17:37Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=Kl8UB 2025-12-12T16:17:37.877428498+00:00 stderr F time="2025-12-12T16:17:37Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=Kl8UB 2025-12-12T16:17:37.877428498+00:00 stderr F time="2025-12-12T16:17:37Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=Kl8UB 2025-12-12T16:17:37.877428498+00:00 stderr F time="2025-12-12T16:17:37Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=Kl8UB 2025-12-12T16:17:38.071494476+00:00 stderr F time="2025-12-12T16:17:38Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=84Q26 2025-12-12T16:17:38.071494476+00:00 stderr F time="2025-12-12T16:17:38Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=84Q26 2025-12-12T16:17:38.672950633+00:00 stderr F time="2025-12-12T16:17:38Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=s624A 2025-12-12T16:17:38.672950633+00:00 stderr F time="2025-12-12T16:17:38Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=s624A 2025-12-12T16:17:39.272384591+00:00 stderr F time="2025-12-12T16:17:39Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=84Q26 2025-12-12T16:17:39.272384591+00:00 stderr F time="2025-12-12T16:17:39Z" 
level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=84Q26 2025-12-12T16:17:39.272421612+00:00 stderr F time="2025-12-12T16:17:39Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=84Q26 2025-12-12T16:17:39.272436513+00:00 stderr F time="2025-12-12T16:17:39Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-kxjp8 has not yet reported ready" id=84Q26 2025-12-12T16:17:39.272445973+00:00 stderr F time="2025-12-12T16:17:39Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=84Q26 2025-12-12T16:17:39.670836052+00:00 stderr F time="2025-12-12T16:17:39Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=s624A 2025-12-12T16:17:39.670836052+00:00 stderr F time="2025-12-12T16:17:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=s624A 2025-12-12T16:17:39.670836052+00:00 stderr F time="2025-12-12T16:17:39Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=s624A 2025-12-12T16:17:39.670836052+00:00 stderr F time="2025-12-12T16:17:39Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-marketplace not ready for update: update pod redhat-marketplace-mgp9n has not yet reported ready" id=s624A 2025-12-12T16:17:39.670836052+00:00 stderr F time="2025-12-12T16:17:39Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=s624A 2025-12-12T16:17:41.097514667+00:00 stderr F time="2025-12-12T16:17:41Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=+jqz/ 2025-12-12T16:17:41.097606429+00:00 stderr F time="2025-12-12T16:17:41Z" level=info msg="of 1 pods matching label 
selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=+jqz/ 2025-12-12T16:17:41.116765383+00:00 stderr F time="2025-12-12T16:17:41Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=+jqz/ 2025-12-12T16:17:41.116837855+00:00 stderr F time="2025-12-12T16:17:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=+jqz/ 2025-12-12T16:17:41.116963348+00:00 stderr F time="2025-12-12T16:17:41Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=+jqz/ 2025-12-12T16:17:41.117003889+00:00 stderr F time="2025-12-12T16:17:41Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-p7s65 has not yet reported ready" id=+jqz/ 2025-12-12T16:17:41.117035660+00:00 stderr F time="2025-12-12T16:17:41Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=+jqz/ 2025-12-12T16:17:41.119399068+00:00 stderr F time="2025-12-12T16:17:41Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=UAyQY 2025-12-12T16:17:41.119453559+00:00 stderr F time="2025-12-12T16:17:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=UAyQY 2025-12-12T16:17:41.871747738+00:00 stderr F time="2025-12-12T16:17:41Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=UTHXH 2025-12-12T16:17:41.871747738+00:00 stderr F time="2025-12-12T16:17:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=UTHXH 2025-12-12T16:17:42.070938383+00:00 stderr F time="2025-12-12T16:17:42Z" level=info 
msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=UAyQY 2025-12-12T16:17:42.070938383+00:00 stderr F time="2025-12-12T16:17:42Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=UAyQY 2025-12-12T16:17:43.069800747+00:00 stderr F time="2025-12-12T16:17:43Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=UTHXH 2025-12-12T16:17:43.069800747+00:00 stderr F time="2025-12-12T16:17:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=UTHXH 2025-12-12T16:17:43.270926889+00:00 stderr F time="2025-12-12T16:17:43Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=2Efbh 2025-12-12T16:17:43.270926889+00:00 stderr F time="2025-12-12T16:17:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=2Efbh 2025-12-12T16:17:44.271344052+00:00 stderr F time="2025-12-12T16:17:44Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=SLMlx 2025-12-12T16:17:44.271344052+00:00 stderr F time="2025-12-12T16:17:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=SLMlx 2025-12-12T16:17:44.470756182+00:00 stderr F time="2025-12-12T16:17:44Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=2Efbh 2025-12-12T16:17:44.470756182+00:00 stderr F time="2025-12-12T16:17:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=2Efbh 2025-12-12T16:17:45.470847297+00:00 stderr F 
time="2025-12-12T16:17:45Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=SLMlx 2025-12-12T16:17:45.470847297+00:00 stderr F time="2025-12-12T16:17:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=SLMlx 2025-12-12T16:17:45.670777550+00:00 stderr F time="2025-12-12T16:17:45Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=uQtfU 2025-12-12T16:17:45.670777550+00:00 stderr F time="2025-12-12T16:17:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=uQtfU 2025-12-12T16:17:46.676490323+00:00 stderr F time="2025-12-12T16:17:46Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=9OIeN 2025-12-12T16:17:46.676490323+00:00 stderr F time="2025-12-12T16:17:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=9OIeN 2025-12-12T16:17:46.871116885+00:00 stderr F time="2025-12-12T16:17:46Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=uQtfU 2025-12-12T16:17:46.871116885+00:00 stderr F time="2025-12-12T16:17:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=uQtfU 2025-12-12T16:17:47.872232646+00:00 stderr F time="2025-12-12T16:17:47Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=9OIeN 2025-12-12T16:17:47.872232646+00:00 stderr F time="2025-12-12T16:17:47Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=9OIeN 2025-12-12T16:17:48.071152723+00:00 stderr 
F time="2025-12-12T16:17:48Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=B6hkp 2025-12-12T16:17:48.071152723+00:00 stderr F time="2025-12-12T16:17:48Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=B6hkp 2025-12-12T16:17:48.669948838+00:00 stderr F time="2025-12-12T16:17:48Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=Kr5S6 2025-12-12T16:17:48.669948838+00:00 stderr F time="2025-12-12T16:17:48Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=Kr5S6 2025-12-12T16:17:49.271632275+00:00 stderr F time="2025-12-12T16:17:49Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=B6hkp 2025-12-12T16:17:49.271632275+00:00 stderr F time="2025-12-12T16:17:49Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=B6hkp 2025-12-12T16:17:49.870894649+00:00 stderr F time="2025-12-12T16:17:49Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=Kr5S6 2025-12-12T16:17:49.870894649+00:00 stderr F time="2025-12-12T16:17:49Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=Kr5S6 2025-12-12T16:17:50.069617423+00:00 stderr F time="2025-12-12T16:17:50Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=Hd4Dc 2025-12-12T16:17:50.069617423+00:00 stderr F time="2025-12-12T16:17:50Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=Hd4Dc 2025-12-12T16:17:50.670424415+00:00 
stderr F time="2025-12-12T16:17:50Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=irPWD 2025-12-12T16:17:50.670424415+00:00 stderr F time="2025-12-12T16:17:50Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=irPWD 2025-12-12T16:17:51.272801018+00:00 stderr F time="2025-12-12T16:17:51Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=Hd4Dc 2025-12-12T16:17:51.272801018+00:00 stderr F time="2025-12-12T16:17:51Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=Hd4Dc 2025-12-12T16:17:51.871521040+00:00 stderr F time="2025-12-12T16:17:51Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=irPWD 2025-12-12T16:17:51.871521040+00:00 stderr F time="2025-12-12T16:17:51Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=irPWD 2025-12-12T16:17:52.070283354+00:00 stderr F time="2025-12-12T16:17:52Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=PH+we 2025-12-12T16:17:52.070283354+00:00 stderr F time="2025-12-12T16:17:52Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=PH+we 2025-12-12T16:17:52.670903983+00:00 stderr F time="2025-12-12T16:17:52Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=x9B/W 2025-12-12T16:17:52.670903983+00:00 stderr F time="2025-12-12T16:17:52Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=x9B/W 
2025-12-12T16:17:53.271810339+00:00 stderr F time="2025-12-12T16:17:53Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=PH+we 2025-12-12T16:17:53.271810339+00:00 stderr F time="2025-12-12T16:17:53Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=PH+we 2025-12-12T16:17:53.869853364+00:00 stderr F time="2025-12-12T16:17:53Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=x9B/W 2025-12-12T16:17:53.869853364+00:00 stderr F time="2025-12-12T16:17:53Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=x9B/W 2025-12-12T16:17:54.069112950+00:00 stderr F time="2025-12-12T16:17:54Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=/mPSo 2025-12-12T16:17:54.069112950+00:00 stderr F time="2025-12-12T16:17:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=/mPSo 2025-12-12T16:17:54.670976870+00:00 stderr F time="2025-12-12T16:17:54Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=NCK1j 2025-12-12T16:17:54.670976870+00:00 stderr F time="2025-12-12T16:17:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=NCK1j 2025-12-12T16:17:55.270516402+00:00 stderr F time="2025-12-12T16:17:55Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=/mPSo 2025-12-12T16:17:55.270516402+00:00 stderr F time="2025-12-12T16:17:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace 
id=/mPSo 2025-12-12T16:17:55.870961407+00:00 stderr F time="2025-12-12T16:17:55Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=NCK1j 2025-12-12T16:17:55.870961407+00:00 stderr F time="2025-12-12T16:17:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=NCK1j 2025-12-12T16:17:56.070883399+00:00 stderr F time="2025-12-12T16:17:56Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=aKA0k 2025-12-12T16:17:56.070883399+00:00 stderr F time="2025-12-12T16:17:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=aKA0k 2025-12-12T16:17:56.670483113+00:00 stderr F time="2025-12-12T16:17:56Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=aKA0k 2025-12-12T16:17:56.670483113+00:00 stderr F time="2025-12-12T16:17:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=aKA0k 2025-12-12T16:18:00.196514796+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-marketplace state.State=READY" 2025-12-12T16:18:00.196968147+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="resolving sources" id=HJKDI namespace=openshift-marketplace 2025-12-12T16:18:00.197005428+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="checking if subscriptions need update" id=HJKDI namespace=openshift-marketplace 2025-12-12T16:18:00.201246713+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="No subscriptions were found in namespace openshift-marketplace" id=HJKDI namespace=openshift-marketplace 2025-12-12T16:18:00.202215137+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=0oo1d 2025-12-12T16:18:00.202258398+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=0oo1d 
2025-12-12T16:18:00.210043121+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=0oo1d 2025-12-12T16:18:00.210119143+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=0oo1d 2025-12-12T16:18:00.227356369+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=IaBb0 2025-12-12T16:18:00.227356369+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=IaBb0 2025-12-12T16:18:00.239333265+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=IaBb0 2025-12-12T16:18:00.239333265+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=IaBb0 2025-12-12T16:18:00.458798961+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=certified-operators state.State=READY" 2025-12-12T16:18:00.458963635+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="resolving sources" id=YYE32 namespace=openshift-marketplace 2025-12-12T16:18:00.458963635+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="checking if subscriptions need update" id=YYE32 namespace=openshift-marketplace 2025-12-12T16:18:00.466033639+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="No subscriptions were found in namespace openshift-marketplace" id=YYE32 namespace=openshift-marketplace 2025-12-12T16:18:00.604329769+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=E4DTn 2025-12-12T16:18:00.604329769+00:00 stderr F time="2025-12-12T16:18:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=E4DTn 
2025-12-12T16:18:01.201157853+00:00 stderr F time="2025-12-12T16:18:01Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=E4DTn 2025-12-12T16:18:01.201157853+00:00 stderr F time="2025-12-12T16:18:01Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=E4DTn 2025-12-12T16:18:01.603104290+00:00 stderr F time="2025-12-12T16:18:01Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=dmbY7 2025-12-12T16:18:01.603104290+00:00 stderr F time="2025-12-12T16:18:01Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=dmbY7 2025-12-12T16:18:02.199512225+00:00 stderr F time="2025-12-12T16:18:02Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=dmbY7 2025-12-12T16:18:02.199512225+00:00 stderr F time="2025-12-12T16:18:02Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=dmbY7 2025-12-12T16:18:03.404036405+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=community-operators state.State=READY" 2025-12-12T16:18:03.404118707+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="resolving sources" id=A4/E2 namespace=openshift-marketplace 2025-12-12T16:18:03.404118707+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="checking if subscriptions need update" id=A4/E2 namespace=openshift-marketplace 2025-12-12T16:18:03.408086005+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="No subscriptions were found in namespace openshift-marketplace" id=A4/E2 namespace=openshift-marketplace 2025-12-12T16:18:03.411489709+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=A+Qqv 2025-12-12T16:18:03.411489709+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace 
id=A+Qqv 2025-12-12T16:18:03.427237008+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=A+Qqv 2025-12-12T16:18:03.427237008+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=A+Qqv 2025-12-12T16:18:03.465356931+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-operators state.State=READY" 2025-12-12T16:18:03.465437773+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="resolving sources" id=lrfpQ namespace=openshift-marketplace 2025-12-12T16:18:03.465437773+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="checking if subscriptions need update" id=lrfpQ namespace=openshift-marketplace 2025-12-12T16:18:03.513119711+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="No subscriptions were found in namespace openshift-marketplace" id=lrfpQ namespace=openshift-marketplace 2025-12-12T16:18:03.605467954+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=BOMR9 2025-12-12T16:18:03.605467954+00:00 stderr F time="2025-12-12T16:18:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=BOMR9 2025-12-12T16:18:04.201476070+00:00 stderr F time="2025-12-12T16:18:04Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=tZSog 2025-12-12T16:18:04.201476070+00:00 stderr F time="2025-12-12T16:18:04Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=tZSog 2025-12-12T16:18:04.801482602+00:00 stderr F time="2025-12-12T16:18:04Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=BOMR9 2025-12-12T16:18:04.801482602+00:00 stderr F time="2025-12-12T16:18:04Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=BOMR9 
2025-12-12T16:18:05.402252025+00:00 stderr F time="2025-12-12T16:18:05Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=tZSog 2025-12-12T16:18:05.402252025+00:00 stderr F time="2025-12-12T16:18:05Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=tZSog 2025-12-12T16:18:05.601944232+00:00 stderr F time="2025-12-12T16:18:05Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=III50 2025-12-12T16:18:05.601944232+00:00 stderr F time="2025-12-12T16:18:05Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=III50 2025-12-12T16:18:06.202907840+00:00 stderr F time="2025-12-12T16:18:06Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=286h5 2025-12-12T16:18:06.202907840+00:00 stderr F time="2025-12-12T16:18:06Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=286h5 2025-12-12T16:18:06.799200562+00:00 stderr F time="2025-12-12T16:18:06Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=III50 2025-12-12T16:18:06.799200562+00:00 stderr F time="2025-12-12T16:18:06Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=III50 2025-12-12T16:18:07.200840131+00:00 stderr F time="2025-12-12T16:18:07Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=286h5 2025-12-12T16:18:07.200840131+00:00 stderr F time="2025-12-12T16:18:07Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=286h5 
2025-12-12T16:18:18.331739187+00:00 stderr F time="2025-12-12T16:18:18Z" level=error msg="Unable to retrieve cluster operator: Get \"https://10.217.4.1:443/apis/config.openshift.io/v1/clusteroperators/operator-lifecycle-manager-catalog\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:19:33.228813670+00:00 stderr F time="2025-12-12T16:19:33Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=hEthX 2025-12-12T16:19:33.228813670+00:00 stderr F time="2025-12-12T16:19:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=hEthX 2025-12-12T16:19:33.228959744+00:00 stderr F time="2025-12-12T16:19:33Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=mi9A0 2025-12-12T16:19:33.229023236+00:00 stderr F time="2025-12-12T16:19:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=mi9A0 2025-12-12T16:19:33.236775010+00:00 stderr F time="2025-12-12T16:19:33Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=mi9A0 2025-12-12T16:19:33.236775010+00:00 stderr F time="2025-12-12T16:19:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=mi9A0 2025-12-12T16:19:33.423222362+00:00 stderr F time="2025-12-12T16:19:33Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=hEthX 2025-12-12T16:19:33.423310174+00:00 stderr F time="2025-12-12T16:19:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=hEthX 2025-12-12T16:19:34.025426751+00:00 stderr F time="2025-12-12T16:19:34Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=4+1jW 2025-12-12T16:19:34.025501093+00:00 stderr F 
time="2025-12-12T16:19:34Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=4+1jW 2025-12-12T16:19:34.223588857+00:00 stderr F time="2025-12-12T16:19:34Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=AljTw 2025-12-12T16:19:34.223666258+00:00 stderr F time="2025-12-12T16:19:34Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=AljTw 2025-12-12T16:19:35.224399985+00:00 stderr F time="2025-12-12T16:19:35Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=4+1jW 2025-12-12T16:19:35.224399985+00:00 stderr F time="2025-12-12T16:19:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=4+1jW 2025-12-12T16:19:35.423970896+00:00 stderr F time="2025-12-12T16:19:35Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=AljTw 2025-12-12T16:19:35.423970896+00:00 stderr F time="2025-12-12T16:19:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=AljTw 2025-12-12T16:19:40.735503758+00:00 stderr F time="2025-12-12T16:19:40Z" level=info msg="resolving sources" id=+4F3s namespace=openshift-monitoring 2025-12-12T16:19:40.735503758+00:00 stderr F time="2025-12-12T16:19:40Z" level=info msg="checking if subscriptions need update" id=+4F3s namespace=openshift-monitoring 2025-12-12T16:19:40.735576930+00:00 stderr F time="2025-12-12T16:19:40Z" level=info msg="resolving sources" id=ZGsT6 namespace=openshift-operator-lifecycle-manager 2025-12-12T16:19:40.735576930+00:00 stderr F time="2025-12-12T16:19:40Z" level=info msg="checking if subscriptions need update" id=ZGsT6 namespace=openshift-operator-lifecycle-manager 2025-12-12T16:19:40.739001566+00:00 stderr F time="2025-12-12T16:19:40Z" level=info msg="No subscriptions were found in namespace openshift-monitoring" id=+4F3s namespace=openshift-monitoring 2025-12-12T16:19:40.739001566+00:00 stderr F time="2025-12-12T16:19:40Z" level=info msg="resolving sources" id=3OEHz namespace=openshift-operators 2025-12-12T16:19:40.739001566+00:00 stderr F time="2025-12-12T16:19:40Z" level=info 
msg="checking if subscriptions need update" id=3OEHz namespace=openshift-operators 2025-12-12T16:19:40.739001566+00:00 stderr F time="2025-12-12T16:19:40Z" level=info msg="No subscriptions were found in namespace openshift-operator-lifecycle-manager" id=ZGsT6 namespace=openshift-operator-lifecycle-manager 2025-12-12T16:19:40.741055317+00:00 stderr F time="2025-12-12T16:19:40Z" level=info msg="No subscriptions were found in namespace openshift-operators" id=3OEHz namespace=openshift-operators 2025-12-12T16:19:42.472966491+00:00 stderr F time="2025-12-12T16:19:42Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=l+3zj 2025-12-12T16:19:42.472966491+00:00 stderr F time="2025-12-12T16:19:42Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=l+3zj 2025-12-12T16:19:42.473542266+00:00 stderr F time="2025-12-12T16:19:42Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=eOSZQ 2025-12-12T16:19:42.473565276+00:00 stderr F time="2025-12-12T16:19:42Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=eOSZQ 2025-12-12T16:19:42.482558742+00:00 stderr F time="2025-12-12T16:19:42Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=l+3zj 2025-12-12T16:19:42.482558742+00:00 stderr F time="2025-12-12T16:19:42Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=l+3zj 2025-12-12T16:19:42.482735977+00:00 stderr F time="2025-12-12T16:19:42Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=eOSZQ 2025-12-12T16:19:42.482735977+00:00 stderr F time="2025-12-12T16:19:42Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=eOSZQ 2025-12-12T16:19:42.489996439+00:00 stderr F time="2025-12-12T16:19:42Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=certified-operators 
catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" id=eOSZQ 2025-12-12T16:19:42.491647560+00:00 stderr F E1212 16:19:42.491301 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/certified-operators\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:43.072750471+00:00 stderr F time="2025-12-12T16:19:43Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=qYBZc 2025-12-12T16:19:43.072750471+00:00 stderr F time="2025-12-12T16:19:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=qYBZc 2025-12-12T16:19:43.271993823+00:00 stderr F time="2025-12-12T16:19:43Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=zv+c0 2025-12-12T16:19:43.271993823+00:00 stderr F time="2025-12-12T16:19:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=zv+c0 2025-12-12T16:19:44.272362951+00:00 stderr F time="2025-12-12T16:19:44Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=qYBZc 2025-12-12T16:19:44.272362951+00:00 stderr F time="2025-12-12T16:19:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=qYBZc 2025-12-12T16:19:44.281551682+00:00 stderr F time="2025-12-12T16:19:44Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" id=qYBZc 2025-12-12T16:19:44.281551682+00:00 stderr F E1212 16:19:44.280861 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/redhat-marketplace\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version 
and try again" logger="UnhandledError" 2025-12-12T16:19:44.471998353+00:00 stderr F time="2025-12-12T16:19:44Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=zv+c0 2025-12-12T16:19:44.471998353+00:00 stderr F time="2025-12-12T16:19:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=zv+c0 2025-12-12T16:19:45.071734080+00:00 stderr F time="2025-12-12T16:19:45Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=sjYNT 2025-12-12T16:19:45.071734080+00:00 stderr F time="2025-12-12T16:19:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=sjYNT 2025-12-12T16:19:45.272073071+00:00 stderr F time="2025-12-12T16:19:45Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=O91Sv 2025-12-12T16:19:45.272073071+00:00 stderr F time="2025-12-12T16:19:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=O91Sv 2025-12-12T16:19:46.271111595+00:00 stderr F time="2025-12-12T16:19:46Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=sjYNT 2025-12-12T16:19:46.271111595+00:00 stderr F time="2025-12-12T16:19:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=sjYNT 2025-12-12T16:19:46.472806439+00:00 stderr F time="2025-12-12T16:19:46Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=O91Sv 2025-12-12T16:19:46.472806439+00:00 stderr F time="2025-12-12T16:19:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz 
current-pod.namespace=openshift-marketplace id=O91Sv 2025-12-12T16:19:46.477462946+00:00 stderr F time="2025-12-12T16:19:46Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" id=O91Sv 2025-12-12T16:19:46.477499147+00:00 stderr F E1212 16:19:46.477484 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/certified-operators\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:47.072658670+00:00 stderr F time="2025-12-12T16:19:47Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=bjWz/ 2025-12-12T16:19:47.072658670+00:00 stderr F time="2025-12-12T16:19:47Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=bjWz/ 2025-12-12T16:19:47.271123283+00:00 stderr F time="2025-12-12T16:19:47Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=hN9WT 2025-12-12T16:19:47.271123283+00:00 stderr F time="2025-12-12T16:19:47Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=hN9WT 2025-12-12T16:19:48.270635269+00:00 stderr F time="2025-12-12T16:19:48Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=bjWz/ 2025-12-12T16:19:48.270635269+00:00 stderr F time="2025-12-12T16:19:48Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=bjWz/ 2025-12-12T16:19:48.275900841+00:00 stderr F time="2025-12-12T16:19:48Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" id=bjWz/ 2025-12-12T16:19:48.275900841+00:00 stderr F E1212 16:19:48.275869 1 queueinformer_operator.go:312] 
"Unhandled Error" err="sync \"openshift-marketplace/redhat-marketplace\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:48.473034070+00:00 stderr F time="2025-12-12T16:19:48Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=hN9WT 2025-12-12T16:19:48.473034070+00:00 stderr F time="2025-12-12T16:19:48Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=hN9WT 2025-12-12T16:19:48.478238130+00:00 stderr F time="2025-12-12T16:19:48Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" id=hN9WT 2025-12-12T16:19:48.478307392+00:00 stderr F E1212 16:19:48.478249 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/certified-operators\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:49.073422735+00:00 stderr F time="2025-12-12T16:19:49Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=jWZi6 2025-12-12T16:19:49.073422735+00:00 stderr F time="2025-12-12T16:19:49Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=jWZi6 2025-12-12T16:19:49.271736224+00:00 stderr F time="2025-12-12T16:19:49Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=JaVy6 2025-12-12T16:19:49.271736224+00:00 stderr F time="2025-12-12T16:19:49Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=JaVy6 2025-12-12T16:19:50.271495286+00:00 stderr F time="2025-12-12T16:19:50Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 
current-pod.namespace=openshift-marketplace id=jWZi6 2025-12-12T16:19:50.271495286+00:00 stderr F time="2025-12-12T16:19:50Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=jWZi6 2025-12-12T16:19:50.275251730+00:00 stderr F time="2025-12-12T16:19:50Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" id=jWZi6 2025-12-12T16:19:50.275280021+00:00 stderr F E1212 16:19:50.275269 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/redhat-marketplace\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:50.475654152+00:00 stderr F time="2025-12-12T16:19:50Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=JaVy6 2025-12-12T16:19:50.475654152+00:00 stderr F time="2025-12-12T16:19:50Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=JaVy6 2025-12-12T16:19:50.481949430+00:00 stderr F time="2025-12-12T16:19:50Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" id=JaVy6 2025-12-12T16:19:50.481949430+00:00 stderr F E1212 16:19:50.481227 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/certified-operators\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:51.078271962+00:00 stderr F time="2025-12-12T16:19:51Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=OCaw3 2025-12-12T16:19:51.078271962+00:00 stderr F time="2025-12-12T16:19:51Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=OCaw3 2025-12-12T16:19:51.273457563+00:00 
stderr F time="2025-12-12T16:19:51Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=pr46r 2025-12-12T16:19:51.273457563+00:00 stderr F time="2025-12-12T16:19:51Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=pr46r 2025-12-12T16:19:52.282223721+00:00 stderr F time="2025-12-12T16:19:52Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=OCaw3 2025-12-12T16:19:52.282223721+00:00 stderr F time="2025-12-12T16:19:52Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=OCaw3 2025-12-12T16:19:52.286258512+00:00 stderr F time="2025-12-12T16:19:52Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" id=OCaw3 2025-12-12T16:19:52.286258512+00:00 stderr F E1212 16:19:52.285492 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/redhat-marketplace\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:52.472907208+00:00 stderr F time="2025-12-12T16:19:52Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=pr46r 2025-12-12T16:19:52.472907208+00:00 stderr F time="2025-12-12T16:19:52Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=pr46r 2025-12-12T16:19:52.479375410+00:00 stderr F time="2025-12-12T16:19:52Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" id=pr46r 2025-12-12T16:19:52.479375410+00:00 stderr F E1212 16:19:52.477946 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/certified-operators\" failed: 
Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:53.072433351+00:00 stderr F time="2025-12-12T16:19:53Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=iO00J 2025-12-12T16:19:53.072433351+00:00 stderr F time="2025-12-12T16:19:53Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=iO00J 2025-12-12T16:19:53.272034063+00:00 stderr F time="2025-12-12T16:19:53Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=PDMPM 2025-12-12T16:19:53.272034063+00:00 stderr F time="2025-12-12T16:19:53Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=PDMPM 2025-12-12T16:19:54.271871707+00:00 stderr F time="2025-12-12T16:19:54Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=iO00J 2025-12-12T16:19:54.271871707+00:00 stderr F time="2025-12-12T16:19:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=iO00J 2025-12-12T16:19:54.276580235+00:00 stderr F time="2025-12-12T16:19:54Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" id=iO00J 2025-12-12T16:19:54.276625856+00:00 stderr F E1212 16:19:54.276590 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/redhat-marketplace\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:54.475604672+00:00 stderr F time="2025-12-12T16:19:54Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=PDMPM 2025-12-12T16:19:54.475604672+00:00 stderr F 
time="2025-12-12T16:19:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=PDMPM 2025-12-12T16:19:54.486302751+00:00 stderr F time="2025-12-12T16:19:54Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" id=PDMPM 2025-12-12T16:19:54.486302751+00:00 stderr F E1212 16:19:54.485672 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/certified-operators\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:55.077169066+00:00 stderr F time="2025-12-12T16:19:55Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=EG5ek 2025-12-12T16:19:55.077169066+00:00 stderr F time="2025-12-12T16:19:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=EG5ek 2025-12-12T16:19:55.275281050+00:00 stderr F time="2025-12-12T16:19:55Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=nXSee 2025-12-12T16:19:55.275281050+00:00 stderr F time="2025-12-12T16:19:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=nXSee 2025-12-12T16:19:56.272338884+00:00 stderr F time="2025-12-12T16:19:56Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=EG5ek 2025-12-12T16:19:56.272338884+00:00 stderr F time="2025-12-12T16:19:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=EG5ek 2025-12-12T16:19:56.276159540+00:00 stderr F time="2025-12-12T16:19:56Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace 
error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" id=EG5ek 2025-12-12T16:19:56.276216801+00:00 stderr F E1212 16:19:56.276172 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/redhat-marketplace\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:56.471394222+00:00 stderr F time="2025-12-12T16:19:56Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=nXSee 2025-12-12T16:19:56.471394222+00:00 stderr F time="2025-12-12T16:19:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=nXSee 2025-12-12T16:19:56.476280584+00:00 stderr F time="2025-12-12T16:19:56Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" id=nXSee 2025-12-12T16:19:56.476314565+00:00 stderr F E1212 16:19:56.476292 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/certified-operators\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:56.872454241+00:00 stderr F time="2025-12-12T16:19:56Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=Zoys5 2025-12-12T16:19:56.872454241+00:00 stderr F time="2025-12-12T16:19:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=Zoys5 2025-12-12T16:19:57.471018560+00:00 stderr F time="2025-12-12T16:19:57Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=ccL/U 2025-12-12T16:19:57.471018560+00:00 stderr F time="2025-12-12T16:19:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz 
current-pod.namespace=openshift-marketplace id=ccL/U 2025-12-12T16:19:58.071576799+00:00 stderr F time="2025-12-12T16:19:58Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=Zoys5 2025-12-12T16:19:58.071576799+00:00 stderr F time="2025-12-12T16:19:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=Zoys5 2025-12-12T16:19:58.075641851+00:00 stderr F time="2025-12-12T16:19:58Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" id=Zoys5 2025-12-12T16:19:58.075735024+00:00 stderr F E1212 16:19:58.075634 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/redhat-marketplace\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:58.472721241+00:00 stderr F time="2025-12-12T16:19:58Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=ccL/U 2025-12-12T16:19:58.472721241+00:00 stderr F time="2025-12-12T16:19:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=ccL/U 2025-12-12T16:19:58.476424164+00:00 stderr F time="2025-12-12T16:19:58Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" id=ccL/U 2025-12-12T16:19:58.476493066+00:00 stderr F E1212 16:19:58.476451 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/certified-operators\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:58.872670363+00:00 stderr F time="2025-12-12T16:19:58Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=VKhM3 2025-12-12T16:19:58.872670363+00:00 stderr F time="2025-12-12T16:19:58Z" level=info msg="of 1 
pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=VKhM3 2025-12-12T16:19:59.672369051+00:00 stderr F time="2025-12-12T16:19:59Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=VKhM3 2025-12-12T16:19:59.672369051+00:00 stderr F time="2025-12-12T16:19:59Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=VKhM3 2025-12-12T16:19:59.676723521+00:00 stderr F time="2025-12-12T16:19:59Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" id=VKhM3 2025-12-12T16:19:59.676770242+00:00 stderr F E1212 16:19:59.676755 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-marketplace/redhat-marketplace\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:19:59.874035495+00:00 stderr F time="2025-12-12T16:19:59Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=rF/hx 2025-12-12T16:19:59.874035495+00:00 stderr F time="2025-12-12T16:19:59Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=rF/hx 2025-12-12T16:20:00.476331057+00:00 stderr F time="2025-12-12T16:20:00Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=rF/hx 2025-12-12T16:20:00.476331057+00:00 stderr F time="2025-12-12T16:20:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=rF/hx 2025-12-12T16:20:00.481867246+00:00 stderr F time="2025-12-12T16:20:00Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on 
catalogsources.operators.coreos.com \"certified-operators\": the object has been modified; please apply your changes to the latest version and try again" id=rF/hx 2025-12-12T16:20:00.871664493+00:00 stderr F time="2025-12-12T16:20:00Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=dFXEJ 2025-12-12T16:20:00.871664493+00:00 stderr F time="2025-12-12T16:20:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=dFXEJ 2025-12-12T16:20:01.672445759+00:00 stderr F time="2025-12-12T16:20:01Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=dFXEJ 2025-12-12T16:20:01.672445759+00:00 stderr F time="2025-12-12T16:20:01Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=dFXEJ 2025-12-12T16:20:01.677215639+00:00 stderr F time="2025-12-12T16:20:01Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"redhat-marketplace\": the object has been modified; please apply your changes to the latest version and try again" id=dFXEJ 2025-12-12T16:20:01.872487232+00:00 stderr F time="2025-12-12T16:20:01Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=nEhnM 2025-12-12T16:20:01.872487232+00:00 stderr F time="2025-12-12T16:20:01Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=nEhnM 2025-12-12T16:20:02.471360699+00:00 stderr F time="2025-12-12T16:20:02Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=KgZPW 2025-12-12T16:20:02.471360699+00:00 stderr F time="2025-12-12T16:20:02Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=KgZPW 2025-12-12T16:20:03.072206754+00:00 stderr F time="2025-12-12T16:20:03Z" level=info msg="evaluating 
current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=nEhnM 2025-12-12T16:20:03.072206754+00:00 stderr F time="2025-12-12T16:20:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=nEhnM 2025-12-12T16:20:03.672237330+00:00 stderr F time="2025-12-12T16:20:03Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=KgZPW 2025-12-12T16:20:03.672237330+00:00 stderr F time="2025-12-12T16:20:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=KgZPW 2025-12-12T16:20:03.872824396+00:00 stderr F time="2025-12-12T16:20:03Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=Yd8K6 2025-12-12T16:20:03.872824396+00:00 stderr F time="2025-12-12T16:20:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=Yd8K6 2025-12-12T16:20:04.472623896+00:00 stderr F time="2025-12-12T16:20:04Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=kP0P/ 2025-12-12T16:20:04.472623896+00:00 stderr F time="2025-12-12T16:20:04Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=kP0P/ 2025-12-12T16:20:05.072932358+00:00 stderr F time="2025-12-12T16:20:05Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=Yd8K6 2025-12-12T16:20:05.072932358+00:00 stderr F time="2025-12-12T16:20:05Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=Yd8K6 2025-12-12T16:20:05.472611564+00:00 stderr F time="2025-12-12T16:20:05Z" level=info msg="evaluating 
current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=kP0P/ 2025-12-12T16:20:05.472611564+00:00 stderr F time="2025-12-12T16:20:05Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=kP0P/ 2025-12-12T16:20:17.846508946+00:00 stderr F time="2025-12-12T16:20:17Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=/azd0 2025-12-12T16:20:17.846508946+00:00 stderr F time="2025-12-12T16:20:17Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=/azd0 2025-12-12T16:20:17.846891396+00:00 stderr F time="2025-12-12T16:20:17Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=vjATA 2025-12-12T16:20:17.846891396+00:00 stderr F time="2025-12-12T16:20:17Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=vjATA 2025-12-12T16:20:17.854101627+00:00 stderr F time="2025-12-12T16:20:17Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=/azd0 2025-12-12T16:20:17.854101627+00:00 stderr F time="2025-12-12T16:20:17Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=/azd0 2025-12-12T16:20:18.042700322+00:00 stderr F time="2025-12-12T16:20:18Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=vjATA 2025-12-12T16:20:18.042700322+00:00 stderr F time="2025-12-12T16:20:18Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=vjATA 2025-12-12T16:20:18.642874562+00:00 stderr F time="2025-12-12T16:20:18Z" 
level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=2H1vw 2025-12-12T16:20:18.642874562+00:00 stderr F time="2025-12-12T16:20:18Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=2H1vw 2025-12-12T16:20:18.842276728+00:00 stderr F time="2025-12-12T16:20:18Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=BzjJK 2025-12-12T16:20:18.842276728+00:00 stderr F time="2025-12-12T16:20:18Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=BzjJK 2025-12-12T16:20:19.842460871+00:00 stderr F time="2025-12-12T16:20:19Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=2H1vw 2025-12-12T16:20:19.842460871+00:00 stderr F time="2025-12-12T16:20:19Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=2H1vw 2025-12-12T16:20:20.045408467+00:00 stderr F time="2025-12-12T16:20:20Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=BzjJK 2025-12-12T16:20:20.045408467+00:00 stderr F time="2025-12-12T16:20:20Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=BzjJK 2025-12-12T16:20:20.844283314+00:00 stderr F time="2025-12-12T16:20:20Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=+jsmD 2025-12-12T16:20:20.844283314+00:00 stderr F time="2025-12-12T16:20:20Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=+jsmD 2025-12-12T16:20:21.044241865+00:00 stderr F time="2025-12-12T16:20:21Z" 
level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=xiDun 2025-12-12T16:20:21.044241865+00:00 stderr F time="2025-12-12T16:20:21Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=xiDun 2025-12-12T16:20:22.242562192+00:00 stderr F time="2025-12-12T16:20:22Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=+jsmD 2025-12-12T16:20:22.242562192+00:00 stderr F time="2025-12-12T16:20:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=+jsmD 2025-12-12T16:20:22.442442121+00:00 stderr F time="2025-12-12T16:20:22Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=xiDun 2025-12-12T16:20:22.442442121+00:00 stderr F time="2025-12-12T16:20:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=xiDun 2025-12-12T16:20:23.044876627+00:00 stderr F time="2025-12-12T16:20:23Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=lGBEH 2025-12-12T16:20:23.044876627+00:00 stderr F time="2025-12-12T16:20:23Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=lGBEH 2025-12-12T16:20:23.241604236+00:00 stderr F time="2025-12-12T16:20:23Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=H/O73 2025-12-12T16:20:23.241604236+00:00 stderr F time="2025-12-12T16:20:23Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=H/O73 2025-12-12T16:20:24.242483536+00:00 stderr F 
time="2025-12-12T16:20:24Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=lGBEH 2025-12-12T16:20:24.242483536+00:00 stderr F time="2025-12-12T16:20:24Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=lGBEH 2025-12-12T16:20:24.442391955+00:00 stderr F time="2025-12-12T16:20:24Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=H/O73 2025-12-12T16:20:24.442463157+00:00 stderr F time="2025-12-12T16:20:24Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=H/O73 2025-12-12T16:20:24.843262240+00:00 stderr F time="2025-12-12T16:20:24Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=zKbjx 2025-12-12T16:20:24.843410714+00:00 stderr F time="2025-12-12T16:20:24Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=zKbjx 2025-12-12T16:20:25.443002479+00:00 stderr F time="2025-12-12T16:20:25Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=zKbjx 2025-12-12T16:20:25.443239534+00:00 stderr F time="2025-12-12T16:20:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=zKbjx 2025-12-12T16:20:29.823887171+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="resolving sources" id=m4IFq namespace=default 2025-12-12T16:20:29.824024404+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="checking if subscriptions need update" id=m4IFq namespace=default 2025-12-12T16:20:29.824216089+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="resolving sources" id=vR2zB namespace=hostpath-provisioner 2025-12-12T16:20:29.824259510+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="checking if subscriptions need update" id=vR2zB namespace=hostpath-provisioner 2025-12-12T16:20:29.830579666+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="No subscriptions were found in namespace default" 
id=m4IFq namespace=default 2025-12-12T16:20:29.830579666+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="resolving sources" id=FgrwL namespace=kube-node-lease 2025-12-12T16:20:29.830630957+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="checking if subscriptions need update" id=FgrwL namespace=kube-node-lease 2025-12-12T16:20:29.830973226+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="No subscriptions were found in namespace hostpath-provisioner" id=vR2zB namespace=hostpath-provisioner 2025-12-12T16:20:29.830973226+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="resolving sources" id=Aa82c namespace=kube-public 2025-12-12T16:20:29.830973226+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="checking if subscriptions need update" id=Aa82c namespace=kube-public 2025-12-12T16:20:29.833858302+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="No subscriptions were found in namespace kube-node-lease" id=FgrwL namespace=kube-node-lease 2025-12-12T16:20:29.833858302+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="resolving sources" id=uymSu namespace=kube-system 2025-12-12T16:20:29.833858302+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="checking if subscriptions need update" id=uymSu namespace=kube-system 2025-12-12T16:20:29.834343465+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="No subscriptions were found in namespace kube-public" id=Aa82c namespace=kube-public 2025-12-12T16:20:29.834343465+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="resolving sources" id=2lmIk namespace=openshift 2025-12-12T16:20:29.834343465+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="checking if subscriptions need update" id=2lmIk namespace=openshift 2025-12-12T16:20:29.835872395+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="No subscriptions were found in namespace kube-system" id=uymSu namespace=kube-system 2025-12-12T16:20:29.835872395+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="resolving sources" id=MVXSE namespace=openshift-apiserver 2025-12-12T16:20:29.835872395+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="checking if subscriptions need update" id=MVXSE namespace=openshift-apiserver 2025-12-12T16:20:29.836275815+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="No subscriptions were found in namespace openshift" id=2lmIk namespace=openshift 2025-12-12T16:20:29.836299806+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="resolving sources" id=7mHKj namespace=openshift-apiserver-operator 2025-12-12T16:20:29.836299806+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="checking if subscriptions need update" id=7mHKj namespace=openshift-apiserver-operator 2025-12-12T16:20:29.838472113+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="No subscriptions were found in namespace openshift-apiserver" id=MVXSE namespace=openshift-apiserver 2025-12-12T16:20:29.838540075+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="resolving sources" id=bXziZ namespace=openshift-authentication 2025-12-12T16:20:29.838573306+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="checking if subscriptions need update" id=bXziZ namespace=openshift-authentication 2025-12-12T16:20:29.838691269+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="No subscriptions were found in namespace openshift-apiserver-operator" id=7mHKj namespace=openshift-apiserver-operator 2025-12-12T16:20:29.838757321+00:00 stderr F 
time="2025-12-12T16:20:29Z" level=info msg="resolving sources" id=0S5zX namespace=openshift-authentication-operator 2025-12-12T16:20:29.838791351+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="checking if subscriptions need update" id=0S5zX namespace=openshift-authentication-operator 2025-12-12T16:20:29.841367329+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="No subscriptions were found in namespace openshift-authentication-operator" id=0S5zX namespace=openshift-authentication-operator 2025-12-12T16:20:29.841367329+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="resolving sources" id=svb22 namespace=openshift-cloud-network-config-controller 2025-12-12T16:20:29.841367329+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="checking if subscriptions need update" id=svb22 namespace=openshift-cloud-network-config-controller 2025-12-12T16:20:29.841458642+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="No subscriptions were found in namespace openshift-authentication" id=bXziZ namespace=openshift-authentication 2025-12-12T16:20:29.841512063+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="resolving sources" id=2OR4N namespace=openshift-cloud-platform-infra 2025-12-12T16:20:29.841512063+00:00 stderr F time="2025-12-12T16:20:29Z" level=info msg="checking if subscriptions need update" id=2OR4N namespace=openshift-cloud-platform-infra 2025-12-12T16:20:30.032604755+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="No subscriptions were found in namespace openshift-cloud-network-config-controller" id=svb22 namespace=openshift-cloud-network-config-controller 2025-12-12T16:20:30.032719448+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="resolving sources" id=aQoxt namespace=openshift-cluster-machine-approver 2025-12-12T16:20:30.032747709+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="checking if subscriptions need update" id=aQoxt namespace=openshift-cluster-machine-approver 2025-12-12T16:20:30.227382244+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="No subscriptions were found in namespace openshift-cloud-platform-infra" id=2OR4N namespace=openshift-cloud-platform-infra 2025-12-12T16:20:30.227492407+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="resolving sources" id=1+9Nz namespace=openshift-cluster-samples-operator 2025-12-12T16:20:30.227516157+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="checking if subscriptions need update" id=1+9Nz namespace=openshift-cluster-samples-operator 2025-12-12T16:20:30.428588591+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="No subscriptions were found in namespace openshift-cluster-machine-approver" id=aQoxt namespace=openshift-cluster-machine-approver 2025-12-12T16:20:30.428733065+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="resolving sources" id=ST3AB namespace=openshift-cluster-storage-operator 2025-12-12T16:20:30.428763266+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="checking if subscriptions need update" id=ST3AB namespace=openshift-cluster-storage-operator 2025-12-12T16:20:30.628878295+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="No subscriptions were found in namespace openshift-cluster-samples-operator" id=1+9Nz namespace=openshift-cluster-samples-operator 2025-12-12T16:20:30.628878295+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="resolving sources" id=n/pPZ namespace=openshift-cluster-version 2025-12-12T16:20:30.628921696+00:00 stderr F time="2025-12-12T16:20:30Z" 
level=info msg="checking if subscriptions need update" id=n/pPZ namespace=openshift-cluster-version 2025-12-12T16:20:30.828931673+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="No subscriptions were found in namespace openshift-cluster-storage-operator" id=ST3AB namespace=openshift-cluster-storage-operator 2025-12-12T16:20:30.828931673+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="resolving sources" id=A+xlk namespace=openshift-config 2025-12-12T16:20:30.828931673+00:00 stderr F time="2025-12-12T16:20:30Z" level=info msg="checking if subscriptions need update" id=A+xlk namespace=openshift-config 2025-12-12T16:20:31.028871827+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="No subscriptions were found in namespace openshift-cluster-version" id=n/pPZ namespace=openshift-cluster-version 2025-12-12T16:20:31.029047692+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="resolving sources" id=VI+fs namespace=openshift-config-managed 2025-12-12T16:20:31.029073682+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="checking if subscriptions need update" id=VI+fs namespace=openshift-config-managed 2025-12-12T16:20:31.228001370+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="No subscriptions were found in namespace openshift-config" id=A+xlk namespace=openshift-config 2025-12-12T16:20:31.228091493+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="resolving sources" id=/gDKw namespace=openshift-config-operator 2025-12-12T16:20:31.228115243+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="checking if subscriptions need update" id=/gDKw namespace=openshift-config-operator 2025-12-12T16:20:31.427788120+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="No subscriptions were found in namespace openshift-config-managed" id=VI+fs namespace=openshift-config-managed 2025-12-12T16:20:31.427788120+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="resolving sources" id=McGRb namespace=openshift-console 2025-12-12T16:20:31.427788120+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="checking if subscriptions need update" id=McGRb namespace=openshift-console 2025-12-12T16:20:31.805486747+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="No subscriptions were found in namespace openshift-config-operator" id=/gDKw namespace=openshift-config-operator 2025-12-12T16:20:31.805627170+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="resolving sources" id=gkHpE namespace=openshift-console-operator 2025-12-12T16:20:31.805655951+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="checking if subscriptions need update" id=gkHpE namespace=openshift-console-operator 2025-12-12T16:20:31.829795304+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="No subscriptions were found in namespace openshift-console" id=McGRb namespace=openshift-console 2025-12-12T16:20:31.829912237+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="resolving sources" id=d8Dto namespace=openshift-console-user-settings 2025-12-12T16:20:31.829941078+00:00 stderr F time="2025-12-12T16:20:31Z" level=info msg="checking if subscriptions need update" id=d8Dto namespace=openshift-console-user-settings 2025-12-12T16:20:32.033637061+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="No subscriptions were found in namespace openshift-console-operator" id=gkHpE namespace=openshift-console-operator 2025-12-12T16:20:32.033802635+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="resolving sources" id=dr28h 
namespace=openshift-controller-manager 2025-12-12T16:20:32.034576156+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="checking if subscriptions need update" id=dr28h namespace=openshift-controller-manager 2025-12-12T16:20:32.227728582+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="No subscriptions were found in namespace openshift-console-user-settings" id=d8Dto namespace=openshift-console-user-settings 2025-12-12T16:20:32.227784013+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="resolving sources" id=z6LBN namespace=openshift-controller-manager-operator 2025-12-12T16:20:32.227784013+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="checking if subscriptions need update" id=z6LBN namespace=openshift-controller-manager-operator 2025-12-12T16:20:32.428105588+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="No subscriptions were found in namespace openshift-controller-manager" id=dr28h namespace=openshift-controller-manager 2025-12-12T16:20:32.428240101+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="resolving sources" id=Ws602 namespace=openshift-dns 2025-12-12T16:20:32.428278152+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="checking if subscriptions need update" id=Ws602 namespace=openshift-dns 2025-12-12T16:20:32.627229271+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="No subscriptions were found in namespace openshift-controller-manager-operator" id=z6LBN namespace=openshift-controller-manager-operator 2025-12-12T16:20:32.627229271+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="resolving sources" id=pUpzw namespace=openshift-dns-operator 2025-12-12T16:20:32.627229271+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="checking if subscriptions need update" id=pUpzw namespace=openshift-dns-operator 2025-12-12T16:20:32.826911659+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="No subscriptions were found in namespace openshift-dns" id=Ws602 namespace=openshift-dns 2025-12-12T16:20:32.827036572+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="resolving sources" id=1D6jk namespace=openshift-etcd 2025-12-12T16:20:32.827066653+00:00 stderr F time="2025-12-12T16:20:32Z" level=info msg="checking if subscriptions need update" id=1D6jk namespace=openshift-etcd 2025-12-12T16:20:33.028402834+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="No subscriptions were found in namespace openshift-dns-operator" id=pUpzw namespace=openshift-dns-operator 2025-12-12T16:20:33.028506156+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="resolving sources" id=mRc1g namespace=openshift-etcd-operator 2025-12-12T16:20:33.028529997+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="checking if subscriptions need update" id=mRc1g namespace=openshift-etcd-operator 2025-12-12T16:20:33.227649660+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="No subscriptions were found in namespace openshift-etcd" id=1D6jk namespace=openshift-etcd 2025-12-12T16:20:33.227776433+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="resolving sources" id=0+yuC namespace=openshift-host-network 2025-12-12T16:20:33.227810924+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="checking if subscriptions need update" id=0+yuC namespace=openshift-host-network 2025-12-12T16:20:33.427924723+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="No subscriptions were found in namespace openshift-etcd-operator" id=mRc1g namespace=openshift-etcd-operator 
2025-12-12T16:20:33.428041376+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="resolving sources" id=eZKfi namespace=openshift-image-registry 2025-12-12T16:20:33.428069417+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="checking if subscriptions need update" id=eZKfi namespace=openshift-image-registry 2025-12-12T16:20:33.628747551+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="No subscriptions were found in namespace openshift-host-network" id=0+yuC namespace=openshift-host-network 2025-12-12T16:20:33.628747551+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="resolving sources" id=3Cw2F namespace=openshift-infra 2025-12-12T16:20:33.628747551+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="checking if subscriptions need update" id=3Cw2F namespace=openshift-infra 2025-12-12T16:20:33.829059795+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="No subscriptions were found in namespace openshift-image-registry" id=eZKfi namespace=openshift-image-registry 2025-12-12T16:20:33.829059795+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="resolving sources" id=fzDjq namespace=openshift-ingress 2025-12-12T16:20:33.829059795+00:00 stderr F time="2025-12-12T16:20:33Z" level=info msg="checking if subscriptions need update" id=fzDjq namespace=openshift-ingress 2025-12-12T16:20:34.029914553+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="No subscriptions were found in namespace openshift-infra" id=3Cw2F namespace=openshift-infra 2025-12-12T16:20:34.029914553+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="resolving sources" id=Z58wS namespace=openshift-ingress-canary 2025-12-12T16:20:34.029914553+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="checking if subscriptions need update" id=Z58wS namespace=openshift-ingress-canary 2025-12-12T16:20:34.227979948+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="No subscriptions were found in namespace openshift-ingress" id=fzDjq namespace=openshift-ingress 2025-12-12T16:20:34.227979948+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="resolving sources" id=YPB3U namespace=openshift-ingress-operator 2025-12-12T16:20:34.228039810+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="checking if subscriptions need update" id=YPB3U namespace=openshift-ingress-operator 2025-12-12T16:20:34.427834711+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="No subscriptions were found in namespace openshift-ingress-canary" id=Z58wS namespace=openshift-ingress-canary 2025-12-12T16:20:34.427834711+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="resolving sources" id=TdK1w namespace=openshift-kni-infra 2025-12-12T16:20:34.427834711+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="checking if subscriptions need update" id=TdK1w namespace=openshift-kni-infra 2025-12-12T16:20:34.628293979+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="No subscriptions were found in namespace openshift-ingress-operator" id=YPB3U namespace=openshift-ingress-operator 2025-12-12T16:20:34.628332530+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="resolving sources" id=USajy namespace=openshift-kube-apiserver 2025-12-12T16:20:34.628332530+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="checking if subscriptions need update" id=USajy namespace=openshift-kube-apiserver 2025-12-12T16:20:34.827255557+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="No subscriptions were found in namespace openshift-kni-infra" 
id=TdK1w namespace=openshift-kni-infra 2025-12-12T16:20:34.827348040+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="resolving sources" id=t32Y3 namespace=openshift-kube-apiserver-operator 2025-12-12T16:20:34.827373251+00:00 stderr F time="2025-12-12T16:20:34Z" level=info msg="checking if subscriptions need update" id=t32Y3 namespace=openshift-kube-apiserver-operator 2025-12-12T16:20:35.028918966+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="No subscriptions were found in namespace openshift-kube-apiserver" id=USajy namespace=openshift-kube-apiserver 2025-12-12T16:20:35.029065560+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="resolving sources" id=U5PrQ namespace=openshift-kube-controller-manager 2025-12-12T16:20:35.029100591+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="checking if subscriptions need update" id=U5PrQ namespace=openshift-kube-controller-manager 2025-12-12T16:20:35.228424289+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="No subscriptions were found in namespace openshift-kube-apiserver-operator" id=t32Y3 namespace=openshift-kube-apiserver-operator 2025-12-12T16:20:35.228579163+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="resolving sources" id=Wsml4 namespace=openshift-kube-controller-manager-operator 2025-12-12T16:20:35.228613514+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="checking if subscriptions need update" id=Wsml4 namespace=openshift-kube-controller-manager-operator 2025-12-12T16:20:35.426574147+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="No subscriptions were found in namespace openshift-kube-controller-manager" id=U5PrQ namespace=openshift-kube-controller-manager 2025-12-12T16:20:35.426708820+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="resolving sources" id=ylh/e namespace=openshift-kube-scheduler 2025-12-12T16:20:35.426733251+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="checking if subscriptions need update" id=ylh/e namespace=openshift-kube-scheduler 2025-12-12T16:20:35.629044297+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="No subscriptions were found in namespace openshift-kube-controller-manager-operator" id=Wsml4 namespace=openshift-kube-controller-manager-operator 2025-12-12T16:20:35.629044297+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="resolving sources" id=r9M53 namespace=openshift-kube-scheduler-operator 2025-12-12T16:20:35.629125689+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="checking if subscriptions need update" id=r9M53 namespace=openshift-kube-scheduler-operator 2025-12-12T16:20:35.777307686+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=NMecT 2025-12-12T16:20:35.777377118+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=NMecT 2025-12-12T16:20:35.784997158+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true 
correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=+vZHH 2025-12-12T16:20:35.785101701+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=+vZHH 2025-12-12T16:20:35.798972915+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=community-operators state.State=IDLE" 2025-12-12T16:20:35.799344274+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=certified-operators state.State=IDLE" 2025-12-12T16:20:35.799384635+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=community-operators state.State=CONNECTING" 2025-12-12T16:20:35.799484078+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=certified-operators state.State=CONNECTING" 2025-12-12T16:20:35.800082824+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=NMecT 2025-12-12T16:20:35.800157466+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-pvzzz current-pod.namespace=openshift-marketplace id=NMecT 2025-12-12T16:20:35.809655665+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=+vZHH 2025-12-12T16:20:35.809725697+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-2gt6h current-pod.namespace=openshift-marketplace id=+vZHH 2025-12-12T16:20:35.824122794+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=community-operators state.State=TRANSIENT_FAILURE" 2025-12-12T16:20:35.824122794+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-marketplace state.State=IDLE" 2025-12-12T16:20:35.824122794+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-marketplace state.State=CONNECTING" 2025-12-12T16:20:35.824122794+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=certified-operators state.State=TRANSIENT_FAILURE" 2025-12-12T16:20:35.831557659+00:00 stderr F time="2025-12-12T16:20:35Z" level=info 
msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-operators state.State=IDLE" 2025-12-12T16:20:35.831557659+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-operators state.State=CONNECTING" 2025-12-12T16:20:35.834886207+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-marketplace state.State=TRANSIENT_FAILURE" 2025-12-12T16:20:35.834886207+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="No subscriptions were found in namespace openshift-kube-scheduler" id=ylh/e namespace=openshift-kube-scheduler 2025-12-12T16:20:35.834886207+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="resolving sources" id=P4RYE namespace=openshift-kube-storage-version-migrator 2025-12-12T16:20:35.834886207+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="checking if subscriptions need update" id=P4RYE namespace=openshift-kube-storage-version-migrator 2025-12-12T16:20:35.842048374+00:00 stderr F time="2025-12-12T16:20:35Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-operators state.State=TRANSIENT_FAILURE" 2025-12-12T16:20:36.027489589+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="No subscriptions were found in namespace openshift-kube-scheduler-operator" id=r9M53 namespace=openshift-kube-scheduler-operator 2025-12-12T16:20:36.027489589+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="resolving sources" id=el8Ti namespace=openshift-kube-storage-version-migrator-operator 2025-12-12T16:20:36.027520489+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="checking if subscriptions need update" id=el8Ti namespace=openshift-kube-storage-version-migrator-operator 2025-12-12T16:20:36.242705364+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=Zs7u1 2025-12-12T16:20:36.242705364+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=Zs7u1 2025-12-12T16:20:36.628159204+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="No subscriptions were found in namespace openshift-kube-storage-version-migrator" id=P4RYE namespace=openshift-kube-storage-version-migrator 2025-12-12T16:20:36.628280127+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="resolving sources" id=1+XxA namespace=openshift-machine-api 2025-12-12T16:20:36.628280127+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="checking if subscriptions need update" id=1+XxA namespace=openshift-machine-api 2025-12-12T16:20:36.829699101+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="No subscriptions were found in namespace openshift-kube-storage-version-migrator-operator" id=el8Ti namespace=openshift-kube-storage-version-migrator-operator 2025-12-12T16:20:36.829880485+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="resolving sources" id=ueshH namespace=openshift-machine-config-operator 2025-12-12T16:20:36.829931477+00:00 stderr F 
time="2025-12-12T16:20:36Z" level=info msg="checking if subscriptions need update" id=ueshH namespace=openshift-machine-config-operator 2025-12-12T16:20:36.977484177+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=Zs7u1 2025-12-12T16:20:36.977484177+00:00 stderr F time="2025-12-12T16:20:36Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-s7x92 current-pod.namespace=openshift-marketplace id=Zs7u1 2025-12-12T16:20:37.031535405+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="No subscriptions were found in namespace openshift-machine-api" id=1+XxA namespace=openshift-machine-api 2025-12-12T16:20:37.031688489+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="resolving sources" id=pz5n3 namespace=openshift-marketplace 2025-12-12T16:20:37.031730610+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="checking if subscriptions need update" id=pz5n3 namespace=openshift-marketplace 2025-12-12T16:20:37.177426611+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=ipht0 2025-12-12T16:20:37.177511984+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-9ndfc current-pod.namespace=openshift-marketplace id=ipht0 2025-12-12T16:20:37.229168129+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="No subscriptions were found in namespace openshift-machine-config-operator" id=ueshH namespace=openshift-machine-config-operator 2025-12-12T16:20:37.229385044+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="resolving sources" id=RUUUM namespace=openshift-monitoring 2025-12-12T16:20:37.229425025+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="checking if subscriptions need update" id=RUUUM namespace=openshift-monitoring 2025-12-12T16:20:37.628122043+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="No subscriptions were found in namespace openshift-marketplace" id=pz5n3 namespace=openshift-marketplace 2025-12-12T16:20:37.628171884+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="resolving sources" id=PVWKO namespace=openshift-multus 2025-12-12T16:20:37.628171884+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="checking if subscriptions need update" id=PVWKO namespace=openshift-multus 2025-12-12T16:20:37.828023516+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="No subscriptions were found in namespace openshift-monitoring" id=RUUUM namespace=openshift-monitoring 2025-12-12T16:20:37.828023516+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="resolving sources" id=gkZr+ namespace=openshift-network-console 2025-12-12T16:20:37.828023516+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="checking if 
subscriptions need update" id=gkZr+ namespace=openshift-network-console 2025-12-12T16:20:37.976148042+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=ipht0 2025-12-12T16:20:37.976148042+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="registry pods invalid, need to overwrite" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=ipht0 2025-12-12T16:20:37.976262395+00:00 stderr F time="2025-12-12T16:20:37Z" level=info msg="creating desired pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=ipht0 pod.name= pod.namespace=openshift-marketplace 2025-12-12T16:20:38.029422739+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="No subscriptions were found in namespace openshift-multus" id=PVWKO namespace=openshift-multus 2025-12-12T16:20:38.029546772+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="resolving sources" id=UHGGL namespace=openshift-network-diagnostics 2025-12-12T16:20:38.029574173+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="checking if subscriptions need update" id=UHGGL namespace=openshift-network-diagnostics 2025-12-12T16:20:38.175441709+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=UFJoC 2025-12-12T16:20:38.175510971+00:00 stderr F time="2025-12-12T16:20:38Z" level=error msg="registry service not healthy: one or more required resources are missing" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=UFJoC isCurrentServiceAccountNil=false isServiceNil=false numCurrentPods=0 2025-12-12T16:20:38.227677619+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="No subscriptions were found in namespace openshift-network-console" id=gkZr+ namespace=openshift-network-console 2025-12-12T16:20:38.227818993+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="resolving sources" id=XDM9s namespace=openshift-network-node-identity 2025-12-12T16:20:38.227844674+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="checking if subscriptions need update" id=XDM9s namespace=openshift-network-node-identity 2025-12-12T16:20:38.381741381+00:00 stderr F I1212 16:20:38.381596 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:20:38.428722532+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="No subscriptions were found in namespace openshift-network-diagnostics" id=UHGGL namespace=openshift-network-diagnostics 2025-12-12T16:20:38.428722532+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="resolving sources" id=9Ph3I namespace=openshift-network-operator 2025-12-12T16:20:38.428722532+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="checking if subscriptions need update" id=9Ph3I namespace=openshift-network-operator 2025-12-12T16:20:38.627871385+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="No subscriptions were found in namespace openshift-network-node-identity" id=XDM9s namespace=openshift-network-node-identity 2025-12-12T16:20:38.627909976+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="resolving sources" id=CJNQY namespace=openshift-node 
2025-12-12T16:20:38.627909976+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="checking if subscriptions need update" id=CJNQY namespace=openshift-node 2025-12-12T16:20:38.975237377+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=UFJoC 2025-12-12T16:20:38.975237377+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="registry pods invalid, need to overwrite" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=UFJoC 2025-12-12T16:20:38.975237377+00:00 stderr F time="2025-12-12T16:20:38Z" level=info msg="creating desired pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=UFJoC pod.name= pod.namespace=openshift-marketplace 2025-12-12T16:20:39.028533535+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="No subscriptions were found in namespace openshift-network-operator" id=9Ph3I namespace=openshift-network-operator 2025-12-12T16:20:39.028533535+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="resolving sources" id=LOBVr namespace=openshift-nutanix-infra 2025-12-12T16:20:39.028614127+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="checking if subscriptions need update" id=LOBVr namespace=openshift-nutanix-infra 2025-12-12T16:20:39.227433512+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="No subscriptions were found in namespace openshift-node" id=CJNQY namespace=openshift-node 2025-12-12T16:20:39.227433512+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="resolving sources" id=PsbyK namespace=openshift-oauth-apiserver 2025-12-12T16:20:39.227433512+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="checking if subscriptions need update" id=PsbyK namespace=openshift-oauth-apiserver 2025-12-12T16:20:39.378208147+00:00 stderr F I1212 16:20:39.378017 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:20:39.429534493+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="No subscriptions were found in namespace openshift-nutanix-infra" id=LOBVr namespace=openshift-nutanix-infra 2025-12-12T16:20:39.429534493+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="resolving sources" id=nQLeC namespace=openshift-openstack-infra 2025-12-12T16:20:39.429534493+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="checking if subscriptions need update" id=nQLeC namespace=openshift-openstack-infra 2025-12-12T16:20:39.574995019+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=5r/fn 2025-12-12T16:20:39.574995019+00:00 stderr F time="2025-12-12T16:20:39Z" level=error msg="registry service not healthy: one or more required resources are missing" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=5r/fn isCurrentServiceAccountNil=false isServiceNil=false numCurrentPods=0 2025-12-12T16:20:39.628724748+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="No subscriptions were found in namespace openshift-oauth-apiserver" id=PsbyK namespace=openshift-oauth-apiserver 2025-12-12T16:20:39.628724748+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="resolving 
sources" id=lla4A namespace=openshift-operator-lifecycle-manager 2025-12-12T16:20:39.628724748+00:00 stderr F time="2025-12-12T16:20:39Z" level=info msg="checking if subscriptions need update" id=lla4A namespace=openshift-operator-lifecycle-manager 2025-12-12T16:20:40.027995261+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="No subscriptions were found in namespace openshift-openstack-infra" id=nQLeC namespace=openshift-openstack-infra 2025-12-12T16:20:40.027995261+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="resolving sources" id=IfsyK namespace=openshift-operators 2025-12-12T16:20:40.028028422+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="checking if subscriptions need update" id=IfsyK namespace=openshift-operators 2025-12-12T16:20:40.228488040+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="No subscriptions were found in namespace openshift-operator-lifecycle-manager" id=lla4A namespace=openshift-operator-lifecycle-manager 2025-12-12T16:20:40.228488040+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="resolving sources" id=XmKdA namespace=openshift-ovirt-infra 2025-12-12T16:20:40.228521230+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="checking if subscriptions need update" id=XmKdA namespace=openshift-ovirt-infra 2025-12-12T16:20:40.375286280+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=5r/fn 2025-12-12T16:20:40.375286280+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="registry pods invalid, need to overwrite" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=5r/fn 2025-12-12T16:20:40.375348102+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="creating desired pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=5r/fn pod.name= pod.namespace=openshift-marketplace 2025-12-12T16:20:40.430848718+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="No subscriptions were found in namespace openshift-operators" id=IfsyK namespace=openshift-operators 2025-12-12T16:20:40.430884759+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="resolving sources" id=ekh9P namespace=openshift-ovn-kubernetes 2025-12-12T16:20:40.430884759+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="checking if subscriptions need update" id=ekh9P namespace=openshift-ovn-kubernetes 2025-12-12T16:20:40.576633551+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=6gbgn 2025-12-12T16:20:40.576633551+00:00 stderr F time="2025-12-12T16:20:40Z" level=error msg="registry service not healthy: one or more required resources are missing" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=6gbgn isCurrentServiceAccountNil=false isServiceNil=false numCurrentPods=0 2025-12-12T16:20:40.629002755+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="No subscriptions were found in namespace openshift-ovirt-infra" id=XmKdA namespace=openshift-ovirt-infra 2025-12-12T16:20:40.629042276+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="resolving sources" id=6n0sy namespace=openshift-route-controller-manager 2025-12-12T16:20:40.629042276+00:00 stderr F 
time="2025-12-12T16:20:40Z" level=info msg="checking if subscriptions need update" id=6n0sy namespace=openshift-route-controller-manager 2025-12-12T16:20:40.780498679+00:00 stderr F I1212 16:20:40.780434 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:20:40.827968074+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="No subscriptions were found in namespace openshift-ovn-kubernetes" id=ekh9P namespace=openshift-ovn-kubernetes 2025-12-12T16:20:40.827968074+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="resolving sources" id=dkJqo namespace=openshift-service-ca 2025-12-12T16:20:40.827968074+00:00 stderr F time="2025-12-12T16:20:40Z" level=info msg="checking if subscriptions need update" id=dkJqo namespace=openshift-service-ca 2025-12-12T16:20:41.029253164+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="No subscriptions were found in namespace openshift-route-controller-manager" id=6n0sy namespace=openshift-route-controller-manager 2025-12-12T16:20:41.029253164+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="resolving sources" id=MpqdX namespace=openshift-service-ca-operator 2025-12-12T16:20:41.029253164+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="checking if subscriptions need update" id=MpqdX namespace=openshift-service-ca-operator 2025-12-12T16:20:41.375477135+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=6gbgn 2025-12-12T16:20:41.375477135+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="registry pods invalid, need to overwrite" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=6gbgn 2025-12-12T16:20:41.375537577+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="creating desired pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace id=6gbgn pod.name= pod.namespace=openshift-marketplace 2025-12-12T16:20:41.429995925+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="No subscriptions were found in namespace openshift-service-ca" id=dkJqo namespace=openshift-service-ca 2025-12-12T16:20:41.429995925+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="resolving sources" id=ZOSox namespace=openshift-user-workload-monitoring 2025-12-12T16:20:41.429995925+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="checking if subscriptions need update" id=ZOSox namespace=openshift-user-workload-monitoring 2025-12-12T16:20:41.628746638+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="No subscriptions were found in namespace openshift-service-ca-operator" id=MpqdX namespace=openshift-service-ca-operator 2025-12-12T16:20:41.628889842+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="resolving sources" id=w+U1P namespace=openshift-vsphere-infra 2025-12-12T16:20:41.628933463+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="checking if subscriptions need update" id=w+U1P namespace=openshift-vsphere-infra 2025-12-12T16:20:41.780145020+00:00 stderr F I1212 16:20:41.779160 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:20:41.828331084+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="No subscriptions were found in namespace 
openshift-user-workload-monitoring" id=ZOSox namespace=openshift-user-workload-monitoring 2025-12-12T16:20:41.974549349+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=W2sN/ 2025-12-12T16:20:41.974549349+00:00 stderr F time="2025-12-12T16:20:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=W2sN/ 2025-12-12T16:20:42.027626640+00:00 stderr F time="2025-12-12T16:20:42Z" level=info msg="No subscriptions were found in namespace openshift-vsphere-infra" id=w+U1P namespace=openshift-vsphere-infra 2025-12-12T16:20:42.775416594+00:00 stderr F time="2025-12-12T16:20:42Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=W2sN/ 2025-12-12T16:20:42.775416594+00:00 stderr F time="2025-12-12T16:20:42Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=W2sN/ 2025-12-12T16:20:42.974274861+00:00 stderr F time="2025-12-12T16:20:42Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=Oa9x0 2025-12-12T16:20:42.974274861+00:00 stderr F time="2025-12-12T16:20:42Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=Oa9x0 2025-12-12T16:20:43.578079428+00:00 stderr F time="2025-12-12T16:20:43Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=vyEv1 2025-12-12T16:20:43.578079428+00:00 stderr F time="2025-12-12T16:20:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=vyEv1 2025-12-12T16:20:44.176892345+00:00 stderr F time="2025-12-12T16:20:44Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=Oa9x0 2025-12-12T16:20:44.176892345+00:00 stderr F time="2025-12-12T16:20:44Z" level=info msg="of 1 
pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=Oa9x0 2025-12-12T16:20:44.774805508+00:00 stderr F time="2025-12-12T16:20:44Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=vyEv1 2025-12-12T16:20:44.774805508+00:00 stderr F time="2025-12-12T16:20:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=vyEv1 2025-12-12T16:20:44.976818677+00:00 stderr F time="2025-12-12T16:20:44Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=jkuKZ 2025-12-12T16:20:44.976818677+00:00 stderr F time="2025-12-12T16:20:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=jkuKZ 2025-12-12T16:20:45.579263718+00:00 stderr F time="2025-12-12T16:20:45Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=KNXpq 2025-12-12T16:20:45.579360801+00:00 stderr F time="2025-12-12T16:20:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=KNXpq 2025-12-12T16:20:46.181214727+00:00 stderr F time="2025-12-12T16:20:46Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=jkuKZ 2025-12-12T16:20:46.181214727+00:00 stderr F time="2025-12-12T16:20:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=jkuKZ 2025-12-12T16:20:46.777774275+00:00 stderr F time="2025-12-12T16:20:46Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=KNXpq 2025-12-12T16:20:46.777774275+00:00 stderr F time="2025-12-12T16:20:46Z" level=info msg="of 1 
pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=KNXpq 2025-12-12T16:20:46.979668541+00:00 stderr F time="2025-12-12T16:20:46Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=j+lPK 2025-12-12T16:20:46.979668541+00:00 stderr F time="2025-12-12T16:20:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=j+lPK 2025-12-12T16:20:47.576731492+00:00 stderr F time="2025-12-12T16:20:47Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=1Jp8d 2025-12-12T16:20:47.576731492+00:00 stderr F time="2025-12-12T16:20:47Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=1Jp8d 2025-12-12T16:20:48.177802678+00:00 stderr F time="2025-12-12T16:20:48Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=j+lPK 2025-12-12T16:20:48.177802678+00:00 stderr F time="2025-12-12T16:20:48Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=j+lPK 2025-12-12T16:20:48.776013509+00:00 stderr F time="2025-12-12T16:20:48Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=1Jp8d 2025-12-12T16:20:48.776013509+00:00 stderr F time="2025-12-12T16:20:48Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=1Jp8d 2025-12-12T16:20:48.975192723+00:00 stderr F time="2025-12-12T16:20:48Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=9H+/c 2025-12-12T16:20:48.975192723+00:00 stderr F time="2025-12-12T16:20:48Z" 
level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=9H+/c 2025-12-12T16:20:49.578806105+00:00 stderr F time="2025-12-12T16:20:49Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=h/JpR 2025-12-12T16:20:49.578855966+00:00 stderr F time="2025-12-12T16:20:49Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=h/JpR 2025-12-12T16:20:50.176099082+00:00 stderr F time="2025-12-12T16:20:50Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=9H+/c 2025-12-12T16:20:50.176285467+00:00 stderr F time="2025-12-12T16:20:50Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=9H+/c 2025-12-12T16:20:50.775304229+00:00 stderr F time="2025-12-12T16:20:50Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=h/JpR 2025-12-12T16:20:50.775304229+00:00 stderr F time="2025-12-12T16:20:50Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=h/JpR 2025-12-12T16:20:50.976586729+00:00 stderr F time="2025-12-12T16:20:50Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=MLsGi 2025-12-12T16:20:50.976586729+00:00 stderr F time="2025-12-12T16:20:50Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=MLsGi 2025-12-12T16:20:51.298486232+00:00 stderr F time="2025-12-12T16:20:51Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-operators state.State=READY" 2025-12-12T16:20:51.298576405+00:00 stderr F time="2025-12-12T16:20:51Z" level=info msg="resolving sources" id=jXAWN namespace=openshift-marketplace 2025-12-12T16:20:51.298576405+00:00 stderr F time="2025-12-12T16:20:51Z" 
level=info msg="checking if subscriptions need update" id=jXAWN namespace=openshift-marketplace 2025-12-12T16:20:51.302667142+00:00 stderr F time="2025-12-12T16:20:51Z" level=info msg="No subscriptions were found in namespace openshift-marketplace" id=jXAWN namespace=openshift-marketplace 2025-12-12T16:20:51.577646955+00:00 stderr F time="2025-12-12T16:20:51Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=7qUHV 2025-12-12T16:20:51.577646955+00:00 stderr F time="2025-12-12T16:20:51Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=7qUHV 2025-12-12T16:20:52.007516240+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=community-operators state.State=READY" 2025-12-12T16:20:52.007569381+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="resolving sources" id=EiKq9 namespace=openshift-marketplace 2025-12-12T16:20:52.007569381+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="checking if subscriptions need update" id=EiKq9 namespace=openshift-marketplace 2025-12-12T16:20:52.010735565+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="No subscriptions were found in namespace openshift-marketplace" id=EiKq9 namespace=openshift-marketplace 2025-12-12T16:20:52.177416257+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=MLsGi 2025-12-12T16:20:52.177416257+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=MLsGi 2025-12-12T16:20:52.793156566+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=7qUHV 2025-12-12T16:20:52.793156566+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=7qUHV 2025-12-12T16:20:52.976025173+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=KX+0U 2025-12-12T16:20:52.976025173+00:00 stderr F time="2025-12-12T16:20:52Z" level=info msg="of 1 pods matching label 
selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=KX+0U 2025-12-12T16:20:53.144118112+00:00 stderr F time="2025-12-12T16:20:53Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=redhat-marketplace state.State=READY" 2025-12-12T16:20:53.144211285+00:00 stderr F time="2025-12-12T16:20:53Z" level=info msg="resolving sources" id=jd6nX namespace=openshift-marketplace 2025-12-12T16:20:53.144211285+00:00 stderr F time="2025-12-12T16:20:53Z" level=info msg="checking if subscriptions need update" id=jd6nX namespace=openshift-marketplace 2025-12-12T16:20:53.147362357+00:00 stderr F time="2025-12-12T16:20:53Z" level=info msg="No subscriptions were found in namespace openshift-marketplace" id=jd6nX namespace=openshift-marketplace 2025-12-12T16:20:53.576622007+00:00 stderr F time="2025-12-12T16:20:53Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=vCmJ+ 2025-12-12T16:20:53.576622007+00:00 stderr F time="2025-12-12T16:20:53Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=vCmJ+ 2025-12-12T16:20:53.879032189+00:00 stderr F time="2025-12-12T16:20:53Z" level=info msg="state.Key.Namespace=openshift-marketplace state.Key.Name=certified-operators state.State=READY" 2025-12-12T16:20:53.879117571+00:00 stderr F time="2025-12-12T16:20:53Z" level=info msg="resolving sources" id=tE/xp namespace=openshift-marketplace 2025-12-12T16:20:53.879117571+00:00 stderr F time="2025-12-12T16:20:53Z" level=info msg="checking if subscriptions need update" id=tE/xp namespace=openshift-marketplace 2025-12-12T16:20:53.882705325+00:00 stderr F time="2025-12-12T16:20:53Z" level=info msg="No subscriptions were found in namespace openshift-marketplace" id=tE/xp namespace=openshift-marketplace 2025-12-12T16:20:54.175207748+00:00 stderr F time="2025-12-12T16:20:54Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=KX+0U 2025-12-12T16:20:54.175207748+00:00 stderr F time="2025-12-12T16:20:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=KX+0U 2025-12-12T16:20:54.778119832+00:00 stderr F time="2025-12-12T16:20:54Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=vCmJ+ 2025-12-12T16:20:54.778119832+00:00 stderr F time="2025-12-12T16:20:54Z" level=info msg="of 1 pods matching label selector, 1 
have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=vCmJ+ 2025-12-12T16:20:54.976583188+00:00 stderr F time="2025-12-12T16:20:54Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=BnE/3 2025-12-12T16:20:54.976583188+00:00 stderr F time="2025-12-12T16:20:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=BnE/3 2025-12-12T16:20:55.577702805+00:00 stderr F time="2025-12-12T16:20:55Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=dcDwu 2025-12-12T16:20:55.577702805+00:00 stderr F time="2025-12-12T16:20:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=dcDwu 2025-12-12T16:20:56.175759882+00:00 stderr F time="2025-12-12T16:20:56Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=BnE/3 2025-12-12T16:20:56.175759882+00:00 stderr F time="2025-12-12T16:20:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=BnE/3 2025-12-12T16:20:56.775330238+00:00 stderr F time="2025-12-12T16:20:56Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=dcDwu 2025-12-12T16:20:56.775330238+00:00 stderr F time="2025-12-12T16:20:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=dcDwu 2025-12-12T16:20:56.976512375+00:00 stderr F time="2025-12-12T16:20:56Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=0zCBD 2025-12-12T16:20:56.976512375+00:00 stderr F time="2025-12-12T16:20:56Z" level=info msg="of 1 pods matching label selector, 1 have 
the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=0zCBD 2025-12-12T16:20:57.575666431+00:00 stderr F time="2025-12-12T16:20:57Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=SHCuI 2025-12-12T16:20:57.575666431+00:00 stderr F time="2025-12-12T16:20:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=SHCuI 2025-12-12T16:20:58.174269462+00:00 stderr F time="2025-12-12T16:20:58Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=0zCBD 2025-12-12T16:20:58.174269462+00:00 stderr F time="2025-12-12T16:20:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=0zCBD 2025-12-12T16:20:58.775336048+00:00 stderr F time="2025-12-12T16:20:58Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=SHCuI 2025-12-12T16:20:58.775336048+00:00 stderr F time="2025-12-12T16:20:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=SHCuI 2025-12-12T16:20:58.975107738+00:00 stderr F time="2025-12-12T16:20:58Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=tiu1e 2025-12-12T16:20:58.975107738+00:00 stderr F time="2025-12-12T16:20:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=tiu1e 2025-12-12T16:20:59.575336392+00:00 stderr F time="2025-12-12T16:20:59Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=MekCt 2025-12-12T16:20:59.575336392+00:00 stderr F time="2025-12-12T16:20:59Z" level=info msg="of 1 pods matching label 
selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=MekCt 2025-12-12T16:21:00.174199209+00:00 stderr F time="2025-12-12T16:21:00Z" level=info msg="evaluating current pod" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=tiu1e 2025-12-12T16:21:00.174199209+00:00 stderr F time="2025-12-12T16:21:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-marketplace catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-marketplace-jkgqd current-pod.namespace=openshift-marketplace id=tiu1e 2025-12-12T16:21:00.576262535+00:00 stderr F time="2025-12-12T16:21:00Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=MekCt 2025-12-12T16:21:00.576262535+00:00 stderr F time="2025-12-12T16:21:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=MekCt 2025-12-12T16:25:04.278408724+00:00 stderr F time="2025-12-12T16:25:04Z" level=info msg="resolving sources" id=uahhF namespace=openstack 2025-12-12T16:25:04.278408724+00:00 stderr F time="2025-12-12T16:25:04Z" level=info msg="checking if subscriptions need update" id=uahhF namespace=openstack 2025-12-12T16:25:04.282749958+00:00 stderr F time="2025-12-12T16:25:04Z" level=info msg="No subscriptions were found in namespace openstack" id=uahhF namespace=openstack 2025-12-12T16:25:04.294108776+00:00 stderr F time="2025-12-12T16:25:04Z" level=info msg="resolving sources" id=C66L9 namespace=openstack 2025-12-12T16:25:04.294108776+00:00 stderr F time="2025-12-12T16:25:04Z" level=info msg="checking if subscriptions need update" id=C66L9 namespace=openstack 2025-12-12T16:25:04.381681115+00:00 stderr F time="2025-12-12T16:25:04Z" level=info msg="No subscriptions were found in namespace openstack" id=C66L9 namespace=openstack 2025-12-12T16:25:04.381681115+00:00 stderr F time="2025-12-12T16:25:04Z" level=info msg="resolving sources" id=K+aZ8 namespace=openstack 2025-12-12T16:25:04.381681115+00:00 stderr F time="2025-12-12T16:25:04Z" level=info msg="checking if subscriptions need update" id=K+aZ8 namespace=openstack 2025-12-12T16:25:04.384401107+00:00 stderr F time="2025-12-12T16:25:04Z" level=info msg="No subscriptions were found in namespace openstack" id=K+aZ8 namespace=openstack 2025-12-12T16:25:04.994746404+00:00 stderr F time="2025-12-12T16:25:04Z" level=info msg="resolving sources" id=GxGQ2 namespace=openstack-operators 2025-12-12T16:25:04.994746404+00:00 stderr F time="2025-12-12T16:25:04Z" level=info msg="checking if subscriptions need update" id=GxGQ2 namespace=openstack-operators 2025-12-12T16:25:05.002136168+00:00 stderr F time="2025-12-12T16:25:05Z" level=info msg="No subscriptions were found in namespace openstack-operators" id=GxGQ2 
namespace=openstack-operators 2025-12-12T16:25:05.008452404+00:00 stderr F time="2025-12-12T16:25:05Z" level=info msg="resolving sources" id=L4HM2 namespace=openstack-operators 2025-12-12T16:25:05.008452404+00:00 stderr F time="2025-12-12T16:25:05Z" level=info msg="checking if subscriptions need update" id=L4HM2 namespace=openstack-operators 2025-12-12T16:25:05.100342068+00:00 stderr F time="2025-12-12T16:25:05Z" level=info msg="No subscriptions were found in namespace openstack-operators" id=L4HM2 namespace=openstack-operators 2025-12-12T16:25:05.100342068+00:00 stderr F time="2025-12-12T16:25:05Z" level=info msg="resolving sources" id=lT9BB namespace=openstack-operators 2025-12-12T16:25:05.100342068+00:00 stderr F time="2025-12-12T16:25:05Z" level=info msg="checking if subscriptions need update" id=lT9BB namespace=openstack-operators 2025-12-12T16:25:05.103423519+00:00 stderr F time="2025-12-12T16:25:05Z" level=info msg="No subscriptions were found in namespace openstack-operators" id=lT9BB namespace=openstack-operators 2025-12-12T16:26:15.047809809+00:00 stderr F time="2025-12-12T16:26:15Z" level=info msg="resolving sources" id=/udUG namespace=service-telemetry 2025-12-12T16:26:15.047809809+00:00 stderr F time="2025-12-12T16:26:15Z" level=info msg="checking if subscriptions need update" id=/udUG namespace=service-telemetry 2025-12-12T16:26:15.057299759+00:00 stderr F time="2025-12-12T16:26:15Z" level=info msg="No subscriptions were found in namespace service-telemetry" id=/udUG namespace=service-telemetry 2025-12-12T16:26:15.060218793+00:00 stderr F time="2025-12-12T16:26:15Z" level=info msg="resolving sources" id=UCNer namespace=service-telemetry 2025-12-12T16:26:15.060218793+00:00 stderr F time="2025-12-12T16:26:15Z" level=info msg="checking if subscriptions need update" id=UCNer namespace=service-telemetry 2025-12-12T16:26:15.155701085+00:00 stderr F time="2025-12-12T16:26:15Z" level=info msg="No subscriptions were found in namespace service-telemetry" id=UCNer namespace=service-telemetry 2025-12-12T16:26:15.155701085+00:00 stderr F time="2025-12-12T16:26:15Z" level=info msg="resolving sources" id=Pt2CY namespace=service-telemetry 2025-12-12T16:26:15.155783317+00:00 stderr F time="2025-12-12T16:26:15Z" level=info msg="checking if subscriptions need update" id=Pt2CY namespace=service-telemetry 2025-12-12T16:26:15.159249305+00:00 stderr F time="2025-12-12T16:26:15Z" level=info msg="No subscriptions were found in namespace service-telemetry" id=Pt2CY namespace=service-telemetry 2025-12-12T16:26:38.869878105+00:00 stderr F time="2025-12-12T16:26:38Z" level=info msg="removed client for deleted catalogsource" source="{redhat-marketplace openshift-marketplace}" 2025-12-12T16:26:39.826774530+00:00 stderr F time="2025-12-12T16:26:39Z" level=info msg="resolving sources" id=9D6az namespace=service-telemetry 2025-12-12T16:26:39.826774530+00:00 stderr F time="2025-12-12T16:26:39Z" level=info msg="checking if subscriptions need update" id=9D6az namespace=service-telemetry 2025-12-12T16:26:39.829403366+00:00 stderr F time="2025-12-12T16:26:39Z" level=info msg="No subscriptions were found in namespace service-telemetry" id=9D6az namespace=service-telemetry 2025-12-12T16:26:39.832010562+00:00 stderr F time="2025-12-12T16:26:39Z" level=info msg="resolving sources" id=qOuMW namespace=service-telemetry 2025-12-12T16:26:39.832010562+00:00 stderr F time="2025-12-12T16:26:39Z" level=info msg="checking if subscriptions need update" id=qOuMW namespace=service-telemetry 2025-12-12T16:26:39.936313996+00:00 
stderr F time="2025-12-12T16:26:39Z" level=info msg="No subscriptions were found in namespace service-telemetry" id=qOuMW namespace=service-telemetry 2025-12-12T16:26:39.936313996+00:00 stderr F time="2025-12-12T16:26:39Z" level=info msg="resolving sources" id=SP/ln namespace=service-telemetry 2025-12-12T16:26:39.936313996+00:00 stderr F time="2025-12-12T16:26:39Z" level=info msg="checking if subscriptions need update" id=SP/ln namespace=service-telemetry 2025-12-12T16:26:40.030882965+00:00 stderr F time="2025-12-12T16:26:40Z" level=info msg="No subscriptions were found in namespace service-telemetry" id=SP/ln namespace=service-telemetry 2025-12-12T16:26:40.948406975+00:00 stderr F time="2025-12-12T16:26:40Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:40.955690279+00:00 stderr F time="2025-12-12T16:26:40Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:40.955690279+00:00 stderr F time="2025-12-12T16:26:40Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:40.959896785+00:00 stderr F time="2025-12-12T16:26:40Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:40.959896785+00:00 stderr F time="2025-12-12T16:26:40Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:40.968671077+00:00 stderr F time="2025-12-12T16:26:40Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:40.968671077+00:00 stderr F time="2025-12-12T16:26:40Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:40.992173251+00:00 stderr F time="2025-12-12T16:26:40Z" level=warning msg="an error was encountered during reconciliation" error="Operation cannot be fulfilled on subscriptions.operators.coreos.com \"cluster-observability-operator\": the object has been modified; please apply your changes to the latest version and try again" reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:40.992292244+00:00 stderr F E1212 16:26:40.992159 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/cluster-observability-operator\" failed: Operation cannot be fulfilled on subscriptions.operators.coreos.com \"cluster-observability-operator\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:26:40.995257979+00:00 stderr F time="2025-12-12T16:26:40Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:40.998888741+00:00 stderr F time="2025-12-12T16:26:40Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 
2025-12-12T16:26:40.998888741+00:00 stderr F time="2025-12-12T16:26:40Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:41.003163389+00:00 stderr F time="2025-12-12T16:26:41Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:41.003163389+00:00 stderr F time="2025-12-12T16:26:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:41.349705183+00:00 stderr F time="2025-12-12T16:26:41Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:41.349705183+00:00 stderr F time="2025-12-12T16:26:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:41.355957871+00:00 stderr F time="2025-12-12T16:26:41Z" level=info msg="requesting snapshot for catalog source openshift-marketplace/redhat-operators" 2025-12-12T16:26:41.356213218+00:00 stderr F time="2025-12-12T16:26:41Z" level=info msg="requesting snapshot for catalog source openshift-marketplace/community-operators" 2025-12-12T16:26:41.356523276+00:00 stderr F time="2025-12-12T16:26:41Z" level=info msg="requesting snapshot for catalog source openshift-marketplace/certified-operators" 2025-12-12T16:26:42.026810400+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="resolving sources" id=RQbj8 namespace=cert-manager-operator 2025-12-12T16:26:42.026810400+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="checking if subscriptions need update" id=RQbj8 namespace=cert-manager-operator 2025-12-12T16:26:42.037058679+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="No subscriptions were found in namespace cert-manager-operator" id=RQbj8 namespace=cert-manager-operator 2025-12-12T16:26:42.050404276+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="resolving sources" id=geWZB namespace=cert-manager-operator 2025-12-12T16:26:42.050404276+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="checking if subscriptions need update" id=geWZB namespace=cert-manager-operator 2025-12-12T16:26:42.136415009+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="No subscriptions were found in namespace cert-manager-operator" id=geWZB namespace=cert-manager-operator 2025-12-12T16:26:42.136415009+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="resolving sources" id=MNZmW namespace=cert-manager-operator 2025-12-12T16:26:42.136415009+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="checking if subscriptions need update" id=MNZmW namespace=cert-manager-operator 2025-12-12T16:26:42.139686681+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="No subscriptions were found in namespace cert-manager-operator" id=MNZmW namespace=cert-manager-operator 2025-12-12T16:26:42.380508905+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="resolving sources" id=PQh/k namespace=openshift-operators 
2025-12-12T16:26:42.380508905+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="checking if subscriptions need update" id=PQh/k namespace=openshift-operators 2025-12-12T16:26:42.380789063+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:42.384109926+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="checking for existing installplan" channel=stable id=PQh/k namespace=openshift-operators pkg=cluster-observability-operator source=redhat-operators sub=cluster-observability-operator 2025-12-12T16:26:42.384109926+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="resolving subscriptions in namespace" id=PQh/k namespace=openshift-operators 2025-12-12T16:26:42.385710267+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:42.385710267+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:42.390365814+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:42.390365814+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:42.462414545+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="unpacking bundles" id=PQh/k namespace=openshift-operators 2025-12-12T16:26:42.549422143+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:42.549422143+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:42.572469675+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:42.807840181+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="unpacking is not complete yet, requeueing" id=PQh/k namespace=openshift-operators 2025-12-12T16:26:42.807886853+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="resolving sources" id=WDtXa namespace=openshift-operators 2025-12-12T16:26:42.807886853+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="checking if subscriptions need update" id=WDtXa namespace=openshift-operators 2025-12-12T16:26:42.810893129+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="checking for existing installplan" channel=stable id=WDtXa namespace=openshift-operators pkg=cluster-observability-operator source=redhat-operators sub=cluster-observability-operator 2025-12-12T16:26:42.810893129+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="resolving subscriptions in namespace" id=WDtXa namespace=openshift-operators 
2025-12-12T16:26:42.935049065+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="unpacking bundles" id=WDtXa namespace=openshift-operators 2025-12-12T16:26:42.935336992+00:00 stderr F time="2025-12-12T16:26:42Z" level=info msg="unpacking is not complete yet, requeueing" id=WDtXa namespace=openshift-operators 2025-12-12T16:26:43.008654005+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="resolving sources" id=E3rjg namespace=cert-manager-operator 2025-12-12T16:26:43.008654005+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="checking if subscriptions need update" id=E3rjg namespace=cert-manager-operator 2025-12-12T16:26:43.012523022+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="No subscriptions were found in namespace cert-manager-operator" id=E3rjg namespace=cert-manager-operator 2025-12-12T16:26:43.012634875+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="resolving sources" id=xBKTy namespace=cert-manager-operator 2025-12-12T16:26:43.012634875+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="checking if subscriptions need update" id=xBKTy namespace=cert-manager-operator 2025-12-12T16:26:43.015465737+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="No subscriptions were found in namespace cert-manager-operator" id=xBKTy namespace=cert-manager-operator 2025-12-12T16:26:43.027206303+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="resolving sources" id=rHROp namespace=cert-manager-operator 2025-12-12T16:26:43.027206303+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="checking if subscriptions need update" id=rHROp namespace=cert-manager-operator 2025-12-12T16:26:43.112884398+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="No subscriptions were found in namespace cert-manager-operator" id=rHROp namespace=cert-manager-operator 2025-12-12T16:26:43.352323117+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:43.352323117+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:43.550352129+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=GsrFa 2025-12-12T16:26:43.550352129+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=GsrFa 2025-12-12T16:26:43.905644205+00:00 stderr F time="2025-12-12T16:26:43Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:44.149117146+00:00 stderr F time="2025-12-12T16:26:44Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:44.149117146+00:00 stderr F time="2025-12-12T16:26:44Z" level=info 
msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:44.949884246+00:00 stderr F time="2025-12-12T16:26:44Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:44.949884246+00:00 stderr F time="2025-12-12T16:26:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:45.148952686+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=GsrFa 2025-12-12T16:26:45.148952686+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=GsrFa 2025-12-12T16:26:45.350732213+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:45.350732213+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:45.364174763+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:45.364286596+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg="resolving sources" id=3pina namespace=openshift-operators 2025-12-12T16:26:45.364286596+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg="checking if subscriptions need update" id=3pina namespace=openshift-operators 2025-12-12T16:26:45.368319908+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg="checking for existing installplan" channel=stable id=3pina namespace=openshift-operators pkg=cluster-observability-operator source=redhat-operators sub=cluster-observability-operator 2025-12-12T16:26:45.368319908+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg="resolving subscriptions in namespace" id=3pina namespace=openshift-operators 2025-12-12T16:26:45.461033400+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg="unpacking bundles" id=3pina namespace=openshift-operators 2025-12-12T16:26:45.461275066+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg="unpacking is not complete yet, requeueing" id=3pina namespace=openshift-operators 2025-12-12T16:26:45.950445874+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:45.950445874+00:00 stderr F time="2025-12-12T16:26:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct 
images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:46.150211491+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:46.150211491+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:46.751077971+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:46.751077971+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:46.755817111+00:00 stderr F time="2025-12-12T16:26:46Z" level=warning msg="an error was encountered during reconciliation" error="Operation cannot be fulfilled on subscriptions.operators.coreos.com \"openshift-cert-manager-operator\": the object has been modified; please apply your changes to the latest version and try again" reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:46.755866482+00:00 stderr F E1212 16:26:46.755844 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"cert-manager-operator/openshift-cert-manager-operator\" failed: Operation cannot be fulfilled on subscriptions.operators.coreos.com \"openshift-cert-manager-operator\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:26:46.757204986+00:00 stderr F time="2025-12-12T16:26:46Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:47.149995278+00:00 stderr F time="2025-12-12T16:26:47Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:47.149995278+00:00 stderr F time="2025-12-12T16:26:47Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:47.549634455+00:00 stderr F time="2025-12-12T16:26:47Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=+CBGv 2025-12-12T16:26:47.549634455+00:00 stderr F time="2025-12-12T16:26:47Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=+CBGv 2025-12-12T16:26:47.808005302+00:00 stderr F time="2025-12-12T16:26:47Z" level=info msg="resolving sources" id=J/gZx namespace=openshift-operators 
2025-12-12T16:26:47.808005302+00:00 stderr F time="2025-12-12T16:26:47Z" level=info msg="checking if subscriptions need update" id=J/gZx namespace=openshift-operators 2025-12-12T16:26:47.812353962+00:00 stderr F time="2025-12-12T16:26:47Z" level=info msg="checking for existing installplan" channel=stable id=J/gZx namespace=openshift-operators pkg=cluster-observability-operator source=redhat-operators sub=cluster-observability-operator 2025-12-12T16:26:47.812353962+00:00 stderr F time="2025-12-12T16:26:47Z" level=info msg="resolving subscriptions in namespace" id=J/gZx namespace=openshift-operators 2025-12-12T16:26:47.918822402+00:00 stderr F time="2025-12-12T16:26:47Z" level=info msg="unpacking bundles" id=J/gZx namespace=openshift-operators 2025-12-12T16:26:47.919053428+00:00 stderr F time="2025-12-12T16:26:47Z" level=info msg="unpacking is not complete yet, requeueing" id=J/gZx namespace=openshift-operators 2025-12-12T16:26:47.948086721+00:00 stderr F time="2025-12-12T16:26:47Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:47.948086721+00:00 stderr F time="2025-12-12T16:26:47Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:48.349359639+00:00 stderr F time="2025-12-12T16:26:48Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:48.349359639+00:00 stderr F time="2025-12-12T16:26:48Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:48.360283905+00:00 stderr F time="2025-12-12T16:26:48Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:48.360283905+00:00 stderr F time="2025-12-12T16:26:48Z" level=info msg="resolving sources" id=I2dsz namespace=service-telemetry 2025-12-12T16:26:48.360283905+00:00 stderr F time="2025-12-12T16:26:48Z" level=info msg="checking if subscriptions need update" id=I2dsz namespace=service-telemetry 2025-12-12T16:26:48.362646125+00:00 stderr F time="2025-12-12T16:26:48Z" level=info msg="checking for existing installplan" channel=stable id=I2dsz namespace=service-telemetry pkg=elasticsearch-eck-operator-certified source=certified-operators sub=elasticsearch-eck-operator-certified 2025-12-12T16:26:48.362666185+00:00 stderr F time="2025-12-12T16:26:48Z" level=info msg="resolving subscriptions in namespace" id=I2dsz namespace=service-telemetry 2025-12-12T16:26:48.414529835+00:00 stderr F time="2025-12-12T16:26:48Z" level=info msg="unpacking bundles" id=I2dsz namespace=service-telemetry 2025-12-12T16:26:49.201693312+00:00 stderr F time="2025-12-12T16:26:49Z" level=info msg="unpacking is not complete yet, requeueing" id=I2dsz namespace=service-telemetry 2025-12-12T16:26:49.349854395+00:00 stderr F time="2025-12-12T16:26:49Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:49.349854395+00:00 stderr F time="2025-12-12T16:26:49Z" level=info msg="of 1 pods matching label selector, 1 
have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:49.549005286+00:00 stderr F time="2025-12-12T16:26:49Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=+CBGv 2025-12-12T16:26:49.549005286+00:00 stderr F time="2025-12-12T16:26:49Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=+CBGv 2025-12-12T16:26:49.749754278+00:00 stderr F time="2025-12-12T16:26:49Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:49.749754278+00:00 stderr F time="2025-12-12T16:26:49Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:50.550302653+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=x3IHR 2025-12-12T16:26:50.550302653+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=x3IHR 2025-12-12T16:26:50.752760996+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:50.752760996+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:50.785719279+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:50.785777401+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="resolving sources" id=+9Wbm namespace=openshift-operators 2025-12-12T16:26:50.785777401+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="checking if subscriptions need update" id=+9Wbm namespace=openshift-operators 2025-12-12T16:26:50.789945566+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="checking for existing installplan" channel=stable id=+9Wbm namespace=openshift-operators pkg=cluster-observability-operator source=redhat-operators sub=cluster-observability-operator 2025-12-12T16:26:50.789945566+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="resolving subscriptions in namespace" id=+9Wbm namespace=openshift-operators 
2025-12-12T16:26:50.875640561+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="unpacking bundles" id=+9Wbm namespace=openshift-operators 2025-12-12T16:26:50.911471106+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="added to bundle, Kind=ClusterServiceVersion" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=cluster-observability-operator.clusterserviceversion.yaml 2025-12-12T16:26:50.949520667+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:50.949520667+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:50.952060022+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_alertmanagerconfigs.yaml 2025-12-12T16:26:50.997005767+00:00 stderr F time="2025-12-12T16:26:50Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_alertmanagers.yaml 2025-12-12T16:26:51.003433939+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_monitoringstacks.yaml 2025-12-12T16:26:51.008015915+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_podmonitors.yaml 2025-12-12T16:26:51.012001716+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_probes.yaml 2025-12-12T16:26:51.050125209+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_prometheusagents.yaml 2025-12-12T16:26:51.099351203+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_prometheuses.yaml 2025-12-12T16:26:51.100360228+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_prometheusrules.yaml 2025-12-12T16:26:51.157252015+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_scrapeconfigs.yaml 2025-12-12T16:26:51.162005096+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, 
Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_servicemonitors.yaml 2025-12-12T16:26:51.164545900+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_thanosqueriers.yaml 2025-12-12T16:26:51.201315359+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_thanosrulers.yaml 2025-12-12T16:26:51.201474093+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=PodDisruptionBudget" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=obo-prometheus-operator-admission-webhook_policy_v1_poddisruptionbudget.yaml 2025-12-12T16:26:51.201654567+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=Service" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=obo-prometheus-operator-admission-webhook_v1_service.yaml 2025-12-12T16:26:51.201899683+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=Service" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=obo-prometheus-operator_v1_service.yaml 2025-12-12T16:26:51.202062978+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=PrometheusRule" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability-operator_monitoring.coreos.com_v1_prometheusrule.yaml 2025-12-12T16:26:51.202150150+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=RoleBinding" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability-operator_rbac.authorization.k8s.io_v1_rolebinding.yaml 2025-12-12T16:26:51.202389826+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=Service" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability-operator_v1_service.yaml 2025-12-12T16:26:51.204040887+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability.openshift.io_observabilityinstallers.yaml 2025-12-12T16:26:51.205111235+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability.openshift.io_uiplugins.yaml 2025-12-12T16:26:51.215213570+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=perses.dev_perses.yaml 2025-12-12T16:26:51.216089592+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=perses.dev_persesdashboards.yaml 2025-12-12T16:26:51.217087197+00:00 stderr 
F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=perses.dev_persesdatasources.yaml 2025-12-12T16:26:51.217127898+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=ServiceAccount" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=perses_v1_serviceaccount.yaml 2025-12-12T16:26:51.217289002+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=persesdashboard-editor-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:26:51.217416645+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=persesdashboard-viewer-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:26:51.217550559+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=persesdatasource-editor-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:26:51.217687452+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=persesdatasource-viewer-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:26:51.585061333+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="resolution caused subscription changes, creating installplan" id=+9Wbm namespace=openshift-operators 2025-12-12T16:26:51.591532277+00:00 stderr F time="2025-12-12T16:26:51Z" level=warning msg="no installplan found with matching generation, creating new one" id=+9Wbm namespace=openshift-operators 2025-12-12T16:26:51.596983225+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg=syncing id=ukD3x ip=install-sdtz5 namespace=openshift-operators phase= 2025-12-12T16:26:51.597006345+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="skip processing installplan without status - subscription sync responsible for initial status" id=ukD3x ip=install-sdtz5 namespace=openshift-operators phase= 2025-12-12T16:26:51.631012114+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg=syncing id=KOrKU ip=install-sdtz5 namespace=openshift-operators phase=Installing 2025-12-12T16:26:51.681592402+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=ClusterServiceVersion" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=cluster-observability-operator.clusterserviceversion.yaml 2025-12-12T16:26:51.751101098+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_alertmanagerconfigs.yaml 2025-12-12T16:26:51.784249496+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_alertmanagers.yaml 2025-12-12T16:26:51.790682678+00:00 stderr F 
time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_monitoringstacks.yaml 2025-12-12T16:26:51.795706645+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_podmonitors.yaml 2025-12-12T16:26:51.800030794+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_probes.yaml 2025-12-12T16:26:51.836829664+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_prometheusagents.yaml 2025-12-12T16:26:51.897874366+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_prometheuses.yaml 2025-12-12T16:26:51.899009575+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_prometheusrules.yaml 2025-12-12T16:26:51.940660477+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_scrapeconfigs.yaml 2025-12-12T16:26:51.946085824+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_servicemonitors.yaml 2025-12-12T16:26:51.946817283+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_thanosqueriers.yaml 2025-12-12T16:26:51.949683355+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:51.949683355+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:51.989830209+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_thanosrulers.yaml 2025-12-12T16:26:51.989910181+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=PodDisruptionBudget" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=obo-prometheus-operator-admission-webhook_policy_v1_poddisruptionbudget.yaml 
2025-12-12T16:26:51.990027894+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=Service" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=obo-prometheus-operator-admission-webhook_v1_service.yaml 2025-12-12T16:26:51.990154698+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=Service" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=obo-prometheus-operator_v1_service.yaml 2025-12-12T16:26:51.990317222+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=PrometheusRule" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability-operator_monitoring.coreos.com_v1_prometheusrule.yaml 2025-12-12T16:26:51.990385673+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=RoleBinding" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability-operator_rbac.authorization.k8s.io_v1_rolebinding.yaml 2025-12-12T16:26:51.990521197+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=Service" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability-operator_v1_service.yaml 2025-12-12T16:26:51.992397684+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability.openshift.io_observabilityinstallers.yaml 2025-12-12T16:26:51.993809440+00:00 stderr F time="2025-12-12T16:26:51Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability.openshift.io_uiplugins.yaml 2025-12-12T16:26:52.005040344+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=perses.dev_perses.yaml 2025-12-12T16:26:52.006015118+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=perses.dev_persesdashboards.yaml 2025-12-12T16:26:52.007083715+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=perses.dev_persesdatasources.yaml 2025-12-12T16:26:52.007120346+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="added to bundle, Kind=ServiceAccount" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=perses_v1_serviceaccount.yaml 2025-12-12T16:26:52.007325291+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=persesdashboard-editor-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:26:52.007422854+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 
key=persesdashboard-viewer-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:26:52.007526567+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=persesdatasource-editor-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:26:52.007641399+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=persesdatasource-viewer-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:26:52.149395751+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:52.149395751+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:52.166489152+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:52.166687437+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="resolving sources" id=yMVxW namespace=cert-manager-operator 2025-12-12T16:26:52.166696428+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="checking if subscriptions need update" id=yMVxW namespace=cert-manager-operator 2025-12-12T16:26:52.169475608+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="checking for existing installplan" channel=stable-v1 id=yMVxW namespace=cert-manager-operator pkg=openshift-cert-manager-operator source=redhat-operators sub=openshift-cert-manager-operator 2025-12-12T16:26:52.169475608+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="resolving subscriptions in namespace" id=yMVxW namespace=cert-manager-operator 2025-12-12T16:26:52.200116712+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="unpacking bundles" id=yMVxW namespace=cert-manager-operator 2025-12-12T16:26:52.349911096+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=x3IHR 2025-12-12T16:26:52.349911096+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=x3IHR 2025-12-12T16:26:52.349961558+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="catalog update required at 2025-12-12 16:26:52.349912346 +0000 UTC m=+605.465670329" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=x3IHR 2025-12-12T16:26:52.378992711+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="resolving sources" id=bJqWX namespace=openshift-operators 2025-12-12T16:26:52.378992711+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="checking if subscriptions need update" id=bJqWX namespace=openshift-operators 
2025-12-12T16:26:52.409356038+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="subscriptions were updated, wait for a new resolution" id=bJqWX namespace=openshift-operators 2025-12-12T16:26:52.409356038+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="resolving sources" id=SC3hz namespace=openshift-operators 2025-12-12T16:26:52.409356038+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="checking if subscriptions need update" id=SC3hz namespace=openshift-operators 2025-12-12T16:26:52.816571206+00:00 stderr F time="2025-12-12T16:26:52Z" level=info msg="resolving subscriptions in namespace" id=SC3hz namespace=openshift-operators 2025-12-12T16:26:53.036216665+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="no subscriptions were updated" id=SC3hz namespace=openshift-operators 2025-12-12T16:26:53.169249796+00:00 stderr F I1212 16:26:53.166391 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:26:53.169249796+00:00 stderr F time="2025-12-12T16:26:53Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-8pl6d has not yet reported ready" id=x3IHR 2025-12-12T16:26:53.169249796+00:00 stderr F time="2025-12-12T16:26:53Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-8pl6d has not yet reported ready" id=x3IHR 2025-12-12T16:26:53.169249796+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=x3IHR 2025-12-12T16:26:53.353934682+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:53.353934682+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:53.550583320+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:53.550583320+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:53.865734372+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="resolving sources" id=7jD3/ namespace=openshift-operators 2025-12-12T16:26:53.865734372+00:00 stderr F time="2025-12-12T16:26:53Z" level=info msg="checking if subscriptions need update" id=7jD3/ namespace=openshift-operators 2025-12-12T16:26:54.005745169+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="unpacking is not complete 
yet, requeueing" id=yMVxW namespace=cert-manager-operator 2025-12-12T16:26:54.202851909+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="resolving sources" id=dW08v namespace=service-telemetry 2025-12-12T16:26:54.202851909+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="checking if subscriptions need update" id=dW08v namespace=service-telemetry 2025-12-12T16:26:54.427616126+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="checking for existing installplan" channel=stable id=dW08v namespace=service-telemetry pkg=elasticsearch-eck-operator-certified source=certified-operators sub=elasticsearch-eck-operator-certified 2025-12-12T16:26:54.427616126+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="resolving subscriptions in namespace" id=dW08v namespace=service-telemetry 2025-12-12T16:26:54.583980776+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=nyYsH 2025-12-12T16:26:54.583980776+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=nyYsH 2025-12-12T16:26:54.627045434+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="resolving subscriptions in namespace" id=7jD3/ namespace=openshift-operators 2025-12-12T16:26:54.786094932+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:54.786094932+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:54.977237811+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:54.977237811+00:00 stderr F time="2025-12-12T16:26:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:55.016234037+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="no subscriptions were updated" id=7jD3/ namespace=openshift-operators 2025-12-12T16:26:55.079131946+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="unpacking bundles" id=dW08v namespace=service-telemetry 2025-12-12T16:26:55.079131946+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="unpacking is not complete yet, requeueing" id=dW08v namespace=service-telemetry 2025-12-12T16:26:55.158366587+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=fojvm 
2025-12-12T16:26:55.158436439+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=fojvm 2025-12-12T16:26:55.205279013+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:55.216455905+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="resolving sources" id=Ysfos namespace=service-telemetry 2025-12-12T16:26:55.216455905+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="checking if subscriptions need update" id=Ysfos namespace=service-telemetry 2025-12-12T16:26:55.790016705+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="checking for existing installplan" channel=stable id=Ysfos namespace=service-telemetry pkg=elasticsearch-eck-operator-certified source=certified-operators sub=elasticsearch-eck-operator-certified 2025-12-12T16:26:55.790016705+00:00 stderr F time="2025-12-12T16:26:55Z" level=info msg="resolving subscriptions in namespace" id=Ysfos namespace=service-telemetry 2025-12-12T16:26:56.350642549+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:56.350642549+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:56.363250207+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:26:56.363250207+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="resolving sources" id=0Hhpd namespace=openshift-operators 2025-12-12T16:26:56.363250207+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="checking if subscriptions need update" id=0Hhpd namespace=openshift-operators 2025-12-12T16:26:56.446479170+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="unpacking bundles" id=Ysfos namespace=service-telemetry 2025-12-12T16:26:56.446812658+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="unpacking is not complete yet, requeueing" id=Ysfos namespace=service-telemetry 2025-12-12T16:26:56.749731601+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:56.749731601+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:56.951225812+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=nyYsH 2025-12-12T16:26:56.951225812+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="of 1 pods matching label 
selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=nyYsH 2025-12-12T16:26:56.951293093+00:00 stderr F time="2025-12-12T16:26:56Z" level=info msg="catalog update required at 2025-12-12 16:26:56.951272403 +0000 UTC m=+610.067030386" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=nyYsH 2025-12-12T16:26:57.198288553+00:00 stderr F time="2025-12-12T16:26:57Z" level=info msg="resolving subscriptions in namespace" id=0Hhpd namespace=openshift-operators 2025-12-12T16:26:57.352829838+00:00 stderr F time="2025-12-12T16:26:57Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=fojvm 2025-12-12T16:26:57.352829838+00:00 stderr F time="2025-12-12T16:26:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=fojvm 2025-12-12T16:26:57.352901950+00:00 stderr F time="2025-12-12T16:26:57Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-8pl6d has not yet reported ready" id=fojvm 2025-12-12T16:26:57.352901950+00:00 stderr F time="2025-12-12T16:26:57Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-8pl6d has not yet reported ready" id=fojvm 2025-12-12T16:26:57.352901950+00:00 stderr F time="2025-12-12T16:26:57Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=fojvm 2025-12-12T16:26:57.619150976+00:00 stderr F time="2025-12-12T16:26:57Z" level=info msg="no subscriptions were updated" id=0Hhpd namespace=openshift-operators 2025-12-12T16:26:57.758244090+00:00 stderr F I1212 16:26:57.757592 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:26:57.758244090+00:00 stderr F time="2025-12-12T16:26:57Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-b4n58 has not yet reported ready" id=nyYsH 2025-12-12T16:26:57.758244090+00:00 stderr F time="2025-12-12T16:26:57Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-b4n58 has not yet 
reported ready" id=nyYsH 2025-12-12T16:26:57.758244090+00:00 stderr F time="2025-12-12T16:26:57Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=nyYsH 2025-12-12T16:26:57.955318728+00:00 stderr F time="2025-12-12T16:26:57Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:57.955318728+00:00 stderr F time="2025-12-12T16:26:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:58.351237090+00:00 stderr F time="2025-12-12T16:26:58Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:58.351237090+00:00 stderr F time="2025-12-12T16:26:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:58.392405912+00:00 stderr F time="2025-12-12T16:26:58Z" level=info msg="resolving sources" id=twdTW namespace=openshift-operators 2025-12-12T16:26:58.392405912+00:00 stderr F time="2025-12-12T16:26:58Z" level=info msg="checking if subscriptions need update" id=twdTW namespace=openshift-operators 2025-12-12T16:26:58.754211979+00:00 stderr F time="2025-12-12T16:26:58Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=jveVL 2025-12-12T16:26:58.754211979+00:00 stderr F time="2025-12-12T16:26:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=jveVL 2025-12-12T16:26:59.005313444+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="resolving sources" id=k+T0P namespace=cert-manager-operator 2025-12-12T16:26:59.005313444+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="checking if subscriptions need update" id=k+T0P namespace=cert-manager-operator 2025-12-12T16:26:59.209244685+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="resolving subscriptions in namespace" id=twdTW namespace=openshift-operators 2025-12-12T16:26:59.355129168+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:59.355129168+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:59.392120874+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="checking for existing installplan" channel=stable-v1 id=k+T0P 
namespace=cert-manager-operator pkg=openshift-cert-manager-operator source=redhat-operators sub=openshift-cert-manager-operator 2025-12-12T16:26:59.392120874+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="resolving subscriptions in namespace" id=k+T0P namespace=cert-manager-operator 2025-12-12T16:26:59.754335791+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=5rivA 2025-12-12T16:26:59.754335791+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=5rivA 2025-12-12T16:26:59.814991806+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="no subscriptions were updated" id=twdTW namespace=openshift-operators 2025-12-12T16:26:59.951449970+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:26:59.951449970+00:00 stderr F time="2025-12-12T16:26:59Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:00.042201657+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="unpacking bundles" id=k+T0P namespace=cert-manager-operator 2025-12-12T16:27:00.042405022+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="unpacking is not complete yet, requeueing" id=k+T0P namespace=cert-manager-operator 2025-12-12T16:27:00.079529431+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="resolving sources" id=BWQIF namespace=service-telemetry 2025-12-12T16:27:00.079601183+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="checking if subscriptions need update" id=BWQIF namespace=service-telemetry 2025-12-12T16:27:00.205247163+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="error updating InstallPlan status" id=KOrKU ip=install-sdtz5 namespace=openshift-operators phase=Installing updateError="Operation cannot be fulfilled on installplans.operators.coreos.com \"install-sdtz5\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:00.205247163+00:00 stderr F E1212 16:27:00.202440 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"openshift-operators/install-sdtz5\" failed: error updating InstallPlan status: Operation cannot be fulfilled on installplans.operators.coreos.com \"install-sdtz5\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:00.205247163+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg=syncing id=IdAYV ip=install-sdtz5 namespace=openshift-operators phase=Installing 2025-12-12T16:27:00.627983162+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:00.751700963+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="evaluating current pod" 
correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:00.751821236+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:00.755604922+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:00.791277975+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="checking for existing installplan" channel=stable id=BWQIF namespace=service-telemetry pkg=elasticsearch-eck-operator-certified source=certified-operators sub=elasticsearch-eck-operator-certified 2025-12-12T16:27:00.791277975+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="resolving subscriptions in namespace" id=BWQIF namespace=service-telemetry 2025-12-12T16:27:00.957858891+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=jveVL 2025-12-12T16:27:00.957858891+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=jveVL 2025-12-12T16:27:00.957858891+00:00 stderr F time="2025-12-12T16:27:00Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-8pl6d has not yet reported ready" id=jveVL 2025-12-12T16:27:00.957858891+00:00 stderr F time="2025-12-12T16:27:00Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-8pl6d has not yet reported ready" id=jveVL 2025-12-12T16:27:00.957858891+00:00 stderr F time="2025-12-12T16:27:00Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=jveVL 2025-12-12T16:27:01.079575771+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=ClusterServiceVersion" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=cluster-observability-operator.clusterserviceversion.yaml 2025-12-12T16:27:01.184117997+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_alertmanagerconfigs.yaml 2025-12-12T16:27:01.198221814+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="resolving sources" id=1WKyY namespace=cert-manager-operator 2025-12-12T16:27:01.198269065+00:00 stderr F time="2025-12-12T16:27:01Z" 
level=info msg="checking if subscriptions need update" id=1WKyY namespace=cert-manager-operator 2025-12-12T16:27:01.223676438+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_alertmanagers.yaml 2025-12-12T16:27:01.230374578+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_monitoringstacks.yaml 2025-12-12T16:27:01.237282043+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_podmonitors.yaml 2025-12-12T16:27:01.243788277+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_probes.yaml 2025-12-12T16:27:01.303821207+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_prometheusagents.yaml 2025-12-12T16:27:01.378029235+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_prometheuses.yaml 2025-12-12T16:27:01.379288177+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_prometheusrules.yaml 2025-12-12T16:27:01.438745170+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_scrapeconfigs.yaml 2025-12-12T16:27:01.450977550+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_servicemonitors.yaml 2025-12-12T16:27:01.451918014+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_thanosqueriers.yaml 2025-12-12T16:27:01.472350581+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="unpacking bundles" id=BWQIF namespace=service-telemetry 2025-12-12T16:27:01.487575736+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=agents.agent.k8s.elastic.co.crd.yaml 2025-12-12T16:27:01.499940639+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=apmservers.apm.k8s.elastic.co.crd.yaml 
2025-12-12T16:27:01.503207572+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=beats.beat.k8s.elastic.co.crd.yaml 2025-12-12T16:27:01.504870654+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=monitoring.rhobs_thanosrulers.yaml 2025-12-12T16:27:01.505082519+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=PodDisruptionBudget" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=obo-prometheus-operator-admission-webhook_policy_v1_poddisruptionbudget.yaml 2025-12-12T16:27:01.505402507+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=Service" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=obo-prometheus-operator-admission-webhook_v1_service.yaml 2025-12-12T16:27:01.505741606+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=Service" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=obo-prometheus-operator_v1_service.yaml 2025-12-12T16:27:01.510891966+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=PrometheusRule" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability-operator_monitoring.coreos.com_v1_prometheusrule.yaml 2025-12-12T16:27:01.510891966+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=RoleBinding" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability-operator_rbac.authorization.k8s.io_v1_rolebinding.yaml 2025-12-12T16:27:01.510891966+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=Service" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability-operator_v1_service.yaml 2025-12-12T16:27:01.512313282+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability.openshift.io_observabilityinstallers.yaml 2025-12-12T16:27:01.514895688+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=observability.openshift.io_uiplugins.yaml 2025-12-12T16:27:01.514895688+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticmapsservers.maps.k8s.elastic.co.crd.yaml 2025-12-12T16:27:01.519492754+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=ClusterServiceVersion" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticsearch-eck-operator-certified.clusterserviceversion.yaml 2025-12-12T16:27:01.521218328+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" 
configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticsearchautoscalers.autoscaling.k8s.elastic.co.crd.yaml 2025-12-12T16:27:01.524860340+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=perses.dev_perses.yaml 2025-12-12T16:27:01.526318727+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=perses.dev_persesdashboards.yaml 2025-12-12T16:27:01.526934622+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=perses.dev_persesdatasources.yaml 2025-12-12T16:27:01.526952163+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=ServiceAccount" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=perses_v1_serviceaccount.yaml 2025-12-12T16:27:01.527086946+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=persesdashboard-editor-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:27:01.527226100+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=persesdashboard-viewer-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:27:01.533670233+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=persesdatasource-editor-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:27:01.533670233+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92100b6b5 key=persesdatasource-viewer-role_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:27:01.533670233+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticsearches.elasticsearch.k8s.elastic.co.crd.yaml 2025-12-12T16:27:01.540551357+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=enterprisesearches.enterprisesearch.k8s.elastic.co.crd.yaml 2025-12-12T16:27:01.544238770+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=kibanas.kibana.k8s.elastic.co.crd.yaml 2025-12-12T16:27:01.549295638+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 
key=logstashes.logstash.k8s.elastic.co.crd.yaml 2025-12-12T16:27:01.550686444+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=stackconfigpolicies.stackconfigpolicy.k8s.elastic.co.crd.yaml 2025-12-12T16:27:01.589754172+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="checking for existing installplan" channel=stable-v1 id=1WKyY namespace=cert-manager-operator pkg=openshift-cert-manager-operator source=redhat-operators sub=openshift-cert-manager-operator 2025-12-12T16:27:01.589754172+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="resolving subscriptions in namespace" id=1WKyY namespace=cert-manager-operator 2025-12-12T16:27:01.609065231+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="resolution caused subscription changes, creating installplan" id=BWQIF namespace=service-telemetry 2025-12-12T16:27:01.844982882+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="unpacking bundles" id=1WKyY namespace=cert-manager-operator 2025-12-12T16:27:01.844982882+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="unpacking is not complete yet, requeueing" id=1WKyY namespace=cert-manager-operator 2025-12-12T16:27:01.950930853+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=5rivA 2025-12-12T16:27:01.950930853+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=5rivA 2025-12-12T16:27:01.950930853+00:00 stderr F time="2025-12-12T16:27:01Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-b4n58 has not yet reported ready" id=5rivA 2025-12-12T16:27:01.950930853+00:00 stderr F time="2025-12-12T16:27:01Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-b4n58 has not yet reported ready" id=5rivA 2025-12-12T16:27:01.950930853+00:00 stderr F time="2025-12-12T16:27:01Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=5rivA 2025-12-12T16:27:02.149483898+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:02.149483898+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 
current-pod.namespace=openshift-marketplace 2025-12-12T16:27:02.189392958+00:00 stderr F time="2025-12-12T16:27:02Z" level=warning msg="no installplan found with matching generation, creating new one" id=BWQIF namespace=service-telemetry 2025-12-12T16:27:02.348580607+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:02.348580607+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:02.390216771+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg=syncing id=aJuHM ip=install-t6x4f namespace=service-telemetry phase= 2025-12-12T16:27:02.390216771+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg="skip processing installplan without status - subscription sync responsible for initial status" id=aJuHM ip=install-t6x4f namespace=service-telemetry phase= 2025-12-12T16:27:02.550087357+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=OAOIe 2025-12-12T16:27:02.550087357+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=OAOIe 2025-12-12T16:27:02.603235902+00:00 stderr F time="2025-12-12T16:27:02Z" level=info msg=syncing id=aX7cN ip=install-t6x4f namespace=service-telemetry phase=Installing 2025-12-12T16:27:03.007803982+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=agents.agent.k8s.elastic.co.crd.yaml 2025-12-12T16:27:03.011299900+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=apmservers.apm.k8s.elastic.co.crd.yaml 2025-12-12T16:27:03.013937197+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=beats.beat.k8s.elastic.co.crd.yaml 2025-12-12T16:27:03.015890026+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticmapsservers.maps.k8s.elastic.co.crd.yaml 2025-12-12T16:27:03.019125809+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="added to bundle, Kind=ClusterServiceVersion" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticsearch-eck-operator-certified.clusterserviceversion.yaml 2025-12-12T16:27:03.041928726+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="added to 
bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticsearchautoscalers.autoscaling.k8s.elastic.co.crd.yaml 2025-12-12T16:27:03.041928726+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticsearches.elasticsearch.k8s.elastic.co.crd.yaml 2025-12-12T16:27:03.041928726+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=enterprisesearches.enterprisesearch.k8s.elastic.co.crd.yaml 2025-12-12T16:27:03.041928726+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=kibanas.kibana.k8s.elastic.co.crd.yaml 2025-12-12T16:27:03.074081179+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=logstashes.logstash.k8s.elastic.co.crd.yaml 2025-12-12T16:27:03.074081179+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=stackconfigpolicies.stackconfigpolicy.k8s.elastic.co.crd.yaml 2025-12-12T16:27:03.201228927+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="resolving sources" id=iUaLe namespace=service-telemetry 2025-12-12T16:27:03.201228927+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="checking if subscriptions need update" id=iUaLe namespace=service-telemetry 2025-12-12T16:27:03.560732186+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=jPyUd 2025-12-12T16:27:03.560732186+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=jPyUd 2025-12-12T16:27:03.749260727+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:03.749260727+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:03.794401560+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="subscriptions were updated, wait for a new resolution" id=iUaLe namespace=service-telemetry 2025-12-12T16:27:03.794401560+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="resolving sources" id=I042a namespace=service-telemetry 2025-12-12T16:27:03.794401560+00:00 stderr 
F time="2025-12-12T16:27:03Z" level=info msg="checking if subscriptions need update" id=I042a namespace=service-telemetry 2025-12-12T16:27:03.955838416+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:03.955838416+00:00 stderr F time="2025-12-12T16:27:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:04.599241670+00:00 stderr F time="2025-12-12T16:27:04Z" level=info msg="resolving subscriptions in namespace" id=I042a namespace=service-telemetry 2025-12-12T16:27:04.953886145+00:00 stderr F time="2025-12-12T16:27:04Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=OAOIe 2025-12-12T16:27:04.953886145+00:00 stderr F time="2025-12-12T16:27:04Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=OAOIe 2025-12-12T16:27:05.014298153+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="no subscriptions were updated" id=I042a namespace=service-telemetry 2025-12-12T16:27:05.043480802+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="resolving sources" id=Zbn0m namespace=cert-manager-operator 2025-12-12T16:27:05.043480802+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="checking if subscriptions need update" id=Zbn0m namespace=cert-manager-operator 2025-12-12T16:27:05.203060170+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="error updating InstallPlan status" id=aX7cN ip=install-t6x4f namespace=service-telemetry phase=Installing updateError="Operation cannot be fulfilled on installplans.operators.coreos.com \"install-t6x4f\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:05.203060170+00:00 stderr F E1212 16:27:05.201655 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/install-t6x4f\" failed: error updating InstallPlan status: Operation cannot be fulfilled on installplans.operators.coreos.com \"install-t6x4f\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:05.203060170+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg=syncing id=RC37n ip=install-t6x4f namespace=service-telemetry phase=Installing 2025-12-12T16:27:05.357254143+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:05.357254143+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:05.450513323+00:00 stderr F 
time="2025-12-12T16:27:05Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:05.554848764+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:05.554848764+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:05.595081772+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="checking for existing installplan" channel=stable-v1 id=Zbn0m namespace=cert-manager-operator pkg=openshift-cert-manager-operator source=redhat-operators sub=openshift-cert-manager-operator 2025-12-12T16:27:05.595081772+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="resolving subscriptions in namespace" id=Zbn0m namespace=cert-manager-operator 2025-12-12T16:27:05.850595959+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=agents.agent.k8s.elastic.co.crd.yaml 2025-12-12T16:27:05.850595959+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=apmservers.apm.k8s.elastic.co.crd.yaml 2025-12-12T16:27:05.850595959+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=beats.beat.k8s.elastic.co.crd.yaml 2025-12-12T16:27:05.850595959+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticmapsservers.maps.k8s.elastic.co.crd.yaml 2025-12-12T16:27:05.857651927+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="added to bundle, Kind=ClusterServiceVersion" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticsearch-eck-operator-certified.clusterserviceversion.yaml 2025-12-12T16:27:05.864280975+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticsearchautoscalers.autoscaling.k8s.elastic.co.crd.yaml 2025-12-12T16:27:05.873151930+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticsearches.elasticsearch.k8s.elastic.co.crd.yaml 2025-12-12T16:27:05.877392687+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=enterprisesearches.enterprisesearch.k8s.elastic.co.crd.yaml 2025-12-12T16:27:05.885711797+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" 
configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=kibanas.kibana.k8s.elastic.co.crd.yaml 2025-12-12T16:27:05.891403072+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=logstashes.logstash.k8s.elastic.co.crd.yaml 2025-12-12T16:27:05.892695724+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=stackconfigpolicies.stackconfigpolicy.k8s.elastic.co.crd.yaml 2025-12-12T16:27:05.959695230+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=jPyUd 2025-12-12T16:27:05.959695230+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=jPyUd 2025-12-12T16:27:05.959695230+00:00 stderr F time="2025-12-12T16:27:05Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-b4n58 has not yet reported ready" id=jPyUd 2025-12-12T16:27:05.959695230+00:00 stderr F time="2025-12-12T16:27:05Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-b4n58 has not yet reported ready" id=jPyUd 2025-12-12T16:27:05.959695230+00:00 stderr F time="2025-12-12T16:27:05Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=jPyUd 2025-12-12T16:27:06.002481413+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="resolving sources" id=4yyGK namespace=openshift-operators 2025-12-12T16:27:06.002481413+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="checking if subscriptions need update" id=4yyGK namespace=openshift-operators 2025-12-12T16:27:06.196243127+00:00 stderr F time="2025-12-12T16:27:06Z" level=warning msg="an error was encountered during reconciliation" error="Operation cannot be fulfilled on subscriptions.operators.coreos.com \"elasticsearch-eck-operator-certified\": the object has been modified; please apply your changes to the latest version and try again" reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:06.196243127+00:00 stderr F E1212 16:27:06.193647 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified\" failed: Operation cannot be fulfilled on subscriptions.operators.coreos.com \"elasticsearch-eck-operator-certified\": the object has been modified; please apply your changes to the latest version and try again" 
logger="UnhandledError" 2025-12-12T16:27:06.196703848+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:06.352639615+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=9tdEN 2025-12-12T16:27:06.352778009+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=9tdEN 2025-12-12T16:27:06.464597059+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="unpacking bundles" id=Zbn0m namespace=cert-manager-operator 2025-12-12T16:27:06.464597059+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="unpacking is not complete yet, requeueing" id=Zbn0m namespace=cert-manager-operator 2025-12-12T16:27:06.464597059+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="resolving sources" id=W7Xw7 namespace=cert-manager-operator 2025-12-12T16:27:06.464597059+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="checking if subscriptions need update" id=W7Xw7 namespace=cert-manager-operator 2025-12-12T16:27:06.951238115+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=vGzFE 2025-12-12T16:27:06.951238115+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=vGzFE 2025-12-12T16:27:06.992454398+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="checking for existing installplan" channel=stable-v1 id=W7Xw7 namespace=cert-manager-operator pkg=openshift-cert-manager-operator source=redhat-operators sub=openshift-cert-manager-operator 2025-12-12T16:27:06.992454398+00:00 stderr F time="2025-12-12T16:27:06Z" level=info msg="resolving subscriptions in namespace" id=W7Xw7 namespace=cert-manager-operator 2025-12-12T16:27:07.199710064+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="resolving subscriptions in namespace" id=4yyGK namespace=openshift-operators 2025-12-12T16:27:07.473322698+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="unpacking bundles" id=W7Xw7 namespace=cert-manager-operator 2025-12-12T16:27:07.477219237+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="unpacking is not complete yet, requeueing" id=W7Xw7 namespace=cert-manager-operator 2025-12-12T16:27:07.611240999+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="no subscriptions were updated" id=4yyGK namespace=openshift-operators 2025-12-12T16:27:07.755904160+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true 
current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=9tdEN 2025-12-12T16:27:07.755904160+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=9tdEN 2025-12-12T16:27:07.960135909+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:07.960135909+00:00 stderr F time="2025-12-12T16:27:07Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:08.753248721+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=vGzFE 2025-12-12T16:27:08.753248721+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=vGzFE 2025-12-12T16:27:08.753248721+00:00 stderr F time="2025-12-12T16:27:08Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-b4n58 has not yet reported ready" id=vGzFE 2025-12-12T16:27:08.753248721+00:00 stderr F time="2025-12-12T16:27:08Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-b4n58 has not yet reported ready" id=vGzFE 2025-12-12T16:27:08.753248721+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=vGzFE 2025-12-12T16:27:08.950843742+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=gBalc 2025-12-12T16:27:08.950843742+00:00 stderr F time="2025-12-12T16:27:08Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=gBalc 2025-12-12T16:27:09.152678630+00:00 stderr F time="2025-12-12T16:27:09Z" level=info msg="evaluating 
current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:09.152678630+00:00 stderr F time="2025-12-12T16:27:09Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:09.949705822+00:00 stderr F time="2025-12-12T16:27:09Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=PDiSK 2025-12-12T16:27:09.949784254+00:00 stderr F time="2025-12-12T16:27:09Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=PDiSK 2025-12-12T16:27:10.279756885+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg=syncing id=wgCJd ip=install-t6x4f namespace=service-telemetry phase=Installing 2025-12-12T16:27:10.354703442+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:10.354703442+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:10.360987861+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:10.361104634+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="resolving sources" id=lBZkg namespace=service-telemetry 2025-12-12T16:27:10.361104634+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="checking if subscriptions need update" id=lBZkg namespace=service-telemetry 2025-12-12T16:27:10.394189681+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=agents.agent.k8s.elastic.co.crd.yaml 2025-12-12T16:27:10.403612650+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=apmservers.apm.k8s.elastic.co.crd.yaml 2025-12-12T16:27:10.407258842+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=beats.beat.k8s.elastic.co.crd.yaml 2025-12-12T16:27:10.410108104+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticmapsservers.maps.k8s.elastic.co.crd.yaml 2025-12-12T16:27:10.413416498+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="added to bundle, Kind=ClusterServiceVersion" 
configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticsearch-eck-operator-certified.clusterserviceversion.yaml 2025-12-12T16:27:10.414833954+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticsearchautoscalers.autoscaling.k8s.elastic.co.crd.yaml 2025-12-12T16:27:10.431437544+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=elasticsearches.elasticsearch.k8s.elastic.co.crd.yaml 2025-12-12T16:27:10.438713838+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=enterprisesearches.enterprisesearch.k8s.elastic.co.crd.yaml 2025-12-12T16:27:10.445003627+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=kibanas.kibana.k8s.elastic.co.crd.yaml 2025-12-12T16:27:10.452439755+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=logstashes.logstash.k8s.elastic.co.crd.yaml 2025-12-12T16:27:10.456205201+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e559b8 key=stackconfigpolicies.stackconfigpolicy.k8s.elastic.co.crd.yaml 2025-12-12T16:27:10.594738737+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="resolving subscriptions in namespace" id=lBZkg namespace=service-telemetry 2025-12-12T16:27:10.751240308+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=gBalc 2025-12-12T16:27:10.751240308+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=gBalc 2025-12-12T16:27:10.798966566+00:00 stderr F time="2025-12-12T16:27:10Z" level=info msg="no subscriptions were updated" id=lBZkg namespace=service-telemetry 2025-12-12T16:27:11.349320164+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:11.349320164+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:11.462460198+00:00 stderr F time="2025-12-12T16:27:11Z" level=info 
msg="resolving sources" id=N5gVb namespace=cert-manager-operator 2025-12-12T16:27:11.462460198+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="checking if subscriptions need update" id=N5gVb namespace=cert-manager-operator 2025-12-12T16:27:11.550920827+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=PDiSK 2025-12-12T16:27:11.550920827+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=PDiSK 2025-12-12T16:27:11.551002069+00:00 stderr F time="2025-12-12T16:27:11Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-b4n58 has not yet reported ready" id=PDiSK 2025-12-12T16:27:11.551012489+00:00 stderr F time="2025-12-12T16:27:11Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-b4n58 has not yet reported ready" id=PDiSK 2025-12-12T16:27:11.551020419+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=PDiSK 2025-12-12T16:27:11.591736330+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="checking for existing installplan" channel=stable-v1 id=N5gVb namespace=cert-manager-operator pkg=openshift-cert-manager-operator source=redhat-operators sub=openshift-cert-manager-operator 2025-12-12T16:27:11.591810071+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="resolving subscriptions in namespace" id=N5gVb namespace=cert-manager-operator 2025-12-12T16:27:11.913233326+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="unpacking bundles" id=N5gVb namespace=cert-manager-operator 2025-12-12T16:27:11.954236884+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:11.954236884+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:11.957367413+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=acme.cert-manager.io_challenges.yaml 2025-12-12T16:27:11.957367413+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" 
configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=acme.cert-manager.io_orders.yaml 2025-12-12T16:27:11.957367413+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="added to bundle, Kind=Service" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager-operator-controller-manager-metrics-service_v1_service.yaml 2025-12-12T16:27:11.957367413+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager-operator-metrics-reader_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:27:11.962961565+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="added to bundle, Kind=ClusterServiceVersion" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager-operator.clusterserviceversion.yaml 2025-12-12T16:27:11.962961565+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager.io_certificaterequests.yaml 2025-12-12T16:27:11.966784872+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager.io_certificates.yaml 2025-12-12T16:27:11.999049808+00:00 stderr F time="2025-12-12T16:27:11Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager.io_clusterissuers.yaml 2025-12-12T16:27:12.028632837+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager.io_issuers.yaml 2025-12-12T16:27:12.047399562+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=operator.openshift.io_certmanagers.yaml 2025-12-12T16:27:12.055058676+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=operator.openshift.io_istiocsrs.yaml 2025-12-12T16:27:12.149105665+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="resolution caused subscription changes, creating installplan" id=N5gVb namespace=cert-manager-operator 2025-12-12T16:27:12.188841301+00:00 stderr F time="2025-12-12T16:27:12Z" level=warning msg="no installplan found with matching generation, creating new one" id=N5gVb namespace=cert-manager-operator 2025-12-12T16:27:12.356317629+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:12.356317629+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 
current-pod.namespace=openshift-marketplace 2025-12-12T16:27:12.356894554+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:12.357171851+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="resolving sources" id=x+uPh namespace=service-telemetry 2025-12-12T16:27:12.357171851+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="checking if subscriptions need update" id=x+uPh namespace=service-telemetry 2025-12-12T16:27:12.393681065+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:12.752287761+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:12.752287761+00:00 stderr F time="2025-12-12T16:27:12Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:13.349856654+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:13.349856654+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:13.394028042+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="resolving subscriptions in namespace" id=x+uPh namespace=service-telemetry 2025-12-12T16:27:13.550285907+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:13.550285907+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:13.795536984+00:00 stderr F time="2025-12-12T16:27:13Z" level=info msg="no subscriptions were updated" id=x+uPh namespace=service-telemetry 2025-12-12T16:27:14.150369524+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:14.150369524+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:14.164201154+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:14.192710556+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="resolving sources" id=GAA5h namespace=cert-manager-operator 2025-12-12T16:27:14.192710556+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="checking if subscriptions need update" id=GAA5h namespace=cert-manager-operator 
2025-12-12T16:27:14.349088094+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:14.349088094+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:14.951494760+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:14.951494760+00:00 stderr F time="2025-12-12T16:27:14Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:15.148954307+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:15.149006459+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:15.151352548+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:15.151446940+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="resolving sources" id=ExG7i namespace=service-telemetry 2025-12-12T16:27:15.151446940+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="checking if subscriptions need update" id=ExG7i namespace=service-telemetry 2025-12-12T16:27:15.188237112+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="skipping update: installplan already created" id=GAA5h namespace=cert-manager-operator 2025-12-12T16:27:15.188237112+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="all subscriptions up to date" id=GAA5h namespace=cert-manager-operator 2025-12-12T16:27:15.752367918+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:15.752367918+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:15.952239447+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:15.952239447+00:00 stderr F time="2025-12-12T16:27:15Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:16.198378526+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="resolving subscriptions in namespace" 
id=ExG7i namespace=service-telemetry 2025-12-12T16:27:16.418053956+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg=syncing id=/qPoM ip=install-k6ssv namespace=cert-manager-operator phase=Installing 2025-12-12T16:27:16.550440216+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:16.550440216+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:16.569270433+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:16.569270433+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="resolving sources" id=aTTA3 namespace=cert-manager-operator 2025-12-12T16:27:16.569270433+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="checking if subscriptions need update" id=aTTA3 namespace=cert-manager-operator 2025-12-12T16:27:16.593082706+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="error updating InstallPlan status" id=wgCJd ip=install-t6x4f namespace=service-telemetry phase=Installing updateError="Operation cannot be fulfilled on installplans.operators.coreos.com \"install-t6x4f\": the object has been modified; please apply your changes to the latest version and try again" 2025-12-12T16:27:16.593260920+00:00 stderr F E1212 16:27:16.593238 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/install-t6x4f\" failed: error updating InstallPlan status: Operation cannot be fulfilled on installplans.operators.coreos.com \"install-t6x4f\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:16.594437020+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg=syncing id=+totc ip=install-sdtz5 namespace=openshift-operators phase=Complete 2025-12-12T16:27:16.594501942+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg=syncing id=gOZkh ip=install-t6x4f namespace=service-telemetry phase=Complete 2025-12-12T16:27:16.599919819+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg=syncing id=hSF8T ip=install-t6x4f namespace=service-telemetry phase=Complete 2025-12-12T16:27:16.749359531+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:16.749359531+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:16.793992020+00:00 stderr F time="2025-12-12T16:27:16Z" level=info msg="no subscriptions were updated" id=ExG7i namespace=service-telemetry 2025-12-12T16:27:17.037695508+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=acme.cert-manager.io_challenges.yaml 2025-12-12T16:27:17.037695508+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="added to bundle, 
Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=acme.cert-manager.io_orders.yaml 2025-12-12T16:27:17.037695508+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="added to bundle, Kind=Service" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager-operator-controller-manager-metrics-service_v1_service.yaml 2025-12-12T16:27:17.037695508+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager-operator-metrics-reader_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:27:17.037695508+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="added to bundle, Kind=ClusterServiceVersion" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager-operator.clusterserviceversion.yaml 2025-12-12T16:27:17.037695508+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager.io_certificaterequests.yaml 2025-12-12T16:27:17.037695508+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager.io_certificates.yaml 2025-12-12T16:27:17.053124509+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager.io_clusterissuers.yaml 2025-12-12T16:27:17.069027431+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager.io_issuers.yaml 2025-12-12T16:27:17.073329540+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=operator.openshift.io_certmanagers.yaml 2025-12-12T16:27:17.078700126+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=operator.openshift.io_istiocsrs.yaml 2025-12-12T16:27:17.149123638+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:17.149123638+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:17.155066009+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:17.550365023+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 
current-pod.namespace=openshift-marketplace 2025-12-12T16:27:17.550365023+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:17.951539647+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:17.951539647+00:00 stderr F time="2025-12-12T16:27:17Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:18.401271589+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="resolving sources" id=rqrAS namespace=service-telemetry 2025-12-12T16:27:18.401271589+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="checking if subscriptions need update" id=rqrAS namespace=service-telemetry 2025-12-12T16:27:18.550471695+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:18.550471695+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:18.591561025+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="error updating subscription status" channel=stable-v1 error="Operation cannot be fulfilled on subscriptions.operators.coreos.com \"openshift-cert-manager-operator\": the object has been modified; please apply your changes to the latest version and try again" id=aTTA3 namespace=cert-manager-operator pkg=openshift-cert-manager-operator source=redhat-operators sub=openshift-cert-manager-operator 2025-12-12T16:27:18.591561025+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="error recording current state of CSV in status: error updating Subscription status: Operation cannot be fulfilled on subscriptions.operators.coreos.com \"openshift-cert-manager-operator\": the object has been modified; please apply your changes to the latest version and try again" channel=stable-v1 id=aTTA3 namespace=cert-manager-operator pkg=openshift-cert-manager-operator source=redhat-operators sub=openshift-cert-manager-operator 2025-12-12T16:27:18.591561025+00:00 stderr F E1212 16:27:18.591136 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/cert-manager-operator\" failed: error updating Subscription status: Operation cannot be fulfilled on subscriptions.operators.coreos.com \"openshift-cert-manager-operator\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:27:18.593254958+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="resolving sources" id=i1fMK namespace=cert-manager-operator 2025-12-12T16:27:18.593254958+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="checking if subscriptions need update" id=i1fMK namespace=cert-manager-operator 2025-12-12T16:27:18.951225318+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="evaluating 
current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:18.951225318+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:18.958886101+00:00 stderr F time="2025-12-12T16:27:18Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:19.348580773+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=KiR/i 2025-12-12T16:27:19.348580773+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=KiR/i 2025-12-12T16:27:19.594452226+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="resolving subscriptions in namespace" id=rqrAS namespace=service-telemetry 2025-12-12T16:27:19.750260379+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:19.750260379+00:00 stderr F time="2025-12-12T16:27:19Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:20.149322779+00:00 stderr F time="2025-12-12T16:27:20Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:20.149322779+00:00 stderr F time="2025-12-12T16:27:20Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:20.192627385+00:00 stderr F time="2025-12-12T16:27:20Z" level=info msg="no subscriptions were updated" id=rqrAS namespace=service-telemetry 2025-12-12T16:27:20.392937635+00:00 stderr F time="2025-12-12T16:27:20Z" level=info msg="subscriptions were updated, wait for a new resolution" id=i1fMK namespace=cert-manager-operator 2025-12-12T16:27:20.392989496+00:00 stderr F time="2025-12-12T16:27:20Z" level=info msg="resolving sources" id=ohtnw namespace=openshift-operators 2025-12-12T16:27:20.392989496+00:00 stderr F time="2025-12-12T16:27:20Z" level=info msg="checking if subscriptions need update" id=ohtnw namespace=openshift-operators 2025-12-12T16:27:20.601522054+00:00 stderr F time="2025-12-12T16:27:20Z" level=info msg=syncing id=KZcL/ ip=install-k6ssv namespace=cert-manager-operator phase=Installing 2025-12-12T16:27:20.949817219+00:00 stderr F time="2025-12-12T16:27:20Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 
2025-12-12T16:27:20.949817219+00:00 stderr F time="2025-12-12T16:27:20Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:20.961807122+00:00 stderr F time="2025-12-12T16:27:20Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:21.148627680+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=KiR/i 2025-12-12T16:27:21.148627680+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=KiR/i 2025-12-12T16:27:21.222660354+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=acme.cert-manager.io_challenges.yaml 2025-12-12T16:27:21.223480095+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=acme.cert-manager.io_orders.yaml 2025-12-12T16:27:21.223599388+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="added to bundle, Kind=Service" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager-operator-controller-manager-metrics-service_v1_service.yaml 2025-12-12T16:27:21.223679830+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="added to bundle, Kind=ClusterRole" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager-operator-metrics-reader_rbac.authorization.k8s.io_v1_clusterrole.yaml 2025-12-12T16:27:21.226470120+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="added to bundle, Kind=ClusterServiceVersion" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager-operator.clusterserviceversion.yaml 2025-12-12T16:27:21.227502526+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager.io_certificaterequests.yaml 2025-12-12T16:27:21.229831455+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager.io_certificates.yaml 2025-12-12T16:27:21.243030539+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager.io_clusterissuers.yaml 2025-12-12T16:27:21.260372438+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" 
configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=cert-manager.io_issuers.yaml 2025-12-12T16:27:21.264676837+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=operator.openshift.io_certmanagers.yaml 2025-12-12T16:27:21.270208347+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="added to bundle, Kind=CustomResourceDefinition" configmap=openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a36aa3 key=operator.openshift.io_istiocsrs.yaml 2025-12-12T16:27:21.348685893+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:21.348685893+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:21.397874508+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="resolving sources" id=Beq+t namespace=cert-manager-operator 2025-12-12T16:27:21.397874508+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="checking if subscriptions need update" id=Beq+t namespace=cert-manager-operator 2025-12-12T16:27:21.596532366+00:00 stderr F time="2025-12-12T16:27:21Z" level=info msg="resolving subscriptions in namespace" id=ohtnw namespace=openshift-operators 2025-12-12T16:27:22.007982339+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg=syncing id=wZxBt ip=install-k6ssv namespace=cert-manager-operator phase=Complete 2025-12-12T16:27:22.203977540+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="no subscriptions were updated" id=ohtnw namespace=openshift-operators 2025-12-12T16:27:22.348894967+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=joMDI 2025-12-12T16:27:22.348894967+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=joMDI 2025-12-12T16:27:22.393298381+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="resolving subscriptions in namespace" id=Beq+t namespace=cert-manager-operator 2025-12-12T16:27:22.549659279+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:22.549659279+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:22.817282222+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="no subscriptions were updated" 
id=Beq+t namespace=cert-manager-operator 2025-12-12T16:27:22.949239800+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:22.949239800+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:22.951622691+00:00 stderr F time="2025-12-12T16:27:22Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:23.009318791+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="resolving sources" id=4Epof namespace=service-telemetry 2025-12-12T16:27:23.009318791+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="checking if subscriptions need update" id=4Epof namespace=service-telemetry 2025-12-12T16:27:23.551236676+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=HloF3 2025-12-12T16:27:23.551236676+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=HloF3 2025-12-12T16:27:23.947712891+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:23.947712891+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:23.993276574+00:00 stderr F time="2025-12-12T16:27:23Z" level=info msg="resolving subscriptions in namespace" id=4Epof namespace=service-telemetry 2025-12-12T16:27:24.393466182+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="no subscriptions were updated" id=4Epof namespace=service-telemetry 2025-12-12T16:27:24.550819844+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=joMDI 2025-12-12T16:27:24.550819844+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=joMDI 2025-12-12T16:27:24.550910607+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="catalog update required at 2025-12-12 16:27:24.550825025 +0000 UTC m=+637.666582998" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=joMDI 
2025-12-12T16:27:24.949041663+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:24.949041663+00:00 stderr F time="2025-12-12T16:27:24Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:25.356776872+00:00 stderr F I1212 16:27:25.356510 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:27:25.356776872+00:00 stderr F time="2025-12-12T16:27:25Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=joMDI 2025-12-12T16:27:25.356776872+00:00 stderr F time="2025-12-12T16:27:25Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=joMDI 2025-12-12T16:27:25.356776872+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=joMDI 2025-12-12T16:27:25.551023698+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:25.551023698+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:25.562305244+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="resolving sources" id=axUtK namespace=openshift-operators 2025-12-12T16:27:25.562305244+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="checking if subscriptions need update" id=axUtK namespace=openshift-operators 2025-12-12T16:27:25.598847259+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="resolving subscriptions in namespace" id=axUtK namespace=openshift-operators 2025-12-12T16:27:25.601496046+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg=syncing id=jV9V0 ip=install-k6ssv namespace=cert-manager-operator phase=Complete 2025-12-12T16:27:25.809397848+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="no subscriptions were updated" id=axUtK namespace=openshift-operators 2025-12-12T16:27:25.951802662+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=HloF3 2025-12-12T16:27:25.951802662+00:00 stderr F time="2025-12-12T16:27:25Z" level=info msg="of 1 pods matching label selector, 1 
have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=HloF3 2025-12-12T16:27:26.349978479+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:26.349978479+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:26.748446933+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=DmgpV 2025-12-12T16:27:26.748527775+00:00 stderr F time="2025-12-12T16:27:26Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=DmgpV 2025-12-12T16:27:27.549724192+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:27.549800054+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:27.559864379+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:27.560125646+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="resolving sources" id=sWPBM namespace=cert-manager-operator 2025-12-12T16:27:27.560125646+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="checking if subscriptions need update" id=sWPBM namespace=cert-manager-operator 2025-12-12T16:27:27.579795313+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="resolving subscriptions in namespace" id=sWPBM namespace=cert-manager-operator 2025-12-12T16:27:27.598671831+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="no subscriptions were updated" id=sWPBM namespace=cert-manager-operator 2025-12-12T16:27:27.756993788+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=UHmH5 2025-12-12T16:27:27.756993788+00:00 stderr F time="2025-12-12T16:27:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=UHmH5 
2025-12-12T16:27:28.552623644+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=DmgpV 2025-12-12T16:27:28.552623644+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=DmgpV 2025-12-12T16:27:28.752646277+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:28.752646277+00:00 stderr F time="2025-12-12T16:27:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:29.549530245+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=UHmH5 2025-12-12T16:27:29.549530245+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=UHmH5 2025-12-12T16:27:29.549530245+00:00 stderr F time="2025-12-12T16:27:29Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=UHmH5 2025-12-12T16:27:29.549530245+00:00 stderr F time="2025-12-12T16:27:29Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=UHmH5 2025-12-12T16:27:29.549530245+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=UHmH5 2025-12-12T16:27:29.751369923+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=FkKKo 2025-12-12T16:27:29.751369923+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators 
catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=FkKKo 2025-12-12T16:27:29.948727408+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:29.948727408+00:00 stderr F time="2025-12-12T16:27:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:30.962468214+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=e5llp 2025-12-12T16:27:30.962550636+00:00 stderr F time="2025-12-12T16:27:30Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=e5llp 2025-12-12T16:27:31.153091938+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:31.153091938+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:31.168912629+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:27:31.169025981+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="resolving sources" id=4SSaS namespace=cert-manager-operator 2025-12-12T16:27:31.169025981+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="checking if subscriptions need update" id=4SSaS namespace=cert-manager-operator 2025-12-12T16:27:31.181055106+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="resolving subscriptions in namespace" id=4SSaS namespace=cert-manager-operator 2025-12-12T16:27:31.301032902+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="no subscriptions were updated" id=4SSaS namespace=cert-manager-operator 2025-12-12T16:27:31.355942632+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=FkKKo 2025-12-12T16:27:31.355942632+00:00 stderr F time="2025-12-12T16:27:31Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=FkKKo 2025-12-12T16:27:32.352139275+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="evaluating current pod" 
correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:32.352139275+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:32.552234349+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=s9E2C 2025-12-12T16:27:32.552234349+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=s9E2C 2025-12-12T16:27:32.752781045+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=e5llp 2025-12-12T16:27:32.752781045+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=e5llp 2025-12-12T16:27:32.752866367+00:00 stderr F time="2025-12-12T16:27:32Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=e5llp 2025-12-12T16:27:32.752866367+00:00 stderr F time="2025-12-12T16:27:32Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=e5llp 2025-12-12T16:27:32.752866367+00:00 stderr F time="2025-12-12T16:27:32Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=e5llp 2025-12-12T16:27:33.549882008+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:33.549882008+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:33.952271991+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="evaluating current pod" 
catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=lT2l2 2025-12-12T16:27:33.952271991+00:00 stderr F time="2025-12-12T16:27:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=lT2l2 2025-12-12T16:27:34.349088824+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=s9E2C 2025-12-12T16:27:34.349088824+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=s9E2C 2025-12-12T16:27:34.752507584+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:34.752507584+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:27:34.787770397+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="resolving sources" id=A7YF0 namespace=cert-manager-operator 2025-12-12T16:27:34.787770397+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="checking if subscriptions need update" id=A7YF0 namespace=cert-manager-operator 2025-12-12T16:27:34.800882389+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="resolving subscriptions in namespace" id=A7YF0 namespace=cert-manager-operator 2025-12-12T16:27:34.819054078+00:00 stderr F time="2025-12-12T16:27:34Z" level=info msg="no subscriptions were updated" id=A7YF0 namespace=cert-manager-operator 2025-12-12T16:27:35.150232720+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=lT2l2 2025-12-12T16:27:35.150232720+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=lT2l2 2025-12-12T16:27:35.150232720+00:00 stderr F time="2025-12-12T16:27:35Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported 
ready" id=lT2l2 2025-12-12T16:27:35.150232720+00:00 stderr F time="2025-12-12T16:27:35Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=lT2l2 2025-12-12T16:27:35.150232720+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=lT2l2 2025-12-12T16:27:35.556256946+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=DGdCl 2025-12-12T16:27:35.556256946+00:00 stderr F time="2025-12-12T16:27:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=DGdCl 2025-12-12T16:27:36.151742767+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=DGdCl 2025-12-12T16:27:36.151742767+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=DGdCl 2025-12-12T16:27:36.151742767+00:00 stderr F time="2025-12-12T16:27:36Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=DGdCl 2025-12-12T16:27:36.151742767+00:00 stderr F time="2025-12-12T16:27:36Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=DGdCl 2025-12-12T16:27:36.151799059+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=DGdCl 2025-12-12T16:27:36.258045838+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="resolving sources" id=InvUI namespace=cert-manager 2025-12-12T16:27:36.258045838+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="checking if subscriptions need update" id=InvUI namespace=cert-manager 2025-12-12T16:27:36.281252925+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="No 
subscriptions were found in namespace cert-manager" id=InvUI namespace=cert-manager 2025-12-12T16:27:36.315923872+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="resolving sources" id=CKduI namespace=cert-manager 2025-12-12T16:27:36.315923872+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="checking if subscriptions need update" id=CKduI namespace=cert-manager 2025-12-12T16:27:36.377399298+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="No subscriptions were found in namespace cert-manager" id=CKduI namespace=cert-manager 2025-12-12T16:27:36.377399298+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="resolving sources" id=c30WC namespace=cert-manager 2025-12-12T16:27:36.377399298+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="checking if subscriptions need update" id=c30WC namespace=cert-manager 2025-12-12T16:27:36.479487082+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="No subscriptions were found in namespace cert-manager" id=c30WC namespace=cert-manager 2025-12-12T16:27:36.558615735+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=hQOII 2025-12-12T16:27:36.558615735+00:00 stderr F time="2025-12-12T16:27:36Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=hQOII 2025-12-12T16:27:37.148992806+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=hQOII 2025-12-12T16:27:37.148992806+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=hQOII 2025-12-12T16:27:37.149053118+00:00 stderr F time="2025-12-12T16:27:37Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=hQOII 2025-12-12T16:27:37.149053118+00:00 stderr F time="2025-12-12T16:27:37Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=hQOII 2025-12-12T16:27:37.149053118+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=hQOII 2025-12-12T16:27:37.827699583+00:00 stderr 
F time="2025-12-12T16:27:37Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=td2Xd 2025-12-12T16:27:37.827699583+00:00 stderr F time="2025-12-12T16:27:37Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=td2Xd 2025-12-12T16:27:38.148410119+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=td2Xd 2025-12-12T16:27:38.148410119+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=td2Xd 2025-12-12T16:27:38.753675218+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=FhfDw 2025-12-12T16:27:38.753675218+00:00 stderr F time="2025-12-12T16:27:38Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=FhfDw 2025-12-12T16:27:39.350956944+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=FhfDw 2025-12-12T16:27:39.350956944+00:00 stderr F time="2025-12-12T16:27:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=FhfDw 2025-12-12T16:27:55.365546245+00:00 stderr F time="2025-12-12T16:27:55Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=puJy0 2025-12-12T16:27:55.365546245+00:00 stderr F time="2025-12-12T16:27:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=puJy0 
2025-12-12T16:27:55.376729218+00:00 stderr F time="2025-12-12T16:27:55Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=puJy0 2025-12-12T16:27:55.376729218+00:00 stderr F time="2025-12-12T16:27:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=puJy0 2025-12-12T16:27:56.023221430+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=5GxAd 2025-12-12T16:27:56.023221430+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=5GxAd 2025-12-12T16:27:56.032072064+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=5GxAd 2025-12-12T16:27:56.032072064+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=5GxAd 2025-12-12T16:27:56.032141206+00:00 stderr F time="2025-12-12T16:27:56Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=5GxAd 2025-12-12T16:27:56.032141206+00:00 stderr F time="2025-12-12T16:27:56Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-9wq8j has not yet reported ready" id=5GxAd 2025-12-12T16:27:56.032150766+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=5GxAd 2025-12-12T16:27:56.077493154+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="resolving sources" id=kkqP5 namespace=service-telemetry 2025-12-12T16:27:56.077493154+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="checking if subscriptions need update" id=kkqP5 namespace=service-telemetry 2025-12-12T16:27:56.090065562+00:00 stderr F 
time="2025-12-12T16:27:56Z" level=info msg="resolving subscriptions in namespace" id=kkqP5 namespace=service-telemetry 2025-12-12T16:27:56.095018247+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="no subscriptions were updated" id=kkqP5 namespace=service-telemetry 2025-12-12T16:27:56.957314951+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=hVpKQ 2025-12-12T16:27:56.957314951+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=hVpKQ 2025-12-12T16:27:56.969218912+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=hVpKQ 2025-12-12T16:27:56.969218912+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=hVpKQ 2025-12-12T16:27:56.975311207+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=ahn2l 2025-12-12T16:27:56.975311207+00:00 stderr F time="2025-12-12T16:27:56Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=ahn2l 2025-12-12T16:27:57.561583935+00:00 stderr F time="2025-12-12T16:27:57Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=ahn2l 2025-12-12T16:27:57.561583935+00:00 stderr F time="2025-12-12T16:27:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=ahn2l 2025-12-12T16:28:26.043156974+00:00 stderr F time="2025-12-12T16:28:26Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=Mxo0U 2025-12-12T16:28:26.043156974+00:00 stderr F time="2025-12-12T16:28:26Z" level=info 
msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=Mxo0U 2025-12-12T16:28:26.052847489+00:00 stderr F time="2025-12-12T16:28:26Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=Mxo0U 2025-12-12T16:28:26.052847489+00:00 stderr F time="2025-12-12T16:28:26Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=Mxo0U 2025-12-12T16:29:23.030042587+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:23.033203676+00:00 stderr F time="2025-12-12T16:29:23Z" level=error msg="registry service not healthy: could not get service account" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="serviceaccounts \"infrawatch-operators\" not found" id=JSUuk 2025-12-12T16:29:23.033293359+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="registry service status invalid, need to overwrite" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=JSUuk 2025-12-12T16:29:23.037828883+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:23.037828883+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:23.045925987+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:23.045925987+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:23.052129013+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="of 0 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=JSUuk 2025-12-12T16:29:23.052129013+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="registry pods invalid, need to overwrite" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=JSUuk 2025-12-12T16:29:23.052129013+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="creating desired pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=JSUuk pod.name= pod.namespace=service-telemetry 2025-12-12T16:29:23.052225726+00:00 stderr F time="2025-12-12T16:29:23Z" 
level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:23.052225726+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:23.239353534+00:00 stderr F I1212 16:29:23.237630 1 warnings.go:110] "Warning: would violate PodSecurity \"restricted:latest\": allowPrivilegeEscalation != false (container \"registry-server\" must set securityContext.allowPrivilegeEscalation=false), unrestricted capabilities (container \"registry-server\" must set securityContext.capabilities.drop=[\"ALL\"]), runAsNonRoot != true (pod or container \"registry-server\" must set securityContext.runAsNonRoot=true), seccompProfile (pod or container \"registry-server\" must set securityContext.seccompProfile.type to \"RuntimeDefault\" or \"Localhost\")" 2025-12-12T16:29:23.832540928+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="evaluating current pod" correctHash=false correctImages=true current-pod.name=infrawatch-operators-cj72z current-pod.namespace=service-telemetry 2025-12-12T16:29:23.834376955+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="pod spec diff: v1.PodSpec{\n- \tVolumes: []v1.Volume{\n- \t\t{\n- \t\t\tName: \"kube-api-access-sd628\",\n- \t\t\tVolumeSource: v1.VolumeSource{Projected: s\"&ProjectedVolumeSource{Sources:[\"...},\n- \t\t},\n- \t},\n+ \tVolumes: nil,\n \tInitContainers: nil,\n \tContainers: []v1.Container{\n \t\t{\n \t\t\t... // 3 identical fields\n \t\t\tArgs: nil,\n \t\t\tWorkingDir: \"\",\n \t\t\tPorts: []v1.ContainerPort{\n \t\t\t\t{\n \t\t\t\t\tName: \"grpc\",\n \t\t\t\t\tHostPort: 0,\n \t\t\t\t\tContainerPort: 50051,\n- \t\t\t\t\tProtocol: \"TCP\",\n+ \t\t\t\t\tProtocol: \"\",\n \t\t\t\t\tHostIP: \"\",\n \t\t\t\t},\n \t\t\t},\n \t\t\tEnvFrom: nil,\n \t\t\tEnv: nil,\n \t\t\tResources: {Requests: {s\"cpu\": {i: {...}, s: \"10m\", Format: \"DecimalSI\"}, s\"memory\": {i: {...}, s: \"50Mi\", Format: \"BinarySI\"}}},\n \t\t\tResizePolicy: nil,\n \t\t\tRestartPolicy: nil,\n- \t\t\tVolumeMounts: []v1.VolumeMount{\n- \t\t\t\t{\n- \t\t\t\t\tName: \"kube-api-access-sd628\",\n- \t\t\t\t\tReadOnly: true,\n- \t\t\t\t\tMountPath: \"/var/run/secrets/kubernetes.io/serviceaccount\",\n- \t\t\t\t},\n- \t\t\t},\n+ \t\t\tVolumeMounts: nil,\n \t\t\tVolumeDevices: nil,\n \t\t\tLivenessProbe: &v1.Probe{\n \t\t\t\tProbeHandler: {Exec: &{Command: {\"grpc_health_probe\", \"-addr=:50051\"}}},\n \t\t\t\tInitialDelaySeconds: 10,\n \t\t\t\tTimeoutSeconds: 5,\n- \t\t\t\tPeriodSeconds: 10,\n+ \t\t\t\tPeriodSeconds: 0,\n- \t\t\t\tSuccessThreshold: 1,\n+ \t\t\t\tSuccessThreshold: 0,\n- \t\t\t\tFailureThreshold: 3,\n+ \t\t\t\tFailureThreshold: 0,\n \t\t\t\tTerminationGracePeriodSeconds: nil,\n \t\t\t},\n \t\t\tReadinessProbe: &v1.Probe{\n \t\t\t\tProbeHandler: {Exec: &{Command: {\"grpc_health_probe\", \"-addr=:50051\"}}},\n \t\t\t\tInitialDelaySeconds: 5,\n \t\t\t\tTimeoutSeconds: 5,\n- \t\t\t\tPeriodSeconds: 10,\n+ \t\t\t\tPeriodSeconds: 0,\n- \t\t\t\tSuccessThreshold: 1,\n+ \t\t\t\tSuccessThreshold: 0,\n- \t\t\t\tFailureThreshold: 3,\n+ \t\t\t\tFailureThreshold: 0,\n \t\t\t\tTerminationGracePeriodSeconds: nil,\n \t\t\t},\n \t\t\tStartupProbe: &v1.Probe{\n \t\t\t\t... 
// 2 identical fields\n \t\t\t\tTimeoutSeconds: 5,\n \t\t\t\tPeriodSeconds: 10,\n- \t\t\t\tSuccessThreshold: 1,\n+ \t\t\t\tSuccessThreshold: 0,\n \t\t\t\tFailureThreshold: 10,\n \t\t\t\tTerminationGracePeriodSeconds: nil,\n \t\t\t},\n \t\t\tLifecycle: nil,\n- \t\t\tTerminationMessagePath: \"/dev/termination-log\",\n+ \t\t\tTerminationMessagePath: \"\",\n \t\t\tTerminationMessagePolicy: \"FallbackToLogsOnError\",\n \t\t\tImagePullPolicy: \"Always\",\n \t\t\tSecurityContext: &v1.SecurityContext{\n- \t\t\t\tCapabilities: s\"&Capabilities{Add:[],Drop:[MKNOD],}\",\n+ \t\t\t\tCapabilities: nil,\n \t\t\t\tPrivileged: nil,\n \t\t\t\tSELinuxOptions: nil,\n \t\t\t\t... // 9 identical fields\n \t\t\t},\n \t\t\tStdin: false,\n \t\t\tStdinOnce: false,\n \t\t\tTTY: false,\n \t\t},\n \t},\n \tEphemeralContainers: nil,\n- \tRestartPolicy: \"Always\",\n+ \tRestartPolicy: \"\",\n- \tTerminationGracePeriodSeconds: &30,\n+ \tTerminationGracePeriodSeconds: nil,\n \tActiveDeadlineSeconds: nil,\n- \tDNSPolicy: \"ClusterFirst\",\n+ \tDNSPolicy: \"\",\n \tNodeSelector: {\"kubernetes.io/os\": \"linux\"},\n \tServiceAccountName: \"infrawatch-operators\",\n- \tDeprecatedServiceAccount: \"infrawatch-operators\",\n+ \tDeprecatedServiceAccount: \"\",\n \tAutomountServiceAccountToken: nil,\n- \tNodeName: \"crc\",\n+ \tNodeName: \"\",\n \tHostNetwork: false,\n \tHostPID: false,\n \tHostIPC: false,\n \tShareProcessNamespace: nil,\n- \tSecurityContext: s\"&PodSecurityContext{SELinuxOptions:&SELinuxOptions{User:,Role:,Type:,Level:s0:c26,c10,},RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmo\"...,\n+ \tSecurityContext: nil,\n \tImagePullSecrets: {{Name: \"infrawatch-operators-dockercfg-n6ssc\"}},\n \tHostname: \"\",\n \tSubdomain: \"\",\n \tAffinity: nil,\n- \tSchedulerName: \"default-scheduler\",\n+ \tSchedulerName: \"\",\n- \tTolerations: []v1.Toleration{\n- \t\t{\n- \t\t\tKey: \"node.kubernetes.io/not-ready\",\n- \t\t\tOperator: \"Exists\",\n- \t\t\tEffect: \"NoExecute\",\n- \t\t\tTolerationSeconds: &300,\n- \t\t},\n- \t\t{\n- \t\t\tKey: \"node.kubernetes.io/unreachable\",\n- \t\t\tOperator: \"Exists\",\n- \t\t\tEffect: \"NoExecute\",\n- \t\t\tTolerationSeconds: &300,\n- \t\t},\n- \t\t{\n- \t\t\tKey: \"node.kubernetes.io/memory-pressure\",\n- \t\t\tOperator: \"Exists\",\n- \t\t\tEffect: \"NoSchedule\",\n- \t\t},\n- \t},\n+ \tTolerations: nil,\n \tHostAliases: nil,\n \tPriorityClassName: \"\",\n- \tPriority: &0,\n+ \tPriority: nil,\n \tDNSConfig: nil,\n \tReadinessGates: nil,\n \tRuntimeClassName: nil,\n- \tEnableServiceLinks: &true,\n+ \tEnableServiceLinks: nil,\n- \tPreemptionPolicy: &\"PreemptLowerPriority\",\n+ \tPreemptionPolicy: nil,\n \tOverhead: nil,\n \tTopologySpreadConstraints: nil,\n \t... 
// 6 identical fields\n }\n" correctHash=false correctImages=true current-pod.name=infrawatch-operators-cj72z current-pod.namespace=service-telemetry
2025-12-12T16:29:23.834376955+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="of 1 pods matching label selector, 0 have the correct images and matching hash" correctHash=false correctImages=true current-pod.name=infrawatch-operators-cj72z current-pod.namespace=service-telemetry
2025-12-12T16:29:23.834407975+00:00 stderr F time="2025-12-12T16:29:23Z" level=error msg="registry service not healthy: one or more required resources are missing" isCurrentServiceAccountNil=false isServiceNil=false numCurrentPods=0
2025-12-12T16:29:23.844876049+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink=
2025-12-12T16:29:23.844937961+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="resolving sources" id=+5/bM namespace=service-telemetry
2025-12-12T16:29:23.844937961+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="checking if subscriptions need update" id=+5/bM namespace=service-telemetry
2025-12-12T16:29:23.855207210+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="resolving subscriptions in namespace" id=+5/bM namespace=service-telemetry
2025-12-12T16:29:23.861054047+00:00 stderr F I1212 16:29:23.860164 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators
2025-12-12T16:29:23.868492355+00:00 stderr F E1212 16:29:23.868418 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators" logger="UnhandledError"
2025-12-12T16:29:23.878399684+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="resolving sources" id=ijZY0 namespace=service-telemetry
2025-12-12T16:29:23.878399684+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="checking if subscriptions need update" id=ijZY0 namespace=service-telemetry
2025-12-12T16:29:23.888234072+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="resolving subscriptions in namespace" id=ijZY0 namespace=service-telemetry
2025-12-12T16:29:23.896987643+00:00 stderr F E1212 16:29:23.896009 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators" logger="UnhandledError"
2025-12-12T16:29:23.896987643+00:00 stderr F I1212 16:29:23.896083 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators
2025-12-12T16:29:23.907541259+00:00 stderr F time="2025-12-12T16:29:23Z" level=info msg="resolving sources" id=dsbJy namespace=service-telemetry
2025-12-12T16:29:23.907541259+00:00
stderr F time="2025-12-12T16:29:23Z" level=info msg="checking if subscriptions need update" id=dsbJy namespace=service-telemetry 2025-12-12T16:29:24.054383681+00:00 stderr F time="2025-12-12T16:29:24Z" level=info msg="resolving subscriptions in namespace" id=dsbJy namespace=service-telemetry 2025-12-12T16:29:24.155658474+00:00 stderr F time="2025-12-12T16:29:24Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:24.255368078+00:00 stderr F E1212 16:29:24.255263 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators" logger="UnhandledError" 2025-12-12T16:29:24.255415259+00:00 stderr F I1212 16:29:24.255369 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators 2025-12-12T16:29:24.276932582+00:00 stderr F time="2025-12-12T16:29:24Z" level=info msg="resolving sources" id=33LsM namespace=service-telemetry 2025-12-12T16:29:24.277029824+00:00 stderr F time="2025-12-12T16:29:24Z" level=info msg="checking if subscriptions need update" id=33LsM namespace=service-telemetry 2025-12-12T16:29:24.633244114+00:00 stderr F time="2025-12-12T16:29:24Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=false correctImages=true current-pod.name=infrawatch-operators-cj72z current-pod.namespace=service-telemetry id=hHj/B 2025-12-12T16:29:24.634389223+00:00 stderr F time="2025-12-12T16:29:24Z" level=info msg="pod spec diff: v1.PodSpec{\n- \tVolumes: []v1.Volume{\n- \t\t{\n- \t\t\tName: \"kube-api-access-sd628\",\n- \t\t\tVolumeSource: v1.VolumeSource{Projected: s\"&ProjectedVolumeSource{Sources:[\"...},\n- \t\t},\n- \t},\n+ \tVolumes: nil,\n \tInitContainers: nil,\n \tContainers: []v1.Container{\n \t\t{\n \t\t\t... 
// 3 identical fields\n \t\t\tArgs: nil,\n \t\t\tWorkingDir: \"\",\n \t\t\tPorts: []v1.ContainerPort{\n \t\t\t\t{\n \t\t\t\t\tName: \"grpc\",\n \t\t\t\t\tHostPort: 0,\n \t\t\t\t\tContainerPort: 50051,\n- \t\t\t\t\tProtocol: \"TCP\",\n+ \t\t\t\t\tProtocol: \"\",\n \t\t\t\t\tHostIP: \"\",\n \t\t\t\t},\n \t\t\t},\n \t\t\tEnvFrom: nil,\n \t\t\tEnv: nil,\n \t\t\tResources: {Requests: {s\"cpu\": {i: {...}, s: \"10m\", Format: \"DecimalSI\"}, s\"memory\": {i: {...}, s: \"50Mi\", Format: \"BinarySI\"}}},\n \t\t\tResizePolicy: nil,\n \t\t\tRestartPolicy: nil,\n- \t\t\tVolumeMounts: []v1.VolumeMount{\n- \t\t\t\t{\n- \t\t\t\t\tName: \"kube-api-access-sd628\",\n- \t\t\t\t\tReadOnly: true,\n- \t\t\t\t\tMountPath: \"/var/run/secrets/kubernetes.io/serviceaccount\",\n- \t\t\t\t},\n- \t\t\t},\n+ \t\t\tVolumeMounts: nil,\n \t\t\tVolumeDevices: nil,\n \t\t\tLivenessProbe: &v1.Probe{\n \t\t\t\tProbeHandler: {Exec: &{Command: {\"grpc_health_probe\", \"-addr=:50051\"}}},\n \t\t\t\tInitialDelaySeconds: 10,\n \t\t\t\tTimeoutSeconds: 5,\n- \t\t\t\tPeriodSeconds: 10,\n+ \t\t\t\tPeriodSeconds: 0,\n- \t\t\t\tSuccessThreshold: 1,\n+ \t\t\t\tSuccessThreshold: 0,\n- \t\t\t\tFailureThreshold: 3,\n+ \t\t\t\tFailureThreshold: 0,\n \t\t\t\tTerminationGracePeriodSeconds: nil,\n \t\t\t},\n \t\t\tReadinessProbe: &v1.Probe{\n \t\t\t\tProbeHandler: {Exec: &{Command: {\"grpc_health_probe\", \"-addr=:50051\"}}},\n \t\t\t\tInitialDelaySeconds: 5,\n \t\t\t\tTimeoutSeconds: 5,\n- \t\t\t\tPeriodSeconds: 10,\n+ \t\t\t\tPeriodSeconds: 0,\n- \t\t\t\tSuccessThreshold: 1,\n+ \t\t\t\tSuccessThreshold: 0,\n- \t\t\t\tFailureThreshold: 3,\n+ \t\t\t\tFailureThreshold: 0,\n \t\t\t\tTerminationGracePeriodSeconds: nil,\n \t\t\t},\n \t\t\tStartupProbe: &v1.Probe{\n \t\t\t\t... // 2 identical fields\n \t\t\t\tTimeoutSeconds: 5,\n \t\t\t\tPeriodSeconds: 10,\n- \t\t\t\tSuccessThreshold: 1,\n+ \t\t\t\tSuccessThreshold: 0,\n \t\t\t\tFailureThreshold: 10,\n \t\t\t\tTerminationGracePeriodSeconds: nil,\n \t\t\t},\n \t\t\tLifecycle: nil,\n- \t\t\tTerminationMessagePath: \"/dev/termination-log\",\n+ \t\t\tTerminationMessagePath: \"\",\n \t\t\tTerminationMessagePolicy: \"FallbackToLogsOnError\",\n \t\t\tImagePullPolicy: \"Always\",\n \t\t\tSecurityContext: &v1.SecurityContext{\n- \t\t\t\tCapabilities: s\"&Capabilities{Add:[],Drop:[MKNOD],}\",\n+ \t\t\t\tCapabilities: nil,\n \t\t\t\tPrivileged: nil,\n \t\t\t\tSELinuxOptions: nil,\n \t\t\t\t... 
// 9 identical fields\n \t\t\t},\n \t\t\tStdin: false,\n \t\t\tStdinOnce: false,\n \t\t\tTTY: false,\n \t\t},\n \t},\n \tEphemeralContainers: nil,\n- \tRestartPolicy: \"Always\",\n+ \tRestartPolicy: \"\",\n- \tTerminationGracePeriodSeconds: &30,\n+ \tTerminationGracePeriodSeconds: nil,\n \tActiveDeadlineSeconds: nil,\n- \tDNSPolicy: \"ClusterFirst\",\n+ \tDNSPolicy: \"\",\n \tNodeSelector: {\"kubernetes.io/os\": \"linux\"},\n \tServiceAccountName: \"infrawatch-operators\",\n- \tDeprecatedServiceAccount: \"infrawatch-operators\",\n+ \tDeprecatedServiceAccount: \"\",\n \tAutomountServiceAccountToken: nil,\n- \tNodeName: \"crc\",\n+ \tNodeName: \"\",\n \tHostNetwork: false,\n \tHostPID: false,\n \tHostIPC: false,\n \tShareProcessNamespace: nil,\n- \tSecurityContext: s\"&PodSecurityContext{SELinuxOptions:&SELinuxOptions{User:,Role:,Type:,Level:s0:c26,c10,},RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmo\"...,\n+ \tSecurityContext: nil,\n \tImagePullSecrets: {{Name: \"infrawatch-operators-dockercfg-n6ssc\"}},\n \tHostname: \"\",\n \tSubdomain: \"\",\n \tAffinity: nil,\n- \tSchedulerName: \"default-scheduler\",\n+ \tSchedulerName: \"\",\n- \tTolerations: []v1.Toleration{\n- \t\t{\n- \t\t\tKey: \"node.kubernetes.io/not-ready\",\n- \t\t\tOperator: \"Exists\",\n- \t\t\tEffect: \"NoExecute\",\n- \t\t\tTolerationSeconds: &300,\n- \t\t},\n- \t\t{\n- \t\t\tKey: \"node.kubernetes.io/unreachable\",\n- \t\t\tOperator: \"Exists\",\n- \t\t\tEffect: \"NoExecute\",\n- \t\t\tTolerationSeconds: &300,\n- \t\t},\n- \t\t{\n- \t\t\tKey: \"node.kubernetes.io/memory-pressure\",\n- \t\t\tOperator: \"Exists\",\n- \t\t\tEffect: \"NoSchedule\",\n- \t\t},\n- \t},\n+ \tTolerations: nil,\n \tHostAliases: nil,\n \tPriorityClassName: \"\",\n- \tPriority: &0,\n+ \tPriority: nil,\n \tDNSConfig: nil,\n \tReadinessGates: nil,\n \tRuntimeClassName: nil,\n- \tEnableServiceLinks: &true,\n+ \tEnableServiceLinks: nil,\n- \tPreemptionPolicy: &\"PreemptLowerPriority\",\n+ \tPreemptionPolicy: nil,\n \tOverhead: nil,\n \tTopologySpreadConstraints: nil,\n \t... 
// 6 identical fields\n }\n" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=false correctImages=true current-pod.name=infrawatch-operators-cj72z current-pod.namespace=service-telemetry id=hHj/B 2025-12-12T16:29:24.634436754+00:00 stderr F time="2025-12-12T16:29:24Z" level=info msg="of 1 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=false correctImages=true current-pod.name=infrawatch-operators-cj72z current-pod.namespace=service-telemetry id=hHj/B 2025-12-12T16:29:24.634471915+00:00 stderr F time="2025-12-12T16:29:24Z" level=error msg="registry service not healthy: one or more required resources are missing" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=hHj/B isCurrentServiceAccountNil=false isServiceNil=false numCurrentPods=0 2025-12-12T16:29:24.651122285+00:00 stderr F time="2025-12-12T16:29:24Z" level=info msg="checking for existing installplan" channel=unstable id=33LsM namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:24.651122285+00:00 stderr F time="2025-12-12T16:29:24Z" level=info msg="resolving subscriptions in namespace" id=33LsM namespace=service-telemetry 2025-12-12T16:29:24.852370138+00:00 stderr F I1212 16:29:24.852262 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators 2025-12-12T16:29:25.035035064+00:00 stderr F time="2025-12-12T16:29:25Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:25.035035064+00:00 stderr F time="2025-12-12T16:29:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:25.249841529+00:00 stderr F E1212 16:29:25.249759 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators" logger="UnhandledError" 2025-12-12T16:29:25.291459518+00:00 stderr F time="2025-12-12T16:29:25Z" level=info msg="resolving sources" id=mfS0m namespace=service-telemetry 2025-12-12T16:29:25.291459518+00:00 stderr F time="2025-12-12T16:29:25Z" level=info msg="checking if subscriptions need update" id=mfS0m namespace=service-telemetry 2025-12-12T16:29:25.633144883+00:00 stderr F time="2025-12-12T16:29:25Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:25.633144883+00:00 stderr F time="2025-12-12T16:29:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 
2025-12-12T16:29:25.650261364+00:00 stderr F time="2025-12-12T16:29:25Z" level=info msg="checking for existing installplan" channel=unstable id=mfS0m namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:25.650261364+00:00 stderr F time="2025-12-12T16:29:25Z" level=info msg="resolving subscriptions in namespace" id=mfS0m namespace=service-telemetry 2025-12-12T16:29:25.851015385+00:00 stderr F E1212 16:29:25.850927 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators" logger="UnhandledError" 2025-12-12T16:29:25.851092537+00:00 stderr F I1212 16:29:25.851005 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators 2025-12-12T16:29:25.932504450+00:00 stderr F time="2025-12-12T16:29:25Z" level=info msg="resolving sources" id=/APfg namespace=service-telemetry 2025-12-12T16:29:25.932504450+00:00 stderr F time="2025-12-12T16:29:25Z" level=info msg="checking if subscriptions need update" id=/APfg namespace=service-telemetry 2025-12-12T16:29:26.251992234+00:00 stderr F time="2025-12-12T16:29:26Z" level=info msg="checking for existing installplan" channel=unstable id=/APfg namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:26.251992234+00:00 stderr F time="2025-12-12T16:29:26Z" level=info msg="resolving subscriptions in namespace" id=/APfg namespace=service-telemetry 2025-12-12T16:29:26.450498719+00:00 stderr F E1212 16:29:26.450410 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators" logger="UnhandledError" 2025-12-12T16:29:26.450656923+00:00 stderr F I1212 16:29:26.450578 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators 2025-12-12T16:29:26.612290278+00:00 stderr F time="2025-12-12T16:29:26Z" level=info msg="resolving sources" id=RnSDM namespace=service-telemetry 2025-12-12T16:29:26.612290278+00:00 stderr F time="2025-12-12T16:29:26Z" level=info msg="checking if subscriptions need update" id=RnSDM namespace=service-telemetry 2025-12-12T16:29:26.634318363+00:00 stderr F time="2025-12-12T16:29:26Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:26.634389065+00:00 stderr F time="2025-12-12T16:29:26Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true 
current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:26.851298343+00:00 stderr F time="2025-12-12T16:29:26Z" level=info msg="checking for existing installplan" channel=unstable id=RnSDM namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:26.851402996+00:00 stderr F time="2025-12-12T16:29:26Z" level=info msg="resolving subscriptions in namespace" id=RnSDM namespace=service-telemetry 2025-12-12T16:29:27.034268936+00:00 stderr F time="2025-12-12T16:29:27Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=false correctImages=true current-pod.name=infrawatch-operators-cj72z current-pod.namespace=service-telemetry id=hHj/B 2025-12-12T16:29:27.035632470+00:00 stderr F time="2025-12-12T16:29:27Z" level=info msg="pod spec diff: v1.PodSpec{\n- \tVolumes: []v1.Volume{\n- \t\t{\n- \t\t\tName: \"kube-api-access-sd628\",\n- \t\t\tVolumeSource: v1.VolumeSource{Projected: s\"&ProjectedVolumeSource{Sources:[\"...},\n- \t\t},\n- \t},\n+ \tVolumes: nil,\n \tInitContainers: nil,\n \tContainers: []v1.Container{\n \t\t{\n \t\t\t... // 3 identical fields\n \t\t\tArgs: nil,\n \t\t\tWorkingDir: \"\",\n \t\t\tPorts: []v1.ContainerPort{\n \t\t\t\t{\n \t\t\t\t\tName: \"grpc\",\n \t\t\t\t\tHostPort: 0,\n \t\t\t\t\tContainerPort: 50051,\n- \t\t\t\t\tProtocol: \"TCP\",\n+ \t\t\t\t\tProtocol: \"\",\n \t\t\t\t\tHostIP: \"\",\n \t\t\t\t},\n \t\t\t},\n \t\t\tEnvFrom: nil,\n \t\t\tEnv: nil,\n \t\t\tResources: {Requests: {s\"cpu\": {i: {...}, s: \"10m\", Format: \"DecimalSI\"}, s\"memory\": {i: {...}, s: \"50Mi\", Format: \"BinarySI\"}}},\n \t\t\tResizePolicy: nil,\n \t\t\tRestartPolicy: nil,\n- \t\t\tVolumeMounts: []v1.VolumeMount{\n- \t\t\t\t{\n- \t\t\t\t\tName: \"kube-api-access-sd628\",\n- \t\t\t\t\tReadOnly: true,\n- \t\t\t\t\tMountPath: \"/var/run/secrets/kubernetes.io/serviceaccount\",\n- \t\t\t\t},\n- \t\t\t},\n+ \t\t\tVolumeMounts: nil,\n \t\t\tVolumeDevices: nil,\n \t\t\tLivenessProbe: &v1.Probe{\n \t\t\t\tProbeHandler: {Exec: &{Command: {\"grpc_health_probe\", \"-addr=:50051\"}}},\n \t\t\t\tInitialDelaySeconds: 10,\n \t\t\t\tTimeoutSeconds: 5,\n- \t\t\t\tPeriodSeconds: 10,\n+ \t\t\t\tPeriodSeconds: 0,\n- \t\t\t\tSuccessThreshold: 1,\n+ \t\t\t\tSuccessThreshold: 0,\n- \t\t\t\tFailureThreshold: 3,\n+ \t\t\t\tFailureThreshold: 0,\n \t\t\t\tTerminationGracePeriodSeconds: nil,\n \t\t\t},\n \t\t\tReadinessProbe: &v1.Probe{\n \t\t\t\tProbeHandler: {Exec: &{Command: {\"grpc_health_probe\", \"-addr=:50051\"}}},\n \t\t\t\tInitialDelaySeconds: 5,\n \t\t\t\tTimeoutSeconds: 5,\n- \t\t\t\tPeriodSeconds: 10,\n+ \t\t\t\tPeriodSeconds: 0,\n- \t\t\t\tSuccessThreshold: 1,\n+ \t\t\t\tSuccessThreshold: 0,\n- \t\t\t\tFailureThreshold: 3,\n+ \t\t\t\tFailureThreshold: 0,\n \t\t\t\tTerminationGracePeriodSeconds: nil,\n \t\t\t},\n \t\t\tStartupProbe: &v1.Probe{\n \t\t\t\t... 
// 2 identical fields\n \t\t\t\tTimeoutSeconds: 5,\n \t\t\t\tPeriodSeconds: 10,\n- \t\t\t\tSuccessThreshold: 1,\n+ \t\t\t\tSuccessThreshold: 0,\n \t\t\t\tFailureThreshold: 10,\n \t\t\t\tTerminationGracePeriodSeconds: nil,\n \t\t\t},\n \t\t\tLifecycle: nil,\n- \t\t\tTerminationMessagePath: \"/dev/termination-log\",\n+ \t\t\tTerminationMessagePath: \"\",\n \t\t\tTerminationMessagePolicy: \"FallbackToLogsOnError\",\n \t\t\tImagePullPolicy: \"Always\",\n \t\t\tSecurityContext: &v1.SecurityContext{\n- \t\t\t\tCapabilities: s\"&Capabilities{Add:[],Drop:[MKNOD],}\",\n+ \t\t\t\tCapabilities: nil,\n \t\t\t\tPrivileged: nil,\n \t\t\t\tSELinuxOptions: nil,\n \t\t\t\t... // 9 identical fields\n \t\t\t},\n \t\t\tStdin: false,\n \t\t\tStdinOnce: false,\n \t\t\tTTY: false,\n \t\t},\n \t},\n \tEphemeralContainers: nil,\n- \tRestartPolicy: \"Always\",\n+ \tRestartPolicy: \"\",\n- \tTerminationGracePeriodSeconds: &30,\n+ \tTerminationGracePeriodSeconds: nil,\n \tActiveDeadlineSeconds: nil,\n- \tDNSPolicy: \"ClusterFirst\",\n+ \tDNSPolicy: \"\",\n \tNodeSelector: {\"kubernetes.io/os\": \"linux\"},\n \tServiceAccountName: \"infrawatch-operators\",\n- \tDeprecatedServiceAccount: \"infrawatch-operators\",\n+ \tDeprecatedServiceAccount: \"\",\n \tAutomountServiceAccountToken: nil,\n- \tNodeName: \"crc\",\n+ \tNodeName: \"\",\n \tHostNetwork: false,\n \tHostPID: false,\n \tHostIPC: false,\n \tShareProcessNamespace: nil,\n- \tSecurityContext: s\"&PodSecurityContext{SELinuxOptions:&SELinuxOptions{User:,Role:,Type:,Level:s0:c26,c10,},RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmo\"...,\n+ \tSecurityContext: nil,\n \tImagePullSecrets: {{Name: \"infrawatch-operators-dockercfg-n6ssc\"}},\n \tHostname: \"\",\n \tSubdomain: \"\",\n \tAffinity: nil,\n- \tSchedulerName: \"default-scheduler\",\n+ \tSchedulerName: \"\",\n- \tTolerations: []v1.Toleration{\n- \t\t{\n- \t\t\tKey: \"node.kubernetes.io/not-ready\",\n- \t\t\tOperator: \"Exists\",\n- \t\t\tEffect: \"NoExecute\",\n- \t\t\tTolerationSeconds: &300,\n- \t\t},\n- \t\t{\n- \t\t\tKey: \"node.kubernetes.io/unreachable\",\n- \t\t\tOperator: \"Exists\",\n- \t\t\tEffect: \"NoExecute\",\n- \t\t\tTolerationSeconds: &300,\n- \t\t},\n- \t\t{\n- \t\t\tKey: \"node.kubernetes.io/memory-pressure\",\n- \t\t\tOperator: \"Exists\",\n- \t\t\tEffect: \"NoSchedule\",\n- \t\t},\n- \t},\n+ \tTolerations: nil,\n \tHostAliases: nil,\n \tPriorityClassName: \"\",\n- \tPriority: &0,\n+ \tPriority: nil,\n \tDNSConfig: nil,\n \tReadinessGates: nil,\n \tRuntimeClassName: nil,\n- \tEnableServiceLinks: &true,\n+ \tEnableServiceLinks: nil,\n- \tPreemptionPolicy: &\"PreemptLowerPriority\",\n+ \tPreemptionPolicy: nil,\n \tOverhead: nil,\n \tTopologySpreadConstraints: nil,\n \t... 
// 6 identical fields\n }\n" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=false correctImages=true current-pod.name=infrawatch-operators-cj72z current-pod.namespace=service-telemetry id=hHj/B 2025-12-12T16:29:27.035720353+00:00 stderr F time="2025-12-12T16:29:27Z" level=info msg="of 1 pods matching label selector, 0 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=false correctImages=true current-pod.name=infrawatch-operators-cj72z current-pod.namespace=service-telemetry id=hHj/B 2025-12-12T16:29:27.035762724+00:00 stderr F time="2025-12-12T16:29:27Z" level=info msg="registry pods invalid, need to overwrite" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=hHj/B 2025-12-12T16:29:27.035860116+00:00 stderr F time="2025-12-12T16:29:27Z" level=info msg="deleting current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=hHj/B pod.name=infrawatch-operators-cj72z pod.namespace=service-telemetry 2025-12-12T16:29:27.051248264+00:00 stderr F E1212 16:29:27.051153 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators" logger="UnhandledError" 2025-12-12T16:29:27.051304606+00:00 stderr F I1212 16:29:27.051241 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators 2025-12-12T16:29:27.233975831+00:00 stderr F time="2025-12-12T16:29:27Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:27.233975831+00:00 stderr F time="2025-12-12T16:29:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:27.372855752+00:00 stderr F time="2025-12-12T16:29:27Z" level=info msg="resolving sources" id=wq+pQ namespace=service-telemetry 2025-12-12T16:29:27.372855752+00:00 stderr F time="2025-12-12T16:29:27Z" level=info msg="checking if subscriptions need update" id=wq+pQ namespace=service-telemetry 2025-12-12T16:29:27.457173298+00:00 stderr F time="2025-12-12T16:29:27Z" level=info msg="checking for existing installplan" channel=unstable id=wq+pQ namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:27.457289981+00:00 stderr F time="2025-12-12T16:29:27Z" level=info msg="resolving subscriptions in namespace" id=wq+pQ namespace=service-telemetry 2025-12-12T16:29:27.649323892+00:00 stderr F E1212 16:29:27.649247 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators" logger="UnhandledError" 
2025-12-12T16:29:27.649403604+00:00 stderr F I1212 16:29:27.649311 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators 2025-12-12T16:29:27.837752943+00:00 stderr F time="2025-12-12T16:29:27Z" level=info msg="creating desired pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=hHj/B pod.name= pod.namespace=service-telemetry 2025-12-12T16:29:28.232984966+00:00 stderr F time="2025-12-12T16:29:28Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:28.232984966+00:00 stderr F time="2025-12-12T16:29:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:28.291201963+00:00 stderr F time="2025-12-12T16:29:28Z" level=info msg="resolving sources" id=WCOP3 namespace=service-telemetry 2025-12-12T16:29:28.291201963+00:00 stderr F time="2025-12-12T16:29:28Z" level=info msg="checking if subscriptions need update" id=WCOP3 namespace=service-telemetry 2025-12-12T16:29:28.303093913+00:00 stderr F time="2025-12-12T16:29:28Z" level=info msg="checking for existing installplan" channel=unstable id=WCOP3 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:28.303093913+00:00 stderr F time="2025-12-12T16:29:28Z" level=info msg="resolving subscriptions in namespace" id=WCOP3 namespace=service-telemetry 2025-12-12T16:29:28.310209113+00:00 stderr F I1212 16:29:28.310122 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: no registry client established for catalogsource service-telemetry/infrawatch-operators 2025-12-12T16:29:28.637106194+00:00 stderr F I1212 16:29:28.637021 1 warnings.go:110] "Warning: would violate PodSecurity \"restricted:latest\": allowPrivilegeEscalation != false (container \"registry-server\" must set securityContext.allowPrivilegeEscalation=false), unrestricted capabilities (container \"registry-server\" must set securityContext.capabilities.drop=[\"ALL\"]), runAsNonRoot != true (pod or container \"registry-server\" must set securityContext.runAsNonRoot=true), seccompProfile (pod or container \"registry-server\" must set securityContext.seccompProfile.type to \"RuntimeDefault\" or \"Localhost\")" 2025-12-12T16:29:28.637168656+00:00 stderr F time="2025-12-12T16:29:28Z" level=info msg="multiple pods found for selector" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=hHj/B selector="olm.catalogSource=infrawatch-operators" 2025-12-12T16:29:28.832650674+00:00 stderr F time="2025-12-12T16:29:28Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 
current-pod.namespace=openshift-marketplace 2025-12-12T16:29:28.832650674+00:00 stderr F time="2025-12-12T16:29:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:29.833468105+00:00 stderr F time="2025-12-12T16:29:29Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:29.833468105+00:00 stderr F time="2025-12-12T16:29:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:29.838855651+00:00 stderr F time="2025-12-12T16:29:29Z" level=warning msg="an error was encountered during reconciliation" error="Operation cannot be fulfilled on subscriptions.operators.coreos.com \"elasticsearch-eck-operator-certified\": the object has been modified; please apply your changes to the latest version and try again" reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:29.838855651+00:00 stderr F E1212 16:29:29.838824 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified\" failed: Operation cannot be fulfilled on subscriptions.operators.coreos.com \"elasticsearch-eck-operator-certified\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:29:29.840203115+00:00 stderr F time="2025-12-12T16:29:29Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:30.233953882+00:00 stderr F time="2025-12-12T16:29:30Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=XFstZ 2025-12-12T16:29:30.233953882+00:00 stderr F time="2025-12-12T16:29:30Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=XFstZ 2025-12-12T16:29:30.435438831+00:00 stderr F time="2025-12-12T16:29:30Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:30.435438831+00:00 stderr F time="2025-12-12T16:29:30Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:30.441235598+00:00 stderr F time="2025-12-12T16:29:30Z" level=warning msg="an error was encountered during reconciliation" error="Operation cannot be fulfilled on subscriptions.operators.coreos.com \"service-telemetry-operator\": the object has been modified; please apply your changes to the latest version and try again" reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:30.441313840+00:00 stderr F E1212 16:29:30.441268 1 queueinformer_operator.go:312] "Unhandled Error" 
err="sync \"service-telemetry/service-telemetry-operator\" failed: Operation cannot be fulfilled on subscriptions.operators.coreos.com \"service-telemetry-operator\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:29:30.442669454+00:00 stderr F time="2025-12-12T16:29:30Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:31.432918089+00:00 stderr F time="2025-12-12T16:29:31Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:31.432918089+00:00 stderr F time="2025-12-12T16:29:31Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:31.832748458+00:00 stderr F time="2025-12-12T16:29:31Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:31.832748458+00:00 stderr F time="2025-12-12T16:29:31Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:32.234406884+00:00 stderr F time="2025-12-12T16:29:32Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=XFstZ 2025-12-12T16:29:32.234406884+00:00 stderr F time="2025-12-12T16:29:32Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=XFstZ 2025-12-12T16:29:32.234930847+00:00 stderr F time="2025-12-12T16:29:32Z" level=info msg="state.Key.Namespace=service-telemetry state.Key.Name=infrawatch-operators state.State=CONNECTING" 2025-12-12T16:29:32.239850961+00:00 stderr F time="2025-12-12T16:29:32Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"infrawatch-operators\": the object has been modified; please apply your changes to the latest version and try again" id=XFstZ 2025-12-12T16:29:32.239900902+00:00 stderr F E1212 16:29:32.239849 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/infrawatch-operators\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"infrawatch-operators\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:29:32.242943679+00:00 stderr F time="2025-12-12T16:29:32Z" level=info msg="state.Key.Namespace=service-telemetry state.Key.Name=infrawatch-operators state.State=TRANSIENT_FAILURE" 2025-12-12T16:29:32.633421153+00:00 stderr F time="2025-12-12T16:29:32Z" level=info msg="evaluating current pod" correctHash=true correctImages=true 
current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:32.633421153+00:00 stderr F time="2025-12-12T16:29:32Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:33.033900649+00:00 stderr F time="2025-12-12T16:29:33Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:33.033900649+00:00 stderr F time="2025-12-12T16:29:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:33.433323509+00:00 stderr F time="2025-12-12T16:29:33Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=XTOco 2025-12-12T16:29:33.433323509+00:00 stderr F time="2025-12-12T16:29:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=XTOco 2025-12-12T16:29:33.834331629+00:00 stderr F time="2025-12-12T16:29:33Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:33.834331629+00:00 stderr F time="2025-12-12T16:29:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:34.235333039+00:00 stderr F time="2025-12-12T16:29:34Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:34.235333039+00:00 stderr F time="2025-12-12T16:29:34Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:35.034951549+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:35.034951549+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:35.047421363+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:35.047421363+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="resolving sources" id=XPC2m namespace=service-telemetry 2025-12-12T16:29:35.047421363+00:00 stderr F time="2025-12-12T16:29:35Z" level=info 
msg="checking if subscriptions need update" id=XPC2m namespace=service-telemetry 2025-12-12T16:29:35.061565729+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="checking for existing installplan" channel=unstable id=XPC2m namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:35.061565729+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="resolving subscriptions in namespace" id=XPC2m namespace=service-telemetry 2025-12-12T16:29:35.064919664+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:35.071396987+00:00 stderr F I1212 16:29:35.071161 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:35.085386890+00:00 stderr F E1212 16:29:35.085291 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:35.092286854+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="resolving sources" id=LSs0m namespace=service-telemetry 2025-12-12T16:29:35.092286854+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="checking if subscriptions need update" id=LSs0m namespace=service-telemetry 2025-12-12T16:29:35.106306917+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="checking for existing installplan" channel=unstable id=LSs0m namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:35.106306917+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="resolving subscriptions in namespace" id=LSs0m namespace=service-telemetry 2025-12-12T16:29:35.232972031+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=XTOco 2025-12-12T16:29:35.232972031+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=XTOco 2025-12-12T16:29:35.241321151+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:35.242128062+00:00 stderr F E1212 16:29:35.241675 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = 
Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:35.242128062+00:00 stderr F I1212 16:29:35.241729 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:35.253788906+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="resolving sources" id=IoCx8 namespace=service-telemetry 2025-12-12T16:29:35.253788906+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="checking if subscriptions need update" id=IoCx8 namespace=service-telemetry 2025-12-12T16:29:35.431944066+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:35.431944066+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:35.840401494+00:00 stderr F time="2025-12-12T16:29:35Z" level=warning msg="an error was encountered during reconciliation" error="Operation cannot be fulfilled on subscriptions.operators.coreos.com \"service-telemetry-operator\": the object has been modified; please apply your changes to the latest version and try again" reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:35.840401494+00:00 stderr F E1212 16:29:35.840387 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/service-telemetry-operator\" failed: Operation cannot be fulfilled on subscriptions.operators.coreos.com \"service-telemetry-operator\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:29:35.841601664+00:00 stderr F time="2025-12-12T16:29:35Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:36.042806756+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="checking for existing installplan" channel=unstable id=IoCx8 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:36.042806756+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="resolving subscriptions in namespace" id=IoCx8 namespace=service-telemetry 2025-12-12T16:29:36.233138045+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:36.233138045+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:36.239689650+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="requesting snapshot for 
catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:36.244009479+00:00 stderr F E1212 16:29:36.243169 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:36.244009479+00:00 stderr F I1212 16:29:36.243444 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:36.265408368+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="resolving sources" id=BMkMt namespace=service-telemetry 2025-12-12T16:29:36.265408368+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="checking if subscriptions need update" id=BMkMt namespace=service-telemetry 2025-12-12T16:29:36.632829771+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=b7PvD 2025-12-12T16:29:36.632829771+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=b7PvD 2025-12-12T16:29:36.642664679+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="checking for existing installplan" channel=unstable id=BMkMt namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:36.642664679+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="resolving subscriptions in namespace" id=BMkMt namespace=service-telemetry 2025-12-12T16:29:36.838728012+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:36.842078157+00:00 stderr F E1212 16:29:36.842023 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:36.842194879+00:00 stderr F I1212 16:29:36.842144 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = 
"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:36.883622564+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="resolving sources" id=oetKv namespace=service-telemetry 2025-12-12T16:29:36.883622564+00:00 stderr F time="2025-12-12T16:29:36Z" level=info msg="checking if subscriptions need update" id=oetKv namespace=service-telemetry 2025-12-12T16:29:37.034032596+00:00 stderr F time="2025-12-12T16:29:37Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:37.034032596+00:00 stderr F time="2025-12-12T16:29:37Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:37.248327319+00:00 stderr F time="2025-12-12T16:29:37Z" level=info msg="checking for existing installplan" channel=unstable id=oetKv namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:37.248327319+00:00 stderr F time="2025-12-12T16:29:37Z" level=info msg="resolving subscriptions in namespace" id=oetKv namespace=service-telemetry 2025-12-12T16:29:37.433375984+00:00 stderr F time="2025-12-12T16:29:37Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:37.433375984+00:00 stderr F time="2025-12-12T16:29:37Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:37.437864237+00:00 stderr F time="2025-12-12T16:29:37Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:37.442704839+00:00 stderr F E1212 16:29:37.441283 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:37.442704839+00:00 stderr F I1212 16:29:37.441525 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:37.522915801+00:00 stderr F time="2025-12-12T16:29:37Z" level=info msg="resolving sources" id=IPwhJ namespace=service-telemetry 2025-12-12T16:29:37.522915801+00:00 stderr F time="2025-12-12T16:29:37Z" level=info msg="checking if subscriptions need update" id=IPwhJ namespace=service-telemetry 2025-12-12T16:29:37.843286708+00:00 stderr F time="2025-12-12T16:29:37Z" level=info msg="checking for existing installplan" channel=unstable id=IPwhJ namespace=service-telemetry 
pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:37.843286708+00:00 stderr F time="2025-12-12T16:29:37Z" level=info msg="resolving subscriptions in namespace" id=IPwhJ namespace=service-telemetry 2025-12-12T16:29:38.039656909+00:00 stderr F time="2025-12-12T16:29:38Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:38.043135876+00:00 stderr F E1212 16:29:38.043012 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:38.043278350+00:00 stderr F I1212 16:29:38.043216 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:38.204796052+00:00 stderr F time="2025-12-12T16:29:38Z" level=info msg="resolving sources" id=Asp3p namespace=service-telemetry 2025-12-12T16:29:38.204796052+00:00 stderr F time="2025-12-12T16:29:38Z" level=info msg="checking if subscriptions need update" id=Asp3p namespace=service-telemetry 2025-12-12T16:29:38.232958682+00:00 stderr F time="2025-12-12T16:29:38Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:38.232958682+00:00 stderr F time="2025-12-12T16:29:38Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:38.433069307+00:00 stderr F time="2025-12-12T16:29:38Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=b7PvD 2025-12-12T16:29:38.433069307+00:00 stderr F time="2025-12-12T16:29:38Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=b7PvD 2025-12-12T16:29:38.442935386+00:00 stderr F time="2025-12-12T16:29:38Z" level=info msg="checking for existing installplan" channel=unstable id=Asp3p namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:38.442993347+00:00 stderr F time="2025-12-12T16:29:38Z" level=info msg="resolving subscriptions in namespace" id=Asp3p namespace=service-telemetry 2025-12-12T16:29:38.633020558+00:00 stderr F time="2025-12-12T16:29:38Z" level=info msg="evaluating current pod" correctHash=true correctImages=true 
current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:38.633020558+00:00 stderr F time="2025-12-12T16:29:38Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:38.640075306+00:00 stderr F time="2025-12-12T16:29:38Z" level=error msg="UpdateStatus - error while setting CatalogSource status" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="Operation cannot be fulfilled on catalogsources.operators.coreos.com \"infrawatch-operators\": the object has been modified; please apply your changes to the latest version and try again" id=b7PvD 2025-12-12T16:29:38.640120817+00:00 stderr F E1212 16:29:38.640104 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/infrawatch-operators\" failed: Operation cannot be fulfilled on catalogsources.operators.coreos.com \"infrawatch-operators\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:29:38.839746850+00:00 stderr F time="2025-12-12T16:29:38Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:38.842927080+00:00 stderr F E1212 16:29:38.842803 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:38.843027172+00:00 stderr F I1212 16:29:38.842913 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:39.164796833+00:00 stderr F time="2025-12-12T16:29:39Z" level=info msg="resolving sources" id=D8sob namespace=service-telemetry 2025-12-12T16:29:39.164796833+00:00 stderr F time="2025-12-12T16:29:39Z" level=info msg="checking if subscriptions need update" id=D8sob namespace=service-telemetry 2025-12-12T16:29:39.241977029+00:00 stderr F time="2025-12-12T16:29:39Z" level=info msg="checking for existing installplan" channel=unstable id=D8sob namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:39.241977029+00:00 stderr F time="2025-12-12T16:29:39Z" level=info msg="resolving subscriptions in namespace" id=D8sob namespace=service-telemetry 2025-12-12T16:29:39.433616260+00:00 stderr F time="2025-12-12T16:29:39Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:39.433616260+00:00 stderr F time="2025-12-12T16:29:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true 
current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:39.438491213+00:00 stderr F time="2025-12-12T16:29:39Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:39.441778976+00:00 stderr F E1212 16:29:39.441696 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:39.441832608+00:00 stderr F I1212 16:29:39.441794 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:39.634294790+00:00 stderr F time="2025-12-12T16:29:39Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:39.634294790+00:00 stderr F time="2025-12-12T16:29:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:39.642346673+00:00 stderr F time="2025-12-12T16:29:39Z" level=warning msg="an error was encountered during reconciliation" error="Operation cannot be fulfilled on subscriptions.operators.coreos.com \"elasticsearch-eck-operator-certified\": the object has been modified; please apply your changes to the latest version and try again" reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:39.642387004+00:00 stderr F E1212 16:29:39.642364 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"service-telemetry/elasticsearch-eck-operator-certified\" failed: Operation cannot be fulfilled on subscriptions.operators.coreos.com \"elasticsearch-eck-operator-certified\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError" 2025-12-12T16:29:39.643666936+00:00 stderr F time="2025-12-12T16:29:39Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:39.835475992+00:00 stderr F time="2025-12-12T16:29:39Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=m0Mxz 2025-12-12T16:29:39.835475992+00:00 stderr F time="2025-12-12T16:29:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=m0Mxz 2025-12-12T16:29:40.086288245+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="resolving sources" id=avjjZ 
namespace=service-telemetry 2025-12-12T16:29:40.086288245+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="checking if subscriptions need update" id=avjjZ namespace=service-telemetry 2025-12-12T16:29:40.097503658+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="checking for existing installplan" channel=unstable id=avjjZ namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:40.097503658+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="resolving subscriptions in namespace" id=avjjZ namespace=service-telemetry 2025-12-12T16:29:40.238693897+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:40.241569680+00:00 stderr F I1212 16:29:40.241468 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:40.634029714+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:40.634029714+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:40.640999319+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:40.641052571+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:40.641152823+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="resolving sources" id=WZKKm namespace=service-telemetry 2025-12-12T16:29:40.641152823+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="checking if subscriptions need update" id=WZKKm namespace=service-telemetry 2025-12-12T16:29:40.838462837+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:40.838462837+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:40.846220903+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="checking for existing installplan" channel=unstable id=WZKKm namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:40.846220903+00:00 stderr F time="2025-12-12T16:29:40Z" level=info msg="resolving subscriptions in namespace" id=WZKKm namespace=service-telemetry 2025-12-12T16:29:41.037894145+00:00 stderr F time="2025-12-12T16:29:41Z" level=info msg="requesting 
snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:41.040374807+00:00 stderr F E1212 16:29:41.040268 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:41.040431249+00:00 stderr F I1212 16:29:41.040394 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:41.046761148+00:00 stderr F time="2025-12-12T16:29:41Z" level=info msg="resolving sources" id=2eYy5 namespace=service-telemetry 2025-12-12T16:29:41.046761148+00:00 stderr F time="2025-12-12T16:29:41Z" level=info msg="checking if subscriptions need update" id=2eYy5 namespace=service-telemetry 2025-12-12T16:29:41.443269824+00:00 stderr F time="2025-12-12T16:29:41Z" level=info msg="checking for existing installplan" channel=unstable id=2eYy5 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:41.443269824+00:00 stderr F time="2025-12-12T16:29:41Z" level=info msg="resolving subscriptions in namespace" id=2eYy5 namespace=service-telemetry 2025-12-12T16:29:41.633219653+00:00 stderr F time="2025-12-12T16:29:41Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=m0Mxz 2025-12-12T16:29:41.633219653+00:00 stderr F time="2025-12-12T16:29:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=m0Mxz 2025-12-12T16:29:41.639915632+00:00 stderr F time="2025-12-12T16:29:41Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:41.642720783+00:00 stderr F E1212 16:29:41.642675 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:41.642801885+00:00 stderr F I1212 16:29:41.642750 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc 
= "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:41.654120080+00:00 stderr F time="2025-12-12T16:29:41Z" level=info msg="resolving sources" id=RXex8 namespace=service-telemetry 2025-12-12T16:29:41.654120080+00:00 stderr F time="2025-12-12T16:29:41Z" level=info msg="checking if subscriptions need update" id=RXex8 namespace=service-telemetry 2025-12-12T16:29:41.832564339+00:00 stderr F time="2025-12-12T16:29:41Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:41.832564339+00:00 stderr F time="2025-12-12T16:29:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:42.033011772+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:42.033011772+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:42.043700551+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="checking for existing installplan" channel=unstable id=RXex8 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:42.043700551+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="resolving subscriptions in namespace" id=RXex8 namespace=service-telemetry 2025-12-12T16:29:42.239864517+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:42.243300233+00:00 stderr F E1212 16:29:42.243257 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:42.243394766+00:00 stderr F I1212 16:29:42.243349 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:42.264795095+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="resolving sources" id=P61yB namespace=service-telemetry 2025-12-12T16:29:42.264795095+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="checking if subscriptions need update" id=P61yB namespace=service-telemetry 2025-12-12T16:29:42.646233320+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="checking for existing installplan" channel=unstable id=P61yB namespace=service-telemetry 
pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:42.646314612+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="resolving subscriptions in namespace" id=P61yB namespace=service-telemetry 2025-12-12T16:29:42.833530902+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=Ft5Uk 2025-12-12T16:29:42.833530902+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=Ft5Uk 2025-12-12T16:29:42.839460202+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:42.843153925+00:00 stderr F E1212 16:29:42.843065 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:42.843281748+00:00 stderr F I1212 16:29:42.843229 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:42.884970249+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="resolving sources" id=OwQhe namespace=service-telemetry 2025-12-12T16:29:42.884970249+00:00 stderr F time="2025-12-12T16:29:42Z" level=info msg="checking if subscriptions need update" id=OwQhe namespace=service-telemetry 2025-12-12T16:29:43.032968500+00:00 stderr F time="2025-12-12T16:29:43Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:43.032968500+00:00 stderr F time="2025-12-12T16:29:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:43.232894661+00:00 stderr F time="2025-12-12T16:29:43Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:43.232894661+00:00 stderr F time="2025-12-12T16:29:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:43.244034941+00:00 stderr F time="2025-12-12T16:29:43Z" level=info 
msg="checking for existing installplan" channel=unstable id=OwQhe namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:43.244034941+00:00 stderr F time="2025-12-12T16:29:43Z" level=info msg="resolving subscriptions in namespace" id=OwQhe namespace=service-telemetry 2025-12-12T16:29:43.439103229+00:00 stderr F time="2025-12-12T16:29:43Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:43.442020593+00:00 stderr F E1212 16:29:43.441971 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:43.442105895+00:00 stderr F I1212 16:29:43.442067 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:43.523619020+00:00 stderr F time="2025-12-12T16:29:43Z" level=info msg="resolving sources" id=oIf4Z namespace=service-telemetry 2025-12-12T16:29:43.523619020+00:00 stderr F time="2025-12-12T16:29:43Z" level=info msg="checking if subscriptions need update" id=oIf4Z namespace=service-telemetry 2025-12-12T16:29:43.844050048+00:00 stderr F time="2025-12-12T16:29:43Z" level=info msg="checking for existing installplan" channel=unstable id=oIf4Z namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:43.844122740+00:00 stderr F time="2025-12-12T16:29:43Z" level=info msg="resolving subscriptions in namespace" id=oIf4Z namespace=service-telemetry 2025-12-12T16:29:44.040930382+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:44.043747613+00:00 stderr F E1212 16:29:44.043705 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:44.043984119+00:00 stderr F I1212 16:29:44.043886 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:44.207802190+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="resolving sources" id=vM3FK namespace=service-telemetry 
2025-12-12T16:29:44.207802190+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="checking if subscriptions need update" id=vM3FK namespace=service-telemetry 2025-12-12T16:29:44.233103237+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:44.233103237+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:44.434392372+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:44.434392372+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:44.445299177+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="checking for existing installplan" channel=unstable id=vM3FK namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:44.445299177+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="resolving subscriptions in namespace" id=vM3FK namespace=service-telemetry 2025-12-12T16:29:44.634383024+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=Ft5Uk 2025-12-12T16:29:44.634383024+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=Ft5Uk 2025-12-12T16:29:44.645094274+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:44.839667010+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:44.842508961+00:00 stderr F E1212 16:29:44.842408 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:44.842569863+00:00 stderr F I1212 16:29:44.842502 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = 
"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:44.844157513+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="resolving sources" id=P+kwJ namespace=service-telemetry 2025-12-12T16:29:44.844157513+00:00 stderr F time="2025-12-12T16:29:44Z" level=info msg="checking if subscriptions need update" id=P+kwJ namespace=service-telemetry 2025-12-12T16:29:45.233746905+00:00 stderr F time="2025-12-12T16:29:45Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:45.233746905+00:00 stderr F time="2025-12-12T16:29:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:45.244543577+00:00 stderr F time="2025-12-12T16:29:45Z" level=info msg="checking for existing installplan" channel=unstable id=P+kwJ namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:45.244543577+00:00 stderr F time="2025-12-12T16:29:45Z" level=info msg="resolving subscriptions in namespace" id=P+kwJ namespace=service-telemetry 2025-12-12T16:29:45.432229769+00:00 stderr F time="2025-12-12T16:29:45Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:45.432229769+00:00 stderr F time="2025-12-12T16:29:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:45.443860852+00:00 stderr F time="2025-12-12T16:29:45Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:45.443925364+00:00 stderr F time="2025-12-12T16:29:45Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:29:45.639806332+00:00 stderr F time="2025-12-12T16:29:45Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:45.642486999+00:00 stderr F E1212 16:29:45.642426 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:45.642695465+00:00 stderr F I1212 16:29:45.642628 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:45.643907085+00:00 stderr F time="2025-12-12T16:29:45Z" level=info msg="resolving sources" id=OFvQ4 namespace=service-telemetry 2025-12-12T16:29:45.643907085+00:00 stderr F 
time="2025-12-12T16:29:45Z" level=info msg="checking if subscriptions need update" id=OFvQ4 namespace=service-telemetry 2025-12-12T16:29:46.033535078+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:46.033535078+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:46.043591281+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="checking for existing installplan" channel=unstable id=OFvQ4 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:46.043591281+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="resolving subscriptions in namespace" id=OFvQ4 namespace=service-telemetry 2025-12-12T16:29:46.234440672+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:46.234440672+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:46.246299041+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:46.250520988+00:00 stderr F I1212 16:29:46.250462 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:46.284081984+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="resolving sources" id=nPH+V namespace=service-telemetry 2025-12-12T16:29:46.284081984+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="checking if subscriptions need update" id=nPH+V namespace=service-telemetry 2025-12-12T16:29:46.648228374+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="checking for existing installplan" channel=unstable id=nPH+V namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:46.648228374+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="resolving subscriptions in namespace" id=nPH+V namespace=service-telemetry 2025-12-12T16:29:46.832454869+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:46.832454869+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 
current-pod.namespace=openshift-marketplace 2025-12-12T16:29:46.838893491+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:46.841310402+00:00 stderr F E1212 16:29:46.841277 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:46.841394004+00:00 stderr F I1212 16:29:46.841354 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:46.847753984+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="resolving sources" id=Fgtku namespace=service-telemetry 2025-12-12T16:29:46.847753984+00:00 stderr F time="2025-12-12T16:29:46Z" level=info msg="checking if subscriptions need update" id=Fgtku namespace=service-telemetry 2025-12-12T16:29:47.033225071+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:47.033225071+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:47.242441145+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="checking for existing installplan" channel=unstable id=Fgtku namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:47.242441145+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="resolving subscriptions in namespace" id=Fgtku namespace=service-telemetry 2025-12-12T16:29:47.441714529+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:47.446090419+00:00 stderr F E1212 16:29:47.446040 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:47.446225032+00:00 stderr F I1212 16:29:47.446170 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while 
dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:47.457665031+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="resolving sources" id=W+kGU namespace=service-telemetry 2025-12-12T16:29:47.457665031+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="checking if subscriptions need update" id=W+kGU namespace=service-telemetry 2025-12-12T16:29:47.632991401+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:47.632991401+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:47.832892761+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:47.832892761+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:29:47.843709833+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="checking for existing installplan" channel=unstable id=W+kGU namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:47.843709833+00:00 stderr F time="2025-12-12T16:29:47Z" level=info msg="resolving subscriptions in namespace" id=W+kGU namespace=service-telemetry 2025-12-12T16:29:48.045889390+00:00 stderr F time="2025-12-12T16:29:48Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:48.051003849+00:00 stderr F E1212 16:29:48.050881 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:48.051101572+00:00 stderr F I1212 16:29:48.051024 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:48.052385634+00:00 stderr F time="2025-12-12T16:29:48Z" level=info msg="resolving sources" id=DFZVb namespace=service-telemetry 2025-12-12T16:29:48.052385634+00:00 stderr F time="2025-12-12T16:29:48Z" level=info msg="checking if subscriptions need update" id=DFZVb namespace=service-telemetry 2025-12-12T16:29:48.236573088+00:00 stderr F time="2025-12-12T16:29:48Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 
2025-12-12T16:29:48.236573088+00:00 stderr F time="2025-12-12T16:29:48Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:29:48.237108071+00:00 stderr F time="2025-12-12T16:29:48Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:48.444321595+00:00 stderr F time="2025-12-12T16:29:48Z" level=info msg="checking for existing installplan" channel=unstable id=DFZVb namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:48.444321595+00:00 stderr F time="2025-12-12T16:29:48Z" level=info msg="resolving subscriptions in namespace" id=DFZVb namespace=service-telemetry 2025-12-12T16:29:48.640629824+00:00 stderr F time="2025-12-12T16:29:48Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:48.642907692+00:00 stderr F E1212 16:29:48.642877 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:48.642998734+00:00 stderr F I1212 16:29:48.642961 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:48.644165423+00:00 stderr F time="2025-12-12T16:29:48Z" level=info msg="resolving sources" id=OLPAo namespace=service-telemetry 2025-12-12T16:29:48.644165423+00:00 stderr F time="2025-12-12T16:29:48Z" level=info msg="checking if subscriptions need update" id=OLPAo namespace=service-telemetry 2025-12-12T16:29:49.043699836+00:00 stderr F time="2025-12-12T16:29:49Z" level=info msg="checking for existing installplan" channel=unstable id=OLPAo namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:49.043699836+00:00 stderr F time="2025-12-12T16:29:49Z" level=info msg="resolving subscriptions in namespace" id=OLPAo namespace=service-telemetry 2025-12-12T16:29:49.240399485+00:00 stderr F time="2025-12-12T16:29:49Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:49.245851412+00:00 stderr F I1212 16:29:49.245814 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:49.245902043+00:00 stderr F E1212 
16:29:49.245827 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:49.247131094+00:00 stderr F time="2025-12-12T16:29:49Z" level=info msg="resolving sources" id=q2Ddc namespace=service-telemetry 2025-12-12T16:29:49.247131094+00:00 stderr F time="2025-12-12T16:29:49Z" level=info msg="checking if subscriptions need update" id=q2Ddc namespace=service-telemetry 2025-12-12T16:29:49.643423934+00:00 stderr F time="2025-12-12T16:29:49Z" level=info msg="checking for existing installplan" channel=unstable id=q2Ddc namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:49.643423934+00:00 stderr F time="2025-12-12T16:29:49Z" level=info msg="resolving subscriptions in namespace" id=q2Ddc namespace=service-telemetry 2025-12-12T16:29:49.841229720+00:00 stderr F time="2025-12-12T16:29:49Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:49.844111753+00:00 stderr F E1212 16:29:49.844029 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:49.844255527+00:00 stderr F I1212 16:29:49.844158 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:49.845362855+00:00 stderr F time="2025-12-12T16:29:49Z" level=info msg="resolving sources" id=GBV+Q namespace=service-telemetry 2025-12-12T16:29:49.845405786+00:00 stderr F time="2025-12-12T16:29:49Z" level=info msg="checking if subscriptions need update" id=GBV+Q namespace=service-telemetry 2025-12-12T16:29:50.250749535+00:00 stderr F time="2025-12-12T16:29:50Z" level=info msg="checking for existing installplan" channel=unstable id=GBV+Q namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:50.250749535+00:00 stderr F time="2025-12-12T16:29:50Z" level=info msg="resolving subscriptions in namespace" id=GBV+Q namespace=service-telemetry 2025-12-12T16:29:50.440213322+00:00 stderr F time="2025-12-12T16:29:50Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:50.443560296+00:00 stderr F E1212 16:29:50.443514 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: 
connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:50.443785252+00:00 stderr F I1212 16:29:50.443667 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:50.444724095+00:00 stderr F time="2025-12-12T16:29:50Z" level=info msg="resolving sources" id=eP9Gi namespace=service-telemetry 2025-12-12T16:29:50.444766836+00:00 stderr F time="2025-12-12T16:29:50Z" level=info msg="checking if subscriptions need update" id=eP9Gi namespace=service-telemetry 2025-12-12T16:29:50.849872110+00:00 stderr F time="2025-12-12T16:29:50Z" level=info msg="checking for existing installplan" channel=unstable id=eP9Gi namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:50.849986603+00:00 stderr F time="2025-12-12T16:29:50Z" level=info msg="resolving subscriptions in namespace" id=eP9Gi namespace=service-telemetry 2025-12-12T16:29:51.040733362+00:00 stderr F time="2025-12-12T16:29:51Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:51.043101741+00:00 stderr F E1212 16:29:51.043018 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:51.043202084+00:00 stderr F I1212 16:29:51.043150 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:51.044319742+00:00 stderr F time="2025-12-12T16:29:51Z" level=info msg="resolving sources" id=yZW/0 namespace=service-telemetry 2025-12-12T16:29:51.044396674+00:00 stderr F time="2025-12-12T16:29:51Z" level=info msg="checking if subscriptions need update" id=yZW/0 namespace=service-telemetry 2025-12-12T16:29:51.444470710+00:00 stderr F time="2025-12-12T16:29:51Z" level=info msg="checking for existing installplan" channel=unstable id=yZW/0 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:51.444552192+00:00 stderr F time="2025-12-12T16:29:51Z" level=info msg="resolving subscriptions in namespace" id=yZW/0 namespace=service-telemetry 2025-12-12T16:29:51.639684962+00:00 stderr F time="2025-12-12T16:29:51Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:51.645499888+00:00 stderr F I1212 16:29:51.645404 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", 
Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:51.684489731+00:00 stderr F time="2025-12-12T16:29:51Z" level=info msg="resolving sources" id=bQ5BO namespace=service-telemetry 2025-12-12T16:29:51.684489731+00:00 stderr F time="2025-12-12T16:29:51Z" level=info msg="checking if subscriptions need update" id=bQ5BO namespace=service-telemetry 2025-12-12T16:29:52.044740243+00:00 stderr F time="2025-12-12T16:29:52Z" level=info msg="checking for existing installplan" channel=unstable id=bQ5BO namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:52.044740243+00:00 stderr F time="2025-12-12T16:29:52Z" level=info msg="resolving subscriptions in namespace" id=bQ5BO namespace=service-telemetry 2025-12-12T16:29:52.241977435+00:00 stderr F time="2025-12-12T16:29:52Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:52.244790196+00:00 stderr F E1212 16:29:52.244604 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:52.244790196+00:00 stderr F I1212 16:29:52.244700 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:52.251219108+00:00 stderr F time="2025-12-12T16:29:52Z" level=info msg="resolving sources" id=3xEqw namespace=service-telemetry 2025-12-12T16:29:52.251219108+00:00 stderr F time="2025-12-12T16:29:52Z" level=info msg="checking if subscriptions need update" id=3xEqw namespace=service-telemetry 2025-12-12T16:29:52.644531574+00:00 stderr F time="2025-12-12T16:29:52Z" level=info msg="checking for existing installplan" channel=unstable id=3xEqw namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:52.644531574+00:00 stderr F time="2025-12-12T16:29:52Z" level=info msg="resolving subscriptions in namespace" id=3xEqw namespace=service-telemetry 2025-12-12T16:29:52.840513565+00:00 stderr F time="2025-12-12T16:29:52Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:52.846085175+00:00 stderr F E1212 16:29:52.846041 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while 
dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:52.846331421+00:00 stderr F I1212 16:29:52.846218 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:52.857836791+00:00 stderr F time="2025-12-12T16:29:52Z" level=info msg="resolving sources" id=VGMAU namespace=service-telemetry 2025-12-12T16:29:52.857881243+00:00 stderr F time="2025-12-12T16:29:52Z" level=info msg="checking if subscriptions need update" id=VGMAU namespace=service-telemetry 2025-12-12T16:29:53.244310624+00:00 stderr F time="2025-12-12T16:29:53Z" level=info msg="checking for existing installplan" channel=unstable id=VGMAU namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:53.244310624+00:00 stderr F time="2025-12-12T16:29:53Z" level=info msg="resolving subscriptions in namespace" id=VGMAU namespace=service-telemetry 2025-12-12T16:29:53.441563507+00:00 stderr F time="2025-12-12T16:29:53Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:53.445278160+00:00 stderr F E1212 16:29:53.445165 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:53.445321101+00:00 stderr F I1212 16:29:53.445280 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:53.466806843+00:00 stderr F time="2025-12-12T16:29:53Z" level=info msg="resolving sources" id=VFl77 namespace=service-telemetry 2025-12-12T16:29:53.466806843+00:00 stderr F time="2025-12-12T16:29:53Z" level=info msg="checking if subscriptions need update" id=VFl77 namespace=service-telemetry 2025-12-12T16:29:53.844343961+00:00 stderr F time="2025-12-12T16:29:53Z" level=info msg="checking for existing installplan" channel=unstable id=VFl77 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:53.844343961+00:00 stderr F time="2025-12-12T16:29:53Z" level=info msg="resolving subscriptions in namespace" id=VFl77 namespace=service-telemetry 2025-12-12T16:29:54.040041954+00:00 stderr F time="2025-12-12T16:29:54Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:54.043523972+00:00 stderr F E1212 16:29:54.043442 1 queueinformer_operator.go:312] 
"Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:54.043614625+00:00 stderr F I1212 16:29:54.043554 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:54.085217393+00:00 stderr F time="2025-12-12T16:29:54Z" level=info msg="resolving sources" id=jOaEN namespace=service-telemetry 2025-12-12T16:29:54.085217393+00:00 stderr F time="2025-12-12T16:29:54Z" level=info msg="checking if subscriptions need update" id=jOaEN namespace=service-telemetry 2025-12-12T16:29:54.443924346+00:00 stderr F time="2025-12-12T16:29:54Z" level=info msg="checking for existing installplan" channel=unstable id=jOaEN namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:54.443924346+00:00 stderr F time="2025-12-12T16:29:54Z" level=info msg="resolving subscriptions in namespace" id=jOaEN namespace=service-telemetry 2025-12-12T16:29:54.639558449+00:00 stderr F time="2025-12-12T16:29:54Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:54.642661767+00:00 stderr F E1212 16:29:54.642610 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:54.642815411+00:00 stderr F I1212 16:29:54.642757 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:54.724289815+00:00 stderr F time="2025-12-12T16:29:54Z" level=info msg="resolving sources" id=HcY7R namespace=service-telemetry 2025-12-12T16:29:54.724289815+00:00 stderr F time="2025-12-12T16:29:54Z" level=info msg="checking if subscriptions need update" id=HcY7R namespace=service-telemetry 2025-12-12T16:29:55.044417415+00:00 stderr F time="2025-12-12T16:29:55Z" level=info msg="checking for existing installplan" channel=unstable id=HcY7R namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:55.044417415+00:00 stderr F time="2025-12-12T16:29:55Z" level=info msg="resolving subscriptions in namespace" id=HcY7R namespace=service-telemetry 2025-12-12T16:29:55.239221197+00:00 
stderr F time="2025-12-12T16:29:55Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:55.242252863+00:00 stderr F E1212 16:29:55.242210 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:55.242320455+00:00 stderr F I1212 16:29:55.242280 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:55.403967510+00:00 stderr F time="2025-12-12T16:29:55Z" level=info msg="resolving sources" id=7zHiO namespace=service-telemetry 2025-12-12T16:29:55.403967510+00:00 stderr F time="2025-12-12T16:29:55Z" level=info msg="checking if subscriptions need update" id=7zHiO namespace=service-telemetry 2025-12-12T16:29:55.644340440+00:00 stderr F time="2025-12-12T16:29:55Z" level=info msg="checking for existing installplan" channel=unstable id=7zHiO namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:55.644340440+00:00 stderr F time="2025-12-12T16:29:55Z" level=info msg="resolving subscriptions in namespace" id=7zHiO namespace=service-telemetry 2025-12-12T16:29:55.839031628+00:00 stderr F time="2025-12-12T16:29:55Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:55.842785133+00:00 stderr F E1212 16:29:55.842705 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:55.842848695+00:00 stderr F I1212 16:29:55.842779 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:56.164641337+00:00 stderr F time="2025-12-12T16:29:56Z" level=info msg="resolving sources" id=M8RBB namespace=service-telemetry 2025-12-12T16:29:56.164641337+00:00 stderr F time="2025-12-12T16:29:56Z" level=info msg="checking if subscriptions need update" id=M8RBB namespace=service-telemetry 2025-12-12T16:29:56.245807223+00:00 stderr F time="2025-12-12T16:29:56Z" level=info msg="checking for existing installplan" channel=unstable id=M8RBB namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators 
sub=service-telemetry-operator 2025-12-12T16:29:56.245807223+00:00 stderr F time="2025-12-12T16:29:56Z" level=info msg="resolving subscriptions in namespace" id=M8RBB namespace=service-telemetry 2025-12-12T16:29:56.439396084+00:00 stderr F time="2025-12-12T16:29:56Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:56.442899972+00:00 stderr F E1212 16:29:56.442840 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:29:56.443066897+00:00 stderr F I1212 16:29:56.442953 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:57.084981059+00:00 stderr F time="2025-12-12T16:29:57Z" level=info msg="resolving sources" id=HTNkf namespace=service-telemetry 2025-12-12T16:29:57.084981059+00:00 stderr F time="2025-12-12T16:29:57Z" level=info msg="checking if subscriptions need update" id=HTNkf namespace=service-telemetry 2025-12-12T16:29:57.097331440+00:00 stderr F time="2025-12-12T16:29:57Z" level=info msg="checking for existing installplan" channel=unstable id=HTNkf namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:29:57.097331440+00:00 stderr F time="2025-12-12T16:29:57Z" level=info msg="resolving subscriptions in namespace" id=HTNkf namespace=service-telemetry 2025-12-12T16:29:57.100025688+00:00 stderr F time="2025-12-12T16:29:57Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:29:57.102806768+00:00 stderr F I1212 16:29:57.102753 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:29:57.391948578+00:00 stderr F time="2025-12-12T16:29:57Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=Y+kAc 2025-12-12T16:29:57.391948578+00:00 stderr F time="2025-12-12T16:29:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=Y+kAc 2025-12-12T16:29:57.402653148+00:00 
stderr F time="2025-12-12T16:29:57Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=Y+kAc 2025-12-12T16:29:57.402653148+00:00 stderr F time="2025-12-12T16:29:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=Y+kAc 2025-12-12T16:30:09.389987927+00:00 stderr F time="2025-12-12T16:30:09Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=0y42j 2025-12-12T16:30:09.389987927+00:00 stderr F time="2025-12-12T16:30:09Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=0y42j 2025-12-12T16:30:09.400714597+00:00 stderr F time="2025-12-12T16:30:09Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=0y42j 2025-12-12T16:30:09.400714597+00:00 stderr F time="2025-12-12T16:30:09Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=0y42j 2025-12-12T16:30:23.398947286+00:00 stderr F time="2025-12-12T16:30:23Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=SEyz3 2025-12-12T16:30:23.398947286+00:00 stderr F time="2025-12-12T16:30:23Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=SEyz3 2025-12-12T16:30:23.408456523+00:00 stderr F time="2025-12-12T16:30:23Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=SEyz3 2025-12-12T16:30:23.408456523+00:00 stderr F time="2025-12-12T16:30:23Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=SEyz3 2025-12-12T16:30:38.395537272+00:00 stderr F 
time="2025-12-12T16:30:38Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=hn99n 2025-12-12T16:30:38.395537272+00:00 stderr F time="2025-12-12T16:30:38Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=hn99n 2025-12-12T16:30:38.404075275+00:00 stderr F time="2025-12-12T16:30:38Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=hn99n 2025-12-12T16:30:38.404135886+00:00 stderr F time="2025-12-12T16:30:38Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=hn99n 2025-12-12T16:31:04.397144846+00:00 stderr F time="2025-12-12T16:31:04Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=JLApD 2025-12-12T16:31:04.397144846+00:00 stderr F time="2025-12-12T16:31:04Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=JLApD 2025-12-12T16:31:04.410717045+00:00 stderr F time="2025-12-12T16:31:04Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=JLApD 2025-12-12T16:31:04.410717045+00:00 stderr F time="2025-12-12T16:31:04Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=JLApD 2025-12-12T16:31:18.398062178+00:00 stderr F time="2025-12-12T16:31:18Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=G4VzZ 2025-12-12T16:31:18.398062178+00:00 stderr F time="2025-12-12T16:31:18Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=G4VzZ 2025-12-12T16:31:18.407533721+00:00 stderr F time="2025-12-12T16:31:18Z" 
level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=G4VzZ 2025-12-12T16:31:18.407533721+00:00 stderr F time="2025-12-12T16:31:18Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=G4VzZ 2025-12-12T16:32:41.403328994+00:00 stderr F time="2025-12-12T16:32:41Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=qpYnP 2025-12-12T16:32:41.403328994+00:00 stderr F time="2025-12-12T16:32:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=qpYnP 2025-12-12T16:32:41.412160785+00:00 stderr F time="2025-12-12T16:32:41Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=qpYnP 2025-12-12T16:32:41.412160785+00:00 stderr F time="2025-12-12T16:32:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=qpYnP 2025-12-12T16:32:53.400989081+00:00 stderr F time="2025-12-12T16:32:53Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=8mlYU 2025-12-12T16:32:53.400989081+00:00 stderr F time="2025-12-12T16:32:53Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=8mlYU 2025-12-12T16:32:53.407619736+00:00 stderr F time="2025-12-12T16:32:53Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=8mlYU 2025-12-12T16:32:53.407619736+00:00 stderr F time="2025-12-12T16:32:53Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=8mlYU 2025-12-12T16:32:57.522960992+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" 
id=xFvjZ namespace=openshift-machine-config-operator 2025-12-12T16:32:57.522960992+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=xFvjZ namespace=openshift-machine-config-operator 2025-12-12T16:32:57.523045484+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=unZDS namespace=openshift-network-console 2025-12-12T16:32:57.523045484+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=unZDS namespace=openshift-network-console 2025-12-12T16:32:57.527898865+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="No subscriptions were found in namespace openshift-network-console" id=unZDS namespace=openshift-network-console 2025-12-12T16:32:57.527898865+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=OW0GL namespace=openshift-network-node-identity 2025-12-12T16:32:57.527898865+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=OW0GL namespace=openshift-network-node-identity 2025-12-12T16:32:57.528403038+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="No subscriptions were found in namespace openshift-machine-config-operator" id=xFvjZ namespace=openshift-machine-config-operator 2025-12-12T16:32:57.528423518+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=NbdZa namespace=openshift-ovn-kubernetes 2025-12-12T16:32:57.528423518+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=NbdZa namespace=openshift-ovn-kubernetes 2025-12-12T16:32:57.531563907+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="No subscriptions were found in namespace openshift-network-node-identity" id=OW0GL namespace=openshift-network-node-identity 2025-12-12T16:32:57.531563907+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="No subscriptions were found in namespace openshift-ovn-kubernetes" id=NbdZa namespace=openshift-ovn-kubernetes 2025-12-12T16:32:57.531596497+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=l2K2D namespace=openshift-config-managed 2025-12-12T16:32:57.531596497+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=l2K2D namespace=openshift-config-managed 2025-12-12T16:32:57.531708000+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=w9nPh namespace=kube-system 2025-12-12T16:32:57.531708000+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=w9nPh namespace=kube-system 2025-12-12T16:32:57.534505130+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="No subscriptions were found in namespace kube-system" id=w9nPh namespace=kube-system 2025-12-12T16:32:57.534505130+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=JiHxd namespace=openshift-etcd-operator 2025-12-12T16:32:57.534505130+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=JiHxd namespace=openshift-etcd-operator 2025-12-12T16:32:57.534663564+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="No subscriptions were found in namespace openshift-config-managed" id=l2K2D namespace=openshift-config-managed 2025-12-12T16:32:57.534663564+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=oCsOb namespace=openshift-network-operator 
2025-12-12T16:32:57.534663564+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=oCsOb namespace=openshift-network-operator 2025-12-12T16:32:57.536636473+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="No subscriptions were found in namespace openshift-network-operator" id=oCsOb namespace=openshift-network-operator 2025-12-12T16:32:57.536636473+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=W/Nm0 namespace=openshift-nutanix-infra 2025-12-12T16:32:57.536676334+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=W/Nm0 namespace=openshift-nutanix-infra 2025-12-12T16:32:57.537537966+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="No subscriptions were found in namespace openshift-etcd-operator" id=JiHxd namespace=openshift-etcd-operator 2025-12-12T16:32:57.537537966+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=jvLAK namespace=openshift-service-ca-operator 2025-12-12T16:32:57.537537966+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=jvLAK namespace=openshift-service-ca-operator 2025-12-12T16:32:57.539497265+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="No subscriptions were found in namespace openshift-nutanix-infra" id=W/Nm0 namespace=openshift-nutanix-infra 2025-12-12T16:32:57.539497265+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=3s0W3 namespace=openshift-kube-controller-manager-operator 2025-12-12T16:32:57.539497265+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=3s0W3 namespace=openshift-kube-controller-manager-operator 2025-12-12T16:32:57.540650014+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="No subscriptions were found in namespace openshift-service-ca-operator" id=jvLAK namespace=openshift-service-ca-operator 2025-12-12T16:32:57.540650014+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=EcZrb namespace=openshift-cluster-samples-operator 2025-12-12T16:32:57.540650014+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=EcZrb namespace=openshift-cluster-samples-operator 2025-12-12T16:32:57.727248737+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="No subscriptions were found in namespace openshift-kube-controller-manager-operator" id=3s0W3 namespace=openshift-kube-controller-manager-operator 2025-12-12T16:32:57.727302969+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=+AxBK namespace=openshift-config 2025-12-12T16:32:57.727302969+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=+AxBK namespace=openshift-config 2025-12-12T16:32:57.927546343+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="No subscriptions were found in namespace openshift-cluster-samples-operator" id=EcZrb namespace=openshift-cluster-samples-operator 2025-12-12T16:32:57.927546343+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="resolving sources" id=3U+IY namespace=openshift-controller-manager 2025-12-12T16:32:57.927546343+00:00 stderr F time="2025-12-12T16:32:57Z" level=info msg="checking if subscriptions need update" id=3U+IY namespace=openshift-controller-manager 2025-12-12T16:32:58.128560736+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="No subscriptions were 
found in namespace openshift-config" id=+AxBK namespace=openshift-config 2025-12-12T16:32:58.128560736+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="resolving sources" id=H2LDV namespace=openshift-etcd 2025-12-12T16:32:58.128560736+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="checking if subscriptions need update" id=H2LDV namespace=openshift-etcd 2025-12-12T16:32:58.329908158+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="No subscriptions were found in namespace openshift-controller-manager" id=3U+IY namespace=openshift-controller-manager 2025-12-12T16:32:58.330011631+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="resolving sources" id=fPikK namespace=openshift-host-network 2025-12-12T16:32:58.330011631+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="checking if subscriptions need update" id=fPikK namespace=openshift-host-network 2025-12-12T16:32:58.528696696+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="No subscriptions were found in namespace openshift-etcd" id=H2LDV namespace=openshift-etcd 2025-12-12T16:32:58.528696696+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="resolving sources" id=ByOUC namespace=openshift-kube-apiserver 2025-12-12T16:32:58.528696696+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="checking if subscriptions need update" id=ByOUC namespace=openshift-kube-apiserver 2025-12-12T16:32:58.727989486+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="No subscriptions were found in namespace openshift-host-network" id=fPikK namespace=openshift-host-network 2025-12-12T16:32:58.728071339+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="resolving sources" id=bMIO8 namespace=openstack-operators 2025-12-12T16:32:58.728071339+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="checking if subscriptions need update" id=bMIO8 namespace=openstack-operators 2025-12-12T16:32:58.928013725+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="No subscriptions were found in namespace openshift-kube-apiserver" id=ByOUC namespace=openshift-kube-apiserver 2025-12-12T16:32:58.928241651+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="resolving sources" id=8pSif namespace=kube-public 2025-12-12T16:32:58.928241651+00:00 stderr F time="2025-12-12T16:32:58Z" level=info msg="checking if subscriptions need update" id=8pSif namespace=kube-public 2025-12-12T16:32:59.127955352+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="No subscriptions were found in namespace openstack-operators" id=bMIO8 namespace=openstack-operators 2025-12-12T16:32:59.128043264+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="resolving sources" id=5B+cc namespace=openshift-console-operator 2025-12-12T16:32:59.128043264+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="checking if subscriptions need update" id=5B+cc namespace=openshift-console-operator 2025-12-12T16:32:59.326611255+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="No subscriptions were found in namespace kube-public" id=8pSif namespace=kube-public 2025-12-12T16:32:59.326611255+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="resolving sources" id=bQCIz namespace=openshift-image-registry 2025-12-12T16:32:59.326611255+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="checking if subscriptions need update" id=bQCIz namespace=openshift-image-registry 2025-12-12T16:32:59.527581218+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="No subscriptions were found in 
namespace openshift-console-operator" id=5B+cc namespace=openshift-console-operator 2025-12-12T16:32:59.527581218+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="resolving sources" id=1vGWK namespace=openshift-kube-controller-manager 2025-12-12T16:32:59.527581218+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="checking if subscriptions need update" id=1vGWK namespace=openshift-kube-controller-manager 2025-12-12T16:32:59.728484588+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="No subscriptions were found in namespace openshift-image-registry" id=bQCIz namespace=openshift-image-registry 2025-12-12T16:32:59.728484588+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="resolving sources" id=kEsOE namespace=openshift-cluster-storage-operator 2025-12-12T16:32:59.728484588+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="checking if subscriptions need update" id=kEsOE namespace=openshift-cluster-storage-operator 2025-12-12T16:32:59.927204414+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="No subscriptions were found in namespace openshift-kube-controller-manager" id=1vGWK namespace=openshift-kube-controller-manager 2025-12-12T16:32:59.927204414+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="resolving sources" id=amwZA namespace=openshift-dns 2025-12-12T16:32:59.927295247+00:00 stderr F time="2025-12-12T16:32:59Z" level=info msg="checking if subscriptions need update" id=amwZA namespace=openshift-dns 2025-12-12T16:33:00.126821043+00:00 stderr F time="2025-12-12T16:33:00Z" level=info msg="No subscriptions were found in namespace openshift-cluster-storage-operator" id=kEsOE namespace=openshift-cluster-storage-operator 2025-12-12T16:33:00.126821043+00:00 stderr F time="2025-12-12T16:33:00Z" level=info msg="resolving sources" id=LXETM namespace=openshift 2025-12-12T16:33:00.126821043+00:00 stderr F time="2025-12-12T16:33:00Z" level=info msg="checking if subscriptions need update" id=LXETM namespace=openshift 2025-12-12T16:33:00.327597271+00:00 stderr F time="2025-12-12T16:33:00Z" level=info msg="No subscriptions were found in namespace openshift-dns" id=amwZA namespace=openshift-dns 2025-12-12T16:33:00.327597271+00:00 stderr F time="2025-12-12T16:33:00Z" level=info msg="resolving sources" id=r2VgZ namespace=openshift-vsphere-infra 2025-12-12T16:33:00.327597271+00:00 stderr F time="2025-12-12T16:33:00Z" level=info msg="checking if subscriptions need update" id=r2VgZ namespace=openshift-vsphere-infra 2025-12-12T16:33:00.528752128+00:00 stderr F time="2025-12-12T16:33:00Z" level=info msg="No subscriptions were found in namespace openshift" id=LXETM namespace=openshift 2025-12-12T16:33:00.528752128+00:00 stderr F time="2025-12-12T16:33:00Z" level=info msg="resolving sources" id=dYvG6 namespace=cert-manager-operator 2025-12-12T16:33:00.528752128+00:00 stderr F time="2025-12-12T16:33:00Z" level=info msg="checking if subscriptions need update" id=dYvG6 namespace=cert-manager-operator 2025-12-12T16:33:00.727512175+00:00 stderr F time="2025-12-12T16:33:00Z" level=info msg="No subscriptions were found in namespace openshift-vsphere-infra" id=r2VgZ namespace=openshift-vsphere-infra 2025-12-12T16:33:00.727512175+00:00 stderr F time="2025-12-12T16:33:00Z" level=info msg="resolving sources" id=TJSx4 namespace=hostpath-provisioner 2025-12-12T16:33:00.727512175+00:00 stderr F time="2025-12-12T16:33:00Z" level=info msg="checking if subscriptions need update" id=TJSx4 namespace=hostpath-provisioner 2025-12-12T16:33:01.127329117+00:00 stderr F 
time="2025-12-12T16:33:01Z" level=info msg="No subscriptions were found in namespace hostpath-provisioner" id=TJSx4 namespace=hostpath-provisioner 2025-12-12T16:33:01.127329117+00:00 stderr F time="2025-12-12T16:33:01Z" level=info msg="resolving sources" id=PG2mw namespace=openshift-infra 2025-12-12T16:33:01.127329117+00:00 stderr F time="2025-12-12T16:33:01Z" level=info msg="checking if subscriptions need update" id=PG2mw namespace=openshift-infra 2025-12-12T16:33:01.334541365+00:00 stderr F time="2025-12-12T16:33:01Z" level=info msg="resolving subscriptions in namespace" id=dYvG6 namespace=cert-manager-operator 2025-12-12T16:33:01.528095802+00:00 stderr F time="2025-12-12T16:33:01Z" level=info msg="No subscriptions were found in namespace openshift-infra" id=PG2mw namespace=openshift-infra 2025-12-12T16:33:01.528172994+00:00 stderr F time="2025-12-12T16:33:01Z" level=info msg="resolving sources" id=tJ0JN namespace=openshift-ingress-canary 2025-12-12T16:33:01.528172994+00:00 stderr F time="2025-12-12T16:33:01Z" level=info msg="checking if subscriptions need update" id=tJ0JN namespace=openshift-ingress-canary 2025-12-12T16:33:01.738403348+00:00 stderr F time="2025-12-12T16:33:01Z" level=info msg="no subscriptions were updated" id=dYvG6 namespace=cert-manager-operator 2025-12-12T16:33:01.928596171+00:00 stderr F time="2025-12-12T16:33:01Z" level=info msg="No subscriptions were found in namespace openshift-ingress-canary" id=tJ0JN namespace=openshift-ingress-canary 2025-12-12T16:33:01.928596171+00:00 stderr F time="2025-12-12T16:33:01Z" level=info msg="resolving sources" id=fuIBG namespace=openshift-user-workload-monitoring 2025-12-12T16:33:01.928596171+00:00 stderr F time="2025-12-12T16:33:01Z" level=info msg="checking if subscriptions need update" id=fuIBG namespace=openshift-user-workload-monitoring 2025-12-12T16:33:02.327528630+00:00 stderr F time="2025-12-12T16:33:02Z" level=info msg="No subscriptions were found in namespace openshift-user-workload-monitoring" id=fuIBG namespace=openshift-user-workload-monitoring 2025-12-12T16:33:02.327614953+00:00 stderr F time="2025-12-12T16:33:02Z" level=info msg="resolving sources" id=xq5jF namespace=openstack 2025-12-12T16:33:02.327614953+00:00 stderr F time="2025-12-12T16:33:02Z" level=info msg="checking if subscriptions need update" id=xq5jF namespace=openstack 2025-12-12T16:33:02.531728753+00:00 stderr F time="2025-12-12T16:33:02Z" level=info msg="resolving sources" id=/RaaM namespace=cert-manager 2025-12-12T16:33:02.531728753+00:00 stderr F time="2025-12-12T16:33:02Z" level=info msg="checking if subscriptions need update" id=/RaaM namespace=cert-manager 2025-12-12T16:33:02.727131446+00:00 stderr F time="2025-12-12T16:33:02Z" level=info msg="No subscriptions were found in namespace openstack" id=xq5jF namespace=openstack 2025-12-12T16:33:02.727131446+00:00 stderr F time="2025-12-12T16:33:02Z" level=info msg="resolving sources" id=uw+sB namespace=openshift-console 2025-12-12T16:33:02.727131446+00:00 stderr F time="2025-12-12T16:33:02Z" level=info msg="checking if subscriptions need update" id=uw+sB namespace=openshift-console 2025-12-12T16:33:02.927890722+00:00 stderr F time="2025-12-12T16:33:02Z" level=info msg="No subscriptions were found in namespace cert-manager" id=/RaaM namespace=cert-manager 2025-12-12T16:33:02.927970374+00:00 stderr F time="2025-12-12T16:33:02Z" level=info msg="resolving sources" id=6DpYf namespace=openshift-machine-api 2025-12-12T16:33:02.927970374+00:00 stderr F time="2025-12-12T16:33:02Z" level=info msg="checking if 
subscriptions need update" id=6DpYf namespace=openshift-machine-api 2025-12-12T16:33:03.127664515+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="No subscriptions were found in namespace openshift-console" id=uw+sB namespace=openshift-console 2025-12-12T16:33:03.127664515+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="resolving sources" id=xDzpM namespace=openshift-marketplace 2025-12-12T16:33:03.127664515+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="checking if subscriptions need update" id=xDzpM namespace=openshift-marketplace 2025-12-12T16:33:03.330068273+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="No subscriptions were found in namespace openshift-machine-api" id=6DpYf namespace=openshift-machine-api 2025-12-12T16:33:03.330068273+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="resolving sources" id=UCRm3 namespace=openshift-ovirt-infra 2025-12-12T16:33:03.330068273+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="checking if subscriptions need update" id=UCRm3 namespace=openshift-ovirt-infra 2025-12-12T16:33:03.529427125+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="No subscriptions were found in namespace openshift-marketplace" id=xDzpM namespace=openshift-marketplace 2025-12-12T16:33:03.529466466+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="resolving sources" id=QnSj2 namespace=openshift-oauth-apiserver 2025-12-12T16:33:03.529466466+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="checking if subscriptions need update" id=QnSj2 namespace=openshift-oauth-apiserver 2025-12-12T16:33:03.728448308+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="No subscriptions were found in namespace openshift-ovirt-infra" id=UCRm3 namespace=openshift-ovirt-infra 2025-12-12T16:33:03.728448308+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="resolving sources" id=Q3Lvu namespace=openshift-console-user-settings 2025-12-12T16:33:03.728448308+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="checking if subscriptions need update" id=Q3Lvu namespace=openshift-console-user-settings 2025-12-12T16:33:03.927823601+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="No subscriptions were found in namespace openshift-oauth-apiserver" id=QnSj2 namespace=openshift-oauth-apiserver 2025-12-12T16:33:03.927823601+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="resolving sources" id=r/SF4 namespace=openshift-kube-storage-version-migrator-operator 2025-12-12T16:33:03.927823601+00:00 stderr F time="2025-12-12T16:33:03Z" level=info msg="checking if subscriptions need update" id=r/SF4 namespace=openshift-kube-storage-version-migrator-operator 2025-12-12T16:33:04.128610959+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="No subscriptions were found in namespace openshift-console-user-settings" id=Q3Lvu namespace=openshift-console-user-settings 2025-12-12T16:33:04.128689681+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="resolving sources" id=cHhmb namespace=openshift-openstack-infra 2025-12-12T16:33:04.128689681+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="checking if subscriptions need update" id=cHhmb namespace=openshift-openstack-infra 2025-12-12T16:33:04.327823167+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="No subscriptions were found in namespace openshift-kube-storage-version-migrator-operator" id=r/SF4 namespace=openshift-kube-storage-version-migrator-operator 2025-12-12T16:33:04.327867058+00:00 stderr F 
time="2025-12-12T16:33:04Z" level=info msg="resolving sources" id=uGhtg namespace=default 2025-12-12T16:33:04.327867058+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="checking if subscriptions need update" id=uGhtg namespace=default 2025-12-12T16:33:04.527779704+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="No subscriptions were found in namespace openshift-openstack-infra" id=cHhmb namespace=openshift-openstack-infra 2025-12-12T16:33:04.527779704+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="resolving sources" id=1T2kw namespace=openshift-kni-infra 2025-12-12T16:33:04.527779704+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="checking if subscriptions need update" id=1T2kw namespace=openshift-kni-infra 2025-12-12T16:33:04.727668449+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="No subscriptions were found in namespace default" id=uGhtg namespace=default 2025-12-12T16:33:04.727668449+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="resolving sources" id=+BHxu namespace=openshift-network-diagnostics 2025-12-12T16:33:04.727668449+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="checking if subscriptions need update" id=+BHxu namespace=openshift-network-diagnostics 2025-12-12T16:33:04.928343314+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="No subscriptions were found in namespace openshift-kni-infra" id=1T2kw namespace=openshift-kni-infra 2025-12-12T16:33:04.928343314+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="resolving sources" id=HVZ65 namespace=openshift-operators 2025-12-12T16:33:04.928343314+00:00 stderr F time="2025-12-12T16:33:04Z" level=info msg="checking if subscriptions need update" id=HVZ65 namespace=openshift-operators 2025-12-12T16:33:05.128170238+00:00 stderr F time="2025-12-12T16:33:05Z" level=info msg="No subscriptions were found in namespace openshift-network-diagnostics" id=+BHxu namespace=openshift-network-diagnostics 2025-12-12T16:33:05.128265020+00:00 stderr F time="2025-12-12T16:33:05Z" level=info msg="resolving sources" id=WcZyR namespace=service-telemetry 2025-12-12T16:33:05.128265020+00:00 stderr F time="2025-12-12T16:33:05Z" level=info msg="checking if subscriptions need update" id=WcZyR namespace=service-telemetry 2025-12-12T16:33:05.738082520+00:00 stderr F time="2025-12-12T16:33:05Z" level=info msg="resolving subscriptions in namespace" id=HVZ65 namespace=openshift-operators 2025-12-12T16:33:05.937052232+00:00 stderr F time="2025-12-12T16:33:05Z" level=info msg="checking for existing installplan" channel=unstable id=WcZyR namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:33:05.937052232+00:00 stderr F time="2025-12-12T16:33:05Z" level=info msg="resolving subscriptions in namespace" id=WcZyR namespace=service-telemetry 2025-12-12T16:33:06.138538457+00:00 stderr F time="2025-12-12T16:33:06Z" level=info msg="no subscriptions were updated" id=HVZ65 namespace=openshift-operators 2025-12-12T16:33:06.328279418+00:00 stderr F time="2025-12-12T16:33:06Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:33:06.331202741+00:00 stderr F E1212 16:33:06.331130 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: 
dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:33:06.331375396+00:00 stderr F I1212 16:33:06.331295 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:33:06.332468293+00:00 stderr F time="2025-12-12T16:33:06Z" level=info msg="resolving sources" id=lQBG8 namespace=kube-node-lease 2025-12-12T16:33:06.332468293+00:00 stderr F time="2025-12-12T16:33:06Z" level=info msg="checking if subscriptions need update" id=lQBG8 namespace=kube-node-lease 2025-12-12T16:33:06.727335680+00:00 stderr F time="2025-12-12T16:33:06Z" level=info msg="No subscriptions were found in namespace kube-node-lease" id=lQBG8 namespace=kube-node-lease 2025-12-12T16:33:06.727335680+00:00 stderr F time="2025-12-12T16:33:06Z" level=info msg="resolving sources" id=JdJ8L namespace=openshift-authentication 2025-12-12T16:33:06.727335680+00:00 stderr F time="2025-12-12T16:33:06Z" level=info msg="checking if subscriptions need update" id=JdJ8L namespace=openshift-authentication 2025-12-12T16:33:06.930509077+00:00 stderr F time="2025-12-12T16:33:06Z" level=info msg="resolving sources" id=+GPaI namespace=openshift-authentication-operator 2025-12-12T16:33:06.930509077+00:00 stderr F time="2025-12-12T16:33:06Z" level=info msg="checking if subscriptions need update" id=+GPaI namespace=openshift-authentication-operator 2025-12-12T16:33:07.128220948+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="No subscriptions were found in namespace openshift-authentication" id=JdJ8L namespace=openshift-authentication 2025-12-12T16:33:07.128283119+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="resolving sources" id=ZqGkp namespace=openshift-apiserver-operator 2025-12-12T16:33:07.128283119+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="checking if subscriptions need update" id=ZqGkp namespace=openshift-apiserver-operator 2025-12-12T16:33:07.327248481+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="No subscriptions were found in namespace openshift-authentication-operator" id=+GPaI namespace=openshift-authentication-operator 2025-12-12T16:33:07.327248481+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="resolving sources" id=QIlWd namespace=openshift-kube-storage-version-migrator 2025-12-12T16:33:07.327332114+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="checking if subscriptions need update" id=QIlWd namespace=openshift-kube-storage-version-migrator 2025-12-12T16:33:07.528251785+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="No subscriptions were found in namespace openshift-apiserver-operator" id=ZqGkp namespace=openshift-apiserver-operator 2025-12-12T16:33:07.528345767+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="resolving sources" id=4/jZ/ namespace=openshift-node 2025-12-12T16:33:07.528345767+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="checking if subscriptions need update" id=4/jZ/ namespace=openshift-node 2025-12-12T16:33:07.727103864+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="No subscriptions were found in namespace 
openshift-kube-storage-version-migrator" id=QIlWd namespace=openshift-kube-storage-version-migrator 2025-12-12T16:33:07.727172736+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="resolving sources" id=1AZvE namespace=openshift-operator-lifecycle-manager 2025-12-12T16:33:07.727172736+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="checking if subscriptions need update" id=1AZvE namespace=openshift-operator-lifecycle-manager 2025-12-12T16:33:07.928517218+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="No subscriptions were found in namespace openshift-node" id=4/jZ/ namespace=openshift-node 2025-12-12T16:33:07.928563519+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="resolving sources" id=D2t7z namespace=openshift-apiserver 2025-12-12T16:33:07.928563519+00:00 stderr F time="2025-12-12T16:33:07Z" level=info msg="checking if subscriptions need update" id=D2t7z namespace=openshift-apiserver 2025-12-12T16:33:08.127917160+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="No subscriptions were found in namespace openshift-operator-lifecycle-manager" id=1AZvE namespace=openshift-operator-lifecycle-manager 2025-12-12T16:33:08.127917160+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="resolving sources" id=41HNG namespace=openshift-cluster-machine-approver 2025-12-12T16:33:08.127917160+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="checking if subscriptions need update" id=41HNG namespace=openshift-cluster-machine-approver 2025-12-12T16:33:08.327228601+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="No subscriptions were found in namespace openshift-apiserver" id=D2t7z namespace=openshift-apiserver 2025-12-12T16:33:08.327267632+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="resolving sources" id=Ibi+J namespace=openshift-ingress 2025-12-12T16:33:08.327267632+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="checking if subscriptions need update" id=Ibi+J namespace=openshift-ingress 2025-12-12T16:33:08.527990609+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="No subscriptions were found in namespace openshift-cluster-machine-approver" id=41HNG namespace=openshift-cluster-machine-approver 2025-12-12T16:33:08.527990609+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="resolving sources" id=Kcmzb namespace=openshift-kube-scheduler-operator 2025-12-12T16:33:08.528049290+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="checking if subscriptions need update" id=Kcmzb namespace=openshift-kube-scheduler-operator 2025-12-12T16:33:08.728763046+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="No subscriptions were found in namespace openshift-ingress" id=Ibi+J namespace=openshift-ingress 2025-12-12T16:33:08.728763046+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="resolving sources" id=4HsPB namespace=openshift-route-controller-manager 2025-12-12T16:33:08.728763046+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="checking if subscriptions need update" id=4HsPB namespace=openshift-route-controller-manager 2025-12-12T16:33:08.927286507+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="No subscriptions were found in namespace openshift-kube-scheduler-operator" id=Kcmzb namespace=openshift-kube-scheduler-operator 2025-12-12T16:33:08.927286507+00:00 stderr F time="2025-12-12T16:33:08Z" level=info msg="resolving sources" id=r0a2m namespace=openshift-service-ca 2025-12-12T16:33:08.927286507+00:00 stderr F time="2025-12-12T16:33:08Z" level=info 
msg="checking if subscriptions need update" id=r0a2m namespace=openshift-service-ca 2025-12-12T16:33:09.127170712+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="No subscriptions were found in namespace openshift-route-controller-manager" id=4HsPB namespace=openshift-route-controller-manager 2025-12-12T16:33:09.127170712+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="resolving sources" id=k1gdg namespace=openshift-monitoring 2025-12-12T16:33:09.127267945+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="checking if subscriptions need update" id=k1gdg namespace=openshift-monitoring 2025-12-12T16:33:09.328031832+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="No subscriptions were found in namespace openshift-service-ca" id=r0a2m namespace=openshift-service-ca 2025-12-12T16:33:09.328031832+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="resolving sources" id=DvC1H namespace=openshift-multus 2025-12-12T16:33:09.328031832+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="checking if subscriptions need update" id=DvC1H namespace=openshift-multus 2025-12-12T16:33:09.527649450+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="No subscriptions were found in namespace openshift-monitoring" id=k1gdg namespace=openshift-monitoring 2025-12-12T16:33:09.527649450+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="resolving sources" id=MyP3w namespace=openshift-cloud-network-config-controller 2025-12-12T16:33:09.527692911+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="checking if subscriptions need update" id=MyP3w namespace=openshift-cloud-network-config-controller 2025-12-12T16:33:09.728102970+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="No subscriptions were found in namespace openshift-multus" id=DvC1H namespace=openshift-multus 2025-12-12T16:33:09.732464059+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="resolving sources" id=KMt05 namespace=openshift-cluster-version 2025-12-12T16:33:09.732464059+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="checking if subscriptions need update" id=KMt05 namespace=openshift-cluster-version 2025-12-12T16:33:09.926042007+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="No subscriptions were found in namespace openshift-cloud-network-config-controller" id=MyP3w namespace=openshift-cloud-network-config-controller 2025-12-12T16:33:09.926042007+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="resolving sources" id=Efb4r namespace=openshift-config-operator 2025-12-12T16:33:09.926042007+00:00 stderr F time="2025-12-12T16:33:09Z" level=info msg="checking if subscriptions need update" id=Efb4r namespace=openshift-config-operator 2025-12-12T16:33:10.126870035+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="No subscriptions were found in namespace openshift-cluster-version" id=KMt05 namespace=openshift-cluster-version 2025-12-12T16:33:10.126918906+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="resolving sources" id=eCkID namespace=openshift-controller-manager-operator 2025-12-12T16:33:10.126918906+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="checking if subscriptions need update" id=eCkID namespace=openshift-controller-manager-operator 2025-12-12T16:33:10.327341945+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="No subscriptions were found in namespace openshift-config-operator" id=Efb4r namespace=openshift-config-operator 2025-12-12T16:33:10.327341945+00:00 stderr F 
time="2025-12-12T16:33:10Z" level=info msg="resolving sources" id=hlqb3 namespace=openshift-ingress-operator 2025-12-12T16:33:10.327422667+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="checking if subscriptions need update" id=hlqb3 namespace=openshift-ingress-operator 2025-12-12T16:33:10.527095367+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="No subscriptions were found in namespace openshift-controller-manager-operator" id=eCkID namespace=openshift-controller-manager-operator 2025-12-12T16:33:10.527095367+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="resolving sources" id=nn3VC namespace=openshift-kube-apiserver-operator 2025-12-12T16:33:10.527095367+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="checking if subscriptions need update" id=nn3VC namespace=openshift-kube-apiserver-operator 2025-12-12T16:33:10.728618353+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="No subscriptions were found in namespace openshift-ingress-operator" id=hlqb3 namespace=openshift-ingress-operator 2025-12-12T16:33:10.728618353+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="resolving sources" id=3fwlb namespace=openshift-cloud-platform-infra 2025-12-12T16:33:10.728618353+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="checking if subscriptions need update" id=3fwlb namespace=openshift-cloud-platform-infra 2025-12-12T16:33:10.926959639+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="No subscriptions were found in namespace openshift-kube-apiserver-operator" id=nn3VC namespace=openshift-kube-apiserver-operator 2025-12-12T16:33:10.926959639+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="resolving sources" id=svBrp namespace=openshift-dns-operator 2025-12-12T16:33:10.926959639+00:00 stderr F time="2025-12-12T16:33:10Z" level=info msg="checking if subscriptions need update" id=svBrp namespace=openshift-dns-operator 2025-12-12T16:33:11.126451134+00:00 stderr F time="2025-12-12T16:33:11Z" level=info msg="No subscriptions were found in namespace openshift-cloud-platform-infra" id=3fwlb namespace=openshift-cloud-platform-infra 2025-12-12T16:33:11.126488765+00:00 stderr F time="2025-12-12T16:33:11Z" level=info msg="resolving sources" id=p/RPj namespace=openshift-kube-scheduler 2025-12-12T16:33:11.126520746+00:00 stderr F time="2025-12-12T16:33:11Z" level=info msg="checking if subscriptions need update" id=p/RPj namespace=openshift-kube-scheduler 2025-12-12T16:33:11.328269948+00:00 stderr F time="2025-12-12T16:33:11Z" level=info msg="No subscriptions were found in namespace openshift-dns-operator" id=svBrp namespace=openshift-dns-operator 2025-12-12T16:33:11.328336239+00:00 stderr F time="2025-12-12T16:33:11Z" level=info msg="resolving sources" id=tP1mm namespace=service-telemetry 2025-12-12T16:33:11.328336239+00:00 stderr F time="2025-12-12T16:33:11Z" level=info msg="checking if subscriptions need update" id=tP1mm namespace=service-telemetry 2025-12-12T16:33:11.528518962+00:00 stderr F time="2025-12-12T16:33:11Z" level=info msg="No subscriptions were found in namespace openshift-kube-scheduler" id=p/RPj namespace=openshift-kube-scheduler 2025-12-12T16:33:11.933685717+00:00 stderr F time="2025-12-12T16:33:11Z" level=info msg="checking for existing installplan" channel=unstable id=tP1mm namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:33:11.933685717+00:00 stderr F time="2025-12-12T16:33:11Z" level=info msg="resolving subscriptions in 
namespace" id=tP1mm namespace=service-telemetry 2025-12-12T16:33:12.129620524+00:00 stderr F time="2025-12-12T16:33:12Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:33:12.134744122+00:00 stderr F E1212 16:33:12.134650 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:33:12.134825084+00:00 stderr F I1212 16:33:12.134774 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:33:12.146238599+00:00 stderr F time="2025-12-12T16:33:12Z" level=info msg="resolving sources" id=AvWek namespace=service-telemetry 2025-12-12T16:33:12.146238599+00:00 stderr F time="2025-12-12T16:33:12Z" level=info msg="checking if subscriptions need update" id=AvWek namespace=service-telemetry 2025-12-12T16:33:12.532883861+00:00 stderr F time="2025-12-12T16:33:12Z" level=info msg="checking for existing installplan" channel=unstable id=AvWek namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:33:12.532883861+00:00 stderr F time="2025-12-12T16:33:12Z" level=info msg="resolving subscriptions in namespace" id=AvWek namespace=service-telemetry 2025-12-12T16:33:12.728038138+00:00 stderr F time="2025-12-12T16:33:12Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:33:12.730653214+00:00 stderr F E1212 16:33:12.730598 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:33:12.730754446+00:00 stderr F I1212 16:33:12.730697 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:33:12.752342456+00:00 stderr F time="2025-12-12T16:33:12Z" level=info msg="resolving sources" id=fTlf1 namespace=service-telemetry 2025-12-12T16:33:12.752342456+00:00 stderr F time="2025-12-12T16:33:12Z" level=info msg="checking if subscriptions need update" id=fTlf1 namespace=service-telemetry 2025-12-12T16:33:13.133437180+00:00 stderr F time="2025-12-12T16:33:13Z" level=info msg="checking for existing installplan" channel=unstable id=fTlf1 
namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:33:13.133437180+00:00 stderr F time="2025-12-12T16:33:13Z" level=info msg="resolving subscriptions in namespace" id=fTlf1 namespace=service-telemetry 2025-12-12T16:33:13.329224793+00:00 stderr F time="2025-12-12T16:33:13Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:33:13.331848918+00:00 stderr F E1212 16:33:13.331734 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:33:13.332045803+00:00 stderr F I1212 16:33:13.331965 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:33:13.373292704+00:00 stderr F time="2025-12-12T16:33:13Z" level=info msg="resolving sources" id=4jClg namespace=service-telemetry 2025-12-12T16:33:13.373292704+00:00 stderr F time="2025-12-12T16:33:13Z" level=info msg="checking if subscriptions need update" id=4jClg namespace=service-telemetry 2025-12-12T16:33:13.732115860+00:00 stderr F time="2025-12-12T16:33:13Z" level=info msg="checking for existing installplan" channel=unstable id=4jClg namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:33:13.732115860+00:00 stderr F time="2025-12-12T16:33:13Z" level=info msg="resolving subscriptions in namespace" id=4jClg namespace=service-telemetry 2025-12-12T16:33:13.928383004+00:00 stderr F time="2025-12-12T16:33:13Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:33:13.931447611+00:00 stderr F E1212 16:33:13.931342 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:33:13.931503522+00:00 stderr F I1212 16:33:13.931449 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:33:14.012992349+00:00 stderr F time="2025-12-12T16:33:14Z" level=info msg="resolving sources" id=bCicY namespace=service-telemetry 2025-12-12T16:33:14.012992349+00:00 stderr F time="2025-12-12T16:33:14Z" level=info 
msg="checking if subscriptions need update" id=bCicY namespace=service-telemetry 2025-12-12T16:33:14.333403136+00:00 stderr F time="2025-12-12T16:33:14Z" level=info msg="checking for existing installplan" channel=unstable id=bCicY namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:33:14.333403136+00:00 stderr F time="2025-12-12T16:33:14Z" level=info msg="resolving subscriptions in namespace" id=bCicY namespace=service-telemetry 2025-12-12T16:33:14.529122117+00:00 stderr F time="2025-12-12T16:33:14Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:33:14.532486481+00:00 stderr F E1212 16:33:14.532430 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:33:14.532611734+00:00 stderr F I1212 16:33:14.532565 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:33:14.694191542+00:00 stderr F time="2025-12-12T16:33:14Z" level=info msg="resolving sources" id=PPmGO namespace=service-telemetry 2025-12-12T16:33:14.694191542+00:00 stderr F time="2025-12-12T16:33:14Z" level=info msg="checking if subscriptions need update" id=PPmGO namespace=service-telemetry 2025-12-12T16:33:14.933721148+00:00 stderr F time="2025-12-12T16:33:14Z" level=info msg="checking for existing installplan" channel=unstable id=PPmGO namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:33:14.933721148+00:00 stderr F time="2025-12-12T16:33:14Z" level=info msg="resolving subscriptions in namespace" id=PPmGO namespace=service-telemetry 2025-12-12T16:33:15.127723035+00:00 stderr F time="2025-12-12T16:33:15Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:33:15.131067279+00:00 stderr F E1212 16:33:15.131025 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:33:15.131385277+00:00 stderr F I1212 16:33:15.131303 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection 
refused" 2025-12-12T16:33:15.453149848+00:00 stderr F time="2025-12-12T16:33:15Z" level=info msg="resolving sources" id=De/af namespace=service-telemetry 2025-12-12T16:33:15.453149848+00:00 stderr F time="2025-12-12T16:33:15Z" level=info msg="checking if subscriptions need update" id=De/af namespace=service-telemetry 2025-12-12T16:33:15.533107766+00:00 stderr F time="2025-12-12T16:33:15Z" level=info msg="checking for existing installplan" channel=unstable id=De/af namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:33:15.533107766+00:00 stderr F time="2025-12-12T16:33:15Z" level=info msg="resolving subscriptions in namespace" id=De/af namespace=service-telemetry 2025-12-12T16:33:15.726752595+00:00 stderr F time="2025-12-12T16:33:15Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:33:15.729337160+00:00 stderr F E1212 16:33:15.729304 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:33:15.729428812+00:00 stderr F I1212 16:33:15.729373 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:33:16.371623071+00:00 stderr F time="2025-12-12T16:33:16Z" level=info msg="resolving sources" id=NCE7t namespace=service-telemetry 2025-12-12T16:33:16.371623071+00:00 stderr F time="2025-12-12T16:33:16Z" level=info msg="checking if subscriptions need update" id=NCE7t namespace=service-telemetry 2025-12-12T16:33:16.385633161+00:00 stderr F time="2025-12-12T16:33:16Z" level=info msg="checking for existing installplan" channel=unstable id=NCE7t namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:33:16.385633161+00:00 stderr F time="2025-12-12T16:33:16Z" level=info msg="resolving subscriptions in namespace" id=NCE7t namespace=service-telemetry 2025-12-12T16:33:16.389410856+00:00 stderr F time="2025-12-12T16:33:16Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:33:16.391093368+00:00 stderr F I1212 16:33:16.391039 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:25.669851237+00:00 stderr F time="2025-12-12T16:34:25Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators 
catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=keo4s 2025-12-12T16:34:25.669851237+00:00 stderr F time="2025-12-12T16:34:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=keo4s 2025-12-12T16:34:25.677983500+00:00 stderr F time="2025-12-12T16:34:25Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=keo4s 2025-12-12T16:34:25.677983500+00:00 stderr F time="2025-12-12T16:34:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=keo4s 2025-12-12T16:34:25.678061312+00:00 stderr F time="2025-12-12T16:34:25Z" level=info msg="catalog update required at 2025-12-12 16:34:25.678016371 +0000 UTC m=+1058.793774354" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=keo4s 2025-12-12T16:34:25.685765084+00:00 stderr F I1212 16:34:25.685694 1 warnings.go:110] "Warning: would violate PodSecurity \"restricted:latest\": allowPrivilegeEscalation != false (container \"registry-server\" must set securityContext.allowPrivilegeEscalation=false), unrestricted capabilities (container \"registry-server\" must set securityContext.capabilities.drop=[\"ALL\"]), runAsNonRoot != true (pod or container \"registry-server\" must set securityContext.runAsNonRoot=true), seccompProfile (pod or container \"registry-server\" must set securityContext.seccompProfile.type to \"RuntimeDefault\" or \"Localhost\")" 2025-12-12T16:34:25.685882907+00:00 stderr F time="2025-12-12T16:34:25Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=keo4s 2025-12-12T16:34:25.685882907+00:00 stderr F time="2025-12-12T16:34:25Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=keo4s 2025-12-12T16:34:25.685894107+00:00 stderr F time="2025-12-12T16:34:25Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=keo4s 2025-12-12T16:34:25.692193765+00:00 stderr F time="2025-12-12T16:34:25Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:34:25.692518103+00:00 stderr F time="2025-12-12T16:34:25Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:34:25.699281962+00:00 stderr F time="2025-12-12T16:34:25Z" level=info 
msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=giX2c 2025-12-12T16:34:25.699281962+00:00 stderr F time="2025-12-12T16:34:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=giX2c 2025-12-12T16:34:25.865624929+00:00 stderr F time="2025-12-12T16:34:25Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:34:25.865624929+00:00 stderr F time="2025-12-12T16:34:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:34:26.068157111+00:00 stderr F time="2025-12-12T16:34:26Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:34:26.068157111+00:00 stderr F time="2025-12-12T16:34:26Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:34:27.068920421+00:00 stderr F time="2025-12-12T16:34:27Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:34:27.068920421+00:00 stderr F time="2025-12-12T16:34:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:34:27.266764466+00:00 stderr F time="2025-12-12T16:34:27Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:34:27.266764466+00:00 stderr F time="2025-12-12T16:34:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:34:27.469226576+00:00 stderr F time="2025-12-12T16:34:27Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=giX2c 2025-12-12T16:34:27.469226576+00:00 stderr F time="2025-12-12T16:34:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=giX2c 2025-12-12T16:34:27.469226576+00:00 stderr F time="2025-12-12T16:34:27Z" level=error msg="error ensuring registry 
server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=giX2c 2025-12-12T16:34:27.469226576+00:00 stderr F time="2025-12-12T16:34:27Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=giX2c 2025-12-12T16:34:27.469226576+00:00 stderr F time="2025-12-12T16:34:27Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=giX2c 2025-12-12T16:34:28.268881760+00:00 stderr F time="2025-12-12T16:34:28Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:34:28.268881760+00:00 stderr F time="2025-12-12T16:34:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:34:28.467561295+00:00 stderr F time="2025-12-12T16:34:28Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:34:28.467561295+00:00 stderr F time="2025-12-12T16:34:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:34:28.669407479+00:00 stderr F time="2025-12-12T16:34:28Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=t4OrR 2025-12-12T16:34:28.669407479+00:00 stderr F time="2025-12-12T16:34:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=t4OrR 2025-12-12T16:34:29.468550309+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:34:29.468550309+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:34:29.468851937+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:29.469000880+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="resolving sources" id=grfaa namespace=service-telemetry 2025-12-12T16:34:29.469000880+00:00 stderr F 
time="2025-12-12T16:34:29Z" level=info msg="checking if subscriptions need update" id=grfaa namespace=service-telemetry 2025-12-12T16:34:29.484344424+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="checking for existing installplan" channel=unstable id=grfaa namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:29.484451297+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="resolving subscriptions in namespace" id=grfaa namespace=service-telemetry 2025-12-12T16:34:29.489027031+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:29.492465827+00:00 stderr F E1212 16:34:29.492422 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:29.492625291+00:00 stderr F I1212 16:34:29.492580 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:29.499312848+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="resolving sources" id=AaTNz namespace=service-telemetry 2025-12-12T16:34:29.499312848+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="checking if subscriptions need update" id=AaTNz namespace=service-telemetry 2025-12-12T16:34:29.517642416+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="checking for existing installplan" channel=unstable id=AaTNz namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:29.517642416+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="resolving subscriptions in namespace" id=AaTNz namespace=service-telemetry 2025-12-12T16:34:29.522133538+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:29.524680762+00:00 stderr F E1212 16:34:29.524606 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:29.524680762+00:00 stderr F I1212 16:34:29.524662 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 
10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:29.536868586+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="resolving sources" id=pUXmk namespace=service-telemetry 2025-12-12T16:34:29.536868586+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="checking if subscriptions need update" id=pUXmk namespace=service-telemetry 2025-12-12T16:34:29.551900842+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="checking for existing installplan" channel=unstable id=pUXmk namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:29.552008935+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="resolving subscriptions in namespace" id=pUXmk namespace=service-telemetry 2025-12-12T16:34:29.555685507+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:29.562279181+00:00 stderr F E1212 16:34:29.562206 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:29.562439185+00:00 stderr F I1212 16:34:29.562276 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:29.584227930+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="resolving sources" id=pK7y9 namespace=service-telemetry 2025-12-12T16:34:29.584227930+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="checking if subscriptions need update" id=pK7y9 namespace=service-telemetry 2025-12-12T16:34:29.666380343+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:34:29.666455765+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:34:29.676915916+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="checking for existing installplan" channel=unstable id=pK7y9 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:29.676976668+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="resolving subscriptions in namespace" id=pK7y9 namespace=service-telemetry 2025-12-12T16:34:29.874573996+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:29.876413262+00:00 stderr F E1212 16:34:29.876340 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using 
catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:29.876470243+00:00 stderr F I1212 16:34:29.876430 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:29.877671933+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="resolving sources" id=4Rrwf namespace=service-telemetry 2025-12-12T16:34:29.877671933+00:00 stderr F time="2025-12-12T16:34:29Z" level=info msg="checking if subscriptions need update" id=4Rrwf namespace=service-telemetry 2025-12-12T16:34:30.266371337+00:00 stderr F time="2025-12-12T16:34:30Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=t4OrR 2025-12-12T16:34:30.266554532+00:00 stderr F time="2025-12-12T16:34:30Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=t4OrR 2025-12-12T16:34:30.266845129+00:00 stderr F time="2025-12-12T16:34:30Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=t4OrR 2025-12-12T16:34:30.266855709+00:00 stderr F time="2025-12-12T16:34:30Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=t4OrR 2025-12-12T16:34:30.266871020+00:00 stderr F time="2025-12-12T16:34:30Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=t4OrR 2025-12-12T16:34:30.277376732+00:00 stderr F time="2025-12-12T16:34:30Z" level=info msg="checking for existing installplan" channel=unstable id=4Rrwf namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:30.277426903+00:00 stderr F time="2025-12-12T16:34:30Z" level=info msg="resolving subscriptions in namespace" id=4Rrwf namespace=service-telemetry 2025-12-12T16:34:30.476356595+00:00 stderr F time="2025-12-12T16:34:30Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:30.478137719+00:00 stderr F E1212 16:34:30.478094 1 
queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:30.478322604+00:00 stderr F I1212 16:34:30.478220 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:30.479470692+00:00 stderr F time="2025-12-12T16:34:30Z" level=info msg="resolving sources" id=FILGh namespace=service-telemetry 2025-12-12T16:34:30.479470692+00:00 stderr F time="2025-12-12T16:34:30Z" level=info msg="checking if subscriptions need update" id=FILGh namespace=service-telemetry 2025-12-12T16:34:30.667054850+00:00 stderr F time="2025-12-12T16:34:30Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=RNslN 2025-12-12T16:34:30.667054850+00:00 stderr F time="2025-12-12T16:34:30Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=RNslN 2025-12-12T16:34:30.877451038+00:00 stderr F time="2025-12-12T16:34:30Z" level=info msg="checking for existing installplan" channel=unstable id=FILGh namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:30.877547931+00:00 stderr F time="2025-12-12T16:34:30Z" level=info msg="resolving subscriptions in namespace" id=FILGh namespace=service-telemetry 2025-12-12T16:34:31.073039972+00:00 stderr F time="2025-12-12T16:34:31Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:31.076076058+00:00 stderr F E1212 16:34:31.076005 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:31.076173041+00:00 stderr F I1212 16:34:31.076106 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:31.077405292+00:00 stderr 
F time="2025-12-12T16:34:31Z" level=info msg="resolving sources" id=RvTsH namespace=service-telemetry 2025-12-12T16:34:31.077405292+00:00 stderr F time="2025-12-12T16:34:31Z" level=info msg="checking if subscriptions need update" id=RvTsH namespace=service-telemetry 2025-12-12T16:34:31.267387485+00:00 stderr F time="2025-12-12T16:34:31Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=RNslN 2025-12-12T16:34:31.267387485+00:00 stderr F time="2025-12-12T16:34:31Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=RNslN 2025-12-12T16:34:31.267475567+00:00 stderr F time="2025-12-12T16:34:31Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=RNslN 2025-12-12T16:34:31.267475567+00:00 stderr F time="2025-12-12T16:34:31Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=RNslN 2025-12-12T16:34:31.267475567+00:00 stderr F time="2025-12-12T16:34:31Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=RNslN 2025-12-12T16:34:31.479806752+00:00 stderr F time="2025-12-12T16:34:31Z" level=info msg="checking for existing installplan" channel=unstable id=RvTsH namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:31.479806752+00:00 stderr F time="2025-12-12T16:34:31Z" level=info msg="resolving subscriptions in namespace" id=RvTsH namespace=service-telemetry 2025-12-12T16:34:31.673144380+00:00 stderr F time="2025-12-12T16:34:31Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:31.676159786+00:00 stderr F E1212 16:34:31.676108 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:31.676393292+00:00 stderr F I1212 16:34:31.676319 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 
10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:31.677323875+00:00 stderr F time="2025-12-12T16:34:31Z" level=info msg="resolving sources" id=f5Ucm namespace=service-telemetry 2025-12-12T16:34:31.677323875+00:00 stderr F time="2025-12-12T16:34:31Z" level=info msg="checking if subscriptions need update" id=f5Ucm namespace=service-telemetry 2025-12-12T16:34:32.077621603+00:00 stderr F time="2025-12-12T16:34:32Z" level=info msg="checking for existing installplan" channel=unstable id=f5Ucm namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:32.077621603+00:00 stderr F time="2025-12-12T16:34:32Z" level=info msg="resolving subscriptions in namespace" id=f5Ucm namespace=service-telemetry 2025-12-12T16:34:32.274639273+00:00 stderr F time="2025-12-12T16:34:32Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:32.277951236+00:00 stderr F E1212 16:34:32.277910 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:32.278081489+00:00 stderr F I1212 16:34:32.278056 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:32.279363122+00:00 stderr F time="2025-12-12T16:34:32Z" level=info msg="resolving sources" id=KfzDs namespace=service-telemetry 2025-12-12T16:34:32.279363122+00:00 stderr F time="2025-12-12T16:34:32Z" level=info msg="checking if subscriptions need update" id=KfzDs namespace=service-telemetry 2025-12-12T16:34:32.677428062+00:00 stderr F time="2025-12-12T16:34:32Z" level=info msg="checking for existing installplan" channel=unstable id=KfzDs namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:32.677428062+00:00 stderr F time="2025-12-12T16:34:32Z" level=info msg="resolving subscriptions in namespace" id=KfzDs namespace=service-telemetry 2025-12-12T16:34:32.875083388+00:00 stderr F time="2025-12-12T16:34:32Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:32.877704274+00:00 stderr F I1212 16:34:32.877619 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:32.920411967+00:00 stderr F time="2025-12-12T16:34:32Z" level=info msg="resolving sources" id=bbj0a namespace=service-telemetry 
2025-12-12T16:34:32.920488339+00:00 stderr F time="2025-12-12T16:34:32Z" level=info msg="checking if subscriptions need update" id=bbj0a namespace=service-telemetry 2025-12-12T16:34:33.278284399+00:00 stderr F time="2025-12-12T16:34:33Z" level=info msg="checking for existing installplan" channel=unstable id=bbj0a namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:33.278353821+00:00 stderr F time="2025-12-12T16:34:33Z" level=info msg="resolving subscriptions in namespace" id=bbj0a namespace=service-telemetry 2025-12-12T16:34:33.474305254+00:00 stderr F time="2025-12-12T16:34:33Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:33.477170976+00:00 stderr F E1212 16:34:33.477105 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:33.477269198+00:00 stderr F I1212 16:34:33.477218 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:33.484060369+00:00 stderr F time="2025-12-12T16:34:33Z" level=info msg="resolving sources" id=1hBD0 namespace=service-telemetry 2025-12-12T16:34:33.484060369+00:00 stderr F time="2025-12-12T16:34:33Z" level=info msg="checking if subscriptions need update" id=1hBD0 namespace=service-telemetry 2025-12-12T16:34:33.879189307+00:00 stderr F time="2025-12-12T16:34:33Z" level=info msg="checking for existing installplan" channel=unstable id=1hBD0 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:33.879303850+00:00 stderr F time="2025-12-12T16:34:33Z" level=info msg="resolving subscriptions in namespace" id=1hBD0 namespace=service-telemetry 2025-12-12T16:34:34.073680644+00:00 stderr F time="2025-12-12T16:34:34Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:34.076311760+00:00 stderr F E1212 16:34:34.076264 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:34.076400262+00:00 stderr F I1212 16:34:34.076363 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = 
"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:34.088002994+00:00 stderr F time="2025-12-12T16:34:34Z" level=info msg="resolving sources" id=KNSZn namespace=service-telemetry 2025-12-12T16:34:34.088104326+00:00 stderr F time="2025-12-12T16:34:34Z" level=info msg="checking if subscriptions need update" id=KNSZn namespace=service-telemetry 2025-12-12T16:34:34.482988278+00:00 stderr F time="2025-12-12T16:34:34Z" level=info msg="checking for existing installplan" channel=unstable id=KNSZn namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:34.483101940+00:00 stderr F time="2025-12-12T16:34:34Z" level=info msg="resolving subscriptions in namespace" id=KNSZn namespace=service-telemetry 2025-12-12T16:34:34.674607922+00:00 stderr F time="2025-12-12T16:34:34Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:34.677720180+00:00 stderr F E1212 16:34:34.677618 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:34.677804482+00:00 stderr F I1212 16:34:34.677734 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:34.699390985+00:00 stderr F time="2025-12-12T16:34:34Z" level=info msg="resolving sources" id=Ctoag namespace=service-telemetry 2025-12-12T16:34:34.699390985+00:00 stderr F time="2025-12-12T16:34:34Z" level=info msg="checking if subscriptions need update" id=Ctoag namespace=service-telemetry 2025-12-12T16:34:35.080275014+00:00 stderr F time="2025-12-12T16:34:35Z" level=info msg="checking for existing installplan" channel=unstable id=Ctoag namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:35.080275014+00:00 stderr F time="2025-12-12T16:34:35Z" level=info msg="resolving subscriptions in namespace" id=Ctoag namespace=service-telemetry 2025-12-12T16:34:35.274100674+00:00 stderr F time="2025-12-12T16:34:35Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:35.278086434+00:00 stderr F E1212 16:34:35.278041 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:35.278337651+00:00 stderr F I1212 16:34:35.278284 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", 
ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:35.319678389+00:00 stderr F time="2025-12-12T16:34:35Z" level=info msg="resolving sources" id=izraN namespace=service-telemetry 2025-12-12T16:34:35.319776442+00:00 stderr F time="2025-12-12T16:34:35Z" level=info msg="checking if subscriptions need update" id=izraN namespace=service-telemetry 2025-12-12T16:34:35.680429383+00:00 stderr F time="2025-12-12T16:34:35Z" level=info msg="checking for existing installplan" channel=unstable id=izraN namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:35.680429383+00:00 stderr F time="2025-12-12T16:34:35Z" level=info msg="resolving subscriptions in namespace" id=izraN namespace=service-telemetry 2025-12-12T16:34:35.873712650+00:00 stderr F time="2025-12-12T16:34:35Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:35.878014428+00:00 stderr F E1212 16:34:35.877946 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:35.878125921+00:00 stderr F I1212 16:34:35.878075 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:35.959647629+00:00 stderr F time="2025-12-12T16:34:35Z" level=info msg="resolving sources" id=kKqMZ namespace=service-telemetry 2025-12-12T16:34:35.959647629+00:00 stderr F time="2025-12-12T16:34:35Z" level=info msg="checking if subscriptions need update" id=kKqMZ namespace=service-telemetry 2025-12-12T16:34:36.277072753+00:00 stderr F time="2025-12-12T16:34:36Z" level=info msg="checking for existing installplan" channel=unstable id=kKqMZ namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:36.277072753+00:00 stderr F time="2025-12-12T16:34:36Z" level=info msg="resolving subscriptions in namespace" id=kKqMZ namespace=service-telemetry 2025-12-12T16:34:36.474307669+00:00 stderr F time="2025-12-12T16:34:36Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:36.477562391+00:00 stderr F E1212 16:34:36.477495 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" 
logger="UnhandledError" 2025-12-12T16:34:36.477731395+00:00 stderr F I1212 16:34:36.477605 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:36.639274654+00:00 stderr F time="2025-12-12T16:34:36Z" level=info msg="resolving sources" id=gwd62 namespace=service-telemetry 2025-12-12T16:34:36.639399677+00:00 stderr F time="2025-12-12T16:34:36Z" level=info msg="checking if subscriptions need update" id=gwd62 namespace=service-telemetry 2025-12-12T16:34:36.877594992+00:00 stderr F time="2025-12-12T16:34:36Z" level=info msg="checking for existing installplan" channel=unstable id=gwd62 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:36.877594992+00:00 stderr F time="2025-12-12T16:34:36Z" level=info msg="resolving subscriptions in namespace" id=gwd62 namespace=service-telemetry 2025-12-12T16:34:37.074603521+00:00 stderr F time="2025-12-12T16:34:37Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:37.080325875+00:00 stderr F E1212 16:34:37.080278 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:37.080399397+00:00 stderr F I1212 16:34:37.080350 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:37.402573752+00:00 stderr F time="2025-12-12T16:34:37Z" level=info msg="resolving sources" id=K/spN namespace=service-telemetry 2025-12-12T16:34:37.402573752+00:00 stderr F time="2025-12-12T16:34:37Z" level=info msg="checking if subscriptions need update" id=K/spN namespace=service-telemetry 2025-12-12T16:34:37.479540275+00:00 stderr F time="2025-12-12T16:34:37Z" level=info msg="checking for existing installplan" channel=unstable id=K/spN namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:37.479540275+00:00 stderr F time="2025-12-12T16:34:37Z" level=info msg="resolving subscriptions in namespace" id=K/spN namespace=service-telemetry 2025-12-12T16:34:37.675566551+00:00 stderr F time="2025-12-12T16:34:37Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:37.678532485+00:00 stderr F E1212 16:34:37.678497 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using 
catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:34:37.678705920+00:00 stderr F I1212 16:34:37.678623 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:38.320521615+00:00 stderr F time="2025-12-12T16:34:38Z" level=info msg="resolving sources" id=/PujC namespace=service-telemetry 2025-12-12T16:34:38.320521615+00:00 stderr F time="2025-12-12T16:34:38Z" level=info msg="checking if subscriptions need update" id=/PujC namespace=service-telemetry 2025-12-12T16:34:38.337787399+00:00 stderr F time="2025-12-12T16:34:38Z" level=info msg="checking for existing installplan" channel=unstable id=/PujC namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:34:38.337787399+00:00 stderr F time="2025-12-12T16:34:38Z" level=info msg="resolving subscriptions in namespace" id=/PujC namespace=service-telemetry 2025-12-12T16:34:38.343556684+00:00 stderr F time="2025-12-12T16:34:38Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:34:38.348911559+00:00 stderr F I1212 16:34:38.348838 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:34:51.398313300+00:00 stderr F time="2025-12-12T16:34:51Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=xbS6v 2025-12-12T16:34:51.398313300+00:00 stderr F time="2025-12-12T16:34:51Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=xbS6v 2025-12-12T16:34:51.408019314+00:00 stderr F time="2025-12-12T16:34:51Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=xbS6v 2025-12-12T16:34:51.408019314+00:00 stderr F time="2025-12-12T16:34:51Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry 
correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=xbS6v 2025-12-12T16:34:51.408054365+00:00 stderr F time="2025-12-12T16:34:51Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=xbS6v 2025-12-12T16:34:51.408054365+00:00 stderr F time="2025-12-12T16:34:51Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=xbS6v 2025-12-12T16:34:51.408054365+00:00 stderr F time="2025-12-12T16:34:51Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=xbS6v 2025-12-12T16:34:55.695981181+00:00 stderr F time="2025-12-12T16:34:55Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=CIUjM 2025-12-12T16:34:55.695981181+00:00 stderr F time="2025-12-12T16:34:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=CIUjM 2025-12-12T16:34:55.708398333+00:00 stderr F time="2025-12-12T16:34:55Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=CIUjM 2025-12-12T16:34:55.708398333+00:00 stderr F time="2025-12-12T16:34:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=CIUjM 2025-12-12T16:34:55.708556777+00:00 stderr F time="2025-12-12T16:34:55Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=CIUjM 2025-12-12T16:34:55.708556777+00:00 stderr F time="2025-12-12T16:34:55Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=CIUjM 2025-12-12T16:34:55.708600328+00:00 stderr F time="2025-12-12T16:34:55Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" 
catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=CIUjM 2025-12-12T16:35:03.399117206+00:00 stderr F time="2025-12-12T16:35:03Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=aTzrm 2025-12-12T16:35:03.399117206+00:00 stderr F time="2025-12-12T16:35:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=aTzrm 2025-12-12T16:35:03.410858451+00:00 stderr F time="2025-12-12T16:35:03Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=aTzrm 2025-12-12T16:35:03.410858451+00:00 stderr F time="2025-12-12T16:35:03Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=aTzrm 2025-12-12T16:35:03.410922002+00:00 stderr F time="2025-12-12T16:35:03Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=aTzrm 2025-12-12T16:35:03.410922002+00:00 stderr F time="2025-12-12T16:35:03Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=aTzrm 2025-12-12T16:35:03.410922002+00:00 stderr F time="2025-12-12T16:35:03Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=aTzrm 2025-12-12T16:35:16.396684420+00:00 stderr F time="2025-12-12T16:35:16Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=KsuH7 2025-12-12T16:35:16.396684420+00:00 stderr F time="2025-12-12T16:35:16Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=KsuH7 2025-12-12T16:35:16.407381939+00:00 stderr F time="2025-12-12T16:35:16Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts 
current-pod.namespace=service-telemetry id=KsuH7 2025-12-12T16:35:16.407381939+00:00 stderr F time="2025-12-12T16:35:16Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=KsuH7 2025-12-12T16:35:16.407381939+00:00 stderr F time="2025-12-12T16:35:16Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=KsuH7 2025-12-12T16:35:16.407381939+00:00 stderr F time="2025-12-12T16:35:16Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=KsuH7 2025-12-12T16:35:16.407451901+00:00 stderr F time="2025-12-12T16:35:16Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=KsuH7 2025-12-12T16:35:25.717333005+00:00 stderr F time="2025-12-12T16:35:25Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=9c9MI 2025-12-12T16:35:25.717333005+00:00 stderr F time="2025-12-12T16:35:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=9c9MI 2025-12-12T16:35:25.730372282+00:00 stderr F time="2025-12-12T16:35:25Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=9c9MI 2025-12-12T16:35:25.730372282+00:00 stderr F time="2025-12-12T16:35:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=9c9MI 2025-12-12T16:35:25.730372282+00:00 stderr F time="2025-12-12T16:35:25Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=9c9MI 2025-12-12T16:35:25.730372282+00:00 stderr F time="2025-12-12T16:35:25Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: 
infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=9c9MI 2025-12-12T16:35:25.730372282+00:00 stderr F time="2025-12-12T16:35:25Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=9c9MI 2025-12-12T16:35:28.389042543+00:00 stderr F time="2025-12-12T16:35:28Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=OM+FP 2025-12-12T16:35:28.389042543+00:00 stderr F time="2025-12-12T16:35:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=OM+FP 2025-12-12T16:35:28.397437684+00:00 stderr F time="2025-12-12T16:35:28Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=OM+FP 2025-12-12T16:35:28.397437684+00:00 stderr F time="2025-12-12T16:35:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=OM+FP 2025-12-12T16:35:28.397477475+00:00 stderr F time="2025-12-12T16:35:28Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=OM+FP 2025-12-12T16:35:28.397502215+00:00 stderr F time="2025-12-12T16:35:28Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=OM+FP 2025-12-12T16:35:28.397510985+00:00 stderr F time="2025-12-12T16:35:28Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=OM+FP 2025-12-12T16:35:29.911335871+00:00 stderr F time="2025-12-12T16:35:29Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=9c0OP 2025-12-12T16:35:29.911335871+00:00 stderr F time="2025-12-12T16:35:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=9c0OP 
2025-12-12T16:35:29.911434293+00:00 stderr F time="2025-12-12T16:35:29Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=AcuIU 2025-12-12T16:35:29.911434293+00:00 stderr F time="2025-12-12T16:35:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=AcuIU 2025-12-12T16:35:29.920303036+00:00 stderr F time="2025-12-12T16:35:29Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=AcuIU 2025-12-12T16:35:29.920303036+00:00 stderr F time="2025-12-12T16:35:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=AcuIU 2025-12-12T16:35:29.922746827+00:00 stderr F time="2025-12-12T16:35:29Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=9c0OP 2025-12-12T16:35:29.922746827+00:00 stderr F time="2025-12-12T16:35:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=9c0OP 2025-12-12T16:35:30.514890795+00:00 stderr F time="2025-12-12T16:35:30Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=evtvQ 2025-12-12T16:35:30.514890795+00:00 stderr F time="2025-12-12T16:35:30Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=evtvQ 2025-12-12T16:35:30.709552666+00:00 stderr F time="2025-12-12T16:35:30Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=pWsMF 2025-12-12T16:35:30.709552666+00:00 stderr F time="2025-12-12T16:35:30Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 
id=pWsMF 2025-12-12T16:35:31.710275390+00:00 stderr F time="2025-12-12T16:35:31Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=evtvQ 2025-12-12T16:35:31.710275390+00:00 stderr F time="2025-12-12T16:35:31Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=evtvQ 2025-12-12T16:35:31.710384153+00:00 stderr F time="2025-12-12T16:35:31Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=evtvQ 2025-12-12T16:35:31.710393073+00:00 stderr F time="2025-12-12T16:35:31Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=evtvQ 2025-12-12T16:35:31.710419224+00:00 stderr F time="2025-12-12T16:35:31Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=evtvQ 2025-12-12T16:35:31.910570673+00:00 stderr F time="2025-12-12T16:35:31Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=pWsMF 2025-12-12T16:35:31.910570673+00:00 stderr F time="2025-12-12T16:35:31Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=pWsMF 2025-12-12T16:35:32.511596004+00:00 stderr F time="2025-12-12T16:35:32Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=CiFvI 2025-12-12T16:35:32.511596004+00:00 stderr F time="2025-12-12T16:35:32Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=CiFvI 2025-12-12T16:35:32.711209359+00:00 stderr F time="2025-12-12T16:35:32Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=+E/pv 2025-12-12T16:35:32.711209359+00:00 stderr F 
time="2025-12-12T16:35:32Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=+E/pv 2025-12-12T16:35:33.710024294+00:00 stderr F time="2025-12-12T16:35:33Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=CiFvI 2025-12-12T16:35:33.710024294+00:00 stderr F time="2025-12-12T16:35:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=CiFvI 2025-12-12T16:35:33.911710572+00:00 stderr F time="2025-12-12T16:35:33Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=+E/pv 2025-12-12T16:35:33.911710572+00:00 stderr F time="2025-12-12T16:35:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=+E/pv 2025-12-12T16:35:34.509741478+00:00 stderr F time="2025-12-12T16:35:34Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=yqVUP 2025-12-12T16:35:34.509741478+00:00 stderr F time="2025-12-12T16:35:34Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=yqVUP 2025-12-12T16:35:34.709922628+00:00 stderr F time="2025-12-12T16:35:34Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=ppD9s 2025-12-12T16:35:34.709922628+00:00 stderr F time="2025-12-12T16:35:34Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=ppD9s 2025-12-12T16:35:35.710684932+00:00 stderr F time="2025-12-12T16:35:35Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=yqVUP 2025-12-12T16:35:35.710684932+00:00 stderr F 
time="2025-12-12T16:35:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=yqVUP 2025-12-12T16:35:35.710758584+00:00 stderr F time="2025-12-12T16:35:35Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=yqVUP 2025-12-12T16:35:35.710758584+00:00 stderr F time="2025-12-12T16:35:35Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=yqVUP 2025-12-12T16:35:35.710758584+00:00 stderr F time="2025-12-12T16:35:35Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=yqVUP 2025-12-12T16:35:35.910775630+00:00 stderr F time="2025-12-12T16:35:35Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=ppD9s 2025-12-12T16:35:35.910775630+00:00 stderr F time="2025-12-12T16:35:35Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=ppD9s 2025-12-12T16:35:39.159555727+00:00 stderr F time="2025-12-12T16:35:39Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=3n2yv 2025-12-12T16:35:39.159555727+00:00 stderr F time="2025-12-12T16:35:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=3n2yv 2025-12-12T16:35:39.159688630+00:00 stderr F time="2025-12-12T16:35:39Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=r4iZ0 2025-12-12T16:35:39.159688630+00:00 stderr F time="2025-12-12T16:35:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=r4iZ0 2025-12-12T16:35:39.167900967+00:00 stderr F 
time="2025-12-12T16:35:39Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=r4iZ0 2025-12-12T16:35:39.167900967+00:00 stderr F time="2025-12-12T16:35:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=r4iZ0 2025-12-12T16:35:39.168014399+00:00 stderr F time="2025-12-12T16:35:39Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=3n2yv 2025-12-12T16:35:39.168014399+00:00 stderr F time="2025-12-12T16:35:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=3n2yv 2025-12-12T16:35:39.757652715+00:00 stderr F time="2025-12-12T16:35:39Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=7IhrQ 2025-12-12T16:35:39.757652715+00:00 stderr F time="2025-12-12T16:35:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=7IhrQ 2025-12-12T16:35:39.957355862+00:00 stderr F time="2025-12-12T16:35:39Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=G4mTZ 2025-12-12T16:35:39.957355862+00:00 stderr F time="2025-12-12T16:35:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=G4mTZ 2025-12-12T16:35:40.957150681+00:00 stderr F time="2025-12-12T16:35:40Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=7IhrQ 2025-12-12T16:35:40.957150681+00:00 stderr F time="2025-12-12T16:35:40Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=7IhrQ 2025-12-12T16:35:41.155781542+00:00 stderr 
F time="2025-12-12T16:35:41Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=G4mTZ 2025-12-12T16:35:41.155781542+00:00 stderr F time="2025-12-12T16:35:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=G4mTZ 2025-12-12T16:35:41.155781542+00:00 stderr F time="2025-12-12T16:35:41Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=G4mTZ 2025-12-12T16:35:41.155781542+00:00 stderr F time="2025-12-12T16:35:41Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=G4mTZ 2025-12-12T16:35:41.155894935+00:00 stderr F time="2025-12-12T16:35:41Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=G4mTZ 2025-12-12T16:35:41.755903760+00:00 stderr F time="2025-12-12T16:35:41Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=XU+Sh 2025-12-12T16:35:41.755903760+00:00 stderr F time="2025-12-12T16:35:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=XU+Sh 2025-12-12T16:35:41.957527817+00:00 stderr F time="2025-12-12T16:35:41Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=CXYhB 2025-12-12T16:35:41.957527817+00:00 stderr F time="2025-12-12T16:35:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=CXYhB 2025-12-12T16:35:42.956700890+00:00 stderr F time="2025-12-12T16:35:42Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=XU+Sh 2025-12-12T16:35:42.956700890+00:00 stderr F time="2025-12-12T16:35:42Z" level=info msg="of 1 pods matching 
label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=XU+Sh 2025-12-12T16:35:43.155962377+00:00 stderr F time="2025-12-12T16:35:43Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=CXYhB 2025-12-12T16:35:43.155962377+00:00 stderr F time="2025-12-12T16:35:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=CXYhB 2025-12-12T16:35:43.558823808+00:00 stderr F time="2025-12-12T16:35:43Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=LNuCx 2025-12-12T16:35:43.558823808+00:00 stderr F time="2025-12-12T16:35:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=LNuCx 2025-12-12T16:35:44.157613462+00:00 stderr F time="2025-12-12T16:35:44Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=LNuCx 2025-12-12T16:35:44.157613462+00:00 stderr F time="2025-12-12T16:35:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=LNuCx 2025-12-12T16:35:44.157613462+00:00 stderr F time="2025-12-12T16:35:44Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=LNuCx 2025-12-12T16:35:44.157613462+00:00 stderr F time="2025-12-12T16:35:44Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=LNuCx 2025-12-12T16:35:44.157613462+00:00 stderr F time="2025-12-12T16:35:44Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=LNuCx 2025-12-12T16:35:55.739022337+00:00 stderr F time="2025-12-12T16:35:55Z" level=info msg="evaluating current pod" 
catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=nVupq 2025-12-12T16:35:55.739022337+00:00 stderr F time="2025-12-12T16:35:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=nVupq 2025-12-12T16:35:55.747930270+00:00 stderr F time="2025-12-12T16:35:55Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=nVupq 2025-12-12T16:35:55.747930270+00:00 stderr F time="2025-12-12T16:35:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=nVupq 2025-12-12T16:35:55.747986812+00:00 stderr F time="2025-12-12T16:35:55Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=nVupq 2025-12-12T16:35:55.747995522+00:00 stderr F time="2025-12-12T16:35:55Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=nVupq 2025-12-12T16:35:55.748071064+00:00 stderr F time="2025-12-12T16:35:55Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=nVupq 2025-12-12T16:35:57.401260101+00:00 stderr F time="2025-12-12T16:35:57Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=a9U8G 2025-12-12T16:35:57.401260101+00:00 stderr F time="2025-12-12T16:35:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=a9U8G 2025-12-12T16:35:57.414645257+00:00 stderr F time="2025-12-12T16:35:57Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=a9U8G 2025-12-12T16:35:57.414645257+00:00 stderr F time="2025-12-12T16:35:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" 
catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=a9U8G 2025-12-12T16:35:57.414728159+00:00 stderr F time="2025-12-12T16:35:57Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=a9U8G 2025-12-12T16:35:57.414728159+00:00 stderr F time="2025-12-12T16:35:57Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=a9U8G 2025-12-12T16:35:57.414728159+00:00 stderr F time="2025-12-12T16:35:57Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=a9U8G 2025-12-12T16:36:11.391942609+00:00 stderr F time="2025-12-12T16:36:11Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=uzoLo 2025-12-12T16:36:11.392034591+00:00 stderr F time="2025-12-12T16:36:11Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=uzoLo 2025-12-12T16:36:11.399985921+00:00 stderr F time="2025-12-12T16:36:11Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=uzoLo 2025-12-12T16:36:11.400066633+00:00 stderr F time="2025-12-12T16:36:11Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=uzoLo 2025-12-12T16:36:11.400159575+00:00 stderr F time="2025-12-12T16:36:11Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=uzoLo 2025-12-12T16:36:11.400214297+00:00 stderr F time="2025-12-12T16:36:11Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=uzoLo 2025-12-12T16:36:11.400245128+00:00 stderr F time="2025-12-12T16:36:11Z" level=info msg="requeueing registry server 
for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=uzoLo 2025-12-12T16:36:14.533933152+00:00 stderr F time="2025-12-12T16:36:14Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=W+wz9 2025-12-12T16:36:14.533933152+00:00 stderr F time="2025-12-12T16:36:14Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=W+wz9 2025-12-12T16:36:14.534089986+00:00 stderr F time="2025-12-12T16:36:14Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=wJjOd 2025-12-12T16:36:14.534099047+00:00 stderr F time="2025-12-12T16:36:14Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=wJjOd 2025-12-12T16:36:14.548544380+00:00 stderr F time="2025-12-12T16:36:14Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=W+wz9 2025-12-12T16:36:14.548544380+00:00 stderr F time="2025-12-12T16:36:14Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=W+wz9 2025-12-12T16:36:14.548669163+00:00 stderr F time="2025-12-12T16:36:14Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=W+wz9 2025-12-12T16:36:14.548669163+00:00 stderr F time="2025-12-12T16:36:14Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=W+wz9 2025-12-12T16:36:14.548716884+00:00 stderr F time="2025-12-12T16:36:14Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=W+wz9 2025-12-12T16:36:14.548859638+00:00 stderr F time="2025-12-12T16:36:14Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true 
current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=wJjOd 2025-12-12T16:36:14.548870098+00:00 stderr F time="2025-12-12T16:36:14Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=wJjOd 2025-12-12T16:36:15.131045345+00:00 stderr F time="2025-12-12T16:36:15Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=BX6c5 2025-12-12T16:36:15.131045345+00:00 stderr F time="2025-12-12T16:36:15Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=BX6c5 2025-12-12T16:36:15.329713057+00:00 stderr F time="2025-12-12T16:36:15Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=by/tV 2025-12-12T16:36:15.329713057+00:00 stderr F time="2025-12-12T16:36:15Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=by/tV 2025-12-12T16:36:16.332528723+00:00 stderr F time="2025-12-12T16:36:16Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=BX6c5 2025-12-12T16:36:16.332653386+00:00 stderr F time="2025-12-12T16:36:16Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=BX6c5 2025-12-12T16:36:16.529485491+00:00 stderr F time="2025-12-12T16:36:16Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=by/tV 2025-12-12T16:36:16.529571474+00:00 stderr F time="2025-12-12T16:36:16Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=by/tV 2025-12-12T16:36:22.334301191+00:00 stderr F time="2025-12-12T16:36:22Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true 
current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=KQQeo 2025-12-12T16:36:22.334472125+00:00 stderr F time="2025-12-12T16:36:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=KQQeo 2025-12-12T16:36:22.334740112+00:00 stderr F time="2025-12-12T16:36:22Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=N/Pfp 2025-12-12T16:36:22.334849105+00:00 stderr F time="2025-12-12T16:36:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=N/Pfp 2025-12-12T16:36:22.345008750+00:00 stderr F time="2025-12-12T16:36:22Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=N/Pfp 2025-12-12T16:36:22.345089392+00:00 stderr F time="2025-12-12T16:36:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=N/Pfp 2025-12-12T16:36:22.345493722+00:00 stderr F time="2025-12-12T16:36:22Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=KQQeo 2025-12-12T16:36:22.345493722+00:00 stderr F time="2025-12-12T16:36:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=KQQeo 2025-12-12T16:36:22.729886851+00:00 stderr F time="2025-12-12T16:36:22Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=5CGUv 2025-12-12T16:36:22.729886851+00:00 stderr F time="2025-12-12T16:36:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=5CGUv 2025-12-12T16:36:23.330270456+00:00 stderr F time="2025-12-12T16:36:23Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true 
current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=5CGUv 2025-12-12T16:36:23.330434160+00:00 stderr F time="2025-12-12T16:36:23Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=5CGUv 2025-12-12T16:36:25.756329418+00:00 stderr F time="2025-12-12T16:36:25Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=wducu 2025-12-12T16:36:25.756329418+00:00 stderr F time="2025-12-12T16:36:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=wducu 2025-12-12T16:36:25.769115290+00:00 stderr F time="2025-12-12T16:36:25Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=wducu 2025-12-12T16:36:25.769300404+00:00 stderr F time="2025-12-12T16:36:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=wducu 2025-12-12T16:36:25.769459418+00:00 stderr F time="2025-12-12T16:36:25Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=wducu 2025-12-12T16:36:25.769526450+00:00 stderr F time="2025-12-12T16:36:25Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=wducu 2025-12-12T16:36:25.769606032+00:00 stderr F time="2025-12-12T16:36:25Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=wducu 2025-12-12T16:36:28.265569342+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="resolving sources" id=31WXT namespace=cert-manager-operator 2025-12-12T16:36:28.265699065+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="checking if subscriptions need update" id=31WXT namespace=cert-manager-operator 2025-12-12T16:36:28.265797728+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="resolving sources" id=hTadB namespace=openshift-monitoring 2025-12-12T16:36:28.265924181+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="checking if subscriptions need update" id=hTadB namespace=openshift-monitoring 
2025-12-12T16:36:28.271465100+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="No subscriptions were found in namespace openshift-monitoring" id=hTadB namespace=openshift-monitoring 2025-12-12T16:36:28.271540842+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="resolving sources" id=besUA namespace=openshift-operator-lifecycle-manager 2025-12-12T16:36:28.271578753+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="checking if subscriptions need update" id=besUA namespace=openshift-operator-lifecycle-manager 2025-12-12T16:36:28.274037975+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="No subscriptions were found in namespace openshift-operator-lifecycle-manager" id=besUA namespace=openshift-operator-lifecycle-manager 2025-12-12T16:36:28.274195179+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="resolving sources" id=9OtHt namespace=openshift-operators 2025-12-12T16:36:28.274241040+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="checking if subscriptions need update" id=9OtHt namespace=openshift-operators 2025-12-12T16:36:28.285987525+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="resolving subscriptions in namespace" id=31WXT namespace=cert-manager-operator 2025-12-12T16:36:28.290375195+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="resolving subscriptions in namespace" id=9OtHt namespace=openshift-operators 2025-12-12T16:36:28.308366657+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="no subscriptions were updated" id=31WXT namespace=cert-manager-operator 2025-12-12T16:36:28.318398339+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="no subscriptions were updated" id=9OtHt namespace=openshift-operators 2025-12-12T16:36:28.322324658+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="resolving sources" id=EQ1O8 namespace=service-telemetry 2025-12-12T16:36:28.322406100+00:00 stderr F time="2025-12-12T16:36:28Z" level=info msg="checking if subscriptions need update" id=EQ1O8 namespace=service-telemetry 2025-12-12T16:36:29.074029934+00:00 stderr F time="2025-12-12T16:36:29Z" level=info msg="checking for existing installplan" channel=unstable id=EQ1O8 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:29.074114336+00:00 stderr F time="2025-12-12T16:36:29Z" level=info msg="resolving subscriptions in namespace" id=EQ1O8 namespace=service-telemetry 2025-12-12T16:36:29.270503541+00:00 stderr F time="2025-12-12T16:36:29Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:29.275214579+00:00 stderr F E1212 16:36:29.275141 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:29.275434755+00:00 stderr F I1212 16:36:29.275328 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error 
while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:29.281949458+00:00 stderr F time="2025-12-12T16:36:29Z" level=info msg="resolving sources" id=zBquI namespace=service-telemetry 2025-12-12T16:36:29.282040571+00:00 stderr F time="2025-12-12T16:36:29Z" level=info msg="checking if subscriptions need update" id=zBquI namespace=service-telemetry 2025-12-12T16:36:29.674831720+00:00 stderr F time="2025-12-12T16:36:29Z" level=info msg="checking for existing installplan" channel=unstable id=zBquI namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:29.674946403+00:00 stderr F time="2025-12-12T16:36:29Z" level=info msg="resolving subscriptions in namespace" id=zBquI namespace=service-telemetry 2025-12-12T16:36:29.870399824+00:00 stderr F time="2025-12-12T16:36:29Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:29.872917967+00:00 stderr F E1212 16:36:29.872894 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:29.873117652+00:00 stderr F I1212 16:36:29.873015 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:29.884855317+00:00 stderr F time="2025-12-12T16:36:29Z" level=info msg="resolving sources" id=PaZbb namespace=service-telemetry 2025-12-12T16:36:29.885027611+00:00 stderr F time="2025-12-12T16:36:29Z" level=info msg="checking if subscriptions need update" id=PaZbb namespace=service-telemetry 2025-12-12T16:36:30.278785244+00:00 stderr F time="2025-12-12T16:36:30Z" level=info msg="checking for existing installplan" channel=unstable id=PaZbb namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:30.278846786+00:00 stderr F time="2025-12-12T16:36:30Z" level=info msg="resolving subscriptions in namespace" id=PaZbb namespace=service-telemetry 2025-12-12T16:36:30.472663876+00:00 stderr F time="2025-12-12T16:36:30Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:30.476031410+00:00 stderr F E1212 16:36:30.475668 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:30.476979474+00:00 stderr F I1212 16:36:30.475823 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", 
ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:30.498799771+00:00 stderr F time="2025-12-12T16:36:30Z" level=info msg="resolving sources" id=aaUSH namespace=service-telemetry 2025-12-12T16:36:30.498865713+00:00 stderr F time="2025-12-12T16:36:30Z" level=info msg="checking if subscriptions need update" id=aaUSH namespace=service-telemetry 2025-12-12T16:36:30.876293276+00:00 stderr F time="2025-12-12T16:36:30Z" level=info msg="checking for existing installplan" channel=unstable id=aaUSH namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:30.876293276+00:00 stderr F time="2025-12-12T16:36:30Z" level=info msg="resolving subscriptions in namespace" id=aaUSH namespace=service-telemetry 2025-12-12T16:36:31.071639845+00:00 stderr F time="2025-12-12T16:36:31Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:31.075487001+00:00 stderr F E1212 16:36:31.075384 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:31.075487001+00:00 stderr F I1212 16:36:31.075465 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:31.117269161+00:00 stderr F time="2025-12-12T16:36:31Z" level=info msg="resolving sources" id=f7FF1 namespace=service-telemetry 2025-12-12T16:36:31.117383274+00:00 stderr F time="2025-12-12T16:36:31Z" level=info msg="checking if subscriptions need update" id=f7FF1 namespace=service-telemetry 2025-12-12T16:36:31.482663351+00:00 stderr F time="2025-12-12T16:36:31Z" level=info msg="checking for existing installplan" channel=unstable id=f7FF1 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:31.482663351+00:00 stderr F time="2025-12-12T16:36:31Z" level=info msg="resolving subscriptions in namespace" id=f7FF1 namespace=service-telemetry 2025-12-12T16:36:31.670656565+00:00 stderr F time="2025-12-12T16:36:31Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:31.673506147+00:00 stderr F E1212 16:36:31.673443 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" 
logger="UnhandledError" 2025-12-12T16:36:31.673773673+00:00 stderr F I1212 16:36:31.673731 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:31.755321872+00:00 stderr F time="2025-12-12T16:36:31Z" level=info msg="resolving sources" id=G3X3h namespace=service-telemetry 2025-12-12T16:36:31.755321872+00:00 stderr F time="2025-12-12T16:36:31Z" level=info msg="checking if subscriptions need update" id=G3X3h namespace=service-telemetry 2025-12-12T16:36:32.075223279+00:00 stderr F time="2025-12-12T16:36:32Z" level=info msg="checking for existing installplan" channel=unstable id=G3X3h namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:32.075223279+00:00 stderr F time="2025-12-12T16:36:32Z" level=info msg="resolving subscriptions in namespace" id=G3X3h namespace=service-telemetry 2025-12-12T16:36:32.271929292+00:00 stderr F time="2025-12-12T16:36:32Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:32.274945257+00:00 stderr F E1212 16:36:32.274903 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:32.275047180+00:00 stderr F I1212 16:36:32.275018 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:32.436941428+00:00 stderr F time="2025-12-12T16:36:32Z" level=info msg="resolving sources" id=Qmqq5 namespace=service-telemetry 2025-12-12T16:36:32.436941428+00:00 stderr F time="2025-12-12T16:36:32Z" level=info msg="checking if subscriptions need update" id=Qmqq5 namespace=service-telemetry 2025-12-12T16:36:32.675513182+00:00 stderr F time="2025-12-12T16:36:32Z" level=info msg="checking for existing installplan" channel=unstable id=Qmqq5 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:32.675513182+00:00 stderr F time="2025-12-12T16:36:32Z" level=info msg="resolving subscriptions in namespace" id=Qmqq5 namespace=service-telemetry 2025-12-12T16:36:32.870132572+00:00 stderr F time="2025-12-12T16:36:32Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:32.875337403+00:00 stderr F E1212 16:36:32.874613 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using 
catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:32.875337403+00:00 stderr F I1212 16:36:32.874913 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:33.196595694+00:00 stderr F time="2025-12-12T16:36:33Z" level=info msg="resolving sources" id=P6Y9J namespace=service-telemetry 2025-12-12T16:36:33.196595694+00:00 stderr F time="2025-12-12T16:36:33Z" level=info msg="checking if subscriptions need update" id=P6Y9J namespace=service-telemetry 2025-12-12T16:36:33.280133174+00:00 stderr F time="2025-12-12T16:36:33Z" level=info msg="checking for existing installplan" channel=unstable id=P6Y9J namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:33.280133174+00:00 stderr F time="2025-12-12T16:36:33Z" level=info msg="resolving subscriptions in namespace" id=P6Y9J namespace=service-telemetry 2025-12-12T16:36:33.471231665+00:00 stderr F time="2025-12-12T16:36:33Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:33.474326313+00:00 stderr F E1212 16:36:33.474284 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:33.474366594+00:00 stderr F I1212 16:36:33.474340 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:34.116072146+00:00 stderr F time="2025-12-12T16:36:34Z" level=info msg="resolving sources" id=VF3EH namespace=service-telemetry 2025-12-12T16:36:34.116072146+00:00 stderr F time="2025-12-12T16:36:34Z" level=info msg="checking if subscriptions need update" id=VF3EH namespace=service-telemetry 2025-12-12T16:36:34.129345609+00:00 stderr F time="2025-12-12T16:36:34Z" level=info msg="checking for existing installplan" channel=unstable id=VF3EH namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:34.129345609+00:00 stderr F time="2025-12-12T16:36:34Z" level=info msg="resolving subscriptions in namespace" id=VF3EH namespace=service-telemetry 2025-12-12T16:36:34.132636662+00:00 stderr F time="2025-12-12T16:36:34Z" level=info msg="requesting 
snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:34.136196791+00:00 stderr F I1212 16:36:34.136105 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:43.703984245+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:36:43.704102728+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:36:43.709880893+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:43.709880893+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:43.710197361+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:43.710197361+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:43.713878304+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:43.713878304+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:43.715742500+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:43.715742500+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:43.908616797+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:43.908616797+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 
2025-12-12T16:36:43.921362977+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:36:43.921390578+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="resolving sources" id=isUo/ namespace=cert-manager-operator 2025-12-12T16:36:43.921390578+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="checking if subscriptions need update" id=isUo/ namespace=cert-manager-operator 2025-12-12T16:36:43.933698937+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="resolving subscriptions in namespace" id=isUo/ namespace=cert-manager-operator 2025-12-12T16:36:43.953226997+00:00 stderr F time="2025-12-12T16:36:43Z" level=info msg="no subscriptions were updated" id=isUo/ namespace=cert-manager-operator 2025-12-12T16:36:44.109568096+00:00 stderr F time="2025-12-12T16:36:44Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:44.109568096+00:00 stderr F time="2025-12-12T16:36:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:44.123094096+00:00 stderr F time="2025-12-12T16:36:44Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:36:44.123207138+00:00 stderr F time="2025-12-12T16:36:44Z" level=info msg="resolving sources" id=MuSwb namespace=openshift-operators 2025-12-12T16:36:44.123244619+00:00 stderr F time="2025-12-12T16:36:44Z" level=info msg="checking if subscriptions need update" id=MuSwb namespace=openshift-operators 2025-12-12T16:36:44.141600130+00:00 stderr F time="2025-12-12T16:36:44Z" level=info msg="resolving subscriptions in namespace" id=MuSwb namespace=openshift-operators 2025-12-12T16:36:44.156336021+00:00 stderr F time="2025-12-12T16:36:44Z" level=info msg="no subscriptions were updated" id=MuSwb namespace=openshift-operators 2025-12-12T16:36:44.708235118+00:00 stderr F time="2025-12-12T16:36:44Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:44.708235118+00:00 stderr F time="2025-12-12T16:36:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:44.908855998+00:00 stderr F time="2025-12-12T16:36:44Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:44.908855998+00:00 stderr F time="2025-12-12T16:36:44Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:45.508135455+00:00 stderr F time="2025-12-12T16:36:45Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:45.508135455+00:00 stderr F time="2025-12-12T16:36:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" 
correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:45.709543965+00:00 stderr F time="2025-12-12T16:36:45Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:45.709543965+00:00 stderr F time="2025-12-12T16:36:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:46.307848238+00:00 stderr F time="2025-12-12T16:36:46Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:46.307848238+00:00 stderr F time="2025-12-12T16:36:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:46.510085399+00:00 stderr F time="2025-12-12T16:36:46Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:46.510085399+00:00 stderr F time="2025-12-12T16:36:46Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:47.109115620+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:36:47.109115620+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:36:47.115880370+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="resolving sources" id=RIpVf namespace=service-telemetry 2025-12-12T16:36:47.115880370+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="checking if subscriptions need update" id=RIpVf namespace=service-telemetry 2025-12-12T16:36:47.132162749+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="checking for existing installplan" channel=unstable id=RIpVf namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:47.132162749+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="resolving subscriptions in namespace" id=RIpVf namespace=service-telemetry 2025-12-12T16:36:47.136126029+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:47.144654223+00:00 stderr F E1212 16:36:47.144556 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" 
logger="UnhandledError" 2025-12-12T16:36:47.144912180+00:00 stderr F I1212 16:36:47.144811 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:47.151401913+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="resolving sources" id=VX/BK namespace=service-telemetry 2025-12-12T16:36:47.151492985+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="checking if subscriptions need update" id=VX/BK namespace=service-telemetry 2025-12-12T16:36:47.171807035+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="checking for existing installplan" channel=unstable id=VX/BK namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:47.171807035+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="resolving subscriptions in namespace" id=VX/BK namespace=service-telemetry 2025-12-12T16:36:47.178091383+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:47.183521350+00:00 stderr F E1212 16:36:47.183433 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:47.183699374+00:00 stderr F I1212 16:36:47.183659 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:47.195269325+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="resolving sources" id=elGh5 namespace=service-telemetry 2025-12-12T16:36:47.195365407+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="checking if subscriptions need update" id=elGh5 namespace=service-telemetry 2025-12-12T16:36:47.209636886+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="checking for existing installplan" channel=unstable id=elGh5 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:47.210362294+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="resolving subscriptions in namespace" id=elGh5 namespace=service-telemetry 2025-12-12T16:36:47.214465647+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:47.218917079+00:00 stderr F E1212 16:36:47.218868 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using 
catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:47.219175426+00:00 stderr F I1212 16:36:47.219087 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:47.240669726+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="resolving sources" id=zfzdD namespace=service-telemetry 2025-12-12T16:36:47.240764438+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="checking if subscriptions need update" id=zfzdD namespace=service-telemetry 2025-12-12T16:36:47.311166637+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:36:47.311166637+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:36:47.311438144+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:47.326848931+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="checking for existing installplan" channel=unstable id=zfzdD namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:47.326848931+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="resolving subscriptions in namespace" id=zfzdD namespace=service-telemetry 2025-12-12T16:36:47.521949473+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:47.525002960+00:00 stderr F E1212 16:36:47.524954 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:47.525081402+00:00 stderr F I1212 16:36:47.525044 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:47.526340773+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="resolving sources" id=16hmJ namespace=service-telemetry 
2025-12-12T16:36:47.526362764+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="checking if subscriptions need update" id=16hmJ namespace=service-telemetry 2025-12-12T16:36:47.932643922+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="checking for existing installplan" channel=unstable id=16hmJ namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:47.932643922+00:00 stderr F time="2025-12-12T16:36:47Z" level=info msg="resolving subscriptions in namespace" id=16hmJ namespace=service-telemetry 2025-12-12T16:36:48.121092567+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:48.123711493+00:00 stderr F E1212 16:36:48.123655 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:48.123832276+00:00 stderr F I1212 16:36:48.123761 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:48.124956444+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="resolving sources" id=bwIqb namespace=service-telemetry 2025-12-12T16:36:48.124998955+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="checking if subscriptions need update" id=bwIqb namespace=service-telemetry 2025-12-12T16:36:48.527912518+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="checking for existing installplan" channel=unstable id=bwIqb namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:48.527987800+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="resolving subscriptions in namespace" id=bwIqb namespace=service-telemetry 2025-12-12T16:36:48.720410624+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:48.723254246+00:00 stderr F E1212 16:36:48.723226 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:48.723366329+00:00 stderr F I1212 16:36:48.723319 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = 
"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:48.724604680+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="resolving sources" id=0j+FN namespace=service-telemetry 2025-12-12T16:36:48.724619330+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="checking if subscriptions need update" id=0j+FN namespace=service-telemetry 2025-12-12T16:36:48.741287449+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:36:48.741373341+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:36:48.746614193+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:48.746660544+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:48.746762657+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:48.746762657+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:48.746788177+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=LCJjm 2025-12-12T16:36:48.746788177+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=LCJjm 2025-12-12T16:36:48.907310871+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=ap8kG 2025-12-12T16:36:48.907310871+00:00 stderr F time="2025-12-12T16:36:48Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=ap8kG 2025-12-12T16:36:49.124759614+00:00 stderr F time="2025-12-12T16:36:49Z" level=info msg="checking for existing installplan" channel=unstable id=0j+FN namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:49.124866687+00:00 stderr F time="2025-12-12T16:36:49Z" level=info msg="resolving 
subscriptions in namespace" id=0j+FN namespace=service-telemetry 2025-12-12T16:36:49.319497817+00:00 stderr F time="2025-12-12T16:36:49Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:49.325926148+00:00 stderr F E1212 16:36:49.325869 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:49.326150954+00:00 stderr F I1212 16:36:49.326095 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:49.327202281+00:00 stderr F time="2025-12-12T16:36:49Z" level=info msg="resolving sources" id=KtBjd namespace=service-telemetry 2025-12-12T16:36:49.327202281+00:00 stderr F time="2025-12-12T16:36:49Z" level=info msg="checking if subscriptions need update" id=KtBjd namespace=service-telemetry 2025-12-12T16:36:49.726514603+00:00 stderr F time="2025-12-12T16:36:49Z" level=info msg="checking for existing installplan" channel=unstable id=KtBjd namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:49.726608406+00:00 stderr F time="2025-12-12T16:36:49Z" level=info msg="resolving subscriptions in namespace" id=KtBjd namespace=service-telemetry 2025-12-12T16:36:49.907814658+00:00 stderr F time="2025-12-12T16:36:49Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:49.907996783+00:00 stderr F time="2025-12-12T16:36:49Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:49.923453681+00:00 stderr F time="2025-12-12T16:36:49Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:49.926426796+00:00 stderr F E1212 16:36:49.926284 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:49.926549599+00:00 stderr F I1212 16:36:49.926407 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = 
"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:49.927646377+00:00 stderr F time="2025-12-12T16:36:49Z" level=info msg="resolving sources" id=1ArVE namespace=service-telemetry 2025-12-12T16:36:49.927646377+00:00 stderr F time="2025-12-12T16:36:49Z" level=info msg="checking if subscriptions need update" id=1ArVE namespace=service-telemetry 2025-12-12T16:36:50.109591768+00:00 stderr F time="2025-12-12T16:36:50Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:50.109591768+00:00 stderr F time="2025-12-12T16:36:50Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:50.325777180+00:00 stderr F time="2025-12-12T16:36:50Z" level=info msg="checking for existing installplan" channel=unstable id=1ArVE namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:50.325777180+00:00 stderr F time="2025-12-12T16:36:50Z" level=info msg="resolving subscriptions in namespace" id=1ArVE namespace=service-telemetry 2025-12-12T16:36:50.519781124+00:00 stderr F time="2025-12-12T16:36:50Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:50.522397730+00:00 stderr F I1212 16:36:50.522298 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:50.568516879+00:00 stderr F time="2025-12-12T16:36:50Z" level=info msg="resolving sources" id=Xw70B namespace=service-telemetry 2025-12-12T16:36:50.568516879+00:00 stderr F time="2025-12-12T16:36:50Z" level=info msg="checking if subscriptions need update" id=Xw70B namespace=service-telemetry 2025-12-12T16:36:50.926104773+00:00 stderr F time="2025-12-12T16:36:50Z" level=info msg="checking for existing installplan" channel=unstable id=Xw70B namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:50.926104773+00:00 stderr F time="2025-12-12T16:36:50Z" level=info msg="resolving subscriptions in namespace" id=Xw70B namespace=service-telemetry 2025-12-12T16:36:51.108096926+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=LCJjm 2025-12-12T16:36:51.108096926+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=LCJjm 
2025-12-12T16:36:51.121446051+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:51.125663987+00:00 stderr F E1212 16:36:51.125601 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:51.125732219+00:00 stderr F I1212 16:36:51.125695 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:51.132086029+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="resolving sources" id=5tD1l namespace=service-telemetry 2025-12-12T16:36:51.132086029+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="checking if subscriptions need update" id=5tD1l namespace=service-telemetry 2025-12-12T16:36:51.307735622+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=ap8kG 2025-12-12T16:36:51.307735622+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=ap8kG 2025-12-12T16:36:51.507908801+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:51.507908801+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:51.523475833+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="checking for existing installplan" channel=unstable id=5tD1l namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:51.523475833+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="resolving subscriptions in namespace" id=5tD1l namespace=service-telemetry 2025-12-12T16:36:51.709127167+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:51.709127167+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true 
correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:36:51.721853487+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:51.724433652+00:00 stderr F E1212 16:36:51.724360 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:51.724530864+00:00 stderr F I1212 16:36:51.724464 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:51.735897150+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="resolving sources" id=stgLq namespace=service-telemetry 2025-12-12T16:36:51.735897150+00:00 stderr F time="2025-12-12T16:36:51Z" level=info msg="checking if subscriptions need update" id=stgLq namespace=service-telemetry 2025-12-12T16:36:52.131424327+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="checking for existing installplan" channel=unstable id=stgLq namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:52.131424327+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="resolving subscriptions in namespace" id=stgLq namespace=service-telemetry 2025-12-12T16:36:52.321440841+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:52.327733219+00:00 stderr F E1212 16:36:52.327659 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:52.327859802+00:00 stderr F I1212 16:36:52.327775 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:52.349400043+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="resolving sources" id=heNPn namespace=service-telemetry 2025-12-12T16:36:52.349400043+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="checking if subscriptions need update" id=heNPn namespace=service-telemetry 2025-12-12T16:36:52.707224234+00:00 stderr F time="2025-12-12T16:36:52Z" level=info 
msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=ZmB5A 2025-12-12T16:36:52.707224234+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=ZmB5A 2025-12-12T16:36:52.735282379+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="checking for existing installplan" channel=unstable id=heNPn namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:52.735282379+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="resolving subscriptions in namespace" id=heNPn namespace=service-telemetry 2025-12-12T16:36:52.909436475+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=aTcMl 2025-12-12T16:36:52.909436475+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=aTcMl 2025-12-12T16:36:52.920601915+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:52.923396416+00:00 stderr F E1212 16:36:52.923325 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:52.923581460+00:00 stderr F I1212 16:36:52.923521 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:52.965121564+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="resolving sources" id=e500e namespace=service-telemetry 2025-12-12T16:36:52.965121564+00:00 stderr F time="2025-12-12T16:36:52Z" level=info msg="checking if subscriptions need update" id=e500e namespace=service-telemetry 2025-12-12T16:36:53.107376428+00:00 stderr F time="2025-12-12T16:36:53Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:36:53.107376428+00:00 stderr F time="2025-12-12T16:36:53Z" level=info msg="of 1 pods 
matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:36:53.107571883+00:00 stderr F time="2025-12-12T16:36:53Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:53.308751418+00:00 stderr F time="2025-12-12T16:36:53Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:36:53.308751418+00:00 stderr F time="2025-12-12T16:36:53Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:36:53.328265038+00:00 stderr F time="2025-12-12T16:36:53Z" level=info msg="checking for existing installplan" channel=unstable id=e500e namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:53.328265038+00:00 stderr F time="2025-12-12T16:36:53Z" level=info msg="resolving subscriptions in namespace" id=e500e namespace=service-telemetry 2025-12-12T16:36:53.520128599+00:00 stderr F time="2025-12-12T16:36:53Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:53.522396756+00:00 stderr F E1212 16:36:53.522336 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:53.522508469+00:00 stderr F I1212 16:36:53.522435 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:53.523600386+00:00 stderr F time="2025-12-12T16:36:53Z" level=info msg="resolving sources" id=CWy/M namespace=service-telemetry 2025-12-12T16:36:53.523652607+00:00 stderr F time="2025-12-12T16:36:53Z" level=info msg="checking if subscriptions need update" id=CWy/M namespace=service-telemetry 2025-12-12T16:36:53.926644103+00:00 stderr F time="2025-12-12T16:36:53Z" level=info msg="checking for existing installplan" channel=unstable id=CWy/M namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:53.926739265+00:00 stderr F time="2025-12-12T16:36:53Z" level=info msg="resolving subscriptions in namespace" id=CWy/M namespace=service-telemetry 2025-12-12T16:36:54.121818687+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:54.124518405+00:00 stderr F E1212 16:36:54.124472 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using 
catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:54.124638558+00:00 stderr F I1212 16:36:54.124589 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:54.125772137+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="resolving sources" id=/9VBW namespace=service-telemetry 2025-12-12T16:36:54.125772137+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="checking if subscriptions need update" id=/9VBW namespace=service-telemetry 2025-12-12T16:36:54.308591180+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=ZmB5A 2025-12-12T16:36:54.308591180+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=ZmB5A 2025-12-12T16:36:54.308591180+00:00 stderr F time="2025-12-12T16:36:54Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=ZmB5A 2025-12-12T16:36:54.308659242+00:00 stderr F time="2025-12-12T16:36:54Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=ZmB5A 2025-12-12T16:36:54.308659242+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=ZmB5A 2025-12-12T16:36:54.509724133+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=aTcMl 2025-12-12T16:36:54.509724133+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 
current-pod.namespace=openshift-marketplace id=aTcMl 2025-12-12T16:36:54.524076444+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="checking for existing installplan" channel=unstable id=/9VBW namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:54.524076444+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="resolving subscriptions in namespace" id=/9VBW namespace=service-telemetry 2025-12-12T16:36:54.721789312+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:54.726576092+00:00 stderr F E1212 16:36:54.726494 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:54.726629514+00:00 stderr F I1212 16:36:54.726589 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:54.727830764+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="resolving sources" id=fGBfX namespace=service-telemetry 2025-12-12T16:36:54.727830764+00:00 stderr F time="2025-12-12T16:36:54Z" level=info msg="checking if subscriptions need update" id=fGBfX namespace=service-telemetry 2025-12-12T16:36:55.123268300+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="checking for existing installplan" channel=unstable id=fGBfX namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:55.123268300+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="resolving subscriptions in namespace" id=fGBfX namespace=service-telemetry 2025-12-12T16:36:55.321441799+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:55.324772282+00:00 stderr F E1212 16:36:55.324433 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:55.324772282+00:00 stderr F I1212 16:36:55.324611 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 
2025-12-12T16:36:55.325873170+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="resolving sources" id=rcp8I namespace=service-telemetry 2025-12-12T16:36:55.325873170+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="checking if subscriptions need update" id=rcp8I namespace=service-telemetry 2025-12-12T16:36:55.728698460+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="checking for existing installplan" channel=unstable id=rcp8I namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:55.728698460+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="resolving subscriptions in namespace" id=rcp8I namespace=service-telemetry 2025-12-12T16:36:55.776049140+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=g56nn 2025-12-12T16:36:55.776049140+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=g56nn 2025-12-12T16:36:55.786352689+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=g56nn 2025-12-12T16:36:55.786352689+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=g56nn 2025-12-12T16:36:55.786412290+00:00 stderr F time="2025-12-12T16:36:55Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=g56nn 2025-12-12T16:36:55.786412290+00:00 stderr F time="2025-12-12T16:36:55Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=g56nn 2025-12-12T16:36:55.786412290+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=g56nn 2025-12-12T16:36:55.922842239+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:55.926271805+00:00 stderr F I1212 16:36:55.926165 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", 
UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:55.966304161+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="resolving sources" id=0DY0R namespace=service-telemetry 2025-12-12T16:36:55.966304161+00:00 stderr F time="2025-12-12T16:36:55Z" level=info msg="checking if subscriptions need update" id=0DY0R namespace=service-telemetry 2025-12-12T16:36:56.326810019+00:00 stderr F time="2025-12-12T16:36:56Z" level=info msg="checking for existing installplan" channel=unstable id=0DY0R namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:56.326810019+00:00 stderr F time="2025-12-12T16:36:56Z" level=info msg="resolving subscriptions in namespace" id=0DY0R namespace=service-telemetry 2025-12-12T16:36:56.521398508+00:00 stderr F time="2025-12-12T16:36:56Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:56.526462545+00:00 stderr F E1212 16:36:56.523855 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:56.526462545+00:00 stderr F I1212 16:36:56.524047 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:56.534386994+00:00 stderr F time="2025-12-12T16:36:56Z" level=info msg="resolving sources" id=h5GSm namespace=service-telemetry 2025-12-12T16:36:56.534386994+00:00 stderr F time="2025-12-12T16:36:56Z" level=info msg="checking if subscriptions need update" id=h5GSm namespace=service-telemetry 2025-12-12T16:36:56.925886381+00:00 stderr F time="2025-12-12T16:36:56Z" level=info msg="checking for existing installplan" channel=unstable id=h5GSm namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:56.925886381+00:00 stderr F time="2025-12-12T16:36:56Z" level=info msg="resolving subscriptions in namespace" id=h5GSm namespace=service-telemetry 2025-12-12T16:36:57.120767217+00:00 stderr F time="2025-12-12T16:36:57Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:57.125645720+00:00 stderr F E1212 16:36:57.125611 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 
10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:57.125778823+00:00 stderr F I1212 16:36:57.125716 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:57.137488757+00:00 stderr F time="2025-12-12T16:36:57Z" level=info msg="resolving sources" id=1ZW8K namespace=service-telemetry 2025-12-12T16:36:57.137488757+00:00 stderr F time="2025-12-12T16:36:57Z" level=info msg="checking if subscriptions need update" id=1ZW8K namespace=service-telemetry 2025-12-12T16:36:57.525080766+00:00 stderr F time="2025-12-12T16:36:57Z" level=info msg="checking for existing installplan" channel=unstable id=1ZW8K namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:57.525080766+00:00 stderr F time="2025-12-12T16:36:57Z" level=info msg="resolving subscriptions in namespace" id=1ZW8K namespace=service-telemetry 2025-12-12T16:36:57.721254225+00:00 stderr F time="2025-12-12T16:36:57Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:57.724231810+00:00 stderr F E1212 16:36:57.724171 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:57.724301202+00:00 stderr F I1212 16:36:57.724254 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:57.745981207+00:00 stderr F time="2025-12-12T16:36:57Z" level=info msg="resolving sources" id=/iBJm namespace=service-telemetry 2025-12-12T16:36:57.745981207+00:00 stderr F time="2025-12-12T16:36:57Z" level=info msg="checking if subscriptions need update" id=/iBJm namespace=service-telemetry 2025-12-12T16:36:58.124551608+00:00 stderr F time="2025-12-12T16:36:58Z" level=info msg="checking for existing installplan" channel=unstable id=/iBJm namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:58.124551608+00:00 stderr F time="2025-12-12T16:36:58Z" level=info msg="resolving subscriptions in namespace" id=/iBJm namespace=service-telemetry 2025-12-12T16:36:58.320480641+00:00 stderr F time="2025-12-12T16:36:58Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:58.322797579+00:00 stderr F E1212 16:36:58.322749 1 queueinformer_operator.go:312] "Unhandled Error" 
err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:58.322916392+00:00 stderr F I1212 16:36:58.322842 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:58.364628301+00:00 stderr F time="2025-12-12T16:36:58Z" level=info msg="resolving sources" id=gly3D namespace=service-telemetry 2025-12-12T16:36:58.364628301+00:00 stderr F time="2025-12-12T16:36:58Z" level=info msg="checking if subscriptions need update" id=gly3D namespace=service-telemetry 2025-12-12T16:36:58.727239662+00:00 stderr F time="2025-12-12T16:36:58Z" level=info msg="checking for existing installplan" channel=unstable id=gly3D namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:58.727239662+00:00 stderr F time="2025-12-12T16:36:58Z" level=info msg="resolving subscriptions in namespace" id=gly3D namespace=service-telemetry 2025-12-12T16:36:58.920006655+00:00 stderr F time="2025-12-12T16:36:58Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:58.922830396+00:00 stderr F E1212 16:36:58.922764 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:58.922868907+00:00 stderr F I1212 16:36:58.922837 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:59.004585580+00:00 stderr F time="2025-12-12T16:36:59Z" level=info msg="resolving sources" id=iNItY namespace=service-telemetry 2025-12-12T16:36:59.004585580+00:00 stderr F time="2025-12-12T16:36:59Z" level=info msg="checking if subscriptions need update" id=iNItY namespace=service-telemetry 2025-12-12T16:36:59.324424585+00:00 stderr F time="2025-12-12T16:36:59Z" level=info msg="checking for existing installplan" channel=unstable id=iNItY namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:59.324424585+00:00 stderr F time="2025-12-12T16:36:59Z" level=info msg="resolving subscriptions in namespace" id=iNItY namespace=service-telemetry 2025-12-12T16:36:59.521731383+00:00 stderr F 
time="2025-12-12T16:36:59Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:36:59.526899633+00:00 stderr F I1212 16:36:59.526804 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:36:59.526939954+00:00 stderr F E1212 16:36:59.526889 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:36:59.688863012+00:00 stderr F time="2025-12-12T16:36:59Z" level=info msg="resolving sources" id=Swd43 namespace=service-telemetry 2025-12-12T16:36:59.688863012+00:00 stderr F time="2025-12-12T16:36:59Z" level=info msg="checking if subscriptions need update" id=Swd43 namespace=service-telemetry 2025-12-12T16:36:59.926104703+00:00 stderr F time="2025-12-12T16:36:59Z" level=info msg="checking for existing installplan" channel=unstable id=Swd43 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:36:59.926104703+00:00 stderr F time="2025-12-12T16:36:59Z" level=info msg="resolving subscriptions in namespace" id=Swd43 namespace=service-telemetry 2025-12-12T16:37:00.122318283+00:00 stderr F time="2025-12-12T16:37:00Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:37:00.125525543+00:00 stderr F E1212 16:37:00.125476 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:37:00.125752019+00:00 stderr F I1212 16:37:00.125673 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:00.447543944+00:00 stderr F time="2025-12-12T16:37:00Z" level=info msg="resolving sources" id=eBe+F namespace=service-telemetry 2025-12-12T16:37:00.447543944+00:00 stderr F time="2025-12-12T16:37:00Z" level=info msg="checking if subscriptions need update" id=eBe+F namespace=service-telemetry 2025-12-12T16:37:00.526507938+00:00 stderr F time="2025-12-12T16:37:00Z" level=info msg="checking for existing installplan" channel=unstable id=eBe+F namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators 
sub=service-telemetry-operator 2025-12-12T16:37:00.526507938+00:00 stderr F time="2025-12-12T16:37:00Z" level=info msg="resolving subscriptions in namespace" id=eBe+F namespace=service-telemetry 2025-12-12T16:37:00.721952079+00:00 stderr F time="2025-12-12T16:37:00Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:37:00.724610756+00:00 stderr F E1212 16:37:00.724514 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:37:00.724715868+00:00 stderr F I1212 16:37:00.724624 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:01.367398247+00:00 stderr F time="2025-12-12T16:37:01Z" level=info msg="resolving sources" id=uR3Za namespace=service-telemetry 2025-12-12T16:37:01.367528090+00:00 stderr F time="2025-12-12T16:37:01Z" level=info msg="checking if subscriptions need update" id=uR3Za namespace=service-telemetry 2025-12-12T16:37:01.379360487+00:00 stderr F time="2025-12-12T16:37:01Z" level=info msg="checking for existing installplan" channel=unstable id=uR3Za namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:37:01.379407218+00:00 stderr F time="2025-12-12T16:37:01Z" level=info msg="resolving subscriptions in namespace" id=uR3Za namespace=service-telemetry 2025-12-12T16:37:01.382087696+00:00 stderr F time="2025-12-12T16:37:01Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:37:01.385349178+00:00 stderr F I1212 16:37:01.385321 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:13.659536356+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=BoebW 2025-12-12T16:37:13.659536356+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=BoebW 
2025-12-12T16:37:13.671296702+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=BoebW 2025-12-12T16:37:13.671296702+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=BoebW 2025-12-12T16:37:13.671296702+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="catalog update required at 2025-12-12 16:37:13.671261801 +0000 UTC m=+1226.787019784" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=BoebW 2025-12-12T16:37:13.682801351+00:00 stderr F I1212 16:37:13.682577 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:37:13.682801351+00:00 stderr F time="2025-12-12T16:37:13Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-h46w2 has not yet reported ready" id=BoebW 2025-12-12T16:37:13.682801351+00:00 stderr F time="2025-12-12T16:37:13Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-h46w2 has not yet reported ready" id=BoebW 2025-12-12T16:37:13.682801351+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=BoebW 2025-12-12T16:37:13.711479121+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=gf+Xn 2025-12-12T16:37:13.711479121+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=gf+Xn 2025-12-12T16:37:13.812612502+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:37:13.812737086+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:37:13.813039993+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg=syncing id=/QuxI ip=install-sdtz5 namespace=openshift-operators phase=Complete 2025-12-12T16:37:13.813050083+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg=syncing id=S2ovz ip=install-k6ssv namespace=cert-manager-operator 
phase=Complete 2025-12-12T16:37:13.813088544+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg=syncing id=IfEHp ip=install-t6x4f namespace=service-telemetry phase=Complete 2025-12-12T16:37:13.855635503+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=gf+Xn 2025-12-12T16:37:13.855635503+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=gf+Xn 2025-12-12T16:37:13.855635503+00:00 stderr F time="2025-12-12T16:37:13Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-h46w2 has not yet reported ready" id=gf+Xn 2025-12-12T16:37:13.855737836+00:00 stderr F time="2025-12-12T16:37:13Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-h46w2 has not yet reported ready" id=gf+Xn 2025-12-12T16:37:13.855737836+00:00 stderr F time="2025-12-12T16:37:13Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=gf+Xn 2025-12-12T16:37:14.656390403+00:00 stderr F time="2025-12-12T16:37:14Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:14.656390403+00:00 stderr F time="2025-12-12T16:37:14Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:14.857430214+00:00 stderr F time="2025-12-12T16:37:14Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:14.857430214+00:00 stderr F time="2025-12-12T16:37:14Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:15.056578788+00:00 stderr F time="2025-12-12T16:37:15Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=cTivG 2025-12-12T16:37:15.056578788+00:00 stderr F time="2025-12-12T16:37:15Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators 
catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=cTivG 2025-12-12T16:37:15.858826485+00:00 stderr F time="2025-12-12T16:37:15Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:15.858826485+00:00 stderr F time="2025-12-12T16:37:15Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:16.057546327+00:00 stderr F time="2025-12-12T16:37:16Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:16.057546327+00:00 stderr F time="2025-12-12T16:37:16Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:16.858252926+00:00 stderr F time="2025-12-12T16:37:16Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=cTivG 2025-12-12T16:37:16.858252926+00:00 stderr F time="2025-12-12T16:37:16Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=cTivG 2025-12-12T16:37:16.858332328+00:00 stderr F time="2025-12-12T16:37:16Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-h46w2 has not yet reported ready" id=cTivG 2025-12-12T16:37:16.858332328+00:00 stderr F time="2025-12-12T16:37:16Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-h46w2 has not yet reported ready" id=cTivG 2025-12-12T16:37:16.858332328+00:00 stderr F time="2025-12-12T16:37:16Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=cTivG 2025-12-12T16:37:17.056936037+00:00 stderr F time="2025-12-12T16:37:17Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:17.056936037+00:00 stderr F time="2025-12-12T16:37:17Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:17.069952864+00:00 
stderr F time="2025-12-12T16:37:17Z" level=info msg=syncing reconciling="*v1alpha1.Subscription" selflink= 2025-12-12T16:37:17.069952864+00:00 stderr F time="2025-12-12T16:37:17Z" level=info msg="resolving sources" id=f4Kgc namespace=openshift-operators 2025-12-12T16:37:17.069952864+00:00 stderr F time="2025-12-12T16:37:17Z" level=info msg="checking if subscriptions need update" id=f4Kgc namespace=openshift-operators 2025-12-12T16:37:17.090233253+00:00 stderr F time="2025-12-12T16:37:17Z" level=info msg="resolving subscriptions in namespace" id=f4Kgc namespace=openshift-operators 2025-12-12T16:37:17.106202935+00:00 stderr F time="2025-12-12T16:37:17Z" level=info msg="no subscriptions were updated" id=f4Kgc namespace=openshift-operators 2025-12-12T16:37:17.256346037+00:00 stderr F time="2025-12-12T16:37:17Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:17.256346037+00:00 stderr F time="2025-12-12T16:37:17Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:18.055986669+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=CRKEL 2025-12-12T16:37:18.055986669+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=CRKEL 2025-12-12T16:37:18.256601889+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:18.256601889+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:18.456479791+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:37:18.456479791+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry 2025-12-12T16:37:18.458630725+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="resolving sources" id=9E95h namespace=service-telemetry 2025-12-12T16:37:18.458630725+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="checking if subscriptions need update" id=9E95h namespace=service-telemetry 2025-12-12T16:37:18.472461523+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="checking for existing installplan" channel=unstable id=9E95h namespace=service-telemetry pkg=service-telemetry-operator 
source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:37:18.472461523+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="resolving subscriptions in namespace" id=9E95h namespace=service-telemetry 2025-12-12T16:37:18.475817687+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:37:18.479564001+00:00 stderr F E1212 16:37:18.479500 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:37:18.479735815+00:00 stderr F I1212 16:37:18.479645 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:18.486039244+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="resolving sources" id=6fMkL namespace=service-telemetry 2025-12-12T16:37:18.486039244+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="checking if subscriptions need update" id=6fMkL namespace=service-telemetry 2025-12-12T16:37:18.497794029+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="checking for existing installplan" channel=unstable id=6fMkL namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:37:18.497794029+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="resolving subscriptions in namespace" id=6fMkL namespace=service-telemetry 2025-12-12T16:37:18.501533513+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:37:18.504574250+00:00 stderr F E1212 16:37:18.504506 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:37:18.504666442+00:00 stderr F I1212 16:37:18.504599 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:18.516222502+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="resolving sources" id=G1Ja2 namespace=service-telemetry 2025-12-12T16:37:18.516222502+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="checking if subscriptions need update" id=G1Ja2 
namespace=service-telemetry 2025-12-12T16:37:18.533746493+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="checking for existing installplan" channel=unstable id=G1Ja2 namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:37:18.533746493+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="resolving subscriptions in namespace" id=G1Ja2 namespace=service-telemetry 2025-12-12T16:37:18.535666621+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:37:18.538597034+00:00 stderr F E1212 16:37:18.538548 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:37:18.538695827+00:00 stderr F I1212 16:37:18.538648 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:18.560143536+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="resolving sources" id=DAzEY namespace=service-telemetry 2025-12-12T16:37:18.560143536+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="checking if subscriptions need update" id=DAzEY namespace=service-telemetry 2025-12-12T16:37:18.666595881+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="checking for existing installplan" channel=unstable id=DAzEY namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:37:18.666595881+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="resolving subscriptions in namespace" id=DAzEY namespace=service-telemetry 2025-12-12T16:37:18.864494973+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:37:18.866828632+00:00 stderr F E1212 16:37:18.866761 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:37:18.866938164+00:00 stderr F I1212 16:37:18.866859 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:18.908536320+00:00 stderr F 
time="2025-12-12T16:37:18Z" level=info msg="resolving sources" id=c22gA namespace=service-telemetry 2025-12-12T16:37:18.908536320+00:00 stderr F time="2025-12-12T16:37:18Z" level=info msg="checking if subscriptions need update" id=c22gA namespace=service-telemetry 2025-12-12T16:37:19.256340118+00:00 stderr F time="2025-12-12T16:37:19Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:19.256340118+00:00 stderr F time="2025-12-12T16:37:19Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:19.269039697+00:00 stderr F time="2025-12-12T16:37:19Z" level=info msg="checking for existing installplan" channel=unstable id=c22gA namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:37:19.269039697+00:00 stderr F time="2025-12-12T16:37:19Z" level=info msg="resolving subscriptions in namespace" id=c22gA namespace=service-telemetry 2025-12-12T16:37:19.456620390+00:00 stderr F time="2025-12-12T16:37:19Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=CRKEL 2025-12-12T16:37:19.456620390+00:00 stderr F time="2025-12-12T16:37:19Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=CRKEL 2025-12-12T16:37:19.456688372+00:00 stderr F time="2025-12-12T16:37:19Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-h46w2 has not yet reported ready" id=CRKEL 2025-12-12T16:37:19.456702313+00:00 stderr F time="2025-12-12T16:37:19Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-h46w2 has not yet reported ready" id=CRKEL 2025-12-12T16:37:19.456714343+00:00 stderr F time="2025-12-12T16:37:19Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=CRKEL 2025-12-12T16:37:19.463457312+00:00 stderr F time="2025-12-12T16:37:19Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:37:19.466216762+00:00 stderr F E1212 16:37:19.466142 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection 
refused\"" logger="UnhandledError" 2025-12-12T16:37:19.466343615+00:00 stderr F I1212 16:37:19.466270 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:19.547940095+00:00 stderr F time="2025-12-12T16:37:19Z" level=info msg="resolving sources" id=GwalA namespace=service-telemetry 2025-12-12T16:37:19.547940095+00:00 stderr F time="2025-12-12T16:37:19Z" level=info msg="checking if subscriptions need update" id=GwalA namespace=service-telemetry 2025-12-12T16:37:19.870933630+00:00 stderr F time="2025-12-12T16:37:19Z" level=info msg="checking for existing installplan" channel=unstable id=GwalA namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:37:19.870933630+00:00 stderr F time="2025-12-12T16:37:19Z" level=info msg="resolving subscriptions in namespace" id=GwalA namespace=service-telemetry 2025-12-12T16:37:20.056726479+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="evaluating current pod" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:20.056726479+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace 2025-12-12T16:37:20.064229937+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:37:20.066721680+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="resolving sources" id=CUdsi namespace=cert-manager-operator 2025-12-12T16:37:20.066721680+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="checking if subscriptions need update" id=CUdsi namespace=cert-manager-operator 2025-12-12T16:37:20.069216533+00:00 stderr F E1212 16:37:20.069149 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:37:20.069299595+00:00 stderr F I1212 16:37:20.069242 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:20.230947046+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="resolving sources" id=WMuRu namespace=service-telemetry 2025-12-12T16:37:20.230947046+00:00 stderr F time="2025-12-12T16:37:20Z" level=info 
msg="checking if subscriptions need update" id=WMuRu namespace=service-telemetry 2025-12-12T16:37:20.257910474+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=wLT8f 2025-12-12T16:37:20.257910474+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=wLT8f 2025-12-12T16:37:20.669410012+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="resolving subscriptions in namespace" id=CUdsi namespace=cert-manager-operator 2025-12-12T16:37:20.857264952+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=wLT8f 2025-12-12T16:37:20.857264952+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=wLT8f 2025-12-12T16:37:20.857416896+00:00 stderr F time="2025-12-12T16:37:20Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-h46w2 has not yet reported ready" id=wLT8f 2025-12-12T16:37:20.857416896+00:00 stderr F time="2025-12-12T16:37:20Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace error="catalog polling: certified-operators not ready for update: update pod certified-operators-h46w2 has not yet reported ready" id=wLT8f 2025-12-12T16:37:20.857482437+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace id=wLT8f 2025-12-12T16:37:20.873573652+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="checking for existing installplan" channel=unstable id=WMuRu namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:37:20.873573652+00:00 stderr F time="2025-12-12T16:37:20Z" level=info msg="resolving subscriptions in namespace" id=WMuRu namespace=service-telemetry 2025-12-12T16:37:21.074362707+00:00 stderr F time="2025-12-12T16:37:21Z" level=info msg="no subscriptions were updated" id=CUdsi namespace=cert-manager-operator 2025-12-12T16:37:21.263641212+00:00 stderr F time="2025-12-12T16:37:21Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:37:21.268589537+00:00 stderr F E1212 16:37:21.268514 1 
queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:37:21.268635958+00:00 stderr F I1212 16:37:21.268598 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:21.591731436+00:00 stderr F time="2025-12-12T16:37:21Z" level=info msg="resolving sources" id=zYp/G namespace=service-telemetry 2025-12-12T16:37:21.591731436+00:00 stderr F time="2025-12-12T16:37:21Z" level=info msg="checking if subscriptions need update" id=zYp/G namespace=service-telemetry 2025-12-12T16:37:22.067729956+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="checking for existing installplan" channel=unstable id=zYp/G namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:37:22.067729956+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="resolving subscriptions in namespace" id=zYp/G namespace=service-telemetry 2025-12-12T16:37:22.263621368+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:37:22.267413173+00:00 stderr F E1212 16:37:22.267373 1 queueinformer_operator.go:312] "Unhandled Error" err="sync \"/service-telemetry\" failed: error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused\"" logger="UnhandledError" 2025-12-12T16:37:22.267632698+00:00 stderr F I1212 16:37:22.267584 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:22.519961528+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=jWKD0 2025-12-12T16:37:22.519961528+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=jWKD0 2025-12-12T16:37:22.528613376+00:00 stderr 
F time="2025-12-12T16:37:22Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=jWKD0 2025-12-12T16:37:22.528613376+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=jWKD0 2025-12-12T16:37:22.528672237+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="catalog update required at 2025-12-12 16:37:22.528607086 +0000 UTC m=+1235.644365069" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=jWKD0 2025-12-12T16:37:22.535840477+00:00 stderr F I1212 16:37:22.535745 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:37:22.535885448+00:00 stderr F time="2025-12-12T16:37:22Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=jWKD0 2025-12-12T16:37:22.535885448+00:00 stderr F time="2025-12-12T16:37:22Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=jWKD0 2025-12-12T16:37:22.535895449+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=jWKD0 2025-12-12T16:37:22.552554957+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=3lPY7 2025-12-12T16:37:22.552643580+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=3lPY7 2025-12-12T16:37:22.909442634+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="resolving sources" id=Q4+3e namespace=service-telemetry 2025-12-12T16:37:22.909442634+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="checking if subscriptions need update" id=Q4+3e namespace=service-telemetry 2025-12-12T16:37:22.921003935+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="checking for existing installplan" channel=unstable id=Q4+3e namespace=service-telemetry pkg=service-telemetry-operator source=infrawatch-operators sub=service-telemetry-operator 2025-12-12T16:37:22.921092497+00:00 stderr F time="2025-12-12T16:37:22Z" level=info msg="resolving subscriptions in 
namespace" id=Q4+3e namespace=service-telemetry 2025-12-12T16:37:23.056382326+00:00 stderr F time="2025-12-12T16:37:23Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=3lPY7 2025-12-12T16:37:23.056515680+00:00 stderr F time="2025-12-12T16:37:23Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=3lPY7 2025-12-12T16:37:23.056651453+00:00 stderr F time="2025-12-12T16:37:23Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=3lPY7 2025-12-12T16:37:23.056692734+00:00 stderr F time="2025-12-12T16:37:23Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=3lPY7 2025-12-12T16:37:23.056729245+00:00 stderr F time="2025-12-12T16:37:23Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=3lPY7 2025-12-12T16:37:23.063101435+00:00 stderr F time="2025-12-12T16:37:23Z" level=info msg="requesting snapshot for catalog source service-telemetry/infrawatch-operators" 2025-12-12T16:37:23.065571277+00:00 stderr F I1212 16:37:23.065513 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"", Name:"service-telemetry", UID:"6621688f-e974-4f3b-8100-711b6a1239e7", APIVersion:"v1", ResourceVersion:"43524", FieldPath:""}): type: 'Warning' reason: 'ResolutionFailed' error using catalogsource service-telemetry/infrawatch-operators: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp 10.217.5.116:50051: connect: connection refused" 2025-12-12T16:37:23.462529521+00:00 stderr F time="2025-12-12T16:37:23Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=y3+PB 2025-12-12T16:37:23.462529521+00:00 stderr F time="2025-12-12T16:37:23Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=y3+PB 2025-12-12T16:37:24.056516885+00:00 stderr F time="2025-12-12T16:37:24Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=y3+PB 
2025-12-12T16:37:24.056516885+00:00 stderr F time="2025-12-12T16:37:24Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=y3+PB 2025-12-12T16:37:24.056584267+00:00 stderr F time="2025-12-12T16:37:24Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=y3+PB 2025-12-12T16:37:24.056584267+00:00 stderr F time="2025-12-12T16:37:24Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=y3+PB 2025-12-12T16:37:24.056584267+00:00 stderr F time="2025-12-12T16:37:24Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=y3+PB 2025-12-12T16:37:24.656285644+00:00 stderr F time="2025-12-12T16:37:24Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=EVLrP 2025-12-12T16:37:24.656380246+00:00 stderr F time="2025-12-12T16:37:24Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=EVLrP 2025-12-12T16:37:24.856449703+00:00 stderr F time="2025-12-12T16:37:24Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=BDMQR 2025-12-12T16:37:24.856449703+00:00 stderr F time="2025-12-12T16:37:24Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=BDMQR 2025-12-12T16:37:25.857007483+00:00 stderr F time="2025-12-12T16:37:25Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=EVLrP 2025-12-12T16:37:25.857007483+00:00 stderr F time="2025-12-12T16:37:25Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=EVLrP 
2025-12-12T16:37:25.857097315+00:00 stderr F time="2025-12-12T16:37:25Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=EVLrP 2025-12-12T16:37:25.857106025+00:00 stderr F time="2025-12-12T16:37:25Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=EVLrP 2025-12-12T16:37:25.857114056+00:00 stderr F time="2025-12-12T16:37:25Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=EVLrP 2025-12-12T16:37:26.055915761+00:00 stderr F time="2025-12-12T16:37:26Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=BDMQR 2025-12-12T16:37:26.055915761+00:00 stderr F time="2025-12-12T16:37:26Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=BDMQR 2025-12-12T16:37:26.655472855+00:00 stderr F time="2025-12-12T16:37:26Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=1mGR9 2025-12-12T16:37:26.655472855+00:00 stderr F time="2025-12-12T16:37:26Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=1mGR9 2025-12-12T16:37:27.256659390+00:00 stderr F time="2025-12-12T16:37:27Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=SziIn 2025-12-12T16:37:27.256659390+00:00 stderr F time="2025-12-12T16:37:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=SziIn 2025-12-12T16:37:27.856376477+00:00 stderr F time="2025-12-12T16:37:27Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=1mGR9 2025-12-12T16:37:27.856376477+00:00 stderr F time="2025-12-12T16:37:27Z" level=info 
msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=1mGR9 2025-12-12T16:37:27.856422349+00:00 stderr F time="2025-12-12T16:37:27Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=1mGR9 2025-12-12T16:37:27.856422349+00:00 stderr F time="2025-12-12T16:37:27Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=1mGR9 2025-12-12T16:37:27.856422349+00:00 stderr F time="2025-12-12T16:37:27Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=1mGR9 2025-12-12T16:37:28.456001953+00:00 stderr F time="2025-12-12T16:37:28Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=SziIn 2025-12-12T16:37:28.456001953+00:00 stderr F time="2025-12-12T16:37:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=SziIn 2025-12-12T16:37:28.456001953+00:00 stderr F time="2025-12-12T16:37:28Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=SziIn 2025-12-12T16:37:28.456001953+00:00 stderr F time="2025-12-12T16:37:28Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=SziIn 2025-12-12T16:37:28.456001953+00:00 stderr F time="2025-12-12T16:37:28Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=SziIn 2025-12-12T16:37:28.657440095+00:00 stderr F time="2025-12-12T16:37:28Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=RZ3by 2025-12-12T16:37:28.657440095+00:00 stderr F time="2025-12-12T16:37:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching 
hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=RZ3by 2025-12-12T16:37:29.259433360+00:00 stderr F time="2025-12-12T16:37:29Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=Pl/lK 2025-12-12T16:37:29.259433360+00:00 stderr F time="2025-12-12T16:37:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=Pl/lK 2025-12-12T16:37:29.856222065+00:00 stderr F time="2025-12-12T16:37:29Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=RZ3by 2025-12-12T16:37:29.856222065+00:00 stderr F time="2025-12-12T16:37:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=RZ3by 2025-12-12T16:37:30.456753874+00:00 stderr F time="2025-12-12T16:37:30Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=Pl/lK 2025-12-12T16:37:30.456753874+00:00 stderr F time="2025-12-12T16:37:30Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=Pl/lK 2025-12-12T16:37:30.456753874+00:00 stderr F time="2025-12-12T16:37:30Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=Pl/lK 2025-12-12T16:37:30.456753874+00:00 stderr F time="2025-12-12T16:37:30Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=Pl/lK 2025-12-12T16:37:30.456753874+00:00 stderr F time="2025-12-12T16:37:30Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=Pl/lK 2025-12-12T16:37:30.657223571+00:00 stderr F time="2025-12-12T16:37:30Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators 
catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=iMN0b 2025-12-12T16:37:30.657223571+00:00 stderr F time="2025-12-12T16:37:30Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=iMN0b 2025-12-12T16:37:31.259655267+00:00 stderr F time="2025-12-12T16:37:31Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=iMN0b 2025-12-12T16:37:31.259655267+00:00 stderr F time="2025-12-12T16:37:31Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=iMN0b 2025-12-12T16:37:31.259784000+00:00 stderr F time="2025-12-12T16:37:31Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=iMN0b 2025-12-12T16:37:31.259806681+00:00 stderr F time="2025-12-12T16:37:31Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=iMN0b 2025-12-12T16:37:31.259827222+00:00 stderr F time="2025-12-12T16:37:31Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=iMN0b 2025-12-12T16:37:33.510403048+00:00 stderr F time="2025-12-12T16:37:33Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=LALgL 2025-12-12T16:37:33.510403048+00:00 stderr F time="2025-12-12T16:37:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=LALgL 2025-12-12T16:37:33.526121403+00:00 stderr F time="2025-12-12T16:37:33Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=LALgL 2025-12-12T16:37:33.526121403+00:00 stderr F time="2025-12-12T16:37:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators 
catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=LALgL 2025-12-12T16:37:33.526376009+00:00 stderr F time="2025-12-12T16:37:33Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=LALgL 2025-12-12T16:37:33.526376009+00:00 stderr F time="2025-12-12T16:37:33Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=LALgL 2025-12-12T16:37:33.526421090+00:00 stderr F time="2025-12-12T16:37:33Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=LALgL 2025-12-12T16:37:33.926807950+00:00 stderr F time="2025-12-12T16:37:33Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=7OK6A 2025-12-12T16:37:33.926807950+00:00 stderr F time="2025-12-12T16:37:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=7OK6A 2025-12-12T16:37:33.950684690+00:00 stderr F time="2025-12-12T16:37:33Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=7OK6A 2025-12-12T16:37:33.950684690+00:00 stderr F time="2025-12-12T16:37:33Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=7OK6A 2025-12-12T16:37:34.106919566+00:00 stderr F time="2025-12-12T16:37:34Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=wOi7I 2025-12-12T16:37:34.106919566+00:00 stderr F time="2025-12-12T16:37:34Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=wOi7I 2025-12-12T16:37:34.704806388+00:00 stderr F time="2025-12-12T16:37:34Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true 
current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=wOi7I 2025-12-12T16:37:34.704806388+00:00 stderr F time="2025-12-12T16:37:34Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=wOi7I 2025-12-12T16:37:39.394569001+00:00 stderr F time="2025-12-12T16:37:39Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=y9SeP 2025-12-12T16:37:39.394569001+00:00 stderr F time="2025-12-12T16:37:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=y9SeP 2025-12-12T16:37:39.408468800+00:00 stderr F time="2025-12-12T16:37:39Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=y9SeP 2025-12-12T16:37:39.408468800+00:00 stderr F time="2025-12-12T16:37:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=y9SeP 2025-12-12T16:37:39.408526261+00:00 stderr F time="2025-12-12T16:37:39Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=y9SeP 2025-12-12T16:37:39.408526261+00:00 stderr F time="2025-12-12T16:37:39Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=y9SeP 2025-12-12T16:37:39.408526261+00:00 stderr F time="2025-12-12T16:37:39Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=y9SeP 2025-12-12T16:37:39.949032252+00:00 stderr F time="2025-12-12T16:37:39Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=hu8Te 2025-12-12T16:37:39.949032252+00:00 stderr F time="2025-12-12T16:37:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true 
current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=hu8Te 2025-12-12T16:37:39.959911176+00:00 stderr F time="2025-12-12T16:37:39Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=hu8Te 2025-12-12T16:37:39.959911176+00:00 stderr F time="2025-12-12T16:37:39Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=hu8Te 2025-12-12T16:37:39.960075210+00:00 stderr F time="2025-12-12T16:37:39Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=hu8Te 2025-12-12T16:37:39.960087320+00:00 stderr F time="2025-12-12T16:37:39Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace error="catalog polling: redhat-operators not ready for update: update pod redhat-operators-k5p4x has not yet reported ready" id=hu8Te 2025-12-12T16:37:39.960117851+00:00 stderr F time="2025-12-12T16:37:39Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace id=hu8Te 2025-12-12T16:37:40.886943428+00:00 stderr F time="2025-12-12T16:37:40Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=/rmtV 2025-12-12T16:37:40.886943428+00:00 stderr F time="2025-12-12T16:37:40Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=/rmtV 2025-12-12T16:37:40.896857427+00:00 stderr F time="2025-12-12T16:37:40Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=/rmtV 2025-12-12T16:37:40.896857427+00:00 stderr F time="2025-12-12T16:37:40Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=/rmtV 2025-12-12T16:37:40.902106419+00:00 stderr F time="2025-12-12T16:37:40Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=ojZi0 
2025-12-12T16:37:40.902106419+00:00 stderr F time="2025-12-12T16:37:40Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=ojZi0 2025-12-12T16:37:41.394161812+00:00 stderr F time="2025-12-12T16:37:41Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=ojZi0 2025-12-12T16:37:41.394161812+00:00 stderr F time="2025-12-12T16:37:41Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=ojZi0 2025-12-12T16:37:43.690214210+00:00 stderr F time="2025-12-12T16:37:43Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=PpzrC 2025-12-12T16:37:43.690214210+00:00 stderr F time="2025-12-12T16:37:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=PpzrC 2025-12-12T16:37:43.701702219+00:00 stderr F time="2025-12-12T16:37:43Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=PpzrC 2025-12-12T16:37:43.701702219+00:00 stderr F time="2025-12-12T16:37:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=PpzrC 2025-12-12T16:37:52.544683769+00:00 stderr F time="2025-12-12T16:37:52Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=vopC6 2025-12-12T16:37:52.544683769+00:00 stderr F time="2025-12-12T16:37:52Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=vopC6 2025-12-12T16:37:52.556408134+00:00 stderr F time="2025-12-12T16:37:52Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=vopC6 
2025-12-12T16:37:52.556408134+00:00 stderr F time="2025-12-12T16:37:52Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=vopC6 2025-12-12T16:37:57.863451793+00:00 stderr F time="2025-12-12T16:37:57Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=j5tuH 2025-12-12T16:37:57.863451793+00:00 stderr F time="2025-12-12T16:37:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=j5tuH 2025-12-12T16:37:57.874328477+00:00 stderr F time="2025-12-12T16:37:57Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=j5tuH 2025-12-12T16:37:57.874328477+00:00 stderr F time="2025-12-12T16:37:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=j5tuH 2025-12-12T16:37:57.874383218+00:00 stderr F time="2025-12-12T16:37:57Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=j5tuH 2025-12-12T16:37:57.874383218+00:00 stderr F time="2025-12-12T16:37:57Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=j5tuH 2025-12-12T16:37:57.874394618+00:00 stderr F time="2025-12-12T16:37:57Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=j5tuH 2025-12-12T16:38:09.699081678+00:00 stderr F time="2025-12-12T16:38:09Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=MFHId 2025-12-12T16:38:09.699081678+00:00 stderr F time="2025-12-12T16:38:09Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=MFHId 
2025-12-12T16:38:09.699141120+00:00 stderr F time="2025-12-12T16:38:09Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=WL9wy 2025-12-12T16:38:09.699162930+00:00 stderr F time="2025-12-12T16:38:09Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=WL9wy 2025-12-12T16:38:09.707724626+00:00 stderr F time="2025-12-12T16:38:09Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=WL9wy 2025-12-12T16:38:09.707724626+00:00 stderr F time="2025-12-12T16:38:09Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=WL9wy 2025-12-12T16:38:09.707764587+00:00 stderr F time="2025-12-12T16:38:09Z" level=info msg="catalog update required at 2025-12-12 16:38:09.707744206 +0000 UTC m=+1282.823502189" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=WL9wy 2025-12-12T16:38:09.708067644+00:00 stderr F time="2025-12-12T16:38:09Z" level=info msg="evaluating current pod" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=MFHId 2025-12-12T16:38:09.708067644+00:00 stderr F time="2025-12-12T16:38:09Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=redhat-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=redhat-operators-wqdb8 current-pod.namespace=openshift-marketplace id=MFHId 2025-12-12T16:38:09.904133120+00:00 stderr F I1212 16:38:09.903980 1 warnings.go:110] "Warning: spec.nodeSelector[node-role.kubernetes.io/master]: use \"node-role.kubernetes.io/control-plane\" instead" 2025-12-12T16:38:09.904133120+00:00 stderr F time="2025-12-12T16:38:09Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=WL9wy 2025-12-12T16:38:09.904278784+00:00 stderr F time="2025-12-12T16:38:09Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=WL9wy 2025-12-12T16:38:09.904278784+00:00 stderr F time="2025-12-12T16:38:09Z" level=info msg="requeueing registry server for catalog update check: update pod not yet 
ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=WL9wy 2025-12-12T16:38:10.496612837+00:00 stderr F time="2025-12-12T16:38:10Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=oIdXS 2025-12-12T16:38:10.496612837+00:00 stderr F time="2025-12-12T16:38:10Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=oIdXS 2025-12-12T16:38:10.696234873+00:00 stderr F time="2025-12-12T16:38:10Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=lUfiH 2025-12-12T16:38:10.696234873+00:00 stderr F time="2025-12-12T16:38:10Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=lUfiH 2025-12-12T16:38:11.694712879+00:00 stderr F time="2025-12-12T16:38:11Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=oIdXS 2025-12-12T16:38:11.694712879+00:00 stderr F time="2025-12-12T16:38:11Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=oIdXS 2025-12-12T16:38:11.694761470+00:00 stderr F time="2025-12-12T16:38:11Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=oIdXS 2025-12-12T16:38:11.694761470+00:00 stderr F time="2025-12-12T16:38:11Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=oIdXS 2025-12-12T16:38:11.694761470+00:00 stderr F time="2025-12-12T16:38:11Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=oIdXS 2025-12-12T16:38:11.895402091+00:00 stderr F time="2025-12-12T16:38:11Z" level=info msg="evaluating current pod" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 
current-pod.namespace=openshift-marketplace id=lUfiH 2025-12-12T16:38:11.895402091+00:00 stderr F time="2025-12-12T16:38:11Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=certified-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=certified-operators-psnw2 current-pod.namespace=openshift-marketplace id=lUfiH 2025-12-12T16:38:12.295209367+00:00 stderr F time="2025-12-12T16:38:12Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=qn4oL 2025-12-12T16:38:12.295209367+00:00 stderr F time="2025-12-12T16:38:12Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=qn4oL 2025-12-12T16:38:12.894729420+00:00 stderr F time="2025-12-12T16:38:12Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=qn4oL 2025-12-12T16:38:12.894729420+00:00 stderr F time="2025-12-12T16:38:12Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=qn4oL 2025-12-12T16:38:12.894826963+00:00 stderr F time="2025-12-12T16:38:12Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=qn4oL 2025-12-12T16:38:12.894826963+00:00 stderr F time="2025-12-12T16:38:12Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=qn4oL 2025-12-12T16:38:12.894826963+00:00 stderr F time="2025-12-12T16:38:12Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=qn4oL 2025-12-12T16:38:13.295779407+00:00 stderr F time="2025-12-12T16:38:13Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=uCc8u 2025-12-12T16:38:13.295779407+00:00 stderr F time="2025-12-12T16:38:13Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true 
current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=uCc8u 2025-12-12T16:38:13.895197047+00:00 stderr F time="2025-12-12T16:38:13Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=uCc8u 2025-12-12T16:38:13.895197047+00:00 stderr F time="2025-12-12T16:38:13Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=uCc8u 2025-12-12T16:38:13.895333571+00:00 stderr F time="2025-12-12T16:38:13Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=uCc8u 2025-12-12T16:38:13.895370612+00:00 stderr F time="2025-12-12T16:38:13Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=uCc8u 2025-12-12T16:38:13.895381742+00:00 stderr F time="2025-12-12T16:38:13Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=uCc8u 2025-12-12T16:38:14.295244098+00:00 stderr F time="2025-12-12T16:38:14Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=D0MMq 2025-12-12T16:38:14.295244098+00:00 stderr F time="2025-12-12T16:38:14Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=D0MMq 2025-12-12T16:38:14.897253334+00:00 stderr F time="2025-12-12T16:38:14Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=D0MMq 2025-12-12T16:38:14.897253334+00:00 stderr F time="2025-12-12T16:38:14Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=D0MMq 2025-12-12T16:38:14.897253334+00:00 stderr F time="2025-12-12T16:38:14Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: 
community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=D0MMq 2025-12-12T16:38:14.897253334+00:00 stderr F time="2025-12-12T16:38:14Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=D0MMq 2025-12-12T16:38:14.897253334+00:00 stderr F time="2025-12-12T16:38:14Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=D0MMq 2025-12-12T16:38:15.296104865+00:00 stderr F time="2025-12-12T16:38:15Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=FQxep 2025-12-12T16:38:15.296199648+00:00 stderr F time="2025-12-12T16:38:15Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=FQxep 2025-12-12T16:38:15.896582013+00:00 stderr F time="2025-12-12T16:38:15Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=FQxep 2025-12-12T16:38:15.896582013+00:00 stderr F time="2025-12-12T16:38:15Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=FQxep 2025-12-12T16:38:15.896627094+00:00 stderr F time="2025-12-12T16:38:15Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=FQxep 2025-12-12T16:38:15.896627094+00:00 stderr F time="2025-12-12T16:38:15Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=FQxep 2025-12-12T16:38:15.896627094+00:00 stderr F time="2025-12-12T16:38:15Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=FQxep 2025-12-12T16:38:16.295843264+00:00 stderr F time="2025-12-12T16:38:16Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 
current-pod.namespace=openshift-marketplace id=zmZuU 2025-12-12T16:38:16.295954207+00:00 stderr F time="2025-12-12T16:38:16Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=zmZuU 2025-12-12T16:38:16.894484015+00:00 stderr F time="2025-12-12T16:38:16Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=zmZuU 2025-12-12T16:38:16.894484015+00:00 stderr F time="2025-12-12T16:38:16Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=zmZuU 2025-12-12T16:38:16.894584838+00:00 stderr F time="2025-12-12T16:38:16Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=zmZuU 2025-12-12T16:38:16.894584838+00:00 stderr F time="2025-12-12T16:38:16Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=zmZuU 2025-12-12T16:38:16.894584838+00:00 stderr F time="2025-12-12T16:38:16Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=zmZuU 2025-12-12T16:38:20.816846687+00:00 stderr F time="2025-12-12T16:38:20Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=M96FS 2025-12-12T16:38:20.816947499+00:00 stderr F time="2025-12-12T16:38:20Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=M96FS 2025-12-12T16:38:20.825267768+00:00 stderr F time="2025-12-12T16:38:20Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=M96FS 2025-12-12T16:38:20.825334870+00:00 stderr F time="2025-12-12T16:38:20Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true 
current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=M96FS 2025-12-12T16:38:20.825677569+00:00 stderr F time="2025-12-12T16:38:20Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=M96FS 2025-12-12T16:38:20.825720150+00:00 stderr F time="2025-12-12T16:38:20Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=M96FS 2025-12-12T16:38:20.825755601+00:00 stderr F time="2025-12-12T16:38:20Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=M96FS 2025-12-12T16:38:21.251581959+00:00 stderr F time="2025-12-12T16:38:21Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=V4wSt 2025-12-12T16:38:21.251647971+00:00 stderr F time="2025-12-12T16:38:21Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=V4wSt 2025-12-12T16:38:21.261748385+00:00 stderr F time="2025-12-12T16:38:21Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=V4wSt 2025-12-12T16:38:21.261907809+00:00 stderr F time="2025-12-12T16:38:21Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=V4wSt 2025-12-12T16:38:21.410250855+00:00 stderr F time="2025-12-12T16:38:21Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=Jlmuq 2025-12-12T16:38:21.410401469+00:00 stderr F time="2025-12-12T16:38:21Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=Jlmuq 2025-12-12T16:38:22.009068850+00:00 stderr F time="2025-12-12T16:38:22Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true 
current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=Jlmuq 2025-12-12T16:38:22.009150812+00:00 stderr F time="2025-12-12T16:38:22Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=Jlmuq 2025-12-12T16:38:27.375690626+00:00 stderr F time="2025-12-12T16:38:27Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=mEURa 2025-12-12T16:38:27.375754427+00:00 stderr F time="2025-12-12T16:38:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=mEURa 2025-12-12T16:38:27.385278666+00:00 stderr F time="2025-12-12T16:38:27Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=mEURa 2025-12-12T16:38:27.385325748+00:00 stderr F time="2025-12-12T16:38:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=mEURa 2025-12-12T16:38:27.385423930+00:00 stderr F time="2025-12-12T16:38:27Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=mEURa 2025-12-12T16:38:27.385452521+00:00 stderr F time="2025-12-12T16:38:27Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace error="catalog polling: community-operators not ready for update: update pod community-operators-4sccg has not yet reported ready" id=mEURa 2025-12-12T16:38:27.385475671+00:00 stderr F time="2025-12-12T16:38:27Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace id=mEURa 2025-12-12T16:38:27.885103805+00:00 stderr F time="2025-12-12T16:38:27Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=/fjCI 2025-12-12T16:38:27.885286909+00:00 stderr F time="2025-12-12T16:38:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true 
correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=/fjCI 2025-12-12T16:38:27.898575653+00:00 stderr F time="2025-12-12T16:38:27Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=/fjCI 2025-12-12T16:38:27.898710657+00:00 stderr F time="2025-12-12T16:38:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=/fjCI 2025-12-12T16:38:27.898842390+00:00 stderr F time="2025-12-12T16:38:27Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=/fjCI 2025-12-12T16:38:27.898902852+00:00 stderr F time="2025-12-12T16:38:27Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=/fjCI 2025-12-12T16:38:27.898951643+00:00 stderr F time="2025-12-12T16:38:27Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=/fjCI 2025-12-12T16:38:28.296878781+00:00 stderr F time="2025-12-12T16:38:28Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=zMspK 2025-12-12T16:38:28.296878781+00:00 stderr F time="2025-12-12T16:38:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=zMspK 2025-12-12T16:38:28.372433789+00:00 stderr F time="2025-12-12T16:38:28Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=zMspK 2025-12-12T16:38:28.372579883+00:00 stderr F time="2025-12-12T16:38:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=zMspK 2025-12-12T16:38:28.772837008+00:00 stderr F time="2025-12-12T16:38:28Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true 
current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=U8t5c 2025-12-12T16:38:28.772837008+00:00 stderr F time="2025-12-12T16:38:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=U8t5c 2025-12-12T16:38:29.372392212+00:00 stderr F time="2025-12-12T16:38:29Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=U8t5c 2025-12-12T16:38:29.372392212+00:00 stderr F time="2025-12-12T16:38:29Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=U8t5c 2025-12-12T16:38:45.905963453+00:00 stderr F time="2025-12-12T16:38:45Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=Sk+uE 2025-12-12T16:38:45.905963453+00:00 stderr F time="2025-12-12T16:38:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=Sk+uE 2025-12-12T16:38:45.916966199+00:00 stderr F time="2025-12-12T16:38:45Z" level=info msg="evaluating current pod" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=Sk+uE 2025-12-12T16:38:45.916966199+00:00 stderr F time="2025-12-12T16:38:45Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=community-operators catalogsource.namespace=openshift-marketplace correctHash=true correctImages=true current-pod.name=community-operators-6jgv5 current-pod.namespace=openshift-marketplace id=Sk+uE 2025-12-12T16:38:57.907307801+00:00 stderr F time="2025-12-12T16:38:57Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=hq8XR 2025-12-12T16:38:57.907391553+00:00 stderr F time="2025-12-12T16:38:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=hq8XR 2025-12-12T16:38:57.918105712+00:00 stderr F time="2025-12-12T16:38:57Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true 
correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=hq8XR 2025-12-12T16:38:57.918105712+00:00 stderr F time="2025-12-12T16:38:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=hq8XR 2025-12-12T16:38:57.918146203+00:00 stderr F time="2025-12-12T16:38:57Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=hq8XR 2025-12-12T16:38:57.918146203+00:00 stderr F time="2025-12-12T16:38:57Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=hq8XR 2025-12-12T16:38:57.918146203+00:00 stderr F time="2025-12-12T16:38:57Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=hq8XR 2025-12-12T16:39:27.924447850+00:00 stderr F time="2025-12-12T16:39:27Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=SP9sj 2025-12-12T16:39:27.924447850+00:00 stderr F time="2025-12-12T16:39:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=SP9sj 2025-12-12T16:39:27.934690267+00:00 stderr F time="2025-12-12T16:39:27Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=SP9sj 2025-12-12T16:39:27.934690267+00:00 stderr F time="2025-12-12T16:39:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=SP9sj 2025-12-12T16:39:27.934740018+00:00 stderr F time="2025-12-12T16:39:27Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=SP9sj 2025-12-12T16:39:27.934761979+00:00 stderr F time="2025-12-12T16:39:27Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators 
catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=SP9sj 2025-12-12T16:39:27.934761979+00:00 stderr F time="2025-12-12T16:39:27Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=SP9sj 2025-12-12T16:39:57.942864169+00:00 stderr F time="2025-12-12T16:39:57Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=4tR3v 2025-12-12T16:39:57.942864169+00:00 stderr F time="2025-12-12T16:39:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=4tR3v 2025-12-12T16:39:57.950232964+00:00 stderr F time="2025-12-12T16:39:57Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=4tR3v 2025-12-12T16:39:57.950232964+00:00 stderr F time="2025-12-12T16:39:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=4tR3v 2025-12-12T16:39:57.950273285+00:00 stderr F time="2025-12-12T16:39:57Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=4tR3v 2025-12-12T16:39:57.950273285+00:00 stderr F time="2025-12-12T16:39:57Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=4tR3v 2025-12-12T16:39:57.950290786+00:00 stderr F time="2025-12-12T16:39:57Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=4tR3v 2025-12-12T16:40:13.398788775+00:00 stderr F time="2025-12-12T16:40:13Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=GfIQD 2025-12-12T16:40:13.398788775+00:00 stderr F time="2025-12-12T16:40:13Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts 
current-pod.namespace=service-telemetry id=GfIQD 2025-12-12T16:40:13.409071114+00:00 stderr F time="2025-12-12T16:40:13Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=GfIQD 2025-12-12T16:40:13.409071114+00:00 stderr F time="2025-12-12T16:40:13Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=GfIQD 2025-12-12T16:40:13.409146135+00:00 stderr F time="2025-12-12T16:40:13Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=GfIQD 2025-12-12T16:40:13.409146135+00:00 stderr F time="2025-12-12T16:40:13Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=GfIQD 2025-12-12T16:40:13.409146135+00:00 stderr F time="2025-12-12T16:40:13Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=GfIQD 2025-12-12T16:40:27.399744915+00:00 stderr F time="2025-12-12T16:40:27Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=zl08B 2025-12-12T16:40:27.399744915+00:00 stderr F time="2025-12-12T16:40:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=zl08B 2025-12-12T16:40:27.407629943+00:00 stderr F time="2025-12-12T16:40:27Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=zl08B 2025-12-12T16:40:27.407688364+00:00 stderr F time="2025-12-12T16:40:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=zl08B 2025-12-12T16:40:27.407756986+00:00 stderr F time="2025-12-12T16:40:27Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet 
reported ready" id=zl08B 2025-12-12T16:40:27.407784497+00:00 stderr F time="2025-12-12T16:40:27Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=zl08B 2025-12-12T16:40:27.407807317+00:00 stderr F time="2025-12-12T16:40:27Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=zl08B 2025-12-12T16:40:27.960795222+00:00 stderr F time="2025-12-12T16:40:27Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=kT6YR 2025-12-12T16:40:27.960795222+00:00 stderr F time="2025-12-12T16:40:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=kT6YR 2025-12-12T16:40:27.977853920+00:00 stderr F time="2025-12-12T16:40:27Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=kT6YR 2025-12-12T16:40:27.977937322+00:00 stderr F time="2025-12-12T16:40:27Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=kT6YR 2025-12-12T16:40:27.978030325+00:00 stderr F time="2025-12-12T16:40:27Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=kT6YR 2025-12-12T16:40:27.978070956+00:00 stderr F time="2025-12-12T16:40:27Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=kT6YR 2025-12-12T16:40:27.978108507+00:00 stderr F time="2025-12-12T16:40:27Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=kT6YR 2025-12-12T16:40:38.185927861+00:00 stderr F time="2025-12-12T16:40:38Z" level=info msg="resolving sources" id=2+WcJ namespace=openshift-must-gather-2sjxj 2025-12-12T16:40:38.185927861+00:00 stderr F time="2025-12-12T16:40:38Z" level=info msg="checking if subscriptions need update" id=2+WcJ namespace=openshift-must-gather-2sjxj 2025-12-12T16:40:38.201873882+00:00 stderr F time="2025-12-12T16:40:38Z" level=info 
msg="No subscriptions were found in namespace openshift-must-gather-2sjxj" id=2+WcJ namespace=openshift-must-gather-2sjxj 2025-12-12T16:40:38.212280583+00:00 stderr F time="2025-12-12T16:40:38Z" level=info msg="resolving sources" id=KJtnv namespace=openshift-must-gather-2sjxj 2025-12-12T16:40:38.212280583+00:00 stderr F time="2025-12-12T16:40:38Z" level=info msg="checking if subscriptions need update" id=KJtnv namespace=openshift-must-gather-2sjxj 2025-12-12T16:40:38.295854333+00:00 stderr F time="2025-12-12T16:40:38Z" level=info msg="No subscriptions were found in namespace openshift-must-gather-2sjxj" id=KJtnv namespace=openshift-must-gather-2sjxj 2025-12-12T16:40:43.393563556+00:00 stderr F time="2025-12-12T16:40:43Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=t+tY3 2025-12-12T16:40:43.393563556+00:00 stderr F time="2025-12-12T16:40:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=t+tY3 2025-12-12T16:40:43.402538511+00:00 stderr F time="2025-12-12T16:40:43Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=t+tY3 2025-12-12T16:40:43.402538511+00:00 stderr F time="2025-12-12T16:40:43Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=t+tY3 2025-12-12T16:40:43.402586262+00:00 stderr F time="2025-12-12T16:40:43Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=t+tY3 2025-12-12T16:40:43.402586262+00:00 stderr F time="2025-12-12T16:40:43Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=t+tY3 2025-12-12T16:40:43.402594452+00:00 stderr F time="2025-12-12T16:40:43Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=t+tY3 2025-12-12T16:40:54.398085739+00:00 stderr F time="2025-12-12T16:40:54Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=arVlm 2025-12-12T16:40:54.398085739+00:00 stderr F time="2025-12-12T16:40:54Z" level=info msg="of 1 pods matching label selector, 1 
have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=arVlm 2025-12-12T16:40:54.408783118+00:00 stderr F time="2025-12-12T16:40:54Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=arVlm 2025-12-12T16:40:54.408783118+00:00 stderr F time="2025-12-12T16:40:54Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=arVlm 2025-12-12T16:40:54.408839459+00:00 stderr F time="2025-12-12T16:40:54Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=arVlm 2025-12-12T16:40:54.408847700+00:00 stderr F time="2025-12-12T16:40:54Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=arVlm 2025-12-12T16:40:54.408854780+00:00 stderr F time="2025-12-12T16:40:54Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=arVlm 2025-12-12T16:40:57.986417738+00:00 stderr F time="2025-12-12T16:40:57Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=2uWe2 2025-12-12T16:40:57.986417738+00:00 stderr F time="2025-12-12T16:40:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=2uWe2 2025-12-12T16:40:57.999080096+00:00 stderr F time="2025-12-12T16:40:57Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=2uWe2 2025-12-12T16:40:57.999080096+00:00 stderr F time="2025-12-12T16:40:57Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=2uWe2 2025-12-12T16:40:57.999139337+00:00 stderr F time="2025-12-12T16:40:57Z" level=error msg="error ensuring registry server: could not ensure update pod" 
catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=2uWe2 2025-12-12T16:40:57.999139337+00:00 stderr F time="2025-12-12T16:40:57Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=2uWe2 2025-12-12T16:40:57.999139337+00:00 stderr F time="2025-12-12T16:40:57Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=2uWe2 2025-12-12T16:41:28.008435530+00:00 stderr F time="2025-12-12T16:41:28Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=3A1Us 2025-12-12T16:41:28.008435530+00:00 stderr F time="2025-12-12T16:41:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=3A1Us 2025-12-12T16:41:28.015871787+00:00 stderr F time="2025-12-12T16:41:28Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=3A1Us 2025-12-12T16:41:28.015871787+00:00 stderr F time="2025-12-12T16:41:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=3A1Us 2025-12-12T16:41:28.015871787+00:00 stderr F time="2025-12-12T16:41:28Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=3A1Us 2025-12-12T16:41:28.015871787+00:00 stderr F time="2025-12-12T16:41:28Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=3A1Us 2025-12-12T16:41:28.015871787+00:00 stderr F time="2025-12-12T16:41:28Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=3A1Us 2025-12-12T16:41:58.021701826+00:00 stderr F time="2025-12-12T16:41:58Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true 
correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=T6TMj 2025-12-12T16:41:58.021701826+00:00 stderr F time="2025-12-12T16:41:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=T6TMj 2025-12-12T16:41:58.030053496+00:00 stderr F time="2025-12-12T16:41:58Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=T6TMj 2025-12-12T16:41:58.030053496+00:00 stderr F time="2025-12-12T16:41:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=T6TMj 2025-12-12T16:41:58.030113517+00:00 stderr F time="2025-12-12T16:41:58Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=T6TMj 2025-12-12T16:41:58.030113517+00:00 stderr F time="2025-12-12T16:41:58Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=T6TMj 2025-12-12T16:41:58.030131178+00:00 stderr F time="2025-12-12T16:41:58Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=T6TMj 2025-12-12T16:42:28.035618209+00:00 stderr F time="2025-12-12T16:42:28Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=XicR0 2025-12-12T16:42:28.035618209+00:00 stderr F time="2025-12-12T16:42:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=XicR0 2025-12-12T16:42:28.042924863+00:00 stderr F time="2025-12-12T16:42:28Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=XicR0 2025-12-12T16:42:28.042924863+00:00 stderr F time="2025-12-12T16:42:28Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true 
current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=XicR0 2025-12-12T16:42:28.042971254+00:00 stderr F time="2025-12-12T16:42:28Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=XicR0 2025-12-12T16:42:28.042971254+00:00 stderr F time="2025-12-12T16:42:28Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=XicR0 2025-12-12T16:42:28.042971254+00:00 stderr F time="2025-12-12T16:42:28Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry id=XicR0 2025-12-12T16:42:58.053856101+00:00 stderr F time="2025-12-12T16:42:58Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=zyUH+ 2025-12-12T16:42:58.053856101+00:00 stderr F time="2025-12-12T16:42:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=zyUH+ 2025-12-12T16:42:58.064309034+00:00 stderr F time="2025-12-12T16:42:58Z" level=info msg="evaluating current pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=zyUH+ 2025-12-12T16:42:58.064309034+00:00 stderr F time="2025-12-12T16:42:58Z" level=info msg="of 1 pods matching label selector, 1 have the correct images and matching hash" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry correctHash=true correctImages=true current-pod.name=infrawatch-operators-cdpts current-pod.namespace=service-telemetry id=zyUH+ 2025-12-12T16:42:58.064373115+00:00 stderr F time="2025-12-12T16:42:58Z" level=error msg="error ensuring registry server: could not ensure update pod" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=zyUH+ 2025-12-12T16:42:58.064373115+00:00 stderr F time="2025-12-12T16:42:58Z" level=error msg="error ensuring registry server: ensure update pod error is not of type UpdateNotReadyErr" catalogsource.name=infrawatch-operators catalogsource.namespace=service-telemetry error="catalog polling: infrawatch-operators not ready for update: update pod infrawatch-operators-6bs58 has not yet reported ready" id=zyUH+ 2025-12-12T16:42:58.064373115+00:00 stderr F time="2025-12-12T16:42:58Z" level=info msg="requeueing registry server for catalog update check: update pod not yet ready" catalogsource.name=infrawatch-operators 
catalogsource.namespace=service-telemetry id=zyUH+ 2025-12-12T16:42:58.807973319+00:00 stderr F time="2025-12-12T16:42:58Z" level=info msg="resolving sources" id=skgC4 namespace=openshift-must-gather-2sjxj 2025-12-12T16:42:58.807973319+00:00 stderr F time="2025-12-12T16:42:58Z" level=info msg="checking if subscriptions need update" id=skgC4 namespace=openshift-must-gather-2sjxj 2025-12-12T16:42:58.812021430+00:00 stderr F time="2025-12-12T16:42:58Z" level=info msg="No subscriptions were found in namespace openshift-must-gather-2sjxj" id=skgC4 namespace=openshift-must-gather-2sjxj 2025-12-12T16:43:04.714784037+00:00 stderr F time="2025-12-12T16:43:04Z" level=info msg="resolving sources" id=7Bejk namespace=openshift-must-gather-2sjxj 2025-12-12T16:43:04.714784037+00:00 stderr F time="2025-12-12T16:43:04Z" level=info msg="checking if subscriptions need update" id=7Bejk namespace=openshift-must-gather-2sjxj 2025-12-12T16:43:04.718366997+00:00 stderr F time="2025-12-12T16:43:04Z" level=info msg="No subscriptions were found in namespace openshift-must-gather-2sjxj" id=7Bejk namespace=openshift-must-gather-2sjxj
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_revision-pruner-6-crc_6e33370d-b952-4a48-a6cb-73e765546903/pruner/0.log
2025-12-12T16:16:54.705275773+00:00 stderr F I1212 16:16:54.700438 1 cmd.go:39] &{ true {false} prune true map[cert-dir:0xc000a4a0a0 max-eligible-revision:0xc000881d60 protected-revisions:0xc000881e00 resource-dir:0xc000881ea0 static-pod-name:0xc000a4a000 v:0xc000a4a780] [0xc000a4a780 0xc000881d60 0xc000881e00 0xc000881ea0 0xc000a4a0a0 0xc000a4a000] [] map[cert-dir:0xc000a4a0a0 help:0xc000a4ab40 log-flush-frequency:0xc000a4a6e0 max-eligible-revision:0xc000881d60 protected-revisions:0xc000881e00 resource-dir:0xc000881ea0 static-pod-name:0xc000a4a000 v:0xc000a4a780 vmodule:0xc000a4a820] [0xc000881d60 0xc000881e00 0xc000881ea0 0xc000a4a000 0xc000a4a0a0 0xc000a4a6e0 0xc000a4a780 0xc000a4a820 0xc000a4ab40] [0xc000a4a0a0 0xc000a4ab40 0xc000a4a6e0 0xc000881d60 0xc000881e00 0xc000881ea0 0xc000a4a000 0xc000a4a780 0xc000a4a820] map[104:0xc000a4ab40 118:0xc000a4a780] [] -1 0 0xc00080ecf0 true 0x242b060 []} 2025-12-12T16:16:54.705275773+00:00 stderr F I1212 16:16:54.701592 1 cmd.go:40] (*prune.PruneOptions)(0xc000896730)({ 2025-12-12T16:16:54.705275773+00:00 stderr F MaxEligibleRevision: (int) 6, 2025-12-12T16:16:54.705275773+00:00 stderr F ProtectedRevisions: ([]int) (len=5 cap=5) {
2025-12-12T16:16:54.705275773+00:00 stderr F (int) 2, 2025-12-12T16:16:54.705275773+00:00 stderr F (int) 3, 2025-12-12T16:16:54.705275773+00:00 stderr F (int) 4, 2025-12-12T16:16:54.705275773+00:00 stderr F (int) 5, 2025-12-12T16:16:54.705275773+00:00 stderr F (int) 6 2025-12-12T16:16:54.705275773+00:00 stderr F }, 2025-12-12T16:16:54.705275773+00:00 stderr F ResourceDir: (string) (len=36) "/etc/kubernetes/static-pod-resources", 2025-12-12T16:16:54.705275773+00:00 stderr F CertDir: (string) (len=20) "kube-scheduler-certs", 2025-12-12T16:16:54.705275773+00:00 stderr F StaticPodName: (string) (len=18) "kube-scheduler-pod" 2025-12-12T16:16:54.705275773+00:00 stderr F })
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-69b85846b6-mrrt5_a6c070b2-83ee-4c73-9201-3ab5dcc9aeca/etcd-operator/0.log
2025-12-12T16:16:46.782058014+00:00 stderr F I1212 16:16:46.775885 1 profiler.go:21] Starting profiling endpoint at http://127.0.0.1:6060/debug/pprof/ 2025-12-12T16:16:46.782058014+00:00 stderr F I1212 16:16:46.777723 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:46.801417196+00:00 stderr F I1212 16:16:46.801368 1 cmd.go:253] Using service-serving-cert provided certificates 2025-12-12T16:16:46.801459577+00:00 stderr F I1212 16:16:46.801426 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 2025-12-12T16:16:46.805348132+00:00 stderr F I1212 16:16:46.803342 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:46.959968307+00:00 stderr F I1212 16:16:46.956566 1 builder.go:304] openshift-cluster-etcd-operator version 4.20.0-202510211040.p2.g49412ac.assembly.stream.el9-49412ac-49412ac13833adf0da4c44b9c9a0e91f8ac04e4d 2025-12-12T16:16:48.350950387+00:00 stderr F I1212 16:16:48.350320 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:48.350950387+00:00 stderr F W1212 16:16:48.350854 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:48.350950387+00:00 stderr F W1212 16:16:48.350860 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.
2025-12-12T16:16:48.350950387+00:00 stderr F W1212 16:16:48.350864 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:48.350950387+00:00 stderr F W1212 16:16:48.350867 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:48.350950387+00:00 stderr F W1212 16:16:48.350870 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:48.350950387+00:00 stderr F W1212 16:16:48.350872 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected. 2025-12-12T16:16:48.360376727+00:00 stderr F I1212 16:16:48.358286 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:48.360376727+00:00 stderr F I1212 16:16:48.358729 1 leaderelection.go:257] attempting to acquire leader lease openshift-etcd-operator/openshift-cluster-etcd-operator-lock... 2025-12-12T16:16:48.384290241+00:00 stderr F I1212 16:16:48.384162 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:16:48.387830708+00:00 stderr F I1212 16:16:48.385258 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:48.387830708+00:00 stderr F I1212 16:16:48.385399 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:48.387830708+00:00 stderr F I1212 16:16:48.385536 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:48.387830708+00:00 stderr F I1212 16:16:48.385922 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:48.390938944+00:00 stderr F I1212 16:16:48.388465 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:48.390938944+00:00 stderr F I1212 16:16:48.390257 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:48.394855779+00:00 stderr F I1212 16:16:48.392713 1 leaderelection.go:271] successfully acquired lease openshift-etcd-operator/openshift-cluster-etcd-operator-lock 2025-12-12T16:16:48.398076298+00:00 stderr F I1212 16:16:48.395703 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-etcd-operator", Name:"openshift-cluster-etcd-operator-lock", UID:"a28a8b71-89cd-48dc-b29d-c416d9b1b15b", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"37442", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' etcd-operator-69b85846b6-mrrt5_e5dccee5-e7b9-4e63-ad8d-4f5955a1fa0d became leader 2025-12-12T16:16:48.400682432+00:00 stderr F I1212 16:16:48.398678 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:48.400682432+00:00 stderr F I1212 16:16:48.398741 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:48.493934158+00:00 stderr F I1212 16:16:48.491376 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:48.505324366+00:00 
stderr F I1212 16:16:48.495331 1 starter.go:195] recorded cluster versions: map[etcd:4.20.1 operator:4.20.1 raw-internal:4.20.1] 2025-12-12T16:16:48.505324366+00:00 stderr F I1212 16:16:48.499425 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:48.548632104+00:00 stderr F I1212 16:16:48.548008 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:48.595364235+00:00 stderr F E1212 16:16:48.595284 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:48.601044583+00:00 stderr F E1212 16:16:48.600292 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:48.621238256+00:00 stderr F E1212 16:16:48.621158 1 static_resource_controller.go:221] "Unhandled Error" err="missing informer for namespace \"\"; no dynamic wiring added, time-based only." 2025-12-12T16:16:48.632186934+00:00 stderr F I1212 16:16:48.631512 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:48.674099177+00:00 stderr F I1212 16:16:48.674036 1 starter.go:411] FeatureGates initializedenabled[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks]disabled[AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests 
VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:16:48.674246770+00:00 stderr F I1212 16:16:48.674119 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:48.686210013+00:00 stderr F I1212 16:16:48.683031 1 starter.go:545] waiting for cluster version informer sync... 2025-12-12T16:16:48.696917354+00:00 stderr F I1212 16:16:48.695657 1 starter.go:568] Detected available machine API, starting vertical scaling related controllers and informers... 
2025-12-12T16:16:48.731779725+00:00 stderr F I1212 16:16:48.731083 1 base_controller.go:76] Waiting for caches to sync for MachineDeletionHooksController 2025-12-12T16:16:48.731779725+00:00 stderr F I1212 16:16:48.731111 1 base_controller.go:76] Waiting for caches to sync for ClusterMemberRemovalController 2025-12-12T16:16:48.747601931+00:00 stderr F I1212 16:16:48.741309 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:48.754090690+00:00 stderr F I1212 16:16:48.753590 1 base_controller.go:76] Waiting for caches to sync for MissingStaticPodController 2025-12-12T16:16:48.754090690+00:00 stderr F I1212 16:16:48.753918 1 base_controller.go:76] Waiting for caches to sync for GuardController 2025-12-12T16:16:48.754090690+00:00 stderr F I1212 16:16:48.753916 1 base_controller.go:76] Waiting for caches to sync for etcd-UnsupportedConfigOverrides 2025-12-12T16:16:48.754090690+00:00 stderr F I1212 16:16:48.753931 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer 2025-12-12T16:16:48.754090690+00:00 stderr F I1212 16:16:48.754004 1 base_controller.go:76] Waiting for caches to sync for etcd-Node 2025-12-12T16:16:48.754090690+00:00 stderr F I1212 16:16:48.754021 1 base_controller.go:76] Waiting for caches to sync for EtcdEndpointsController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.754299 1 base_controller.go:76] Waiting for caches to sync for etcd-operator-UnsupportedConfigOverrides 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.754538 1 base_controller.go:76] Waiting for caches to sync for FSyncController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.754551 1 base_controller.go:82] Caches are synced for FSyncController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.754563 1 base_controller.go:119] Starting #1 worker of FSyncController controller ... 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.754623 1 base_controller.go:76] Waiting for caches to sync for TargetConfigController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.754683 1 base_controller.go:76] Waiting for caches to sync for EtcdCertSignerController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.754693 1 base_controller.go:76] Waiting for caches to sync for etcd 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.754735 1 base_controller.go:76] Waiting for caches to sync for EtcdCertCleanerController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.754747 1 base_controller.go:82] Caches are synced for EtcdCertCleanerController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.754754 1 base_controller.go:119] Starting #1 worker of EtcdCertCleanerController controller ... 
2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.754664 1 base_controller.go:76] Waiting for caches to sync for ClusterMemberController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.755294 1 base_controller.go:76] Waiting for caches to sync for BackingResourceController-StaticResources 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.755487 1 base_controller.go:76] Waiting for caches to sync for EtcdStaticResources-StaticResources 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.755529 1 base_controller.go:76] Waiting for caches to sync for ConfigObserver 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.755585 1 base_controller.go:76] Waiting for caches to sync for etcd-InstallerState 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.755688 1 base_controller.go:76] Waiting for caches to sync for RevisionController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.755747 1 envvarcontroller.go:236] Starting EnvVarController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.755867 1 base_controller.go:76] Waiting for caches to sync for StatusSyncer_etcd 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.756073 1 base_controller.go:76] Waiting for caches to sync for etcd-StaticPodState 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.756374 1 base_controller.go:76] Waiting for caches to sync for BootstrapTeardownController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.756403 1 base_controller.go:76] Waiting for caches to sync for EtcdMembersController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.756421 1 base_controller.go:82] Caches are synced for EtcdMembersController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.756429 1 base_controller.go:119] Starting #1 worker of EtcdMembersController controller ... 
2025-12-12T16:16:48.759018180+00:00 stderr F E1212 16:16:48.756852 1 base_controller.go:279] "Unhandled Error" err="EtcdMembersController reconciliation failed: getting cache client could not retrieve endpoints: node lister not synced" 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.757169 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ReportEtcdMembersErrorUpdatingStatus' etcds.operator.openshift.io "cluster" not found 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.756598 1 base_controller.go:76] Waiting for caches to sync for PruneController 2025-12-12T16:16:48.759018180+00:00 stderr F I1212 16:16:48.757444 1 base_controller.go:76] Waiting for caches to sync for ScriptController 2025-12-12T16:16:48.763220833+00:00 stderr F I1212 16:16:48.760325 1 base_controller.go:76] Waiting for caches to sync for DefragController 2025-12-12T16:16:48.773721189+00:00 stderr F I1212 16:16:48.773663 1 base_controller.go:76] Waiting for caches to sync for Installer 2025-12-12T16:16:48.773774570+00:00 stderr F I1212 16:16:48.773723 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ReportEtcdMembersErrorUpdatingStatus' etcds.operator.openshift.io "cluster" not found 2025-12-12T16:16:48.773998546+00:00 stderr F E1212 16:16:48.773681 1 base_controller.go:279] "Unhandled Error" err="EtcdMembersController reconciliation failed: getting cache client could not retrieve endpoints: node lister not synced" 2025-12-12T16:16:48.787920666+00:00 stderr F I1212 16:16:48.787813 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:48.793375219+00:00 stderr F E1212 16:16:48.788142 1 base_controller.go:279] "Unhandled Error" err="EtcdMembersController reconciliation failed: getting cache client could not retrieve endpoints: configmaps lister not synced" 2025-12-12T16:16:48.793375219+00:00 stderr F I1212 16:16:48.788233 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ReportEtcdMembersErrorUpdatingStatus' etcds.operator.openshift.io "cluster" not found 2025-12-12T16:16:48.827354598+00:00 stderr F E1212 16:16:48.823007 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.39:42020->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:48.842207411+00:00 stderr F E1212 16:16:48.840413 1 base_controller.go:279] "Unhandled Error" err="EtcdMembersController reconciliation failed: getting cache client could not retrieve endpoints: configmaps lister not synced" 2025-12-12T16:16:48.852249926+00:00 stderr F E1212 16:16:48.847969 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 
10.217.4.10:53: read udp 10.217.0.39:52969->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:48.857035983+00:00 stderr F I1212 16:16:48.856977 1 base_controller.go:82] Caches are synced for etcd-UnsupportedConfigOverrides 2025-12-12T16:16:48.857035983+00:00 stderr F I1212 16:16:48.857006 1 base_controller.go:119] Starting #1 worker of etcd-UnsupportedConfigOverrides controller ... 2025-12-12T16:16:48.858611662+00:00 stderr F I1212 16:16:48.858551 1 base_controller.go:82] Caches are synced for BackingResourceController-StaticResources 2025-12-12T16:16:48.858611662+00:00 stderr F I1212 16:16:48.858593 1 base_controller.go:119] Starting #1 worker of BackingResourceController-StaticResources controller ... 2025-12-12T16:16:48.860222501+00:00 stderr F I1212 16:16:48.859832 1 base_controller.go:82] Caches are synced for ScriptController 2025-12-12T16:16:48.860222501+00:00 stderr F I1212 16:16:48.859852 1 base_controller.go:119] Starting #1 worker of ScriptController controller ... 2025-12-12T16:16:48.860222501+00:00 stderr F I1212 16:16:48.859880 1 base_controller.go:82] Caches are synced for LoggingSyncer 2025-12-12T16:16:48.860222501+00:00 stderr F I1212 16:16:48.859929 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ... 2025-12-12T16:16:48.860376815+00:00 stderr F I1212 16:16:48.860354 1 base_controller.go:82] Caches are synced for etcd-operator-UnsupportedConfigOverrides 2025-12-12T16:16:48.860376815+00:00 stderr F I1212 16:16:48.860366 1 base_controller.go:119] Starting #1 worker of etcd-operator-UnsupportedConfigOverrides controller ... 2025-12-12T16:16:48.863778688+00:00 stderr F I1212 16:16:48.863745 1 base_controller.go:82] Caches are synced for StatusSyncer_etcd 2025-12-12T16:16:48.863796278+00:00 stderr F I1212 16:16:48.863781 1 base_controller.go:119] Starting #1 worker of StatusSyncer_etcd controller ... 2025-12-12T16:16:48.864283120+00:00 stderr F I1212 16:16:48.864082 1 base_controller.go:82] Caches are synced for EtcdStaticResources-StaticResources 2025-12-12T16:16:48.864283120+00:00 stderr F I1212 16:16:48.864241 1 base_controller.go:119] Starting #1 worker of EtcdStaticResources-StaticResources controller ... 
2025-12-12T16:16:48.864869784+00:00 stderr F I1212 16:16:48.864841 1 status_controller.go:230] clusteroperator/etcd diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:48Z","message":"EtcdMembersControllerDegraded: getting cache client could not retrieve endpoints: configmaps lister not synced\nNodeControllerDegraded: The master nodes not ready: node \"crc\" not ready since 2025-11-03 09:40:44 +0000 UTC because NodeStatusUnknown (Kubelet stopped posting node status.)","reason":"EtcdMembersController_ErrorUpdatingReportEtcdMembers::NodeController_MasterNodesReady","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:05:28Z","message":"NodeInstallerProgressing: 1 node is at revision 2\nEtcdMembersProgressing: No unstarted etcd members found","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:59:32Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 2\nEtcdMembersAvailable: 1 members are available","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:49Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:49Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:48.877328439+00:00 stderr F I1212 16:16:48.876092 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/etcd changed: Degraded changed from False to True ("EtcdMembersControllerDegraded: getting cache client could not retrieve endpoints: configmaps lister not synced\nNodeControllerDegraded: The master nodes not ready: node \"crc\" not ready since 2025-11-03 09:40:44 +0000 UTC because NodeStatusUnknown (Kubelet stopped posting node status.)") 2025-12-12T16:16:48.877328439+00:00 stderr F I1212 16:16:48.876500 1 base_controller.go:82] Caches are synced for DefragController 2025-12-12T16:16:48.877328439+00:00 stderr F I1212 16:16:48.876512 1 base_controller.go:119] Starting #1 worker of DefragController controller ... 
2025-12-12T16:16:48.888461090+00:00 stderr F E1212 16:16:48.886610 1 base_controller.go:279] "Unhandled Error" err="EtcdMembersController reconciliation failed: getting cache client could not retrieve endpoints: configmaps lister not synced" 2025-12-12T16:16:48.906086031+00:00 stderr F E1212 16:16:48.905530 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": missing env var values" 2025-12-12T16:16:48.906086031+00:00 stderr F E1212 16:16:48.905544 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.39:39162->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:48.910032317+00:00 stderr F I1212 16:16:48.907882 1 status_controller.go:230] clusteroperator/etcd diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:48Z","message":"EtcdMembersControllerDegraded: getting cache client could not retrieve endpoints: configmaps lister not synced\nNodeControllerDegraded: The master nodes not ready: node \"crc\" not ready since 2025-11-03 09:40:44 +0000 UTC because NodeStatusUnknown (Kubelet stopped posting node status.)\nScriptControllerDegraded: \"configmap/etcd-pod\": missing env var values","reason":"EtcdMembersController_ErrorUpdatingReportEtcdMembers::NodeController_MasterNodesReady::ScriptController_Error","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:05:28Z","message":"NodeInstallerProgressing: 1 node is at revision 2\nEtcdMembersProgressing: No unstarted etcd members found","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:59:32Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 2\nEtcdMembersAvailable: 1 members are available","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:49Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:49Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:48.924721756+00:00 stderr F E1212 16:16:48.924631 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": missing env var values" 2025-12-12T16:16:48.927454262+00:00 stderr F E1212 16:16:48.926959 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": missing env var values" 2025-12-12T16:16:48.932153437+00:00 stderr F I1212 16:16:48.931013 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/etcd changed: Degraded message changed from "EtcdMembersControllerDegraded: getting cache client could not retrieve endpoints: configmaps lister not synced\nNodeControllerDegraded: The master nodes not ready: node \"crc\" not ready since 2025-11-03 09:40:44 +0000 UTC because NodeStatusUnknown (Kubelet stopped posting node status.)" to "EtcdMembersControllerDegraded: getting cache client could not retrieve endpoints: configmaps lister not synced\nNodeControllerDegraded: The master nodes not ready: node \"crc\" not ready since 
2025-11-03 09:40:44 +0000 UTC because NodeStatusUnknown (Kubelet stopped posting node status.)\nScriptControllerDegraded: \"configmap/etcd-pod\": missing env var values" 2025-12-12T16:16:48.943358371+00:00 stderr F I1212 16:16:48.942163 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:48.943639588+00:00 stderr F E1212 16:16:48.943500 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": missing env var values" 2025-12-12T16:16:48.956971773+00:00 stderr F I1212 16:16:48.956904 1 base_controller.go:82] Caches are synced for etcd-Node 2025-12-12T16:16:48.956971773+00:00 stderr F I1212 16:16:48.956939 1 base_controller.go:119] Starting #1 worker of etcd-Node controller ... 2025-12-12T16:16:48.970978015+00:00 stderr F E1212 16:16:48.970874 1 base_controller.go:279] "Unhandled Error" err="EtcdMembersController reconciliation failed: getting cache client could not retrieve endpoints: configmaps lister not synced" 2025-12-12T16:16:48.990236635+00:00 stderr F E1212 16:16:48.988268 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": missing env var values" 2025-12-12T16:16:49.009865774+00:00 stderr F E1212 16:16:49.009795 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.39:51870->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:49.020579296+00:00 stderr F E1212 16:16:49.020510 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": missing env var values" 2025-12-12T16:16:49.024212715+00:00 stderr F I1212 16:16:49.023699 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'MasterNodesReadyChanged' All master nodes are ready 2025-12-12T16:16:49.024212715+00:00 stderr F I1212 16:16:49.023904 1 status_controller.go:230] clusteroperator/etcd diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"EtcdMembersControllerDegraded: getting cache client could not retrieve endpoints: configmaps lister not synced\nScriptControllerDegraded: \"configmap/etcd-pod\": missing env var values\nNodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:05:28Z","message":"NodeInstallerProgressing: 1 node is at revision 2\nEtcdMembersProgressing: No unstarted etcd members found","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:59:32Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 2\nEtcdMembersAvailable: 1 members are available","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:49Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:49Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:49.034390243+00:00 stderr F I1212 16:16:49.033645 1 event.go:377] 
Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/etcd changed: Degraded changed from True to False ("EtcdMembersControllerDegraded: getting cache client could not retrieve endpoints: configmaps lister not synced\nScriptControllerDegraded: \"configmap/etcd-pod\": missing env var values\nNodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found") 2025-12-12T16:16:49.071389657+00:00 stderr F E1212 16:16:49.071120 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": missing env var values" 2025-12-12T16:16:49.135638765+00:00 stderr F E1212 16:16:49.135557 1 base_controller.go:279] "Unhandled Error" err="EtcdMembersController reconciliation failed: getting cache client could not retrieve endpoints: configmaps lister not synced" 2025-12-12T16:16:49.137987212+00:00 stderr F E1212 16:16:49.137414 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.39:34460->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:49.147985727+00:00 stderr F I1212 16:16:49.146728 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:49.254764073+00:00 stderr F E1212 16:16:49.254702 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.39:60399->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:49.346122964+00:00 stderr F I1212 16:16:49.342762 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:49.361228162+00:00 stderr F I1212 16:16:49.360788 1 base_controller.go:82] Caches are synced for PruneController 2025-12-12T16:16:49.361228162+00:00 stderr F I1212 16:16:49.360827 1 base_controller.go:119] Starting #1 worker of PruneController controller ... 2025-12-12T16:16:49.361228162+00:00 stderr F I1212 16:16:49.361107 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:16:49.361228162+00:00 stderr F I1212 16:16:49.361186 1 envvarcontroller.go:242] caches synced 2025-12-12T16:16:49.361228162+00:00 stderr F I1212 16:16:49.361217 1 base_controller.go:82] Caches are synced for RevisionController 2025-12-12T16:16:49.361228162+00:00 stderr F I1212 16:16:49.361221 1 base_controller.go:119] Starting #1 worker of RevisionController controller ... 2025-12-12T16:16:49.364650645+00:00 stderr F I1212 16:16:49.361669 1 base_controller.go:82] Caches are synced for TargetConfigController 2025-12-12T16:16:49.364650645+00:00 stderr F I1212 16:16:49.361714 1 base_controller.go:119] Starting #1 worker of TargetConfigController controller ... 
2025-12-12T16:16:49.364650645+00:00 stderr F E1212 16:16:49.361851 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: TargetConfigController missing env var values" 2025-12-12T16:16:49.437354920+00:00 stderr F E1212 16:16:49.437260 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.39:55243->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:49.462468934+00:00 stderr F I1212 16:16:49.461344 1 etcdcli_pool.go:70] creating a new cached client 2025-12-12T16:16:49.538384937+00:00 stderr F I1212 16:16:49.538282 1 etcdcli_pool.go:70] creating a new cached client 2025-12-12T16:16:49.543821360+00:00 stderr F I1212 16:16:49.542461 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:49.555925205+00:00 stderr F I1212 16:16:49.555500 1 base_controller.go:82] Caches are synced for MissingStaticPodController 2025-12-12T16:16:49.555925205+00:00 stderr F I1212 16:16:49.555540 1 base_controller.go:82] Caches are synced for ClusterMemberController 2025-12-12T16:16:49.555925205+00:00 stderr F I1212 16:16:49.555554 1 base_controller.go:119] Starting #1 worker of MissingStaticPodController controller ... 2025-12-12T16:16:49.555925205+00:00 stderr F I1212 16:16:49.555562 1 base_controller.go:119] Starting #1 worker of ClusterMemberController controller ... 2025-12-12T16:16:49.555925205+00:00 stderr F I1212 16:16:49.555512 1 base_controller.go:82] Caches are synced for GuardController 2025-12-12T16:16:49.555925205+00:00 stderr F I1212 16:16:49.555610 1 base_controller.go:119] Starting #1 worker of GuardController controller ... 2025-12-12T16:16:49.555925205+00:00 stderr F I1212 16:16:49.555665 1 etcdcli_pool.go:70] creating a new cached client 2025-12-12T16:16:49.559233076+00:00 stderr F I1212 16:16:49.556317 1 base_controller.go:82] Caches are synced for etcd-InstallerState 2025-12-12T16:16:49.559233076+00:00 stderr F I1212 16:16:49.556331 1 base_controller.go:119] Starting #1 worker of etcd-InstallerState controller ... 2025-12-12T16:16:49.559233076+00:00 stderr F I1212 16:16:49.557134 1 base_controller.go:82] Caches are synced for etcd-StaticPodState 2025-12-12T16:16:49.559233076+00:00 stderr F I1212 16:16:49.557144 1 base_controller.go:119] Starting #1 worker of etcd-StaticPodState controller ... 2025-12-12T16:16:49.585298072+00:00 stderr F I1212 16:16:49.581921 1 base_controller.go:82] Caches are synced for Installer 2025-12-12T16:16:49.585298072+00:00 stderr F I1212 16:16:49.581942 1 base_controller.go:119] Starting #1 worker of Installer controller ... 
2025-12-12T16:16:49.625578416+00:00 stderr F I1212 16:16:49.624888 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:16:49.626098048+00:00 stderr F I1212 16:16:49.626044 1 status_controller.go:230] clusteroperator/etcd diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"ScriptControllerDegraded: \"configmap/etcd-pod\": missing env var values\nNodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:05:28Z","message":"NodeInstallerProgressing: 1 node is at revision 2\nEtcdMembersProgressing: No unstarted etcd members found","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:59:32Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 2\nEtcdMembersAvailable: 1 members are available","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:49Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:49Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:49.643780720+00:00 stderr F I1212 16:16:49.643703 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/etcd changed: Degraded message changed from "EtcdMembersControllerDegraded: getting cache client could not retrieve endpoints: configmaps lister not synced\nScriptControllerDegraded: \"configmap/etcd-pod\": missing env var values\nNodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found" to "ScriptControllerDegraded: \"configmap/etcd-pod\": missing env var values\nNodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found" 2025-12-12T16:16:49.749519902+00:00 stderr F I1212 16:16:49.749453 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:49.811245309+00:00 stderr F E1212 16:16:49.807626 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.39:57750->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:49.935504342+00:00 stderr F I1212 16:16:49.935455 1 request.go:752] "Waited before sending request" delay="1.181229198s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/kube-system/configmaps?limit=500&resourceVersion=0" 2025-12-12T16:16:49.950504429+00:00 stderr F I1212 16:16:49.950427 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:49.958634737+00:00 stderr F I1212 16:16:49.955764 1 base_controller.go:82] Caches are synced for EtcdCertSignerController 2025-12-12T16:16:49.958634737+00:00 stderr F I1212 16:16:49.956541 1 base_controller.go:119] Starting #1 worker of EtcdCertSignerController controller ... 
2025-12-12T16:16:49.958634737+00:00 stderr F I1212 16:16:49.956699 1 base_controller.go:82] Caches are synced for ConfigObserver 2025-12-12T16:16:49.958634737+00:00 stderr F I1212 16:16:49.956735 1 base_controller.go:119] Starting #1 worker of ConfigObserver controller ... 2025-12-12T16:16:49.958634737+00:00 stderr F I1212 16:16:49.956793 1 base_controller.go:82] Caches are synced for EtcdEndpointsController 2025-12-12T16:16:49.958634737+00:00 stderr F I1212 16:16:49.956798 1 base_controller.go:119] Starting #1 worker of EtcdEndpointsController controller ... 2025-12-12T16:16:49.958634737+00:00 stderr F I1212 16:16:49.957266 1 base_controller.go:82] Caches are synced for BootstrapTeardownController 2025-12-12T16:16:49.958634737+00:00 stderr F I1212 16:16:49.957277 1 base_controller.go:119] Starting #1 worker of BootstrapTeardownController controller ... 2025-12-12T16:16:50.036801786+00:00 stderr F I1212 16:16:50.036736 1 base_controller.go:82] Caches are synced for ClusterMemberRemovalController 2025-12-12T16:16:50.036884468+00:00 stderr F I1212 16:16:50.036868 1 base_controller.go:119] Starting #1 worker of ClusterMemberRemovalController controller ... 2025-12-12T16:16:50.044225497+00:00 stderr F I1212 16:16:50.039371 1 base_controller.go:82] Caches are synced for MachineDeletionHooksController 2025-12-12T16:16:50.044225497+00:00 stderr F I1212 16:16:50.039427 1 base_controller.go:119] Starting #1 worker of MachineDeletionHooksController controller ... 2025-12-12T16:16:50.142758462+00:00 stderr F I1212 16:16:50.141821 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:50.349259404+00:00 stderr F I1212 16:16:50.348475 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:50.356302456+00:00 stderr F I1212 16:16:50.355534 1 base_controller.go:82] Caches are synced for etcd 2025-12-12T16:16:50.356302456+00:00 stderr F I1212 16:16:50.355560 1 base_controller.go:119] Starting #1 worker of etcd controller ... 
2025-12-12T16:16:50.603921721+00:00 stderr F E1212 16:16:50.602942 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.39:56065->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:50.972916390+00:00 stderr F I1212 16:16:50.971639 1 status_controller.go:230] clusteroperator/etcd diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:05:28Z","message":"NodeInstallerProgressing: 1 node is at revision 2\nEtcdMembersProgressing: No unstarted etcd members found","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:59:32Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 2\nEtcdMembersAvailable: 1 members are available","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:49Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:49Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:50.973026033+00:00 stderr F I1212 16:16:50.971949 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:16:50.987856585+00:00 stderr F I1212 16:16:50.983828 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/etcd changed: Degraded message changed from "ScriptControllerDegraded: \"configmap/etcd-pod\": missing env var values\nNodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found" to "NodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found" 2025-12-12T16:16:51.134510406+00:00 stderr F I1212 16:16:51.133495 1 request.go:752] "Waited before sending request" delay="1.767532183s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-pod" 2025-12-12T16:16:51.907009146+00:00 stderr F E1212 16:16:51.905744 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.39:60861->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:52.136352165+00:00 stderr F I1212 16:16:52.134632 1 request.go:752] "Waited before sending request" delay="2.174973431s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-endpoints" 2025-12-12T16:16:53.343393683+00:00 stderr F I1212 16:16:53.333724 1 request.go:752] "Waited before sending request" delay="1.590357726s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/pods/etcd-crc" 2025-12-12T16:16:54.536115063+00:00 stderr F I1212 16:16:54.533413 
1 request.go:752] "Waited before sending request" delay="1.590786718s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-pod" 2025-12-12T16:16:54.612648211+00:00 stderr F E1212 16:16:54.609482 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: read udp 10.217.0.39:45732->10.217.4.10:53: read: connection refused" 2025-12-12T16:16:55.904240715+00:00 stderr F I1212 16:16:55.899722 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.899672563 +0000 UTC))" 2025-12-12T16:16:55.904240715+00:00 stderr F I1212 16:16:55.900483 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.900456572 +0000 UTC))" 2025-12-12T16:16:55.904240715+00:00 stderr F I1212 16:16:55.900502 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.900491733 +0000 UTC))" 2025-12-12T16:16:55.904240715+00:00 stderr F I1212 16:16:55.900514 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.900506984 +0000 UTC))" 2025-12-12T16:16:55.904240715+00:00 stderr F I1212 16:16:55.900526 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.900518024 +0000 UTC))" 2025-12-12T16:16:55.904240715+00:00 stderr F I1212 16:16:55.900539 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.900530694 +0000 UTC))" 2025-12-12T16:16:55.904240715+00:00 stderr F I1212 16:16:55.900559 1 tlsconfig.go:181] "Loaded client 
CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.900543174 +0000 UTC))" 2025-12-12T16:16:55.904240715+00:00 stderr F I1212 16:16:55.900572 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.900563515 +0000 UTC))" 2025-12-12T16:16:55.904240715+00:00 stderr F I1212 16:16:55.900585 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.900576595 +0000 UTC))" 2025-12-12T16:16:55.904240715+00:00 stderr F I1212 16:16:55.900606 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.900590696 +0000 UTC))" 2025-12-12T16:16:55.904240715+00:00 stderr F I1212 16:16:55.900811 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-etcd-operator.svc\" [serving] validServingFor=[metrics.openshift-etcd-operator.svc,metrics.openshift-etcd-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:12 +0000 UTC to 2027-11-02 07:52:13 +0000 UTC (now=2025-12-12 16:16:55.900795961 +0000 UTC))" 2025-12-12T16:16:55.904240715+00:00 stderr F I1212 16:16:55.900961 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556208\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556207\" (2025-12-12 15:16:46 +0000 UTC to 2028-12-12 15:16:46 +0000 UTC (now=2025-12-12 16:16:55.900947994 +0000 UTC))" 2025-12-12T16:16:59.750050487+00:00 stderr F E1212 16:16:59.749027 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:17:09.999415378+00:00 stderr F E1212 16:17:09.998888 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 
2025-12-12T16:17:30.495497384+00:00 stderr F E1212 16:17:30.494691 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.328400 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.328355577 +0000 UTC))" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.329260 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.329233219 +0000 UTC))" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.329287 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.32927631 +0000 UTC))" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.329308 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.329292881 +0000 UTC))" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.329367 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.329315961 +0000 UTC))" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.329390 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.329377593 +0000 UTC))" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.329407 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] 
issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.329395923 +0000 UTC))" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.329432 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.329419194 +0000 UTC))" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.329450 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.329437514 +0000 UTC))" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.329479 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.329467775 +0000 UTC))" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.329495 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.329485525 +0000 UTC))" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.329721 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-etcd-operator.svc\" [serving] validServingFor=[metrics.openshift-etcd-operator.svc,metrics.openshift-etcd-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:12 +0000 UTC to 2027-11-02 07:52:13 +0000 UTC (now=2025-12-12 16:17:46.329701691 +0000 UTC))" 2025-12-12T16:17:46.331302660+00:00 stderr F I1212 16:17:46.329939 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556208\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556207\" (2025-12-12 15:16:46 +0000 UTC to 2028-12-12 15:16:46 +0000 UTC (now=2025-12-12 16:17:46.329917386 +0000 UTC))" 2025-12-12T16:17:48.771687613+00:00 stderr F E1212 16:17:48.771543 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:18:11.465992178+00:00 stderr F E1212 16:18:11.464775 1 base_controller.go:279] 
"Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:18:48.412780372+00:00 stderr F E1212 16:18:48.412095 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-etcd-operator/leases/openshift-cluster-etcd-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:48.413583472+00:00 stderr F E1212 16:18:48.413558 1 leaderelection.go:436] error retrieving resource lock openshift-etcd-operator/openshift-cluster-etcd-operator-lock: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-etcd-operator/leases/openshift-cluster-etcd-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:48.765112163+00:00 stderr F E1212 16:18:48.765056 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:18:48.862703756+00:00 stderr F E1212 16:18:48.862646 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-scripts\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.862774997+00:00 stderr F I1212 16:18:48.862755 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ScriptControllerErrorUpdatingStatus' Put "https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:48.863699650+00:00 stderr F E1212 16:18:48.863669 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-etcd-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:48.863807873+00:00 stderr F E1212 16:18:48.863793 1 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd-operator/events\": dial tcp 10.217.4.1:443: connect: connection refused" event="&Event{ObjectMeta:{etcd-operator.1880842074cc4355 openshift-etcd-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Deployment,Namespace:openshift-etcd-operator,Name:etcd-operator,UID:7bcc9069-5a71-4f51-8970-90dddeee56b2,APIVersion:apps/v1,ResourceVersion:,FieldPath:,},Reason:ScriptControllerErrorUpdatingStatus,Message:Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused,Source:EventSource{Component:openshift-cluster-etcd-operator-script-controller-scriptcontroller,Host:,},FirstTimestamp:2025-12-12 16:18:48.862597973 +0000 UTC m=+122.728848359,LastTimestamp:2025-12-12 16:18:48.862597973 +0000 UTC m=+122.728848359,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:openshift-cluster-etcd-operator-script-controller-scriptcontroller,ReportingInstance:,}" 2025-12-12T16:18:48.870768295+00:00 stderr F E1212 16:18:48.870722 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-scripts\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.870768295+00:00 stderr F I1212 16:18:48.870752 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ScriptControllerErrorUpdatingStatus' Put "https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:48.872719613+00:00 stderr F E1212 16:18:48.872687 1 base_controller.go:279] "Unhandled Error" err="EtcdStaticResources-StaticResources reconciliation failed: [\"etcd/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/etcd-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/services/etcd\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/sm.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-etcd-operator/servicemonitors/etcd\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/minimal-sm.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-etcd-operator/servicemonitors/etcd-minimal\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/prometheus-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-etcd/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/prometheus-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-etcd/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/backups-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/etcd-backup-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/backups-cr.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:operator:etcd-backup-role\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/backups-crb.yaml\" 
(string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:etcd-backup-crb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"EtcdStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=EtcdStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:48.872919198+00:00 stderr F E1212 16:18:48.872900 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-etcd-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.264415747+00:00 stderr F E1212 16:18:49.264007 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-scripts\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.264415747+00:00 stderr F I1212 16:18:49.264073 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ScriptControllerErrorUpdatingStatus' Put "https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:49.464613857+00:00 stderr F E1212 16:18:49.464555 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-etcd-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.682235577+00:00 stderr F E1212 16:18:49.682143 1 base_controller.go:279] "Unhandled Error" err="EtcdStaticResources-StaticResources reconciliation failed: [\"etcd/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/sa.yaml\" (string): Get 
\"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/etcd-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/services/etcd\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/sm.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-etcd-operator/servicemonitors/etcd\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/minimal-sm.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-etcd-operator/servicemonitors/etcd-minimal\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/prometheus-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-etcd/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/prometheus-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-etcd/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/backups-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/etcd-backup-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/backups-cr.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:operator:etcd-backup-role\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/backups-crb.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:etcd-backup-crb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"EtcdStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=EtcdStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.864804031+00:00 stderr F E1212 16:18:49.864004 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-scripts\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.864804031+00:00 stderr F I1212 16:18:49.864045 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ScriptControllerErrorUpdatingStatus' Put "https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:50.266194764+00:00 stderr F E1212 16:18:50.266127 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-etcd-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager 
\"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:50.461888422+00:00 stderr F E1212 16:18:50.461818 1 base_controller.go:279] "Unhandled Error" err="etcd-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.661376644+00:00 stderr F I1212 16:18:50.661287 1 request.go:752] "Waited before sending request" delay="1.103597374s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/pods/etcd-crc" 2025-12-12T16:18:50.664525262+00:00 stderr F E1212 16:18:50.664469 1 base_controller.go:279] "Unhandled Error" err="etcd-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"etcd-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=etcd-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.064059479+00:00 stderr F E1212 16:18:51.063986 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-scripts\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.064059479+00:00 stderr F I1212 16:18:51.064025 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ScriptControllerErrorUpdatingStatus' Put "https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.265228972+00:00 stderr F E1212 16:18:51.262360 1 base_controller.go:279] "Unhandled Error" err="EtcdCertSignerController reconciliation failed: could not get current etcd-all-bundles configmap Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-all-bundles\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.464902609+00:00 stderr F E1212 16:18:51.464435 1 base_controller.go:279] "Unhandled Error" err="EtcdEndpointsController reconciliation failed: applying configmap update failed :Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.464902609+00:00 stderr F I1212 16:18:51.464493 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'EtcdEndpointsErrorUpdatingStatus' Put "https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:51.465125104+00:00 stderr F E1212 16:18:51.465085 1 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd-operator/events\": dial tcp 10.217.4.1:443: connect: connection refused" 
event="&Event{ObjectMeta:{etcd-operator.188084210fe06ea0 openshift-etcd-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Deployment,Namespace:openshift-etcd-operator,Name:etcd-operator,UID:7bcc9069-5a71-4f51-8970-90dddeee56b2,APIVersion:apps/v1,ResourceVersion:,FieldPath:,},Reason:EtcdEndpointsErrorUpdatingStatus,Message:Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused,Source:EventSource{Component:openshift-cluster-etcd-operator-etcd-endpoints-controller-etcdendpointscontroller,Host:,},FirstTimestamp:2025-12-12 16:18:51.464388256 +0000 UTC m=+125.330638642,LastTimestamp:2025-12-12 16:18:51.464388256 +0000 UTC m=+125.330638642,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:openshift-cluster-etcd-operator-etcd-endpoints-controller-etcdendpointscontroller,ReportingInstance:,}" 2025-12-12T16:18:51.593635021+00:00 stderr F E1212 16:18:51.593566 1 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd-operator/events\": dial tcp 10.217.4.1:443: connect: connection refused" event="&Event{ObjectMeta:{etcd-operator.188084210fe06ea0 openshift-etcd-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Deployment,Namespace:openshift-etcd-operator,Name:etcd-operator,UID:7bcc9069-5a71-4f51-8970-90dddeee56b2,APIVersion:apps/v1,ResourceVersion:,FieldPath:,},Reason:EtcdEndpointsErrorUpdatingStatus,Message:Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused,Source:EventSource{Component:openshift-cluster-etcd-operator-etcd-endpoints-controller-etcdendpointscontroller,Host:,},FirstTimestamp:2025-12-12 16:18:51.464388256 +0000 UTC m=+125.330638642,LastTimestamp:2025-12-12 16:18:51.464388256 +0000 UTC m=+125.330638642,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:openshift-cluster-etcd-operator-etcd-endpoints-controller-etcdendpointscontroller,ReportingInstance:,}" 2025-12-12T16:18:51.661467158+00:00 stderr F I1212 16:18:51.661395 1 request.go:752] "Waited before sending request" delay="1.599026051s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/restore-etcd-pod" 2025-12-12T16:18:51.664809811+00:00 stderr F E1212 16:18:51.664791 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.865477072+00:00 stderr F E1212 16:18:51.865413 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-etcd-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager 
\"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:52.065719953+00:00 stderr F E1212 16:18:52.065656 1 base_controller.go:279] "Unhandled Error" err="etcd-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.264132088+00:00 stderr F E1212 16:18:52.263917 1 base_controller.go:279] "Unhandled Error" err="etcd-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"etcd-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=etcd-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.663496202+00:00 stderr F E1212 16:18:52.663371 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-scripts\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.663496202+00:00 stderr F I1212 16:18:52.663421 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ScriptControllerErrorUpdatingStatus' Put "https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:52.861024515+00:00 stderr F I1212 16:18:52.860935 1 request.go:752] "Waited before sending request" delay="1.592169763s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-all-bundles" 2025-12-12T16:18:52.861933718+00:00 stderr F E1212 16:18:52.861889 1 base_controller.go:279] "Unhandled Error" err="EtcdCertSignerController reconciliation failed: could not get current etcd-all-bundles configmap Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-all-bundles\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.064504296+00:00 stderr F E1212 16:18:53.064408 1 base_controller.go:279] "Unhandled Error" err="EtcdEndpointsController reconciliation failed: applying configmap update failed :Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.064504296+00:00 stderr F I1212 16:18:53.064467 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'EtcdEndpointsErrorUpdatingStatus' Put "https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:53.464733410+00:00 stderr F E1212 16:18:53.464648 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get 
\"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-etcd-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:53.662044809+00:00 stderr F E1212 16:18:53.661972 1 base_controller.go:279] "Unhandled Error" err="etcd-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.861489610+00:00 stderr F I1212 16:18:53.861408 1 request.go:752] "Waited before sending request" delay="1.585606511s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/pods/etcd-crc" 2025-12-12T16:18:53.868694948+00:00 stderr F E1212 16:18:53.868641 1 base_controller.go:279] "Unhandled Error" err="etcd-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"etcd-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=etcd-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.265483547+00:00 stderr F E1212 16:18:54.265404 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-scripts\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.265483547+00:00 stderr F I1212 16:18:54.265445 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ScriptControllerErrorUpdatingStatus' Put "https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:54.462329054+00:00 stderr F E1212 16:18:54.462258 1 base_controller.go:279] "Unhandled Error" err="EtcdCertSignerController reconciliation failed: could not get current etcd-all-bundles configmap Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-all-bundles\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.663813264+00:00 stderr F E1212 16:18:54.663677 1 base_controller.go:279] "Unhandled Error" err="EtcdEndpointsController reconciliation failed: applying configmap update failed :Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:54.663813264+00:00 stderr F I1212 16:18:54.663721 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 
'EtcdEndpointsErrorUpdatingStatus' Put "https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:54.861463591+00:00 stderr F I1212 16:18:54.861409 1 request.go:752] "Waited before sending request" delay="1.59855496s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/restore-etcd-pod" 2025-12-12T16:18:54.864221279+00:00 stderr F E1212 16:18:54.864193 1 base_controller.go:277] "Unhandled Error" err="\"TargetConfigController\" controller failed to sync \"\", err: Put \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.064286295+00:00 stderr F E1212 16:18:55.064235 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-etcd-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:55.262490615+00:00 stderr F E1212 16:18:55.262439 1 base_controller.go:279] "Unhandled Error" err="etcd-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.464064669+00:00 stderr F E1212 16:18:55.463994 1 base_controller.go:279] "Unhandled Error" err="etcd-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"etcd-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=etcd-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.666250607+00:00 stderr F E1212 16:18:55.666131 1 base_controller.go:279] "Unhandled Error" err="EtcdStaticResources-StaticResources reconciliation failed: [\"etcd/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/etcd-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/services/etcd\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/sm.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-etcd-operator/servicemonitors/etcd\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/minimal-sm.yaml\" (string): Get \"https://10.217.4.1:443/apis/monitoring.coreos.com/v1/namespaces/openshift-etcd-operator/servicemonitors/etcd-minimal\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/prometheus-role.yaml\" (string): 
Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-etcd/roles/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/prometheus-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-etcd/rolebindings/prometheus-k8s\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/backups-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/etcd-backup-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/backups-cr.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:operator:etcd-backup-role\": dial tcp 10.217.4.1:443: connect: connection refused, \"etcd/backups-crb.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:etcd-backup-crb\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"EtcdStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status?fieldManager=EtcdStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:55.866466157+00:00 stderr F E1212 16:18:55.866414 1 base_controller.go:279] "Unhandled Error" err="EtcdCertSignerController reconciliation failed: could not get current etcd-all-bundles configmap Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-all-bundles\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:56.061093049+00:00 stderr F I1212 16:18:56.061019 1 request.go:752] "Waited before sending request" delay="1.473237981s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-scripts" 2025-12-12T16:18:56.064090733+00:00 stderr F E1212 16:18:56.064010 1 base_controller.go:279] "Unhandled Error" err="ScriptController reconciliation failed: \"configmap/etcd-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-scripts\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:56.064090733+00:00 stderr F I1212 16:18:56.064048 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'ScriptControllerErrorUpdatingStatus' Put "https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:56.264247361+00:00 stderr F E1212 16:18:56.264156 1 base_controller.go:279] "Unhandled Error" err="EtcdEndpointsController reconciliation failed: applying configmap update failed :Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-endpoints\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:56.264247361+00:00 stderr F I1212 16:18:56.264221 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Warning' reason: 'EtcdEndpointsErrorUpdatingStatus' Put "https://10.217.4.1:443/apis/operator.openshift.io/v1/etcds/cluster/status": dial tcp 10.217.4.1:443: 
connect: connection refused 2025-12-12T16:18:56.662072617+00:00 stderr F E1212 16:18:56.661985 1 base_controller.go:279] "Unhandled Error" err="etcd-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:57.261312262+00:00 stderr F I1212 16:18:57.261210 1 request.go:752] "Waited before sending request" delay="1.572411884s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd" 2025-12-12T16:18:57.883259768+00:00 stderr F E1212 16:18:57.881080 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: \"configmap/etcd-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-pod\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:58.466249960+00:00 stderr F I1212 16:18:58.460717 1 request.go:752] "Waited before sending request" delay="1.196684524s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/etcd-sa" 2025-12-12T16:19:00.764243333+00:00 stderr F I1212 16:19:00.763462 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:28.958231884+00:00 stderr F I1212 16:19:28.956985 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:29.362870084+00:00 stderr F I1212 16:19:29.362783 1 reflector.go:430] "Caches populated" type="*v1.PodDisruptionBudget" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:33.789400026+00:00 stderr F I1212 16:19:33.788720 1 reflector.go:430] "Caches populated" type="*v1.Etcd" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:34.782909440+00:00 stderr F I1212 16:19:34.782803 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:36.180424309+00:00 stderr F I1212 16:19:36.177316 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:36.180424309+00:00 stderr F I1212 16:19:36.177486 1 reflector.go:430] "Caches populated" type="*v1.Endpoints" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:36.180424309+00:00 stderr F I1212 16:19:36.179466 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:19:36.697457641+00:00 stderr F I1212 16:19:36.697356 1 reflector.go:430] "Caches populated" type="*v1beta1.Machine" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:38.187055671+00:00 stderr F I1212 16:19:38.186658 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:40.316284042+00:00 stderr F I1212 16:19:40.316207 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:42.933372571+00:00 stderr F I1212 16:19:42.933003 1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:44.899723702+00:00 stderr F I1212 
16:19:44.899451 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:48.765528214+00:00 stderr F E1212 16:19:48.764648 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:19:50.776804903+00:00 stderr F I1212 16:19:50.776380 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:53.455367076+00:00 stderr F I1212 16:19:53.455309 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:01.730082696+00:00 stderr F I1212 16:20:01.729626 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:02.255790276+00:00 stderr F I1212 16:20:02.255730 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:03.498800885+00:00 stderr F I1212 16:20:03.498479 1 reflector.go:430] "Caches populated" type="*v1.Network" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:07.087105670+00:00 stderr F I1212 16:20:07.086761 1 reflector.go:430] "Caches populated" type="*v1.APIServer" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:08.200809603+00:00 stderr F I1212 16:20:08.200744 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:08.632519703+00:00 stderr F I1212 16:20:08.632435 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:11.235778535+00:00 stderr F I1212 16:20:11.235499 1 reflector.go:430] "Caches populated" type="*v1.Job" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:11.827872301+00:00 stderr F I1212 16:20:11.827800 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:11.931961064+00:00 stderr F I1212 16:20:11.931900 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=etcds" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:11.932892828+00:00 stderr F I1212 16:20:11.932857 1 status_controller.go:230] clusteroperator/etcd diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found\nTargetConfigControllerDegraded: \"configmap/etcd-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-pod\": dial tcp 10.217.4.1:443: connect: connection refused","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:05:28Z","message":"NodeInstallerProgressing: 1 node is at revision 2\nEtcdMembersProgressing: No unstarted etcd members found","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:59:32Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 2\nEtcdMembersAvailable: 1 members are 
available","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:49Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:49Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:11.933592585+00:00 stderr F I1212 16:20:11.933551 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:11.937461362+00:00 stderr F I1212 16:20:11.937423 1 etcdcli_pool.go:70] creating a new cached client 2025-12-12T16:20:11.945069133+00:00 stderr F I1212 16:20:11.945014 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/etcd changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found" to "NodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found\nTargetConfigControllerDegraded: \"configmap/etcd-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-pod\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:20:12.140047729+00:00 stderr F I1212 16:20:12.139483 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:13.026281621+00:00 stderr F I1212 16:20:13.026198 1 request.go:752] "Waited before sending request" delay="1.090709556s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-scripts" 2025-12-12T16:20:13.846501104+00:00 stderr F I1212 16:20:13.846102 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:13.849252723+00:00 stderr F I1212 16:20:13.849214 1 status_controller.go:230] clusteroperator/etcd diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:49Z","message":"NodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:05:28Z","message":"NodeInstallerProgressing: 1 node is at revision 2\nEtcdMembersProgressing: No unstarted etcd members found","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T07:59:32Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 2\nEtcdMembersAvailable: 1 members are available","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:49Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:49Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:13.858391653+00:00 stderr F I1212 16:20:13.858082 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-etcd-operator", Name:"etcd-operator", UID:"7bcc9069-5a71-4f51-8970-90dddeee56b2", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/etcd changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members 
found\nTargetConfigControllerDegraded: \"configmap/etcd-pod\": Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-pod\": dial tcp 10.217.4.1:443: connect: connection refused" to "NodeControllerDegraded: All master nodes are ready\nEtcdMembersDegraded: No unhealthy members found" 2025-12-12T16:20:14.026534475+00:00 stderr F I1212 16:20:14.026456 1 request.go:752] "Waited before sending request" delay="1.395669392s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/pods/etcd-crc" 2025-12-12T16:20:15.026723708+00:00 stderr F I1212 16:20:15.026652 1 request.go:752] "Waited before sending request" delay="1.180794018s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/serviceaccounts/installer-sa" 2025-12-12T16:20:16.227209930+00:00 stderr F I1212 16:20:16.227083 1 request.go:752] "Waited before sending request" delay="1.39715938s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/restore-etcd-pod" 2025-12-12T16:20:19.430788265+00:00 stderr F I1212 16:20:19.430701 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:20.686004900+00:00 stderr F I1212 16:20:20.685475 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:21.829887381+00:00 stderr F I1212 16:20:21.829278 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:22.430343027+00:00 stderr F I1212 16:20:22.428897 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:22.628436561+00:00 stderr F I1212 16:20:22.628310 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:23.392396383+00:00 stderr F I1212 16:20:23.392325 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:23.608578241+00:00 stderr F I1212 16:20:23.608501 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:25.131708712+00:00 stderr F I1212 16:20:25.030731 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:25.137143909+00:00 stderr F I1212 16:20:25.137089 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:26.628418542+00:00 stderr F I1212 16:20:26.628339 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:31.978863544+00:00 stderr F I1212 16:20:31.978355 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:34.631725619+00:00 stderr F I1212 16:20:34.631112 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:34.632026207+00:00 stderr F I1212 
16:20:34.631987 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:34.636008611+00:00 stderr F I1212 16:20:34.635929 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:34.636117584+00:00 stderr F I1212 16:20:34.636087 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:48.767299800+00:00 stderr F E1212 16:20:48.766742 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:20:55.312455678+00:00 stderr F E1212 16:20:55.311766 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:21:48.765448001+00:00 stderr F E1212 16:21:48.764917 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:22:48.769432240+00:00 stderr F E1212 16:22:48.768682 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:23:48.771540307+00:00 stderr F E1212 16:23:48.771342 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:24:48.772390823+00:00 stderr F E1212 16:24:48.771761 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:25:48.771046085+00:00 stderr F E1212 16:25:48.770437 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:25:50.181983357+00:00 stderr F I1212 16:25:50.181901 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:26:48.786872602+00:00 stderr F E1212 16:26:48.784649 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:27:48.773404736+00:00 stderr F E1212 16:27:48.772455 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:28:48.775697035+00:00 stderr F E1212 16:28:48.775303 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post 
\"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:29:48.773153435+00:00 stderr F E1212 16:29:48.772871 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:30:34.634164155+00:00 stderr F I1212 16:30:34.632794 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:30:34.634164155+00:00 stderr F I1212 16:30:34.634027 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:30:48.774467969+00:00 stderr F E1212 16:30:48.774091 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:31:48.776203822+00:00 stderr F E1212 16:31:48.775531 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:32:48.776270486+00:00 stderr F E1212 16:32:48.775777 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:33:18.185901830+00:00 stderr F I1212 16:33:18.184858 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:33:48.776691490+00:00 stderr F E1212 16:33:48.776014 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:33:50.993476477+00:00 stderr F I1212 16:33:50.992526 1 request.go:752] "Waited before sending request" delay="1.021666712s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-etcd/configmaps/etcd-endpoints" 2025-12-12T16:34:48.778455596+00:00 stderr F E1212 16:34:48.777962 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:35:48.778929934+00:00 stderr F E1212 16:35:48.778258 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:36:48.778780551+00:00 stderr F E1212 16:36:48.777837 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:37:35.325605335+00:00 stderr F E1212 16:37:35.325242 1 base_controller.go:279] "Unhandled Error" err="FSyncController 
reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:37:48.778812312+00:00 stderr F E1212 16:37:48.778075 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:38:31.190048312+00:00 stderr F I1212 16:38:31.189314 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:38:48.779739778+00:00 stderr F E1212 16:38:48.778626 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:39:48.786112723+00:00 stderr F E1212 16:39:48.785768 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:40:34.635035683+00:00 stderr F I1212 16:40:34.634087 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:34.639011943+00:00 stderr F I1212 16:40:34.638928 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:34.639133466+00:00 stderr F I1212 16:40:34.639097 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:34.639257199+00:00 stderr F I1212 16:40:34.639225 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:34.639409193+00:00 stderr F I1212 16:40:34.639334 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:34.639493705+00:00 stderr F I1212 16:40:34.639459 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:34.639618338+00:00 stderr F I1212 16:40:34.639588 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:34.639718391+00:00 stderr F I1212 16:40:34.639692 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:48.796453175+00:00 stderr F E1212 16:40:48.795995 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:41:48.785837421+00:00 stderr F E1212 16:41:48.785629 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host" 2025-12-12T16:42:48.785238042+00:00 stderr F E1212 16:42:48.784042 1 base_controller.go:279] "Unhandled Error" err="FSyncController reconciliation failed: Post \"https://thanos-querier.openshift-monitoring.svc:9091/api/v1/query\": dial tcp: lookup thanos-querier.openshift-monitoring.svc on 10.217.4.10:53: no such host"
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elastic-operator-6c994c654b-42tmw_1aa11df6-5c2b-4018-8146-09c5d79b9311/manager/0.log
2025-12-12T16:27:30.107243149+00:00 stderr F 2025/12/12 16:27:30 INFO GOMEMLIMIT is updated package=github.com/KimMachineGun/automemlimit/memlimit GOMEMLIMIT=966367641 previous=9223372036854775807 2025-12-12T16:27:30.152873464+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.152Z","log.logger":"manager","message":"maxprocs: Updating GOMAXPROCS=1: determined from CPU quota","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0"} 2025-12-12T16:27:30.152920595+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.152Z","log.logger":"manager","message":"Setting default container registry","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","container_registry":"docker.elastic.co"} 2025-12-12T16:27:30.153432448+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.153Z","log.logger":"manager","message":"Setting up scheme","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0"} 2025-12-12T16:27:30.154649049+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.154Z","log.logger":"manager","message":"Operator configured to manage a single namespace","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","namespace":"service-telemetry","operator_namespace":"service-telemetry"} 2025-12-12T16:27:30.181225281+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.178Z","log.logger":"manager","message":"Setting up controllers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.184Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-agent-k8s-elastic-co-v1alpha1-agent"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.184Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-apm-k8s-elastic-co-v1-apmserver"} 2025-12-12T16:27:30.185441928+00:00 stderr F
{"log.level":"info","@timestamp":"2025-12-12T16:27:30.184Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-apm-k8s-elastic-co-v1beta1-apmserver"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.184Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-beat-k8s-elastic-co-v1beta1-beat"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.184Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-enterprisesearch-k8s-elastic-co-v1-enterprisesearch"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-enterprisesearch-k8s-elastic-co-v1beta1-enterprisesearch"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-elasticsearch-k8s-elastic-co-v1beta1-elasticsearch"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-kibana-k8s-elastic-co-v1-kibana"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-kibana-k8s-elastic-co-v1beta1-kibana"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-ems-k8s-elastic-co-v1alpha1-mapsservers"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-scp-k8s-elastic-co-v1alpha1-stackconfigpolicies"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"es-validation","message":"Registering Elasticsearch validating webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-elasticsearch-k8s-elastic-co-v1-elasticsearch"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-elasticsearch-k8s-elastic-co-v1-elasticsearch"} 2025-12-12T16:27:30.185441928+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"esa-validation","message":"Registering ElasticsearchAutoscaler validating webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-autoscaling-k8s-elastic-co-v1alpha1-elasticsearchautoscaler"} 2025-12-12T16:27:30.185441928+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-autoscaling-k8s-elastic-co-v1alpha1-elasticsearchautoscaler"} 2025-12-12T16:27:30.185821528+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"ls-validation","message":"Registering Logstash validating webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-logstash-k8s-elastic-co-v1alpha1-logstash"} 2025-12-12T16:27:30.185821528+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"controller-runtime.webhook","message":"Registering webhook","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/validate-logstash-k8s-elastic-co-v1alpha1-logstash"} 2025-12-12T16:27:30.185821528+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.185Z","log.logger":"manager","message":"Polling for the webhook certificate to be available","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","path":"/tmp/k8s-webhook-server/serving-certs/tls.crt"} 2025-12-12T16:27:30.186163526+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.186Z","log.logger":"manager","message":"Starting the manager","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","uuid":"fa6d31d3-e1a5-4187-b583-db40e23a11d9","namespace":"service-telemetry","version":"3.2.0","build_hash":"3ed7be5a","build_date":"2025-10-30T08:32:16Z","build_snapshot":"false"} 2025-12-12T16:27:30.186287790+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.186Z","log.logger":"controller-runtime.metrics","message":"Starting metrics server","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0"} 2025-12-12T16:27:30.186843464+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.186Z","log.logger":"controller-runtime.webhook","message":"Starting webhook server","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0"} 2025-12-12T16:27:30.187488390+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.187Z","log.logger":"controller-runtime.certwatcher","message":"Updated current TLS certificate","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","cert":"/tmp/k8s-webhook-server/serving-certs/tls.crt","key":"/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-12-12T16:27:30.187784247+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.187Z","log.logger":"controller-runtime.certwatcher","message":"Starting certificate poll+watcher","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","cert":"/tmp/k8s-webhook-server/serving-certs/tls.crt","key":"/tmp/k8s-webhook-server/serving-certs/tls.key","interval":10} 2025-12-12T16:27:30.188751292+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.188Z","log.logger":"controller-runtime.metrics","message":"Serving metrics 
server","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","bindAddress":"0.0.0.0:0","secure":false} 2025-12-12T16:27:30.188861675+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.188Z","log.logger":"controller-runtime.webhook","message":"Serving webhook server","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","host":"","port":9443} 2025-12-12T16:27:30.188974418+00:00 stderr F I1212 16:27:30.188938 1 leaderelection.go:257] attempting to acquire leader lease service-telemetry/elastic-operator-leader... 2025-12-12T16:27:30.209607210+00:00 stderr F I1212 16:27:30.209547 1 leaderelection.go:271] successfully acquired lease service-telemetry/elastic-operator-leader 2025-12-12T16:27:30.210993685+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.210Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apmserver-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.210993685+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.210Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-autoscaler","source":"kind source: *v1.Elasticsearch"} 2025-12-12T16:27:30.211032876+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.210Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apmserver-controller","source":"kind source: *v1.ApmServer"} 2025-12-12T16:27:30.211032876+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apmserver-controller","source":"kind source: *v1.Deployment"} 2025-12-12T16:27:30.211146609+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apmserver-controller","source":"kind source: *v1.Pod"} 2025-12-12T16:27:30.211146609+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apmserver-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.211146609+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apmserver-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.211146609+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apmserver-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.211407355+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting 
EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-controller","source":"channel source: 0xc00012a380"} 2025-12-12T16:27:30.211407355+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"enterprisesearch-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.211426226+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-autoscaler","source":"kind source: *v1alpha1.ElasticsearchAutoscaler"} 2025-12-12T16:27:30.211485497+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-controller","source":"kind source: *v1.Elasticsearch"} 2025-12-12T16:27:30.211485497+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-controller","source":"kind source: *v1.StatefulSet"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-controller","source":"kind source: *v1.Pod"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-controller","source":"kind source: *v1.ConfigMap"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-controller","source":"kind source: *v1.PodDisruptionBudget"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.211936679+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kibana-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"license-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"enterprisesearch-controller","source":"kind source: *v1.EnterpriseSearch"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"enterprisesearch-controller","source":"kind source: *v1.Deployment"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"enterprisesearch-controller","source":"kind source: *v1.Pod"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"enterprisesearch-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"enterprisesearch-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.211936679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.211Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"enterprisesearch-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.212254567+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.212Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kibana-controller","source":"kind source: *v1.Kibana"} 2025-12-12T16:27:30.213652112+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.213Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kibana-controller","source":"kind source: *v1.Deployment"} 2025-12-12T16:27:30.213733684+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.213Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kibana-controller","source":"kind source: 
*v1.Pod"} 2025-12-12T16:27:30.213828807+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.213Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kibana-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.213920629+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.213Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kibana-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.213955840+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.213Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kibana-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.214008691+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.213Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kibana-controller","source":"kind source: *v1.ConfigMap"} 2025-12-12T16:27:30.214273568+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.214Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.214483783+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.214Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.214483783+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.214Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"license-controller","source":"kind source: *v1.Elasticsearch"} 2025-12-12T16:27:30.218323420+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.218Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-controller","source":"kind source: *v1beta1.Beat"} 2025-12-12T16:27:30.218821223+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.218Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-controller","source":"kind source: *v1.DaemonSet"} 2025-12-12T16:27:30.218966577+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.218Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-controller","source":"kind source: *v1.Deployment"} 2025-12-12T16:27:30.218966577+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.218Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-controller","source":"kind source: *v1.Pod"} 
2025-12-12T16:27:30.218979967+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.218Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.219076709+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.219Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.219223583+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.219Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"trial-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.219500720+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.219Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"stackconfigpolicy-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.219586952+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.219Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-controller","source":"kind source: *v1alpha1.Agent"} 2025-12-12T16:27:30.219700385+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.219Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-controller","source":"kind source: *v1.DaemonSet"} 2025-12-12T16:27:30.219752516+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.219Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-controller","source":"kind source: *v1.Deployment"} 2025-12-12T16:27:30.219785437+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.219Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-controller","source":"kind source: *v1.StatefulSet"} 2025-12-12T16:27:30.219834089+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.219Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-controller","source":"kind source: *v1.Pod"} 2025-12-12T16:27:30.219887500+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.219Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.219929651+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.219Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-controller","source":"kind source: *v1.Service"} 
2025-12-12T16:27:30.221419029+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.221Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"remotecluster-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.221472820+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.221Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"stackconfigpolicy-controller","source":"kind source: *v1alpha1.StackConfigPolicy"} 2025-12-12T16:27:30.221640454+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.221Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"stackconfigpolicy-controller","source":"kind source: *v1.Elasticsearch"} 2025-12-12T16:27:30.221655065+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.221Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"stackconfigpolicy-controller","source":"kind source: *v1.Kibana"} 2025-12-12T16:27:30.221735177+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.221Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"stackconfigpolicy-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.221987953+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.221Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"maps-controller","source":"kind source: *v1alpha1.ElasticMapsServer"} 2025-12-12T16:27:30.222090226+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.222Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"maps-controller","source":"kind source: *v1.Deployment"} 2025-12-12T16:27:30.222136437+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.222Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"maps-controller","source":"kind source: *v1.Pod"} 2025-12-12T16:27:30.222188738+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.222Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"maps-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.223528242+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.223Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"maps-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.223605654+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.223Z","log.logger":"manager.eck-operator","message":"Starting 
EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"maps-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.223717227+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.223Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"maps-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.232808387+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.232Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"logstash-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.233768971+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.233Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-kibana-association-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.234056589+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.234Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"remotecluster-controller","source":"kind source: *v1.Elasticsearch"} 2025-12-12T16:27:30.234515570+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.234Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"remotecluster-controller","source":"kind source: *v1.Elasticsearch"} 2025-12-12T16:27:30.234841858+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.234Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"remotecluster-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.237563667+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.237Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"logstash-controller","source":"kind source: *v1alpha1.Logstash"} 2025-12-12T16:27:30.239023414+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.238Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"logstash-controller","source":"kind source: *v1.StatefulSet"} 2025-12-12T16:27:30.239094936+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.239Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"logstash-controller","source":"kind source: *v1.Pod"} 2025-12-12T16:27:30.239280061+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.239Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"logstash-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.239333212+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:30.239Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"logstash-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.239786344+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.239Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"logstash-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.240374698+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.240Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-es-association-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.240557943+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.240Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-ent-association-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.240850901+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.240Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-kibana-association-controller","source":"kind source: *v1.ApmServer"} 2025-12-12T16:27:30.240947833+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.240Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-kibana-association-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.241094307+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.241Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-kibana-association-controller","source":"kind source: *v1.Kibana"} 2025-12-12T16:27:30.241195709+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.241Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-kibana-association-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.246150915+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.246Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-es-association-controller","source":"kind source: *v1.ApmServer"} 2025-12-12T16:27:30.246270138+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.246Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-es-association-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.246408171+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.246Z","log.logger":"manager.eck-operator","message":"Starting 
EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-es-association-controller","source":"kind source: *v1.Elasticsearch"} 2025-12-12T16:27:30.246494063+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.246Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-es-association-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.246682728+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.246Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-es-association-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.246875613+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.246Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-es-association-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.247053988+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.247Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-ent-association-controller","source":"kind source: *v1.Kibana"} 2025-12-12T16:27:30.247156860+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.247Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-ent-association-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.247368895+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.247Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-ent-association-controller","source":"kind source: *v1.EnterpriseSearch"} 2025-12-12T16:27:30.247368895+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.247Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-ent-association-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.248341920+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.248Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-es-association-controller","source":"kind source: *v1.Kibana"} 2025-12-12T16:27:30.248411462+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.248Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-es-association-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.248491554+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.248Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-es-association-controller","source":"kind source: *v1.Elasticsearch"} 
2025-12-12T16:27:30.248582446+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.248Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-es-association-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.248738700+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.248Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"ent-es-association-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.248881544+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.248Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-es-association-controller","source":"kind source: *v1.Service"} 2025-12-12T16:27:30.249034168+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.249Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-es-association-controller","source":"kind source: *v1beta1.Beat"} 2025-12-12T16:27:30.249621772+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.249Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-es-association-controller","source":"kind source: *v1.Secret"} 2025-12-12T16:27:30.249621772+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.249Z","log.logger":"manager.eck-operator","message":"Starting EventSource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-es-association-controller","source":"kind source: *v1.Elasticsearch"} 2025-12-12T16:27:30.249621772+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.249Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"ent-es-association-controller"} 2025-12-12T16:27:30.249643753+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.249Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"ent-es-association-controller","worker count":3} 2025-12-12T16:27:30.250209247+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.250Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-es-association-controller"} 2025-12-12T16:27:30.250209247+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.250Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-es-association-controller","worker count":3} 2025-12-12T16:27:30.250490034+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.250Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-kibana-association-controller"} 
2025-12-12T16:27:30.250490034+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.250Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-kibana-association-controller","worker count":3} 2025-12-12T16:27:30.251027038+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.250Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-fleetserver-association-controller"} 2025-12-12T16:27:30.251027038+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.250Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-fleetserver-association-controller","worker count":3} 2025-12-12T16:27:30.251348416+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.251Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-kibana-association-controller"} 2025-12-12T16:27:30.251348416+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.251Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-kibana-association-controller","worker count":3} 2025-12-12T16:27:30.251938521+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.251Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"logstash-es-association-controller"} 2025-12-12T16:27:30.251938521+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.251Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"logstash-es-association-controller","worker count":3} 2025-12-12T16:27:30.252339391+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.252Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"ems-es-association-controller"} 2025-12-12T16:27:30.252339391+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.252Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"ems-es-association-controller","worker count":3} 2025-12-12T16:27:30.253635084+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.253Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-monitoring-association-controller"} 2025-12-12T16:27:30.253672325+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.253Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-monitoring-association-controller","worker count":3} 2025-12-12T16:27:30.254112786+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:30.254Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"es-monitoring-association-controller"} 2025-12-12T16:27:30.254144577+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.254Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"es-monitoring-association-controller","worker count":3} 2025-12-12T16:27:30.254732402+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.254Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"ls-monitoring-association-controller"} 2025-12-12T16:27:30.254794393+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.254Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"ls-monitoring-association-controller","worker count":3} 2025-12-12T16:27:30.255032419+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.255Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:30.255321027+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.255Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000280967} 2025-12-12T16:27:30.255767338+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.255Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-monitoring-association-controller"} 2025-12-12T16:27:30.255795689+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.255Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-monitoring-association-controller","worker count":3} 2025-12-12T16:27:30.312465993+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.312Z","log.logger":"manager","message":"Operator license key validated","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","license_type":"basic"} 2025-12-12T16:27:30.316496325+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.316Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kibana-controller"} 2025-12-12T16:27:30.316569237+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.316Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kibana-controller","worker count":3} 2025-12-12T16:27:30.316830923+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.316Z","log.logger":"manager.eck-operator","message":"Starting 
Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"license-controller"} 2025-12-12T16:27:30.316860874+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.316Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"license-controller","worker count":3} 2025-12-12T16:27:30.317022038+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.316Z","log.logger":"license-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:30.317539121+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.317Z","log.logger":"license-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000539754} 2025-12-12T16:27:30.317734176+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.317Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-autoscaler"} 2025-12-12T16:27:30.317761997+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.317Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-autoscaler","worker count":3} 2025-12-12T16:27:30.317843149+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.317Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apmserver-controller"} 2025-12-12T16:27:30.317879430+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.317Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apmserver-controller","worker count":3} 2025-12-12T16:27:30.317989683+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.317Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-controller"} 2025-12-12T16:27:30.318012293+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.318Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"elasticsearch-controller","worker count":3} 2025-12-12T16:27:30.318085315+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.318Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:30.319020089+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.318Z","log.logger":"elasticsearch-controller","message":"Creating 
resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"ConfigMap","namespace":"service-telemetry","name":"elasticsearch-es-scripts"} 2025-12-12T16:27:30.321102012+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.321Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"trial-controller"} 2025-12-12T16:27:30.321165883+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.321Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"trial-controller","worker count":3} 2025-12-12T16:27:30.323002820+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.322Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-controller"} 2025-12-12T16:27:30.323053641+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.323Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-controller","worker count":3} 2025-12-12T16:27:30.323222775+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.323Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-controller"} 2025-12-12T16:27:30.323261236+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.323Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"agent-controller","worker count":3} 2025-12-12T16:27:30.323345438+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.323Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"enterprisesearch-controller"} 2025-12-12T16:27:30.323424360+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.323Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"enterprisesearch-controller","worker count":3} 2025-12-12T16:27:30.323979664+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.323Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"stackconfigpolicy-controller"} 2025-12-12T16:27:30.324006575+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.323Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"stackconfigpolicy-controller","worker count":3} 2025-12-12T16:27:30.324268052+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.324Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"maps-controller"} 2025-12-12T16:27:30.324354024+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:30.324Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"maps-controller","worker count":3} 2025-12-12T16:27:30.328138690+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.328Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"ConfigMap","namespace":"service-telemetry","name":"elasticsearch-es-scripts","resourceVersion":"42493"} 2025-12-12T16:27:30.328498559+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.328Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Service","namespace":"service-telemetry","name":"elasticsearch-es-transport"} 2025-12-12T16:27:30.336937872+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.336Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"remotecluster-controller"} 2025-12-12T16:27:30.337020714+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.336Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"remotecluster-controller","worker count":3} 2025-12-12T16:27:30.337470386+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.337Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Service","namespace":"service-telemetry","name":"elasticsearch-es-transport","resourceVersion":"42494"} 2025-12-12T16:27:30.337622290+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.337Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Service","namespace":"service-telemetry","name":"elasticsearch-es-http"} 2025-12-12T16:27:30.339904957+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.339Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:30.340443401+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.340Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000566424} 2025-12-12T16:27:30.341918268+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.341Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"logstash-controller"} 
2025-12-12T16:27:30.342011221+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.341Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"logstash-controller","worker count":3} 2025-12-12T16:27:30.342138754+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.342Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-kibana-association-controller"} 2025-12-12T16:27:30.342197005+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.342Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-kibana-association-controller","worker count":3} 2025-12-12T16:27:30.347055628+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.346Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-es-association-controller"} 2025-12-12T16:27:30.347148311+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.347Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-es-association-controller","worker count":3} 2025-12-12T16:27:30.347451848+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.347Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-ent-association-controller"} 2025-12-12T16:27:30.347486849+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.347Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"kb-ent-association-controller","worker count":3} 2025-12-12T16:27:30.347540811+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.347Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-es-association-controller"} 2025-12-12T16:27:30.347565311+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.347Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"apm-es-association-controller","worker count":3} 2025-12-12T16:27:30.347614063+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.347Z","log.logger":"manager.eck-operator","message":"Starting Controller","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-es-association-controller"} 2025-12-12T16:27:30.347637683+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.347Z","log.logger":"manager.eck-operator","message":"Starting workers","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","controller":"beat-es-association-controller","worker count":3} 2025-12-12T16:27:30.352240460+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.352Z","log.logger":"elasticsearch-controller","message":"Created resource 
successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Service","namespace":"service-telemetry","name":"elasticsearch-es-http","resourceVersion":"42499"} 2025-12-12T16:27:30.353815740+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.352Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Service","namespace":"service-telemetry","name":"elasticsearch-es-internal-http"} 2025-12-12T16:27:30.367992188+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.367Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Service","namespace":"service-telemetry","name":"elasticsearch-es-internal-http","resourceVersion":"42505"} 2025-12-12T16:27:30.443057568+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.442Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-elastic-user"} 2025-12-12T16:27:30.449075530+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.449Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-elastic-user","resourceVersion":"42510"} 2025-12-12T16:27:30.806158128+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.805Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-internal-users"} 2025-12-12T16:27:30.832999877+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.832Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-internal-users","resourceVersion":"42547"} 2025-12-12T16:27:30.834835104+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.834Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-xpack-file-realm"} 2025-12-12T16:27:30.835728236+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.835Z","log.logger":"es-monitoring","message":"Starting reconciliation 
run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"2","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:30.839386209+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.835Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"2","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000197705} 2025-12-12T16:27:30.839386209+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.836Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"2","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:30.839386209+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.836Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"2","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000696077} 2025-12-12T16:27:30.853231349+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.853Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-xpack-file-realm","resourceVersion":"42550"} 2025-12-12T16:27:30.853542437+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.853Z","log.logger":"elasticsearch-controller","message":"No internal CA certificate Secret found, creating a new one","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","owner_namespace":"service-telemetry","owner_name":"elasticsearch","ca_type":"http"} 2025-12-12T16:27:30.876291643+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.875Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"3","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:30.876291643+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.875Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"3","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000191225} 2025-12-12T16:27:30.876291643+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.876Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"3","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:30.880224892+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.876Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"3","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000186005} 2025-12-12T16:27:30.923869577+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:30.921Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-http-ca-internal"} 2025-12-12T16:27:30.927628152+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.926Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"4","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:30.927628152+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.926Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"4","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000213995} 2025-12-12T16:27:30.927628152+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.926Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"4","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:30.927628152+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.927Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"4","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000196145} 2025-12-12T16:27:30.931068829+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:30.927Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-http-ca-internal","resourceVersion":"42551"} 2025-12-12T16:27:31.073409092+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.073Z","log.logger":"elasticsearch-controller","message":"Issuing new HTTP certificate","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","secret_name":"elasticsearch-es-http-certs-internal","owner_namespace":"service-telemetry","owner_name":"elasticsearch"} 2025-12-12T16:27:31.076253954+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.076Z","log.logger":"elasticsearch-controller","message":"Creating HTTP internal certificate secret","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","secret_name":"elasticsearch-es-http-certs-internal"} 2025-12-12T16:27:31.082637165+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.082Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"5","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:31.082827050+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:31.082Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"5","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000231236} 2025-12-12T16:27:31.083044845+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.083Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"5","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.083355683+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.083Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"5","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000322479} 2025-12-12T16:27:31.083850196+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.083Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-http-certs-public"} 2025-12-12T16:27:31.088220366+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.087Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-http-certs-public","resourceVersion":"42555"} 2025-12-12T16:27:31.088220366+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.088Z","log.logger":"elasticsearch-observer","message":"Creating observer for cluster","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.088220366+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.088Z","log.logger":"elasticsearch-controller","message":"No internal CA certificate Secret found, creating a new one","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","owner_namespace":"service-telemetry","owner_name":"elasticsearch","ca_type":"transport"} 2025-12-12T16:27:31.151038396+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.150Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-transport-ca-internal"} 2025-12-12T16:27:31.155651993+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.155Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"6","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:31.156611427+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:31.155Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"6","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000155434} 2025-12-12T16:27:31.156611427+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.155Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"6","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.156611427+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.156Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"6","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000220106} 2025-12-12T16:27:31.156611427+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.156Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-transport-ca-internal","resourceVersion":"42561"} 2025-12-12T16:27:31.156611427+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.156Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-transport-certs-public"} 2025-12-12T16:27:31.162269330+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.162Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-transport-certs-public","resourceVersion":"42562"} 2025-12-12T16:27:31.162269330+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.162Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-default-es-transport-certs"} 2025-12-12T16:27:31.166931489+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.166Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"7","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:31.166931489+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.166Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"7","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000128573} 2025-12-12T16:27:31.166931489+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:31.166Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"7","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.167105863+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.167Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"7","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000154794} 2025-12-12T16:27:31.167402640+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.167Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-default-es-transport-certs","resourceVersion":"42563"} 2025-12-12T16:27:31.179418025+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.179Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"8","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:31.179711622+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.179Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"8","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000230616} 2025-12-12T16:27:31.179836555+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.179Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"8","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.180079831+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.180Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"8","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000254856} 2025-12-12T16:27:31.180446311+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.180Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-remote-ca"} 2025-12-12T16:27:31.188201757+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.186Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-remote-ca","resourceVersion":"42567"} 2025-12-12T16:27:31.188201757+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.186Z","log.logger":"elasticsearch-controller","message":"Creating 
resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"ConfigMap","namespace":"service-telemetry","name":"elasticsearch-es-unicast-hosts"} 2025-12-12T16:27:31.188201757+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.186Z","log.logger":"elasticsearch-controller","message":"Creating seed hosts","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch","hosts":[]} 2025-12-12T16:27:31.188201757+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.186Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"9","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.188201757+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.187Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"9","namespace":"service-telemetry","es_name":"elasticsearch","took":0.00079376} 2025-12-12T16:27:31.191424848+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.191Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"9","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:31.192231629+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.191Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"9","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000307688} 2025-12-12T16:27:31.199608176+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.197Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"ConfigMap","namespace":"service-telemetry","name":"elasticsearch-es-unicast-hosts","resourceVersion":"42568"} 2025-12-12T16:27:31.207235178+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.205Z","log.logger":"elasticsearch-controller","message":"Creating master node","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","statefulset_name":"elasticsearch-es-default","actualReplicas":0,"targetReplicas":1} 2025-12-12T16:27:31.207235178+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.206Z","log.logger":"elasticsearch-controller","message":"Setting `cluster.initial_master_nodes`","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch","cluster.initial_master_nodes":"elasticsearch-es-default-0"} 2025-12-12T16:27:31.231588225+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:31.231Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-default-es-config"} 2025-12-12T16:27:31.231933914+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.231Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.232374455+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.232Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000435391} 2025-12-12T16:27:31.232493128+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.232Z","log.logger":"license-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"2","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.232743934+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.232Z","log.logger":"license-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"2","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000262026} 2025-12-12T16:27:31.232974050+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.232Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:31.233093293+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.233Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000120563} 2025-12-12T16:27:31.247914568+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.247Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-default-es-config","resourceVersion":"42572"} 2025-12-12T16:27:31.248022211+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.247Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Service","namespace":"service-telemetry","name":"elasticsearch-es-default"} 2025-12-12T16:27:31.248670857+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.248Z","log.logger":"es-monitoring","message":"Starting reconciliation 
run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"11","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:31.248871982+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.248Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"11","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000195585} 2025-12-12T16:27:31.249104128+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.249Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"11","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.249405966+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.249Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"11","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000295288} 2025-12-12T16:27:31.255390377+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.255Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Service","namespace":"service-telemetry","name":"elasticsearch-es-default","resourceVersion":"42573"} 2025-12-12T16:27:31.255585722+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.255Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"StatefulSet","namespace":"service-telemetry","name":"elasticsearch-es-default"} 2025-12-12T16:27:31.293326637+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.293Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"StatefulSet","namespace":"service-telemetry","name":"elasticsearch-es-default","resourceVersion":"42577"} 2025-12-12T16:27:31.293619185+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.293Z","log.logger":"elasticsearch-controller","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"PodDisruptionBudget","namespace":"service-telemetry","name":"elasticsearch-es-default"} 2025-12-12T16:27:31.298623971+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.298Z","log.logger":"elasticsearch-controller","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","kind":"PodDisruptionBudget","namespace":"service-telemetry","name":"elasticsearch-es-default","resourceVersion":"42579"} 2025-12-12T16:27:31.400247303+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:31.399Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.420516026+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.419Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"1","namespace":"service-telemetry","es_name":"elasticsearch","took":1.101253911} 2025-12-12T16:27:31.420516026+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.419Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"2","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.423449001+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.423Z","log.logger":"elasticsearch-controller","message":"Skipping pod because it has no IP yet","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"2","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","pod_name":"elasticsearch-es-default-0"} 2025-12-12T16:27:31.425867282+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.425Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"2","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.449436388+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.447Z","log.logger":"license-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"3","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.449436388+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.448Z","log.logger":"license-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"3","namespace":"service-telemetry","es_name":"elasticsearch","took":0.00041946} 2025-12-12T16:27:31.449436388+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.448Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"12","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:31.449436388+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.448Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"12","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000116543} 2025-12-12T16:27:31.449436388+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.448Z","log.logger":"remotecluster-controller","message":"Starting reconciliation 
run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"12","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.449436388+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.448Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"12","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000246647} 2025-12-12T16:27:31.458330783+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.458Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"2","namespace":"service-telemetry","es_name":"elasticsearch","took":0.038692279} 2025-12-12T16:27:31.458430286+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.458Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"3","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.476373500+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.476Z","log.logger":"elasticsearch-controller","message":"Skipping pod because it has no IP yet","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"3","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","pod_name":"elasticsearch-es-default-0"} 2025-12-12T16:27:31.484463905+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.484Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"3","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.496315695+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.493Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"3","namespace":"service-telemetry","es_name":"elasticsearch","took":0.034930774} 2025-12-12T16:27:31.496315695+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.494Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"13","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:27:31.496315695+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.494Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"13","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000228816} 2025-12-12T16:27:31.496315695+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.494Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"4","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.499649659+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:31.499Z","log.logger":"elasticsearch-controller","message":"Skipping pod because it has no IP yet","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"4","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","pod_name":"elasticsearch-es-default-0"} 2025-12-12T16:27:31.503687831+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.503Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"4","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.503687831+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.503Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"4","namespace":"service-telemetry","es_name":"elasticsearch","took":0.009210203} 2025-12-12T16:27:31.504336398+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.503Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"13","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.504336398+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.504Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"13","namespace":"service-telemetry","es_name":"elasticsearch","took":0.00038531} 2025-12-12T16:27:31.504336398+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.504Z","log.logger":"license-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"4","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:31.504584244+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:31.504Z","log.logger":"license-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"4","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000264107} 2025-12-12T16:27:32.060613267+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:32.060Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"5","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:32.067682185+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:32.067Z","log.logger":"elasticsearch-controller","message":"Skipping pod because it has no IP yet","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"5","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","pod_name":"elasticsearch-es-default-0"} 2025-12-12T16:27:32.070539968+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:32.070Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, 
re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"5","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:32.070607040+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:32.070Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"5","namespace":"service-telemetry","es_name":"elasticsearch","took":0.010056975} 2025-12-12T16:27:40.259598521+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:40.258Z","log.logger":"resource-reporter","message":"Creating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","kind":"ConfigMap","namespace":"service-telemetry","name":"elastic-licensing"} 2025-12-12T16:27:40.269272776+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:40.269Z","log.logger":"resource-reporter","message":"Created resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","kind":"ConfigMap","namespace":"service-telemetry","name":"elastic-licensing","resourceVersion":"43132"} 2025-12-12T16:27:40.290245087+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:40.288Z","log.logger":"manager","message":"Orphan secrets garbage collection complete","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0"} 2025-12-12T16:27:40.451240631+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:40.447Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"6","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:40.455217582+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:40.452Z","log.logger":"elasticsearch-controller","message":"Skipping pod because it has no IP yet","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"6","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","pod_name":"elasticsearch-es-default-0"} 2025-12-12T16:27:40.456853563+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:40.456Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"6","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:40.456929725+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:40.456Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"6","namespace":"service-telemetry","es_name":"elasticsearch","took":0.008960786} 2025-12-12T16:27:41.420534792+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:41.420Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"7","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:41.424107942+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:27:41.424Z","log.logger":"elasticsearch-controller","message":"Skipping pod because it has no IP yet","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"7","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","pod_name":"elasticsearch-es-default-0"} 2025-12-12T16:27:41.426576105+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:41.426Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"7","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:41.426596455+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:41.426Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"7","namespace":"service-telemetry","es_name":"elasticsearch","took":0.006541436} 2025-12-12T16:27:51.427922229+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:51.427Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"8","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:51.435505271+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:51.435Z","log.logger":"elasticsearch-controller","message":"Skipping pod because it has no IP yet","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"8","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","pod_name":"elasticsearch-es-default-0"} 2025-12-12T16:27:51.446920160+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:51.446Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"8","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:27:51.454556944+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:27:51.454Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"8","namespace":"service-telemetry","es_name":"elasticsearch","took":0.027367012} 2025-12-12T16:28:01.455220097+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:01.455Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"9","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:01.459202248+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:01.459Z","log.logger":"elasticsearch-controller","message":"Skipping pod because it has no IP yet","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"9","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","pod_name":"elasticsearch-es-default-0"} 2025-12-12T16:28:01.462022819+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:28:01.461Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"9","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:01.462065970+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:01.462Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"9","namespace":"service-telemetry","es_name":"elasticsearch","took":0.006909595} 2025-12-12T16:28:02.263228916+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.263Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:02.342771329+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.342Z","log.logger":"elasticsearch-controller","message":"No tls certificate found in secret","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","pod_name":"elasticsearch-es-default-0"} 2025-12-12T16:28:02.342771329+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.342Z","log.logger":"elasticsearch-controller","message":"Issuing new certificate","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es_name":"elasticsearch","pod_name":"elasticsearch-es-default-0"} 2025-12-12T16:28:02.353692706+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.353Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"14","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:28:02.353842529+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.353Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"14","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000203406} 2025-12-12T16:28:02.354126297+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.354Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"14","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:02.354381463+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.354Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"14","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000271957} 2025-12-12T16:28:02.384098405+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.384Z","log.logger":"elasticsearch-controller","message":"Updating 
resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es_name":"elasticsearch","kind":"ConfigMap","namespace":"service-telemetry","name":"elasticsearch-es-unicast-hosts"} 2025-12-12T16:28:02.391347129+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.391Z","log.logger":"elasticsearch-controller","message":"Seed hosts updated","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch","hosts":["10.217.0.53:9300"]} 2025-12-12T16:28:02.418467055+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.418Z","log.logger":"elasticsearch-controller","message":"Updated resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es_name":"elasticsearch","kind":"ConfigMap","namespace":"service-telemetry","name":"elasticsearch-es-unicast-hosts","resourceVersion":"43566"} 2025-12-12T16:28:02.422887887+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.422Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:02.422976199+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.422Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es_name":"elasticsearch","took":0.159827245} 2025-12-12T16:28:02.423072992+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.423Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"11","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:02.433748812+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.433Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"11","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:02.433810543+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.433Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"11","namespace":"service-telemetry","es_name":"elasticsearch","took":0.010727732} 2025-12-12T16:28:02.433974948+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.433Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"12","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:02.448534746+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.448Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, 
re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"12","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:02.448736191+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:02.448Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"12","namespace":"service-telemetry","es_name":"elasticsearch","took":0.014756583} 2025-12-12T16:28:04.222962235+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:04.222Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"13","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:04.233295646+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:04.232Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"13","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:04.233489491+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:04.233Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"13","namespace":"service-telemetry","es_name":"elasticsearch","took":0.01107802} 2025-12-12T16:28:08.324255183+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:08.323Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"14","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:08.336988355+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:08.336Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"14","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:08.336988355+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:08.336Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"14","namespace":"service-telemetry","es_name":"elasticsearch","took":0.013337088} 2025-12-12T16:28:09.318014013+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:09.317Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"15","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:09.348846644+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:09.348Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, 
re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"15","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:09.358615921+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:09.358Z","log.logger":"license-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"5","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:09.359102333+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:09.359Z","log.logger":"license-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"5","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000582575} 2025-12-12T16:28:09.359461492+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:09.359Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"15","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:28:09.359607896+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:09.359Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"15","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000221985} 2025-12-12T16:28:09.359744100+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:09.359Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"15","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:09.360014936+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:09.359Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"15","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000268127} 2025-12-12T16:28:09.360502679+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:09.360Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"15","namespace":"service-telemetry","es_name":"elasticsearch","took":0.042539776} 2025-12-12T16:28:09.360532049+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:09.360Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"16","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:09.371868196+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:09.371Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"16","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:09.371868196+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:28:09.371Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"16","namespace":"service-telemetry","es_name":"elasticsearch","took":0.011279655} 2025-12-12T16:28:11.466238511+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:11.462Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"17","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:11.474286525+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:11.473Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"17","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:11.474286525+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:11.473Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"17","namespace":"service-telemetry","es_name":"elasticsearch","took":0.011152162} 2025-12-12T16:28:21.474939377+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:21.474Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"18","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:21.485052613+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:21.485Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"18","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:21.485077783+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:21.485Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"18","namespace":"service-telemetry","es_name":"elasticsearch","took":0.010592538} 2025-12-12T16:28:25.941320806+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:25.941Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"19","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:28.902352976+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:28.902Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"19","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:28.914237146+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:28.914Z","log.logger":"license-controller","message":"Starting reconciliation 
run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"6","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:28.914597096+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:28.914Z","log.logger":"license-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"6","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000417201} 2025-12-12T16:28:28.914788830+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:28.914Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"16","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:28:28.914892783+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:28.914Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"16","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000105092} 2025-12-12T16:28:28.914991175+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:28.914Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"16","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:28.915246342+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:28.915Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"16","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000274777} 2025-12-12T16:28:28.915953830+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:28.915Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"19","namespace":"service-telemetry","es_name":"elasticsearch","took":2.974680485} 2025-12-12T16:28:28.915994601+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:28.915Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"20","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:28.952280069+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:28.950Z","log.logger":"elasticsearch-controller","message":"Elasticsearch cannot be reached yet, re-queuing","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"20","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:28.952280069+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:28.950Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"20","namespace":"service-telemetry","es_name":"elasticsearch","took":0.034432642} 2025-12-12T16:28:31.148162973+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.147Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation 
run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"21","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.238259204+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.237Z","log.logger":"elasticsearch-controller","message":"Annotating bootstrapped cluster with its UUID","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"21","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch","uuid":"5KZB4d1OT_OEaf4vq9BKfQ"} 2025-12-12T16:28:31.254967946+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.254Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"17","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.255164531+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.255Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"17","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000252836} 2025-12-12T16:28:31.255355356+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.255Z","log.logger":"license-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"7","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.255549661+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.255Z","log.logger":"license-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"7","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000183305} 2025-12-12T16:28:31.255705945+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.255Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"17","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:28:31.255778327+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.255Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"17","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000085772} 2025-12-12T16:28:31.270500060+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.270Z","log.logger":"elasticsearch-controller","message":"Zen 2 bootstrap is complete","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"21","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.280783710+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.280Z","log.logger":"elasticsearch-controller","message":"Ensuring no voting exclusions are set","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"21","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.281795795+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:28:31.281Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"18","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:28:31.281943279+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.281Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"18","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000162454} 2025-12-12T16:28:31.282040952+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.281Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"18","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.282375520+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.282Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"18","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000348989} 2025-12-12T16:28:31.282452942+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.282Z","log.logger":"license-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"8","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.282673958+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.282Z","log.logger":"license-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"8","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000213785} 2025-12-12T16:28:31.426114148+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.425Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"21","namespace":"service-telemetry","es_name":"elasticsearch","took":0.278693743} 2025-12-12T16:28:31.426167219+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.426Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"22","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.470837220+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.470Z","log.logger":"elasticsearch-controller","message":"Updating resource","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"22","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-default-es-config"} 2025-12-12T16:28:31.478122224+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.476Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"19","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:28:31.478122224+00:00 stderr F 
{"log.level":"info","@timestamp":"2025-12-12T16:28:31.477Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"19","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000959185} 2025-12-12T16:28:31.481245713+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.479Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"19","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.481245713+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.480Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"19","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000864351} 2025-12-12T16:28:31.481779947+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.481Z","log.logger":"elasticsearch-controller","message":"Updated resource successfully","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"22","namespace":"service-telemetry","es_name":"elasticsearch","kind":"Secret","namespace":"service-telemetry","name":"elasticsearch-es-default-es-config","resourceVersion":"43725"} 2025-12-12T16:28:31.484102466+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.484Z","log.logger":"elasticsearch-controller","message":"Ensuring no voting exclusions are set","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"22","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.573610801+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.573Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"20","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:28:31.573798016+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.573Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"20","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000253456} 2025-12-12T16:28:31.573871597+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.573Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"20","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.574204056+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.574Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"20","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000309408} 2025-12-12T16:28:31.574332839+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.574Z","log.logger":"license-controller","message":"Starting reconciliation 
run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"9","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.575991431+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.575Z","log.logger":"license-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"9","namespace":"service-telemetry","es_name":"elasticsearch","took":0.001656372} 2025-12-12T16:28:31.578696850+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.578Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"22","namespace":"service-telemetry","es_name":"elasticsearch","took":0.152472709} 2025-12-12T16:28:31.578696850+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.578Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"23","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.636258636+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.635Z","log.logger":"elasticsearch-controller","message":"Ensuring no voting exclusions are set","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"23","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.716989270+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.716Z","log.logger":"es-monitoring","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"21","namespace":"service-telemetry","es-mon_name":"elasticsearch"} 2025-12-12T16:28:31.717130183+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.717Z","log.logger":"es-monitoring","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"21","namespace":"service-telemetry","es-mon_name":"elasticsearch","took":0.000188724} 2025-12-12T16:28:31.717239386+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.717Z","log.logger":"remotecluster-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"21","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.717596915+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.717Z","log.logger":"remotecluster-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"21","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000354329} 2025-12-12T16:28:31.717671147+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.717Z","log.logger":"license-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.718035616+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.718Z","log.logger":"license-controller","message":"Ending reconciliation 
run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"10","namespace":"service-telemetry","es_name":"elasticsearch","took":0.000359289} 2025-12-12T16:28:31.718450877+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.718Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"23","namespace":"service-telemetry","es_name":"elasticsearch","took":0.139730077} 2025-12-12T16:28:31.718504198+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.718Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"24","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.813426230+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.813Z","log.logger":"elasticsearch-controller","message":"Ensuring no voting exclusions are set","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"24","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:31.857995738+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:31.857Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"24","namespace":"service-telemetry","es_name":"elasticsearch","took":0.139432019} 2025-12-12T16:28:41.579918720+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:41.578Z","log.logger":"elasticsearch-controller","message":"Starting reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"25","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:41.622786564+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:41.622Z","log.logger":"elasticsearch-controller","message":"Ensuring no voting exclusions are set","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"25","namespace":"service-telemetry","es_name":"elasticsearch","namespace":"service-telemetry","es_name":"elasticsearch"} 2025-12-12T16:28:51.926167670+00:00 stderr F {"log.level":"info","@timestamp":"2025-12-12T16:28:51.925Z","log.logger":"elasticsearch-controller","message":"Ending reconciliation run","service.version":"3.2.0+3ed7be5a","service.type":"eck","ecs.version":"1.4.0","iteration":"25","namespace":"service-telemetry","es_name":"elasticsearch","took":10.346856625} ././@LongLink0000644000000000000000000000024100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elas0000755000175000017500000000000015117043044033126 5ustar zuulzuul././@LongLink0000644000000000000000000000025700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elasticsearch/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elas0000755000175000017500000000000015117043063033127 5ustar 
zuulzuul././@LongLink0000644000000000000000000000026400000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elasticsearch/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elas0000644000175000017500000014702115117043044033135 0ustar zuulzuul2025-12-12T16:28:11.553983622+00:00 stderr F Dec 12, 2025 4:28:11 PM sun.util.locale.provider.LocaleProviderAdapter 2025-12-12T16:28:11.553983622+00:00 stderr F WARNING: COMPAT locale provider will be removed in a future release 2025-12-12T16:28:12.443630898+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:12,439Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "version[7.17.20], pid[2], build[default/docker/b26557f585b7d95c71a5549e571a6bcd2667697d/2024-04-08T08:34:31.070382898Z], OS[Linux/5.14.0-570.57.1.el9_6.x86_64/amd64], JVM[Oracle Corporation/OpenJDK 64-Bit Server VM/21.0.2/21.0.2+13-58]" } 2025-12-12T16:28:12.444071909+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:12,443Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "JVM home [/usr/share/elasticsearch/jdk], using bundled JDK [true]" } 2025-12-12T16:28:12.444510060+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:12,444Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "JVM arguments [-Xshare:auto, -Des.networkaddress.cache.ttl=60, -Des.networkaddress.cache.negative.ttl=10, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -XX:-OmitStackTraceInFastThrow, -XX:+ShowCodeDetailsInExceptionMessages, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dio.netty.allocator.numDirectArenas=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Dlog4j2.formatMsgNoLookups=true, -Djava.locale.providers=SPI,COMPAT, --add-opens=java.base/java.io=ALL-UNNAMED, -Djava.security.manager=allow, -XX:+UseG1GC, -Djava.io.tmpdir=/tmp/elasticsearch-6625502159805851271, -XX:+HeapDumpOnOutOfMemoryError, -XX:+ExitOnOutOfMemoryError, -XX:HeapDumpPath=data, -XX:ErrorFile=logs/hs_err_pid%p.log, -Xlog:gc*,gc+age=trace,safepoint:file=logs/gc.log:utctime,pid,tags:filecount=32,filesize=64m, -Des.cgroups.hierarchy.override=/, -Xms1024m, -Xmx1024m, -XX:MaxDirectMemorySize=536870912, -XX:G1HeapRegionSize=4m, -XX:InitiatingHeapOccupancyPercent=30, -XX:G1ReservePercent=15, -Des.path.home=/usr/share/elasticsearch, -Des.path.conf=/usr/share/elasticsearch/config, -Des.distribution.flavor=default, -Des.distribution.type=docker, -Des.bundled_jdk=true]" } 2025-12-12T16:28:14.771424211+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,770Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [aggs-matrix-stats]" } 2025-12-12T16:28:14.771711628+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,771Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [analysis-common]" } 
2025-12-12T16:28:14.771890692+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,771Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [constant-keyword]" } 2025-12-12T16:28:14.772058097+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,771Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [frozen-indices]" } 2025-12-12T16:28:14.772249152+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,772Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [ingest-common]" } 2025-12-12T16:28:14.772416806+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,772Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [ingest-geoip]" } 2025-12-12T16:28:14.772608501+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,772Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [ingest-user-agent]" } 2025-12-12T16:28:14.772767865+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,772Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [kibana]" } 2025-12-12T16:28:14.772916609+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,772Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [lang-expression]" } 2025-12-12T16:28:14.773058942+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,772Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [lang-mustache]" } 2025-12-12T16:28:14.773224076+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,773Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [lang-painless]" } 2025-12-12T16:28:14.773376760+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,773Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [legacy-geo]" } 2025-12-12T16:28:14.773570515+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,773Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [mapper-extras]" } 2025-12-12T16:28:14.773751610+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,773Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [mapper-version]" } 2025-12-12T16:28:14.773902433+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,773Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", 
"node.name": "elasticsearch-es-default-0", "message": "loaded module [parent-join]" } 2025-12-12T16:28:14.774048277+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,773Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [percolator]" } 2025-12-12T16:28:14.774210441+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,774Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [rank-eval]" } 2025-12-12T16:28:14.774364475+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,774Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [reindex]" } 2025-12-12T16:28:14.774515789+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,774Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [repositories-metering-api]" } 2025-12-12T16:28:14.774687453+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,774Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [repository-encrypted]" } 2025-12-12T16:28:14.774853327+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,774Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [repository-url]" } 2025-12-12T16:28:14.775045412+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,774Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [runtime-fields-common]" } 2025-12-12T16:28:14.775427862+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,775Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [search-business-rules]" } 2025-12-12T16:28:14.775761271+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,775Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [searchable-snapshots]" } 2025-12-12T16:28:14.775934885+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,775Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [snapshot-repo-test-kit]" } 2025-12-12T16:28:14.776079829+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,775Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [spatial]" } 2025-12-12T16:28:14.776234753+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,776Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [transform]" } 2025-12-12T16:28:14.776416267+00:00 stdout F {"type": "server", "timestamp": 
"2025-12-12T16:28:14,776Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [transport-netty4]" } 2025-12-12T16:28:14.776625292+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,776Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [unsigned-long]" } 2025-12-12T16:28:14.776812707+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,776Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [vector-tile]" } 2025-12-12T16:28:14.777002522+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,776Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [vectors]" } 2025-12-12T16:28:14.777232848+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,777Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [wildcard]" } 2025-12-12T16:28:14.777421923+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,777Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-aggregate-metric]" } 2025-12-12T16:28:14.777597407+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,777Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-analytics]" } 2025-12-12T16:28:14.777766151+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,777Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-async]" } 2025-12-12T16:28:14.777928265+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,777Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-async-search]" } 2025-12-12T16:28:14.778080329+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,777Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-autoscaling]" } 2025-12-12T16:28:14.778238343+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,778Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-ccr]" } 2025-12-12T16:28:14.778382757+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,778Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-core]" } 2025-12-12T16:28:14.778557451+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,778Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module 
[x-pack-data-streams]" } 2025-12-12T16:28:14.778753256+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,778Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-deprecation]" } 2025-12-12T16:28:14.778957291+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,778Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-enrich]" } 2025-12-12T16:28:14.779215278+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,778Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-eql]" } 2025-12-12T16:28:14.779422223+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,779Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-fleet]" } 2025-12-12T16:28:14.779603518+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,779Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-graph]" } 2025-12-12T16:28:14.779777752+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,779Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-identity-provider]" } 2025-12-12T16:28:14.779951287+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,779Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-ilm]" } 2025-12-12T16:28:14.780112021+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,779Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-logstash]" } 2025-12-12T16:28:14.780288005+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,780Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-ml]" } 2025-12-12T16:28:14.780447609+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,780Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-monitoring]" } 2025-12-12T16:28:14.780683915+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,780Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-ql]" } 2025-12-12T16:28:14.780852849+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,780Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-rollup]" } 2025-12-12T16:28:14.781000983+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,780Z", "level": "INFO", "component": "o.e.p.PluginsService", 
"cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-security]" } 2025-12-12T16:28:14.781205588+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,781Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-shutdown]" } 2025-12-12T16:28:14.781391353+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,781Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-sql]" } 2025-12-12T16:28:14.781608899+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,781Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-stack]" } 2025-12-12T16:28:14.781761712+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,781Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-text-structure]" } 2025-12-12T16:28:14.781938307+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,781Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-voting-only-node]" } 2025-12-12T16:28:14.782102191+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,781Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "loaded module [x-pack-watcher]" } 2025-12-12T16:28:14.782672185+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,782Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "no plugins loaded" } 2025-12-12T16:28:14.822121834+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,821Z", "level": "INFO", "component": "o.e.e.NodeEnvironment", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "using [1] data paths, mounts [[/usr/share/elasticsearch/data (/dev/vda4)]], net usable_space [53.3gb], net total_space [79.4gb], types [xfs]" } 2025-12-12T16:28:14.822426412+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,822Z", "level": "INFO", "component": "o.e.e.NodeEnvironment", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "heap size [1gb], compressed ordinary object pointers [true]" } 2025-12-12T16:28:14.839906534+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:14,839Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "node name [elasticsearch-es-default-0], node ID [oHisfbxFQKaS5PA_uMC72w], cluster name [elasticsearch], roles [master, data, ingest]" } 2025-12-12T16:28:17.516283549+00:00 stdout F {"type": "deprecation.elasticsearch", "timestamp": "2025-12-12T16:28:17,514Z", "level": "CRITICAL", "component": "o.e.d.c.r.OperationRouting", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "searches will not be routed based on awareness attributes starting in version 8.0.0; to opt into this behaviour 
now please set the system property [es.search.ignore_awareness_attributes] to [true]", "key": "searches_not_routed_on_awareness_attributes", "category": "settings" } 2025-12-12T16:28:19.954139018+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:19,952Z", "level": "INFO", "component": "o.e.x.m.p.l.CppLogMessageHandler", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "[controller/206] [Main.cc@122] controller (64 bit): Version 7.17.20 (Build 7a252d9f420169) Copyright (c) 2024 Elasticsearch BV" } 2025-12-12T16:28:20.344568709+00:00 stderr F {"timestamp": "2025-12-12T16:28:20+00:00", "message": "readiness probe failed", "curl_rc": "7"} 2025-12-12T16:28:20.467343766+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:20,466Z", "level": "INFO", "component": "o.e.x.s.a.Realms", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "license mode is [trial], currently licensed security realms are [reserved/reserved,file/file1,native/native1]" } 2025-12-12T16:28:20.973388083+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:20,972Z", "level": "INFO", "component": "o.e.x.s.a.s.FileRolesStore", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "parsed [55] roles from file [/usr/share/elasticsearch/config/roles.yml]" } 2025-12-12T16:28:21.514231381+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:21,513Z", "level": "INFO", "component": "o.e.i.g.ConfigDatabases", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "initialized default databases [[GeoLite2-Country.mmdb, GeoLite2-City.mmdb, GeoLite2-ASN.mmdb]], config databases [[]] and watching [/usr/share/elasticsearch/config/ingest-geoip] for changes" } 2025-12-12T16:28:21.515504573+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:21,515Z", "level": "INFO", "component": "o.e.i.g.DatabaseNodeService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "initialized database registry, using geoip-databases directory [/tmp/elasticsearch-6625502159805851271/geoip-databases/oHisfbxFQKaS5PA_uMC72w]" } 2025-12-12T16:28:22.153124931+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:22,152Z", "level": "INFO", "component": "o.e.t.NettyAllocator", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "creating NettyAllocator with the following configs: [name=unpooled, suggested_max_allocation_size=1mb, factors={es.unsafe.use_unpooled_allocator=null, g1gc_enabled=true, g1gc_region_size=4mb, heap_size=1gb}]" } 2025-12-12T16:28:22.187457840+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:22,187Z", "level": "INFO", "component": "o.e.i.r.RecoverySettings", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "using rate limit [40mb] with [default=40mb, read=0b, write=0b, max=0b]" } 2025-12-12T16:28:22.237347043+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:22,236Z", "level": "INFO", "component": "o.e.d.DiscoveryModule", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "using discovery type [zen] and seed hosts providers [settings, file]" } 2025-12-12T16:28:22.873098144+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:22,872Z", "level": "INFO", "component": "o.e.g.DanglingIndicesState", "cluster.name": "elasticsearch", "node.name": 
"elasticsearch-es-default-0", "message": "gateway.auto_import_dangling_indices is disabled, dangling indices will not be automatically detected or imported and must be managed manually" } 2025-12-12T16:28:23.634767161+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:23,633Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "initialized" } 2025-12-12T16:28:23.634855753+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:23,634Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "starting ..." } 2025-12-12T16:28:23.692770239+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:23,692Z", "level": "INFO", "component": "o.e.x.s.c.f.PersistentCache", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "persistent cache index loaded" } 2025-12-12T16:28:23.693979769+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:23,693Z", "level": "INFO", "component": "o.e.x.d.l.DeprecationIndexingComponent", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "deprecation component started" } 2025-12-12T16:28:23.806373433+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:23,805Z", "level": "INFO", "component": "o.e.t.TransportService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "publish_address {10.217.0.53:9300}, bound_addresses {[::]:9300}" } 2025-12-12T16:28:23.816306234+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:23,815Z", "level": "INFO", "component": "o.e.x.m.Monitoring", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "creating template [.monitoring-alerts-7] with version [7]" } 2025-12-12T16:28:23.821969048+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:23,821Z", "level": "INFO", "component": "o.e.x.m.Monitoring", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "creating template [.monitoring-es] with version [7]" } 2025-12-12T16:28:23.823664030+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:23,823Z", "level": "INFO", "component": "o.e.x.m.Monitoring", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "creating template [.monitoring-kibana] with version [7]" } 2025-12-12T16:28:23.825378574+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:23,825Z", "level": "INFO", "component": "o.e.x.m.Monitoring", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "creating template [.monitoring-logstash] with version [7]" } 2025-12-12T16:28:23.828070272+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:23,827Z", "level": "INFO", "component": "o.e.x.m.Monitoring", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "creating template [.monitoring-beats] with version [7]" } 2025-12-12T16:28:24.016551552+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:24,014Z", "level": "INFO", "component": "o.e.b.BootstrapChecks", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "bound or publishing to a non-loopback address, enforcing bootstrap checks" } 2025-12-12T16:28:24.034602019+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:24,033Z", "level": 
"INFO", "component": "o.e.c.c.Coordinator", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "setting initial configuration to VotingConfiguration{oHisfbxFQKaS5PA_uMC72w}" } 2025-12-12T16:28:24.194735332+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:24,193Z", "level": "INFO", "component": "o.e.c.s.MasterService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "elected-as-master ([1] nodes joined)[{elasticsearch-es-default-0}{oHisfbxFQKaS5PA_uMC72w}{29D5xSVISWWQHf-wZ6Yf3A}{10.217.0.53}{10.217.0.53:9300}{dim} elect leader, _BECOME_MASTER_TASK_, _FINISH_ELECTION_], term: 1, version: 1, delta: master node changed {previous [], current [{elasticsearch-es-default-0}{oHisfbxFQKaS5PA_uMC72w}{29D5xSVISWWQHf-wZ6Yf3A}{10.217.0.53}{10.217.0.53:9300}{dim}]}" } 2025-12-12T16:28:24.247995740+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:24,247Z", "level": "INFO", "component": "o.e.c.c.CoordinationState", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "cluster UUID set to [5KZB4d1OT_OEaf4vq9BKfQ]" } 2025-12-12T16:28:24.277381854+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:24,276Z", "level": "INFO", "component": "o.e.c.s.ClusterApplierService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "master node changed {previous [], current [{elasticsearch-es-default-0}{oHisfbxFQKaS5PA_uMC72w}{29D5xSVISWWQHf-wZ6Yf3A}{10.217.0.53}{10.217.0.53:9300}{dim}]}, term: 1, version: 1, reason: Publication{term=1, version=1}" } 2025-12-12T16:28:24.335332980+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:24,334Z", "level": "INFO", "component": "o.e.h.AbstractHttpServerTransport", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "publish_address {elasticsearch-es-default-0.elasticsearch-es-default.service-telemetry.svc/10.217.0.53:9200}, bound_addresses {[::]:9200}", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:24.335764411+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:24,335Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "started", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:24.505483047+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:24,504Z", "level": "INFO", "component": "o.e.g.GatewayService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "recovered [0] indices into cluster_state", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:24.844037305+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:24,842Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index template [.ml-stats] for index patterns [.ml-stats-*]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:24.937954362+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:24,937Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index template [.ml-notifications-000002] for index patterns 
[.ml-notifications-000002]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.021399614+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,020Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index template [.ml-anomalies-] for index patterns [.ml-anomalies-*]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.069068700+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,068Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index template [.ml-state] for index patterns [.ml-state*]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.130119276+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,129Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding component template [logs-settings]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.179862344+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,179Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding component template [synthetics-settings]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.243017993+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,242Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding component template [synthetics-mappings]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.296317842+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,286Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding component template [metrics-mappings]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.348570374+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,348Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding component template [data-streams-mappings]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.434212622+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,433Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding component template [logs-mappings]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.506788158+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,506Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": 
"adding component template [metrics-settings]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.625820721+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,625Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index template [.watch-history-13] for index patterns [.watcher-history-13*]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.719408780+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,718Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index template [ilm-history] for index patterns [ilm-history-5*]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.812162477+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,811Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding component template [.deprecation-indexing-mappings]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.855820992+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,855Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding component template [.deprecation-indexing-settings]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:25.946041266+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:25,945Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index template [.slm-history] for index patterns [.slm-history-5*]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.013811941+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,013Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index template [synthetics] for index patterns [synthetics-*-*]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.107662496+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,107Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index template [logs] for index patterns [logs-*-*]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.155907057+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,155Z", "level": "INFO", "component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index template [metrics] for index patterns [metrics-*-*]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.232094725+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,231Z", "level": "INFO", 
"component": "o.e.c.m.MetadataIndexTemplateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index template [.deprecation-indexing-template] for index patterns [.logs-deprecation.*]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.272795375+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,272Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [ml-size-based-ilm-policy]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.328001783+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,327Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [synthetics]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.366069466+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,365Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [metrics]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.407376461+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,406Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [logs]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.457425568+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,456Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [7-days-default]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.513542168+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,512Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [90-days-default]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.553846608+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,553Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [180-days-default]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.598671423+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,598Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [30-days-default]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.634264834+00:00 stdout F {"type": "server", "timestamp": 
"2025-12-12T16:28:26,633Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [365-days-default]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.676826231+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,676Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [watch-history-ilm-policy]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.713000896+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,712Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [ilm-history-ilm-policy]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.749778807+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,749Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [slm-history-ilm-policy]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.790506528+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,789Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [.deprecation-indexing-ilm-policy]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:26.826140480+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:26,825Z", "level": "INFO", "component": "o.e.x.i.a.TransportPutLifecycleAction", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "adding index lifecycle policy [.fleet-actions-results-ilm-policy]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:27.770425508+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:27,769Z", "level": "INFO", "component": "o.e.i.g.GeoIpDownloader", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "updating geoip databases", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:27.770578342+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:27,770Z", "level": "INFO", "component": "o.e.i.g.GeoIpDownloader", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "fetching geoip databases overview from [https://geoip.elastic.co/v1/database?elastic_geoip_service_tos=agree]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:28.608083458+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:28,606Z", "level": "INFO", "component": "o.e.l.LicenseService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "license [fe67ede4-84fa-404b-9268-0ae6b9530dad] mode [basic] - valid", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 
2025-12-12T16:28:28.609144125+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:28,608Z", "level": "INFO", "component": "o.e.x.s.a.Realms", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "license mode is [basic], currently licensed security realms are [reserved/reserved,file/file1,native/native1]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:28.610203022+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:28,610Z", "level": "INFO", "component": "o.e.x.s.s.SecurityStatusChangeListener", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "Active license is now [BASIC]; Security is enabled", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:28.629506270+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:28,628Z", "level": "INFO", "component": "o.e.c.m.MetadataCreateIndexService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "[.geoip_databases] creating index, cause [auto(bulk api)], templates [], shards [1]/[0]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:28.900818827+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:28,900Z", "level": "INFO", "component": "o.e.c.r.a.AllocationService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "Cluster health status changed from [YELLOW] to [GREEN] (reason: [shards started [[.geoip_databases][0]]]).", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:29.700419904+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:29,699Z", "level": "INFO", "component": "o.e.i.g.DatabaseNodeService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "retrieve geoip database [GeoLite2-ASN.mmdb] from [.geoip_databases] to [/tmp/elasticsearch-6625502159805851271/geoip-databases/oHisfbxFQKaS5PA_uMC72w/GeoLite2-ASN.mmdb.tmp.gz]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:29.713600757+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:29,713Z", "level": "INFO", "component": "o.e.i.g.GeoIpDownloader", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "successfully downloaded geoip database [GeoLite2-ASN.mmdb]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:30.107472556+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:30,106Z", "level": "INFO", "component": "o.e.i.g.DatabaseNodeService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "successfully reloaded changed geoip database file [/tmp/elasticsearch-6625502159805851271/geoip-databases/oHisfbxFQKaS5PA_uMC72w/GeoLite2-ASN.mmdb]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:51.894329004+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:51,893Z", "level": "WARN", "component": "o.e.g.PersistedClusterStateService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "writing cluster state took [10212ms] which is above the warn threshold of [10s]; wrote global metadata [false] and metadata for [0] indices and skipped [1] unchanged indices", "cluster.uuid": 
"5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:28:51.894867418+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:28:51,894Z", "level": "INFO", "component": "o.e.c.c.C.CoordinatorPublication", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "after [10.2s] publication of cluster state version [49] is still waiting for {elasticsearch-es-default-0}{oHisfbxFQKaS5PA_uMC72w}{29D5xSVISWWQHf-wZ6Yf3A}{10.217.0.53}{10.217.0.53:9300}{dim}{k8s_node_name=crc, xpack.installed=true, transform.node=false} [SENT_PUBLISH_REQUEST]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:29:04.684026284+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:29:04,683Z", "level": "INFO", "component": "o.e.i.g.DatabaseNodeService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "retrieve geoip database [GeoLite2-City.mmdb] from [.geoip_databases] to [/tmp/elasticsearch-6625502159805851271/geoip-databases/oHisfbxFQKaS5PA_uMC72w/GeoLite2-City.mmdb.tmp.gz]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:29:04.694854858+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:29:04,694Z", "level": "INFO", "component": "o.e.i.g.GeoIpDownloader", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "successfully downloaded geoip database [GeoLite2-City.mmdb]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:29:05.544223895+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:29:05,543Z", "level": "INFO", "component": "o.e.i.g.DatabaseNodeService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "successfully reloaded changed geoip database file [/tmp/elasticsearch-6625502159805851271/geoip-databases/oHisfbxFQKaS5PA_uMC72w/GeoLite2-City.mmdb]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:29:07.173882429+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:29:07,172Z", "level": "INFO", "component": "o.e.i.g.DatabaseNodeService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "retrieve geoip database [GeoLite2-Country.mmdb] from [.geoip_databases] to [/tmp/elasticsearch-6625502159805851271/geoip-databases/oHisfbxFQKaS5PA_uMC72w/GeoLite2-Country.mmdb.tmp.gz]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:29:07.183791780+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:29:07,183Z", "level": "INFO", "component": "o.e.i.g.GeoIpDownloader", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "successfully downloaded geoip database [GeoLite2-Country.mmdb]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } 2025-12-12T16:29:07.339833469+00:00 stdout F {"type": "server", "timestamp": "2025-12-12T16:29:07,339Z", "level": "INFO", "component": "o.e.i.g.DatabaseNodeService", "cluster.name": "elasticsearch", "node.name": "elasticsearch-es-default-0", "message": "successfully reloaded changed geoip database file [/tmp/elasticsearch-6625502159805851271/geoip-databases/oHisfbxFQKaS5PA_uMC72w/GeoLite2-Country.mmdb]", "cluster.uuid": "5KZB4d1OT_OEaf4vq9BKfQ", "node.id": "oHisfbxFQKaS5PA_uMC72w" } ././@LongLink0000644000000000000000000000030200000000000011576 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-init-filesystem/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elas0000755000175000017500000000000015117043063033127 5ustar zuulzuul././@LongLink0000644000000000000000000000030700000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-init-filesystem/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elas0000644000175000017500000002445315117043044033140 0ustar zuulzuul2025-12-12T16:28:01.500480223+00:00 stdout F Starting init script 2025-12-12T16:28:01.603519320+00:00 stdout F Copying /usr/share/elasticsearch/config/* to /mnt/elastic-internal/elasticsearch-config-local/ 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/elasticsearch-plugins.example.yml' -> '/mnt/elastic-internal/elasticsearch-config-local/elasticsearch-plugins.example.yml' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/elasticsearch.yml' -> '/mnt/elastic-internal/elasticsearch-config-local/elasticsearch.yml' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/http-certs' -> '/mnt/elastic-internal/elasticsearch-config-local/http-certs' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/http-certs/..2025_12_12_16_27_31.2378956188' -> '/mnt/elastic-internal/elasticsearch-config-local/http-certs/..2025_12_12_16_27_31.2378956188' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/http-certs/..2025_12_12_16_27_31.2378956188/ca.crt' -> '/mnt/elastic-internal/elasticsearch-config-local/http-certs/..2025_12_12_16_27_31.2378956188/ca.crt' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/http-certs/..2025_12_12_16_27_31.2378956188/tls.crt' -> '/mnt/elastic-internal/elasticsearch-config-local/http-certs/..2025_12_12_16_27_31.2378956188/tls.crt' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/http-certs/..2025_12_12_16_27_31.2378956188/tls.key' -> '/mnt/elastic-internal/elasticsearch-config-local/http-certs/..2025_12_12_16_27_31.2378956188/tls.key' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/http-certs/..data' -> '/mnt/elastic-internal/elasticsearch-config-local/http-certs/..data' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/http-certs/tls.crt' -> '/mnt/elastic-internal/elasticsearch-config-local/http-certs/tls.crt' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/http-certs/tls.key' -> '/mnt/elastic-internal/elasticsearch-config-local/http-certs/tls.key' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/http-certs/ca.crt' -> '/mnt/elastic-internal/elasticsearch-config-local/http-certs/ca.crt' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/jvm.options' -> '/mnt/elastic-internal/elasticsearch-config-local/jvm.options' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/jvm.options.d' -> '/mnt/elastic-internal/elasticsearch-config-local/jvm.options.d' 2025-12-12T16:28:01.702094145+00:00 stdout F 
'/usr/share/elasticsearch/config/log4j2.file.properties' -> '/mnt/elastic-internal/elasticsearch-config-local/log4j2.file.properties' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/log4j2.properties' -> '/mnt/elastic-internal/elasticsearch-config-local/log4j2.properties' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/role_mapping.yml' -> '/mnt/elastic-internal/elasticsearch-config-local/role_mapping.yml' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/roles.yml' -> '/mnt/elastic-internal/elasticsearch-config-local/roles.yml' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/transport-remote-certs' -> '/mnt/elastic-internal/elasticsearch-config-local/transport-remote-certs' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/transport-remote-certs/..2025_12_12_16_27_31.2724074096' -> '/mnt/elastic-internal/elasticsearch-config-local/transport-remote-certs/..2025_12_12_16_27_31.2724074096' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/transport-remote-certs/..2025_12_12_16_27_31.2724074096/ca.crt' -> '/mnt/elastic-internal/elasticsearch-config-local/transport-remote-certs/..2025_12_12_16_27_31.2724074096/ca.crt' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/transport-remote-certs/..data' -> '/mnt/elastic-internal/elasticsearch-config-local/transport-remote-certs/..data' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/transport-remote-certs/ca.crt' -> '/mnt/elastic-internal/elasticsearch-config-local/transport-remote-certs/ca.crt' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/users' -> '/mnt/elastic-internal/elasticsearch-config-local/users' 2025-12-12T16:28:01.702094145+00:00 stdout F '/usr/share/elasticsearch/config/users_roles' -> '/mnt/elastic-internal/elasticsearch-config-local/users_roles' 2025-12-12T16:28:01.802042415+00:00 stdout F Empty dir /usr/share/elasticsearch/plugins 2025-12-12T16:28:01.897958123+00:00 stdout F Copying /usr/share/elasticsearch/bin/* to /mnt/elastic-internal/elasticsearch-bin-local/ 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-certgen' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-certgen' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-certutil' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-certutil' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-cli' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-cli' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-croneval' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-croneval' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-env' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-env' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-env-from-file' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-env-from-file' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-geoip' -> 
'/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-geoip' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-keystore' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-keystore' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-migrate' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-migrate' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-node' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-node' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-plugin' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-plugin' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-saml-metadata' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-saml-metadata' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-service-tokens' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-service-tokens' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-setup-passwords' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-setup-passwords' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-shard' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-shard' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-sql-cli' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-sql-cli' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-sql-cli-7.17.20.jar' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-sql-cli-7.17.20.jar' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-syskeygen' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-syskeygen' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/elasticsearch-users' -> '/mnt/elastic-internal/elasticsearch-bin-local/elasticsearch-users' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/x-pack-env' -> '/mnt/elastic-internal/elasticsearch-bin-local/x-pack-env' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/x-pack-security-env' -> '/mnt/elastic-internal/elasticsearch-bin-local/x-pack-security-env' 2025-12-12T16:28:02.009076625+00:00 stdout F '/usr/share/elasticsearch/bin/x-pack-watcher-env' -> '/mnt/elastic-internal/elasticsearch-bin-local/x-pack-watcher-env' 2025-12-12T16:28:02.102084419+00:00 stdout F Files copy duration: 1 sec. 
2025-12-12T16:28:02.105288820+00:00 stdout F Linking /mnt/elastic-internal/xpack-file-realm/users to /mnt/elastic-internal/elasticsearch-config-local/users 2025-12-12T16:28:02.229545366+00:00 stdout F Linking /mnt/elastic-internal/xpack-file-realm/roles.yml to /mnt/elastic-internal/elasticsearch-config-local/roles.yml 2025-12-12T16:28:02.233843014+00:00 stdout F Linking /mnt/elastic-internal/xpack-file-realm/users_roles to /mnt/elastic-internal/elasticsearch-config-local/users_roles 2025-12-12T16:28:02.298762235+00:00 stdout F Linking /mnt/elastic-internal/elasticsearch-config/elasticsearch.yml to /mnt/elastic-internal/elasticsearch-config-local/elasticsearch.yml 2025-12-12T16:28:02.302285395+00:00 stdout F Linking /mnt/elastic-internal/unicast-hosts/unicast_hosts.txt to /mnt/elastic-internal/elasticsearch-config-local/unicast_hosts.txt 2025-12-12T16:28:02.304870010+00:00 stdout F Linking /mnt/elastic-internal/xpack-file-realm/service_tokens to /mnt/elastic-internal/elasticsearch-config-local/service_tokens 2025-12-12T16:28:02.497677640+00:00 stdout F File linking duration: 0 sec. 2025-12-12T16:28:02.602464802+00:00 stdout F chown duration: 0 sec. 2025-12-12T16:28:02.602553254+00:00 stdout F waiting for the transport certificates (/mnt/elastic-internal/transport-certificates/elasticsearch-es-default-0.tls.key or /mnt/elastic-internal/transport-certificates/transport.certs.disabled) 2025-12-12T16:28:03.330977620+00:00 stdout F wait duration: 1 sec. 2025-12-12T16:28:03.331041281+00:00 stdout F Init script successful 2025-12-12T16:28:03.399959895+00:00 stdout F Script duration: 2 sec.
[archived log, empty: home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-suspend/0.log]
[archived log: home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-69d5f845f8-nsdgk_4c111429-5512-4d9c-898b-d3ec0bdb5d08/kube-controller-manager-operator/0.log]
2025-12-12T16:16:45.206032077+00:00 stderr F I1212 16:16:45.204460 1 cmd.go:253] Using service-serving-cert provided certificates 2025-12-12T16:16:45.206032077+00:00 stderr F I1212 16:16:45.205039 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 2025-12-12T16:16:45.218225464+00:00 stderr F I1212 16:16:45.218147 1 observer_polling.go:159] Starting file observer 2025-12-12T16:16:45.250889422+00:00 stderr F I1212 16:16:45.249985 1 builder.go:304] kube-controller-manager-operator version v0.0.0-unknown-afdae35-afdae35 2025-12-12T16:16:46.178285703+00:00 stderr F I1212 16:16:46.175902 1 secure_serving.go:57] Forcing use of http/1.1 only 2025-12-12T16:16:46.178285703+00:00 stderr F W1212 16:16:46.176504 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:46.178285703+00:00 stderr F W1212 16:16:46.176513 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected. 2025-12-12T16:16:46.178285703+00:00 stderr F W1212 16:16:46.176517 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected. 2025-12-12T16:16:46.178285703+00:00 stderr F W1212 16:16:46.176520 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected. 2025-12-12T16:16:46.178285703+00:00 stderr F W1212 16:16:46.176523 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected. 2025-12-12T16:16:46.178285703+00:00 stderr F W1212 16:16:46.176526 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.
2025-12-12T16:16:46.192235203+00:00 stderr F I1212 16:16:46.189414 1 secure_serving.go:211] Serving securely on [::]:8443 2025-12-12T16:16:46.192235203+00:00 stderr F I1212 16:16:46.189656 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:16:46.192235203+00:00 stderr F I1212 16:16:46.189952 1 requestheader_controller.go:180] Starting RequestHeaderAuthRequestController 2025-12-12T16:16:46.192235203+00:00 stderr F I1212 16:16:46.190010 1 shared_informer.go:350] "Waiting for caches to sync" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:46.192235203+00:00 stderr F I1212 16:16:46.190072 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:46.192235203+00:00 stderr F I1212 16:16:46.190082 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:46.192235203+00:00 stderr F I1212 16:16:46.190096 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:46.192235203+00:00 stderr F I1212 16:16:46.190102 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:46.196203150+00:00 stderr F I1212 16:16:46.196134 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" 2025-12-12T16:16:46.202213697+00:00 stderr F I1212 16:16:46.201300 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:16:46.209082645+00:00 stderr F I1212 16:16:46.203216 1 leaderelection.go:257] attempting to acquire leader lease openshift-kube-controller-manager-operator/kube-controller-manager-operator-lock... 
2025-12-12T16:16:46.235225233+00:00 stderr F I1212 16:16:46.233861 1 leaderelection.go:271] successfully acquired lease openshift-kube-controller-manager-operator/kube-controller-manager-operator-lock 2025-12-12T16:16:46.235225233+00:00 stderr F I1212 16:16:46.234302 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-kube-controller-manager-operator", Name:"kube-controller-manager-operator-lock", UID:"ab30a0ce-4d7c-4055-b611-6246fc368ec9", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"37208", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' kube-controller-manager-operator-69d5f845f8-nsdgk_4197d067-5d88-4de7-b31b-7cde2ae0e042 became leader 2025-12-12T16:16:46.236258118+00:00 stderr F I1212 16:16:46.235569 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:46.259533226+00:00 stderr F I1212 16:16:46.257163 1 starter.go:97] FeatureGates initialized: knownFeatureGates=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:16:46.265044691+00:00 stderr F I1212 16:16:46.263714 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-controller-manager-operator", Name:"kube-controller-manager-operator", UID:"09857aec-2c93-4f0d-9e38-a820bd5b8362", APIVersion:"apps/v1", 
ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", "NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:46.297279748+00:00 stderr F I1212 16:16:46.296614 1 shared_informer.go:357] "Caches are synced" controller="RequestHeaderAuthRequestController" 2025-12-12T16:16:46.297279748+00:00 stderr F I1212 16:16:46.296812 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:16:46.297279748+00:00 stderr F I1212 16:16:46.296846 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" 2025-12-12T16:16:46.339432857+00:00 stderr F I1212 16:16:46.337051 1 base_controller.go:76] Waiting for caches to sync for GarbageCollectorWatcherController 
2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340262 1 base_controller.go:76] Waiting for caches to sync for MissingStaticPodController 2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340531 1 base_controller.go:76] Waiting for caches to sync for KubeControllerManagerStaticResources-StaticResources 2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340548 1 base_controller.go:76] Waiting for caches to sync for TargetConfigController 2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340560 1 base_controller.go:76] Waiting for caches to sync for ConfigObserver 2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340663 1 base_controller.go:76] Waiting for caches to sync for StatusSyncer_kube-controller-manager 2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340686 1 base_controller.go:76] Waiting for caches to sync for kube-controller-manager 2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340709 1 base_controller.go:76] Waiting for caches to sync for kube-controller-manager-StaticPodState 2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340717 1 base_controller.go:76] Waiting for caches to sync for kube-controller-manager-InstallerState 2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340736 1 base_controller.go:76] Waiting for caches to sync for kube-controller-manager-Node 2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340756 1 base_controller.go:76] Waiting for caches to sync for CertRotationController 2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340778 1 base_controller.go:76] Waiting for caches to sync for SATokenSignerController 2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340808 1 base_controller.go:76] Waiting for caches to sync for WorkerLatencyProfile 2025-12-12T16:16:46.341212711+00:00 stderr F I1212 16:16:46.340808 1 base_controller.go:76] Waiting for caches to sync for GuardController 2025-12-12T16:16:46.342208855+00:00 stderr F I1212 16:16:46.341272 1 base_controller.go:76] Waiting for caches to sync for RevisionController 2025-12-12T16:16:46.342208855+00:00 stderr F I1212 16:16:46.341523 1 base_controller.go:76] Waiting for caches to sync for Installer 2025-12-12T16:16:46.342208855+00:00 stderr F I1212 16:16:46.340726 1 base_controller.go:76] Waiting for caches to sync for PruneController 2025-12-12T16:16:46.342208855+00:00 stderr F I1212 16:16:46.341583 1 base_controller.go:76] Waiting for caches to sync for kube-controller-manager-operator-UnsupportedConfigOverrides 2025-12-12T16:16:46.342208855+00:00 stderr F I1212 16:16:46.341648 1 base_controller.go:76] Waiting for caches to sync for LoggingSyncer 2025-12-12T16:16:46.342208855+00:00 stderr F I1212 16:16:46.341858 1 base_controller.go:76] Waiting for caches to sync for BackingResourceController-StaticResources 2025-12-12T16:16:46.442493833+00:00 stderr F I1212 16:16:46.442022 1 base_controller.go:82] Caches are synced for LoggingSyncer 2025-12-12T16:16:46.442493833+00:00 stderr F I1212 16:16:46.442065 1 base_controller.go:119] Starting #1 worker of LoggingSyncer controller ... 2025-12-12T16:16:46.448593332+00:00 stderr F I1212 16:16:46.443020 1 base_controller.go:82] Caches are synced for kube-controller-manager-operator-UnsupportedConfigOverrides 2025-12-12T16:16:46.448593332+00:00 stderr F I1212 16:16:46.443034 1 base_controller.go:119] Starting #1 worker of kube-controller-manager-operator-UnsupportedConfigOverrides controller ... 
2025-12-12T16:16:46.453840590+00:00 stderr F I1212 16:16:46.453035 1 base_controller.go:82] Caches are synced for BackingResourceController-StaticResources 2025-12-12T16:16:46.453840590+00:00 stderr F I1212 16:16:46.453087 1 base_controller.go:119] Starting #1 worker of BackingResourceController-StaticResources controller ... 2025-12-12T16:16:46.453840590+00:00 stderr F I1212 16:16:46.453154 1 base_controller.go:82] Caches are synced for StatusSyncer_kube-controller-manager 2025-12-12T16:16:46.453840590+00:00 stderr F I1212 16:16:46.453161 1 base_controller.go:119] Starting #1 worker of StatusSyncer_kube-controller-manager controller ... 2025-12-12T16:16:46.453840590+00:00 stderr F I1212 16:16:46.453222 1 base_controller.go:82] Caches are synced for kube-controller-manager-Node 2025-12-12T16:16:46.453840590+00:00 stderr F I1212 16:16:46.453230 1 base_controller.go:119] Starting #1 worker of kube-controller-manager-Node controller ... 2025-12-12T16:16:46.519300298+00:00 stderr F I1212 16:16:46.518632 1 status_controller.go:230] clusteroperator/kube-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:46Z","message":"NodeControllerDegraded: The master nodes not ready: node \"crc\" not ready since 2025-11-03 09:40:44 +0000 UTC because NodeStatusUnknown (Kubelet stopped posting node status.)","reason":"NodeController_MasterNodesReady","status":"True","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:17:05Z","message":"NodeInstallerProgressing: 1 node is at revision 7","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T08:06:09Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 7","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:55Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:55Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:46.548502861+00:00 stderr F I1212 16:16:46.544932 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-controller-manager-operator", Name:"kube-controller-manager-operator", UID:"09857aec-2c93-4f0d-9e38-a820bd5b8362", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-controller-manager changed: Degraded changed from False to True ("NodeControllerDegraded: The master nodes not ready: node \"crc\" not ready since 2025-11-03 09:40:44 +0000 UTC because NodeStatusUnknown (Kubelet stopped posting node status.)") 2025-12-12T16:16:46.550693705+00:00 stderr F I1212 16:16:46.550612 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:46.606931668+00:00 stderr F I1212 16:16:46.606861 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:46.637867953+00:00 stderr F I1212 16:16:46.637807 1 base_controller.go:82] Caches are synced for GarbageCollectorWatcherController 2025-12-12T16:16:46.637867953+00:00 stderr F I1212 16:16:46.637838 1 base_controller.go:119] Starting #1 worker of GarbageCollectorWatcherController controller ... 
2025-12-12T16:16:46.769575319+00:00 stderr F I1212 16:16:46.768120 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:46.884822513+00:00 stderr F I1212 16:16:46.884274 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-controller-manager-operator", Name:"kube-controller-manager-operator", UID:"09857aec-2c93-4f0d-9e38-a820bd5b8362", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'MasterNodesReadyChanged' All master nodes are ready 2025-12-12T16:16:46.930108538+00:00 stderr F I1212 16:16:46.929392 1 status_controller.go:230] clusteroperator/kube-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:46Z","message":"NodeControllerDegraded: All master nodes are ready","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:17:05Z","message":"NodeInstallerProgressing: 1 node is at revision 7","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T08:06:09Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 7","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:55Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:55Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:16:46.952266089+00:00 stderr F I1212 16:16:46.948586 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:46.970476714+00:00 stderr F I1212 16:16:46.970400 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-controller-manager-operator", Name:"kube-controller-manager-operator", UID:"09857aec-2c93-4f0d-9e38-a820bd5b8362", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-controller-manager changed: Degraded changed from True to False ("NodeControllerDegraded: All master nodes are ready") 2025-12-12T16:16:47.147709471+00:00 stderr F I1212 16:16:47.147652 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:47.241086431+00:00 stderr F I1212 16:16:47.241030 1 base_controller.go:82] Caches are synced for CertRotationController 2025-12-12T16:16:47.241140542+00:00 stderr F I1212 16:16:47.241124 1 base_controller.go:119] Starting #1 worker of CertRotationController controller ... 2025-12-12T16:16:47.241240484+00:00 stderr F I1212 16:16:47.241225 1 base_controller.go:82] Caches are synced for SATokenSignerController 2025-12-12T16:16:47.241276945+00:00 stderr F I1212 16:16:47.241266 1 base_controller.go:119] Starting #1 worker of SATokenSignerController controller ... 
2025-12-12T16:16:47.343714046+00:00 stderr F I1212 16:16:47.343107 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:47.541921255+00:00 stderr F I1212 16:16:47.541847 1 request.go:752] "Waited before sending request" delay="1.199041474s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/services?limit=500&resourceVersion=0" 2025-12-12T16:16:47.548517686+00:00 stderr F I1212 16:16:47.545119 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:47.753222564+00:00 stderr F I1212 16:16:47.752712 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:47.842763110+00:00 stderr F I1212 16:16:47.842282 1 base_controller.go:82] Caches are synced for GuardController 2025-12-12T16:16:47.842763110+00:00 stderr F I1212 16:16:47.842353 1 base_controller.go:119] Starting #1 worker of GuardController controller ... 2025-12-12T16:16:47.842799991+00:00 stderr F I1212 16:16:47.842777 1 base_controller.go:82] Caches are synced for kube-controller-manager-StaticPodState 2025-12-12T16:16:47.842808991+00:00 stderr F I1212 16:16:47.842801 1 base_controller.go:119] Starting #1 worker of kube-controller-manager-StaticPodState controller ... 2025-12-12T16:16:47.842945585+00:00 stderr F I1212 16:16:47.842919 1 base_controller.go:82] Caches are synced for kube-controller-manager-InstallerState 2025-12-12T16:16:47.842982776+00:00 stderr F I1212 16:16:47.842971 1 base_controller.go:119] Starting #1 worker of kube-controller-manager-InstallerState controller ... 2025-12-12T16:16:47.946030301+00:00 stderr F I1212 16:16:47.945962 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:48.161325008+00:00 stderr F I1212 16:16:48.160690 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:48.241680740+00:00 stderr F I1212 16:16:48.240461 1 base_controller.go:82] Caches are synced for MissingStaticPodController 2025-12-12T16:16:48.241680740+00:00 stderr F I1212 16:16:48.241105 1 base_controller.go:119] Starting #1 worker of MissingStaticPodController controller ... 2025-12-12T16:16:48.241680740+00:00 stderr F I1212 16:16:48.241255 1 base_controller.go:82] Caches are synced for WorkerLatencyProfile 2025-12-12T16:16:48.241680740+00:00 stderr F I1212 16:16:48.241277 1 base_controller.go:119] Starting #1 worker of WorkerLatencyProfile controller ... 2025-12-12T16:16:48.243299779+00:00 stderr F I1212 16:16:48.242858 1 base_controller.go:82] Caches are synced for kube-controller-manager 2025-12-12T16:16:48.243299779+00:00 stderr F I1212 16:16:48.242875 1 base_controller.go:119] Starting #1 worker of kube-controller-manager controller ... 2025-12-12T16:16:48.243299779+00:00 stderr F I1212 16:16:48.242902 1 base_controller.go:82] Caches are synced for TargetConfigController 2025-12-12T16:16:48.243299779+00:00 stderr F I1212 16:16:48.242907 1 base_controller.go:119] Starting #1 worker of TargetConfigController controller ... 
2025-12-12T16:16:48.243299779+00:00 stderr F I1212 16:16:48.242922 1 base_controller.go:82] Caches are synced for ConfigObserver 2025-12-12T16:16:48.243299779+00:00 stderr F I1212 16:16:48.242925 1 base_controller.go:119] Starting #1 worker of ConfigObserver controller ... 2025-12-12T16:16:48.243515374+00:00 stderr F I1212 16:16:48.243365 1 base_controller.go:82] Caches are synced for RevisionController 2025-12-12T16:16:48.243515374+00:00 stderr F I1212 16:16:48.243423 1 base_controller.go:119] Starting #1 worker of RevisionController controller ... 2025-12-12T16:16:48.243515374+00:00 stderr F I1212 16:16:48.243475 1 base_controller.go:82] Caches are synced for Installer 2025-12-12T16:16:48.243515374+00:00 stderr F I1212 16:16:48.243483 1 base_controller.go:119] Starting #1 worker of Installer controller ... 2025-12-12T16:16:48.251318565+00:00 stderr F I1212 16:16:48.243344 1 base_controller.go:82] Caches are synced for PruneController 2025-12-12T16:16:48.251318565+00:00 stderr F I1212 16:16:48.245687 1 base_controller.go:119] Starting #1 worker of PruneController controller ... 2025-12-12T16:16:48.251318565+00:00 stderr F I1212 16:16:48.247627 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:16:48.356564744+00:00 stderr F I1212 16:16:48.354376 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:16:48.454377852+00:00 stderr F I1212 16:16:48.453459 1 base_controller.go:82] Caches are synced for KubeControllerManagerStaticResources-StaticResources 2025-12-12T16:16:48.454377852+00:00 stderr F I1212 16:16:48.453503 1 base_controller.go:119] Starting #1 worker of KubeControllerManagerStaticResources-StaticResources controller ... 2025-12-12T16:16:48.542805541+00:00 stderr F I1212 16:16:48.541454 1 request.go:752] "Waited before sending request" delay="2.086574143s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/installer-sa" 2025-12-12T16:16:49.742241534+00:00 stderr F I1212 16:16:49.740408 1 request.go:752] "Waited before sending request" delay="1.286195511s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager" 2025-12-12T16:16:50.142829664+00:00 stderr F I1212 16:16:50.142372 1 warnings.go:110] "Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" 2025-12-12T16:16:50.143972102+00:00 stderr F I1212 16:16:50.143477 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-controller-manager-operator", Name:"kube-controller-manager-operator", UID:"09857aec-2c93-4f0d-9e38-a820bd5b8362", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SATokenSignerControllerOK' found expected kube-apiserver endpoints 2025-12-12T16:16:55.922271745+00:00 stderr F I1212 16:16:55.905876 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.905823853 +0000 UTC))" 2025-12-12T16:16:55.922271745+00:00 stderr F I1212 16:16:55.916467 1 tlsconfig.go:181] "Loaded client CA" index=1 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.916394791 +0000 UTC))" 2025-12-12T16:16:55.922271745+00:00 stderr F I1212 16:16:55.916495 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.916486314 +0000 UTC))" 2025-12-12T16:16:55.922271745+00:00 stderr F I1212 16:16:55.916512 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.916501644 +0000 UTC))" 2025-12-12T16:16:55.922271745+00:00 stderr F I1212 16:16:55.916525 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.916516594 +0000 UTC))" 2025-12-12T16:16:55.922271745+00:00 stderr F I1212 16:16:55.916541 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.916531925 +0000 UTC))" 2025-12-12T16:16:55.922271745+00:00 stderr F I1212 16:16:55.916554 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.916546075 +0000 UTC))" 2025-12-12T16:16:55.922271745+00:00 stderr F I1212 16:16:55.916568 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.916559475 +0000 UTC))" 2025-12-12T16:16:55.922271745+00:00 stderr F I1212 16:16:55.916609 1 tlsconfig.go:181] "Loaded client CA" index=8 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.916572896 +0000 UTC))" 2025-12-12T16:16:55.922271745+00:00 stderr F I1212 16:16:55.916627 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:16:55.916617887 +0000 UTC))" 2025-12-12T16:16:55.922271745+00:00 stderr F I1212 16:16:55.916912 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-kube-controller-manager-operator.svc\" [serving] validServingFor=[metrics.openshift-kube-controller-manager-operator.svc,metrics.openshift-kube-controller-manager-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:12 +0000 UTC to 2027-11-02 07:52:13 +0000 UTC (now=2025-12-12 16:16:55.916891744 +0000 UTC))" 2025-12-12T16:16:55.922271745+00:00 stderr F I1212 16:16:55.917088 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556206\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2028-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:16:55.917069788 +0000 UTC))" 2025-12-12T16:17:00.749144589+00:00 stderr F I1212 16:17:00.748637 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:17:01.066058756+00:00 stderr P I1212 16:17:01.065813 1 core.go:352] ConfigMap "openshift-kube-controller-manager/client-ca" changes: {"data":{"ca-bundle.crt":"-----BEGIN CERTIFICATE-----\nMIIDMDCCAhigAwIBAgIIIzF/30wVgUkwDQYJKoZIhvcNAQELBQAwNjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSAwHgYDVQQDExdhZG1pbi1rdWJlY29uZmlnLXNpZ25lcjAe\nFw0yNTExMDIwNzM0MDdaFw0zNTEwMzEwNzM0MDdaMDYxEjAQBgNVBAsTCW9wZW5z\naGlmdDEgMB4GA1UEAxMXYWRtaW4ta3ViZWNvbmZpZy1zaWduZXIwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDEaFvaxE/Ah0Q+4T67KuL6N5MoncMcfqtm\njKd8txx/b3t8o2WCAMF0IKDNMDDobraupmimcAQwOWen0WJzp3DqjVAIKabrG/DZ\nXqsx3xVHxhSvFOKEFQbiFu6HL0FvXs1bsMkm5YAcM/voHkGHefR+5YEgpgTuhZ6a\n9muG9cxUjlZ/BmMP3UwsgmRfxQ7TG3Ixf/mp++cLxi114b8ld8S4XtVuG//82BzB\nvk3J6+7tnRjli/AHSm0fx7ZvgRPY1b1IGSvGUMc6Qrc+nim/Ufd017TeFlkwKIRP\nPnUGuz0S/5Rz9XMoWJ/OHi/vB0eQs3pyqHBDPgTYCt1NZUO9nN7tAgMBAAGjQjBA\nMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBT9Y6Mp\nr1Yg0NUI8hFWKTlX1CJd6zANBgkqhkiG9w0BAQsFAAOCAQEAWD6f4r0mm7D7+uVn\nCs3w3zq0JIVBUtBTXYuSbgpnpmSVALRCSnRyguNx37SHU2qXVF7+hsaWYuO/B6iR\nZ5uZ6fkLEEFI5YN7q1HBEPBRaBFtJ7fSOBl9iSsHI11DX53+wRhJR319P3fZ18eq\nGwTdUHTy+L9ec1NjaJvOz2eJEVB3O2A9ySh+Rhdv75mFqTbNvxyf5fjw7OHDd5ti\nWPCT1UzyXUXpE8ET6HA59gQO3Ix/VPzZTpNWX1FAXDYpYFkK1t9Ifzjdqf3/P+uP\nvwMtUNixJg8RYhfRNZ4RbfULWU9Y0DpadRVX5WppGBTRNAAgmNGBYPPR7HuxVGx1\nReJ2Bg==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIIHuh6I9HTH+QwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM1WhcNMjYx\nMDI4MDgxNzM2WjAmMSQwIgYDVQQDDBtrdWJlLWNzci1zaWduZXJfQDE3NjIwNzE0\nNTUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb91UVWMxGg+Q7+o+5\ntt2z/Oq+7Mc1SNGJ4oF0Y99WM1Y7tIo5NOKWiIXE16rGuzY4+D9O53fh14ngzJm3\nWh3GjCGvr8/2W7J1BblXwuKwDuRl+OfJvyPtETUeME42Y9V6XP/B60iaaEYhm5t7\nTDf7TmmjEpqeWix43KqHnpcW9Zr8tM+tHLrGwcHnb+z3LvGkQA7mbXsaHiuryCVx\nudxQYNKWgtAkw3OOtuVyJ2gGD7iYVni1jg7nc9ZhQOYBoYRbAw3zh36CY50dZA89\nhDKYsVVourd9xfAdwSmrcgtsVo1X0ucCpEEUYKEz3/udgk+Dgf2hy3flIMcg9kcI\nS8xzAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0G\nA1UdDgQWBBSGMLijow+n1t5asMgXimhNhoMUvDAfBgNVHSMEGDAWgBQwDwrhoAwS\n6q2GA5CnoIQZVVim5jANBgkqhkiG9w0BAQsFAAOCAQEAhDT7ncsDiLG3QyVtffPp\nTjWscZowuHbVwUjwpZt5OxQFonSJxNCnqrj4MlIqqxraH5nnsrW5FqWyWWeMmXpz\nbFkiVhPFCVGjmRN9V1LXjHDc4lufe3ixq+MvgtU1YL/WJBmUxxw5dPifTT15IApJ\n6stLJ0QtHBNeEIIroUFpDB+O7OJYZ85ed6o6gT4n/v9nxBaLsZNpO2TzaWfI0Bst\nFEoPsfPKgBvwg9+2GijlP/VyKmP2gFdFm25PWeROU1VZzPrEhaOliO+/YXHt32YU\nJkxTF/smrLzxRRbb507cuvWEilzud93YbHmAhAj1h0CpDdzjxYDr5zRYJSP7BV+6\nUA==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDiTCCAnGgAwIBAgIIOsA0z3vOTSgwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM0WhcNMjcx\nMDIzMDgxNzM1WjBSMVAwTgYDVQQDDEdvcGVuc2hpZnQta3ViZS1jb250cm9sbGVy\nLW1hbmFnZXItb3BlcmF0b3JfY3NyLXNpZ25lci1zaWduZXJAMTc2MjA3MTQ1NTCC\nASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPY8aATxrjWpRKfnFU7BgmD1\nkNEst9RVY3fzGsARc5tF3x+zhl9BbsoJ9NAncCj+KnkAIYk4pJMU6BHIWIakpvqV\nLsnlPM45VU8ocwOWb5Z7g88YdqHGRWeZqZt/rPXmH/846iVGDstB0YQWgKKKK97X\nvjKMsq9ALSVj8gRWai7B7MVP/bZ4FgeqsYq6zIH9XKdPO8ev20qffrob4nmLGHdJ\nikAliwIy6nkVYrATKOS8t56votMD7xuFQ8rM6uQ0YVejA5rE/Tmq4/NsFpfFfkCP\nMU0vO2XwqCBV81XKD00SmW9MXIxbTq5lU9YjjE1sDFREtN4uZL4nEDa2+wnvIxEC\nAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O\nBBYEFDAPCuGgDBLqrYYDkKeghBlVWKbmMB8GA1UdIwQYMBaAFDAPCuGgDBLqrYYD\nkKeghBlVWKbmMA0GCSqGSIb3DQEBCwUAA4IBAQDS31/o3/4/9DBLFVcLvIwOQCK1\n0ZwJUa/8iK5GaMlMH6cNfvVZBrb4SwTVx0WXI5wrIXrlvYc+PtXL0MJeyIJmMpoU\nRyQAJZsh8cckeQjghV2Pf7wMfEbHudKTp8uoQDUBntkfNhJa4pPxmNWuhOrlvdB5\nEF/6IGviKAdSy0jcNpscvD3W0oSpCYRW0Ki/25LaFvIqP2Xy/cNJlWhzJWqZbK6k\nR9I4knhvIv/JYmppOVXw1rEvP+8Pn8UF2oSfFXcN5W+j4YIIhrAUnjAbaflpyX8k\nAUEKdtgVNNe7RlW9nQrhO8GqFJItitBbNtVuSkw9XIlQ0gc40E7mgB7Mjnbu\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDQjCCAiqgAwIBAgIICN23rtJ7PrYwDQYJKoZIhvcNAQELBQAwPzESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSkwJwYDVQQDEyBrdWJlLWFwaXNlcnZlci10by1rdWJlbGV0\nLXNpZ25lcjAeFw0yNTExMDIwNzM0MTFaFw0yNjExMDIwNzM0MTFaMD8xEjAQBgNV\nBAsTCW9wZW5zaGlmdDEpMCcGA1UEAxMga3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxl\ndC1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDzRlKEKNEy\nCyd+PPBkdzQZd3BeUb0b2qi1h8fbUiSNENrOpafKxxAHcr3a4KWB6sKhi28r14mF\nxcJ2Yb92/jLpkS15p629AUrGXxKuL8QtkBsY3dH0CqKMBedO6oxodva9Avc+3DMI\nvvYBJFy+4on/0JbM54fduvDmcEmhBtgRItK3Z87VbhemVPj7uDi9EV381uRMlmq4\ncgtD5mfS1yeRu0ut5IIr7/PN1G+93slLGQkHveqWlsFrDYd8Qm5PqirRBYy+18RC\nmEuNirFX3yPrEGwMvRlJyFia0RKuK69bFL2vduI5Wu7h/6VKP0/vEpEYqI6bJYoV\nbUjA2vqrV/1VAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTAD\nAQH/MB0GA1UdDgQWBBRNj9dsSIhfwKZ491Q/XZYt2lRWLzANBgkqhkiG9w0BAQsF\nAAOCAQEANmt7a5N+zch83D5+YeC4VQoFvpkHPKEYYT0EM/ykjch3453RmQWQEwkj\nVvstV1U16YpnEI61l1s347RHi64SwtlwV6tiNCpopDF2u3Bb+eiJqrlIC69EFnZE\n1426AVmZZq3sWu3eKB0HgT5u6B1rErSTl3c4hK4SiDsWWlVktBSN0BS4cD+urSAF\nc673/wLKCjq2I+9i3Wv2K7Ton3w5oaETE7lgQyImbKOVhJhFrPGu9fKXaeWlyXGY\nj7tz68vNTvecRynKrmzUJ9BBMfAXTrCowitzjBjanFitgXK4DnQMkb+8lv2Txb/n\nkB7RzcFDyIVd3g5XWBujR3fkQFWsNQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDlTCCAn2gAwIBAgIIJ6CFEe7+79cwDQYJKoZIhvcNAQELBQAwWDFWMFQGA1UE\nAwxNb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtYXBpc2Vy\ndmVyLXRvLWt1YmVsZXQtc2lnbmVyQDE3NjU1NTYyMDkwHhcNMjUxMjEyMTYxNjQ4\nWhcNMjYxMjEyMTYxNjQ5WjBYMVYwVAYDVQQDDE1vcGVuc2hpZnQta3ViZS1hcGlz\nZXJ2ZXItb3BlcmF0b3Jfa3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxldC1zaWduZXJA\nMTc2NTU1NjIwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFdeL5w\n1lJs3DvHYhLuNjrkSOLA4aXRwP85WxI5EZ4JKmSU1IxWlKucnS96ghUWuJudciUT\nGeDw1fYjUOvv+YnMhcM5avzLMmc/4JwRwmPsBNqeS6NSZsJYasHBQqT5lihBnZmU\nxRCSGpOAJqL/tWMsC3MxHG41AQunmbpk4RSZdPjfiJ3U8Gty9rnppq9GfZ9n+LxL\niIkodGqPe95J2csyNuLpDmOlhA5x7/miPLT7Wtp/hN/s2DSgpKQEWIHEsEVlfU+q\nH+GO/W1yWO+dDthJ2/yGn4ZQVe2riuw5uUcxjY27wkobhZ25/9brhqAKxweioGLf\nB32mvwrIarjuWKsCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQF\nMAMBAf8wHQYDVR0OBBYEFAl1BlYxjAq7rEsUpr186nCtx13/MB8GA1UdIwQYMBaA\nFAl1BlYxjAq7rEsUpr186nCtx13/MA0GCSqGSIb3DQEBCwUAA4IBAQCu3dv4kIwz\nu5DkLCS1z/I9QcxrEZmJs72LUWpri/eyvFxK5LrC4bU0d3LZXdjQUxLhrk1A8qD6\ne7QoqzRf9QdNRUiU3ilpwtANd6NwmIzd2PrcalBdoglrtDpBz5VAZ0j26YJAcpyn\nXbMpaDDOgz247mp4Ts1wHVjQa0H0bFAtuKbab+R+EFWCVc77MSRXVDiQndg/gtdZ\nEekPhH4kNhFT2c74uf32ICivfylG79g0sGNUVO0SPLup4psWmGf2pxI+HGftIT8p\ni0idY7Ij5JlIyQ8ypGbD7AqA1bRwcT6o8Al2iQw349XO694SJUNs10QGwuVKdHqL\nJC64gPwOl26J\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDNDCCAhygAwIBAgIILvKlXd2YBKIwDQYJKoZIhvcNAQELBQAwODESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVy\nMB4XDTI1MTEwMjA3MzQxMFoXDTI2MTEwMjA3MzQxMFowODESMBAGA1UECxMJb3Bl\nbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyMIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsOnFEdxnqqE7l+5NxN82TBBycC2F\n5cUboCcUVr211lubqmjCusnJzzz+6rKpO8uFqqGeu4C7rGpudOaK52IZhG0WP8b7\n0raQhnM4DV8t2SHDV4GhFUiuNE+b4FJrZ6jiljQo2g9ZeeCZgdmmBrIFHBXDFzEc\nA1RPScqOtvBbbH064Zd267gOmVPJnWmxDXo6X/RGYCm1YUS6FQ2WWpl707ComvgZ\nAvWGSA4H1sZirMQ+ug3bctkLb+SiXUzf+tLnGIHPeqDNfMrNUhxBl6dDhlMbUIzY\nrVxgDD8y3i7eg5i5HG8yntl8epgs2gn47wfavfjLqATBlciJPZY6Qv3BFQIDAQAB\no0IwQDAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\nWi6wmZ50AVdwxaiO3WjTvp82+sEwDQYJKoZIhvcNAQELBQADggEBAA4siMWwOGL5\nCiCxbiJsuKMbCLlFR6JaD43FWnUCZDoe7np7W76Oh4py4Zht7XlZKotcXrrfRYVO\ndVht66PCbSujy375p/B6c3isG4h/1cNSGDm1uhAkHXGZ88S2wSjKT5YJ/HUAkvyj\nadQgZeO7Q60YBSDE/67Ldq1zqvBrMF2k8pF49p1AdAtf4OSDzIaGGPUQJTFExA0E\n03xMlOPNhYZ8MgFT2XE6nRT74lCAfK9krAsZLtFuAtp/14t013PD0FqTTQRUmuSj\nO6pJKDTH8tZ3ieXxSzRR+j4p5hkHaehgQVyUbwiw8WVXkd6NcWR6yQcSeqIsTSCD\neMDdTmmKyuo=\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhzCCAm+gAwIBAgIID5n6gpWYc8YwDQYJKoZIhvcNAQELBQAwUTFPME0GA1UE\nAwxGb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtY29udHJv\nbC1wbGFuZS1zaWduZXJAMTc2NTU1NjIwOTAeFw0yNTEyMTIxNjE2NDhaFw0yNjAy\nMTAxNjE2NDlaMFExTzBNBgNVBAMMRm9wZW5zaGlmdC1rdWJlLWFwaXNlcnZlci1v\ncGVyYXRvcl9rdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyQDE3NjU1NTYyMDkwggEi\nMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtQLJD7hc4OcHS2GPLdn9xsKkM\nAqHIcDr2UgXYWTVShY348nGviBJGCBWyKqQPpM6u4zIbS4xHMeQ3fnVCRvti8Ggf\n7zKigxyoxwcx+f4z9L5fCO6RbScmwmaYchePaAMT/7sNBt8NIKyFfep/bExW2UPw\nrb0qUqAbmBse+Azrl0V+UHNexaG3V 2025-12-12T16:17:01.066140668+00:00 stderr F msOEfCJORAVgIo8SNIAG8jHe6+r3BtnwTsC\n59znIuoxDrBL9cq82ZZGQ++jVx4AE6JM0Lj/UBPVsJ06+X/829a1PiHuqlY2oN/p\nm+xCLSQgi59nrDifJLQXNBjahDW8ccWnbkvkUPjO8/OzS2nJb6uXihez5m2xAgMB\nAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBR9PxK41FQkWasZMK1vVKm6UNhzHzAfBgNVHSMEGDAWgBR9PxK41FQkWasZMK1v\nVKm6UNhzHzANBgkqhkiG9w0BAQsFAAOCAQEAfHCzxMKk00cOsThQkkL2trlY5tl2\n9wXEd/62Fh8EoOhNmCIpyPYLWMDnD2GB2BS7J5S+zcqby2+7s8Etub0gpvbN2Ocq\nHss3f+WcAFm7t9hiQrJ4gPYSkwEQGCwJ3ueGIEmPyyrTQPTmzNYudSdXt1WSrTpO\nO1sKWdQro5M0V4U9Z6MWGnG4nIZljqHWgVkZXkluh6Rvshoen8rhUNa6VV3aMHcZ\n94dtvZRSye9RsOZwZygsG/HU2+GcnKKYvqkIo8FZVAYTyu3rlOlT9dmmZpxwukb6\nADjin/tgzt7r0FiU+Z9uYqI3SoFog9pv+mlqpuf3zLm+Q3DKwBpxaxn5nQ==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDSDCCAjCgAwIBAgIIV//3Qv2OxM4wDQYJKoZIhvcNAQELBQAwQjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSwwKgYDVQQDEyNrdWJlbGV0LWJvb3RzdHJhcC1rdWJlY29u\nZmlnLXNpZ25lcjAeFw0yNTExMDIwNzM0MDhaFw0zNTEwMzEwNzM0MDhaMEIxEjAQ\nBgNVBAsTCW9wZW5zaGlmdDEsMCoGA1UEAxMja3ViZWxldC1ib290c3RyYXAta3Vi\nZWNvbmZpZy1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc\nZBn9WZ5wLPck6+g3f4p0NBGyE6LaXnWavXx9m0sVdTVQooknndmKufeYkXGZ5Lb+\nfbMAp/6swgSJ1DjdBj06rCqJEZfdZl3uZoD/Th4ha2Phl12bXaNYLiuOu5BOZ3UW\n08y1Wab9Y9zc0o4Z71pHH4o9TH3QPNT6BqAz4kkgD6t1r/R7E7lrZbx+7e+0JBAW\nRgufaFOX1AYU5B4+pSM21eJY7oP1P9I4DMeeJW39opCCHAuUQgHpOV1YPtRqEPJ4\n9matas8qm5qIMIPbGEGFckSJqgny9YCfHaLezJtZMIHJgz5LW4H91gQCGvfSbLtH\nYxYO/PcTXQCnDYNwqf29AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBSCg7Y8QkzypoNAqYVPNK5YWMVIXzANBgkqhkiG\n9w0BAQsFAAOCAQEAsP1NR5ZgC7F5FvoU2CXla4FufdeYTk4bjjNoFJMqnvI5gcY6\nJDxP1Rl2YBHCpRxngtBFxw3Xoe8UnBKzZWsS5wLUYRloprhGVBSLM0vqJJOvP7M0\njt3SLuB7h0dG2GO9yQ4y10+xVWxP5Os9wcbQcRgTQKL3gHmCq4aQN1cqJSxyJ/ut\nlbfYlM/xBcfLMY5Leeas6y2FPCFIEONh1U9FJZlF3YkhPp+XD7aePtC4tJqsokMc\nP80IwPn54aDT9akRPsOteB6C+xSAz2TlfWaJ/l/x9yXK+HJrRhMartqyN511SeEd\nDpNcMW9qPTjJzBj+N3f0ZfvbTmhVSvV65ZEtAw==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhTCCAm2gAwIBAgIIfksp5LDEMMgwDQYJKoZIhvcNAQELBQAwUDFOMEwGA1UE\nAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX25vZGUtc3lzdGVt\nLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MB4XDTI1MTEwMjA3NTEyN1oXDTI4MTEw\nMTA3NTEyOFowUDFOMEwGA1UEAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9w\nZXJhdG9yX25vZGUtc3lzdGVtLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA402A+B9GaYmzBVLJEyrGvytCY0Vb\nvvIKHW2kxl9IFN3+LNZHW/mKeJfz/hBTm2bs+6uRCDlMSyONDlVUWVsuE+q0+F42\n0n3VyxWRSrDZ2ur5oNxmoBSsHRM+PxccQ6X3JTZyO397LHNOzxAs/Es+St8A8sbY\nGLc1lNqeOLvwAOT5d2PrFlYCAfXYs/UVIaio846jidKKN1f8Z6W5pgdAHuTXbyBQ\nLDQh6s43TBPhww1KszmcwURjEBDCT6KlhsM/quMd9XlMU0ZEAMf6XxsqvW8ia8C8\nF+RNAaGkwmiS4qZ+hJ4KIUnWM84j+bsyNBqlHFKi1e7LsKRyjnQ288FqIQIDAQAB\no2MwYTAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\n9O+dX0K7dzD13bshuR70sHbUAeAwHwYDVR0jBBgwFoAU9O+dX0K7dzD13bshuR70\nsHbUAeAwDQYJKoZIhvcNAQELBQADggEBADPRGSn1U/YwBUkpU7vTzsxqLaVJW21o\n6hV/W2IjjGNGqp6c2kSH/3ZGSEjNwIJqKRFpC2gmTPgAqnC4nDosOHx5F5HXTmrU\n1l2Ivcm1Ep+t/zBgNHjBi3yommx8n2iTTdakpQaq7/u1s0I4UiRqXydjoGXp7H1C\naAsmRlK8ovgEAWzItjeMBzy65wqiStPBK+XAIddqznHCxrRyH5xk3HcnyMG4GDWl\nrogdK8yTGCuZVCvGfe9Hwm8tyYrxDRNvRLTc0ssTonAwnR/7IzaVVc9Pp0svCynJ\n6VX3FGhgWwDVWeajj8yrXeR42az/Rr1TAAOZtJMW+4hIkaU0/+msvgw=\n-----END CERTIFICATE-----\n"},"metadata":{"creationTimestamp":"2025-11-02T07:51:50Z","managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ca-bundle.crt":{}},"f:metadata":{"f:annotations":{".":{},"f:openshift.io/owning-component":{}}}},"manager":"cluster-kube-apiserver-operator","operation":"Update","time":"2025-12-12T16:16:58Z"}],"resourceVersion":null,"uid":"60afc254-0d91-486c-a410-610b8f84e03e"}} 2025-12-12T16:17:01.066864156+00:00 stderr F I1212 16:17:01.066816 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-controller-manager-operator", Name:"kube-controller-manager-operator", UID:"09857aec-2c93-4f0d-9e38-a820bd5b8362", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/client-ca -n openshift-kube-controller-manager: 2025-12-12T16:17:01.066864156+00:00 stderr F cause by changes in data.ca-bundle.crt 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.315671 1 tlsconfig.go:181] "Loaded client CA" 
index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.315635343 +0000 UTC))" 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.316169 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.316156916 +0000 UTC))" 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.316200 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.316175666 +0000 UTC))" 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.316218 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.316205857 +0000 UTC))" 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.316235 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.316223538 +0000 UTC))" 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.316250 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.316240578 +0000 UTC))" 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.316262 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.316254098 +0000 UTC))" 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.316275 1 tlsconfig.go:181] "Loaded client CA" index=7 
certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.316266979 +0000 UTC))" 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.316296 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.316279059 +0000 UTC))" 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.316310 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.3163026 +0000 UTC))" 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.316326 1 tlsconfig.go:181] "Loaded client CA" index=10 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file,client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file" certDetail="\"openshift-kube-apiserver-operator_aggregator-client-signer@1762159055\" [] issuer=\"\" (2025-11-03 08:37:34 +0000 UTC to 2026-11-03 08:37:35 +0000 UTC (now=2025-12-12 16:17:46.31631742 +0000 UTC))" 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.316534 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/var/run/secrets/serving-cert/tls.crt::/var/run/secrets/serving-cert/tls.key" certDetail="\"metrics.openshift-kube-controller-manager-operator.svc\" [serving] validServingFor=[metrics.openshift-kube-controller-manager-operator.svc,metrics.openshift-kube-controller-manager-operator.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:12 +0000 UTC to 2027-11-02 07:52:13 +0000 UTC (now=2025-12-12 16:17:46.316521335 +0000 UTC))" 2025-12-12T16:17:46.317790816+00:00 stderr F I1212 16:17:46.316684 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556206\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556205\" (2025-12-12 15:16:45 +0000 UTC to 2028-12-12 15:16:45 +0000 UTC (now=2025-12-12 16:17:46.316674069 +0000 UTC))" 2025-12-12T16:17:49.512949221+00:00 stderr P I1212 16:17:49.510527 1 core.go:352] ConfigMap "openshift-kube-controller-manager/client-ca" changes: {"data":{"ca-bundle.crt":"-----BEGIN 
CERTIFICATE-----\nMIIDMDCCAhigAwIBAgIIIzF/30wVgUkwDQYJKoZIhvcNAQELBQAwNjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSAwHgYDVQQDExdhZG1pbi1rdWJlY29uZmlnLXNpZ25lcjAe\nFw0yNTExMDIwNzM0MDdaFw0zNTEwMzEwNzM0MDdaMDYxEjAQBgNVBAsTCW9wZW5z\naGlmdDEgMB4GA1UEAxMXYWRtaW4ta3ViZWNvbmZpZy1zaWduZXIwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDEaFvaxE/Ah0Q+4T67KuL6N5MoncMcfqtm\njKd8txx/b3t8o2WCAMF0IKDNMDDobraupmimcAQwOWen0WJzp3DqjVAIKabrG/DZ\nXqsx3xVHxhSvFOKEFQbiFu6HL0FvXs1bsMkm5YAcM/voHkGHefR+5YEgpgTuhZ6a\n9muG9cxUjlZ/BmMP3UwsgmRfxQ7TG3Ixf/mp++cLxi114b8ld8S4XtVuG//82BzB\nvk3J6+7tnRjli/AHSm0fx7ZvgRPY1b1IGSvGUMc6Qrc+nim/Ufd017TeFlkwKIRP\nPnUGuz0S/5Rz9XMoWJ/OHi/vB0eQs3pyqHBDPgTYCt1NZUO9nN7tAgMBAAGjQjBA\nMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBT9Y6Mp\nr1Yg0NUI8hFWKTlX1CJd6zANBgkqhkiG9w0BAQsFAAOCAQEAWD6f4r0mm7D7+uVn\nCs3w3zq0JIVBUtBTXYuSbgpnpmSVALRCSnRyguNx37SHU2qXVF7+hsaWYuO/B6iR\nZ5uZ6fkLEEFI5YN7q1HBEPBRaBFtJ7fSOBl9iSsHI11DX53+wRhJR319P3fZ18eq\nGwTdUHTy+L9ec1NjaJvOz2eJEVB3O2A9ySh+Rhdv75mFqTbNvxyf5fjw7OHDd5ti\nWPCT1UzyXUXpE8ET6HA59gQO3Ix/VPzZTpNWX1FAXDYpYFkK1t9Ifzjdqf3/P+uP\nvwMtUNixJg8RYhfRNZ4RbfULWU9Y0DpadRVX5WppGBTRNAAgmNGBYPPR7HuxVGx1\nReJ2Bg==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIIHuh6I9HTH+QwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM1WhcNMjYx\nMDI4MDgxNzM2WjAmMSQwIgYDVQQDDBtrdWJlLWNzci1zaWduZXJfQDE3NjIwNzE0\nNTUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb91UVWMxGg+Q7+o+5\ntt2z/Oq+7Mc1SNGJ4oF0Y99WM1Y7tIo5NOKWiIXE16rGuzY4+D9O53fh14ngzJm3\nWh3GjCGvr8/2W7J1BblXwuKwDuRl+OfJvyPtETUeME42Y9V6XP/B60iaaEYhm5t7\nTDf7TmmjEpqeWix43KqHnpcW9Zr8tM+tHLrGwcHnb+z3LvGkQA7mbXsaHiuryCVx\nudxQYNKWgtAkw3OOtuVyJ2gGD7iYVni1jg7nc9ZhQOYBoYRbAw3zh36CY50dZA89\nhDKYsVVourd9xfAdwSmrcgtsVo1X0ucCpEEUYKEz3/udgk+Dgf2hy3flIMcg9kcI\nS8xzAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0G\nA1UdDgQWBBSGMLijow+n1t5asMgXimhNhoMUvDAfBgNVHSMEGDAWgBQwDwrhoAwS\n6q2GA5CnoIQZVVim5jANBgkqhkiG9w0BAQsFAAOCAQEAhDT7ncsDiLG3QyVtffPp\nTjWscZowuHbVwUjwpZt5OxQFonSJxNCnqrj4MlIqqxraH5nnsrW5FqWyWWeMmXpz\nbFkiVhPFCVGjmRN9V1LXjHDc4lufe3ixq+MvgtU1YL/WJBmUxxw5dPifTT15IApJ\n6stLJ0QtHBNeEIIroUFpDB+O7OJYZ85ed6o6gT4n/v9nxBaLsZNpO2TzaWfI0Bst\nFEoPsfPKgBvwg9+2GijlP/VyKmP2gFdFm25PWeROU1VZzPrEhaOliO+/YXHt32YU\nJkxTF/smrLzxRRbb507cuvWEilzud93YbHmAhAj1h0CpDdzjxYDr5zRYJSP7BV+6\nUA==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDiTCCAnGgAwIBAgIIOsA0z3vOTSgwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM0WhcNMjcx\nMDIzMDgxNzM1WjBSMVAwTgYDVQQDDEdvcGVuc2hpZnQta3ViZS1jb250cm9sbGVy\nLW1hbmFnZXItb3BlcmF0b3JfY3NyLXNpZ25lci1zaWduZXJAMTc2MjA3MTQ1NTCC\nASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPY8aATxrjWpRKfnFU7BgmD1\nkNEst9RVY3fzGsARc5tF3x+zhl9BbsoJ9NAncCj+KnkAIYk4pJMU6BHIWIakpvqV\nLsnlPM45VU8ocwOWb5Z7g88YdqHGRWeZqZt/rPXmH/846iVGDstB0YQWgKKKK97X\nvjKMsq9ALSVj8gRWai7B7MVP/bZ4FgeqsYq6zIH9XKdPO8ev20qffrob4nmLGHdJ\nikAliwIy6nkVYrATKOS8t56votMD7xuFQ8rM6uQ0YVejA5rE/Tmq4/NsFpfFfkCP\nMU0vO2XwqCBV81XKD00SmW9MXIxbTq5lU9YjjE1sDFREtN4uZL4nEDa2+wnvIxEC\nAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O\nBBYEFDAPCuGgDBLqrYYDkKeghBlVWKbmMB8GA1UdIwQYMBaAFDAPCuGgDBLqrYYD\nkKeghBlVWKbmMA0GCSqGSIb3DQEBCwUAA4IBAQDS31/o3/4/9DBLFVcLvIwOQCK1\n0ZwJUa/8iK5GaMlMH6cNfvVZBrb4SwTVx0WXI5wrIXrlvYc+PtXL0MJeyIJmMpoU\nRyQAJZsh8cckeQjghV2Pf7wMfEbHudKTp8uoQDUBntkfNhJa4pPxmNWuhOrlvdB5\nEF/6IGviKAdSy0jcNpscvD3W0oSpCYRW0Ki/25LaFvIqP2Xy/cNJlWhzJWqZbK6k\nR9I4knhvIv/JYmppOVXw1rEvP+8Pn8UF2oSfFXcN5W+j4YIIhrAUnjAbaflpyX8k\nAUEKdtgVNNe7RlW9nQrhO8GqFJItitBbNtVuSkw9XIlQ0gc40E7mgB7Mjnbu\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDQjCCAiqgAwIBAgIICN23rtJ7PrYwDQYJKoZIhvcNAQELBQAwPzESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSkwJwYDVQQDEyBrdWJlLWFwaXNlcnZlci10by1rdWJlbGV0\nLXNpZ25lcjAeFw0yNTExMDIwNzM0MTFaFw0yNjExMDIwNzM0MTFaMD8xEjAQBgNV\nBAsTCW9wZW5zaGlmdDEpMCcGA1UEAxMga3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxl\ndC1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDzRlKEKNEy\nCyd+PPBkdzQZd3BeUb0b2qi1h8fbUiSNENrOpafKxxAHcr3a4KWB6sKhi28r14mF\nxcJ2Yb92/jLpkS15p629AUrGXxKuL8QtkBsY3dH0CqKMBedO6oxodva9Avc+3DMI\nvvYBJFy+4on/0JbM54fduvDmcEmhBtgRItK3Z87VbhemVPj7uDi9EV381uRMlmq4\ncgtD5mfS1yeRu0ut5IIr7/PN1G+93slLGQkHveqWlsFrDYd8Qm5PqirRBYy+18RC\nmEuNirFX3yPrEGwMvRlJyFia0RKuK69bFL2vduI5Wu7h/6VKP0/vEpEYqI6bJYoV\nbUjA2vqrV/1VAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTAD\nAQH/MB0GA1UdDgQWBBRNj9dsSIhfwKZ491Q/XZYt2lRWLzANBgkqhkiG9w0BAQsF\nAAOCAQEANmt7a5N+zch83D5+YeC4VQoFvpkHPKEYYT0EM/ykjch3453RmQWQEwkj\nVvstV1U16YpnEI61l1s347RHi64SwtlwV6tiNCpopDF2u3Bb+eiJqrlIC69EFnZE\n1426AVmZZq3sWu3eKB0HgT5u6B1rErSTl3c4hK4SiDsWWlVktBSN0BS4cD+urSAF\nc673/wLKCjq2I+9i3Wv2K7Ton3w5oaETE7lgQyImbKOVhJhFrPGu9fKXaeWlyXGY\nj7tz68vNTvecRynKrmzUJ9BBMfAXTrCowitzjBjanFitgXK4DnQMkb+8lv2Txb/n\nkB7RzcFDyIVd3g5XWBujR3fkQFWsNQ==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDlTCCAn2gAwIBAgIIJ6CFEe7+79cwDQYJKoZIhvcNAQELBQAwWDFWMFQGA1UE\nAwxNb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtYXBpc2Vy\ndmVyLXRvLWt1YmVsZXQtc2lnbmVyQDE3NjU1NTYyMDkwHhcNMjUxMjEyMTYxNjQ4\nWhcNMjYxMjEyMTYxNjQ5WjBYMVYwVAYDVQQDDE1vcGVuc2hpZnQta3ViZS1hcGlz\nZXJ2ZXItb3BlcmF0b3Jfa3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxldC1zaWduZXJA\nMTc2NTU1NjIwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFdeL5w\n1lJs3DvHYhLuNjrkSOLA4aXRwP85WxI5EZ4JKmSU1IxWlKucnS96ghUWuJudciUT\nGeDw1fYjUOvv+YnMhcM5avzLMmc/4JwRwmPsBNqeS6NSZsJYasHBQqT5lihBnZmU\nxRCSGpOAJqL/tWMsC3MxHG41AQunmbpk4RSZdPjfiJ3U8Gty9rnppq9GfZ9n+LxL\niIkodGqPe95J2csyNuLpDmOlhA5x7/miPLT7Wtp/hN/s2DSgpKQEWIHEsEVlfU+q\nH+GO/W1yWO+dDthJ2/yGn4ZQVe2riuw5uUcxjY27wkobhZ25/9brhqAKxweioGLf\nB32mvwrIarjuWKsCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQF\nMAMBAf8wHQYDVR0OBBYEFAl1BlYxjAq7rEsUpr186nCtx13/MB8GA1UdIwQYMBaA\nFAl1BlYxjAq7rEsUpr186nCtx13/MA0GCSqGSIb3DQEBCwUAA4IBAQCu3dv4kIwz\nu5DkLCS1z/I9QcxrEZmJs72LUWpri/eyvFxK5LrC4bU0d3LZXdjQUxLhrk1A8qD6\ne7QoqzRf9QdNRUiU3ilpwtANd6NwmIzd2PrcalBdoglrtDpBz5VAZ0j26YJAcpyn\nXbMpaDDOgz247mp4Ts1wHVjQa0H0bFAtuKbab+R+EFWCVc77MSRXVDiQndg/gtdZ\nEekPhH4kNhFT2c74uf32ICivfylG79g0sGNUVO0SPLup4psWmGf2pxI+HGftIT8p\ni0idY7Ij5JlIyQ8ypGbD7AqA1bRwcT6o8Al2iQw349XO694SJUNs10QGwuVKdHqL\nJC64gPwOl26J\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDNDCCAhygAwIBAgIILvKlXd2YBKIwDQYJKoZIhvcNAQELBQAwODESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVy\nMB4XDTI1MTEwMjA3MzQxMFoXDTI2MTEwMjA3MzQxMFowODESMBAGA1UECxMJb3Bl\nbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyMIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsOnFEdxnqqE7l+5NxN82TBBycC2F\n5cUboCcUVr211lubqmjCusnJzzz+6rKpO8uFqqGeu4C7rGpudOaK52IZhG0WP8b7\n0raQhnM4DV8t2SHDV4GhFUiuNE+b4FJrZ6jiljQo2g9ZeeCZgdmmBrIFHBXDFzEc\nA1RPScqOtvBbbH064Zd267gOmVPJnWmxDXo6X/RGYCm1YUS6FQ2WWpl707ComvgZ\nAvWGSA4H1sZirMQ+ug3bctkLb+SiXUzf+tLnGIHPeqDNfMrNUhxBl6dDhlMbUIzY\nrVxgDD8y3i7eg5i5HG8yntl8epgs2gn47wfavfjLqATBlciJPZY6Qv3BFQIDAQAB\no0IwQDAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\nWi6wmZ50AVdwxaiO3WjTvp82+sEwDQYJKoZIhvcNAQELBQADggEBAA4siMWwOGL5\nCiCxbiJsuKMbCLlFR6JaD43FWnUCZDoe7np7W76Oh4py4Zht7XlZKotcXrrfRYVO\ndVht66PCbSujy375p/B6c3isG4h/1cNSGDm1uhAkHXGZ88S2wSjKT5YJ/HUAkvyj\nadQgZeO7Q60YBSDE/67Ldq1zqvBrMF2k8pF49p1AdAtf4OSDzIaGGPUQJTFExA0E\n03xMlOPNhYZ8MgFT2XE6nRT74lCAfK9krAsZLtFuAtp/14t013PD0FqTTQRUmuSj\nO6pJKDTH8tZ3ieXxSzRR+j4p5hkHaehgQVyUbwiw8WVXkd6NcWR6yQcSeqIsTSCD\neMDdTmmKyuo=\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhzCCAm+gAwIBAgIID5n6gpWYc8YwDQYJKoZIhvcNAQELBQAwUTFPME0GA1UE\nAwxGb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtY29udHJv\nbC1wbGFuZS1zaWduZXJAMTc2NTU1NjIwOTAeFw0yNTEyMTIxNjE2NDhaFw0yNjAy\nMTAxNjE2NDlaMFExTzBNBgNVBAMMRm9wZW5zaGlmdC1rdWJlLWFwaXNlcnZlci1v\ncGVyYXRvcl9rdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyQDE3NjU1NTYyMDkwggEi\nMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtQLJD7hc4OcHS2GPLdn9xsKkM\nAqHIcDr2UgXYWTVShY348nGviBJGCBWyKqQPpM6u4zIbS4xHMeQ3fnVCRvti8Ggf\n7zKigxyoxwcx+f4z9L5fCO6RbScmwmaYchePaAMT/7sNBt8NIKyFfep/bExW2UPw\nrb0qUqAbmBse+Azrl0V+UHNexaG3V 2025-12-12T16:17:49.513056773+00:00 stderr F 
msOEfCJORAVgIo8SNIAG8jHe6+r3BtnwTsC\n59znIuoxDrBL9cq82ZZGQ++jVx4AE6JM0Lj/UBPVsJ06+X/829a1PiHuqlY2oN/p\nm+xCLSQgi59nrDifJLQXNBjahDW8ccWnbkvkUPjO8/OzS2nJb6uXihez5m2xAgMB\nAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBR9PxK41FQkWasZMK1vVKm6UNhzHzAfBgNVHSMEGDAWgBR9PxK41FQkWasZMK1v\nVKm6UNhzHzANBgkqhkiG9w0BAQsFAAOCAQEAfHCzxMKk00cOsThQkkL2trlY5tl2\n9wXEd/62Fh8EoOhNmCIpyPYLWMDnD2GB2BS7J5S+zcqby2+7s8Etub0gpvbN2Ocq\nHss3f+WcAFm7t9hiQrJ4gPYSkwEQGCwJ3ueGIEmPyyrTQPTmzNYudSdXt1WSrTpO\nO1sKWdQro5M0V4U9Z6MWGnG4nIZljqHWgVkZXkluh6Rvshoen8rhUNa6VV3aMHcZ\n94dtvZRSye9RsOZwZygsG/HU2+GcnKKYvqkIo8FZVAYTyu3rlOlT9dmmZpxwukb6\nADjin/tgzt7r0FiU+Z9uYqI3SoFog9pv+mlqpuf3zLm+Q3DKwBpxaxn5nQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIFWzCCA0OgAwIBAgIUQLy5hoffN9SGXMo67f/i/5XEwyEwDQYJKoZIhvcNAQEL\nBQAwPTESMBAGA1UECwwJb3BlbnNoaWZ0MScwJQYDVQQDDB5hZG1pbi1rdWJlY29u\nZmlnLXNpZ25lci1jdXN0b20wHhcNMjUxMjEyMTYxNzQyWhcNMzUxMjEwMTYxNzQy\nWjA9MRIwEAYDVQQLDAlvcGVuc2hpZnQxJzAlBgNVBAMMHmFkbWluLWt1YmVjb25m\naWctc2lnbmVyLWN1c3RvbTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB\nAJRtnlRieMPhCvStW7Uo/SGT7vUyHxVn8BYwaDaVxTUKO0kZRGBZ0gHPv8jnIZ0u\n9xNf88H2LEduWCso2Q+si1eaGR24oL/nxabCArbO4hHFtwsF4LFbA2v4iknUaW5d\nQWb4bl072Tx3O0Wly+wiVv7H93PVCyuLWmBeH80F+Cwt80s3RbtDthOHKjYK/a5W\naFRjDBzvmoWoK+/GxYjovUmgZCpLvElE9a2TmH1hExyn1ST15L8ESyah0e0vSZFW\n2odSnLavfmUw2HbS4lpy0peAGSa3R52E+rvF16qGuAGHIja09i/0GN/TM/aTrFRu\noyDcrAWgxNwU/ungVw8jX1+ReTC0ZkohS78CfvE5jhM6AcRCbMisGAFibd7VHbA3\nUot5wB7w/fAs7YPatGUdRNVYE4vQxfTYTRktYiSLSsPOEuHaqA6lBHLklssMGGAt\nPrFydiB76a4MaZrHgBNLVUpVT5txScHOmqTrU4m/Ix+SxgARUfhnN4QizhWNMhi1\nKxmHi9OQye+tTxpBLBbvagZFoQOkjMfHPABQkaORgdBwKn73V2psrwZpoZh5jDWZ\ny9RB3N/K+vsQvCmhjnX4rKHhcX/tLVUDcvN1XNLbPFJUoVpFqtSZb6yaQlHze+zL\nNSw9SUfOHN5+VuDW9O0ho/cDIV2w1e05pPZe7+SiSwEZAgMBAAGjUzBRMB0GA1Ud\nDgQWBBSLpZUyMU6+pBaqwQ1fQgx26OkbpjAfBgNVHSMEGDAWgBSLpZUyMU6+pBaq\nwQ1fQgx26OkbpjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4ICAQB8\nS/trc5KgMr4wUI8dKDSGKElBdqbMWIY0xPt1ANYj36eoA9Fj8k2qX+vxqSRw5NIU\nPuLASD2jk5vM7Chn29XLnEO0uZlFrdX6ywCGHqU00X6AKPBOaflmdiUo5/GvY0ll\nK6Rk+Y+jueATGDVseDzArN6DW22ZJ0M/7uwmJjW4VRmK0gPmz0PXBGrNXuR/e00n\nrXGvq3CqUw+g9lnR1JI6DSRk3iu5SgK4syO+B6P7Z7Cxuj2YDyYJEv4uTPeti0E9\nVn0UH3h0/GzDFu/ghy4bEhb/CX74FcoBx0geoZu3YYhSjL4gmRLnTwmL5Yg4d0R5\nKsFg3Ht5VBh757nG+aMBJb+aheFuu0z62XOQlpJC4+qx1e4cYU+jInKsnZYSptT7\nMmtGFjud4bmlSyDgJ0s+gfu+7PBeoowJi9sQSxlA4HFStbc/9Qh8eUxeZG1nGIYa\n+ACyCDzlVAt2+EOB8l+mxTFoBD1UcYG7E+2wepOpkz92zUqS5vlQOeZg1KlfHOcz\nAzrBCHw3+xYQN62WaNX97Beewz2y05I/CvSZfIW0U4CReywukh1V+66m5WWvA2Qo\nqXKlRZjy/rcLHXG557FT6EnQDoh2R+FEko6O1tJ4urkAD1/E1fY1U7qdkr9uvMlo\nvOrzaWzKi110hAolc+gWzOnIcG3nf1y7EMIVbBbHMw==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDSDCCAjCgAwIBAgIIV//3Qv2OxM4wDQYJKoZIhvcNAQELBQAwQjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSwwKgYDVQQDEyNrdWJlbGV0LWJvb3RzdHJhcC1rdWJlY29u\nZmlnLXNpZ25lcjAeFw0yNTExMDIwNzM0MDhaFw0zNTEwMzEwNzM0MDhaMEIxEjAQ\nBgNVBAsTCW9wZW5zaGlmdDEsMCoGA1UEAxMja3ViZWxldC1ib290c3RyYXAta3Vi\nZWNvbmZpZy1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc\nZBn9WZ5wLPck6+g3f4p0NBGyE6LaXnWavXx9m0sVdTVQooknndmKufeYkXGZ5Lb+\nfbMAp/6swgSJ1DjdBj06rCqJEZfdZl3uZoD/Th4ha2Phl12bXaNYLiuOu5BOZ3UW\n08y1Wab9Y9zc0o4Z71pHH4o9TH3QPNT6BqAz4kkgD6t1r/R7E7lrZbx+7e+0JBAW\nRgufaFOX1AYU5B4+pSM21eJY7oP1P9I4DMeeJW39opCCHAuUQgHpOV1YPtRqEPJ4\n9matas8qm5qIMIPbGEGFckSJqgny9YCfHaLezJtZMIHJgz5LW4H91gQCGvfSbLtH\nYxYO/PcTXQCnDYNwqf29AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBSCg7Y8QkzypoNAqYVPNK5YWMVIXzANBgkqhkiG\n9w0BAQsFAAOCAQEAsP1NR5ZgC7F5FvoU2CXla4FufdeYTk4bjjNoFJMqnvI5gcY6\nJDxP1Rl2YBHCpRxngtBFxw3Xoe8UnBKzZWsS5wLUYRloprhGVBSLM0vqJJOvP7M0\njt3SLuB7h0dG2GO9yQ4y10+xVWxP5Os9wcbQcRgTQKL3gHmCq4aQN1cqJSxyJ/ut\nlbfYlM/xBcfLMY5Leeas6y2FPCFIEONh1U9FJZlF3YkhPp+XD7aePtC4tJqsokMc\nP80IwPn54aDT9akRPsOteB6C+xSAz2TlfWaJ/l/x9yXK+HJrRhMartqyN511SeEd\nDpNcMW9qPTjJzBj+N3f0ZfvbTmhVSvV65ZEtAw==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDhTCCAm2gAwIBAgIIfksp5LDEMMgwDQYJKoZIhvcNAQELBQAwUDFOMEwGA1UE\nAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX25vZGUtc3lzdGVt\nLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MB4XDTI1MTEwMjA3NTEyN1oXDTI4MTEw\nMTA3NTEyOFowUDFOMEwGA1UEAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9w\nZXJhdG9yX25vZGUtc3lzdGVtLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA402A+B9GaYmzBVLJEyrGvytCY0Vb\nvvIKHW2kxl9IFN3+LNZHW/mKeJfz/hBTm2bs+6uRCDlMSyONDlVUWVsuE+q0+F42\n0n3VyxWRSrDZ2ur5oNxmoBSsHRM+PxccQ6X3JTZyO397LHNOzxAs/Es+St8A8sbY\nGLc1lNqeOLvwAOT5d2PrFlYCAfXYs/UVIaio846jidKKN1f8Z6W5pgdAHuTXbyBQ\nLDQh6s43TBPhww1KszmcwURjEBDCT6KlhsM/quMd9XlMU0ZEAMf6XxsqvW8ia8C8\nF+RNAaGkwmiS4qZ+hJ4KIUnWM84j+bsyNBqlHFKi1e7LsKRyjnQ288FqIQIDAQAB\no2MwYTAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\n9O+dX0K7dzD13bshuR70sHbUAeAwHwYDVR0jBBgwFoAU9O+dX0K7dzD13bshuR70\nsHbUAeAwDQYJKoZIhvcNAQELBQADggEBADPRGSn1U/YwBUkpU7vTzsxqLaVJW21o\n6hV/W2IjjGNGqp6c2kSH/3ZGSEjNwIJqKRFpC2gmTPgAqnC4nDosOHx5F5HXTmrU\n1l2Ivcm1Ep+t/zBgNHjBi3yommx8n2iTTdakpQaq7/u1s0I4UiRqXydjoGXp7H1C\naAsmRlK8ovgEAWzItjeMBzy65wqiStPBK+XAIddqznHCxrRyH5xk3HcnyMG4GDWl\nrogdK8yTGCuZVCvGfe9Hwm8tyYrxDRNvRLTc0ssTonAwnR/7IzaVVc9Pp0svCynJ\n6VX3FGhgWwDVWeajj8yrXeR42az/Rr1TAAOZtJMW+4hIkaU0/+msvgw=\n-----END CERTIFICATE-----\n"},"metadata":{"creationTimestamp":"2025-11-02T07:51:50Z","managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ca-bundle.crt":{}},"f:metadata":{"f:annotations":{".":{},"f:openshift.io/owning-component":{}}}},"manager":"cluster-kube-apiserver-operator","operation":"Update","time":"2025-12-12T16:17:47Z"}],"resourceVersion":null,"uid":"60afc254-0d91-486c-a410-610b8f84e03e"}} 2025-12-12T16:17:49.513056773+00:00 stderr F I1212 16:17:49.511562 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:17:49.513056773+00:00 stderr F I1212 16:17:49.512004 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-controller-manager-operator", Name:"kube-controller-manager-operator", UID:"09857aec-2c93-4f0d-9e38-a820bd5b8362", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/client-ca -n openshift-kube-controller-manager: 2025-12-12T16:17:49.513056773+00:00 stderr F cause by changes in data.ca-bundle.crt 
2025-12-12T16:18:07.533531676+00:00 stderr P I1212 16:18:07.532752 1 core.go:352] ConfigMap "openshift-kube-controller-manager/client-ca" changes: {"data":{"ca-bundle.crt":"-----BEGIN CERTIFICATE-----\nMIIFWzCCA0OgAwIBAgIUQLy5hoffN9SGXMo67f/i/5XEwyEwDQYJKoZIhvcNAQEL\nBQAwPTESMBAGA1UECwwJb3BlbnNoaWZ0MScwJQYDVQQDDB5hZG1pbi1rdWJlY29u\nZmlnLXNpZ25lci1jdXN0b20wHhcNMjUxMjEyMTYxNzQyWhcNMzUxMjEwMTYxNzQy\nWjA9MRIwEAYDVQQLDAlvcGVuc2hpZnQxJzAlBgNVBAMMHmFkbWluLWt1YmVjb25m\naWctc2lnbmVyLWN1c3RvbTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB\nAJRtnlRieMPhCvStW7Uo/SGT7vUyHxVn8BYwaDaVxTUKO0kZRGBZ0gHPv8jnIZ0u\n9xNf88H2LEduWCso2Q+si1eaGR24oL/nxabCArbO4hHFtwsF4LFbA2v4iknUaW5d\nQWb4bl072Tx3O0Wly+wiVv7H93PVCyuLWmBeH80F+Cwt80s3RbtDthOHKjYK/a5W\naFRjDBzvmoWoK+/GxYjovUmgZCpLvElE9a2TmH1hExyn1ST15L8ESyah0e0vSZFW\n2odSnLavfmUw2HbS4lpy0peAGSa3R52E+rvF16qGuAGHIja09i/0GN/TM/aTrFRu\noyDcrAWgxNwU/ungVw8jX1+ReTC0ZkohS78CfvE5jhM6AcRCbMisGAFibd7VHbA3\nUot5wB7w/fAs7YPatGUdRNVYE4vQxfTYTRktYiSLSsPOEuHaqA6lBHLklssMGGAt\nPrFydiB76a4MaZrHgBNLVUpVT5txScHOmqTrU4m/Ix+SxgARUfhnN4QizhWNMhi1\nKxmHi9OQye+tTxpBLBbvagZFoQOkjMfHPABQkaORgdBwKn73V2psrwZpoZh5jDWZ\ny9RB3N/K+vsQvCmhjnX4rKHhcX/tLVUDcvN1XNLbPFJUoVpFqtSZb6yaQlHze+zL\nNSw9SUfOHN5+VuDW9O0ho/cDIV2w1e05pPZe7+SiSwEZAgMBAAGjUzBRMB0GA1Ud\nDgQWBBSLpZUyMU6+pBaqwQ1fQgx26OkbpjAfBgNVHSMEGDAWgBSLpZUyMU6+pBaq\nwQ1fQgx26OkbpjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4ICAQB8\nS/trc5KgMr4wUI8dKDSGKElBdqbMWIY0xPt1ANYj36eoA9Fj8k2qX+vxqSRw5NIU\nPuLASD2jk5vM7Chn29XLnEO0uZlFrdX6ywCGHqU00X6AKPBOaflmdiUo5/GvY0ll\nK6Rk+Y+jueATGDVseDzArN6DW22ZJ0M/7uwmJjW4VRmK0gPmz0PXBGrNXuR/e00n\nrXGvq3CqUw+g9lnR1JI6DSRk3iu5SgK4syO+B6P7Z7Cxuj2YDyYJEv4uTPeti0E9\nVn0UH3h0/GzDFu/ghy4bEhb/CX74FcoBx0geoZu3YYhSjL4gmRLnTwmL5Yg4d0R5\nKsFg3Ht5VBh757nG+aMBJb+aheFuu0z62XOQlpJC4+qx1e4cYU+jInKsnZYSptT7\nMmtGFjud4bmlSyDgJ0s+gfu+7PBeoowJi9sQSxlA4HFStbc/9Qh8eUxeZG1nGIYa\n+ACyCDzlVAt2+EOB8l+mxTFoBD1UcYG7E+2wepOpkz92zUqS5vlQOeZg1KlfHOcz\nAzrBCHw3+xYQN62WaNX97Beewz2y05I/CvSZfIW0U4CReywukh1V+66m5WWvA2Qo\nqXKlRZjy/rcLHXG557FT6EnQDoh2R+FEko6O1tJ4urkAD1/E1fY1U7qdkr9uvMlo\nvOrzaWzKi110hAolc+gWzOnIcG3nf1y7EMIVbBbHMw==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIIHuh6I9HTH+QwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM1WhcNMjYx\nMDI4MDgxNzM2WjAmMSQwIgYDVQQDDBtrdWJlLWNzci1zaWduZXJfQDE3NjIwNzE0\nNTUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb91UVWMxGg+Q7+o+5\ntt2z/Oq+7Mc1SNGJ4oF0Y99WM1Y7tIo5NOKWiIXE16rGuzY4+D9O53fh14ngzJm3\nWh3GjCGvr8/2W7J1BblXwuKwDuRl+OfJvyPtETUeME42Y9V6XP/B60iaaEYhm5t7\nTDf7TmmjEpqeWix43KqHnpcW9Zr8tM+tHLrGwcHnb+z3LvGkQA7mbXsaHiuryCVx\nudxQYNKWgtAkw3OOtuVyJ2gGD7iYVni1jg7nc9ZhQOYBoYRbAw3zh36CY50dZA89\nhDKYsVVourd9xfAdwSmrcgtsVo1X0ucCpEEUYKEz3/udgk+Dgf2hy3flIMcg9kcI\nS8xzAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0G\nA1UdDgQWBBSGMLijow+n1t5asMgXimhNhoMUvDAfBgNVHSMEGDAWgBQwDwrhoAwS\n6q2GA5CnoIQZVVim5jANBgkqhkiG9w0BAQsFAAOCAQEAhDT7ncsDiLG3QyVtffPp\nTjWscZowuHbVwUjwpZt5OxQFonSJxNCnqrj4MlIqqxraH5nnsrW5FqWyWWeMmXpz\nbFkiVhPFCVGjmRN9V1LXjHDc4lufe3ixq+MvgtU1YL/WJBmUxxw5dPifTT15IApJ\n6stLJ0QtHBNeEIIroUFpDB+O7OJYZ85ed6o6gT4n/v9nxBaLsZNpO2TzaWfI0Bst\nFEoPsfPKgBvwg9+2GijlP/VyKmP2gFdFm25PWeROU1VZzPrEhaOliO+/YXHt32YU\nJkxTF/smrLzxRRbb507cuvWEilzud93YbHmAhAj1h0CpDdzjxYDr5zRYJSP7BV+6\nUA==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDiTCCAnGgAwIBAgIIOsA0z3vOTSgwDQYJKoZIhvcNAQELBQAwUjFQME4GA1UE\nAwxHb3BlbnNoaWZ0LWt1YmUtY29udHJvbGxlci1tYW5hZ2VyLW9wZXJhdG9yX2Nz\nci1zaWduZXItc2lnbmVyQDE3NjIwNzE0NTUwHhcNMjUxMTAyMDgxNzM0WhcNMjcx\nMDIzMDgxNzM1WjBSMVAwTgYDVQQDDEdvcGVuc2hpZnQta3ViZS1jb250cm9sbGVy\nLW1hbmFnZXItb3BlcmF0b3JfY3NyLXNpZ25lci1zaWduZXJAMTc2MjA3MTQ1NTCC\nASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPY8aATxrjWpRKfnFU7BgmD1\nkNEst9RVY3fzGsARc5tF3x+zhl9BbsoJ9NAncCj+KnkAIYk4pJMU6BHIWIakpvqV\nLsnlPM45VU8ocwOWb5Z7g88YdqHGRWeZqZt/rPXmH/846iVGDstB0YQWgKKKK97X\nvjKMsq9ALSVj8gRWai7B7MVP/bZ4FgeqsYq6zIH9XKdPO8ev20qffrob4nmLGHdJ\nikAliwIy6nkVYrATKOS8t56votMD7xuFQ8rM6uQ0YVejA5rE/Tmq4/NsFpfFfkCP\nMU0vO2XwqCBV81XKD00SmW9MXIxbTq5lU9YjjE1sDFREtN4uZL4nEDa2+wnvIxEC\nAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O\nBBYEFDAPCuGgDBLqrYYDkKeghBlVWKbmMB8GA1UdIwQYMBaAFDAPCuGgDBLqrYYD\nkKeghBlVWKbmMA0GCSqGSIb3DQEBCwUAA4IBAQDS31/o3/4/9DBLFVcLvIwOQCK1\n0ZwJUa/8iK5GaMlMH6cNfvVZBrb4SwTVx0WXI5wrIXrlvYc+PtXL0MJeyIJmMpoU\nRyQAJZsh8cckeQjghV2Pf7wMfEbHudKTp8uoQDUBntkfNhJa4pPxmNWuhOrlvdB5\nEF/6IGviKAdSy0jcNpscvD3W0oSpCYRW0Ki/25LaFvIqP2Xy/cNJlWhzJWqZbK6k\nR9I4knhvIv/JYmppOVXw1rEvP+8Pn8UF2oSfFXcN5W+j4YIIhrAUnjAbaflpyX8k\nAUEKdtgVNNe7RlW9nQrhO8GqFJItitBbNtVuSkw9XIlQ0gc40E7mgB7Mjnbu\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDQjCCAiqgAwIBAgIICN23rtJ7PrYwDQYJKoZIhvcNAQELBQAwPzESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSkwJwYDVQQDEyBrdWJlLWFwaXNlcnZlci10by1rdWJlbGV0\nLXNpZ25lcjAeFw0yNTExMDIwNzM0MTFaFw0yNjExMDIwNzM0MTFaMD8xEjAQBgNV\nBAsTCW9wZW5zaGlmdDEpMCcGA1UEAxMga3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxl\ndC1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDzRlKEKNEy\nCyd+PPBkdzQZd3BeUb0b2qi1h8fbUiSNENrOpafKxxAHcr3a4KWB6sKhi28r14mF\nxcJ2Yb92/jLpkS15p629AUrGXxKuL8QtkBsY3dH0CqKMBedO6oxodva9Avc+3DMI\nvvYBJFy+4on/0JbM54fduvDmcEmhBtgRItK3Z87VbhemVPj7uDi9EV381uRMlmq4\ncgtD5mfS1yeRu0ut5IIr7/PN1G+93slLGQkHveqWlsFrDYd8Qm5PqirRBYy+18RC\nmEuNirFX3yPrEGwMvRlJyFia0RKuK69bFL2vduI5Wu7h/6VKP0/vEpEYqI6bJYoV\nbUjA2vqrV/1VAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTAD\nAQH/MB0GA1UdDgQWBBRNj9dsSIhfwKZ491Q/XZYt2lRWLzANBgkqhkiG9w0BAQsF\nAAOCAQEANmt7a5N+zch83D5+YeC4VQoFvpkHPKEYYT0EM/ykjch3453RmQWQEwkj\nVvstV1U16YpnEI61l1s347RHi64SwtlwV6tiNCpopDF2u3Bb+eiJqrlIC69EFnZE\n1426AVmZZq3sWu3eKB0HgT5u6B1rErSTl3c4hK4SiDsWWlVktBSN0BS4cD+urSAF\nc673/wLKCjq2I+9i3Wv2K7Ton3w5oaETE7lgQyImbKOVhJhFrPGu9fKXaeWlyXGY\nj7tz68vNTvecRynKrmzUJ9BBMfAXTrCowitzjBjanFitgXK4DnQMkb+8lv2Txb/n\nkB7RzcFDyIVd3g5XWBujR3fkQFWsNQ==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDlTCCAn2gAwIBAgIIJ6CFEe7+79cwDQYJKoZIhvcNAQELBQAwWDFWMFQGA1UE\nAwxNb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtYXBpc2Vy\ndmVyLXRvLWt1YmVsZXQtc2lnbmVyQDE3NjU1NTYyMDkwHhcNMjUxMjEyMTYxNjQ4\nWhcNMjYxMjEyMTYxNjQ5WjBYMVYwVAYDVQQDDE1vcGVuc2hpZnQta3ViZS1hcGlz\nZXJ2ZXItb3BlcmF0b3Jfa3ViZS1hcGlzZXJ2ZXItdG8ta3ViZWxldC1zaWduZXJA\nMTc2NTU1NjIwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMFdeL5w\n1lJs3DvHYhLuNjrkSOLA4aXRwP85WxI5EZ4JKmSU1IxWlKucnS96ghUWuJudciUT\nGeDw1fYjUOvv+YnMhcM5avzLMmc/4JwRwmPsBNqeS6NSZsJYasHBQqT5lihBnZmU\nxRCSGpOAJqL/tWMsC3MxHG41AQunmbpk4RSZdPjfiJ3U8Gty9rnppq9GfZ9n+LxL\niIkodGqPe95J2csyNuLpDmOlhA5x7/miPLT7Wtp/hN/s2DSgpKQEWIHEsEVlfU+q\nH+GO/W1yWO+dDthJ2/yGn4ZQVe2riuw5uUcxjY27wkobhZ25/9brhqAKxweioGLf\nB32mvwrIarjuWKsCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgKkMA8GA1UdEwEB/wQF\nMAMBAf8wHQYDVR0OBBYEFAl1BlYxjAq7rEsUpr186nCtx13/MB8GA1UdIwQYMBaA\nFAl1BlYxjAq7rEsUpr186nCtx13/MA0GCSqGSIb3DQEBCwUAA4IBAQCu3dv4kIwz\nu5DkLCS1z/I9QcxrEZmJs72LUWpri/eyvFxK5LrC4bU0d3LZXdjQUxLhrk1A8qD6\ne7QoqzRf9QdNRUiU3ilpwtANd6NwmIzd2PrcalBdoglrtDpBz5VAZ0j26YJAcpyn\nXbMpaDDOgz247mp4Ts1wHVjQa0H0bFAtuKbab+R+EFWCVc77MSRXVDiQndg/gtdZ\nEekPhH4kNhFT2c74uf32ICivfylG79g0sGNUVO0SPLup4psWmGf2pxI+HGftIT8p\ni0idY7Ij5JlIyQ8ypGbD7AqA1bRwcT6o8Al2iQw349XO694SJUNs10QGwuVKdHqL\nJC64gPwOl26J\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDNDCCAhygAwIBAgIILvKlXd2YBKIwDQYJKoZIhvcNAQELBQAwODESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVy\nMB4XDTI1MTEwMjA3MzQxMFoXDTI2MTEwMjA3MzQxMFowODESMBAGA1UECxMJb3Bl\nbnNoaWZ0MSIwIAYDVQQDExlrdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyMIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsOnFEdxnqqE7l+5NxN82TBBycC2F\n5cUboCcUVr211lubqmjCusnJzzz+6rKpO8uFqqGeu4C7rGpudOaK52IZhG0WP8b7\n0raQhnM4DV8t2SHDV4GhFUiuNE+b4FJrZ6jiljQo2g9ZeeCZgdmmBrIFHBXDFzEc\nA1RPScqOtvBbbH064Zd267gOmVPJnWmxDXo6X/RGYCm1YUS6FQ2WWpl707ComvgZ\nAvWGSA4H1sZirMQ+ug3bctkLb+SiXUzf+tLnGIHPeqDNfMrNUhxBl6dDhlMbUIzY\nrVxgDD8y3i7eg5i5HG8yntl8epgs2gn47wfavfjLqATBlciJPZY6Qv3BFQIDAQAB\no0IwQDAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\nWi6wmZ50AVdwxaiO3WjTvp82+sEwDQYJKoZIhvcNAQELBQADggEBAA4siMWwOGL5\nCiCxbiJsuKMbCLlFR6JaD43FWnUCZDoe7np7W76Oh4py4Zht7XlZKotcXrrfRYVO\ndVht66PCbSujy375p/B6c3isG4h/1cNSGDm1uhAkHXGZ88S2wSjKT5YJ/HUAkvyj\nadQgZeO7Q60YBSDE/67Ldq1zqvBrMF2k8pF49p1AdAtf4OSDzIaGGPUQJTFExA0 2025-12-12T16:18:07.533589238+00:00 stderr F E\n03xMlOPNhYZ8MgFT2XE6nRT74lCAfK9krAsZLtFuAtp/14t013PD0FqTTQRUmuSj\nO6pJKDTH8tZ3ieXxSzRR+j4p5hkHaehgQVyUbwiw8WVXkd6NcWR6yQcSeqIsTSCD\neMDdTmmKyuo=\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDhzCCAm+gAwIBAgIID5n6gpWYc8YwDQYJKoZIhvcNAQELBQAwUTFPME0GA1UE\nAwxGb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX2t1YmUtY29udHJv\nbC1wbGFuZS1zaWduZXJAMTc2NTU1NjIwOTAeFw0yNTEyMTIxNjE2NDhaFw0yNjAy\nMTAxNjE2NDlaMFExTzBNBgNVBAMMRm9wZW5zaGlmdC1rdWJlLWFwaXNlcnZlci1v\ncGVyYXRvcl9rdWJlLWNvbnRyb2wtcGxhbmUtc2lnbmVyQDE3NjU1NTYyMDkwggEi\nMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtQLJD7hc4OcHS2GPLdn9xsKkM\nAqHIcDr2UgXYWTVShY348nGviBJGCBWyKqQPpM6u4zIbS4xHMeQ3fnVCRvti8Ggf\n7zKigxyoxwcx+f4z9L5fCO6RbScmwmaYchePaAMT/7sNBt8NIKyFfep/bExW2UPw\nrb0qUqAbmBse+Azrl0V+UHNexaG3VmsOEfCJORAVgIo8SNIAG8jHe6+r3BtnwTsC\n59znIuoxDrBL9cq82ZZGQ++jVx4AE6JM0Lj/UBPVsJ06+X/829a1PiHuqlY2oN/p\nm+xCLSQgi59nrDifJLQXNBjahDW8ccWnbkvkUPjO8/OzS2nJb6uXihez5m2xAgMB\nAAGjYzBhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\nBBR9PxK41FQkWasZMK1vVKm6UNhzHzAfBgNVHSMEGDAWgBR9PxK41FQkWasZMK1v\nVKm6UNhzHzANBgkqhkiG9w0BAQsFAAOCAQEAfHCzxMKk00cOsThQkkL2trlY5tl2\n9wXEd/62Fh8EoOhNmCIpyPYLWMDnD2GB2BS7J5S+zcqby2+7s8Etub0gpvbN2Ocq\nHss3f+WcAFm7t9hiQrJ4gPYSkwEQGCwJ3ueGIEmPyyrTQPTmzNYudSdXt1WSrTpO\nO1sKWdQro5M0V4U9Z6MWGnG4nIZljqHWgVkZXkluh6Rvshoen8rhUNa6VV3aMHcZ\n94dtvZRSye9RsOZwZygsG/HU2+GcnKKYvqkIo8FZVAYTyu3rlOlT9dmmZpxwukb6\nADjin/tgzt7r0FiU+Z9uYqI3SoFog9pv+mlqpuf3zLm+Q3DKwBpxaxn5nQ==\n-----END CERTIFICATE-----\n-----BEGIN CERTIFICATE-----\nMIIDSDCCAjCgAwIBAgIIV//3Qv2OxM4wDQYJKoZIhvcNAQELBQAwQjESMBAGA1UE\nCxMJb3BlbnNoaWZ0MSwwKgYDVQQDEyNrdWJlbGV0LWJvb3RzdHJhcC1rdWJlY29u\nZmlnLXNpZ25lcjAeFw0yNTExMDIwNzM0MDhaFw0zNTEwMzEwNzM0MDhaMEIxEjAQ\nBgNVBAsTCW9wZW5zaGlmdDEsMCoGA1UEAxMja3ViZWxldC1ib290c3RyYXAta3Vi\nZWNvbmZpZy1zaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc\nZBn9WZ5wLPck6+g3f4p0NBGyE6LaXnWavXx9m0sVdTVQooknndmKufeYkXGZ5Lb+\nfbMAp/6swgSJ1DjdBj06rCqJEZfdZl3uZoD/Th4ha2Phl12bXaNYLiuOu5BOZ3UW\n08y1Wab9Y9zc0o4Z71pHH4o9TH3QPNT6BqAz4kkgD6t1r/R7E7lrZbx+7e+0JBAW\nRgufaFOX1AYU5B4+pSM21eJY7oP1P9I4DMeeJW39opCCHAuUQgHpOV1YPtRqEPJ4\n9matas8qm5qIMIPbGEGFckSJqgny9YCfHaLezJtZMIHJgz5LW4H91gQCGvfSbLtH\nYxYO/PcTXQCnDYNwqf29AgMBAAGjQjBAMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBSCg7Y8QkzypoNAqYVPNK5YWMVIXzANBgkqhkiG\n9w0BAQsFAAOCAQEAsP1NR5ZgC7F5FvoU2CXla4FufdeYTk4bjjNoFJMqnvI5gcY6\nJDxP1Rl2YBHCpRxngtBFxw3Xoe8UnBKzZWsS5wLUYRloprhGVBSLM0vqJJOvP7M0\njt3SLuB7h0dG2GO9yQ4y10+xVWxP5Os9wcbQcRgTQKL3gHmCq4aQN1cqJSxyJ/ut\nlbfYlM/xBcfLMY5Leeas6y2FPCFIEONh1U9FJZlF3YkhPp+XD7aePtC4tJqsokMc\nP80IwPn54aDT9akRPsOteB6C+xSAz2TlfWaJ/l/x9yXK+HJrRhMartqyN511SeEd\nDpNcMW9qPTjJzBj+N3f0ZfvbTmhVSvV65ZEtAw==\n-----END CERTIFICATE-----\n-----BEGIN 
CERTIFICATE-----\nMIIDhTCCAm2gAwIBAgIIfksp5LDEMMgwDQYJKoZIhvcNAQELBQAwUDFOMEwGA1UE\nAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9wZXJhdG9yX25vZGUtc3lzdGVt\nLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MB4XDTI1MTEwMjA3NTEyN1oXDTI4MTEw\nMTA3NTEyOFowUDFOMEwGA1UEAwxFb3BlbnNoaWZ0LWt1YmUtYXBpc2VydmVyLW9w\nZXJhdG9yX25vZGUtc3lzdGVtLWFkbWluLXNpZ25lckAxNzYyMDY5ODg3MIIBIjAN\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA402A+B9GaYmzBVLJEyrGvytCY0Vb\nvvIKHW2kxl9IFN3+LNZHW/mKeJfz/hBTm2bs+6uRCDlMSyONDlVUWVsuE+q0+F42\n0n3VyxWRSrDZ2ur5oNxmoBSsHRM+PxccQ6X3JTZyO397LHNOzxAs/Es+St8A8sbY\nGLc1lNqeOLvwAOT5d2PrFlYCAfXYs/UVIaio846jidKKN1f8Z6W5pgdAHuTXbyBQ\nLDQh6s43TBPhww1KszmcwURjEBDCT6KlhsM/quMd9XlMU0ZEAMf6XxsqvW8ia8C8\nF+RNAaGkwmiS4qZ+hJ4KIUnWM84j+bsyNBqlHFKi1e7LsKRyjnQ288FqIQIDAQAB\no2MwYTAOBgNVHQ8BAf8EBAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU\n9O+dX0K7dzD13bshuR70sHbUAeAwHwYDVR0jBBgwFoAU9O+dX0K7dzD13bshuR70\nsHbUAeAwDQYJKoZIhvcNAQELBQADggEBADPRGSn1U/YwBUkpU7vTzsxqLaVJW21o\n6hV/W2IjjGNGqp6c2kSH/3ZGSEjNwIJqKRFpC2gmTPgAqnC4nDosOHx5F5HXTmrU\n1l2Ivcm1Ep+t/zBgNHjBi3yommx8n2iTTdakpQaq7/u1s0I4UiRqXydjoGXp7H1C\naAsmRlK8ovgEAWzItjeMBzy65wqiStPBK+XAIddqznHCxrRyH5xk3HcnyMG4GDWl\nrogdK8yTGCuZVCvGfe9Hwm8tyYrxDRNvRLTc0ssTonAwnR/7IzaVVc9Pp0svCynJ\n6VX3FGhgWwDVWeajj8yrXeR42az/Rr1TAAOZtJMW+4hIkaU0/+msvgw=\n-----END CERTIFICATE-----\n"},"metadata":{"creationTimestamp":"2025-11-02T07:51:50Z","managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:data":{".":{},"f:ca-bundle.crt":{}},"f:metadata":{"f:annotations":{".":{},"f:openshift.io/owning-component":{}}}},"manager":"cluster-kube-apiserver-operator","operation":"Update","time":"2025-12-12T16:17:52Z"}],"resourceVersion":null,"uid":"60afc254-0d91-486c-a410-610b8f84e03e"}} 2025-12-12T16:18:07.533809363+00:00 stderr F I1212 16:18:07.533773 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-controller-manager-operator", Name:"kube-controller-manager-operator", UID:"09857aec-2c93-4f0d-9e38-a820bd5b8362", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'ConfigMapUpdated' Updated ConfigMap/client-ca -n openshift-kube-controller-manager: 2025-12-12T16:18:07.533809363+00:00 stderr F cause by changes in data.ca-bundle.crt 2025-12-12T16:18:07.537334720+00:00 stderr F I1212 16:18:07.535489 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:18:46.256494093+00:00 stderr F E1212 16:18:46.255795 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager-operator/leases/kube-controller-manager-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:46.257393936+00:00 stderr F E1212 16:18:46.257335 1 leaderelection.go:436] error retrieving resource lock openshift-kube-controller-manager-operator/kube-controller-manager-operator-lock: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager-operator/leases/kube-controller-manager-operator-lock?timeout=4m0s": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:18:46.459757069+00:00 stderr F E1212 16:18:46.459654 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-controller-manager-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.470787951+00:00 stderr F E1212 16:18:46.470704 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-controller-manager-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.487855313+00:00 stderr F E1212 16:18:46.487695 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-controller-manager-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.516319657+00:00 stderr F E1212 16:18:46.515647 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-controller-manager-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 
2025-12-12T16:18:46.579091579+00:00 stderr F E1212 16:18:46.579006 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-controller-manager-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.665659459+00:00 stderr F E1212 16:18:46.665570 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-controller-manager-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:46.831785566+00:00 stderr F E1212 16:18:46.831705 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-controller-manager-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:47.157505609+00:00 stderr F E1212 16:18:47.157422 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-controller-manager-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:47.804013462+00:00 stderr F E1212 16:18:47.803937 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-controller-manager-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:47.845728933+00:00 stderr F E1212 16:18:47.845661 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.847648040+00:00 stderr F E1212 16:18:47.847571 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-controller-manager-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=kube-controller-manager-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.852714116+00:00 stderr F E1212 16:18:47.852686 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.857136135+00:00 stderr F E1212 16:18:47.857091 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-controller-manager-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=kube-controller-manager-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.865574104+00:00 stderr F E1212 16:18:47.865537 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection 
refused" 2025-12-12T16:18:47.872041104+00:00 stderr F E1212 16:18:47.871988 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-controller-manager-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=kube-controller-manager-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.888583733+00:00 stderr F E1212 16:18:47.888538 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:47.902513067+00:00 stderr F E1212 16:18:47.902468 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-controller-manager-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=kube-controller-manager-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.055580541+00:00 stderr F E1212 16:18:48.055485 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.244693617+00:00 stderr F E1212 16:18:48.244635 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.252341996+00:00 stderr F E1212 16:18:48.252277 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.258436806+00:00 stderr F E1212 16:18:48.258382 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-controller-manager-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=kube-controller-manager-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.265250635+00:00 stderr F E1212 16:18:48.265153 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.289488584+00:00 stderr F E1212 16:18:48.289398 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.333760359+00:00 stderr F E1212 16:18:48.333702 1 base_controller.go:279] "Unhandled Error" 
err="TargetConfigController reconciliation failed: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.456558435+00:00 stderr F E1212 16:18:48.456092 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.460256746+00:00 stderr F E1212 16:18:48.460166 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.659917432+00:00 stderr F E1212 16:18:48.659843 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:48.859946598+00:00 stderr F E1212 16:18:48.859633 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-controller-manager-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=kube-controller-manager-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.057660656+00:00 stderr F E1212 16:18:49.056753 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.059456030+00:00 stderr F E1212 16:18:49.059417 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.457071840+00:00 stderr F E1212 16:18:49.457003 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-controller-manager-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=kube-controller-manager-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:49.680830532+00:00 stderr F E1212 16:18:49.680775 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-controller-manager-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager 
\"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:49.702769585+00:00 stderr F E1212 16:18:49.702697 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.055625708+00:00 stderr F E1212 16:18:50.055564 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.257789936+00:00 stderr F E1212 16:18:50.257729 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-controller-manager-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=kube-controller-manager-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:50.986093282+00:00 stderr F E1212 16:18:50.986021 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.056068941+00:00 stderr F E1212 16:18:51.055964 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:51.260432964+00:00 stderr F E1212 16:18:51.260346 1 base_controller.go:279] "Unhandled Error" err="KubeControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/kube-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/leader-election-cluster-policy-controller-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/roles/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/leader-election-cluster-policy-controller-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/rolebindings/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrole.yaml\" (string): Get 
\"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/podsecurity-admission-label-syncer-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:podsecurity-admission-label-syncer-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/podsecurity-admission-label-syncer-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:podsecurity-admission-label-syncer-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/podsecurity-admission-label-privileged-namespaces-syncer-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:privileged-namespaces-psa-label-syncer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/podsecurity-admission-label-privileged-namespaces-syncer-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:privileged-namespaces-psa-label-syncer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/namespace-openshift-infra.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-infra\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/services/kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/kube-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/recycler-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-infra/serviceaccounts/pv-recycler-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/localhost-recovery-client-crb.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-controller-manager-recovery\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/localhost-recovery-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/localhost-recovery-client\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/csr_approver_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:cluster-csr-approver-controller\": dial tcp 10.217.4.1:443: connect: connection refused, 
\"assets/kube-controller-manager/csr_approver_clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:cluster-csr-approver-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/vsphere/legacy-cloud-provider-sa.yaml\" (string): Delete \"https://10.217.4.1:443/api/v1/namespaces/kube-system/serviceaccounts/vsphere-legacy-cloud-provider\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/vsphere/legacy-cloud-provider-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:kube-controller-manager:vsphere-legacy-cloud-provider\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/vsphere/legacy-cloud-provider-binding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:kube-controller-manager:vsphere-legacy-cloud-provider\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeControllerManagerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=KubeControllerManagerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:51.458727926+00:00 stderr F E1212 16:18:51.458662 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-controller-manager-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=kube-controller-manager-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:52.463973699+00:00 stderr F E1212 16:18:52.463338 1 base_controller.go:279] "Unhandled Error" err="BackingResourceController-StaticResources reconciliation failed: [\"manifests/installer-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/installer-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"manifests/installer-cluster-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:openshift-kube-controller-manager-installer\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"BackingResourceController-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=BackingResourceController-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:52.856362530+00:00 stderr F E1212 16:18:52.856288 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.258703007+00:00 stderr F E1212 16:18:53.258615 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager 
\"kube-controller-manager-StaticPodState\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=kube-controller-manager-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:53.459052690+00:00 stderr F E1212 16:18:53.458960 1 base_controller.go:279] "Unhandled Error" err="KubeControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/kube-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/leader-election-cluster-policy-controller-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/roles/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/leader-election-cluster-policy-controller-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/rolebindings/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/podsecurity-admission-label-syncer-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:podsecurity-admission-label-syncer-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/podsecurity-admission-label-syncer-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:podsecurity-admission-label-syncer-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/podsecurity-admission-label-privileged-namespaces-syncer-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:privileged-namespaces-psa-label-syncer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/podsecurity-admission-label-privileged-namespaces-syncer-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:privileged-namespaces-psa-label-syncer\": dial tcp 10.217.4.1:443: connect: connection 
refused, \"assets/kube-controller-manager/namespace-openshift-infra.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-infra\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/services/kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/kube-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/recycler-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-infra/serviceaccounts/pv-recycler-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/localhost-recovery-client-crb.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-controller-manager-recovery\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/localhost-recovery-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/localhost-recovery-client\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/csr_approver_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:cluster-csr-approver-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/csr_approver_clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:cluster-csr-approver-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/vsphere/legacy-cloud-provider-sa.yaml\" (string): Delete \"https://10.217.4.1:443/api/v1/namespaces/kube-system/serviceaccounts/vsphere-legacy-cloud-provider\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/vsphere/legacy-cloud-provider-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:kube-controller-manager:vsphere-legacy-cloud-provider\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/vsphere/legacy-cloud-provider-binding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:kube-controller-manager:vsphere-legacy-cloud-provider\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeControllerManagerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=KubeControllerManagerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:53.549841825+00:00 stderr F E1212 16:18:53.549745 1 base_controller.go:279] "Unhandled Error" err="TargetConfigController reconciliation failed: Get \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.420210554+00:00 stderr F E1212 16:18:55.420125 1 
base_controller.go:279] "Unhandled Error" err="kube-controller-manager-InstallerState reconciliation failed: Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/pods?labelSelector=app%3Dinstaller\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:55.662234188+00:00 stderr F E1212 16:18:55.662143 1 base_controller.go:279] "Unhandled Error" err="KubeControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/kube-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/leader-election-cluster-policy-controller-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/roles/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/leader-election-cluster-policy-controller-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/rolebindings/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/podsecurity-admission-label-syncer-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:podsecurity-admission-label-syncer-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/podsecurity-admission-label-syncer-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:podsecurity-admission-label-syncer-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/podsecurity-admission-label-privileged-namespaces-syncer-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:privileged-namespaces-psa-label-syncer\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/podsecurity-admission-label-privileged-namespaces-syncer-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:privileged-namespaces-psa-label-syncer\": dial tcp 10.217.4.1:443: 
connect: connection refused, \"assets/kube-controller-manager/namespace-openshift-infra.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-infra\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/svc.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/services/kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/kube-controller-manager-sa\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/recycler-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-infra/serviceaccounts/pv-recycler-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/localhost-recovery-client-crb.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:operator:kube-controller-manager-recovery\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/localhost-recovery-sa.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager/serviceaccounts/localhost-recovery-client\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/csr_approver_clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:cluster-csr-approver-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/csr_approver_clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:cluster-csr-approver-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/vsphere/legacy-cloud-provider-sa.yaml\" (string): Delete \"https://10.217.4.1:443/api/v1/namespaces/kube-system/serviceaccounts/vsphere-legacy-cloud-provider\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/vsphere/legacy-cloud-provider-role.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:kube-controller-manager:vsphere-legacy-cloud-provider\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/vsphere/legacy-cloud-provider-binding.yaml\" (string): Delete \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:kube-controller-manager:vsphere-legacy-cloud-provider\": dial tcp 10.217.4.1:443: connect: connection refused, unable to ApplyStatus for operator using fieldManager \"KubeControllerManagerStaticResources-StaticResources\": Patch \"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=KubeControllerManagerStaticResources-StaticResources&force=true\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:18:55.824224913+00:00 stderr F E1212 16:18:55.824141 1 base_controller.go:279] "Unhandled Error" err="kube-controller-manager-StaticPodState reconciliation failed: unable to ApplyStatus for operator using fieldManager \"kube-controller-manager-StaticPodState\": Patch 
\"https://10.217.4.1:443/apis/operator.openshift.io/v1/kubecontrollermanagers/cluster/status?fieldManager=kube-controller-manager-StaticPodState&force=true\": dial tcp 10.217.4.1:443: connect: connection refused" 2025-12-12T16:18:58.746404506+00:00 stderr F E1212 16:18:58.740162 1 base_controller.go:279] "Unhandled Error" err="KubeControllerManagerStaticResources-StaticResources reconciliation failed: [\"assets/kube-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/leader-election-cluster-policy-controller-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/roles/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/leader-election-cluster-policy-controller-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/rolebindings/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused, \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused]" 2025-12-12T16:19:28.137671312+00:00 stderr F I1212 16:19:28.136973 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:28.692438431+00:00 stderr F I1212 16:19:28.691889 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:28.763938196+00:00 stderr F I1212 16:19:28.763875 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:30.092613977+00:00 stderr F I1212 16:19:30.092531 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:30.722937362+00:00 stderr F I1212 16:19:30.722856 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:36.186080031+00:00 stderr F I1212 16:19:36.185072 1 reflector.go:430] "Caches populated" type="*v1.ClusterRoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:46.150948928+00:00 stderr F I1212 16:19:46.150441 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 
2025-12-12T16:19:48.517885846+00:00 stderr F I1212 16:19:48.516752 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:53.765744699+00:00 stderr F I1212 16:19:53.764983 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:56.356822825+00:00 stderr F I1212 16:19:56.356296 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:57.976430290+00:00 stderr F I1212 16:19:57.976359 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:19:58.575512952+00:00 stderr F I1212 16:19:58.575442 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:00.975205923+00:00 stderr F I1212 16:20:00.974615 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:04.432228822+00:00 stderr F I1212 16:20:04.432112 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:04.776122496+00:00 stderr F I1212 16:20:04.775770 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:05.204964653+00:00 stderr F I1212 16:20:05.204538 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:05.586091153+00:00 stderr F I1212 16:20:05.586005 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:06.576225433+00:00 stderr F I1212 16:20:06.576091 1 reflector.go:430] "Caches populated" type="*v1.ServiceAccount" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:08.178505643+00:00 stderr F I1212 16:20:08.178410 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:08.458143744+00:00 stderr F I1212 16:20:08.458076 1 reflector.go:430] "Caches populated" type="*v1.ClusterRole" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:09.603981084+00:00 stderr F I1212 16:20:09.603929 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:10.576054070+00:00 stderr F I1212 16:20:10.575990 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:11.579470834+00:00 stderr F I1212 16:20:11.579356 1 reflector.go:430] "Caches populated" type="*v1.Role" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:11.927115703+00:00 stderr F I1212 16:20:11.927023 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:12.374991068+00:00 stderr F I1212 16:20:12.374910 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:13.197359936+00:00 stderr F I1212 16:20:13.196386 
1 reflector.go:430] "Caches populated" type="*v1.ClusterOperator" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:13.219304837+00:00 stderr F I1212 16:20:13.219243 1 reflector.go:430] "Caches populated" type="*v1.RoleBinding" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:13.976330944+00:00 stderr F I1212 16:20:13.976252 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:15.593272723+00:00 stderr F I1212 16:20:15.593169 1 reflector.go:430] "Caches populated" type="*v1.APIServer" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:17.409643137+00:00 stderr F I1212 16:20:17.409572 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=kubecontrollermanagers" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:17.410069478+00:00 stderr F I1212 16:20:17.410035 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:17.410904769+00:00 stderr F I1212 16:20:17.410870 1 status_controller.go:230] clusteroperator/kube-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:46Z","message":"NodeControllerDegraded: All master nodes are ready\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/leader-election-cluster-policy-controller-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/roles/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/leader-election-cluster-policy-controller-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/rolebindings/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: 
","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:17:05Z","message":"NodeInstallerProgressing: 1 node is at revision 7","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T08:06:09Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 7","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:55Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:55Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:17.419915535+00:00 stderr F I1212 16:20:17.419084 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-controller-manager-operator", Name:"kube-controller-manager-operator", UID:"09857aec-2c93-4f0d-9e38-a820bd5b8362", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-controller-manager changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready" to "NodeControllerDegraded: All master nodes are ready\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/leader-election-cluster-policy-controller-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/roles/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/leader-election-cluster-policy-controller-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/rolebindings/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: " 2025-12-12T16:20:19.411368087+00:00 stderr F I1212 16:20:19.411261 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:19.416535017+00:00 stderr F I1212 16:20:19.416397 1 status_controller.go:230] 
clusteroperator/kube-controller-manager diff {"status":{"conditions":[{"lastTransitionTime":"2025-12-12T16:16:46Z","message":"NodeControllerDegraded: All master nodes are ready","reason":"AsExpected","status":"False","type":"Degraded"},{"lastTransitionTime":"2025-11-02T08:17:05Z","message":"NodeInstallerProgressing: 1 node is at revision 7","reason":"AsExpected","status":"False","type":"Progressing"},{"lastTransitionTime":"2025-11-02T08:06:09Z","message":"StaticPodsAvailable: 1 nodes are active; 1 node is at revision 7","reason":"AsExpected","status":"True","type":"Available"},{"lastTransitionTime":"2025-11-02T07:51:55Z","message":"All is well","reason":"AsExpected","status":"True","type":"Upgradeable"},{"lastTransitionTime":"2025-11-02T07:51:55Z","reason":"NoData","status":"Unknown","type":"EvaluationConditionsDetected"}]}} 2025-12-12T16:20:19.428682962+00:00 stderr F I1212 16:20:19.428596 1 event.go:377] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"openshift-kube-controller-manager-operator", Name:"kube-controller-manager-operator", UID:"09857aec-2c93-4f0d-9e38-a820bd5b8362", APIVersion:"apps/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OperatorStatusChanged' Status for clusteroperator/kube-controller-manager changed: Degraded message changed from "NodeControllerDegraded: All master nodes are ready\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/ns.yaml\" (string): Get \"https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/leader-election-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:openshift:leader-locking-kube-controller-manager\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/leader-election-cluster-policy-controller-role.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/roles/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/leader-election-cluster-policy-controller-rolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/namespaces/openshift-kube-controller-manager/rolebindings/system:openshift:leader-election-lock-cluster-policy-controller\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrole.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterroles/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: \"assets/kube-controller-manager/namespace-security-allocation-controller-clusterrolebinding.yaml\" (string): Get \"https://10.217.4.1:443/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:openshift:controller:namespace-security-allocation-controller\": dial tcp 10.217.4.1:443: connect: connection refused\nKubeControllerManagerStaticResourcesDegraded: " to "NodeControllerDegraded: All master nodes are ready" 2025-12-12T16:20:20.774015170+00:00 
stderr F I1212 16:20:20.773537 1 request.go:752] "Waited before sending request" delay="1.172788496s" reason="client-side throttling, not priority and fairness" verb="GET" URL="https://10.217.4.1:443/api/v1/namespaces/openshift-kube-controller-manager-operator/configmaps?resourceVersion=38860" 2025-12-12T16:20:20.776236516+00:00 stderr F I1212 16:20:20.776202 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:24.879753796+00:00 stderr F I1212 16:20:24.879486 1 reflector.go:430] "Caches populated" type="*v1.Proxy" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:26.077617232+00:00 stderr F I1212 16:20:26.077537 1 reflector.go:430] "Caches populated" type="*v1.Network" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:26.980257496+00:00 stderr F I1212 16:20:26.980202 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:28.522759514+00:00 stderr F I1212 16:20:28.522220 1 reflector.go:430] "Caches populated" type="*v1.PodDisruptionBudget" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:30.861655281+00:00 stderr F I1212 16:20:30.860976 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:31.817243215+00:00 stderr F I1212 16:20:31.817141 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:20:31.817879802+00:00 stderr F I1212 16:20:31.817837 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:31.818123348+00:00 stderr F I1212 16:20:31.818076 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:31.818369515+00:00 stderr F I1212 16:20:31.818340 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:31.818608521+00:00 stderr F I1212 16:20:31.818584 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:31.818842247+00:00 stderr F I1212 16:20:31.818818 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:31.819882634+00:00 stderr F I1212 16:20:31.819839 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:31.820130821+00:00 stderr F I1212 16:20:31.820089 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:20:34.376887454+00:00 stderr F I1212 16:20:34.376303 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:30:31.819159295+00:00 stderr F I1212 16:30:31.818612 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:30:31.819423801+00:00 stderr F I1212 16:30:31.819403 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:30:31.821028751+00:00 stderr F I1212 16:30:31.820279 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:31.820917438+00:00 stderr F I1212 16:40:31.819719 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:31.821317698+00:00 stderr F I1212 16:40:31.821274 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:31.821725498+00:00 stderr F I1212 16:40:31.821686 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:31.822385085+00:00 stderr F I1212 16:40:31.822336 1 prune_controller.go:277] Nothing to prune 2025-12-12T16:40:31.822786085+00:00 stderr F I1212 16:40:31.822746 1 prune_controller.go:277] Nothing to prune 
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_console-64d44f6ddf-zhgm9_4651322b-9aec-4667-afa3-1602ad5176fe/console/0.log
2025-12-12T16:16:43.905876254+00:00 stderr F I1212 16:16:43.904747 1 main.go:259] Console plugins are enabled in following order:
2025-12-12T16:16:43.905876254+00:00 stderr F I1212 16:16:43.905767 1 main.go:261] - networking-console-plugin
2025-12-12T16:16:43.905876254+00:00 stderr F I1212 16:16:43.905805 1 main.go:302] Console telemetry options:
2025-12-12T16:16:43.905876254+00:00 stderr F I1212 16:16:43.905826 1 main.go:304] - CLUSTER_ID
2025-12-12T16:16:43.905876254+00:00 stderr F I1212 16:16:43.905830 1 main.go:304] - SEGMENT_API_HOST console.redhat.com/connections/api/v1
2025-12-12T16:16:43.905876254+00:00 stderr F I1212 16:16:43.905834 1 main.go:304] - SEGMENT_JS_HOST console.redhat.com/connections/cdn
2025-12-12T16:16:43.905876254+00:00 stderr F I1212 16:16:43.905839 1 main.go:304] - SEGMENT_PUBLIC_API_KEY BnuS1RP39EmLQjP21ko67oDjhbl9zpNU
2025-12-12T16:16:43.905876254+00:00 stderr F I1212 16:16:43.905851 1 main.go:304] - TELEMETER_CLIENT_DISABLED true
2025-12-12T16:16:43.905876254+00:00 stderr F W1212 16:16:43.905866 1 authoptions.go:112] Flag inactivity-timeout is set to less then 300 seconds and will be ignored!
2025-12-12T16:16:43.908139019+00:00 stderr F I1212 16:16:43.908090 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
2025-12-12T16:16:43.908139019+00:00 stderr F I1212 16:16:43.908111 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
2025-12-12T16:16:43.908139019+00:00 stderr F I1212 16:16:43.908117 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
2025-12-12T16:16:43.908139019+00:00 stderr F I1212 16:16:43.908122 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
2025-12-12T16:16:43.908139019+00:00 stderr F I1212 16:16:43.908134 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true
2025-12-12T16:17:04.115846344+00:00 stderr F I1212 16:17:04.115510 1 main.go:718] Binding to [::]:8443...
2025-12-12T16:17:04.115846344+00:00 stderr F I1212 16:17:04.115540 1 main.go:723] Using TLS
2025-12-12T16:17:07.144342333+00:00 stderr F I1212 16:17:07.143412 1 metrics.go:133] serverconfig.Metrics: Update ConsolePlugin metrics...
2025-12-12T16:17:07.157769511+00:00 stderr F I1212 16:17:07.157662 1 metrics.go:143] serverconfig.Metrics: Update ConsolePlugin metrics: &map[networking:map[enabled:1]] (took 13.354196ms)
2025-12-12T16:17:09.116973023+00:00 stderr F I1212 16:17:09.116889 1 metrics.go:80] usage.Metrics: Count console users...
2025-12-12T16:17:09.522848853+00:00 stderr F I1212 16:17:09.522711 1 metrics.go:156] usage.Metrics: Update console users metrics: 0 kubeadmin, 0 cluster-admins, 0 developers, 0 unknown/errors (took 405.767487ms)
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9/machine-config-controller/0.log
2025-12-12T16:16:47.458697643+00:00 stderr F I1212 16:16:47.458619 1 start.go:61] Version: 89b561f0 (f587a1bfbaba518cc1d49ad6300e29eeb9c38cec)
2025-12-12T16:16:47.471587118+00:00 stderr F I1212 16:16:47.471489 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}.
2025-12-12T16:16:47.475212127+00:00 stderr F I1212 16:16:47.471913 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
2025-12-12T16:16:47.475212127+00:00 stderr F I1212 16:16:47.471945 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true
2025-12-12T16:16:47.475212127+00:00 stderr F I1212 16:16:47.471951 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
2025-12-12T16:16:47.475212127+00:00 stderr F I1212 16:16:47.471956 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
2025-12-12T16:16:47.475212127+00:00 stderr F I1212 16:16:47.471965 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
2025-12-12T16:16:47.614347744+00:00 stderr F I1212 16:16:47.614283 1 leaderelection.go:257] attempting to acquire leader lease openshift-machine-config-operator/machine-config-controller...
2025-12-12T16:16:47.635986212+00:00 stderr F I1212 16:16:47.635927 1 leaderelection.go:271] successfully acquired lease openshift-machine-config-operator/machine-config-controller 2025-12-12T16:16:47.703402778+00:00 stderr F I1212 16:16:47.702427 1 simple_featuregate_reader.go:171] Starting feature-gate-detector 2025-12-12T16:16:47.703402778+00:00 stderr F I1212 16:16:47.702564 1 metrics.go:92] Registering Prometheus metrics 2025-12-12T16:16:47.703402778+00:00 stderr F I1212 16:16:47.702635 1 metrics.go:99] Starting metrics listener on 127.0.0.1:8797 2025-12-12T16:16:47.739795396+00:00 stderr F I1212 16:16:47.737129 1 certrotation_controller.go:173] MCS CA/TLS cert rotator not added 2025-12-12T16:16:47.752302892+00:00 stderr F I1212 16:16:47.750264 1 reflector.go:430] "Caches populated" type="*v1.KubeletConfig" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.754421033+00:00 stderr F I1212 16:16:47.753151 1 reflector.go:430] "Caches populated" type="*v1.ControllerConfig" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.776577034+00:00 stderr F I1212 16:16:47.751820 1 reflector.go:430] "Caches populated" type="*v1.MachineConfigPool" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.777103847+00:00 stderr F I1212 16:16:47.777072 1 reflector.go:430] "Caches populated" type="*v1alpha1.ImageContentSourcePolicy" reflector="github.com/openshift/client-go/operator/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.777273551+00:00 stderr F I1212 16:16:47.777245 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:47.777538288+00:00 stderr F I1212 16:16:47.777518 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:47.793459447+00:00 stderr F I1212 16:16:47.793389 1 reflector.go:430] "Caches populated" type="*v1.MachineConfiguration" reflector="github.com/openshift/client-go/operator/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.793846866+00:00 stderr F I1212 16:16:47.793815 1 template_controller.go:146] Re-syncing ControllerConfig due to secret pull-secret change 2025-12-12T16:16:47.794436140+00:00 stderr F I1212 16:16:47.794406 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.811811835+00:00 stderr F I1212 16:16:47.797122 1 reflector.go:430] "Caches populated" type="*v1.ImageDigestMirrorSet" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.811811835+00:00 stderr F I1212 16:16:47.797793 1 reflector.go:430] "Caches populated" type="*v1.APIServer" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.811811835+00:00 stderr F I1212 16:16:47.797971 1 kubelet_config_controller.go:221] Re-syncing all kubelet config controller generated MachineConfigs due to apiServer cluster change 2025-12-12T16:16:47.811811835+00:00 stderr F I1212 16:16:47.798201 1 template_controller.go:198] Re-syncing ControllerConfig due to apiServer cluster change 2025-12-12T16:16:47.811811835+00:00 stderr F I1212 16:16:47.802871 1 reflector.go:430] "Caches populated" 
type="*v1.ContainerRuntimeConfig" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.813450235+00:00 stderr F I1212 16:16:47.813391 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:47.877835607+00:00 stderr F I1212 16:16:47.877713 1 reflector.go:430] "Caches populated" type="*v1.MachineOSConfig" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.878132274+00:00 stderr F I1212 16:16:47.878117 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.878270387+00:00 stderr F I1212 16:16:47.878255 1 reflector.go:430] "Caches populated" type="*v1.Scheduler" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.878766329+00:00 stderr F I1212 16:16:47.878724 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.879410615+00:00 stderr F I1212 16:16:47.879378 1 reflector.go:430] "Caches populated" type="*v1.ImageTagMirrorSet" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.879570269+00:00 stderr F I1212 16:16:47.879535 1 featuregates.go:112] FeatureGates initialized: enabled=[AdditionalRoutingCapabilities AdminNetworkPolicy AlibabaPlatform AzureWorkloadIdentity BuildCSIVolumes CPMSMachineNamePrefix ConsolePluginContentSecurityPolicy GatewayAPI GatewayAPIController HighlyAvailableArbiter ImageVolume IngressControllerLBSubnetsAWS KMSv1 MachineConfigNodes ManagedBootImages ManagedBootImagesAWS MetricsCollectionProfiles NetworkDiagnosticsConfig NetworkLiveMigration NetworkSegmentation NewOLM PinnedImages ProcMountType RouteAdvertisements RouteExternalCertificate ServiceAccountTokenNodeBinding SetEIPForNLBIngressController SigstoreImageVerification StoragePerformantSecurityPolicy UpgradeStatus UserNamespacesPodSecurityStandards UserNamespacesSupport VSphereMultiDisk VSphereMultiNetworks], disabled=[AWSClusterHostedDNS AWSClusterHostedDNSInstall AWSDedicatedHosts AWSServiceLBNetworkSecurityGroup AutomatedEtcdBackup AzureClusterHostedDNSInstall AzureDedicatedHosts AzureMultiDisk BootImageSkewEnforcement BootcNodeManagement ClusterAPIInstall ClusterAPIInstallIBMCloud ClusterMonitoringConfig ClusterVersionOperatorConfiguration DNSNameResolver DualReplica DyanmicServiceEndpointIBMCloud DynamicResourceAllocation EtcdBackendQuota EventedPLEG Example Example2 ExternalOIDC ExternalOIDCWithUIDAndExtraClaimMappings ExternalSnapshotMetadata GCPClusterHostedDNS GCPClusterHostedDNSInstall GCPCustomAPIEndpoints GCPCustomAPIEndpointsInstall ImageModeStatusReporting ImageStreamImportMode IngressControllerDynamicConfigurationManager InsightsConfig InsightsConfigAPI InsightsOnDemandDataGather IrreconcilableMachineConfig KMSEncryptionProvider MachineAPIMigration MachineAPIOperatorDisableMachineHealthCheckController ManagedBootImagesAzure ManagedBootImagesvSphere MaxUnavailableStatefulSet MinimumKubeletVersion MixedCPUsAllocation MultiArchInstallAzure MultiDiskSetup MutatingAdmissionPolicy NewOLMCatalogdAPIV1Metas NewOLMOwnSingleNamespace NewOLMPreflightPermissionChecks NewOLMWebhookProviderOpenshiftServiceCA NoRegistryClusterOperations NodeSwap 
NutanixMultiSubnets OVNObservability OpenShiftPodSecurityAdmission PreconfiguredUDNAddresses SELinuxMount ShortCertRotation SignatureStores SigstoreImageVerificationPKI TranslateStreamCloseWebsocketRequests VSphereConfigurableMaxAllowedBlockVolumesPerNode VSphereHostVMGroupZonal VSphereMixedNodeEnv VolumeAttributesClass VolumeGroupSnapshot] 2025-12-12T16:16:47.879793664+00:00 stderr F I1212 16:16:47.879773 1 reflector.go:430] "Caches populated" type="*v1.Image" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.880008099+00:00 stderr F I1212 16:16:47.879988 1 reflector.go:430] "Caches populated" type="*v1.MachineOSBuild" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.881232779+00:00 stderr F I1212 16:16:47.881206 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:47.882614723+00:00 stderr F I1212 16:16:47.882534 1 event.go:377] Event(v1.ObjectReference{Kind:"Node", Namespace:"openshift-machine-config-operator", Name:"crc", UID:"23216ff3-032e-49af-af7e-1d23d5907b59", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'FeatureGatesInitialized' FeatureGates updated to featuregates.Features{Enabled:[]v1.FeatureGateName{"AdditionalRoutingCapabilities", "AdminNetworkPolicy", "AlibabaPlatform", "AzureWorkloadIdentity", "BuildCSIVolumes", "CPMSMachineNamePrefix", "ConsolePluginContentSecurityPolicy", "GatewayAPI", "GatewayAPIController", "HighlyAvailableArbiter", "ImageVolume", "IngressControllerLBSubnetsAWS", "KMSv1", "MachineConfigNodes", "ManagedBootImages", "ManagedBootImagesAWS", "MetricsCollectionProfiles", "NetworkDiagnosticsConfig", "NetworkLiveMigration", "NetworkSegmentation", "NewOLM", "PinnedImages", "ProcMountType", "RouteAdvertisements", "RouteExternalCertificate", "ServiceAccountTokenNodeBinding", "SetEIPForNLBIngressController", "SigstoreImageVerification", "StoragePerformantSecurityPolicy", "UpgradeStatus", "UserNamespacesPodSecurityStandards", "UserNamespacesSupport", "VSphereMultiDisk", "VSphereMultiNetworks"}, Disabled:[]v1.FeatureGateName{"AWSClusterHostedDNS", "AWSClusterHostedDNSInstall", "AWSDedicatedHosts", "AWSServiceLBNetworkSecurityGroup", "AutomatedEtcdBackup", "AzureClusterHostedDNSInstall", "AzureDedicatedHosts", "AzureMultiDisk", "BootImageSkewEnforcement", "BootcNodeManagement", "ClusterAPIInstall", "ClusterAPIInstallIBMCloud", "ClusterMonitoringConfig", "ClusterVersionOperatorConfiguration", "DNSNameResolver", "DualReplica", "DyanmicServiceEndpointIBMCloud", "DynamicResourceAllocation", "EtcdBackendQuota", "EventedPLEG", "Example", "Example2", "ExternalOIDC", "ExternalOIDCWithUIDAndExtraClaimMappings", "ExternalSnapshotMetadata", "GCPClusterHostedDNS", "GCPClusterHostedDNSInstall", "GCPCustomAPIEndpoints", "GCPCustomAPIEndpointsInstall", "ImageModeStatusReporting", "ImageStreamImportMode", "IngressControllerDynamicConfigurationManager", "InsightsConfig", "InsightsConfigAPI", "InsightsOnDemandDataGather", "IrreconcilableMachineConfig", "KMSEncryptionProvider", "MachineAPIMigration", "MachineAPIOperatorDisableMachineHealthCheckController", "ManagedBootImagesAzure", "ManagedBootImagesvSphere", "MaxUnavailableStatefulSet", "MinimumKubeletVersion", "MixedCPUsAllocation", "MultiArchInstallAzure", "MultiDiskSetup", "MutatingAdmissionPolicy", "NewOLMCatalogdAPIV1Metas", "NewOLMOwnSingleNamespace", "NewOLMPreflightPermissionChecks", 
"NewOLMWebhookProviderOpenshiftServiceCA", "NoRegistryClusterOperations", "NodeSwap", "NutanixMultiSubnets", "OVNObservability", "OpenShiftPodSecurityAdmission", "PreconfiguredUDNAddresses", "SELinuxMount", "ShortCertRotation", "SignatureStores", "SigstoreImageVerificationPKI", "TranslateStreamCloseWebsocketRequests", "VSphereConfigurableMaxAllowedBlockVolumesPerNode", "VSphereHostVMGroupZonal", "VSphereMixedNodeEnv", "VolumeAttributesClass", "VolumeGroupSnapshot"}} 2025-12-12T16:16:47.904105778+00:00 stderr F I1212 16:16:47.886642 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.904105778+00:00 stderr F I1212 16:16:47.889457 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:47.904105778+00:00 stderr F I1212 16:16:47.893660 1 reflector.go:430] "Caches populated" type="*v1.PinnedImageSet" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.904105778+00:00 stderr F I1212 16:16:47.904059 1 certrotation_controller.go:192] Starting machineconfigcontroller-certrotationcontroller 2025-12-12T16:16:47.904105778+00:00 stderr F I1212 16:16:47.904074 1 certrotation_controller.go:180] Waiting for machineconfigcontroller-certrotationcontroller 2025-12-12T16:16:47.904105778+00:00 stderr F I1212 16:16:47.904084 1 certrotation_controller.go:188] Finished waiting for machineconfigcontroller-certrotationcontroller 2025-12-12T16:16:47.904105778+00:00 stderr F I1212 16:16:47.904088 1 certrotation_controller.go:198] No cert rotators needed, shutting down 2025-12-12T16:16:47.904105778+00:00 stderr F I1212 16:16:47.904092 1 certrotation_controller.go:199] Shutting down machineconfigcontroller-certrotationcontroller 2025-12-12T16:16:47.904151459+00:00 stderr F I1212 16:16:47.904111 1 kubelet_config_controller.go:200] Starting MachineConfigController-KubeletConfigController 2025-12-12T16:16:47.904340334+00:00 stderr F I1212 16:16:47.904327 1 container_runtime_config_controller.go:234] addded image policy observers with sigstore featuregate enabled 2025-12-12T16:16:47.904410985+00:00 stderr F I1212 16:16:47.904400 1 drain_controller.go:178] Starting MachineConfigController-DrainController 2025-12-12T16:16:47.908826513+00:00 stderr F I1212 16:16:47.908789 1 reflector.go:430] "Caches populated" type="*v1.ClusterImagePolicy" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.974363893+00:00 stderr F I1212 16:16:47.974297 1 reflector.go:430] "Caches populated" type="*v1.ImagePolicy" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:16:47.994699780+00:00 stderr F I1212 16:16:47.994621 1 pinned_image_set.go:115] Starting MachineConfigController-PinnedImageSetController 2025-12-12T16:16:47.999801014+00:00 stderr F I1212 16:16:47.997437 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:16:48.005113794+00:00 stderr F I1212 16:16:48.005054 1 container_runtime_config_controller.go:244] Starting MachineConfigController-ContainerRuntimeConfigController 2025-12-12T16:16:48.013706244+00:00 stderr F I1212 16:16:48.012887 1 reflector.go:430] "Caches populated" type="*v1.MachineConfig" 
reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:16:48.104968842+00:00 stderr F I1212 16:16:48.104789 1 template_controller.go:294] Starting MachineConfigController-TemplateController 2025-12-12T16:16:48.105133786+00:00 stderr F I1212 16:16:48.105097 1 node_controller.go:264] Starting MachineConfigController-NodeController 2025-12-12T16:16:48.105214778+00:00 stderr F I1212 16:16:48.105201 1 render_controller.go:155] Starting MachineConfigController-RenderController 2025-12-12T16:16:48.726717921+00:00 stderr F I1212 16:16:48.726656 1 kubelet_config_features.go:125] Applied FeatureSet cluster on MachineConfigPool master 2025-12-12T16:16:48.763996182+00:00 stderr F I1212 16:16:48.763944 1 kubelet_config_nodes.go:162] Applied Node configuration 97-master-generated-kubelet on MachineConfigPool master 2025-12-12T16:16:49.143310172+00:00 stderr F I1212 16:16:49.143233 1 kubelet_config_nodes.go:162] Applied Node configuration 97-worker-generated-kubelet on MachineConfigPool worker 2025-12-12T16:16:49.152244901+00:00 stderr F I1212 16:16:49.149443 1 kubelet_config_features.go:125] Applied FeatureSet cluster on MachineConfigPool worker 2025-12-12T16:16:50.116645465+00:00 stderr F I1212 16:16:50.113335 1 kubelet_config_features.go:125] Applied FeatureSet cluster on MachineConfigPool master 2025-12-12T16:16:50.763240581+00:00 stderr F I1212 16:16:50.762800 1 kubelet_config_features.go:125] Applied FeatureSet cluster on MachineConfigPool worker 2025-12-12T16:16:52.765889105+00:00 stderr F I1212 16:16:52.765035 1 status.go:325] Degraded Machine: crc and Degraded Reason: unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file "/var/lib/kubelet/config.json" 2025-12-12T16:16:52.946948394+00:00 stderr F W1212 16:16:52.946879 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-d582710c680b4cd4536e11249c7e09e9 is set using a tag instead of a digest. It is highly recommended to use a digest 2025-12-12T16:16:53.088845618+00:00 stderr F E1212 16:16:53.088758 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:16:53.097902840+00:00 stderr F E1212 16:16:53.097824 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:16:53.097902840+00:00 stderr F I1212 16:16:53.097864 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:16:53.185274003+00:00 stderr F W1212 16:16:53.181070 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-d582710c680b4cd4536e11249c7e09e9 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:16:57.782577772+00:00 stderr F I1212 16:16:57.782371 1 status.go:325] Degraded Machine: crc and Degraded Reason: unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file "/var/lib/kubelet/config.json" 2025-12-12T16:17:45.816379040+00:00 stderr F I1212 16:17:45.814583 1 kubelet_config_controller.go:221] Re-syncing all kubelet config controller generated MachineConfigs due to apiServer cluster change 2025-12-12T16:17:45.816379040+00:00 stderr F I1212 16:17:45.814631 1 template_controller.go:198] Re-syncing ControllerConfig due to apiServer cluster change 2025-12-12T16:17:46.062768551+00:00 stderr F I1212 16:17:46.062707 1 kubelet_config_nodes.go:162] Applied Node configuration 97-master-generated-kubelet on MachineConfigPool master 2025-12-12T16:17:46.338924699+00:00 stderr F I1212 16:17:46.337562 1 kubelet_config_nodes.go:162] Applied Node configuration 97-worker-generated-kubelet on MachineConfigPool worker 2025-12-12T16:17:46.501271452+00:00 stderr F I1212 16:17:46.500170 1 kubelet_config_features.go:125] Applied FeatureSet cluster on MachineConfigPool master 2025-12-12T16:17:46.825480397+00:00 stderr F I1212 16:17:46.825406 1 kubelet_config_features.go:125] Applied FeatureSet cluster on MachineConfigPool worker 2025-12-12T16:18:47.655377817+00:00 stderr F E1212 16:18:47.655283 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-machine-config-operator/leases/machine-config-controller": dial tcp 10.217.4.1:443: connect: connection refused, falling back to slow path 2025-12-12T16:18:47.656158556+00:00 stderr F E1212 16:18:47.656110 1 leaderelection.go:436] error retrieving resource lock openshift-machine-config-operator/machine-config-controller: Get "https://10.217.4.1:443/apis/coordination.k8s.io/v1/namespaces/openshift-machine-config-operator/leases/machine-config-controller": dial tcp 10.217.4.1:443: connect: connection refused 2025-12-12T16:19:39.152234815+00:00 stderr F I1212 16:19:39.151686 1 reflector.go:430] "Caches populated" type="*v1.MachineConfig" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:19:39.631434927+00:00 stderr F I1212 16:19:39.631377 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:42.133611451+00:00 stderr F I1212 16:19:42.133527 1 reflector.go:430] "Caches populated" type="*v1alpha1.ImageContentSourcePolicy" reflector="github.com/openshift/client-go/operator/informers/externalversions/factory.go:125" 2025-12-12T16:19:42.732879697+00:00 stderr F I1212 16:19:42.732803 1 reflector.go:430] "Caches populated" type="*v1.Infrastructure" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:19:44.212919908+00:00 stderr F W1212 16:19:44.212396 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:19:44.266133644+00:00 stderr F I1212 16:19:44.266032 1 render_controller.go:584] Generated machineconfig rendered-worker-3dcc16ccd3a3eea0254ec40e36d4bfbe from 7 configs: [{MachineConfig 00-worker machineconfiguration.openshift.io/v1 } {MachineConfig 01-worker-container-runtime machineconfiguration.openshift.io/v1 } {MachineConfig 01-worker-kubelet machineconfiguration.openshift.io/v1 } {MachineConfig 97-worker-generated-kubelet machineconfiguration.openshift.io/v1 } {MachineConfig 98-worker-generated-kubelet machineconfiguration.openshift.io/v1 } {MachineConfig 99-worker-generated-registries machineconfiguration.openshift.io/v1 } {MachineConfig 99-worker-ssh machineconfiguration.openshift.io/v1 }] 2025-12-12T16:19:44.267077678+00:00 stderr F I1212 16:19:44.267015 1 event.go:377] Event(v1.ObjectReference{Kind:"MachineConfigPool", Namespace:"openshift-machine-config-operator", Name:"worker", UID:"633fcfae-03e0-4a3a-8d5c-de9a658e82f6", APIVersion:"machineconfiguration.openshift.io/v1", ResourceVersion:"32065", FieldPath:""}): type: 'Normal' reason: 'RenderedConfigGenerated' rendered-worker-3dcc16ccd3a3eea0254ec40e36d4bfbe successfully generated (release version: 4.20.1, controller version: f587a1bfbaba518cc1d49ad6300e29eeb9c38cec) 2025-12-12T16:19:44.271313805+00:00 stderr F I1212 16:19:44.271154 1 render_controller.go:584] Generated machineconfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 from 11 configs: [{MachineConfig 00-master machineconfiguration.openshift.io/v1 } {MachineConfig 01-master-container-runtime machineconfiguration.openshift.io/v1 } {MachineConfig 01-master-kubelet machineconfiguration.openshift.io/v1 } {MachineConfig 97-master-generated-kubelet machineconfiguration.openshift.io/v1 } {MachineConfig 98-master-generated-kubelet machineconfiguration.openshift.io/v1 } {MachineConfig 99-master-generated-registries machineconfiguration.openshift.io/v1 } {MachineConfig 99-master-ssh machineconfiguration.openshift.io/v1 } {MachineConfig 99-node-sizing-for-crc machineconfiguration.openshift.io/v1 } {MachineConfig 99-openshift-machineconfig-master-console machineconfiguration.openshift.io/v1 } {MachineConfig 99-openshift-machineconfig-master-dummy-networks machineconfiguration.openshift.io/v1 } {MachineConfig custom-image machineconfiguration.openshift.io/v1 }] 2025-12-12T16:19:44.271551360+00:00 stderr F I1212 16:19:44.271521 1 event.go:377] Event(v1.ObjectReference{Kind:"MachineConfig", Namespace:"openshift-machine-config-operator", Name:"rendered-master-842a93c7bb3e86c26c29ba8a7f596b70", UID:"", APIVersion:"machineconfiguration.openshift.io/v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'OSImageURLOverridden' OSImageURL was overridden via machineconfig in rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 (was: is: image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest) 2025-12-12T16:19:44.271588621+00:00 stderr F I1212 16:19:44.271572 1 event.go:377] Event(v1.ObjectReference{Kind:"MachineConfigPool", Namespace:"openshift-machine-config-operator", Name:"master", UID:"3b9df6d6-bacd-4862-b99f-10ec7fcf29ac", APIVersion:"machineconfiguration.openshift.io/v1", ResourceVersion:"37700", FieldPath:""}): type: 'Normal' reason: 'RenderedConfigGenerated' rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 successfully generated (release version: 4.20.1, controller version: f587a1bfbaba518cc1d49ad6300e29eeb9c38cec) 2025-12-12T16:19:44.279266864+00:00 stderr F I1212 16:19:44.279227 
1 render_controller.go:610] Pool worker: now targeting: rendered-worker-3dcc16ccd3a3eea0254ec40e36d4bfbe 2025-12-12T16:19:44.281267114+00:00 stderr F I1212 16:19:44.281219 1 render_controller.go:610] Pool master: now targeting: rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 2025-12-12T16:19:49.339845234+00:00 stderr F W1212 16:19:49.339746 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. It is highly recommended to use a digest 2025-12-12T16:19:49.362878782+00:00 stderr F E1212 16:19:49.362759 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.367013216+00:00 stderr F E1212 16:19:49.366925 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.367013216+00:00 stderr F I1212 16:19:49.366983 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.380439453+00:00 stderr F E1212 16:19:49.380333 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.384871674+00:00 stderr F E1212 16:19:49.384809 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.384898965+00:00 stderr F I1212 16:19:49.384859 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.452471152+00:00 stderr F W1212 16:19:49.452388 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:19:49.465651323+00:00 stderr F E1212 16:19:49.465596 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.470982917+00:00 stderr F E1212 16:19:49.470928 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.471060428+00:00 stderr F I1212 16:19:49.471047 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.490537847+00:00 stderr F E1212 16:19:49.490470 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.496726553+00:00 stderr F E1212 16:19:49.496671 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.496726553+00:00 stderr F I1212 16:19:49.496706 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.571975112+00:00 stderr F W1212 16:19:49.571899 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:19:49.577923412+00:00 stderr F E1212 16:19:49.577873 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.581764588+00:00 stderr F E1212 16:19:49.581711 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.581764588+00:00 stderr F I1212 16:19:49.581732 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.606625282+00:00 stderr F E1212 16:19:49.606522 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.764126197+00:00 stderr F E1212 16:19:49.764041 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.764126197+00:00 stderr F I1212 16:19:49.764075 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:49.837277843+00:00 stderr F W1212 16:19:49.837141 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:19:49.965141773+00:00 stderr F E1212 16:19:49.965059 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:50.165263469+00:00 stderr F E1212 16:19:50.165200 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:50.367823355+00:00 stderr F E1212 16:19:50.367727 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:50.367823355+00:00 stderr F I1212 16:19:50.367759 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:50.565933949+00:00 stderr F E1212 16:19:50.565836 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:50.565933949+00:00 stderr F I1212 16:19:50.565866 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:50.657698443+00:00 stderr F W1212 16:19:50.657625 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:19:50.766108005+00:00 stderr F E1212 16:19:50.764481 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:50.964717121+00:00 stderr F E1212 16:19:50.964597 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:51.164732063+00:00 stderr F E1212 16:19:51.164657 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:51.164732063+00:00 stderr F I1212 16:19:51.164690 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:51.363830722+00:00 stderr F E1212 16:19:51.363751 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:51.363830722+00:00 stderr F I1212 16:19:51.363786 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:51.500159965+00:00 stderr F W1212 16:19:51.500090 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:19:51.563687980+00:00 stderr F E1212 16:19:51.563625 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:51.765996530+00:00 stderr F E1212 16:19:51.765917 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:51.964880952+00:00 stderr F E1212 16:19:51.964802 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:51.964921903+00:00 stderr F I1212 16:19:51.964879 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:52.164399312+00:00 stderr F E1212 16:19:52.163916 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:52.164399312+00:00 stderr F I1212 16:19:52.163950 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:52.366093826+00:00 stderr F E1212 16:19:52.365988 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:52.382645662+00:00 stderr F W1212 16:19:52.382550 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:19:52.564140269+00:00 stderr F E1212 16:19:52.564073 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:52.564140269+00:00 stderr F I1212 16:19:52.564104 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:52.768042688+00:00 stderr F E1212 16:19:52.767963 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:52.852460568+00:00 stderr F I1212 16:19:52.852367 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:52.980412890+00:00 stderr F E1212 16:19:52.979935 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:52.980412890+00:00 stderr F I1212 16:19:52.979972 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:53.166606706+00:00 stderr F E1212 16:19:53.166543 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:53.351744154+00:00 stderr F W1212 16:19:53.351664 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:19:53.364281219+00:00 stderr F E1212 16:19:53.364202 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:53.364281219+00:00 stderr F I1212 16:19:53.364241 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:53.566162168+00:00 stderr F E1212 16:19:53.566103 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:53.766767464+00:00 stderr F E1212 16:19:53.766680 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:53.766767464+00:00 stderr F I1212 16:19:53.766707 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:54.074817389+00:00 stderr F E1212 16:19:54.074737 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:54.164622034+00:00 stderr F E1212 16:19:54.164537 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:54.164622034+00:00 stderr F I1212 16:19:54.164571 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:54.476721960+00:00 stderr F W1212 16:19:54.476628 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:19:54.515977786+00:00 stderr F E1212 16:19:54.515916 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:54.565296614+00:00 stderr F E1212 16:19:54.565233 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:54.565296614+00:00 stderr F I1212 16:19:54.565268 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:55.048209959+00:00 stderr F I1212 16:19:55.047843 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:55.537028011+00:00 stderr F E1212 16:19:55.536568 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:55.545040683+00:00 stderr F E1212 16:19:55.544996 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:55.545108684+00:00 stderr F I1212 16:19:55.545097 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:55.906368605+00:00 stderr F W1212 16:19:55.906262 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:19:55.950936154+00:00 stderr F E1212 16:19:55.950311 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:55.957600811+00:00 stderr F E1212 16:19:55.957552 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:55.957600811+00:00 stderr F I1212 16:19:55.957577 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:57.564085127+00:00 stderr F I1212 16:19:57.564006 1 reflector.go:430] "Caches populated" type="*v1.KubeletConfig" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:19:58.191073230+00:00 stderr F E1212 16:19:58.190917 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:58.196503286+00:00 stderr F E1212 16:19:58.196440 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:58.196503286+00:00 stderr F I1212 16:19:58.196464 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:58.567612784+00:00 stderr F W1212 16:19:58.567518 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:19:58.600986662+00:00 stderr F E1212 16:19:58.600913 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:58.606356127+00:00 stderr F E1212 16:19:58.606282 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:58.606356127+00:00 stderr F I1212 16:19:58.606306 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:19:59.807121305+00:00 stderr F I1212 16:19:59.807045 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:02.130743676+00:00 stderr F I1212 16:20:02.130666 1 reflector.go:430] "Caches populated" type="*v1.ImageDigestMirrorSet" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:03.053574356+00:00 stderr F I1212 16:20:03.053498 1 reflector.go:430] "Caches populated" type="*v1.PinnedImageSet" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:20:03.401453660+00:00 stderr F E1212 16:20:03.401381 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:03.405526963+00:00 stderr F E1212 16:20:03.405500 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:03.405574544+00:00 stderr F I1212 16:20:03.405561 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:03.788264803+00:00 stderr F W1212 16:20:03.788072 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:20:03.827615091+00:00 stderr F E1212 16:20:03.827558 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:03.833836897+00:00 stderr F E1212 16:20:03.833780 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:03.833901179+00:00 stderr F I1212 16:20:03.833889 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:07.482530358+00:00 stderr F I1212 16:20:07.482453 1 reflector.go:430] "Caches populated" type="*v1.MachineConfiguration" reflector="github.com/openshift/client-go/operator/informers/externalversions/factory.go:125" 2025-12-12T16:20:07.760473677+00:00 stderr F I1212 16:20:07.760359 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:07.760675142+00:00 stderr F I1212 16:20:07.760609 1 template_controller.go:146] Re-syncing ControllerConfig due to secret pull-secret change 2025-12-12T16:20:09.841025135+00:00 stderr F I1212 16:20:09.840978 1 reflector.go:430] "Caches populated" type="*v1.ImageTagMirrorSet" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:11.139994600+00:00 stderr F I1212 16:20:11.139900 1 reflector.go:430] "Caches populated" type="*v1.MachineOSConfig" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:20:12.673725839+00:00 stderr F I1212 16:20:12.673111 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:13.723609359+00:00 stderr F E1212 16:20:13.723277 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:13.727872536+00:00 stderr F E1212 16:20:13.727826 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:13.727872536+00:00 stderr F I1212 16:20:13.727850 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:14.126038583+00:00 stderr F W1212 16:20:14.125948 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:20:14.165997756+00:00 stderr F E1212 16:20:14.165919 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:14.171235738+00:00 stderr F E1212 16:20:14.171151 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:14.171235738+00:00 stderr F I1212 16:20:14.171202 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:14.634452968+00:00 stderr F I1212 16:20:14.634379 1 reflector.go:430] "Caches populated" type="*v1.Scheduler" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:16.680884241+00:00 stderr F I1212 16:20:16.680769 1 reflector.go:430] "Caches populated" type="*v1.FeatureGate" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:16.799078378+00:00 stderr F I1212 16:20:16.798971 1 reflector.go:430] "Caches populated" type="*v1.ControllerConfig" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:20:17.936902386+00:00 stderr F I1212 16:20:17.936832 1 reflector.go:430] "Caches populated" type="*v1.MachineOSBuild" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:20:20.762814909+00:00 stderr F I1212 16:20:20.762197 1 reflector.go:430] "Caches populated" type="*v1.ClusterVersion" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:26.209154875+00:00 stderr F I1212 16:20:26.209052 1 reflector.go:430] "Caches populated" type="*v1.APIServer" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:27.115411480+00:00 stderr F I1212 16:20:27.115332 1 reflector.go:430] "Caches populated" type="*v1.MachineConfigPool" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:20:27.499602966+00:00 stderr F I1212 16:20:27.499270 1 reflector.go:430] "Caches populated" type="*v1.ImagePolicy" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:29.743719628+00:00 stderr F I1212 16:20:29.743640 1 reflector.go:430] "Caches populated" type="*v1.Image" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:31.916556430+00:00 stderr F I1212 16:20:31.916483 1 reflector.go:430] "Caches populated" type="*v1.ClusterImagePolicy" reflector="github.com/openshift/client-go/config/informers/externalversions/factory.go:125" 2025-12-12T16:20:32.116910015+00:00 stderr F I1212 16:20:32.116849 1 status.go:273] Pool worker: All nodes are updated with MachineConfig rendered-worker-3dcc16ccd3a3eea0254ec40e36d4bfbe 2025-12-12T16:20:32.126917988+00:00 stderr F I1212 16:20:32.126720 1 event.go:377] 
Event(v1.ObjectReference{Kind:"MachineConfigPool", Namespace:"openshift-machine-config-operator", Name:"worker", UID:"633fcfae-03e0-4a3a-8d5c-de9a658e82f6", APIVersion:"machineconfiguration.openshift.io/v1", ResourceVersion:"39598", FieldPath:""}): type: 'Normal' reason: 'Completed' Pool worker has completed update to MachineConfig rendered-worker-3dcc16ccd3a3eea0254ec40e36d4bfbe 2025-12-12T16:20:32.129960608+00:00 stderr F E1212 16:20:32.129923 1 pinned_image_set.go:350] Error syncing pinned image sets: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:32.136037957+00:00 stderr F E1212 16:20:32.135991 1 pinned_image_set.go:373] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:32.136037957+00:00 stderr F I1212 16:20:32.136022 1 pinned_image_set.go:299] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:32.154121811+00:00 stderr F I1212 16:20:32.152861 1 node_controller.go:676] Pool master: node crc: changed taints 2025-12-12T16:20:32.154121811+00:00 stderr F I1212 16:20:32.153116 1 status.go:325] Degraded Machine: crc and Degraded Reason: unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file "/var/lib/kubelet/config.json" 2025-12-12T16:20:32.205159440+00:00 stderr F W1212 16:20:32.205075 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:20:32.269545499+00:00 stderr F E1212 16:20:32.269471 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:32.276884641+00:00 stderr F E1212 16:20:32.276824 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:32.276884641+00:00 stderr F I1212 16:20:32.276851 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:32.281758489+00:00 stderr F E1212 16:20:32.281687 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:32.286110773+00:00 stderr F E1212 16:20:32.286028 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:32.286154114+00:00 stderr F I1212 16:20:32.286113 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:36.144122668+00:00 stderr F I1212 16:20:36.144060 1 reflector.go:430] "Caches populated" type="*v1.ContainerRuntimeConfig" reflector="github.com/openshift/client-go/machineconfiguration/informers/externalversions/factory.go:125" 2025-12-12T16:20:37.163587048+00:00 stderr F I1212 16:20:37.161735 1 status.go:325] Degraded Machine: crc and Degraded Reason: unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file "/var/lib/kubelet/config.json" 2025-12-12T16:20:37.230220976+00:00 stderr F W1212 16:20:37.230035 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. It is highly recommended to use a digest 2025-12-12T16:20:41.981667736+00:00 stderr F I1212 16:20:41.981459 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:20:43.324776024+00:00 stderr F I1212 16:20:43.324708 1 node_controller.go:1120] Pool master is paused and will not update. 
2025-12-12T16:20:43.338327870+00:00 stderr F I1212 16:20:43.338043 1 status.go:325] Degraded Machine: crc and Degraded Reason: unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file "/var/lib/kubelet/config.json" 2025-12-12T16:20:43.407189436+00:00 stderr F W1212 16:20:43.405135 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. It is highly recommended to use a digest 2025-12-12T16:20:43.487604455+00:00 stderr F E1212 16:20:43.486374 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:43.492283528+00:00 stderr F E1212 16:20:43.492052 1 render_controller.go:497] Error updating MachineConfigPool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:43.492283528+00:00 stderr F I1212 16:20:43.492089 1 render_controller.go:408] Error syncing machineconfigpool master: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "master": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:43.549750195+00:00 stderr F W1212 16:20:43.549519 1 render_controller.go:673] OSImageURL "image-registry.openshift-image-registry.svc:5000/openshift-machine-config-operator/rhcos:latest" for MachineConfig rendered-master-842a93c7bb3e86c26c29ba8a7f596b70 is set using a tag instead of a digest. 
It is highly recommended to use a digest 2025-12-12T16:20:43.995527118+00:00 stderr F E1212 16:20:43.995462 1 render_controller.go:475] Error syncing Generated MCFG: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:44.001191727+00:00 stderr F E1212 16:20:44.001138 1 render_controller.go:497] Error updating MachineConfigPool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:44.001191727+00:00 stderr F I1212 16:20:44.001156 1 render_controller.go:408] Error syncing machineconfigpool worker: Operation cannot be fulfilled on machineconfigpools.machineconfiguration.openshift.io "worker": the object has been modified; please apply your changes to the latest version and try again 2025-12-12T16:20:48.353150567+00:00 stderr F I1212 16:20:48.352552 1 status.go:325] Degraded Machine: crc and Degraded Reason: unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file "/var/lib/kubelet/config.json"
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9/kube-rbac-proxy/0.log
2025-12-12T16:16:47.863351443+00:00 stderr F W1212 16:16:47.862114 1 deprecated.go:66] 2025-12-12T16:16:47.863351443+00:00 stderr F ==== Removed Flag Warning ====================== 2025-12-12T16:16:47.863351443+00:00 stderr F 2025-12-12T16:16:47.863351443+00:00 stderr F logtostderr is removed in the k8s upstream and has no effect any more.
2025-12-12T16:16:47.863351443+00:00 stderr F 2025-12-12T16:16:47.863351443+00:00 stderr F =============================================== 2025-12-12T16:16:47.863351443+00:00 stderr F 2025-12-12T16:16:47.863351443+00:00 stderr F I1212 16:16:47.862318 1 kube-rbac-proxy.go:532] Reading config file: /etc/kube-rbac-proxy/config-file.yaml 2025-12-12T16:16:47.877889918+00:00 stderr F I1212 16:16:47.877798 1 kube-rbac-proxy.go:235] Valid token audiences: 2025-12-12T16:16:47.904196260+00:00 stderr F I1212 16:16:47.890950 1 kube-rbac-proxy.go:349] Reading certificate files 2025-12-12T16:16:47.904196260+00:00 stderr F I1212 16:16:47.893910 1 kube-rbac-proxy.go:397] Starting TCP socket on 0.0.0.0:9001 2025-12-12T16:16:47.904196260+00:00 stderr F I1212 16:16:47.894290 1 kube-rbac-proxy.go:404] Listening securely on 0.0.0.0:9001
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_image-registry-5d9d95bf5b-6md9w_b75bc011-274b-4fb1-8311-15ffa1b33366/registry/0.log
2025-12-12T16:26:41.118918323+00:00 stderr F time="2025-12-12T16:26:41.117436926Z" level=info msg="start registry" distribution_version=v3.0.0+unknown go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" openshift_version=4.20.0-202510211040.p2.g0c09647.assembly.stream.el9-0c09647 2025-12-12T16:26:41.118918323+00:00 stderr F time="2025-12-12T16:26:41.11842347Z" level=info msg="caching project quota objects with TTL 1m0s" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 2025-12-12T16:26:41.119621891+00:00 stderr F time="2025-12-12T16:26:41.11960143Z" level=info msg="redis not configured" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 2025-12-12T16:26:41.119820196+00:00 stderr F time="2025-12-12T16:26:41.119749034Z" level=info msg="Starting upload purge in 45m0s" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 2025-12-12T16:26:41.119829256+00:00 stderr F time="2025-12-12T16:26:41.119813756Z" level=info msg="using openshift blob descriptor cache" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 2025-12-12T16:26:41.119869867+00:00 stderr F time="2025-12-12T16:26:41.119845466Z" level=warning msg="Registry does not implement RepositoryRemover.
Will not be able to delete repos and tags" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 2025-12-12T16:26:41.120861582+00:00 stderr F time="2025-12-12T16:26:41.120825831Z" level=info msg="Using \"image-registry.openshift-image-registry.svc:5000\" as Docker Registry URL" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 2025-12-12T16:26:41.121049257+00:00 stderr F time="2025-12-12T16:26:41.120990565Z" level=info msg="listening on :5000, tls" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 2025-12-12T16:26:50.298594004+00:00 stderr F time="2025-12-12T16:26:50.297589608Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=911b8341-5ac7-4e54-9555-0037cafb7d76 http.request.method=GET http.request.remoteaddr="10.217.0.2:49336" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="92.682µs" http.response.status=200 http.response.written=0 2025-12-12T16:27:00.301997522+00:00 stderr F time="2025-12-12T16:27:00.301428367Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=8f07b973-70bd-4a88-aefa-3df6d2164b77 http.request.method=GET http.request.remoteaddr="10.217.0.2:54098" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="68.842µs" http.response.status=200 http.response.written=0 2025-12-12T16:27:02.970405615+00:00 stderr F time="2025-12-12T16:27:02.967656496Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=936033e5-5b21-4661-a95c-cfd18c8ccec5 http.request.method=GET http.request.remoteaddr="10.217.0.2:57942" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="70.922µs" http.response.status=200 http.response.written=0 2025-12-12T16:27:10.298787357+00:00 stderr F time="2025-12-12T16:27:10.297768641Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=e54a1f24-3094-4b9f-a43c-26bc4b06d771 http.request.method=GET http.request.remoteaddr="10.217.0.2:57956" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="55.442µs" http.response.status=200 http.response.written=0 2025-12-12T16:27:12.970459712+00:00 stderr F time="2025-12-12T16:27:12.969661472Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=66f8ebc9-325f-4a83-956a-6dd50dc571d8 http.request.method=GET http.request.remoteaddr="10.217.0.2:59464" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="55.231µs" http.response.status=200 http.response.written=0 2025-12-12T16:27:20.303851040+00:00 stderr F time="2025-12-12T16:27:20.302098285Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=66945b2a-850b-4a5f-9fe0-c8a56aa473ea http.request.method=GET http.request.remoteaddr="10.217.0.2:59476" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="86.062µs" http.response.status=200 http.response.written=0 2025-12-12T16:27:22.967141643+00:00 stderr F time="2025-12-12T16:27:22.96702148Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) 
X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=b39e923c-2632-4604-9c61-5f17a0684358 http.request.method=GET http.request.remoteaddr="10.217.0.2:46930" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="83.592µs" http.response.status=200 http.response.written=0 2025-12-12T16:27:30.299339791+00:00 stderr F time="2025-12-12T16:27:30.297618747Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=edf61be8-d7c9-4b08-a709-d509dce7f846 http.request.method=GET http.request.remoteaddr="10.217.0.2:46944" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="71.782µs" http.response.status=200 http.response.written=0 2025-12-12T16:27:32.972229368+00:00 stderr F time="2025-12-12T16:27:32.971527631Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=5997f33c-5bd6-4a50-898b-f71d9e20cecd http.request.method=GET http.request.remoteaddr="10.217.0.2:57416" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="40.491µs" http.response.status=200 http.response.written=0 2025-12-12T16:27:40.304388495+00:00 stderr F time="2025-12-12T16:27:40.297109261Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=f4c0138e-f12f-454e-bd43-c690d28e815f http.request.method=GET http.request.remoteaddr="10.217.0.2:57418" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="125.773µs" http.response.status=200 http.response.written=0 2025-12-12T16:27:42.966824509+00:00 stderr F time="2025-12-12T16:27:42.966687526Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=8aab2ff6-1542-4c8a-b21c-0a4908a1267f http.request.method=GET http.request.remoteaddr="10.217.0.2:58310" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="109.433µs" http.response.status=200 http.response.written=0 2025-12-12T16:27:50.295966031+00:00 stderr F time="2025-12-12T16:27:50.295336595Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=49b06589-c30f-4c4e-90be-1a3614e9200e http.request.method=GET http.request.remoteaddr="10.217.0.2:58314" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="57.312µs" http.response.status=200 http.response.written=0 2025-12-12T16:27:52.970147151+00:00 stderr F time="2025-12-12T16:27:52.970039638Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=2fd6165e-5583-4036-a7bd-a3e9c7d5ba61 http.request.method=GET http.request.remoteaddr="10.217.0.2:55520" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="58.681µs" http.response.status=200 http.response.written=0 2025-12-12T16:28:00.302565135+00:00 stderr F time="2025-12-12T16:28:00.301646262Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=030fab7e-bb99-43e0-85d3-f925bfee9ac4 http.request.method=GET http.request.remoteaddr="10.217.0.2:55528" http.request.uri=/healthz 
http.request.useragent=kube-probe/1.33 http.response.duration="55.941µs" http.response.status=200 http.response.written=0 2025-12-12T16:28:02.967655294+00:00 stderr F time="2025-12-12T16:28:02.966656119Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=5d0abeca-e6cf-43aa-9a1b-50a57c044651 http.request.method=GET http.request.remoteaddr="10.217.0.2:59312" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="97.003µs" http.response.status=200 http.response.written=0 2025-12-12T16:28:10.296308202+00:00 stderr F time="2025-12-12T16:28:10.295251295Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=9ae8e556-0d00-454c-b356-d4c570a1e2ce http.request.method=GET http.request.remoteaddr="10.217.0.2:59316" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="87.922µs" http.response.status=200 http.response.written=0 2025-12-12T16:28:12.968321127+00:00 stderr F time="2025-12-12T16:28:12.967709832Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=9f18bc7d-4337-4b30-9a15-658c3af3884f http.request.method=GET http.request.remoteaddr="10.217.0.2:41880" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="50.402µs" http.response.status=200 http.response.written=0 2025-12-12T16:28:20.295814195+00:00 stderr F time="2025-12-12T16:28:20.295192779Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=e870ddc0-2c50-4853-8439-710fa2e7dd4d http.request.method=GET http.request.remoteaddr="10.217.0.2:41882" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="67.322µs" http.response.status=200 http.response.written=0 2025-12-12T16:28:22.969250917+00:00 stderr F time="2025-12-12T16:28:22.96738593Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=fca5f5a2-35a9-4aee-bd69-4c0cd4a46f65 http.request.method=GET http.request.remoteaddr="10.217.0.2:52262" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="78.222µs" http.response.status=200 http.response.written=0 2025-12-12T16:28:30.305246111+00:00 stderr F time="2025-12-12T16:28:30.301012444Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=1eda039c-148a-4d82-a6bd-fffa38dfeafa http.request.method=GET http.request.remoteaddr="10.217.0.2:52270" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="92.423µs" http.response.status=200 http.response.written=0 2025-12-12T16:28:32.968538115+00:00 stderr F time="2025-12-12T16:28:32.968008712Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=ce805ee5-8c8f-46e4-b022-519ca4e5a678 http.request.method=GET http.request.remoteaddr="10.217.0.2:34736" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="54.802µs" http.response.status=200 http.response.written=0 2025-12-12T16:28:40.299283979+00:00 stderr F time="2025-12-12T16:28:40.295958785Z" level=info 
msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=255b8343-e117-4ea4-95dd-a6c5ff7d6764 http.request.method=GET http.request.remoteaddr="10.217.0.2:34740" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="48.212µs" http.response.status=200 http.response.written=0 2025-12-12T16:28:42.970572386+00:00 stderr F time="2025-12-12T16:28:42.969628002Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=267f1620-3f8e-4d58-a6a3-0ad1750ea7a3 http.request.method=GET http.request.remoteaddr="10.217.0.2:51086" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="104.273µs" http.response.status=200 http.response.written=0 2025-12-12T16:28:50.296225378+00:00 stderr F time="2025-12-12T16:28:50.295554921Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=7918db73-e192-409e-8dac-f0bf59c10bc7 http.request.method=GET http.request.remoteaddr="10.217.0.2:51094" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="73.302µs" http.response.status=200 http.response.written=0 2025-12-12T16:28:52.970978532+00:00 stderr F time="2025-12-12T16:28:52.970863859Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=11511675-4f28-44fa-bf89-8c250e7a409f http.request.method=GET http.request.remoteaddr="10.217.0.2:52450" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="53.952µs" http.response.status=200 http.response.written=0 2025-12-12T16:29:00.301265950+00:00 stderr F time="2025-12-12T16:29:00.300623594Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=d34b4546-4b77-4b04-b4d2-26741c5f1c3d http.request.method=GET http.request.remoteaddr="10.217.0.2:52464" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="39.331µs" http.response.status=200 http.response.written=0 2025-12-12T16:29:02.967003609+00:00 stderr F time="2025-12-12T16:29:02.966895386Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=10e0ee45-b5ad-48e4-a964-4a960c9b46d9 http.request.method=GET http.request.remoteaddr="10.217.0.2:40828" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="47.101µs" http.response.status=200 http.response.written=0 2025-12-12T16:29:10.297380597+00:00 stderr F time="2025-12-12T16:29:10.295683064Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=3a9d45d0-c68f-4843-97fd-4b2aae3c05a6 http.request.method=GET http.request.remoteaddr="10.217.0.2:40838" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="103.812µs" http.response.status=200 http.response.written=0 2025-12-12T16:29:12.966383235+00:00 stderr F time="2025-12-12T16:29:12.966255502Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=4c1831e3-a69d-42bd-9cfc-ca91ebc00a8b http.request.method=GET 
http.request.remoteaddr="10.217.0.2:57526" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="121.493µs" http.response.status=200 http.response.written=0 2025-12-12T16:29:20.298833061+00:00 stderr F time="2025-12-12T16:29:20.297779795Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=c00c4985-e555-4b10-ba69-bc67c3a9944b http.request.method=GET http.request.remoteaddr="10.217.0.2:57542" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="62.292µs" http.response.status=200 http.response.written=0 2025-12-12T16:29:22.968069334+00:00 stderr F time="2025-12-12T16:29:22.967329146Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=2608bf2e-d22b-4637-a775-422c90fd283b http.request.method=GET http.request.remoteaddr="10.217.0.2:52340" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="98.232µs" http.response.status=200 http.response.written=0 2025-12-12T16:29:24.030965001+00:00 stderr F time="2025-12-12T16:29:24.030603412Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=5d9683a0-9708-4186-9e4b-79eb68056806 http.request.method=GET http.request.remoteaddr="100.64.0.2:58336" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:29:24.031045473+00:00 stderr F time="2025-12-12T16:29:24.031010432Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=8bc42eec-00c6-4ea4-928b-81545b1d3276 http.request.method=GET http.request.remoteaddr="100.64.0.2:58336" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=1.407316ms http.response.status=401 http.response.written=87 2025-12-12T16:29:24.045758954+00:00 stderr F time="2025-12-12T16:29:24.045355634Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=5f780b8e-f9b8-420c-a3ca-5e25596d50c2 http.request.method=GET http.request.remoteaddr="100.64.0.2:58342" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=10.70911ms http.response.status=200 http.response.written=2893 2025-12-12T16:29:24.053605422+00:00 stderr F time="2025-12-12T16:29:24.053504519Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=3e77cb8c-12a1-4ccd-8c98-a7b2e58c84c2 http.request.method=GET http.request.remoteaddr="100.64.0.2:58342" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 
vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:29:24.062771153+00:00 stderr F time="2025-12-12T16:29:24.06265337Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=3e77cb8c-12a1-4ccd-8c98-a7b2e58c84c2 http.request.method=GET http.request.remoteaddr="100.64.0.2:58342" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=16.160407ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:29:24.062771153+00:00 stderr F time="2025-12-12T16:29:24.062727682Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=2136c68c-b8ea-41dc-816e-13598de1c4cf http.request.method=GET http.request.remoteaddr="100.64.0.2:58342" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=16.27604ms http.response.status=404 http.response.written=96 2025-12-12T16:29:24.068084387+00:00 stderr F time="2025-12-12T16:29:24.068037175Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=0aace1bc-8e3c-4a95-80f3-058462fb8253 http.request.method=GET http.request.remoteaddr="100.64.0.2:58352" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:29:24.068104037+00:00 stderr F time="2025-12-12T16:29:24.068068516Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=625697e7-6323-493e-9de1-c2e478a6b1da http.request.method=GET http.request.remoteaddr="100.64.0.2:58352" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="134.393µs" http.response.status=401 http.response.written=87 2025-12-12T16:29:24.075095903+00:00 stderr F time="2025-12-12T16:29:24.075039532Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=772e9f02-51fb-462f-a7c2-eed3b58f91c9 http.request.method=GET http.request.remoteaddr="100.64.0.2:58366" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=3.091508ms http.response.status=200 http.response.written=2893 2025-12-12T16:29:24.080205522+00:00 stderr F time="2025-12-12T16:29:24.08013016Z" level=info msg="authorized 
request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=fa7b9d24-6f35-435e-ab8d-b39ad28c5500 http.request.method=GET http.request.remoteaddr="100.64.0.2:58366" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:29:24.084456799+00:00 stderr F time="2025-12-12T16:29:24.084391018Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=fa7b9d24-6f35-435e-ab8d-b39ad28c5500 http.request.method=GET http.request.remoteaddr="100.64.0.2:58366" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=8.73443ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:29:24.084478240+00:00 stderr F time="2025-12-12T16:29:24.084448039Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=1de6cd78-3daf-4f64-8753-8cca5ac024da http.request.method=GET http.request.remoteaddr="100.64.0.2:58366" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=8.812032ms http.response.status=404 http.response.written=96 2025-12-12T16:29:29.242104087+00:00 stderr F time="2025-12-12T16:29:29.241565443Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=360bc511-3f3d-4f90-b4fe-b30a1a37d30e http.request.method=GET http.request.remoteaddr="100.64.0.2:58370" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:29:29.242139397+00:00 stderr F time="2025-12-12T16:29:29.242088746Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=dfbb4ddf-1630-42d3-b0a3-7b595ba88103 http.request.method=GET http.request.remoteaddr="100.64.0.2:58370" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="640.756µs" http.response.status=401 http.response.written=87 2025-12-12T16:29:29.250316654+00:00 stderr F time="2025-12-12T16:29:29.250211851Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" 
http.request.id=deabccd2-2e50-4bf0-930e-43a27e9c19e5 http.request.method=GET http.request.remoteaddr="100.64.0.2:58380" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=3.801086ms http.response.status=200 http.response.written=2893 2025-12-12T16:29:29.258057749+00:00 stderr F time="2025-12-12T16:29:29.257985797Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=a6e7f719-15d1-493b-9a6b-f0c58f3202ca http.request.method=GET http.request.remoteaddr="100.64.0.2:58380" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:29:29.263610219+00:00 stderr F time="2025-12-12T16:29:29.263502876Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=a6e7f719-15d1-493b-9a6b-f0c58f3202ca http.request.method=GET http.request.remoteaddr="100.64.0.2:58380" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=12.488745ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:29:29.263698051+00:00 stderr F time="2025-12-12T16:29:29.263628549Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=6a7f2581-2715-410e-8be1-37054e072ea6 http.request.method=GET http.request.remoteaddr="100.64.0.2:58380" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=12.640349ms http.response.status=404 http.response.written=96 2025-12-12T16:29:29.270187264+00:00 stderr F time="2025-12-12T16:29:29.270135463Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=16447217-dbea-4d63-b0b6-98c6e3716460 http.request.method=GET http.request.remoteaddr="100.64.0.2:58386" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:29:29.270234946+00:00 stderr F time="2025-12-12T16:29:29.270206615Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" 
http.request.id=9796383c-7f33-4b7d-96b4-866373731df7 http.request.method=GET http.request.remoteaddr="100.64.0.2:58386" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="209.835µs" http.response.status=401 http.response.written=87 2025-12-12T16:29:29.277551540+00:00 stderr F time="2025-12-12T16:29:29.277483048Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=7266c4d4-4a41-4bcb-87a2-b64ad2020194 http.request.method=GET http.request.remoteaddr="100.64.0.2:58394" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=3.473047ms http.response.status=200 http.response.written=2893 2025-12-12T16:29:29.283065399+00:00 stderr F time="2025-12-12T16:29:29.283009708Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=2a24b3e7-258d-4edf-9f74-453bf604e5cb http.request.method=GET http.request.remoteaddr="100.64.0.2:58394" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:29:29.287572883+00:00 stderr F time="2025-12-12T16:29:29.287499651Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=2a24b3e7-258d-4edf-9f74-453bf604e5cb http.request.method=GET http.request.remoteaddr="100.64.0.2:58394" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=9.461529ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:29:29.287593783+00:00 stderr F time="2025-12-12T16:29:29.287567223Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=8cee5b77-1892-41ce-a946-e7603c9aed66 http.request.method=GET http.request.remoteaddr="100.64.0.2:58394" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=9.555361ms http.response.status=404 http.response.written=96 2025-12-12T16:29:30.296476708+00:00 stderr F time="2025-12-12T16:29:30.295985166Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 
http.request.host="10.217.0.7:5000" http.request.id=46619c0c-a8fd-4b52-ab3b-51d16b7101a7 http.request.method=GET http.request.remoteaddr="10.217.0.2:52350" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="61.001µs" http.response.status=200 http.response.written=0 2025-12-12T16:29:32.966667434+00:00 stderr F time="2025-12-12T16:29:32.966305995Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=1c120b31-0368-47d4-8edb-f5c8266346d7 http.request.method=GET http.request.remoteaddr="10.217.0.2:41090" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="42.301µs" http.response.status=200 http.response.written=0 2025-12-12T16:29:40.296223557+00:00 stderr F time="2025-12-12T16:29:40.295393217Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=a43acbfc-5ad5-43e1-b194-df84813723fa http.request.method=GET http.request.remoteaddr="10.217.0.2:41098" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="68.281µs" http.response.status=200 http.response.written=0 2025-12-12T16:29:42.966647748+00:00 stderr F time="2025-12-12T16:29:42.966533425Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=9880449c-333d-4d69-9d15-905dcbdc1492 http.request.method=GET http.request.remoteaddr="10.217.0.2:51128" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="86.002µs" http.response.status=200 http.response.written=0 2025-12-12T16:29:46.386408364+00:00 stderr F time="2025-12-12T16:29:46.385554942Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=07b87dc3-fb57-433e-a0d4-33d6693630b7 http.request.method=GET http.request.remoteaddr="100.64.0.2:51310" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:29:46.386408364+00:00 stderr F time="2025-12-12T16:29:46.386385083Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=1554202f-2825-4a74-b456-b7eab4864216 http.request.method=GET http.request.remoteaddr="100.64.0.2:51310" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="975.775µs" http.response.status=401 http.response.written=87 2025-12-12T16:29:46.395674207+00:00 stderr F time="2025-12-12T16:29:46.395598445Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=9c3701f9-739e-4349-bbbd-b284d502fd4f http.request.method=GET http.request.remoteaddr="100.64.0.2:51326" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=5.16991ms http.response.status=200 http.response.written=2893 2025-12-12T16:29:46.403506275+00:00 stderr F 
time="2025-12-12T16:29:46.403436943Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=d39e4c30-6240-40d0-80c9-6398bd1d455d http.request.method=GET http.request.remoteaddr="100.64.0.2:51326" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:29:46.411153417+00:00 stderr F time="2025-12-12T16:29:46.411052535Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=d39e4c30-6240-40d0-80c9-6398bd1d455d http.request.method=GET http.request.remoteaddr="100.64.0.2:51326" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=14.745042ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:29:46.411205799+00:00 stderr F time="2025-12-12T16:29:46.411164038Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=3234ad97-b746-43f9-a740-1b7d984408a3 http.request.method=GET http.request.remoteaddr="100.64.0.2:51326" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=14.890155ms http.response.status=404 http.response.written=96 2025-12-12T16:29:46.416710598+00:00 stderr F time="2025-12-12T16:29:46.416646896Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=09b4098f-65cd-4cff-aaa9-c8dff2fb7192 http.request.method=GET http.request.remoteaddr="100.64.0.2:51338" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:29:46.416710598+00:00 stderr F time="2025-12-12T16:29:46.416693417Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=e598054d-c51e-459c-a197-63282290b602 http.request.method=GET http.request.remoteaddr="100.64.0.2:51338" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="176.714µs" http.response.status=401 http.response.written=87 2025-12-12T16:29:46.425971371+00:00 stderr F time="2025-12-12T16:29:46.425870988Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 
http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=1ceaf945-927e-49f7-84f5-f6c0357d4b8c http.request.method=GET http.request.remoteaddr="100.64.0.2:51344" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=4.143684ms http.response.status=200 http.response.written=2893 2025-12-12T16:29:46.433387398+00:00 stderr F time="2025-12-12T16:29:46.433206613Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=9aefc6ef-7cd8-4d60-97ce-25698d6ee11d http.request.method=GET http.request.remoteaddr="100.64.0.2:51344" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:29:46.438481026+00:00 stderr F time="2025-12-12T16:29:46.438411015Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=9aefc6ef-7cd8-4d60-97ce-25698d6ee11d http.request.method=GET http.request.remoteaddr="100.64.0.2:51344" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=11.843638ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:29:46.438517747+00:00 stderr F time="2025-12-12T16:29:46.438493617Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=577a6c6e-94da-4abe-b417-12436b6823fc http.request.method=GET http.request.remoteaddr="100.64.0.2:51344" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=11.981432ms http.response.status=404 http.response.written=96 2025-12-12T16:29:50.296763885+00:00 stderr F time="2025-12-12T16:29:50.296616911Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=8666c60f-0bd3-4232-b8c4-6b1ea2cb1f2f http.request.method=GET http.request.remoteaddr="10.217.0.2:51134" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="47.791µs" http.response.status=200 http.response.written=0 2025-12-12T16:29:52.967198729+00:00 stderr F time="2025-12-12T16:29:52.966602634Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 
http.request.host="10.217.0.7:5000" http.request.id=7e918308-04a7-46a6-a21c-5e4dfc5628cb http.request.method=GET http.request.remoteaddr="10.217.0.2:50910" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="87.872µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:00.296547706+00:00 stderr F time="2025-12-12T16:30:00.29592032Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=fd9bf637-ebe9-49e3-8c33-8dd3534642c1 http.request.method=GET http.request.remoteaddr="10.217.0.2:50926" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="70.052µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:02.967774479+00:00 stderr F time="2025-12-12T16:30:02.966981009Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=76349a4b-4b0a-460b-b910-9cd41fe21df6 http.request.method=GET http.request.remoteaddr="10.217.0.2:41742" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="50.472µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:09.379413260+00:00 stderr F time="2025-12-12T16:30:09.378284712Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=c8d72877-c558-457e-aff2-fd800402a400 http.request.method=GET http.request.remoteaddr="100.64.0.2:38310" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:30:09.379413260+00:00 stderr F time="2025-12-12T16:30:09.378805195Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=add41312-6722-4801-ad56-3b8abe875ffd http.request.method=GET http.request.remoteaddr="100.64.0.2:38310" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="690.648µs" http.response.status=401 http.response.written=87 2025-12-12T16:30:09.393500075+00:00 stderr F time="2025-12-12T16:30:09.391784832Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=d0864722-402c-4040-91ee-5eb8059b4560 http.request.method=GET http.request.remoteaddr="100.64.0.2:38324" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=6.508984ms http.response.status=200 http.response.written=2893 2025-12-12T16:30:09.399271431+00:00 stderr F time="2025-12-12T16:30:09.398596614Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=3b1979ae-e6f7-4d19-b82d-82c5f4106d7e http.request.method=GET http.request.remoteaddr="100.64.0.2:38324" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 
openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:30:09.406422371+00:00 stderr F time="2025-12-12T16:30:09.406354019Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=3b1979ae-e6f7-4d19-b82d-82c5f4106d7e http.request.method=GET http.request.remoteaddr="100.64.0.2:38324" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=13.296475ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:30:09.406511453+00:00 stderr F time="2025-12-12T16:30:09.406488483Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=9eea80ce-cef5-4d22-87c7-b4eaa58fedfe http.request.method=GET http.request.remoteaddr="100.64.0.2:38324" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=13.47278ms http.response.status=404 http.response.written=96 2025-12-12T16:30:09.411664453+00:00 stderr F time="2025-12-12T16:30:09.411605852Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=67d8af66-bc48-48f1-bf6d-937c7fb65cfe http.request.method=GET http.request.remoteaddr="100.64.0.2:38328" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:30:09.411697814+00:00 stderr F time="2025-12-12T16:30:09.411655093Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=13f97934-591d-4238-9ee8-61949a1656a0 http.request.method=GET http.request.remoteaddr="100.64.0.2:38328" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="175.405µs" http.response.status=401 http.response.written=87 2025-12-12T16:30:09.419254845+00:00 stderr F time="2025-12-12T16:30:09.419190503Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=933dcc68-534a-4715-b1f2-91a5f590dc7d http.request.method=GET http.request.remoteaddr="100.64.0.2:38336" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=2.79769ms 
http.response.status=200 http.response.written=2893 2025-12-12T16:30:09.424672121+00:00 stderr F time="2025-12-12T16:30:09.42462115Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=bae5c8af-bf32-47f6-9db9-9745027c6b16 http.request.method=GET http.request.remoteaddr="100.64.0.2:38336" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:30:09.428913728+00:00 stderr F time="2025-12-12T16:30:09.428862037Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=bae5c8af-bf32-47f6-9db9-9745027c6b16 http.request.method=GET http.request.remoteaddr="100.64.0.2:38336" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=9.226083ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:30:09.428982870+00:00 stderr F time="2025-12-12T16:30:09.428964789Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=c1ca01ea-dd0a-48e0-9d3b-19dfea355824 http.request.method=GET http.request.remoteaddr="100.64.0.2:38336" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=9.354206ms http.response.status=404 http.response.written=96 2025-12-12T16:30:10.297615199+00:00 stderr F time="2025-12-12T16:30:10.297497126Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=9bbf113e-6dbf-4313-8413-5cfc37c16a75 http.request.method=GET http.request.remoteaddr="10.217.0.2:41750" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="57.232µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:12.968172303+00:00 stderr F time="2025-12-12T16:30:12.967642219Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=7bc8d70e-5e6f-4285-8f28-f01733f20299 http.request.method=GET http.request.remoteaddr="10.217.0.2:37000" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="56.821µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:20.295341281+00:00 stderr F time="2025-12-12T16:30:20.295229119Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 
http.request.host="10.217.0.7:5000" http.request.id=b8558f7d-2bc1-45cc-87b1-e47299608a2e http.request.method=GET http.request.remoteaddr="10.217.0.2:37010" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="88.732µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:22.970539760+00:00 stderr F time="2025-12-12T16:30:22.969772161Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=d709eccc-5e73-470d-9b2b-3d23e1c848a0 http.request.method=GET http.request.remoteaddr="10.217.0.2:45294" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="91.642µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:30.305516924+00:00 stderr F time="2025-12-12T16:30:30.305018802Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=14fbdbe8-dc1b-40a3-bf67-8dbc37bbecc5 http.request.method=GET http.request.remoteaddr="10.217.0.2:45302" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="64.592µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:32.967565125+00:00 stderr F time="2025-12-12T16:30:32.966833607Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=1b992b0f-d7dc-4b19-b37e-925f74a94ef8 http.request.method=GET http.request.remoteaddr="10.217.0.2:49694" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="67.122µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:40.297680450+00:00 stderr F time="2025-12-12T16:30:40.297255839Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=521c1055-beed-4869-a4ca-8639459c3d91 http.request.method=GET http.request.remoteaddr="10.217.0.2:49696" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="143.804µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:42.967107475+00:00 stderr F time="2025-12-12T16:30:42.967004732Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=874c105d-7ba1-436d-a371-93e58bcb0ba6 http.request.method=GET http.request.remoteaddr="10.217.0.2:41206" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="36.951µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:50.299538864+00:00 stderr F time="2025-12-12T16:30:50.296459137Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=3f5500f5-9c40-4216-a8d6-94f78f5f38ff http.request.method=GET http.request.remoteaddr="10.217.0.2:41216" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="69.352µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:52.967213434+00:00 stderr F time="2025-12-12T16:30:52.9670376Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=ed488783-8559-4044-adea-9bfc53eed797 http.request.method=GET http.request.remoteaddr="10.217.0.2:48992" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 
http.response.duration="57.772µs" http.response.status=200 http.response.written=0 2025-12-12T16:30:53.386545264+00:00 stderr F time="2025-12-12T16:30:53.386455791Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=e0add9d6-082a-44df-ad93-67992c57f04e http.request.method=GET http.request.remoteaddr="100.64.0.2:47374" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:30:53.386611185+00:00 stderr F time="2025-12-12T16:30:53.386523283Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=df97f777-b56d-46cf-95d9-c7bc135349a3 http.request.method=GET http.request.remoteaddr="100.64.0.2:47374" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="252.566µs" http.response.status=401 http.response.written=87 2025-12-12T16:30:53.398610215+00:00 stderr F time="2025-12-12T16:30:53.398522143Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=1de894fb-89af-4941-b282-f3aec193209a http.request.method=GET http.request.remoteaddr="100.64.0.2:47390" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=6.997054ms http.response.status=200 http.response.written=2893 2025-12-12T16:30:53.406919602+00:00 stderr F time="2025-12-12T16:30:53.40681931Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=ded9a6be-d554-4455-bdfb-4c499ce8a664 http.request.method=GET http.request.remoteaddr="100.64.0.2:47390" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:30:53.415708272+00:00 stderr F time="2025-12-12T16:30:53.415573428Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=ded9a6be-d554-4455-bdfb-4c499ce8a664 http.request.method=GET http.request.remoteaddr="100.64.0.2:47390" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=16.43794ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 
2025-12-12T16:30:53.415708272+00:00 stderr F time="2025-12-12T16:30:53.41565803Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=d6b088de-4c42-40e1-b3ba-ba854e01b07c http.request.method=GET http.request.remoteaddr="100.64.0.2:47390" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=16.545623ms http.response.status=404 http.response.written=96 2025-12-12T16:30:53.421075326+00:00 stderr F time="2025-12-12T16:30:53.421004394Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=065ef3fd-6e18-4020-8e9d-9c1473d84005 http.request.method=GET http.request.remoteaddr="100.64.0.2:47394" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:30:53.421096216+00:00 stderr F time="2025-12-12T16:30:53.421069005Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=11f501ff-e5b2-448e-93ee-7f7ef1193d65 http.request.method=GET http.request.remoteaddr="100.64.0.2:47394" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="211.515µs" http.response.status=401 http.response.written=87 2025-12-12T16:30:53.428118511+00:00 stderr F time="2025-12-12T16:30:53.4280672Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=c3ecd6bf-de4e-44e0-b090-c223d40ecae5 http.request.method=GET http.request.remoteaddr="100.64.0.2:47404" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=2.80859ms http.response.status=200 http.response.written=2893 2025-12-12T16:30:53.432745277+00:00 stderr F time="2025-12-12T16:30:53.432685485Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=4268f88e-1fee-4029-b5ce-ba7ef29800c8 http.request.method=GET http.request.remoteaddr="100.64.0.2:47404" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:30:53.437013694+00:00 stderr F time="2025-12-12T16:30:53.436929481Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=4268f88e-1fee-4029-b5ce-ba7ef29800c8 
http.request.method=GET http.request.remoteaddr="100.64.0.2:47404" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=8.176444ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:30:53.437013694+00:00 stderr F time="2025-12-12T16:30:53.436981103Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=d87a179a-c5c1-4b76-a3d1-331f7422e1c4 http.request.method=GET http.request.remoteaddr="100.64.0.2:47404" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=8.252156ms http.response.status=404 http.response.written=96 2025-12-12T16:31:00.297235455+00:00 stderr F time="2025-12-12T16:31:00.296329722Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=e6cd4781-666f-444f-b46c-6587c38f85be http.request.method=GET http.request.remoteaddr="10.217.0.2:49002" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="51.641µs" http.response.status=200 http.response.written=0 2025-12-12T16:31:02.968518227+00:00 stderr F time="2025-12-12T16:31:02.968390034Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=02a24caa-b055-4818-ad1f-8eb986ade98d http.request.method=GET http.request.remoteaddr="10.217.0.2:56558" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="59.352µs" http.response.status=200 http.response.written=0 2025-12-12T16:31:10.298757326+00:00 stderr F time="2025-12-12T16:31:10.297833433Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=95fcd12c-e154-4dec-86d5-91dcccba64c3 http.request.method=GET http.request.remoteaddr="10.217.0.2:56562" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="60.661µs" http.response.status=200 http.response.written=0 2025-12-12T16:31:12.969410211+00:00 stderr F time="2025-12-12T16:31:12.968366175Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=58c9b073-ab6c-42ac-9e87-dc34c40174aa http.request.method=GET http.request.remoteaddr="10.217.0.2:36446" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="63.372µs" http.response.status=200 http.response.written=0 2025-12-12T16:31:20.298055035+00:00 stderr F time="2025-12-12T16:31:20.297097591Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=bb036894-dca7-4b9e-ab39-1d422ff4af28 http.request.method=GET http.request.remoteaddr="10.217.0.2:36456" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="79.612µs" 
http.response.status=200 http.response.written=0 2025-12-12T16:31:22.968255041+00:00 stderr F time="2025-12-12T16:31:22.968127057Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=2a6cd669-0a77-4832-919a-7a96fbba52fd http.request.method=GET http.request.remoteaddr="10.217.0.2:54848" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="44.131µs" http.response.status=200 http.response.written=0 2025-12-12T16:31:30.295795343+00:00 stderr F time="2025-12-12T16:31:30.294660545Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=71c25ada-63a2-4b3f-8497-39b98ac95273 http.request.method=GET http.request.remoteaddr="10.217.0.2:54864" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="51.711µs" http.response.status=200 http.response.written=0 2025-12-12T16:31:32.966310777+00:00 stderr F time="2025-12-12T16:31:32.966202795Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=7d7917a4-c3a8-424f-ac05-66eda6a52e6d http.request.method=GET http.request.remoteaddr="10.217.0.2:45700" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="67.061µs" http.response.status=200 http.response.written=0 2025-12-12T16:31:40.295885301+00:00 stderr F time="2025-12-12T16:31:40.295059131Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=aa64d898-d2ed-41f1-9565-4a1f31acf6f7 http.request.method=GET http.request.remoteaddr="10.217.0.2:45710" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="79.802µs" http.response.status=200 http.response.written=0 2025-12-12T16:31:42.968243082+00:00 stderr F time="2025-12-12T16:31:42.96735098Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=2ca3fcb4-a9b0-4a5e-85df-274c61a081ef http.request.method=GET http.request.remoteaddr="10.217.0.2:38758" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="49.131µs" http.response.status=200 http.response.written=0 2025-12-12T16:31:50.297428727+00:00 stderr F time="2025-12-12T16:31:50.29631919Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=c105e2f8-ece7-44a1-9517-e4e7f49b91b8 http.request.method=GET http.request.remoteaddr="10.217.0.2:38772" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="72.222µs" http.response.status=200 http.response.written=0 2025-12-12T16:31:52.967934941+00:00 stderr F time="2025-12-12T16:31:52.967822478Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=3f9b519f-24bf-418d-8a0c-673b0f3e5f31 http.request.method=GET http.request.remoteaddr="10.217.0.2:51862" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="54.401µs" http.response.status=200 http.response.written=0 2025-12-12T16:32:00.296286455+00:00 stderr F time="2025-12-12T16:32:00.295595038Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 
http.request.host="10.217.0.7:5000" http.request.id=99abb38c-b6bb-4fd4-a633-72c64fb2c5a9 http.request.method=GET http.request.remoteaddr="10.217.0.2:51866" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="55.951µs" http.response.status=200 http.response.written=0 2025-12-12T16:32:02.971288430+00:00 stderr F time="2025-12-12T16:32:02.96965118Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=5aeed673-c3f7-492b-b015-ecf4eda7a123 http.request.method=GET http.request.remoteaddr="10.217.0.2:43662" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="74.892µs" http.response.status=200 http.response.written=0 2025-12-12T16:32:10.296423145+00:00 stderr F time="2025-12-12T16:32:10.295365559Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=8d4758f7-21a2-4090-ab71-47fb0d5ec93c http.request.method=GET http.request.remoteaddr="10.217.0.2:43674" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="71.692µs" http.response.status=200 http.response.written=0 2025-12-12T16:32:12.967293707+00:00 stderr F time="2025-12-12T16:32:12.967173104Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=adc3556a-dffb-46ef-9c83-15474d6f3cc9 http.request.method=GET http.request.remoteaddr="10.217.0.2:37474" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="80.922µs" http.response.status=200 http.response.written=0 2025-12-12T16:32:20.297002684+00:00 stderr F time="2025-12-12T16:32:20.295946668Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=9c8dffb2-9eee-46fc-9ffe-ad85383d06db http.request.method=GET http.request.remoteaddr="10.217.0.2:37484" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="64.672µs" http.response.status=200 http.response.written=0 2025-12-12T16:32:22.970265472+00:00 stderr F time="2025-12-12T16:32:22.970066707Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=0ead9e3c-b3b9-4206-a4eb-934c242a8148 http.request.method=GET http.request.remoteaddr="10.217.0.2:55930" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="78.982µs" http.response.status=200 http.response.written=0 2025-12-12T16:32:26.383753013+00:00 stderr F time="2025-12-12T16:32:26.382881072Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=4fd518f4-1ecb-4834-9629-1c27ef03b49a http.request.method=GET http.request.remoteaddr="100.64.0.2:43602" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:32:26.383823705+00:00 stderr F time="2025-12-12T16:32:26.383773834Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=abbb1cc0-2078-429e-8b9c-4ef271c6dfac http.request.method=GET http.request.remoteaddr="100.64.0.2:43602" 
http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=1.034506ms http.response.status=401 http.response.written=87 2025-12-12T16:32:26.398033900+00:00 stderr F time="2025-12-12T16:32:26.397910077Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=c6d1a37b-4614-45f7-8290-0c474e964417 http.request.method=GET http.request.remoteaddr="100.64.0.2:43604" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=9.21042ms http.response.status=200 http.response.written=2893 2025-12-12T16:32:26.405245321+00:00 stderr F time="2025-12-12T16:32:26.405124888Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=65d18fd9-245b-46bd-b835-a5e7d541b20f http.request.method=GET http.request.remoteaddr="100.64.0.2:43604" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:32:26.414574164+00:00 stderr F time="2025-12-12T16:32:26.41443726Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=65d18fd9-245b-46bd-b835-a5e7d541b20f http.request.method=GET http.request.remoteaddr="100.64.0.2:43604" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=15.718783ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:32:26.414574164+00:00 stderr F time="2025-12-12T16:32:26.414525353Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=ea7d0693-f6fd-4409-ae07-956ab8ce937b http.request.method=GET http.request.remoteaddr="100.64.0.2:43604" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=15.847646ms http.response.status=404 http.response.written=96 2025-12-12T16:32:26.420546043+00:00 stderr F time="2025-12-12T16:32:26.420477391Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" 
http.request.id=940b2df7-7497-4dcc-99c6-99ea4ccb11ad http.request.method=GET http.request.remoteaddr="100.64.0.2:43620" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:32:26.420566864+00:00 stderr F time="2025-12-12T16:32:26.420530583Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=89510c31-33f9-45a7-8257-2613156bf83b http.request.method=GET http.request.remoteaddr="100.64.0.2:43620" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="188.014µs" http.response.status=401 http.response.written=87 2025-12-12T16:32:26.428866261+00:00 stderr F time="2025-12-12T16:32:26.428748048Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=e1a478e6-5ede-4815-b17c-8ace85efa2ea http.request.method=GET http.request.remoteaddr="100.64.0.2:43632" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=4.181385ms http.response.status=200 http.response.written=2893 2025-12-12T16:32:26.434692527+00:00 stderr F time="2025-12-12T16:32:26.434618725Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=90d590ed-ab26-4912-9346-3a58d593df8c http.request.method=GET http.request.remoteaddr="100.64.0.2:43632" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:32:26.439482876+00:00 stderr F time="2025-12-12T16:32:26.439373343Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=90d590ed-ab26-4912-9346-3a58d593df8c http.request.method=GET http.request.remoteaddr="100.64.0.2:43632" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=9.949008ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:32:26.439482876+00:00 stderr F time="2025-12-12T16:32:26.439458386Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=190e8739-51cd-4e95-8e4f-59de0dbf90b5 http.request.method=GET http.request.remoteaddr="100.64.0.2:43632" 
http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=10.073452ms http.response.status=404 http.response.written=96 2025-12-12T16:32:30.297286372+00:00 stderr F time="2025-12-12T16:32:30.296148193Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=fc71a518-cdd2-4783-989e-f7334be37fa3 http.request.method=GET http.request.remoteaddr="10.217.0.2:55938" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="88.402µs" http.response.status=200 http.response.written=0 2025-12-12T16:32:32.970136267+00:00 stderr F time="2025-12-12T16:32:32.969272335Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=f371073b-a0e7-4363-b289-8d691dba39cd http.request.method=GET http.request.remoteaddr="10.217.0.2:50114" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="76.842µs" http.response.status=200 http.response.written=0 2025-12-12T16:32:40.298254938+00:00 stderr F time="2025-12-12T16:32:40.296317119Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=7070d271-9451-442b-a207-f92a0d737897 http.request.method=GET http.request.remoteaddr="10.217.0.2:50120" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="54.801µs" http.response.status=200 http.response.written=0 2025-12-12T16:32:42.968017396+00:00 stderr F time="2025-12-12T16:32:42.967608416Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=bcc6e780-9d0b-41b1-bf4d-2eb672832ee6 http.request.method=GET http.request.remoteaddr="10.217.0.2:35948" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="82.712µs" http.response.status=200 http.response.written=0 2025-12-12T16:32:50.296034397+00:00 stderr F time="2025-12-12T16:32:50.2953855Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=40b3c132-3b4c-4eef-b6d7-aa23f74e5727 http.request.method=GET http.request.remoteaddr="10.217.0.2:35950" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="143.743µs" http.response.status=200 http.response.written=0 2025-12-12T16:32:52.966055221+00:00 stderr F time="2025-12-12T16:32:52.965925778Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=49bcfbec-d048-4300-9d02-ac19de1dfdb2 http.request.method=GET http.request.remoteaddr="10.217.0.2:51268" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="67.942µs" http.response.status=200 http.response.written=0 2025-12-12T16:33:00.295606141+00:00 stderr F time="2025-12-12T16:33:00.29474665Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=3a753c51-ee3b-46a1-abc0-a4b66ac2142d http.request.method=GET http.request.remoteaddr="10.217.0.2:51284" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 
http.response.duration="46.291µs" http.response.status=200 http.response.written=0 2025-12-12T16:33:02.967540973+00:00 stderr F time="2025-12-12T16:33:02.96742658Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=07ab6b31-0a17-4c21-a55b-3c5d49085268 http.request.method=GET http.request.remoteaddr="10.217.0.2:58712" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="47.561µs" http.response.status=200 http.response.written=0 2025-12-12T16:33:10.296907814+00:00 stderr F time="2025-12-12T16:33:10.29553674Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=53b39dbd-3cb1-4fc6-9217-812716c30a2e http.request.method=GET http.request.remoteaddr="10.217.0.2:58720" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="49.992µs" http.response.status=200 http.response.written=0 2025-12-12T16:33:12.967890073+00:00 stderr F time="2025-12-12T16:33:12.966842647Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=575378c7-6863-4bab-b56c-670c38689d77 http.request.method=GET http.request.remoteaddr="10.217.0.2:33886" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="58.481µs" http.response.status=200 http.response.written=0 2025-12-12T16:33:20.300521015+00:00 stderr F time="2025-12-12T16:33:20.299332625Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=f71ffe64-7f68-4fc5-bb9d-ea096a7f2998 http.request.method=GET http.request.remoteaddr="10.217.0.2:33892" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="159.104µs" http.response.status=200 http.response.written=0 2025-12-12T16:33:22.967856723+00:00 stderr F time="2025-12-12T16:33:22.9677387Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=745ccbba-10fa-4e91-a891-2794a7579e28 http.request.method=GET http.request.remoteaddr="10.217.0.2:57792" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="72.232µs" http.response.status=200 http.response.written=0 2025-12-12T16:33:30.300046447+00:00 stderr F time="2025-12-12T16:33:30.298925009Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=ce221b69-21f7-4e97-82d9-c647c51c20d1 http.request.method=GET http.request.remoteaddr="10.217.0.2:57804" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="53.352µs" http.response.status=200 http.response.written=0 2025-12-12T16:33:32.968618967+00:00 stderr F time="2025-12-12T16:33:32.967593092Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=9f9230aa-6082-4e69-8378-8684eb3acc02 http.request.method=GET http.request.remoteaddr="10.217.0.2:39868" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="83.452µs" http.response.status=200 http.response.written=0 2025-12-12T16:33:40.294839004+00:00 stderr F time="2025-12-12T16:33:40.294376562Z" level=info msg=response go.version="go1.24.4 (Red Hat 
1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=c6a926ef-7fdf-49f0-ab41-dc92193a406d http.request.method=GET http.request.remoteaddr="10.217.0.2:39878" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="84.332µs" http.response.status=200 http.response.written=0 2025-12-12T16:33:42.968578852+00:00 stderr F time="2025-12-12T16:33:42.968450249Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=013372fe-8dca-4673-a041-98cec7353a7e http.request.method=GET http.request.remoteaddr="10.217.0.2:33342" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="59.311µs" http.response.status=200 http.response.written=0 2025-12-12T16:33:50.297281199+00:00 stderr F time="2025-12-12T16:33:50.296271114Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=d7f0f5b8-9d62-42c5-8a84-354a782518c2 http.request.method=GET http.request.remoteaddr="10.217.0.2:33352" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="49.092µs" http.response.status=200 http.response.written=0 2025-12-12T16:33:52.967769324+00:00 stderr F time="2025-12-12T16:33:52.967636911Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=00911cfe-e1d6-4194-a8be-c741c96919e1 http.request.method=GET http.request.remoteaddr="10.217.0.2:51634" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="60.342µs" http.response.status=200 http.response.written=0 2025-12-12T16:34:00.299621967+00:00 stderr F time="2025-12-12T16:34:00.29896904Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=2694b256-1f86-42cd-9d1b-00c27b3a11ca http.request.method=GET http.request.remoteaddr="10.217.0.2:51644" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="64.281µs" http.response.status=200 http.response.written=0 2025-12-12T16:34:02.967811407+00:00 stderr F time="2025-12-12T16:34:02.967688194Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=94b10012-862d-4cbc-a460-b2a89231b97c http.request.method=GET http.request.remoteaddr="10.217.0.2:48946" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="58.842µs" http.response.status=200 http.response.written=0 2025-12-12T16:34:10.295902529+00:00 stderr F time="2025-12-12T16:34:10.295294504Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=7ae25fc7-5aa3-4eb7-90ae-5402469aaa68 http.request.method=GET http.request.remoteaddr="10.217.0.2:48948" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="128.053µs" http.response.status=200 http.response.written=0 2025-12-12T16:34:12.969029403+00:00 stderr F time="2025-12-12T16:34:12.968555421Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=f1220c67-d39a-4cf4-80e6-0d5ecb37a566 http.request.method=GET http.request.remoteaddr="10.217.0.2:45744" http.request.uri=/healthz 
http.request.useragent=kube-probe/1.33 http.response.duration="95.122µs" http.response.status=200 http.response.written=0 2025-12-12T16:34:20.301411622+00:00 stderr F time="2025-12-12T16:34:20.296734605Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=3ca39c9c-c172-4b07-9084-2b185b92faab http.request.method=GET http.request.remoteaddr="10.217.0.2:45746" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="111.753µs" http.response.status=200 http.response.written=0 2025-12-12T16:34:22.966708418+00:00 stderr F time="2025-12-12T16:34:22.966548054Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=7478364f-eb47-4547-96e5-057f84a5fb4c http.request.method=GET http.request.remoteaddr="10.217.0.2:34086" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="122.303µs" http.response.status=200 http.response.written=0 2025-12-12T16:34:26.309610845+00:00 stderr F time="2025-12-12T16:34:26.309145124Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=9e78d87a-6f34-422a-b61d-93fd4ee60a2d http.request.method=GET http.request.remoteaddr="100.64.0.2:36196" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:34:26.309667687+00:00 stderr F time="2025-12-12T16:34:26.309609165Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=9cc76ede-b778-4077-861f-a2e685bfddfa http.request.method=GET http.request.remoteaddr="100.64.0.2:36196" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="598.185µs" http.response.status=401 http.response.written=87 2025-12-12T16:34:26.323145973+00:00 stderr F time="2025-12-12T16:34:26.323064161Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=b6055cde-dacc-4425-90f8-57140d0ec769 http.request.method=GET http.request.remoteaddr="100.64.0.2:36208" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=9.428355ms http.response.status=200 http.response.written=2893 2025-12-12T16:34:26.327774519+00:00 stderr F time="2025-12-12T16:34:26.327714118Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=0bc89457-da50-4968-acc2-7c89b633a4a1 http.request.method=GET http.request.remoteaddr="100.64.0.2:36208" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index 
vars.reference=latest 2025-12-12T16:34:26.335199655+00:00 stderr F time="2025-12-12T16:34:26.335086542Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=0bc89457-da50-4968-acc2-7c89b633a4a1 http.request.method=GET http.request.remoteaddr="100.64.0.2:36208" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=11.369944ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:34:26.335199655+00:00 stderr F time="2025-12-12T16:34:26.335160914Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=19e2b809-954c-45b4-bdc5-6a8e605e2750 http.request.method=GET http.request.remoteaddr="100.64.0.2:36208" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=11.490688ms http.response.status=404 http.response.written=96 2025-12-12T16:34:26.340356374+00:00 stderr F time="2025-12-12T16:34:26.340288652Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=d004da0f-8639-426b-b16b-3e446707afe6 http.request.method=GET http.request.remoteaddr="100.64.0.2:36222" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:34:26.340381654+00:00 stderr F time="2025-12-12T16:34:26.340344103Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=d45db68e-3eb8-4436-bbbe-fb3c2e1bcd21 http.request.method=GET http.request.remoteaddr="100.64.0.2:36222" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="228.156µs" http.response.status=401 http.response.written=87 2025-12-12T16:34:26.347780759+00:00 stderr F time="2025-12-12T16:34:26.347681647Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=f65f6350-f762-498d-9b79-383cbd9c7c6f http.request.method=GET http.request.remoteaddr="100.64.0.2:36226" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=3.878076ms http.response.status=200 http.response.written=2893 2025-12-12T16:34:26.352889337+00:00 stderr F time="2025-12-12T16:34:26.352810455Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) 
X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=ce63cf6f-f530-4c44-af98-183f0f8cc2a8 http.request.method=GET http.request.remoteaddr="100.64.0.2:36226" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:34:26.358590169+00:00 stderr F time="2025-12-12T16:34:26.358502867Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=ce63cf6f-f530-4c44-af98-183f0f8cc2a8 http.request.method=GET http.request.remoteaddr="100.64.0.2:36226" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=10.01866ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:34:26.358613350+00:00 stderr F time="2025-12-12T16:34:26.358585979Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=16901fc6-deda-4062-9246-d1835712b711 http.request.method=GET http.request.remoteaddr="100.64.0.2:36226" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=10.135223ms http.response.status=404 http.response.written=96 2025-12-12T16:34:30.296275365+00:00 stderr F time="2025-12-12T16:34:30.295033493Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=a637a9a9-b2e5-4f13-8e49-0d033999ebe2 http.request.method=GET http.request.remoteaddr="10.217.0.2:34088" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="57.351µs" http.response.status=200 http.response.written=0 2025-12-12T16:34:32.967951912+00:00 stderr F time="2025-12-12T16:34:32.967862339Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=073e22dc-e151-410d-a816-5561a640a6c1 http.request.method=GET http.request.remoteaddr="10.217.0.2:35918" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="90.762µs" http.response.status=200 http.response.written=0 2025-12-12T16:34:39.381028511+00:00 stderr F time="2025-12-12T16:34:39.379993835Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=bbd847e1-e226-4f16-a15b-3b80a5ecd201 http.request.method=GET 
http.request.remoteaddr="100.64.0.2:38952" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:34:39.381079963+00:00 stderr F time="2025-12-12T16:34:39.381008401Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=4fb3c8d4-5361-4ee5-8a5f-ce547b747079 http.request.method=GET http.request.remoteaddr="100.64.0.2:38952" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=1.18655ms http.response.status=401 http.response.written=87 2025-12-12T16:34:39.392341926+00:00 stderr F time="2025-12-12T16:34:39.392255024Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=d9f52504-6a73-4627-bd9c-2aa5efcb132a http.request.method=GET http.request.remoteaddr="100.64.0.2:38968" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=5.936379ms http.response.status=200 http.response.written=2893 2025-12-12T16:34:39.397840944+00:00 stderr F time="2025-12-12T16:34:39.39770739Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=7fb6f866-a567-4373-817d-e70ace50be6c http.request.method=GET http.request.remoteaddr="100.64.0.2:38968" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:34:39.407419634+00:00 stderr F time="2025-12-12T16:34:39.407310452Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=7fb6f866-a567-4373-817d-e70ace50be6c http.request.method=GET http.request.remoteaddr="100.64.0.2:38968" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=14.29153ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:34:39.407419634+00:00 stderr F time="2025-12-12T16:34:39.407386314Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=c17e4f8f-bb88-40db-8f9b-a92c216d3903 http.request.method=GET http.request.remoteaddr="100.64.0.2:38968" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest 
http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=14.405162ms http.response.status=404 http.response.written=96 2025-12-12T16:34:39.414285417+00:00 stderr F time="2025-12-12T16:34:39.414175734Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=968c301b-d2ce-4e8f-aa1a-ef21fd8e1488 http.request.method=GET http.request.remoteaddr="100.64.0.2:38978" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:34:39.414333298+00:00 stderr F time="2025-12-12T16:34:39.414265157Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=dda16bb4-7573-4294-9a4b-08e5712e4e5a http.request.method=GET http.request.remoteaddr="100.64.0.2:38978" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="235.036µs" http.response.status=401 http.response.written=87 2025-12-12T16:34:39.422009721+00:00 stderr F time="2025-12-12T16:34:39.421915379Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=93fd9a89-5d82-4af9-b3fe-18b0d7f1253c http.request.method=GET http.request.remoteaddr="100.64.0.2:38992" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=3.815646ms http.response.status=200 http.response.written=2893 2025-12-12T16:34:39.426944125+00:00 stderr F time="2025-12-12T16:34:39.426895234Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=17bfa86a-7d8c-4325-9ec2-c05309ff6ed3 http.request.method=GET http.request.remoteaddr="100.64.0.2:38992" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:34:39.431793677+00:00 stderr F time="2025-12-12T16:34:39.431719795Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=17bfa86a-7d8c-4325-9ec2-c05309ff6ed3 http.request.method=GET http.request.remoteaddr="100.64.0.2:38992" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=9.271283ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" 
openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:34:39.431793677+00:00 stderr F time="2025-12-12T16:34:39.431773616Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=9ea7d443-8208-4146-a595-8a8f6c0bd0e7 http.request.method=GET http.request.remoteaddr="100.64.0.2:38992" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=9.345964ms http.response.status=404 http.response.written=96 2025-12-12T16:34:40.298333619+00:00 stderr F time="2025-12-12T16:34:40.295047526Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=06798638-35fa-4e34-ab9a-daf8d0568112 http.request.method=GET http.request.remoteaddr="10.217.0.2:35920" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="68.902µs" http.response.status=200 http.response.written=0 2025-12-12T16:34:42.967661117+00:00 stderr F time="2025-12-12T16:34:42.967012081Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=4a010973-4472-4985-95c1-9435784bf8f4 http.request.method=GET http.request.remoteaddr="10.217.0.2:57900" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="74.302µs" http.response.status=200 http.response.written=0 2025-12-12T16:34:50.296850405+00:00 stderr F time="2025-12-12T16:34:50.296377673Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=67588acc-0785-402b-860d-bf5258b0b713 http.request.method=GET http.request.remoteaddr="10.217.0.2:57916" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="64.571µs" http.response.status=200 http.response.written=0 2025-12-12T16:34:52.968251296+00:00 stderr F time="2025-12-12T16:34:52.968123183Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=b405f254-da99-4727-94d1-1683e7c3088f http.request.method=GET http.request.remoteaddr="10.217.0.2:50154" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="64.701µs" http.response.status=200 http.response.written=0 2025-12-12T16:35:00.296787740+00:00 stderr F time="2025-12-12T16:35:00.295730153Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=067a9e59-34ee-4e24-a7ff-e0e74cf51626 http.request.method=GET http.request.remoteaddr="10.217.0.2:50170" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="62.062µs" http.response.status=200 http.response.written=0 2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:02.966802434Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=130db07f-29a7-46ab-8421-427bc094a00a http.request.method=GET http.request.remoteaddr="10.217.0.2:44870" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 
http.response.duration="47.182µs" http.response.status=200 http.response.written=0 2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:03.383446572Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=38c65a2c-2ae9-4dce-b757-c12a3b70c9ad http.request.method=GET http.request.remoteaddr="100.64.0.2:36440" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:03.383504703Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=6263f32f-6501-4744-9fc9-cbd6fecd0296 http.request.method=GET http.request.remoteaddr="100.64.0.2:36440" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="211.995µs" http.response.status=401 http.response.written=87 2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:03.394564681Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=deabc522-0085-4c3c-91a0-bd0bf228266a http.request.method=GET http.request.remoteaddr="100.64.0.2:36444" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=4.278587ms http.response.status=200 http.response.written=2893 2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:03.401716421Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=87c80218-b3b0-4682-b819-06cd9097f1e6 http.request.method=GET http.request.remoteaddr="100.64.0.2:36444" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:03.411103807Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=87c80218-b3b0-4682-b819-06cd9097f1e6 http.request.method=GET http.request.remoteaddr="100.64.0.2:36444" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=15.353756ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 
2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:03.4112271Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=4ba1aae2-430c-45ee-8ee7-48c9f1882afb http.request.method=GET http.request.remoteaddr="100.64.0.2:36444" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=15.52817ms http.response.status=404 http.response.written=96 2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:03.416969584Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=25f3fc35-bae2-43dd-b4a3-e5ee4e19167f http.request.method=GET http.request.remoteaddr="100.64.0.2:36458" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:03.416999725Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=886b5b08-2624-459b-bb55-c6638896db34 http.request.method=GET http.request.remoteaddr="100.64.0.2:36458" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="121.013µs" http.response.status=401 http.response.written=87 2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:03.424957725Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=47778724-855f-434a-8d77-d039d43a6745 http.request.method=GET http.request.remoteaddr="100.64.0.2:36470" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=3.636632ms http.response.status=200 http.response.written=2893 2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:03.430676519Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=e945a384-8856-4ac9-9ecc-4369b1080e29 http.request.method=GET http.request.remoteaddr="100.64.0.2:36470" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:03.436324421Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=e945a384-8856-4ac9-9ecc-4369b1080e29 
http.request.method=GET http.request.remoteaddr="100.64.0.2:36470" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=10.694659ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:35:03.497336543+00:00 stderr F time="2025-12-12T16:35:03.436422593Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=c7ad16c1-8cc7-4333-88ba-d692878b5f00 http.request.method=GET http.request.remoteaddr="100.64.0.2:36470" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=10.834152ms http.response.status=404 http.response.written=96 2025-12-12T16:35:10.296855201+00:00 stderr F time="2025-12-12T16:35:10.29601159Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=396213bc-3b2d-485a-8f31-4d88265653a0 http.request.method=GET http.request.remoteaddr="10.217.0.2:44880" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="117.523µs" http.response.status=200 http.response.written=0 2025-12-12T16:35:12.967764918+00:00 stderr F time="2025-12-12T16:35:12.967670536Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=b1900ab5-642c-4825-b8a1-2f4ee25dd343 http.request.method=GET http.request.remoteaddr="10.217.0.2:51956" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="100.513µs" http.response.status=200 http.response.written=0 2025-12-12T16:35:17.379144505+00:00 stderr F time="2025-12-12T16:35:17.379072633Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=b011ffff-c972-4a78-b17b-85d406092401 http.request.method=GET http.request.remoteaddr="100.64.0.2:35418" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:35:17.379144505+00:00 stderr F time="2025-12-12T16:35:17.379121294Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=7c82e974-a0f2-4aaa-b4f0-ce0b3c84d566 http.request.method=GET http.request.remoteaddr="100.64.0.2:35418" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="166.554µs" http.response.status=401 http.response.written=87 2025-12-12T16:35:17.389550986+00:00 stderr F time="2025-12-12T16:35:17.389494895Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=1bb1277f-33cc-4d20-9db2-e8f3546665b6 http.request.method=GET 
http.request.remoteaddr="100.64.0.2:35426" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=6.975185ms http.response.status=200 http.response.written=2893 2025-12-12T16:35:17.395715231+00:00 stderr F time="2025-12-12T16:35:17.39565118Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=39a61555-66de-4d86-8c49-0362e6264aa1 http.request.method=GET http.request.remoteaddr="100.64.0.2:35426" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:35:17.404214885+00:00 stderr F time="2025-12-12T16:35:17.404103172Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=39a61555-66de-4d86-8c49-0362e6264aa1 http.request.method=GET http.request.remoteaddr="100.64.0.2:35426" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=14.125725ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:35:17.404359348+00:00 stderr F time="2025-12-12T16:35:17.404316447Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=47aac976-ab99-498a-906a-c99e96b717cb http.request.method=GET http.request.remoteaddr="100.64.0.2:35426" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=14.363621ms http.response.status=404 http.response.written=96 2025-12-12T16:35:17.409476727+00:00 stderr F time="2025-12-12T16:35:17.409446746Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=f959ba69-f72e-4ea9-99fc-bda392dec73e http.request.method=GET http.request.remoteaddr="100.64.0.2:35434" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:35:17.409577610+00:00 stderr F time="2025-12-12T16:35:17.409556459Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=8570c6cd-164a-46b4-9b6f-6d6e9312f3f1 http.request.method=GET 
http.request.remoteaddr="100.64.0.2:35434" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="203.235µs" http.response.status=401 http.response.written=87 2025-12-12T16:35:17.416642407+00:00 stderr F time="2025-12-12T16:35:17.416604966Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=969b8b40-5235-4f35-b304-e9ece4524464 http.request.method=GET http.request.remoteaddr="100.64.0.2:35450" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=3.147529ms http.response.status=200 http.response.written=2893 2025-12-12T16:35:17.423981742+00:00 stderr F time="2025-12-12T16:35:17.423889899Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=7ff44893-f14b-4c2d-a09d-8a5f374035fc http.request.method=GET http.request.remoteaddr="100.64.0.2:35450" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:35:17.430308171+00:00 stderr F time="2025-12-12T16:35:17.430163197Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=7ff44893-f14b-4c2d-a09d-8a5f374035fc http.request.method=GET http.request.remoteaddr="100.64.0.2:35450" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=12.826643ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:35:17.430308171+00:00 stderr F time="2025-12-12T16:35:17.43027346Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=be71e104-69de-4332-886c-4399255dd0ee http.request.method=GET http.request.remoteaddr="100.64.0.2:35450" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=13.008057ms http.response.status=404 http.response.written=96 2025-12-12T16:35:20.298041863+00:00 stderr F time="2025-12-12T16:35:20.297172471Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" 
http.request.id=c7a5619c-dc2e-46e8-b148-617647e0bf00 http.request.method=GET http.request.remoteaddr="10.217.0.2:51958" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="109.403µs" http.response.status=200 http.response.written=0 2025-12-12T16:35:22.967673808+00:00 stderr F time="2025-12-12T16:35:22.967547924Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=c8516997-5be4-4489-8d62-051c9f0ad0f2 http.request.method=GET http.request.remoteaddr="10.217.0.2:41750" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="66.191µs" http.response.status=200 http.response.written=0 2025-12-12T16:35:30.297501363+00:00 stderr F time="2025-12-12T16:35:30.296536309Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=9750866d-0ccf-4969-858f-880d47a3c09e http.request.method=GET http.request.remoteaddr="10.217.0.2:41764" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="54.821µs" http.response.status=200 http.response.written=0 2025-12-12T16:35:32.968669768+00:00 stderr F time="2025-12-12T16:35:32.967841257Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=a168c781-4536-4c59-a214-a3c57c05c558 http.request.method=GET http.request.remoteaddr="10.217.0.2:35470" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="84.982µs" http.response.status=200 http.response.written=0 2025-12-12T16:35:40.299592311+00:00 stderr F time="2025-12-12T16:35:40.297348065Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=8107a00b-0bdd-4966-bbb7-ca70fba96242 http.request.method=GET http.request.remoteaddr="10.217.0.2:35482" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="77.552µs" http.response.status=200 http.response.written=0 2025-12-12T16:35:42.968785464+00:00 stderr F time="2025-12-12T16:35:42.968678761Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=e56a644e-05c9-44f8-8c4e-c2c5036d70f7 http.request.method=GET http.request.remoteaddr="10.217.0.2:60372" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="72.702µs" http.response.status=200 http.response.written=0 2025-12-12T16:35:45.384279713+00:00 stderr F time="2025-12-12T16:35:45.383225156Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=05ae7088-148a-4b37-93a7-6b3b29772831 http.request.method=GET http.request.remoteaddr="100.64.0.2:51196" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:35:45.384279713+00:00 stderr F time="2025-12-12T16:35:45.384207031Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=196af6ad-9452-4189-8885-3709e6db6706 http.request.method=GET http.request.remoteaddr="100.64.0.2:51196" http.request.uri=/v2/ 
http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=1.1884ms http.response.status=401 http.response.written=87 2025-12-12T16:35:45.394148241+00:00 stderr F time="2025-12-12T16:35:45.394070169Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=e409e3da-1e4b-44b2-906c-6ef7de8a7832 http.request.method=GET http.request.remoteaddr="100.64.0.2:51210" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=5.095008ms http.response.status=200 http.response.written=2893 2025-12-12T16:35:45.404657475+00:00 stderr F time="2025-12-12T16:35:45.404539482Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=73a59572-35ff-4657-b4f3-ac8c530dcb2d http.request.method=GET http.request.remoteaddr="100.64.0.2:51210" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:35:45.416798590+00:00 stderr F time="2025-12-12T16:35:45.416707907Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=73a59572-35ff-4657-b4f3-ac8c530dcb2d http.request.method=GET http.request.remoteaddr="100.64.0.2:51210" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=21.866329ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:35:45.416838801+00:00 stderr F time="2025-12-12T16:35:45.416776739Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=5fc013df-061c-4d6f-8004-89b51e07d784 http.request.method=GET http.request.remoteaddr="100.64.0.2:51210" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=22.006873ms http.response.status=404 http.response.written=96 2025-12-12T16:35:45.426496083+00:00 stderr F time="2025-12-12T16:35:45.426439312Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" 
http.request.id=be910cd9-4ca1-4419-b6fc-b4237313a3dc http.request.method=GET http.request.remoteaddr="100.64.0.2:51214" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:35:45.426550665+00:00 stderr F time="2025-12-12T16:35:45.426505683Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=b0e4ff1c-960d-468b-b592-31d74f1a2001 http.request.method=GET http.request.remoteaddr="100.64.0.2:51214" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="204.325µs" http.response.status=401 http.response.written=87 2025-12-12T16:35:45.436202287+00:00 stderr F time="2025-12-12T16:35:45.436119325Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=dcd77517-a805-4631-a004-3e36088ba87c http.request.method=GET http.request.remoteaddr="100.64.0.2:51218" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=4.286448ms http.response.status=200 http.response.written=2893 2025-12-12T16:35:45.446216139+00:00 stderr F time="2025-12-12T16:35:45.446128037Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=213c28fb-7bc1-4e35-8724-c44f2417413f http.request.method=GET http.request.remoteaddr="100.64.0.2:51218" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:35:45.453492212+00:00 stderr F time="2025-12-12T16:35:45.453407029Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=213c28fb-7bc1-4e35-8724-c44f2417413f http.request.method=GET http.request.remoteaddr="100.64.0.2:51218" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=16.556526ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:35:45.453539773+00:00 stderr F time="2025-12-12T16:35:45.453484931Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=0ffe6fac-990d-4d44-b59b-992b54311c6c http.request.method=GET http.request.remoteaddr="100.64.0.2:51218" 
http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=16.660268ms http.response.status=404 http.response.written=96 2025-12-12T16:35:50.294537934+00:00 stderr F time="2025-12-12T16:35:50.294449652Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=11f5c1c4-0ebd-4af4-b104-7fd7c413a5df http.request.method=GET http.request.remoteaddr="10.217.0.2:60388" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="45.041µs" http.response.status=200 http.response.written=0 2025-12-12T16:35:52.967747188+00:00 stderr F time="2025-12-12T16:35:52.966707892Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=7bce3dd7-c7c7-403e-8df0-b2b95bea49c1 http.request.method=GET http.request.remoteaddr="10.217.0.2:55030" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="78.812µs" http.response.status=200 http.response.written=0 2025-12-12T16:36:00.297078899+00:00 stderr F time="2025-12-12T16:36:00.295989862Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=104fd452-3c88-4bf8-963d-04d6920c05db http.request.method=GET http.request.remoteaddr="10.217.0.2:55046" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="57.111µs" http.response.status=200 http.response.written=0 2025-12-12T16:36:02.967490043+00:00 stderr F time="2025-12-12T16:36:02.96737868Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=51435de0-6e6c-4244-8d57-8ea3ad9c0d43 http.request.method=GET http.request.remoteaddr="10.217.0.2:59940" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="58.361µs" http.response.status=200 http.response.written=0 2025-12-12T16:36:10.296796893+00:00 stderr F time="2025-12-12T16:36:10.296317751Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=74a6e275-caf8-4c17-aa21-619eb0286cb1 http.request.method=GET http.request.remoteaddr="10.217.0.2:59954" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="81.162µs" http.response.status=200 http.response.written=0 2025-12-12T16:36:12.967541056+00:00 stderr F time="2025-12-12T16:36:12.967412552Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=597b5574-8619-4fce-9e13-1e08791833eb http.request.method=GET http.request.remoteaddr="10.217.0.2:59200" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="165.084µs" http.response.status=200 http.response.written=0 2025-12-12T16:36:20.296051269+00:00 stderr F time="2025-12-12T16:36:20.295171527Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=d8ada0d2-66f6-4a94-be97-90b5a4270464 http.request.method=GET http.request.remoteaddr="10.217.0.2:59214" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 
http.response.duration="51.601µs" http.response.status=200 http.response.written=0 2025-12-12T16:36:22.967918741+00:00 stderr F time="2025-12-12T16:36:22.967798958Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=092d8de3-5880-4f88-b6f3-57d024450b0f http.request.method=GET http.request.remoteaddr="10.217.0.2:48634" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="105.723µs" http.response.status=200 http.response.written=0 2025-12-12T16:36:30.296851858+00:00 stderr F time="2025-12-12T16:36:30.296352066Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=034f3e2f-d9a8-40db-ab6c-505203f3c0eb http.request.method=GET http.request.remoteaddr="10.217.0.2:48646" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="98.202µs" http.response.status=200 http.response.written=0 2025-12-12T16:36:32.967701543+00:00 stderr F time="2025-12-12T16:36:32.967614201Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=78ead9fe-5ad1-45ba-ae09-e3cc6f2f2ee0 http.request.method=GET http.request.remoteaddr="10.217.0.2:53302" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="49.231µs" http.response.status=200 http.response.written=0 2025-12-12T16:36:40.295856775+00:00 stderr F time="2025-12-12T16:36:40.295104576Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=0c34609a-62bf-4ebd-9890-f1429fb3ff9f http.request.method=GET http.request.remoteaddr="10.217.0.2:53308" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="38.731µs" http.response.status=200 http.response.written=0 2025-12-12T16:36:42.969609043+00:00 stderr F time="2025-12-12T16:36:42.968953817Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=eb35614d-5145-4d2d-a750-ac75ab16a6ef http.request.method=GET http.request.remoteaddr="10.217.0.2:42466" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="65.762µs" http.response.status=200 http.response.written=0 2025-12-12T16:36:50.296053713+00:00 stderr F time="2025-12-12T16:36:50.295355396Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=d166c3db-52ed-4b69-9f49-dc857618e5df http.request.method=GET http.request.remoteaddr="10.217.0.2:42480" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="47.582µs" http.response.status=200 http.response.written=0 2025-12-12T16:36:52.966030097+00:00 stderr F time="2025-12-12T16:36:52.965935364Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=64b9f935-180a-4a20-937f-8bb6b4f139f4 http.request.method=GET http.request.remoteaddr="10.217.0.2:60916" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="48.371µs" http.response.status=200 http.response.written=0 2025-12-12T16:37:00.297029952+00:00 stderr F time="2025-12-12T16:37:00.295991336Z" level=info msg=response go.version="go1.24.4 (Red Hat 
1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=c96d479c-8099-4d82-97fe-64572d6a4869 http.request.method=GET http.request.remoteaddr="10.217.0.2:60928" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="53.111µs" http.response.status=200 http.response.written=0 2025-12-12T16:37:02.970191885+00:00 stderr F time="2025-12-12T16:37:02.969343724Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=485b1267-3ffa-42be-8431-f812d7749d1f http.request.method=GET http.request.remoteaddr="10.217.0.2:38498" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="96.482µs" http.response.status=200 http.response.written=0 2025-12-12T16:37:10.301887004+00:00 stderr F time="2025-12-12T16:37:10.300963151Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=ea29a601-bab5-4c57-aa84-fa7c811989d1 http.request.method=GET http.request.remoteaddr="10.217.0.2:38514" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="60.022µs" http.response.status=200 http.response.written=0 2025-12-12T16:37:12.967006007+00:00 stderr F time="2025-12-12T16:37:12.966927715Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=7aa78e26-a21c-4eb0-84ca-4fe343b3244a http.request.method=GET http.request.remoteaddr="10.217.0.2:43880" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="53.711µs" http.response.status=200 http.response.written=0 2025-12-12T16:37:17.385149113+00:00 stderr F time="2025-12-12T16:37:17.383904152Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=8a659f78-dec9-4beb-8c40-320e6aa576eb http.request.method=GET http.request.remoteaddr="100.64.0.2:37846" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:37:17.385408500+00:00 stderr F time="2025-12-12T16:37:17.385354879Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=d7136cc8-c162-4555-aba9-3aaf3e218d3a http.request.method=GET http.request.remoteaddr="100.64.0.2:37846" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=1.636031ms http.response.status=401 http.response.written=87 2025-12-12T16:37:17.405346081+00:00 stderr F time="2025-12-12T16:37:17.405257659Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=d4e7a988-9467-4f6c-840d-e4c50566fc6d http.request.method=GET http.request.remoteaddr="100.64.0.2:37860" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=12.279458ms http.response.status=200 http.response.written=2893 2025-12-12T16:37:17.414638484+00:00 
stderr F time="2025-12-12T16:37:17.414562442Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=4223773b-ab4a-40ca-b91b-f02eeaa36cce http.request.method=GET http.request.remoteaddr="100.64.0.2:37860" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:37:17.424852631+00:00 stderr F time="2025-12-12T16:37:17.424757219Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=4223773b-ab4a-40ca-b91b-f02eeaa36cce http.request.method=GET http.request.remoteaddr="100.64.0.2:37860" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=18.30573ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:37:17.424959704+00:00 stderr F time="2025-12-12T16:37:17.424938173Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=48983f29-917a-4665-b97c-974247bfe14b http.request.method=GET http.request.remoteaddr="100.64.0.2:37860" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=18.554956ms http.response.status=404 http.response.written=96 2025-12-12T16:37:17.431655752+00:00 stderr F time="2025-12-12T16:37:17.43158539Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=cd78aca1-6adb-4d2a-a717-c056081523e6 http.request.method=GET http.request.remoteaddr="100.64.0.2:37874" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:37:17.431753524+00:00 stderr F time="2025-12-12T16:37:17.431731754Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=e6e09c09-3c33-4cf9-af22-9d6a16c91270 http.request.method=GET http.request.remoteaddr="100.64.0.2:37874" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="338.669µs" http.response.status=401 http.response.written=87 2025-12-12T16:37:17.440973196+00:00 stderr F time="2025-12-12T16:37:17.440871693Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) 
X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=7a789dcc-5e8c-464a-a09d-a2e00be33a95 http.request.method=GET http.request.remoteaddr="100.64.0.2:37876" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=4.217046ms http.response.status=200 http.response.written=2893 2025-12-12T16:37:17.448996867+00:00 stderr F time="2025-12-12T16:37:17.448910845Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=2a7152aa-18d5-425c-b39e-35e7c12807e2 http.request.method=GET http.request.remoteaddr="100.64.0.2:37876" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:37:17.456333532+00:00 stderr F time="2025-12-12T16:37:17.454308871Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=2a7152aa-18d5-425c-b39e-35e7c12807e2 http.request.method=GET http.request.remoteaddr="100.64.0.2:37876" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=12.523254ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:37:17.456333532+00:00 stderr F time="2025-12-12T16:37:17.454421164Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=4147d7cb-a476-4f48-83a7-e422b1d22daa http.request.method=GET http.request.remoteaddr="100.64.0.2:37876" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=12.703559ms http.response.status=404 http.response.written=96 2025-12-12T16:37:20.299995281+00:00 stderr F time="2025-12-12T16:37:20.298850372Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=7a91ca55-594d-4db4-8257-e4f2f7281047 http.request.method=GET http.request.remoteaddr="10.217.0.2:43882" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="72.792µs" http.response.status=200 http.response.written=0 2025-12-12T16:37:22.968313013+00:00 stderr F time="2025-12-12T16:37:22.968111108Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" 
http.request.host="10.217.0.7:5000" http.request.id=18bc05b1-3b6c-4609-ada7-0800931e0f44 http.request.method=GET http.request.remoteaddr="10.217.0.2:44098" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="92.853µs" http.response.status=200 http.response.written=0 2025-12-12T16:37:30.296540648+00:00 stderr F time="2025-12-12T16:37:30.295600075Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=9d12eec6-900a-4223-b2f6-284aa0263872 http.request.method=GET http.request.remoteaddr="10.217.0.2:44100" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="75.222µs" http.response.status=200 http.response.written=0 2025-12-12T16:37:32.975783085+00:00 stderr F time="2025-12-12T16:37:32.974620306Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=4d1d4516-d25f-49c6-beae-4f03fe70f6f6 http.request.method=GET http.request.remoteaddr="10.217.0.2:46592" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="120.853µs" http.response.status=200 http.response.written=0 2025-12-12T16:37:40.295758784+00:00 stderr F time="2025-12-12T16:37:40.294880572Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=59ec0eda-2a8b-4bee-8aaf-b043d9ae5a99 http.request.method=GET http.request.remoteaddr="10.217.0.2:46596" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="68.482µs" http.response.status=200 http.response.written=0 2025-12-12T16:37:42.967877191+00:00 stderr F time="2025-12-12T16:37:42.967740318Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=4b8fffd5-aaeb-4de7-885d-e271791dd107 http.request.method=GET http.request.remoteaddr="10.217.0.2:54652" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="82.922µs" http.response.status=200 http.response.written=0 2025-12-12T16:37:50.296560305+00:00 stderr F time="2025-12-12T16:37:50.296069522Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=b468a28c-68d5-46d7-a8bf-d404502406ce http.request.method=GET http.request.remoteaddr="10.217.0.2:54658" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="77.902µs" http.response.status=200 http.response.written=0 2025-12-12T16:37:52.967585683+00:00 stderr F time="2025-12-12T16:37:52.966760553Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=6d0e921c-21b3-44c9-859d-5baadb19ae13 http.request.method=GET http.request.remoteaddr="10.217.0.2:57940" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="43.401µs" http.response.status=200 http.response.written=0 2025-12-12T16:38:00.297160770+00:00 stderr F time="2025-12-12T16:38:00.296294538Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=2bac906b-1189-481a-9b4a-24f81f3961ac http.request.method=GET http.request.remoteaddr="10.217.0.2:57944" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 
http.response.duration="91.462µs" http.response.status=200 http.response.written=0 2025-12-12T16:38:02.967465633+00:00 stderr F time="2025-12-12T16:38:02.967386011Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=649c8856-3b36-41bb-aa7c-1a5c32a5817c http.request.method=GET http.request.remoteaddr="10.217.0.2:53062" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="59.272µs" http.response.status=200 http.response.written=0 2025-12-12T16:38:10.297224077+00:00 stderr F time="2025-12-12T16:38:10.295564475Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=2fcc7474-46cd-4ddd-98cd-caa59e43e768 http.request.method=GET http.request.remoteaddr="10.217.0.2:53072" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="92.062µs" http.response.status=200 http.response.written=0 2025-12-12T16:38:12.968242637+00:00 stderr F time="2025-12-12T16:38:12.967592431Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=de487784-9be0-477b-84bb-6c79d0d5e9fe http.request.method=GET http.request.remoteaddr="10.217.0.2:41542" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="88.572µs" http.response.status=200 http.response.written=0 2025-12-12T16:38:20.297028686+00:00 stderr F time="2025-12-12T16:38:20.295723213Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=cbca062f-4692-4f43-bd02-7e6bdfea1bc9 http.request.method=GET http.request.remoteaddr="10.217.0.2:41544" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="84.672µs" http.response.status=200 http.response.written=0 2025-12-12T16:38:22.972365851+00:00 stderr F time="2025-12-12T16:38:22.971788637Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=e6d27622-b601-4e5b-b718-5f6dd1748613 http.request.method=GET http.request.remoteaddr="10.217.0.2:46924" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="90.993µs" http.response.status=200 http.response.written=0 2025-12-12T16:38:30.297727752+00:00 stderr F time="2025-12-12T16:38:30.296677185Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=49125aa6-14c1-455c-a246-30a2626a1f59 http.request.method=GET http.request.remoteaddr="10.217.0.2:46932" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="62.321µs" http.response.status=200 http.response.written=0 2025-12-12T16:38:32.971385848+00:00 stderr F time="2025-12-12T16:38:32.970396203Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=6b5d5e5b-f4ae-427e-a4c2-fe865dc97905 http.request.method=GET http.request.remoteaddr="10.217.0.2:36834" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="62.961µs" http.response.status=200 http.response.written=0 2025-12-12T16:38:40.297369955+00:00 stderr F time="2025-12-12T16:38:40.296464392Z" level=info msg=response go.version="go1.24.4 (Red Hat 
1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=1bf00080-6753-46fe-a1de-e5ef041dcf33 http.request.method=GET http.request.remoteaddr="10.217.0.2:36846" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="51.381µs" http.response.status=200 http.response.written=0 2025-12-12T16:38:42.969352129+00:00 stderr F time="2025-12-12T16:38:42.969159904Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=6e76c9f1-70f7-45a5-8718-fb3fb8a867d7 http.request.method=GET http.request.remoteaddr="10.217.0.2:49330" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="66.431µs" http.response.status=200 http.response.written=0 2025-12-12T16:38:50.303505092+00:00 stderr F time="2025-12-12T16:38:50.298080416Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=77cb69d6-958d-423c-a239-88af764d75f1 http.request.method=GET http.request.remoteaddr="10.217.0.2:49334" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="71.452µs" http.response.status=200 http.response.written=0 2025-12-12T16:38:52.967691062+00:00 stderr F time="2025-12-12T16:38:52.967565979Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=205beb1a-61df-41cf-a2fc-0434eed58164 http.request.method=GET http.request.remoteaddr="10.217.0.2:45936" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="52.511µs" http.response.status=200 http.response.written=0 2025-12-12T16:39:00.302289637+00:00 stderr F time="2025-12-12T16:39:00.301295412Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=cf123c1b-a22e-4844-8a24-c4f29dd7d120 http.request.method=GET http.request.remoteaddr="10.217.0.2:45948" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="83.473µs" http.response.status=200 http.response.written=0 2025-12-12T16:39:02.968707310+00:00 stderr F time="2025-12-12T16:39:02.968383061Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=ac26e6d8-73bb-47e7-8510-54d33357fc3b http.request.method=GET http.request.remoteaddr="10.217.0.2:36290" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="66.911µs" http.response.status=200 http.response.written=0 2025-12-12T16:39:10.297064427+00:00 stderr F time="2025-12-12T16:39:10.296040841Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=aea8c10d-6030-4553-97a4-63a95f8b5707 http.request.method=GET http.request.remoteaddr="10.217.0.2:36296" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="51.972µs" http.response.status=200 http.response.written=0 2025-12-12T16:39:12.966198059+00:00 stderr F time="2025-12-12T16:39:12.966062996Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=3400862d-39d1-4b52-a749-2fbf783b58b4 http.request.method=GET http.request.remoteaddr="10.217.0.2:57554" http.request.uri=/healthz 
http.request.useragent=kube-probe/1.33 http.response.duration="83.102µs" http.response.status=200 http.response.written=0 2025-12-12T16:39:20.297735955+00:00 stderr F time="2025-12-12T16:39:20.296429962Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=fc3b6a33-439e-479e-acbf-c5aa829ba016 http.request.method=GET http.request.remoteaddr="10.217.0.2:57568" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="60.741µs" http.response.status=200 http.response.written=0 2025-12-12T16:39:22.967571116+00:00 stderr F time="2025-12-12T16:39:22.967468073Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=5ace2c85-8b8d-45eb-8680-4464f7cd4211 http.request.method=GET http.request.remoteaddr="10.217.0.2:42708" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="100.032µs" http.response.status=200 http.response.written=0 2025-12-12T16:39:30.299294678+00:00 stderr F time="2025-12-12T16:39:30.298281793Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=513cd700-6244-4b82-8841-6ab8ca752a77 http.request.method=GET http.request.remoteaddr="10.217.0.2:42712" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="104.233µs" http.response.status=200 http.response.written=0 2025-12-12T16:39:32.968167016+00:00 stderr F time="2025-12-12T16:39:32.967279263Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=37dea39d-9704-4f65-a01f-43f6f14ada8a http.request.method=GET http.request.remoteaddr="10.217.0.2:55024" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="107.583µs" http.response.status=200 http.response.written=0 2025-12-12T16:39:40.298574995+00:00 stderr F time="2025-12-12T16:39:40.29758015Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=7339d0b6-b2b8-4b98-8d0d-370ffa0b7af4 http.request.method=GET http.request.remoteaddr="10.217.0.2:55034" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="60.452µs" http.response.status=200 http.response.written=0 2025-12-12T16:39:42.967338587+00:00 stderr F time="2025-12-12T16:39:42.967237664Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=739774cf-2815-4352-9d17-5ff2acb6a474 http.request.method=GET http.request.remoteaddr="10.217.0.2:48466" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="105.433µs" http.response.status=200 http.response.written=0 2025-12-12T16:39:50.296935113+00:00 stderr F time="2025-12-12T16:39:50.296049381Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=40acb261-482e-469b-8b7f-fe10e1a96314 http.request.method=GET http.request.remoteaddr="10.217.0.2:48476" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="52.481µs" http.response.status=200 http.response.written=0 2025-12-12T16:39:52.970950848+00:00 stderr F time="2025-12-12T16:39:52.970136387Z" level=info 
msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=413862d2-2070-4150-b394-cdbb2d9e3988 http.request.method=GET http.request.remoteaddr="10.217.0.2:54416" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="51.521µs" http.response.status=200 http.response.written=0 2025-12-12T16:40:00.297963941+00:00 stderr F time="2025-12-12T16:40:00.296980227Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=fd1461a5-e1fa-4a7d-bb44-11995e5e8dec http.request.method=GET http.request.remoteaddr="10.217.0.2:54432" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="68.241µs" http.response.status=200 http.response.written=0 2025-12-12T16:40:02.386062445+00:00 stderr F time="2025-12-12T16:40:02.385948592Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=7b37c56e-2130-4d82-8b35-a560097ed200 http.request.method=GET http.request.remoteaddr="100.64.0.2:37750" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:40:02.386147347+00:00 stderr F time="2025-12-12T16:40:02.386068845Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=8b2466de-5d35-4254-b0e0-39f8967d5e90 http.request.method=GET http.request.remoteaddr="100.64.0.2:37750" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="317.758µs" http.response.status=401 http.response.written=87 2025-12-12T16:40:02.407757510+00:00 stderr F time="2025-12-12T16:40:02.407640407Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=7e79707d-8922-4946-8937-ae26ca1a2c3b http.request.method=GET http.request.remoteaddr="100.64.0.2:37762" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=13.93369ms http.response.status=200 http.response.written=2893 2025-12-12T16:40:02.415839083+00:00 stderr F time="2025-12-12T16:40:02.415760661Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=dfc97852-283b-4992-8f73-0a9037bd672b http.request.method=GET http.request.remoteaddr="100.64.0.2:37762" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:40:02.425935667+00:00 stderr F time="2025-12-12T16:40:02.425774743Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" 
err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=dfc97852-283b-4992-8f73-0a9037bd672b http.request.method=GET http.request.remoteaddr="100.64.0.2:37762" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=17.093999ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:40:02.425935667+00:00 stderr F time="2025-12-12T16:40:02.425906616Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=4d7b61b7-1940-487a-8253-626210aabf43 http.request.method=GET http.request.remoteaddr="100.64.0.2:37762" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=17.276314ms http.response.status=404 http.response.written=96 2025-12-12T16:40:02.434428180+00:00 stderr F time="2025-12-12T16:40:02.434314577Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=9e3fe92c-eb2c-42ca-bfc5-4ae8bf173be3 http.request.method=GET http.request.remoteaddr="100.64.0.2:37774" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:40:02.434428180+00:00 stderr F time="2025-12-12T16:40:02.434389829Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=13201438-01a6-4fe5-832f-be2b8e5aaef4 http.request.method=GET http.request.remoteaddr="100.64.0.2:37774" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="277.927µs" http.response.status=401 http.response.written=87 2025-12-12T16:40:02.441375385+00:00 stderr F time="2025-12-12T16:40:02.441299513Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=719b4f50-26e9-4de6-8f40-2964f3905dd2 http.request.method=GET http.request.remoteaddr="100.64.0.2:37788" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=2.925144ms http.response.status=200 http.response.written=2893 2025-12-12T16:40:02.448593146+00:00 stderr F time="2025-12-12T16:40:02.448491084Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=bf41e4fb-bd25-4fe7-afa8-afb56dda4ef8 http.request.method=GET http.request.remoteaddr="100.64.0.2:37788" 
http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:40:02.454117015+00:00 stderr F time="2025-12-12T16:40:02.454045733Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=bf41e4fb-bd25-4fe7-afa8-afb56dda4ef8 http.request.method=GET http.request.remoteaddr="100.64.0.2:37788" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=12.166106ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:40:02.454142976+00:00 stderr F time="2025-12-12T16:40:02.454105315Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=8a10369a-d43b-46a8-9244-14a5c5473fb4 http.request.method=GET http.request.remoteaddr="100.64.0.2:37788" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=12.266559ms http.response.status=404 http.response.written=96 2025-12-12T16:40:02.967243248+00:00 stderr F time="2025-12-12T16:40:02.967109554Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=20e97bb7-d515-4b77-83d4-2aecaabcf5eb http.request.method=GET http.request.remoteaddr="10.217.0.2:37294" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="62.221µs" http.response.status=200 http.response.written=0 2025-12-12T16:40:10.297303549+00:00 stderr F time="2025-12-12T16:40:10.296059067Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=ebe834da-4663-4104-a889-1552e34ae23f http.request.method=GET http.request.remoteaddr="10.217.0.2:37298" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="93.772µs" http.response.status=200 http.response.written=0 2025-12-12T16:40:12.968424862+00:00 stderr F time="2025-12-12T16:40:12.967357935Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=42f96e62-938b-4fd3-b4a0-3f537c1b7154 http.request.method=GET http.request.remoteaddr="10.217.0.2:56506" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="59.781µs" http.response.status=200 http.response.written=0 2025-12-12T16:40:20.299269133+00:00 stderr F time="2025-12-12T16:40:20.297918749Z" level=info msg=response go.version="go1.24.4 (Red Hat 
1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=cc492466-b03c-4486-b6d6-0d98b0eb874b http.request.method=GET http.request.remoteaddr="10.217.0.2:56522" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="89.652µs" http.response.status=200 http.response.written=0 2025-12-12T16:40:22.968022488+00:00 stderr F time="2025-12-12T16:40:22.967059854Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=fdcbf882-31c4-42f8-ab8b-c575a2a78db4 http.request.method=GET http.request.remoteaddr="10.217.0.2:54414" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="116.132µs" http.response.status=200 http.response.written=0 2025-12-12T16:40:29.389601741+00:00 stderr F time="2025-12-12T16:40:29.388439271Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=bf133c88-ae32-4fa7-9ebe-b8cb2deebd0a http.request.method=GET http.request.remoteaddr="100.64.0.2:45634" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:40:29.389689583+00:00 stderr F time="2025-12-12T16:40:29.389607241Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=020237e3-ce97-4807-b492-70128cb957a1 http.request.method=GET http.request.remoteaddr="100.64.0.2:45634" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=1.489617ms http.response.status=401 http.response.written=87 2025-12-12T16:40:29.405016608+00:00 stderr F time="2025-12-12T16:40:29.404877654Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=1fa58be0-b5e7-4c4f-b929-2bed900ac200 http.request.method=GET http.request.remoteaddr="100.64.0.2:45636" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=7.95551ms http.response.status=200 http.response.written=2893 2025-12-12T16:40:29.413671455+00:00 stderr F time="2025-12-12T16:40:29.413605864Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=7ecbcb95-b14b-4ebd-8db6-eb5a4ea853e7 http.request.method=GET http.request.remoteaddr="100.64.0.2:45636" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:40:29.428940179+00:00 stderr F time="2025-12-12T16:40:29.428780875Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 
(Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=7ecbcb95-b14b-4ebd-8db6-eb5a4ea853e7 http.request.method=GET http.request.remoteaddr="100.64.0.2:45636" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=22.917485ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:40:29.428940179+00:00 stderr F time="2025-12-12T16:40:29.428897428Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=bb7c6faf-fe21-442d-941a-f8831b94e22e http.request.method=GET http.request.remoteaddr="100.64.0.2:45636" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=23.08459ms http.response.status=404 http.response.written=96 2025-12-12T16:40:29.438363666+00:00 stderr F time="2025-12-12T16:40:29.438215172Z" level=warning msg="error authorizing context: authorization header required" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=66db13cb-9747-45f8-b1c4-e557431717d8 http.request.method=GET http.request.remoteaddr="100.64.0.2:45642" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" 2025-12-12T16:40:29.438363666+00:00 stderr F time="2025-12-12T16:40:29.438316445Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=24bcd3dc-3306-4f84-a95c-910a95c0c215 http.request.method=GET http.request.remoteaddr="100.64.0.2:45642" http.request.uri=/v2/ http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration="270.026µs" http.response.status=401 http.response.written=87 2025-12-12T16:40:29.447955657+00:00 stderr F time="2025-12-12T16:40:29.447901115Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=1beca979-22c1-4f07-939f-22051388d3a4 http.request.method=GET http.request.remoteaddr="100.64.0.2:45654" http.request.uri="/openshift/token?account=%3Ctoken%3E&scope=repository%3Aservice-telemetry%2Fservice-telemetry-framework-index%3Apull" http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=4.202485ms http.response.status=200 http.response.written=2893 2025-12-12T16:40:29.456606904+00:00 stderr F time="2025-12-12T16:40:29.456521782Z" level=info msg="authorized request" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=ab82fb71-ed15-4e16-a0f4-5ff505ae375d http.request.method=GET http.request.remoteaddr="100.64.0.2:45654" 
http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:40:29.461713042+00:00 stderr F time="2025-12-12T16:40:29.461594269Z" level=error msg="response completed with error" err.code="manifest unknown" err.detail="unknown tag=latest" err.message="manifest unknown" go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=ab82fb71-ed15-4e16-a0f4-5ff505ae375d http.request.method=GET http.request.remoteaddr="100.64.0.2:45654" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=13.067048ms http.response.status=404 http.response.written=96 openshift.auth.user="system:serviceaccount:service-telemetry:infrawatch-operators" openshift.auth.userid=6200f712-5b54-4c19-b404-b36c7c5a7f32 vars.name=service-telemetry/service-telemetry-framework-index vars.reference=latest 2025-12-12T16:40:29.461741563+00:00 stderr F time="2025-12-12T16:40:29.461715762Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="image-registry.openshift-image-registry.svc:5000" http.request.id=c856a064-eb0b-4c79-8f2d-42a0ee8e1c92 http.request.method=GET http.request.remoteaddr="100.64.0.2:45654" http.request.uri=/v2/service-telemetry/service-telemetry-framework-index/manifests/latest http.request.useragent="cri-o/1.33.5 os/linux arch/amd64" http.response.contenttype=application/json http.response.duration=13.220012ms http.response.status=404 http.response.written=96 2025-12-12T16:40:30.295927323+00:00 stderr F time="2025-12-12T16:40:30.295767569Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=8818f411-19cc-4c62-93b2-59f86cfa9325 http.request.method=GET http.request.remoteaddr="10.217.0.2:54416" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="78.332µs" http.response.status=200 http.response.written=0 2025-12-12T16:40:32.973915047+00:00 stderr F time="2025-12-12T16:40:32.973745643Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=0fdbb577-b943-40b5-b448-184e47f1e178 http.request.method=GET http.request.remoteaddr="10.217.0.2:36392" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="94.322µs" http.response.status=200 http.response.written=0 2025-12-12T16:40:40.301273151+00:00 stderr F time="2025-12-12T16:40:40.300198284Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=410e1b43-e329-4ea0-961f-91107da084f4 http.request.method=GET http.request.remoteaddr="10.217.0.2:36396" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="55.161µs" http.response.status=200 http.response.written=0 2025-12-12T16:40:42.966474515+00:00 stderr F time="2025-12-12T16:40:42.966395763Z" level=info msg=response go.version="go1.24.4 (Red Hat 
1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=5ca56c0f-c274-47a6-a2b2-ed165ebc1cb0 http.request.method=GET http.request.remoteaddr="10.217.0.2:50044" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="48.342µs" http.response.status=200 http.response.written=0 2025-12-12T16:40:50.298230938+00:00 stderr F time="2025-12-12T16:40:50.296971727Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=8d564e09-e2bd-46ed-8074-8a4644d1004a http.request.method=GET http.request.remoteaddr="10.217.0.2:50050" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="94.013µs" http.response.status=200 http.response.written=0 2025-12-12T16:40:52.966828438+00:00 stderr F time="2025-12-12T16:40:52.966718345Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=0a4fa034-7b08-4ae3-b379-00d9410ee8e0 http.request.method=GET http.request.remoteaddr="10.217.0.2:36710" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="72.402µs" http.response.status=200 http.response.written=0 2025-12-12T16:41:00.297403982+00:00 stderr F time="2025-12-12T16:41:00.296629252Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=a5e78bd6-feb5-44b1-970a-dc4c2e36fd85 http.request.method=GET http.request.remoteaddr="10.217.0.2:36724" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="72.992µs" http.response.status=200 http.response.written=0 2025-12-12T16:41:02.969640931+00:00 stderr F time="2025-12-12T16:41:02.969512597Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=cf1e4371-5444-4659-8a8b-22ce9761d177 http.request.method=GET http.request.remoteaddr="10.217.0.2:56040" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="74.662µs" http.response.status=200 http.response.written=0 2025-12-12T16:41:10.296414958+00:00 stderr F time="2025-12-12T16:41:10.295335181Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=c69154d0-e91f-4a35-9db4-5f3d053c6a7a http.request.method=GET http.request.remoteaddr="10.217.0.2:56050" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="53.222µs" http.response.status=200 http.response.written=0 2025-12-12T16:41:12.968115346+00:00 stderr F time="2025-12-12T16:41:12.967367547Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=61953b86-7dad-4a62-930e-2518c6cee9d0 http.request.method=GET http.request.remoteaddr="10.217.0.2:41360" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="53.801µs" http.response.status=200 http.response.written=0 2025-12-12T16:41:20.298376012+00:00 stderr F time="2025-12-12T16:41:20.295818968Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=ac2d011f-beab-4772-a3dc-8537728ade78 http.request.method=GET http.request.remoteaddr="10.217.0.2:41368" http.request.uri=/healthz 
http.request.useragent=kube-probe/1.33 http.response.duration="135.944µs" http.response.status=200 http.response.written=0 2025-12-12T16:41:22.967739741+00:00 stderr F time="2025-12-12T16:41:22.967349621Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=ba55c907-ed1d-4615-a455-fe8fba918f67 http.request.method=GET http.request.remoteaddr="10.217.0.2:54540" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="45.141µs" http.response.status=200 http.response.written=0 2025-12-12T16:41:30.296646223+00:00 stderr F time="2025-12-12T16:41:30.295660078Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=747f65d3-bf49-432b-8e80-b3af053cb08d http.request.method=GET http.request.remoteaddr="10.217.0.2:54554" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="50.531µs" http.response.status=200 http.response.written=0 2025-12-12T16:41:32.970612647+00:00 stderr F time="2025-12-12T16:41:32.96992992Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=544adeea-1888-4c4a-b565-5f8974e69ae0 http.request.method=GET http.request.remoteaddr="10.217.0.2:34484" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="75.872µs" http.response.status=200 http.response.written=0 2025-12-12T16:41:40.296949905+00:00 stderr F time="2025-12-12T16:41:40.296164645Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=c1f77a89-6697-4338-943b-1c8f51f0b9f9 http.request.method=GET http.request.remoteaddr="10.217.0.2:34498" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="76.822µs" http.response.status=200 http.response.written=0 2025-12-12T16:41:42.968228141+00:00 stderr F time="2025-12-12T16:41:42.967750869Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=11a35c21-01a6-4af6-b02a-b783f6db9fc7 http.request.method=GET http.request.remoteaddr="10.217.0.2:54612" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="98.953µs" http.response.status=200 http.response.written=0 2025-12-12T16:41:50.296924877+00:00 stderr F time="2025-12-12T16:41:50.296296642Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=3b1b0afa-3797-4102-a07b-ce3aef9e13f6 http.request.method=GET http.request.remoteaddr="10.217.0.2:54624" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="225.035µs" http.response.status=200 http.response.written=0 2025-12-12T16:41:52.966679296+00:00 stderr F time="2025-12-12T16:41:52.966602034Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=0dd833e5-4690-4ef8-b6fd-9031f5b8d4e6 http.request.method=GET http.request.remoteaddr="10.217.0.2:55910" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="71.012µs" http.response.status=200 http.response.written=0 2025-12-12T16:42:00.296075270+00:00 stderr F time="2025-12-12T16:42:00.295007523Z" level=info 
msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=e935221a-45ae-4030-9aa4-fb531fd74ba3 http.request.method=GET http.request.remoteaddr="10.217.0.2:55920" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="69.362µs" http.response.status=200 http.response.written=0 2025-12-12T16:42:02.970171059+00:00 stderr F time="2025-12-12T16:42:02.969171824Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=d8a16f0f-3b4b-4564-8ea2-573e2f04c34e http.request.method=GET http.request.remoteaddr="10.217.0.2:47712" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="72.742µs" http.response.status=200 http.response.written=0 2025-12-12T16:42:10.295438479+00:00 stderr F time="2025-12-12T16:42:10.294581378Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=350bb722-0e27-467b-954f-a08b746e8c00 http.request.method=GET http.request.remoteaddr="10.217.0.2:47720" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="42.661µs" http.response.status=200 http.response.written=0 2025-12-12T16:42:12.966656005+00:00 stderr F time="2025-12-12T16:42:12.966541442Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=650a766f-20a7-488b-a0cd-2ec4f3b82c64 http.request.method=GET http.request.remoteaddr="10.217.0.2:33590" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="52.782µs" http.response.status=200 http.response.written=0 2025-12-12T16:42:20.298481840+00:00 stderr F time="2025-12-12T16:42:20.29766992Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=09c360e4-c638-49e6-b452-7ad726027fc2 http.request.method=GET http.request.remoteaddr="10.217.0.2:33600" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="97.223µs" http.response.status=200 http.response.written=0 2025-12-12T16:42:22.968361432+00:00 stderr F time="2025-12-12T16:42:22.967282905Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=3554fdb8-2dd2-4a9b-be8f-195941c61866 http.request.method=GET http.request.remoteaddr="10.217.0.2:52528" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="129.903µs" http.response.status=200 http.response.written=0 2025-12-12T16:42:30.301781627+00:00 stderr F time="2025-12-12T16:42:30.301081409Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=929bc697-3e4c-4b88-ad1b-64bb2e8d515d http.request.method=GET http.request.remoteaddr="10.217.0.2:52544" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="113.773µs" http.response.status=200 http.response.written=0 2025-12-12T16:42:32.971746491+00:00 stderr F time="2025-12-12T16:42:32.971581026Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=ab8461ea-c2fd-4475-89d5-8728c68ce190 http.request.method=GET 
http.request.remoteaddr="10.217.0.2:60118" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="167.834µs" http.response.status=200 http.response.written=0 2025-12-12T16:42:40.306454928+00:00 stderr F time="2025-12-12T16:42:40.306005327Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=4a9547f1-6368-48ad-afcf-a5856970d611 http.request.method=GET http.request.remoteaddr="10.217.0.2:60124" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="42.941µs" http.response.status=200 http.response.written=0 2025-12-12T16:42:42.969282103+00:00 stderr F time="2025-12-12T16:42:42.968326339Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=39ea12ac-126f-4980-9cf7-c8854f8c0105 http.request.method=GET http.request.remoteaddr="10.217.0.2:46184" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="46.091µs" http.response.status=200 http.response.written=0 2025-12-12T16:42:50.295757584+00:00 stderr F time="2025-12-12T16:42:50.29560336Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=a9e6510b-9054-4ab4-9cbd-46bfd4f9e565 http.request.method=GET http.request.remoteaddr="10.217.0.2:46186" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="82.462µs" http.response.status=200 http.response.written=0 2025-12-12T16:42:52.966782426+00:00 stderr F time="2025-12-12T16:42:52.966668843Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=0ab91a66-06f4-442e-acdf-59ed68ffeb12 http.request.method=GET http.request.remoteaddr="10.217.0.2:46074" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="49.802µs" http.response.status=200 http.response.written=0 2025-12-12T16:43:00.296954230+00:00 stderr F time="2025-12-12T16:43:00.295901793Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=d4c09263-4e54-4119-b1b3-29e6f99aa604 http.request.method=GET http.request.remoteaddr="10.217.0.2:46084" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="95.192µs" http.response.status=200 http.response.written=0 2025-12-12T16:43:02.967956718+00:00 stderr F time="2025-12-12T16:43:02.967208829Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=11d1e539-c788-4f73-96fd-8a38662b9292 http.request.method=GET http.request.remoteaddr="10.217.0.2:53570" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="79.352µs" http.response.status=200 http.response.written=0 2025-12-12T16:43:10.296748845+00:00 stderr F time="2025-12-12T16:43:10.296074598Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=06e80fb9-b7cf-44a4-bea7-dd6e295d9890 http.request.method=GET http.request.remoteaddr="10.217.0.2:53576" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="90.592µs" http.response.status=200 http.response.written=0 
2025-12-12T16:43:12.966556855+00:00 stderr F time="2025-12-12T16:43:12.966033212Z" level=info msg=response go.version="go1.24.4 (Red Hat 1.24.4-2.el9) X:strictfipsruntime" http.request.host="10.217.0.7:5000" http.request.id=fccec3cd-efe6-4ced-8ca1-a910f7b54a76 http.request.method=GET http.request.remoteaddr="10.217.0.2:55970" http.request.uri=/healthz http.request.useragent=kube-probe/1.33 http.response.duration="46.381µs" http.response.status=200 http.response.written=0
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler/0.log
2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913713 1 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNSInstall 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913804 1 feature_gate.go:328] unrecognized feature gate: InsightsConfig 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913809 1 feature_gate.go:328] unrecognized feature gate: NetworkDiagnosticsConfig 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913813 1 feature_gate.go:328] unrecognized feature gate: SignatureStores 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913817 1 feature_gate.go:328] unrecognized feature gate: ConsolePluginContentSecurityPolicy 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913821 1 feature_gate.go:328] unrecognized feature gate: NewOLMCatalogdAPIV1Metas 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913824 1 feature_gate.go:328] unrecognized feature gate: ExternalOIDCWithUIDAndExtraClaimMappings 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913827 1 feature_gate.go:328] unrecognized feature gate: ClusterVersionOperatorConfiguration 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913838 1 feature_gate.go:328] unrecognized feature gate: RouteAdvertisements 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913841 1 feature_gate.go:328] unrecognized feature gate: NewOLMPreflightPermissionChecks 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913845 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImages 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913856 1 feature_gate.go:328] unrecognized feature gate: MultiDiskSetup 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913859 1 feature_gate.go:328] unrecognized feature 
gate: ClusterAPIInstallIBMCloud 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913862 1 feature_gate.go:328] unrecognized feature gate: NewOLMOwnSingleNamespace 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913866 1 feature_gate.go:328] unrecognized feature gate: SetEIPForNLBIngressController 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913870 1 feature_gate.go:349] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913874 1 feature_gate.go:328] unrecognized feature gate: VSphereConfigurableMaxAllowedBlockVolumesPerNode 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913878 1 feature_gate.go:328] unrecognized feature gate: MachineAPIMigration 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913881 1 feature_gate.go:328] unrecognized feature gate: DyanmicServiceEndpointIBMCloud 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913884 1 feature_gate.go:328] unrecognized feature gate: OVNObservability 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913887 1 feature_gate.go:328] unrecognized feature gate: MultiArchInstallAzure 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913890 1 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstall 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913894 1 feature_gate.go:328] unrecognized feature gate: ImageModeStatusReporting 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913897 1 feature_gate.go:328] unrecognized feature gate: AlibabaPlatform 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913900 1 feature_gate.go:328] unrecognized feature gate: ImageStreamImportMode 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913903 1 feature_gate.go:328] unrecognized feature gate: AzureWorkloadIdentity 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913906 1 feature_gate.go:328] unrecognized feature gate: DNSNameResolver 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913909 1 feature_gate.go:328] unrecognized feature gate: AzureDedicatedHosts 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913913 1 feature_gate.go:328] unrecognized feature gate: AdminNetworkPolicy 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913916 1 feature_gate.go:328] unrecognized feature gate: HighlyAvailableArbiter 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913919 1 feature_gate.go:328] unrecognized feature gate: Example2 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913922 1 feature_gate.go:328] unrecognized feature gate: ShortCertRotation 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913926 1 feature_gate.go:328] unrecognized feature gate: MetricsCollectionProfiles 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913929 1 feature_gate.go:328] unrecognized feature gate: BootcNodeManagement 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913932 1 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpointsInstall 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913935 1 feature_gate.go:328] unrecognized feature gate: AzureMultiDisk 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913938 1 feature_gate.go:328] unrecognized feature gate: VolumeGroupSnapshot 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913943 1 feature_gate.go:328] unrecognized feature gate: NetworkLiveMigration 
2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913946 1 feature_gate.go:328] unrecognized feature gate: EtcdBackendQuota 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913949 1 feature_gate.go:328] unrecognized feature gate: ExternalOIDC 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913952 1 feature_gate.go:328] unrecognized feature gate: IngressControllerDynamicConfigurationManager 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913957 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAWS 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913960 1 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNS 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913963 1 feature_gate.go:328] unrecognized feature gate: MachineConfigNodes 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913967 1 feature_gate.go:328] unrecognized feature gate: AWSServiceLBNetworkSecurityGroup 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913970 1 feature_gate.go:328] unrecognized feature gate: AutomatedEtcdBackup 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913973 1 feature_gate.go:328] unrecognized feature gate: IrreconcilableMachineConfig 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913976 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAzure 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913980 1 feature_gate.go:328] unrecognized feature gate: InsightsConfigAPI 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913983 1 feature_gate.go:328] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.913986 1 feature_gate.go:328] unrecognized feature gate: VSphereHostVMGroupZonal 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914015 1 feature_gate.go:328] unrecognized feature gate: OpenShiftPodSecurityAdmission 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914018 1 feature_gate.go:328] unrecognized feature gate: VSphereMultiDisk 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914021 1 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerification 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914024 1 feature_gate.go:328] unrecognized feature gate: DualReplica 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914027 1 feature_gate.go:328] unrecognized feature gate: AWSDedicatedHosts 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914030 1 feature_gate.go:328] unrecognized feature gate: InsightsOnDemandDataGather 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914034 1 feature_gate.go:328] unrecognized feature gate: BootImageSkewEnforcement 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914037 1 feature_gate.go:328] unrecognized feature gate: MixedCPUsAllocation 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914040 1 feature_gate.go:328] unrecognized feature gate: Example 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914043 1 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNS 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914046 1 feature_gate.go:328] unrecognized feature gate: AdditionalRoutingCapabilities 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914050 1 feature_gate.go:328] unrecognized feature gate: UpgradeStatus 2025-12-12T16:15:02.914240607+00:00 stderr F 
W1212 16:15:02.914053 1 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpoints 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914056 1 feature_gate.go:328] unrecognized feature gate: VSphereMixedNodeEnv 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914059 1 feature_gate.go:328] unrecognized feature gate: NutanixMultiSubnets 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914062 1 feature_gate.go:328] unrecognized feature gate: NewOLMWebhookProviderOpenshiftServiceCA 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914065 1 feature_gate.go:328] unrecognized feature gate: PreconfiguredUDNAddresses 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914068 1 feature_gate.go:328] unrecognized feature gate: VSphereMultiNetworks 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914071 1 feature_gate.go:328] unrecognized feature gate: NewOLM 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914074 1 feature_gate.go:328] unrecognized feature gate: PinnedImages 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914077 1 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerificationPKI 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914080 1 feature_gate.go:328] unrecognized feature gate: BuildCSIVolumes 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914083 1 feature_gate.go:328] unrecognized feature gate: ClusterMonitoringConfig 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914086 1 feature_gate.go:328] unrecognized feature gate: KMSEncryptionProvider 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914089 1 feature_gate.go:328] unrecognized feature gate: GatewayAPI 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914094 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesvSphere 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914098 1 feature_gate.go:328] unrecognized feature gate: IngressControllerLBSubnetsAWS 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914101 1 feature_gate.go:328] unrecognized feature gate: ExternalSnapshotMetadata 2025-12-12T16:15:02.914240607+00:00 stderr F W1212 16:15:02.914104 1 feature_gate.go:328] unrecognized feature gate: NetworkSegmentation 2025-12-12T16:15:02.914240607+00:00 stderr P W1212 16:15:02.914107 1 feature_gate.go:328] unrecognized feature gate: 2025-12-12T16:15:02.914436611+00:00 stderr F AWSClusterHostedDNSInstall 2025-12-12T16:15:02.914436611+00:00 stderr F W1212 16:15:02.914112 1 feature_gate.go:328] unrecognized feature gate: CPMSMachineNamePrefix 2025-12-12T16:15:02.914436611+00:00 stderr F W1212 16:15:02.914115 1 feature_gate.go:328] unrecognized feature gate: AzureClusterHostedDNSInstall 2025-12-12T16:15:02.914436611+00:00 stderr F W1212 16:15:02.914118 1 feature_gate.go:328] unrecognized feature gate: NoRegistryClusterOperations 2025-12-12T16:15:02.914436611+00:00 stderr F W1212 16:15:02.914123 1 feature_gate.go:351] Setting GA feature gate ServiceAccountTokenNodeBinding=true. It will be removed in a future release. 
2025-12-12T16:15:02.914436611+00:00 stderr F W1212 16:15:02.914128 1 feature_gate.go:328] unrecognized feature gate: GatewayAPIController 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914585 1 flags.go:64] FLAG: --allow-metric-labels="[]" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914608 1 flags.go:64] FLAG: --allow-metric-labels-manifest="" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914616 1 flags.go:64] FLAG: --authentication-kubeconfig="/etc/kubernetes/static-pod-resources/configmaps/scheduler-kubeconfig/kubeconfig" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914621 1 flags.go:64] FLAG: --authentication-skip-lookup="false" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914626 1 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="10s" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914632 1 flags.go:64] FLAG: --authentication-tolerate-lookup-failure="true" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914636 1 flags.go:64] FLAG: --authorization-always-allow-paths="[/healthz,/readyz,/livez]" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914644 1 flags.go:64] FLAG: --authorization-kubeconfig="/etc/kubernetes/static-pod-resources/configmaps/scheduler-kubeconfig/kubeconfig" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914649 1 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="10s" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914653 1 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="10s" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914657 1 flags.go:64] FLAG: --bind-address="0.0.0.0" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914663 1 flags.go:64] FLAG: --cert-dir="/var/run/kubernetes" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914667 1 flags.go:64] FLAG: --client-ca-file="" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914671 1 flags.go:64] FLAG: --config="/etc/kubernetes/static-pod-resources/configmaps/config/config.yaml" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914675 1 flags.go:64] FLAG: --contention-profiling="true" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914679 1 flags.go:64] FLAG: --disable-http2-serving="false" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914682 1 flags.go:64] FLAG: --disabled-metrics="[]" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914688 1 flags.go:64] FLAG: --emulated-version="[]" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914692 1 flags.go:64] FLAG: 
--feature-gates=":AWSClusterHostedDNS=false,:AWSClusterHostedDNSInstall=false,:AWSDedicatedHosts=false,:AWSServiceLBNetworkSecurityGroup=false,:AdditionalRoutingCapabilities=true,:AdminNetworkPolicy=true,:AlibabaPlatform=true,:AutomatedEtcdBackup=false,:AzureClusterHostedDNSInstall=false,:AzureDedicatedHosts=false,:AzureMultiDisk=false,:AzureWorkloadIdentity=true,:BootImageSkewEnforcement=false,:BootcNodeManagement=false,:BuildCSIVolumes=true,:CPMSMachineNamePrefix=true,:ClusterAPIInstall=false,:ClusterAPIInstallIBMCloud=false,:ClusterMonitoringConfig=false,:ClusterVersionOperatorConfiguration=false,:ConsolePluginContentSecurityPolicy=true,:DNSNameResolver=false,:DualReplica=false,:DyanmicServiceEndpointIBMCloud=false,:DynamicResourceAllocation=false,:EtcdBackendQuota=false,:EventedPLEG=false,:Example=false,:Example2=false,:ExternalOIDC=false,:ExternalOIDCWithUIDAndExtraClaimMappings=false,:ExternalSnapshotMetadata=false,:GCPClusterHostedDNS=false,:GCPClusterHostedDNSInstall=false,:GCPCustomAPIEndpoints=false,:GCPCustomAPIEndpointsInstall=false,:GatewayAPI=true,:GatewayAPIController=true,:HighlyAvailableArbiter=true,:ImageModeStatusReporting=false,:ImageStreamImportMode=false,:ImageVolume=true,:IngressControllerDynamicConfigurationManager=false,:IngressControllerLBSubnetsAWS=true,:InsightsConfig=false,:InsightsConfigAPI=false,:InsightsOnDemandDataGather=false,:IrreconcilableMachineConfig=false,:KMSEncryptionProvider=false,:KMSv1=true,:MachineAPIMigration=false,:MachineAPIOperatorDisableMachineHealthCheckController=false,:MachineConfigNodes=true,:ManagedBootImages=true,:ManagedBootImagesAWS=true,:ManagedBootImagesAzure=false,:ManagedBootImagesvSphere=false,:MaxUnavailableStatefulSet=false,:MetricsCollectionProfiles=true,:MinimumKubeletVersion=false,:MixedCPUsAllocation=false,:MultiArchInstallAzure=false,:MultiDiskSetup=false,:MutatingAdmissionPolicy=false,:NetworkDiagnosticsConfig=true,:NetworkLiveMigration=true,:NetworkSegmentation=true,:NewOLM=true,:NewOLMCatalogdAPIV1Metas=false,:NewOLMOwnSingleNamespace=false,:NewOLMPreflightPermissionChecks=false,:NewOLMWebhookProviderOpenshiftServiceCA=false,:NoRegistryClusterOperations=false,:NodeSwap=false,:NutanixMultiSubnets=false,:OVNObservability=false,:OpenShiftPodSecurityAdmission=false,:PinnedImages=true,:PreconfiguredUDNAddresses=false,:ProcMountType=true,:RouteAdvertisements=true,:RouteExternalCertificate=true,:SELinuxMount=false,:ServiceAccountTokenNodeBinding=true,:SetEIPForNLBIngressController=true,:ShortCertRotation=false,:SignatureStores=false,:SigstoreImageVerification=true,:SigstoreImageVerificationPKI=false,:StoragePerformantSecurityPolicy=true,:TranslateStreamCloseWebsocketRequests=false,:UpgradeStatus=true,:UserNamespacesPodSecurityStandards=true,:UserNamespacesSupport=true,:VSphereConfigurableMaxAllowedBlockVolumesPerNode=false,:VSphereHostVMGroupZonal=false,:VSphereMixedNodeEnv=false,:VSphereMultiDisk=true,:VSphereMultiNetworks=true,:VolumeAttributesClass=false,:VolumeGroupSnapshot=false" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914771 1 flags.go:64] FLAG: --help="false" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914776 1 flags.go:64] FLAG: --http2-max-streams-per-connection="0" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914781 1 flags.go:64] FLAG: --kube-api-burst="100" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914786 1 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 
16:15:02.914791 1 flags.go:64] FLAG: --kube-api-qps="50" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914798 1 flags.go:64] FLAG: --kubeconfig="" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914802 1 flags.go:64] FLAG: --leader-elect="true" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914806 1 flags.go:64] FLAG: --leader-elect-lease-duration="15s" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914809 1 flags.go:64] FLAG: --leader-elect-renew-deadline="10s" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914813 1 flags.go:64] FLAG: --leader-elect-resource-lock="leases" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914817 1 flags.go:64] FLAG: --leader-elect-resource-name="kube-scheduler" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914821 1 flags.go:64] FLAG: --leader-elect-resource-namespace="kube-system" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914825 1 flags.go:64] FLAG: --leader-elect-retry-period="2s" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914830 1 flags.go:64] FLAG: --log-flush-frequency="5s" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914833 1 flags.go:64] FLAG: --log-json-info-buffer-size="0" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914841 1 flags.go:64] FLAG: --log-json-split-stream="false" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914844 1 flags.go:64] FLAG: --log-text-info-buffer-size="0" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914848 1 flags.go:64] FLAG: --log-text-split-stream="false" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914851 1 flags.go:64] FLAG: --logging-format="text" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914855 1 flags.go:64] FLAG: --master="" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914858 1 flags.go:64] FLAG: --permit-address-sharing="false" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914863 1 flags.go:64] FLAG: --permit-port-sharing="false" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914866 1 flags.go:64] FLAG: --pod-max-in-unschedulable-pods-duration="5m0s" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914869 1 flags.go:64] FLAG: --profiling="true" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914873 1 flags.go:64] FLAG: --requestheader-allowed-names="[]" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914878 1 flags.go:64] FLAG: --requestheader-client-ca-file="" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914882 1 flags.go:64] FLAG: --requestheader-extra-headers-prefix="[x-remote-extra-]" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914887 1 flags.go:64] FLAG: --requestheader-group-headers="[x-remote-group]" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914893 1 flags.go:64] FLAG: --requestheader-uid-headers="[]" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914897 1 flags.go:64] FLAG: --requestheader-username-headers="[x-remote-user]" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914902 1 flags.go:64] FLAG: --secure-port="10259" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914907 1 flags.go:64] FLAG: --show-hidden-metrics-for-version="" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914911 1 flags.go:64] FLAG: --tls-cert-file="/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt" 2025-12-12T16:15:02.916034539+00:00 
stderr F I1212 16:15:02.914915 1 flags.go:64] FLAG: --tls-cipher-suites="[TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256]" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914928 1 flags.go:64] FLAG: --tls-min-version="VersionTLS12" 2025-12-12T16:15:02.916034539+00:00 stderr F I1212 16:15:02.914932 1 flags.go:64] FLAG: --tls-private-key-file="/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" 2025-12-12T16:15:02.916034539+00:00 stderr P I1212 16:15:02.914937 2025-12-12T16:15:02.916138631+00:00 stderr F 1 flags.go:64] FLAG: --tls-sni-cert-key="[]" 2025-12-12T16:15:02.916138631+00:00 stderr F I1212 16:15:02.914949 1 flags.go:64] FLAG: --unsupported-kube-api-over-localhost="false" 2025-12-12T16:15:02.916138631+00:00 stderr F I1212 16:15:02.914953 1 flags.go:64] FLAG: --v="2" 2025-12-12T16:15:02.916138631+00:00 stderr F I1212 16:15:02.914959 1 flags.go:64] FLAG: --version="false" 2025-12-12T16:15:02.916138631+00:00 stderr F I1212 16:15:02.914964 1 flags.go:64] FLAG: --vmodule="" 2025-12-12T16:15:02.916138631+00:00 stderr F I1212 16:15:02.914969 1 flags.go:64] FLAG: --write-config-to="" 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915353 1 feature_gate.go:328] unrecognized feature gate: NewOLM 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915361 1 feature_gate.go:328] unrecognized feature gate: PinnedImages 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915365 1 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerificationPKI 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915368 1 feature_gate.go:328] unrecognized feature gate: BuildCSIVolumes 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915371 1 feature_gate.go:328] unrecognized feature gate: ClusterMonitoringConfig 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915374 1 feature_gate.go:328] unrecognized feature gate: KMSEncryptionProvider 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915377 1 feature_gate.go:328] unrecognized feature gate: GatewayAPI 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915381 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesvSphere 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915384 1 feature_gate.go:328] unrecognized feature gate: IngressControllerLBSubnetsAWS 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915387 1 feature_gate.go:328] unrecognized feature gate: ExternalSnapshotMetadata 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915390 1 feature_gate.go:328] unrecognized feature gate: NetworkSegmentation 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915393 1 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNSInstall 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915396 1 feature_gate.go:328] unrecognized feature gate: CPMSMachineNamePrefix 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915401 1 feature_gate.go:328] unrecognized feature gate: AzureClusterHostedDNSInstall 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915404 1 feature_gate.go:328] unrecognized feature gate: NoRegistryClusterOperations 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915409 1 
feature_gate.go:351] Setting GA feature gate ServiceAccountTokenNodeBinding=true. It will be removed in a future release. 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915413 1 feature_gate.go:328] unrecognized feature gate: GatewayAPIController 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915417 1 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNSInstall 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915420 1 feature_gate.go:328] unrecognized feature gate: InsightsConfig 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915423 1 feature_gate.go:328] unrecognized feature gate: NetworkDiagnosticsConfig 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915427 1 feature_gate.go:328] unrecognized feature gate: SignatureStores 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915430 1 feature_gate.go:328] unrecognized feature gate: ConsolePluginContentSecurityPolicy 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915433 1 feature_gate.go:328] unrecognized feature gate: NewOLMCatalogdAPIV1Metas 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915436 1 feature_gate.go:328] unrecognized feature gate: ExternalOIDCWithUIDAndExtraClaimMappings 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915440 1 feature_gate.go:328] unrecognized feature gate: ClusterVersionOperatorConfiguration 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915443 1 feature_gate.go:328] unrecognized feature gate: RouteAdvertisements 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915445 1 feature_gate.go:328] unrecognized feature gate: NewOLMPreflightPermissionChecks 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915451 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImages 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915454 1 feature_gate.go:328] unrecognized feature gate: MultiDiskSetup 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915457 1 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstallIBMCloud 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915460 1 feature_gate.go:328] unrecognized feature gate: NewOLMOwnSingleNamespace 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915463 1 feature_gate.go:328] unrecognized feature gate: SetEIPForNLBIngressController 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915468 1 feature_gate.go:349] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915471 1 feature_gate.go:328] unrecognized feature gate: VSphereConfigurableMaxAllowedBlockVolumesPerNode 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915474 1 feature_gate.go:328] unrecognized feature gate: MachineAPIMigration 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915478 1 feature_gate.go:328] unrecognized feature gate: DyanmicServiceEndpointIBMCloud 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915481 1 feature_gate.go:328] unrecognized feature gate: OVNObservability 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915484 1 feature_gate.go:328] unrecognized feature gate: MultiArchInstallAzure 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915487 1 feature_gate.go:328] unrecognized feature gate: ClusterAPIInstall 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915490 1 feature_gate.go:328] unrecognized feature gate: ImageModeStatusReporting 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915493 1 feature_gate.go:328] unrecognized feature gate: AlibabaPlatform 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915496 1 feature_gate.go:328] unrecognized feature gate: ImageStreamImportMode 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915499 1 feature_gate.go:328] unrecognized feature gate: AzureWorkloadIdentity 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915502 1 feature_gate.go:328] unrecognized feature gate: DNSNameResolver 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915505 1 feature_gate.go:328] unrecognized feature gate: AzureDedicatedHosts 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915510 1 feature_gate.go:328] unrecognized feature gate: AdminNetworkPolicy 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915513 1 feature_gate.go:328] unrecognized feature gate: HighlyAvailableArbiter 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915516 1 feature_gate.go:328] unrecognized feature gate: Example2 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915519 1 feature_gate.go:328] unrecognized feature gate: ShortCertRotation 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915522 1 feature_gate.go:328] unrecognized feature gate: MetricsCollectionProfiles 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915525 1 feature_gate.go:328] unrecognized feature gate: BootcNodeManagement 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915528 1 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpointsInstall 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915533 1 feature_gate.go:328] unrecognized feature gate: AzureMultiDisk 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915535 1 feature_gate.go:328] unrecognized feature gate: VolumeGroupSnapshot 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915539 1 feature_gate.go:328] unrecognized feature gate: NetworkLiveMigration 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915542 1 feature_gate.go:328] unrecognized feature gate: EtcdBackendQuota 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915545 1 feature_gate.go:328] unrecognized feature gate: ExternalOIDC 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915548 1 feature_gate.go:328] unrecognized feature gate: IngressControllerDynamicConfigurationManager 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915553 1 
feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAWS 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915556 1 feature_gate.go:328] unrecognized feature gate: GCPClusterHostedDNS 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915560 1 feature_gate.go:328] unrecognized feature gate: MachineConfigNodes 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915563 1 feature_gate.go:328] unrecognized feature gate: AWSServiceLBNetworkSecurityGroup 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915566 1 feature_gate.go:328] unrecognized feature gate: AutomatedEtcdBackup 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915569 1 feature_gate.go:328] unrecognized feature gate: IrreconcilableMachineConfig 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915572 1 feature_gate.go:328] unrecognized feature gate: ManagedBootImagesAzure 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915575 1 feature_gate.go:328] unrecognized feature gate: InsightsConfigAPI 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915578 1 feature_gate.go:328] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915581 1 feature_gate.go:328] unrecognized feature gate: VSphereHostVMGroupZonal 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915584 1 feature_gate.go:328] unrecognized feature gate: OpenShiftPodSecurityAdmission 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915587 1 feature_gate.go:328] unrecognized feature gate: VSphereMultiDisk 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915590 1 feature_gate.go:328] unrecognized feature gate: SigstoreImageVerification 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915593 1 feature_gate.go:328] unrecognized feature gate: DualReplica 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915596 1 feature_gate.go:328] unrecognized feature gate: AWSDedicatedHosts 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915599 1 feature_gate.go:328] unrecognized feature gate: InsightsOnDemandDataGather 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915602 1 feature_gate.go:328] unrecognized feature gate: BootImageSkewEnforcement 2025-12-12T16:15:02.916138631+00:00 stderr F W1212 16:15:02.915605 1 feature_gate.go:328] unrecognized feature gate: MixedCPUsAllocation 2025-12-12T16:15:02.916138631+00:00 stderr P W1212 16 2025-12-12T16:15:02.916206552+00:00 stderr F :15:02.915608 1 feature_gate.go:328] unrecognized feature gate: Example 2025-12-12T16:15:02.916206552+00:00 stderr F W1212 16:15:02.915611 1 feature_gate.go:328] unrecognized feature gate: AWSClusterHostedDNS 2025-12-12T16:15:02.916206552+00:00 stderr F W1212 16:15:02.915614 1 feature_gate.go:328] unrecognized feature gate: AdditionalRoutingCapabilities 2025-12-12T16:15:02.916206552+00:00 stderr F W1212 16:15:02.915617 1 feature_gate.go:328] unrecognized feature gate: UpgradeStatus 2025-12-12T16:15:02.916206552+00:00 stderr F W1212 16:15:02.915620 1 feature_gate.go:328] unrecognized feature gate: GCPCustomAPIEndpoints 2025-12-12T16:15:02.916206552+00:00 stderr F W1212 16:15:02.915623 1 feature_gate.go:328] unrecognized feature gate: VSphereMixedNodeEnv 2025-12-12T16:15:02.916206552+00:00 stderr F W1212 16:15:02.915626 1 feature_gate.go:328] unrecognized feature gate: NutanixMultiSubnets 2025-12-12T16:15:02.916206552+00:00 stderr F W1212 16:15:02.915629 1 
feature_gate.go:328] unrecognized feature gate: NewOLMWebhookProviderOpenshiftServiceCA 2025-12-12T16:15:02.916206552+00:00 stderr F W1212 16:15:02.915632 1 feature_gate.go:328] unrecognized feature gate: PreconfiguredUDNAddresses 2025-12-12T16:15:02.916206552+00:00 stderr F W1212 16:15:02.915635 1 feature_gate.go:328] unrecognized feature gate: VSphereMultiNetworks 2025-12-12T16:15:02.920361726+00:00 stderr F I1212 16:15:02.920244 1 dynamic_serving_content.go:116] "Loaded a new cert/key pair" name="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" 2025-12-12T16:15:04.152626606+00:00 stderr F W1212 16:15:04.152544 1 authentication.go:397] Error looking up in-cluster authentication configuration: Get "https://api-int.crc.testing:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication": dial tcp 38.102.83.180:6443: connect: connection refused 2025-12-12T16:15:04.152626606+00:00 stderr F W1212 16:15:04.152588 1 authentication.go:398] Continuing without authentication configuration. This may treat all requests as anonymous. 2025-12-12T16:15:04.152626606+00:00 stderr F W1212 16:15:04.152599 1 authentication.go:399] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false 2025-12-12T16:15:14.164271651+00:00 stderr F I1212 16:15:14.164093 1 framework.go:399] "the scheduler starts to work with those plugins" Plugins={"PreEnqueue":{"Enabled":[{"Name":"SchedulingGates","Weight":0},{"Name":"DefaultPreemption","Weight":0}],"Disabled":null},"QueueSort":{"Enabled":[{"Name":"PrioritySort","Weight":0}],"Disabled":null},"PreFilter":{"Enabled":[{"Name":"NodeAffinity","Weight":0},{"Name":"NodePorts","Weight":0},{"Name":"NodeResourcesFit","Weight":0},{"Name":"VolumeRestrictions","Weight":0},{"Name":"NodeVolumeLimits","Weight":0},{"Name":"VolumeBinding","Weight":0},{"Name":"VolumeZone","Weight":0},{"Name":"PodTopologySpread","Weight":0},{"Name":"InterPodAffinity","Weight":0}],"Disabled":null},"Filter":{"Enabled":[{"Name":"NodeUnschedulable","Weight":0},{"Name":"NodeName","Weight":0},{"Name":"TaintToleration","Weight":0},{"Name":"NodeAffinity","Weight":0},{"Name":"NodePorts","Weight":0},{"Name":"NodeResourcesFit","Weight":0},{"Name":"VolumeRestrictions","Weight":0},{"Name":"NodeVolumeLimits","Weight":0},{"Name":"VolumeBinding","Weight":0},{"Name":"VolumeZone","Weight":0},{"Name":"PodTopologySpread","Weight":0},{"Name":"InterPodAffinity","Weight":0}],"Disabled":null},"PostFilter":{"Enabled":[{"Name":"DefaultPreemption","Weight":0}],"Disabled":null},"PreScore":{"Enabled":[{"Name":"TaintToleration","Weight":0},{"Name":"NodeAffinity","Weight":0},{"Name":"NodeResourcesFit","Weight":0},{"Name":"VolumeBinding","Weight":0},{"Name":"PodTopologySpread","Weight":0},{"Name":"InterPodAffinity","Weight":0},{"Name":"NodeResourcesBalancedAllocation","Weight":0}],"Disabled":null},"Score":{"Enabled":[{"Name":"TaintToleration","Weight":3},{"Name":"NodeAffinity","Weight":2},{"Name":"NodeResourcesFit","Weight":1},{"Name":"VolumeBinding","Weight":1},{"Name":"PodTopologySpread","Weight":2},{"Name":"InterPodAffinity","Weight":2},{"Name":"NodeResourcesBalancedAllocation","Weight":1},{"Name":"ImageLocality","Weight":1}],"Disabled":null},"Reserve":{"Enabled":[{"Name":"VolumeBinding","Weight":0}],"Disabled":null},"Permit":{"Enabled":null,"Disabled":null},"PreBind":{"Enabled":[{"Name":"VolumeBinding","Weight":0}],"Disabled":null},"Bind":{"Enabled":[{"Name":"DefaultBin
der","Weight":0}],"Disabled":null},"PostBind":{"Enabled":null,"Disabled":null},"MultiPoint":{"Enabled":null,"Disabled":null}} 2025-12-12T16:15:14.169673301+00:00 stderr F I1212 16:15:14.169593 1 configfile.go:94] "Using component config" config=< 2025-12-12T16:15:14.169673301+00:00 stderr F apiVersion: kubescheduler.config.k8s.io/v1 2025-12-12T16:15:14.169673301+00:00 stderr F clientConnection: 2025-12-12T16:15:14.169673301+00:00 stderr F acceptContentTypes: "" 2025-12-12T16:15:14.169673301+00:00 stderr F burst: 100 2025-12-12T16:15:14.169673301+00:00 stderr F contentType: application/vnd.kubernetes.protobuf 2025-12-12T16:15:14.169673301+00:00 stderr F kubeconfig: /etc/kubernetes/static-pod-resources/configmaps/scheduler-kubeconfig/kubeconfig 2025-12-12T16:15:14.169673301+00:00 stderr F qps: 50 2025-12-12T16:15:14.169673301+00:00 stderr F enableContentionProfiling: false 2025-12-12T16:15:14.169673301+00:00 stderr F enableProfiling: false 2025-12-12T16:15:14.169673301+00:00 stderr F kind: KubeSchedulerConfiguration 2025-12-12T16:15:14.169673301+00:00 stderr F leaderElection: 2025-12-12T16:15:14.169673301+00:00 stderr F leaderElect: true 2025-12-12T16:15:14.169673301+00:00 stderr F leaseDuration: 2m17s 2025-12-12T16:15:14.169673301+00:00 stderr F renewDeadline: 1m47s 2025-12-12T16:15:14.169673301+00:00 stderr F resourceLock: leases 2025-12-12T16:15:14.169673301+00:00 stderr F resourceName: kube-scheduler 2025-12-12T16:15:14.169673301+00:00 stderr F resourceNamespace: openshift-kube-scheduler 2025-12-12T16:15:14.169673301+00:00 stderr F retryPeriod: 26s 2025-12-12T16:15:14.169673301+00:00 stderr F parallelism: 16 2025-12-12T16:15:14.169673301+00:00 stderr F percentageOfNodesToScore: 0 2025-12-12T16:15:14.169673301+00:00 stderr F podInitialBackoffSeconds: 1 2025-12-12T16:15:14.169673301+00:00 stderr F podMaxBackoffSeconds: 10 2025-12-12T16:15:14.169673301+00:00 stderr F profiles: 2025-12-12T16:15:14.169673301+00:00 stderr F - pluginConfig: 2025-12-12T16:15:14.169673301+00:00 stderr F - args: 2025-12-12T16:15:14.169673301+00:00 stderr F apiVersion: kubescheduler.config.k8s.io/v1 2025-12-12T16:15:14.169673301+00:00 stderr F kind: DefaultPreemptionArgs 2025-12-12T16:15:14.169673301+00:00 stderr F minCandidateNodesAbsolute: 100 2025-12-12T16:15:14.169673301+00:00 stderr F minCandidateNodesPercentage: 10 2025-12-12T16:15:14.169673301+00:00 stderr F name: DefaultPreemption 2025-12-12T16:15:14.169673301+00:00 stderr F - args: 2025-12-12T16:15:14.169673301+00:00 stderr F apiVersion: kubescheduler.config.k8s.io/v1 2025-12-12T16:15:14.169673301+00:00 stderr F hardPodAffinityWeight: 1 2025-12-12T16:15:14.169673301+00:00 stderr F ignorePreferredTermsOfExistingPods: false 2025-12-12T16:15:14.169673301+00:00 stderr F kind: InterPodAffinityArgs 2025-12-12T16:15:14.169673301+00:00 stderr F name: InterPodAffinity 2025-12-12T16:15:14.169673301+00:00 stderr F - args: 2025-12-12T16:15:14.169673301+00:00 stderr F apiVersion: kubescheduler.config.k8s.io/v1 2025-12-12T16:15:14.169673301+00:00 stderr F kind: NodeAffinityArgs 2025-12-12T16:15:14.169673301+00:00 stderr F name: NodeAffinity 2025-12-12T16:15:14.169673301+00:00 stderr F - args: 2025-12-12T16:15:14.169673301+00:00 stderr F apiVersion: kubescheduler.config.k8s.io/v1 2025-12-12T16:15:14.169673301+00:00 stderr F kind: NodeResourcesBalancedAllocationArgs 2025-12-12T16:15:14.169673301+00:00 stderr F resources: 2025-12-12T16:15:14.169673301+00:00 stderr F - name: cpu 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 1 2025-12-12T16:15:14.169673301+00:00 
stderr F - name: memory 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 1 2025-12-12T16:15:14.169673301+00:00 stderr F name: NodeResourcesBalancedAllocation 2025-12-12T16:15:14.169673301+00:00 stderr F - args: 2025-12-12T16:15:14.169673301+00:00 stderr F apiVersion: kubescheduler.config.k8s.io/v1 2025-12-12T16:15:14.169673301+00:00 stderr F kind: NodeResourcesFitArgs 2025-12-12T16:15:14.169673301+00:00 stderr F scoringStrategy: 2025-12-12T16:15:14.169673301+00:00 stderr F resources: 2025-12-12T16:15:14.169673301+00:00 stderr F - name: cpu 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 1 2025-12-12T16:15:14.169673301+00:00 stderr F - name: memory 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 1 2025-12-12T16:15:14.169673301+00:00 stderr F type: LeastAllocated 2025-12-12T16:15:14.169673301+00:00 stderr F name: NodeResourcesFit 2025-12-12T16:15:14.169673301+00:00 stderr F - args: 2025-12-12T16:15:14.169673301+00:00 stderr F apiVersion: kubescheduler.config.k8s.io/v1 2025-12-12T16:15:14.169673301+00:00 stderr F defaultingType: System 2025-12-12T16:15:14.169673301+00:00 stderr F kind: PodTopologySpreadArgs 2025-12-12T16:15:14.169673301+00:00 stderr F name: PodTopologySpread 2025-12-12T16:15:14.169673301+00:00 stderr F - args: 2025-12-12T16:15:14.169673301+00:00 stderr F apiVersion: kubescheduler.config.k8s.io/v1 2025-12-12T16:15:14.169673301+00:00 stderr F bindTimeoutSeconds: 600 2025-12-12T16:15:14.169673301+00:00 stderr F kind: VolumeBindingArgs 2025-12-12T16:15:14.169673301+00:00 stderr F name: VolumeBinding 2025-12-12T16:15:14.169673301+00:00 stderr F plugins: 2025-12-12T16:15:14.169673301+00:00 stderr F bind: {} 2025-12-12T16:15:14.169673301+00:00 stderr F filter: {} 2025-12-12T16:15:14.169673301+00:00 stderr F multiPoint: 2025-12-12T16:15:14.169673301+00:00 stderr F enabled: 2025-12-12T16:15:14.169673301+00:00 stderr F - name: SchedulingGates 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 0 2025-12-12T16:15:14.169673301+00:00 stderr F - name: PrioritySort 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 0 2025-12-12T16:15:14.169673301+00:00 stderr F - name: NodeUnschedulable 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 0 2025-12-12T16:15:14.169673301+00:00 stderr F - name: NodeName 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 0 2025-12-12T16:15:14.169673301+00:00 stderr F - name: TaintToleration 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 3 2025-12-12T16:15:14.169673301+00:00 stderr F - name: NodeAffinity 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 2 2025-12-12T16:15:14.169673301+00:00 stderr F - name: NodePorts 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 0 2025-12-12T16:15:14.169673301+00:00 stderr F - name: NodeResourcesFit 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 1 2025-12-12T16:15:14.169673301+00:00 stderr F - name: VolumeRestrictions 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 0 2025-12-12T16:15:14.169673301+00:00 stderr F - name: NodeVolumeLimits 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 0 2025-12-12T16:15:14.169673301+00:00 stderr F - name: VolumeBinding 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 0 2025-12-12T16:15:14.169673301+00:00 stderr F - name: VolumeZone 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 0 2025-12-12T16:15:14.169673301+00:00 stderr F - name: PodTopologySpread 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 2 2025-12-12T16:15:14.169673301+00:00 stderr F - name: InterPodAffinity 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 2 
2025-12-12T16:15:14.169673301+00:00 stderr F - name: DefaultPreemption 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 0 2025-12-12T16:15:14.169673301+00:00 stderr F - name: NodeResourcesBalancedAllocation 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 1 2025-12-12T16:15:14.169673301+00:00 stderr F - name: ImageLocality 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 1 2025-12-12T16:15:14.169673301+00:00 stderr F - name: DefaultBinder 2025-12-12T16:15:14.169673301+00:00 stderr F weight: 0 2025-12-12T16:15:14.169673301+00:00 stderr F permit: {} 2025-12-12T16:15:14.169673301+00:00 stderr F postBind: {} 2025-12-12T16:15:14.169673301+00:00 stderr F postFilter: {} 2025-12-12T16:15:14.169673301+00:00 stderr F preBind: {} 2025-12-12T16:15:14.169673301+00:00 stderr F preEnqueue: {} 2025-12-12T16:15:14.169673301+00:00 stderr F preFilter: {} 2025-12-12T16:15:14.169673301+00:00 stderr F preScore: {} 2025-12-12T16:15:14.169673301+00:00 stderr F queueSort: {} 2025-12-12T16:15:14.169673301+00:00 stderr F reserve: {} 2025-12-12T16:15:14.169673301+00:00 stderr F score: {} 2025-12-12T16:15:14.169673301+00:00 stderr F schedulerName: default-scheduler 2025-12-12T16:15:14.169673301+00:00 stderr F > 2025-12-12T16:15:14.171133736+00:00 stderr F I1212 16:15:14.171105 1 server.go:176] "Starting Kubernetes Scheduler" version="v1.33.5" 2025-12-12T16:15:14.171133736+00:00 stderr F I1212 16:15:14.171124 1 server.go:178] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" 2025-12-12T16:15:14.173553494+00:00 stderr F I1212 16:15:14.173372 1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:15:14.173594775+00:00 stderr F I1212 16:15:14.173581 1 shared_informer.go:350] "Waiting for caches to sync" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:15:14.174017235+00:00 stderr F I1212 16:15:14.173901 1 dynamic_serving_content.go:135] "Starting controller" name="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" 2025-12-12T16:15:14.174118328+00:00 stderr F I1212 16:15:14.174088 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"scheduler.openshift-kube-scheduler.svc\" [serving] validServingFor=[scheduler.openshift-kube-scheduler.svc,scheduler.openshift-kube-scheduler.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:15:14.174053056 +0000 UTC))" 2025-12-12T16:15:14.174464476+00:00 stderr F I1212 16:15:14.174424 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556103\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556103\" (2025-12-12 15:15:02 +0000 UTC to 2028-12-12 15:15:02 +0000 UTC (now=2025-12-12 16:15:14.174405155 +0000 UTC))" 2025-12-12T16:15:14.174522498+00:00 stderr F I1212 16:15:14.174510 1 secure_serving.go:211] Serving securely on [::]:10259 2025-12-12T16:15:14.174983509+00:00 stderr F I1212 16:15:14.174932 1 tlsconfig.go:243] "Starting DynamicServingCertificateController" 2025-12-12T16:15:19.630524379+00:00 stderr F I1212 16:15:19.630451 1 
reflector.go:430] "Caches populated" type="*v1.StorageClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.633880780+00:00 stderr F I1212 16:15:19.633758 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.638138112+00:00 stderr F I1212 16:15:19.637123 1 reflector.go:430] "Caches populated" type="*v1.PodDisruptionBudget" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.638138112+00:00 stderr F I1212 16:15:19.637863 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.638265165+00:00 stderr F I1212 16:15:19.638225 1 node_tree.go:65] "Added node to NodeTree" node="crc" zone="" 2025-12-12T16:15:19.638589983+00:00 stderr F I1212 16:15:19.638555 1 reflector.go:430] "Caches populated" type="*v1.CSIStorageCapacity" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.638733367+00:00 stderr F I1212 16:15:19.638669 1 reflector.go:430] "Caches populated" type="*v1.PersistentVolumeClaim" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.639278750+00:00 stderr F I1212 16:15:19.639007 1 reflector.go:430] "Caches populated" type="*v1.ReplicationController" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.639538796+00:00 stderr F I1212 16:15:19.639507 1 reflector.go:430] "Caches populated" type="*v1.CSIDriver" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.639595877+00:00 stderr F I1212 16:15:19.639577 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="runtime/asm_amd64.s:1700" 2025-12-12T16:15:19.639724660+00:00 stderr F I1212 16:15:19.639695 1 reflector.go:430] "Caches populated" type="*v1.VolumeAttachment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.639896835+00:00 stderr F I1212 16:15:19.639868 1 reflector.go:430] "Caches populated" type="*v1.CSINode" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.640006957+00:00 stderr F I1212 16:15:19.639977 1 reflector.go:430] "Caches populated" type="*v1.StatefulSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.640920909+00:00 stderr F I1212 16:15:19.640836 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.651062944+00:00 stderr F I1212 16:15:19.648059 1 reflector.go:430] "Caches populated" type="*v1.PersistentVolume" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.651062944+00:00 stderr F I1212 16:15:19.648288 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.661226318+00:00 stderr F I1212 16:15:19.660683 1 reflector.go:430] "Caches populated" type="*v1.ReplicaSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:15:19.675226766+00:00 stderr F I1212 16:15:19.674200 1 shared_informer.go:357] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" 2025-12-12T16:15:19.675226766+00:00 stderr F I1212 16:15:19.674592 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:15:19.67457044 +0000 UTC))" 
2025-12-12T16:15:19.675226766+00:00 stderr F I1212 16:15:19.674616 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:15:19.674610661 +0000 UTC))" 2025-12-12T16:15:19.675226766+00:00 stderr F I1212 16:15:19.674628 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:15:19.674624081 +0000 UTC))" 2025-12-12T16:15:19.675226766+00:00 stderr F I1212 16:15:19.674639 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:15:19.674635361 +0000 UTC))" 2025-12-12T16:15:19.675226766+00:00 stderr F I1212 16:15:19.674651 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:15:19.674646112 +0000 UTC))" 2025-12-12T16:15:19.675226766+00:00 stderr F I1212 16:15:19.674661 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:15:19.674657622 +0000 UTC))" 2025-12-12T16:15:19.675226766+00:00 stderr F I1212 16:15:19.674672 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:15:19.674668592 +0000 UTC))" 2025-12-12T16:15:19.675226766+00:00 stderr F I1212 16:15:19.674907 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"scheduler.openshift-kube-scheduler.svc\" [serving] validServingFor=[scheduler.openshift-kube-scheduler.svc,scheduler.openshift-kube-scheduler.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:15:19.674883137 +0000 UTC))" 2025-12-12T16:15:19.675226766+00:00 stderr F I1212 16:15:19.675069 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556103\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556103\" (2025-12-12 15:15:02 +0000 UTC to 2028-12-12 15:15:02 +0000 UTC (now=2025-12-12 16:15:19.675061452 +0000 UTC))" 2025-12-12T16:15:19.675226766+00:00 stderr F I1212 16:15:19.675136 1 leaderelection.go:257] attempting to acquire leader lease 
openshift-kube-scheduler/kube-scheduler... 2025-12-12T16:15:19.690220047+00:00 stderr F I1212 16:15:19.689260 1 leaderelection.go:271] successfully acquired lease openshift-kube-scheduler/kube-scheduler 2025-12-12T16:15:19.713225281+00:00 stderr F I1212 16:15:19.712600 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-apiserver-operator/openshift-apiserver-operator-846cbfc458-zf8cv" 2025-12-12T16:15:19.713225281+00:00 stderr F I1212 16:15:19.712659 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-apiserver-operator/openshift-apiserver-operator-846cbfc458-zf8cv" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:19.730218991+00:00 stderr F I1212 16:15:19.729896 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-dns/node-resolver-tddhh" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:15:20.184037003+00:00 stderr F E1212 16:15:20.183750 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-apiserver-operator/openshift-apiserver-operator-846cbfc458-zf8cv" 2025-12-12T16:15:20.184343300+00:00 stderr F I1212 16:15:20.184313 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-apiserver/apiserver-9ddfb9f55-sg8rq" 2025-12-12T16:15:20.184392291+00:00 stderr F I1212 16:15:20.184372 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-apiserver/apiserver-9ddfb9f55-sg8rq" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:20.580096154+00:00 stderr F E1212 16:15:20.579968 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-apiserver/apiserver-9ddfb9f55-sg8rq" 2025-12-12T16:15:20.580550325+00:00 stderr F I1212 16:15:20.580529 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-controller-manager/controller-manager-65b6cccf98-flnsl" 2025-12-12T16:15:20.580577766+00:00 stderr F I1212 16:15:20.580559 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-controller-manager/controller-manager-65b6cccf98-flnsl" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:20.585355041+00:00 stderr F I1212 16:15:20.585016 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-machine-config-operator/machine-config-daemon-qwg8p" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:15:20.585407132+00:00 stderr F I1212 16:15:20.585360 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-multus/multus-rzhgf" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:15:20.586038397+00:00 stderr F I1212 16:15:20.585989 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-multus/multus-additional-cni-plugins-mqfd8" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:15:20.586686943+00:00 stderr F I1212 16:15:20.586318 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-ovn-kubernetes/ovnkube-node-wjw4g" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:15:20.960225992+00:00 stderr F E1212 16:15:20.959275 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-controller-manager/controller-manager-65b6cccf98-flnsl" 2025-12-12T16:15:20.960225992+00:00 stderr F I1212 16:15:20.959466 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-machine-api/machine-api-operator-755bb95488-dmjfw" 2025-12-12T16:15:20.960225992+00:00 stderr F I1212 16:15:20.959492 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-machine-api/machine-api-operator-755bb95488-dmjfw" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:21.334592041+00:00 stderr F E1212 16:15:21.334529 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-machine-api/machine-api-operator-755bb95488-dmjfw" 2025-12-12T16:15:21.334774235+00:00 stderr F I1212 16:15:21.334754 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-oauth-apiserver/apiserver-8596bd845d-njgb5" 2025-12-12T16:15:21.334792166+00:00 stderr F I1212 16:15:21.334781 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-oauth-apiserver/apiserver-8596bd845d-njgb5" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:21.684266985+00:00 stderr F E1212 16:15:21.684218 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-oauth-apiserver/apiserver-8596bd845d-njgb5" 2025-12-12T16:15:21.684462270+00:00 stderr F I1212 16:15:21.684436 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-route-controller-manager/route-controller-manager-776cdc94d6-zksq4" 2025-12-12T16:15:21.684526391+00:00 stderr F I1212 16:15:21.684470 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-route-controller-manager/route-controller-manager-776cdc94d6-zksq4" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:22.053236644+00:00 stderr F E1212 16:15:22.049956 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-route-controller-manager/route-controller-manager-776cdc94d6-zksq4" 2025-12-12T16:15:22.053236644+00:00 stderr F I1212 16:15:22.050122 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-authentication-operator/authentication-operator-7f5c659b84-6t92c" 2025-12-12T16:15:22.053236644+00:00 stderr F I1212 16:15:22.050146 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-authentication-operator/authentication-operator-7f5c659b84-6t92c" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:22.410555302+00:00 stderr F E1212 16:15:22.410437 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-authentication-operator/authentication-operator-7f5c659b84-6t92c" 2025-12-12T16:15:22.410749097+00:00 stderr F I1212 16:15:22.410699 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-cluster-machine-approver/machine-approver-54c688565-62rws" 2025-12-12T16:15:22.410749097+00:00 stderr F I1212 16:15:22.410734 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-cluster-machine-approver/machine-approver-54c688565-62rws" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:22.770619067+00:00 stderr F E1212 16:15:22.770517 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-cluster-machine-approver/machine-approver-54c688565-62rws" 2025-12-12T16:15:22.770768210+00:00 stderr F I1212 16:15:22.770744 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-authentication/oauth-openshift-66458b6674-brfdj" 2025-12-12T16:15:22.770808331+00:00 stderr F I1212 16:15:22.770776 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-authentication/oauth-openshift-66458b6674-brfdj" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:23.141271146+00:00 stderr F E1212 16:15:23.141147 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-authentication/oauth-openshift-66458b6674-brfdj" 2025-12-12T16:15:23.141781008+00:00 stderr F I1212 16:15:23.141739 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-cluster-samples-operator/cluster-samples-operator-6b564684c8-fzlkp" 2025-12-12T16:15:23.141781008+00:00 stderr F I1212 16:15:23.141768 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-cluster-samples-operator/cluster-samples-operator-6b564684c8-fzlkp" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:23.146504102+00:00 stderr F I1212 16:15:23.145522 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-image-registry/node-ca-2xpcq" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:15:23.483405298+00:00 stderr F E1212 16:15:23.483354 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-cluster-samples-operator/cluster-samples-operator-6b564684c8-fzlkp" 2025-12-12T16:15:23.483623103+00:00 stderr F I1212 16:15:23.483605 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-cluster-version/cluster-version-operator-7c9b9cfd6-d85ps" 2025-12-12T16:15:23.483679654+00:00 stderr F I1212 16:15:23.483663 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-cluster-version/cluster-version-operator-7c9b9cfd6-d85ps" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:23.839406044+00:00 stderr F E1212 16:15:23.839344 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-cluster-version/cluster-version-operator-7c9b9cfd6-d85ps" 2025-12-12T16:15:23.839529707+00:00 stderr F I1212 16:15:23.839510 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-config-operator/openshift-config-operator-5777786469-49zmj" 2025-12-12T16:15:23.839590998+00:00 stderr F I1212 16:15:23.839570 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-config-operator/openshift-config-operator-5777786469-49zmj" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:24.197466230+00:00 stderr F E1212 16:15:24.197380 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-config-operator/openshift-config-operator-5777786469-49zmj" 2025-12-12T16:15:24.197677515+00:00 stderr F I1212 16:15:24.197597 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-686468bdd5-xknw6" 2025-12-12T16:15:24.197677515+00:00 stderr F I1212 16:15:24.197631 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-686468bdd5-xknw6" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:24.544357517+00:00 stderr F E1212 16:15:24.544285 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-686468bdd5-xknw6" 2025-12-12T16:15:24.544456879+00:00 stderr F I1212 16:15:24.544418 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-console-operator/console-operator-67c89758df-5tw72" 2025-12-12T16:15:24.544465280+00:00 stderr F I1212 16:15:24.544451 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-console-operator/console-operator-67c89758df-5tw72" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:24.911569753+00:00 stderr F E1212 16:15:24.911439 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-console-operator/console-operator-67c89758df-5tw72" 2025-12-12T16:15:24.911773318+00:00 stderr F I1212 16:15:24.911730 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-console/console-64d44f6ddf-zhgm9" 2025-12-12T16:15:24.911789249+00:00 stderr F I1212 16:15:24.911769 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-console/console-64d44f6ddf-zhgm9" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:25.271215158+00:00 stderr F E1212 16:15:25.271135 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-console/console-64d44f6ddf-zhgm9" 2025-12-12T16:15:25.271436383+00:00 stderr F I1212 16:15:25.271401 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-console/downloads-747b44746d-sm46g" 2025-12-12T16:15:25.271436383+00:00 stderr F I1212 16:15:25.271428 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-console/downloads-747b44746d-sm46g" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:25.639451099+00:00 stderr F E1212 16:15:25.639346 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-console/downloads-747b44746d-sm46g" 2025-12-12T16:15:25.639740906+00:00 stderr F I1212 16:15:25.639694 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-image-registry/image-registry-66587d64c8-jqtjf" 2025-12-12T16:15:25.639752817+00:00 stderr F I1212 16:15:25.639742 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-image-registry/image-registry-66587d64c8-jqtjf" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:26.020345586+00:00 stderr F E1212 16:15:26.020292 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-image-registry/image-registry-66587d64c8-jqtjf" 2025-12-12T16:15:26.020460429+00:00 stderr F I1212 16:15:26.020443 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-image-registry/cluster-image-registry-operator-86c45576b9-sfm9v" 2025-12-12T16:15:26.020482709+00:00 stderr F I1212 16:15:26.020467 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-image-registry/cluster-image-registry-operator-86c45576b9-sfm9v" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:26.373165996+00:00 stderr F E1212 16:15:26.373091 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-image-registry/cluster-image-registry-operator-86c45576b9-sfm9v" 2025-12-12T16:15:26.373276158+00:00 stderr F I1212 16:15:26.373253 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-dns-operator/dns-operator-799b87ffcd-2w9hn" 2025-12-12T16:15:26.373287499+00:00 stderr F I1212 16:15:26.373279 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-dns-operator/dns-operator-799b87ffcd-2w9hn" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:26.723987047+00:00 stderr F E1212 16:15:26.723907 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-dns-operator/dns-operator-799b87ffcd-2w9hn" 2025-12-12T16:15:26.724113420+00:00 stderr F I1212 16:15:26.724080 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-etcd-operator/etcd-operator-69b85846b6-mrrt5" 2025-12-12T16:15:26.724141731+00:00 stderr F I1212 16:15:26.724117 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-etcd-operator/etcd-operator-69b85846b6-mrrt5" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:27.085076246+00:00 stderr F E1212 16:15:27.085019 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-etcd-operator/etcd-operator-69b85846b6-mrrt5" 2025-12-12T16:15:27.085240770+00:00 stderr F I1212 16:15:27.085198 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-ingress-operator/ingress-operator-6b9cb4dbcf-5twrv" 2025-12-12T16:15:27.085240770+00:00 stderr F I1212 16:15:27.085228 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-ingress-operator/ingress-operator-6b9cb4dbcf-5twrv" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:27.440258873+00:00 stderr F E1212 16:15:27.440086 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-ingress-operator/ingress-operator-6b9cb4dbcf-5twrv" 2025-12-12T16:15:27.440379206+00:00 stderr F I1212 16:15:27.440320 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-ingress/router-default-68cf44c8b8-bqttx" 2025-12-12T16:15:27.440379206+00:00 stderr F I1212 16:15:27.440352 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-ingress/router-default-68cf44c8b8-bqttx" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:27.792583651+00:00 stderr F E1212 16:15:27.792530 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-ingress/router-default-68cf44c8b8-bqttx" 2025-12-12T16:15:27.792762075+00:00 stderr F I1212 16:15:27.792747 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-54f497555d-dcs9d" 2025-12-12T16:15:27.792805736+00:00 stderr F I1212 16:15:27.792794 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-54f497555d-dcs9d" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:28.138750381+00:00 stderr F E1212 16:15:28.138694 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-54f497555d-dcs9d" 2025-12-12T16:15:28.138956245+00:00 stderr F I1212 16:15:28.138936 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-69d5f845f8-nsdgk" 2025-12-12T16:15:28.139012467+00:00 stderr F I1212 16:15:28.138997 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-69d5f845f8-nsdgk" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:28.495935875+00:00 stderr F E1212 16:15:28.495872 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-69d5f845f8-nsdgk" 2025-12-12T16:15:28.496061028+00:00 stderr F I1212 16:15:28.496011 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-575994946d-wff8v" 2025-12-12T16:15:28.496061028+00:00 stderr F I1212 16:15:28.496035 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-575994946d-wff8v" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:28.852899925+00:00 stderr F E1212 16:15:28.852820 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-575994946d-wff8v" 2025-12-12T16:15:28.853037899+00:00 stderr F I1212 16:15:28.852995 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-kube-storage-version-migrator/migrator-866fcbc849-6mhsj" 2025-12-12T16:15:28.853052929+00:00 stderr F I1212 16:15:28.853030 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-kube-storage-version-migrator/migrator-866fcbc849-6mhsj" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:29.210206453+00:00 stderr F E1212 16:15:29.210127 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-kube-storage-version-migrator/migrator-866fcbc849-6mhsj" 2025-12-12T16:15:29.210323896+00:00 stderr F I1212 16:15:29.210296 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-565b79b866-krgxf" 2025-12-12T16:15:29.210332486+00:00 stderr F I1212 16:15:29.210323 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-565b79b866-krgxf" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:29.559341344+00:00 stderr F E1212 16:15:29.559269 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-565b79b866-krgxf" 2025-12-12T16:15:29.559445027+00:00 stderr F I1212 16:15:29.559421 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-machine-config-operator/machine-config-controller-f9cdd68f7-ndnxt" 2025-12-12T16:15:29.559454827+00:00 stderr F I1212 16:15:29.559446 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-machine-config-operator/machine-config-controller-f9cdd68f7-ndnxt" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:29.902544203+00:00 stderr F E1212 16:15:29.902438 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-machine-config-operator/machine-config-controller-f9cdd68f7-ndnxt" 2025-12-12T16:15:29.902716347+00:00 stderr F I1212 16:15:29.902685 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-machine-config-operator/machine-config-operator-67c9d58cbb-bg744" 2025-12-12T16:15:29.902740887+00:00 stderr F I1212 16:15:29.902723 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-machine-config-operator/machine-config-operator-67c9d58cbb-bg744" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:30.271492441+00:00 stderr F E1212 16:15:30.271368 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-machine-config-operator/machine-config-operator-67c9d58cbb-bg744" 2025-12-12T16:15:30.271638345+00:00 stderr F I1212 16:15:30.271590 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-multus/multus-admission-controller-69db94689b-xks9x" 2025-12-12T16:15:30.271665315+00:00 stderr F I1212 16:15:30.271629 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-multus/multus-admission-controller-69db94689b-xks9x" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:30.627143478+00:00 stderr F E1212 16:15:30.627036 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-multus/multus-admission-controller-69db94689b-xks9x" 2025-12-12T16:15:30.627336153+00:00 stderr F I1212 16:15:30.627290 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-machine-api/control-plane-machine-set-operator-75ffdb6fcd-m8gw7" 2025-12-12T16:15:30.627346783+00:00 stderr F I1212 16:15:30.627333 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-machine-api/control-plane-machine-set-operator-75ffdb6fcd-m8gw7" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:30.988418112+00:00 stderr F E1212 16:15:30.988273 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-machine-api/control-plane-machine-set-operator-75ffdb6fcd-m8gw7" 2025-12-12T16:15:30.988522794+00:00 stderr F I1212 16:15:30.988424 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-marketplace/marketplace-operator-547dbd544d-xpvsb" 2025-12-12T16:15:30.988522794+00:00 stderr F I1212 16:15:30.988448 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-marketplace/marketplace-operator-547dbd544d-xpvsb" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:31.354494251+00:00 stderr F E1212 16:15:31.354343 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-marketplace/marketplace-operator-547dbd544d-xpvsb" 2025-12-12T16:15:31.354616954+00:00 stderr F I1212 16:15:31.354581 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-operator-lifecycle-manager/catalog-operator-75ff9f647d-4v9cj" 2025-12-12T16:15:31.354639524+00:00 stderr F I1212 16:15:31.354615 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-operator-lifecycle-manager/catalog-operator-75ff9f647d-4v9cj" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:31.710519528+00:00 stderr F E1212 16:15:31.710449 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-operator-lifecycle-manager/catalog-operator-75ff9f647d-4v9cj" 2025-12-12T16:15:31.710647571+00:00 stderr F I1212 16:15:31.710620 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-operator-lifecycle-manager/packageserver-7d4fc7d867-lfwgk" 2025-12-12T16:15:31.710658082+00:00 stderr F I1212 16:15:31.710648 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-operator-lifecycle-manager/packageserver-7d4fc7d867-lfwgk" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:32.065085850+00:00 stderr F E1212 16:15:32.065010 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-operator-lifecycle-manager/packageserver-7d4fc7d867-lfwgk" 2025-12-12T16:15:32.065419628+00:00 stderr F I1212 16:15:32.065393 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-operator-lifecycle-manager/package-server-manager-77f986bd66-mjzlp" 2025-12-12T16:15:32.065434319+00:00 stderr F I1212 16:15:32.065423 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-operator-lifecycle-manager/package-server-manager-77f986bd66-mjzlp" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:32.069215020+00:00 stderr F I1212 16:15:32.069119 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-ovn-kubernetes/ovnkube-control-plane-57b78d8988-xtrkr" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:15:32.426914687+00:00 stderr F E1212 16:15:32.426826 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-operator-lifecycle-manager/package-server-manager-77f986bd66-mjzlp" 2025-12-12T16:15:32.427156793+00:00 stderr F I1212 16:15:32.427118 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-operator-lifecycle-manager/olm-operator-5cdf44d969-kcw92" 2025-12-12T16:15:32.427190684+00:00 stderr F I1212 16:15:32.427165 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-operator-lifecycle-manager/olm-operator-5cdf44d969-kcw92" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:32.791717806+00:00 stderr F E1212 16:15:32.791648 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-operator-lifecycle-manager/olm-operator-5cdf44d969-kcw92" 2025-12-12T16:15:32.791813198+00:00 stderr F I1212 16:15:32.791786 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-service-ca-operator/service-ca-operator-5b9c976747-9wbcx" 2025-12-12T16:15:32.791847889+00:00 stderr F I1212 16:15:32.791831 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-service-ca-operator/service-ca-operator-5b9c976747-9wbcx" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:33.156702979+00:00 stderr F E1212 16:15:33.156615 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-service-ca-operator/service-ca-operator-5b9c976747-9wbcx" 2025-12-12T16:15:33.156886443+00:00 stderr F I1212 16:15:33.156845 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-service-ca/service-ca-74545575db-gsm6t" 2025-12-12T16:15:33.156921954+00:00 stderr F I1212 16:15:33.156898 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-service-ca/service-ca-74545575db-gsm6t" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 
2025-12-12T16:15:33.517850449+00:00 stderr F E1212 16:15:33.517681 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-service-ca/service-ca-74545575db-gsm6t" 2025-12-12T16:15:33.518063005+00:00 stderr F I1212 16:15:33.518025 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-operator-lifecycle-manager/collect-profiles-29425935-7hkrm" 2025-12-12T16:15:33.518075825+00:00 stderr F I1212 16:15:33.518061 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-operator-lifecycle-manager/collect-profiles-29425935-7hkrm" err="0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/unreachable: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling." 2025-12-12T16:15:33.522280886+00:00 stderr F I1212 16:15:33.522159 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-multus/network-metrics-daemon-jhhcn" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:15:33.885092106+00:00 stderr F E1212 16:15:33.885024 1 schedule_one.go:1095] "Error updating pod" err="Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" pod="openshift-operator-lifecycle-manager/collect-profiles-29425935-7hkrm" 2025-12-12T16:16:28.761679190+00:00 stderr F I1212 16:16:28.761554 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-cluster-version/cluster-version-operator-7c9b9cfd6-d85ps" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.348303104+00:00 stderr F I1212 16:16:39.346721 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-apiserver-operator/openshift-apiserver-operator-846cbfc458-zf8cv" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.355275695+00:00 stderr F I1212 16:16:39.355170 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-oauth-apiserver/apiserver-8596bd845d-njgb5" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.355360097+00:00 stderr F I1212 16:16:39.355287 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-route-controller-manager/route-controller-manager-776cdc94d6-zksq4" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.355770137+00:00 stderr F I1212 16:16:39.355364 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-machine-api/machine-api-operator-755bb95488-dmjfw" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.355770137+00:00 stderr F I1212 16:16:39.355409 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-authentication-operator/authentication-operator-7f5c659b84-6t92c" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.355770137+00:00 stderr F I1212 16:16:39.355443 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-controller-manager/controller-manager-65b6cccf98-flnsl" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.355770137+00:00 stderr F I1212 16:16:39.355512 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-cluster-machine-approver/machine-approver-54c688565-62rws" node="crc" 
evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.356877534+00:00 stderr F I1212 16:16:39.355791 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-cluster-samples-operator/cluster-samples-operator-6b564684c8-fzlkp" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.357203772+00:00 stderr F I1212 16:16:39.357124 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-authentication/oauth-openshift-66458b6674-brfdj" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.358752440+00:00 stderr F I1212 16:16:39.358689 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-ingress-operator/ingress-operator-6b9cb4dbcf-5twrv" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.358838582+00:00 stderr F I1212 16:16:39.358808 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-69d5f845f8-nsdgk" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.359381735+00:00 stderr F I1212 16:16:39.359322 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-operator-lifecycle-manager/catalog-operator-75ff9f647d-4v9cj" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.359556059+00:00 stderr F I1212 16:16:39.359536 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-console/console-64d44f6ddf-zhgm9" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.360304757+00:00 stderr F I1212 16:16:39.360259 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-operator-lifecycle-manager/olm-operator-5cdf44d969-kcw92" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.360382819+00:00 stderr F I1212 16:16:39.360357 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-575994946d-wff8v" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.360413820+00:00 stderr F I1212 16:16:39.360393 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-operator-lifecycle-manager/collect-profiles-29425935-7hkrm" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.360462391+00:00 stderr F I1212 16:16:39.360441 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-console-operator/console-operator-67c89758df-5tw72" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.360516303+00:00 stderr F I1212 16:16:39.360483 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-console/downloads-747b44746d-sm46g" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.360824990+00:00 stderr F I1212 16:16:39.360612 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-image-registry/cluster-image-registry-operator-86c45576b9-sfm9v" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.360824990+00:00 stderr F I1212 16:16:39.360608 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-config-operator/openshift-config-operator-5777786469-49zmj" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.364497970+00:00 stderr F I1212 16:16:39.364418 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-service-ca-operator/service-ca-operator-5b9c976747-9wbcx" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.370259750+00:00 stderr F I1212 16:16:39.367505 1 schedule_one.go:314] "Successfully bound pod to node" 
pod="openshift-operator-lifecycle-manager/package-server-manager-77f986bd66-mjzlp" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.370259750+00:00 stderr F I1212 16:16:39.367577 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/marketplace-operator-547dbd544d-xpvsb" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.370259750+00:00 stderr F I1212 16:16:39.367611 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-apiserver/apiserver-9ddfb9f55-sg8rq" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.370259750+00:00 stderr F I1212 16:16:39.367644 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-image-registry/image-registry-66587d64c8-jqtjf" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.370259750+00:00 stderr F I1212 16:16:39.368030 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-machine-api/control-plane-machine-set-operator-75ffdb6fcd-m8gw7" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.370259750+00:00 stderr F I1212 16:16:39.368058 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-operator-lifecycle-manager/packageserver-7d4fc7d867-lfwgk" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.370259750+00:00 stderr F I1212 16:16:39.368331 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-686468bdd5-xknw6" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.370259750+00:00 stderr F I1212 16:16:39.368421 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-kube-storage-version-migrator/migrator-866fcbc849-6mhsj" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.376990435+00:00 stderr F I1212 16:16:39.376592 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-dns-operator/dns-operator-799b87ffcd-2w9hn" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.376990435+00:00 stderr F I1212 16:16:39.376637 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-565b79b866-krgxf" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.376990435+00:00 stderr F I1212 16:16:39.376780 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-machine-config-operator/machine-config-controller-f9cdd68f7-ndnxt" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.376990435+00:00 stderr F I1212 16:16:39.376787 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-service-ca/service-ca-74545575db-gsm6t" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.376990435+00:00 stderr F I1212 16:16:39.376826 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-multus/multus-admission-controller-69db94689b-xks9x" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.376990435+00:00 stderr F I1212 16:16:39.376837 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-machine-config-operator/machine-config-operator-67c9d58cbb-bg744" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.377408405+00:00 stderr F I1212 16:16:39.377358 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-ingress/router-default-68cf44c8b8-bqttx" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.377547078+00:00 stderr F I1212 16:16:39.377523 1 
schedule_one.go:314] "Successfully bound pod to node" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-54f497555d-dcs9d" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.378879491+00:00 stderr F I1212 16:16:39.378836 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-etcd-operator/etcd-operator-69b85846b6-mrrt5" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.387532272+00:00 stderr F I1212 16:16:39.387449 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-ingress-canary/ingress-canary-tqcqf" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.387599854+00:00 stderr F I1212 16:16:39.387539 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-multus/cni-sysctl-allowlist-ds-q8kdt" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.413284661+00:00 stderr F I1212 16:16:39.407884 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-machine-config-operator/machine-config-server-nwxp2" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.456137727+00:00 stderr F I1212 16:16:39.456022 1 schedule_one.go:314] "Successfully bound pod to node" pod="hostpath-provisioner/csi-hostpathplugin-59hhc" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:39.457390828+00:00 stderr F I1212 16:16:39.457335 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-dns/dns-default-rl44g" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:49.298855840+00:00 stderr F I1212 16:16:49.298331 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/certified-operators-pvzzz" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:49.495154981+00:00 stderr F I1212 16:16:49.493484 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/community-operators-2gt6h" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:49.707089566+00:00 stderr F I1212 16:16:49.705751 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/certified-operators-kxjp8" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:49.892108573+00:00 stderr F I1212 16:16:49.892040 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/community-operators-p7s65" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:51.490070276+00:00 stderr F I1212 16:16:51.490011 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/redhat-marketplace-s7x92" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:51.891480057+00:00 stderr F I1212 16:16:51.888573 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/redhat-marketplace-mgp9n" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:52.488168684+00:00 stderr F I1212 16:16:52.487416 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/redhat-operators-9ndfc" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:52.909296176+00:00 stderr F I1212 16:16:52.906835 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/redhat-operators-2blsm" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:16:55.909223786+00:00 stderr F I1212 16:16:55.897634 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 
+0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:16:55.897591712 +0000 UTC))" 2025-12-12T16:16:55.909223786+00:00 stderr F I1212 16:16:55.897675 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:16:55.897670514 +0000 UTC))" 2025-12-12T16:16:55.909223786+00:00 stderr F I1212 16:16:55.897692 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:16:55.897686925 +0000 UTC))" 2025-12-12T16:16:55.909223786+00:00 stderr F I1212 16:16:55.897703 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:16:55.897699395 +0000 UTC))" 2025-12-12T16:16:55.909223786+00:00 stderr F I1212 16:16:55.897717 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:16:55.897709995 +0000 UTC))" 2025-12-12T16:16:55.909223786+00:00 stderr F I1212 16:16:55.897727 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:16:55.897723776 +0000 UTC))" 2025-12-12T16:16:55.909223786+00:00 stderr F I1212 16:16:55.897739 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:16:55.897735236 +0000 UTC))" 2025-12-12T16:16:55.909223786+00:00 stderr F I1212 16:16:55.897751 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.897747626 +0000 UTC))" 2025-12-12T16:16:55.909223786+00:00 stderr F I1212 16:16:55.897762 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:16:55.897758866 +0000 UTC))" 2025-12-12T16:16:55.909223786+00:00 stderr F I1212 16:16:55.897973 1 tlsconfig.go:203] "Loaded serving cert" 
certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"scheduler.openshift-kube-scheduler.svc\" [serving] validServingFor=[scheduler.openshift-kube-scheduler.svc,scheduler.openshift-kube-scheduler.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:16:55.897966092 +0000 UTC))" 2025-12-12T16:16:55.909223786+00:00 stderr F I1212 16:16:55.898110 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556103\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556103\" (2025-12-12 15:15:02 +0000 UTC to 2028-12-12 15:15:02 +0000 UTC (now=2025-12-12 16:16:55.898103005 +0000 UTC))" 2025-12-12T16:17:06.471656770+00:00 stderr F I1212 16:17:06.471282 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-controller-manager/controller-manager-86d99bb5b9-plxtj" 2025-12-12T16:17:06.471656770+00:00 stderr F I1212 16:17:06.471346 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-controller-manager/controller-manager-86d99bb5b9-plxtj" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 2025-12-12T16:17:06.488294196+00:00 stderr F I1212 16:17:06.487998 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-route-controller-manager/route-controller-manager-6b749d96f6-tvtts" 2025-12-12T16:17:06.488294196+00:00 stderr F I1212 16:17:06.488064 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-route-controller-manager/route-controller-manager-6b749d96f6-tvtts" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 2025-12-12T16:17:26.434224812+00:00 stderr F I1212 16:17:26.432794 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-controller-manager/controller-manager-69f958c846-qd8rg" 2025-12-12T16:17:26.434224812+00:00 stderr F I1212 16:17:26.432839 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-controller-manager/controller-manager-69f958c846-qd8rg" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 2025-12-12T16:17:26.457366396+00:00 stderr F I1212 16:17:26.456452 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-route-controller-manager/route-controller-manager-f4599bd79-7rg9b" 2025-12-12T16:17:26.457366396+00:00 stderr F I1212 16:17:26.456522 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-route-controller-manager/route-controller-manager-f4599bd79-7rg9b" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 
2025-12-12T16:17:30.871143412+00:00 stderr F I1212 16:17:30.870944 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-controller-manager/controller-manager-69f958c846-qd8rg" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:17:30.971595414+00:00 stderr F I1212 16:17:30.969816 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-route-controller-manager/route-controller-manager-f4599bd79-7rg9b" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:17:46.326980753+00:00 stderr F I1212 16:17:46.325664 1 tlsconfig.go:181] "Loaded client CA" index=0 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"admin-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:07 +0000 UTC to 2035-10-31 07:34:07 +0000 UTC (now=2025-12-12 16:17:46.325597619 +0000 UTC))" 2025-12-12T16:17:46.326980753+00:00 stderr F I1212 16:17:46.325741 1 tlsconfig.go:181] "Loaded client CA" index=1 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"kube-control-plane-signer\" [] issuer=\"\" (2025-11-02 07:34:10 +0000 UTC to 2026-11-02 07:34:10 +0000 UTC (now=2025-12-12 16:17:46.325733323 +0000 UTC))" 2025-12-12T16:17:46.326980753+00:00 stderr F I1212 16:17:46.325757 1 tlsconfig.go:181] "Loaded client CA" index=2 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"kube-apiserver-to-kubelet-signer\" [] issuer=\"\" (2025-11-02 07:34:11 +0000 UTC to 2026-11-02 07:34:11 +0000 UTC (now=2025-12-12 16:17:46.325752563 +0000 UTC))" 2025-12-12T16:17:46.326980753+00:00 stderr F I1212 16:17:46.325784 1 tlsconfig.go:181] "Loaded client CA" index=3 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"kubelet-bootstrap-kubeconfig-signer\" [] issuer=\"\" (2025-11-02 07:34:08 +0000 UTC to 2035-10-31 07:34:08 +0000 UTC (now=2025-12-12 16:17:46.325775284 +0000 UTC))" 2025-12-12T16:17:46.326980753+00:00 stderr F I1212 16:17:46.325803 1 tlsconfig.go:181] "Loaded client CA" index=4 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"openshift-kube-apiserver-operator_node-system-admin-signer@1762069887\" [] issuer=\"\" (2025-11-02 07:51:27 +0000 UTC to 2028-11-01 07:51:28 +0000 UTC (now=2025-12-12 16:17:46.325793914 +0000 UTC))" 2025-12-12T16:17:46.326980753+00:00 stderr F I1212 16:17:46.325823 1 tlsconfig.go:181] "Loaded client CA" index=5 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" [] issuer=\"\" (2025-11-02 08:17:34 +0000 UTC to 2027-10-23 08:17:35 +0000 UTC (now=2025-12-12 16:17:46.325816835 +0000 UTC))" 2025-12-12T16:17:46.326980753+00:00 stderr F I1212 16:17:46.325840 1 tlsconfig.go:181] "Loaded client CA" index=6 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"kube-csr-signer_@1762071455\" [] issuer=\"openshift-kube-controller-manager-operator_csr-signer-signer@1762071455\" (2025-11-02 08:17:35 +0000 UTC to 2026-10-28 08:17:36 +0000 UTC (now=2025-12-12 16:17:46.325835035 +0000 UTC))" 2025-12-12T16:17:46.326980753+00:00 stderr F I1212 16:17:46.325861 1 tlsconfig.go:181] "Loaded client CA" index=7 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-apiserver-to-kubelet-signer@1765556209\" [] 
issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-12-12 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.325852606 +0000 UTC))" 2025-12-12T16:17:46.326980753+00:00 stderr F I1212 16:17:46.325885 1 tlsconfig.go:181] "Loaded client CA" index=8 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"openshift-kube-apiserver-operator_kube-control-plane-signer@1765556209\" [] issuer=\"\" (2025-12-12 16:16:48 +0000 UTC to 2026-02-10 16:16:49 +0000 UTC (now=2025-12-12 16:17:46.325877836 +0000 UTC))" 2025-12-12T16:17:46.326980753+00:00 stderr F I1212 16:17:46.325913 1 tlsconfig.go:181] "Loaded client CA" index=9 certName="client-ca::kube-system::extension-apiserver-authentication::client-ca-file" certDetail="\"admin-kubeconfig-signer-custom\" [] issuer=\"\" (2025-12-12 16:17:42 +0000 UTC to 2035-12-10 16:17:42 +0000 UTC (now=2025-12-12 16:17:46.325906267 +0000 UTC))" 2025-12-12T16:17:46.326980753+00:00 stderr F I1212 16:17:46.326241 1 tlsconfig.go:203] "Loaded serving cert" certName="serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key" certDetail="\"scheduler.openshift-kube-scheduler.svc\" [serving] validServingFor=[scheduler.openshift-kube-scheduler.svc,scheduler.openshift-kube-scheduler.svc.cluster.local] issuer=\"openshift-service-serving-signer@1762069924\" (2025-11-02 07:52:15 +0000 UTC to 2027-11-02 07:52:16 +0000 UTC (now=2025-12-12 16:17:46.326229285 +0000 UTC))" 2025-12-12T16:17:46.326980753+00:00 stderr F I1212 16:17:46.326482 1 named_certificates.go:53] "Loaded SNI cert" index=0 certName="self-signed loopback" certDetail="\"apiserver-loopback-client@1765556103\" [serving] validServingFor=[apiserver-loopback-client] issuer=\"apiserver-loopback-client-ca@1765556103\" (2025-12-12 15:15:02 +0000 UTC to 2028-12-12 15:15:02 +0000 UTC (now=2025-12-12 16:17:46.326467031 +0000 UTC))" 2025-12-12T16:17:46.521254687+00:00 stderr F I1212 16:17:46.520130 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-controller-manager/controller-manager-6445bd5bb7-qhd4b" 2025-12-12T16:17:46.521254687+00:00 stderr F I1212 16:17:46.520200 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-controller-manager/controller-manager-6445bd5bb7-qhd4b" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 2025-12-12T16:17:46.599115531+00:00 stderr F I1212 16:17:46.599038 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-route-controller-manager/route-controller-manager-6b47f77689-5r77s" 2025-12-12T16:17:46.599164373+00:00 stderr F I1212 16:17:46.599112 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-route-controller-manager/route-controller-manager-6b47f77689-5r77s" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 
2025-12-12T16:17:48.016424490+00:00 stderr F I1212 16:17:48.015989 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-route-controller-manager/route-controller-manager-6b47f77689-5r77s" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:17:48.302787650+00:00 stderr F I1212 16:17:48.302694 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-controller-manager/controller-manager-6445bd5bb7-qhd4b" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:18:00.927683003+00:00 stderr F I1212 16:18:00.927632 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-authentication/oauth-openshift-6567f5ffdb-jrpfr" 2025-12-12T16:18:00.927786295+00:00 stderr F I1212 16:18:00.927772 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-authentication/oauth-openshift-6567f5ffdb-jrpfr" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 2025-12-12T16:18:06.501519072+00:00 stderr F I1212 16:18:06.501445 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-route-controller-manager/route-controller-manager-67bd47cff9-br6nz" 2025-12-12T16:18:06.501622665+00:00 stderr F I1212 16:18:06.501606 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-route-controller-manager/route-controller-manager-67bd47cff9-br6nz" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 2025-12-12T16:18:06.525419013+00:00 stderr F I1212 16:18:06.525350 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-controller-manager/controller-manager-7fffb5779-6br5z" 2025-12-12T16:18:06.525445954+00:00 stderr F I1212 16:18:06.525415 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-controller-manager/controller-manager-7fffb5779-6br5z" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 
2025-12-12T16:18:07.480807733+00:00 stderr F I1212 16:18:07.479425 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-route-controller-manager/route-controller-manager-67bd47cff9-br6nz" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:18:07.751857374+00:00 stderr F I1212 16:18:07.751493 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-controller-manager/controller-manager-7fffb5779-6br5z" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:18:21.743521466+00:00 stderr F E1212 16:18:21.743429 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-scheduler/leases/kube-scheduler?timeout=53.5s": dial tcp 38.102.83.180:6443: connect: connection refused, falling back to slow path 2025-12-12T16:18:21.743860374+00:00 stderr F E1212 16:18:21.743818 1 leaderelection.go:436] error retrieving resource lock openshift-kube-scheduler/kube-scheduler: Get "https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-scheduler/leases/kube-scheduler?timeout=53.5s": dial tcp 38.102.83.180:6443: connect: connection refused 2025-12-12T16:18:30.408189879+00:00 stderr F I1212 16:18:30.405219 1 cert_rotation.go:92] "Certificate rotation detected, shutting down client connections to start using new credentials" logger="tls-transport-cache" 2025-12-12T16:18:35.874628194+00:00 stderr F I1212 16:18:35.874581 1 cert_rotation.go:92] "Certificate rotation detected, shutting down client connections to start using new credentials" logger="tls-transport-cache" 2025-12-12T16:18:45.443961005+00:00 stderr F I1212 16:18:45.443908 1 reflector.go:430] "Caches populated" type="*v1.PersistentVolumeClaim" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:48.142329436+00:00 stderr F I1212 16:18:48.142283 1 reflector.go:430] "Caches populated" type="*v1.ReplicaSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:50.503484321+00:00 stderr F I1212 16:18:50.503437 1 reflector.go:430] "Caches populated" type="*v1.Node" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:51.019518309+00:00 stderr F I1212 16:18:51.019379 1 reflector.go:430] "Caches populated" type="*v1.CSINode" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:52.025756885+00:00 stderr F I1212 16:18:52.024956 1 reflector.go:430] "Caches populated" type="*v1.Pod" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:52.031072046+00:00 stderr F I1212 16:18:52.030985 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-authentication/oauth-openshift-6567f5ffdb-jrpfr" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:18:52.162959957+00:00 stderr F I1212 16:18:52.162869 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="runtime/asm_amd64.s:1700" 2025-12-12T16:18:52.345432148+00:00 stderr F I1212 16:18:52.345367 1 reflector.go:430] "Caches populated" type="*v1.CSIStorageCapacity" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:53.221196890+00:00 stderr F I1212 16:18:53.221134 1 reflector.go:430] "Caches populated" type="*v1.Service" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:54.282632611+00:00 stderr F I1212 16:18:54.282564 1 reflector.go:430] "Caches populated" type="*v1.PersistentVolume" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:55.841232443+00:00 stderr F I1212 
16:18:55.840404 1 reflector.go:430] "Caches populated" type="*v1.Namespace" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:56.303940173+00:00 stderr F I1212 16:18:56.303826 1 reflector.go:430] "Caches populated" type="*v1.PodDisruptionBudget" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:56.867762422+00:00 stderr F I1212 16:18:56.867671 1 reflector.go:430] "Caches populated" type="*v1.ReplicationController" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:57.800071911+00:00 stderr F I1212 16:18:57.800000 1 reflector.go:430] "Caches populated" type="*v1.VolumeAttachment" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:58.342711766+00:00 stderr F I1212 16:18:58.342619 1 reflector.go:430] "Caches populated" type="*v1.StatefulSet" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:18:59.478973928+00:00 stderr F I1212 16:18:59.478872 1 reflector.go:430] "Caches populated" type="*v1.StorageClass" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:00.683968489+00:00 stderr F I1212 16:19:00.683688 1 reflector.go:430] "Caches populated" type="*v1.CSIDriver" reflector="k8s.io/client-go/informers/factory.go:160" 2025-12-12T16:19:06.484300348+00:00 stderr F I1212 16:19:06.484001 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-controller-manager/controller-manager-7b9f779b68-xk96c" 2025-12-12T16:19:06.484300348+00:00 stderr F I1212 16:19:06.484079 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-controller-manager/controller-manager-7b9f779b68-xk96c" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 2025-12-12T16:19:06.584901905+00:00 stderr F I1212 16:19:06.584788 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-route-controller-manager/route-controller-manager-8fdcdbb66-vvkdl" 2025-12-12T16:19:06.584901905+00:00 stderr F I1212 16:19:06.584830 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-route-controller-manager/route-controller-manager-8fdcdbb66-vvkdl" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 2025-12-12T16:19:07.102074231+00:00 stderr F I1212 16:19:07.095341 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-route-controller-manager/route-controller-manager-8fdcdbb66-vvkdl" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:19:07.155972373+00:00 stderr F I1212 16:19:07.152665 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-controller-manager/controller-manager-7b9f779b68-xk96c" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:19:26.419231636+00:00 stderr F I1212 16:19:26.419128 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-controller-manager/controller-manager-79d797b698-v4v6j" 2025-12-12T16:19:26.419350259+00:00 stderr F I1212 16:19:26.419234 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-controller-manager/controller-manager-79d797b698-v4v6j" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. 
preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 2025-12-12T16:19:26.436971611+00:00 stderr F I1212 16:19:26.435897 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-route-controller-manager/route-controller-manager-bf6bf5794-d5zzt" 2025-12-12T16:19:26.436971611+00:00 stderr F I1212 16:19:26.435957 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-route-controller-manager/route-controller-manager-bf6bf5794-d5zzt" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 2025-12-12T16:19:26.969749247+00:00 stderr F I1212 16:19:26.969654 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-route-controller-manager/route-controller-manager-bf6bf5794-d5zzt" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:19:27.184311415+00:00 stderr F I1212 16:19:27.179647 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-controller-manager/controller-manager-79d797b698-v4v6j" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:19:46.420339722+00:00 stderr F I1212 16:19:46.419401 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-controller-manager/controller-manager-7b9f779b68-rhrzf" 2025-12-12T16:19:46.420339722+00:00 stderr F I1212 16:19:46.419767 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-controller-manager/controller-manager-7b9f779b68-rhrzf" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 
2025-12-12T16:19:47.110490250+00:00 stderr F I1212 16:19:47.110403 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-controller-manager/controller-manager-7b9f779b68-rhrzf" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:20:35.813368922+00:00 stderr F I1212 16:20:35.812730 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/marketplace-operator-547dbd544d-4vhrb" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:20:38.392573285+00:00 stderr F I1212 16:20:38.387652 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/redhat-operators-wqdb8" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:20:39.382692004+00:00 stderr F I1212 16:20:39.382617 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/certified-operators-psnw2" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:20:40.784413062+00:00 stderr F I1212 16:20:40.784328 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/community-operators-6jgv5" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:20:41.784768201+00:00 stderr F I1212 16:20:41.784171 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/redhat-marketplace-jkgqd" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:20:46.441733131+00:00 stderr F I1212 16:20:46.439565 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-route-controller-manager/route-controller-manager-8fdcdbb66-mzfqh" 2025-12-12T16:20:46.441733131+00:00 stderr F I1212 16:20:46.439604 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-route-controller-manager/route-controller-manager-8fdcdbb66-mzfqh" err="0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules." 2025-12-12T16:20:47.762580517+00:00 stderr F I1212 16:20:47.762500 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-route-controller-manager/route-controller-manager-8fdcdbb66-mzfqh" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:25:26.888770031+00:00 stderr F I1212 16:25:26.888716 1 preemption.go:264] "No preemption candidate is found; preemption is not helpful for scheduling" pod="openshift-ovn-kubernetes/ovnkube-control-plane-97c9b6c48-w5wsh" 2025-12-12T16:25:26.888811582+00:00 stderr F I1212 16:25:26.888769 1 schedule_one.go:1044] "Unable to schedule pod; no fit; waiting" pod="openshift-ovn-kubernetes/ovnkube-control-plane-97c9b6c48-w5wsh" err="0/1 nodes are available: 1 node(s) didn't have free ports for the requested pod ports. preemption: 0/1 nodes are available: 1 node(s) didn't have free ports for the requested pod ports." 
2025-12-12T16:25:27.119420695+00:00 stderr F I1212 16:25:27.119166 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-ovn-kubernetes/ovnkube-control-plane-97c9b6c48-w5wsh" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:25:27.512810543+00:00 stderr F I1212 16:25:27.512208 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-ovn-kubernetes/ovnkube-node-4pkx2" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:26:39.938794109+00:00 stderr F I1212 16:26:39.934928 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-image-registry/image-registry-5d9d95bf5b-6md9w" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:26:42.832563076+00:00 stderr F I1212 16:26:42.832488 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:26:49.210218147+00:00 stderr F I1212 16:26:49.210102 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:26:53.088254930+00:00 stderr F I1212 16:26:53.083090 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:26:53.202257910+00:00 stderr F I1212 16:26:53.201763 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/certified-operators-8pl6d" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:26:57.766465128+00:00 stderr F I1212 16:26:57.766380 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/redhat-operators-b4n58" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:27:00.477958005+00:00 stderr F I1212 16:27:00.474861 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-operators/obo-prometheus-operator-86648f486b-wbj29" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:27:00.635058191+00:00 stderr F I1212 16:27:00.634942 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:27:00.646668895+00:00 stderr F I1212 16:27:00.646587 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:27:00.847743064+00:00 stderr F I1212 16:27:00.845140 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-operators/observability-operator-78c97476f4-qxqmn" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:27:01.060703964+00:00 stderr F I1212 16:27:01.056570 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-operators/perses-operator-68bdb49cbf-nqtp8" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:27:05.171571683+00:00 stderr F I1212 16:27:05.169934 1 schedule_one.go:314] "Successfully bound pod to node" pod="service-telemetry/elastic-operator-6c994c654b-42tmw" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:27:22.755133569+00:00 stderr F I1212 16:27:22.754569 1 schedule_one.go:314] "Successfully bound pod to node" pod="cert-manager-operator/cert-manager-operator-controller-manager-64c74584c4-djdmt" node="crc" 
evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:27:25.362370784+00:00 stderr F I1212 16:27:25.361564 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/community-operators-9wq8j" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:27:31.356071685+00:00 stderr F I1212 16:27:31.355779 1 schedule_one.go:314] "Successfully bound pod to node" pod="service-telemetry/elasticsearch-es-default-0" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:27:37.993668343+00:00 stderr F I1212 16:27:37.993568 1 schedule_one.go:314] "Successfully bound pod to node" pod="cert-manager/cert-manager-webhook-7894b5b9b4-2kmrt" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:27:41.569066111+00:00 stderr F I1212 16:27:41.569010 1 schedule_one.go:314] "Successfully bound pod to node" pod="cert-manager/cert-manager-cainjector-7dbf76d5c8-lv2hl" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:27:51.438365104+00:00 stderr F I1212 16:27:51.438136 1 schedule_one.go:314] "Successfully bound pod to node" pod="cert-manager/cert-manager-858d87f86b-r7f8q" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:28:18.106265941+00:00 stderr F I1212 16:28:18.103628 1 schedule_one.go:314] "Successfully bound pod to node" pod="service-telemetry/service-telemetry-framework-index-1-build" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:28:36.944515093+00:00 stderr F I1212 16:28:36.944050 1 schedule_one.go:314] "Successfully bound pod to node" pod="service-telemetry/service-telemetry-framework-index-2-build" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:29:02.156333891+00:00 stderr F I1212 16:29:02.156224 1 schedule_one.go:314] "Successfully bound pod to node" pod="service-telemetry/service-telemetry-framework-index-3-build" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:29:18.246097420+00:00 stderr F I1212 16:29:18.245974 1 schedule_one.go:314] "Successfully bound pod to node" pod="service-telemetry/service-telemetry-framework-index-4-build" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:29:23.244302308+00:00 stderr F I1212 16:29:23.242861 1 schedule_one.go:314] "Successfully bound pod to node" pod="service-telemetry/infrawatch-operators-cj72z" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:29:28.642511230+00:00 stderr F I1212 16:29:28.642239 1 schedule_one.go:314] "Successfully bound pod to node" pod="service-telemetry/infrawatch-operators-cdpts" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:30:00.159101161+00:00 stderr F I1212 16:30:00.157673 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-operator-lifecycle-manager/collect-profiles-29425950-g52jh" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:34:25.697423336+00:00 stderr F I1212 16:34:25.696996 1 schedule_one.go:314] "Successfully bound pod to node" pod="service-telemetry/infrawatch-operators-6bs58" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:37:13.690347050+00:00 stderr F I1212 16:37:13.689211 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/certified-operators-h46w2" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:37:22.541681404+00:00 stderr F I1212 16:37:22.541394 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-marketplace/redhat-operators-k5p4x" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:38:09.912392248+00:00 stderr F I1212 16:38:09.909593 1 schedule_one.go:314] "Successfully bound pod to node" 
pod="openshift-marketplace/community-operators-4sccg" node="crc" evaluatedNodes=1 feasibleNodes=1 2025-12-12T16:40:38.237700492+00:00 stderr F I1212 16:40:38.237608 1 schedule_one.go:314] "Successfully bound pod to node" pod="openshift-must-gather-2sjxj/must-gather-v4h5l" node="crc" evaluatedNodes=1 feasibleNodes=1 ././@LongLink0000644000000000000000000000031100000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-recovery-controller/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-schedul0000755000175000017500000000000015117043063033023 5ustar zuulzuul././@LongLink0000644000000000000000000000031600000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-recovery-controller/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-schedul0000644000175000017500000001717315117043043033034 0ustar zuulzuul2025-12-12T16:15:03.356308896+00:00 stderr F + timeout 3m /bin/bash -exuo pipefail -c 'while [ -n "$(ss -Htanop \( sport = 11443 \))" ]; do sleep 1; done' 2025-12-12T16:15:03.360662594+00:00 stderr F ++ ss -Htanop '(' sport = 11443 ')' 2025-12-12T16:15:03.371789661+00:00 stderr F + '[' -n '' ']' 2025-12-12T16:15:03.372893301+00:00 stderr F + exec cluster-kube-scheduler-operator cert-recovery-controller --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/kube-scheduler-cert-syncer-kubeconfig/kubeconfig --namespace=openshift-kube-scheduler --listen=0.0.0.0:11443 -v=2 2025-12-12T16:15:04.218673548+00:00 stderr F W1212 16:15:04.218540 1 cmd.go:257] Using insecure, self-signed certificates 2025-12-12T16:15:04.219105696+00:00 stderr F I1212 16:15:04.219056 1 crypto.go:594] Generating new CA for cert-recovery-controller-signer@1765556104 cert, and key in /tmp/serving-cert-3024116456/serving-signer.crt, /tmp/serving-cert-3024116456/serving-signer.key 2025-12-12T16:15:04.219105696+00:00 stderr F Validity period of the certificate for "cert-recovery-controller-signer@1765556104" is unset, resetting to 157680000000000000 years! 2025-12-12T16:15:04.763671255+00:00 stderr F I1212 16:15:04.763601 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. Worst graceful lease acquisition is {26s}. 
2025-12-12T16:15:04.764247825+00:00 stderr F I1212 16:15:04.764219 1 observer_polling.go:159] Starting file observer 2025-12-12T16:15:04.764360037+00:00 stderr F I1212 16:15:04.764326 1 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false 2025-12-12T16:15:04.764360037+00:00 stderr F I1212 16:15:04.764348 1 envvar.go:172] "Feature gate default state" feature="InOrderInformers" enabled=true 2025-12-12T16:15:04.764360037+00:00 stderr F I1212 16:15:04.764354 1 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false 2025-12-12T16:15:04.764375397+00:00 stderr F I1212 16:15:04.764358 1 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false 2025-12-12T16:15:04.764375397+00:00 stderr F I1212 16:15:04.764362 1 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false 2025-12-12T16:15:19.871248497+00:00 stderr F W1212 16:15:19.868323 1 builder.go:272] unable to get owner reference (falling back to namespace): unable to setup event recorder as "POD_NAME" env variable is not set and there are no pods 2025-12-12T16:15:19.871248497+00:00 stderr F I1212 16:15:19.868448 1 builder.go:304] cert-recovery-controller version v0.0.0-master+$Format:%H$-$Format:%H$ 2025-12-12T16:15:19.874154037+00:00 stderr F I1212 16:15:19.873359 1 builder.go:446] detected SingleReplicaTopologyMode, the original leader election has been altered for the default SingleReplicaTopology 2025-12-12T16:15:19.888987385+00:00 stderr F I1212 16:15:19.885921 1 leaderelection.go:257] attempting to acquire leader lease openshift-kube-scheduler/cert-recovery-controller-lock... 2025-12-12T16:15:19.926149590+00:00 stderr F I1212 16:15:19.926069 1 leaderelection.go:271] successfully acquired lease openshift-kube-scheduler/cert-recovery-controller-lock 2025-12-12T16:15:19.927728128+00:00 stderr F I1212 16:15:19.927543 1 event.go:377] Event(v1.ObjectReference{Kind:"Lease", Namespace:"openshift-kube-scheduler", Name:"cert-recovery-controller-lock", UID:"0d49e304-ef73-42f9-b02e-3e781d76a8e2", APIVersion:"coordination.k8s.io/v1", ResourceVersion:"36016", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' crc_fe8adf4f-e2d7-4381-a8d1-993a6c0ac997 became leader 2025-12-12T16:15:19.930099785+00:00 stderr F I1212 16:15:19.930053 1 base_controller.go:76] Waiting for caches to sync for kube-scheduler 2025-12-12T16:15:19.943260182+00:00 stderr F I1212 16:15:19.943137 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=kubeschedulers" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.945289671+00:00 stderr F I1212 16:15:19.943892 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.945289671+00:00 stderr F I1212 16:15:19.944250 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.959804501+00:00 stderr F I1212 16:15:19.959682 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:19.978532712+00:00 stderr F I1212 16:15:19.978439 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:15:20.030371571+00:00 stderr F I1212 16:15:20.030284 1 base_controller.go:82] Caches are synced for kube-scheduler 
2025-12-12T16:15:20.030371571+00:00 stderr F I1212 16:15:20.030316 1 base_controller.go:119] Starting #1 worker of kube-scheduler controller ... 2025-12-12T16:16:52.285948127+00:00 stderr F I1212 16:16:52.285033 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"openshift-kube-scheduler", Name:"openshift-kube-scheduler", UID:"", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretUpdated' Updated Secret/kube-scheduler-client-cert-key -n openshift-kube-scheduler because it changed 2025-12-12T16:16:56.280826409+00:00 stderr F I1212 16:16:56.277406 1 event.go:377] Event(v1.ObjectReference{Kind:"Namespace", Namespace:"openshift-kube-scheduler", Name:"openshift-kube-scheduler", UID:"", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'SecretUpdated' Updated Secret/kube-scheduler-client-cert-key -n openshift-kube-scheduler because it changed 2025-12-12T16:18:19.957542131+00:00 stderr F E1212 16:18:19.957083 1 leaderelection.go:429] Failed to update lock optimistically: Put "https://localhost:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-scheduler/leases/cert-recovery-controller-lock?timeout=4m0s": dial tcp [::1]:6443: connect: connection refused, falling back to slow path 2025-12-12T16:18:19.958213268+00:00 stderr F E1212 16:18:19.958087 1 leaderelection.go:436] error retrieving resource lock openshift-kube-scheduler/cert-recovery-controller-lock: Get "https://localhost:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-scheduler/leases/cert-recovery-controller-lock?timeout=4m0s": dial tcp [::1]:6443: connect: connection refused 2025-12-12T16:18:47.408770541+00:00 stderr F I1212 16:18:47.408692 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:48.167709333+00:00 stderr F I1212 16:18:48.167635 1 reflector.go:430] "Caches populated" type="*v1.Secret" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:48.373567283+00:00 stderr F I1212 16:18:48.373504 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:49.150220914+00:00 stderr F I1212 16:18:49.150134 1 reflector.go:430] "Caches populated" type="*v1.ConfigMap" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" 2025-12-12T16:18:59.840587838+00:00 stderr F I1212 16:18:59.839848 1 reflector.go:430] "Caches populated" type="operator.openshift.io/v1, Resource=kubeschedulers" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" ././@LongLink0000644000000000000000000000027100000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/wait-for-host-port/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-schedul0000755000175000017500000000000015117043063033023 5ustar zuulzuul././@LongLink0000644000000000000000000000027600000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/wait-for-host-port/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-schedul0000644000175000017500000000012515117043043033021 0ustar zuulzuul2025-12-12T16:15:01.389069994+00:00 stdout P Waiting 
for port :10259 to be released. ././@LongLink0000644000000000000000000000030100000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-cert-syncer/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-schedul0000755000175000017500000000000015117043063033023 5ustar zuulzuul././@LongLink0000644000000000000000000000030600000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-cert-syncer/0.loghome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-schedul0000644000175000017500000002252415117043063033032 0ustar zuulzuul2025-12-12T16:15:04.232786009+00:00 stderr F I1212 16:15:04.232565 1 observer_polling.go:159] Starting file observer 2025-12-12T16:15:04.232786009+00:00 stderr F I1212 16:15:04.232570 1 base_controller.go:76] Waiting for caches to sync for CertSyncController 2025-12-12T16:15:14.235717992+00:00 stderr F E1212 16:15:14.235668 1 reflector.go:200] "Failed to watch" err="failed to list *v1.Secret: Ge********** \"https://localhost:6443/api/v1/namespaces/openshift-kube-scheduler/secrets?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.Secret" 2025-12-12T16:15:14.235828894+00:00 stderr F E1212 16:15:14.235790 1 reflector.go:200] "Failed to watch" err="failed to list *v1.ConfigMap: Get \"https://localhost:6443/api/v1/namespaces/openshift-kube-scheduler/configmaps?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" reflector="k8s.io/client-go@v0.33.2/tools/cache/reflector.go:285" type="*v1.ConfigMap" 2025-12-12T16:15:20.833016417+00:00 stderr F I1212 16:15:20.832940 1 base_controller.go:82] Caches are synced for CertSyncController 2025-12-12T16:15:20.833016417+00:00 stderr F I1212 16:15:20.832989 1 base_controller.go:119] Starting #1 worker of CertSyncController controller ... 2025-12-12T16:15:20.833078479+00:00 stderr F I1212 16:15:20.833060 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:15:20.833088389+00:00 stderr F I1212 16:15:20.833071 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:16:52.264246798+00:00 stderr F I1212 16:16:52.264137 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:16:52.264246798+00:00 stderr F I1212 16:16:52.264206 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:16:52.272806736+00:00 stderr F I1212 16:16:52.272714 1 certsync_controller.go:260] Creating directory "/etc/kubernetes/static-pod-certs/secrets/kube-scheduler-client-cert-key" ... 2025-12-12T16:16:52.272918629+00:00 stderr F I1212 16:16:52.272902 1 certsync_controller.go:274] Writing secret manifest "/etc/kubernetes/static-pod-certs/secrets/kube-scheduler-client-cert-key/tls.crt" ... 2025-12-12T16:16:52.273260128+00:00 stderr F I1212 16:16:52.273241 1 certsync_controller.go:274] Writing secret manifest "/etc/kubernetes/static-pod-certs/secrets/kube-scheduler-client-cert-key/tls.key" ... 
2025-12-12T16:16:52.276321022+00:00 stderr F I1212 16:16:52.273781 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-scheduler", Name:"openshift-kube-scheduler-crc", UID:"", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CertificateUpdated' Wrote updated secret: op**********ey 2025-12-12T16:16:56.271256725+00:00 stderr F I1212 16:16:56.270547 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:16:56.271256725+00:00 stderr F I1212 16:16:56.270588 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:16:56.285299718+00:00 stderr F I1212 16:16:56.284675 1 certsync_controller.go:260] Creating directory "/etc/kubernetes/static-pod-certs/secrets/kube-scheduler-client-cert-key" ... 2025-12-12T16:16:56.285299718+00:00 stderr F I1212 16:16:56.284762 1 certsync_controller.go:274] Writing secret manifest "/etc/kubernetes/static-pod-certs/secrets/kube-scheduler-client-cert-key/tls.crt" ... 2025-12-12T16:16:56.286819965+00:00 stderr F I1212 16:16:56.285708 1 certsync_controller.go:274] Writing secret manifest "/etc/kubernetes/static-pod-certs/secrets/kube-scheduler-client-cert-key/tls.key" ... 2025-12-12T16:16:56.286819965+00:00 stderr F I1212 16:16:56.286622 1 event.go:377] Event(v1.ObjectReference{Kind:"Pod", Namespace:"openshift-kube-scheduler", Name:"openshift-kube-scheduler-crc", UID:"", APIVersion:"v1", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'CertificateUpdated' Wrote updated secret: op**********ey 2025-12-12T16:16:56.303381160+00:00 stderr F I1212 16:16:56.302623 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:16:56.303381160+00:00 stderr F I1212 16:16:56.302662 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:16:56.314491791+00:00 stderr F I1212 16:16:56.313674 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:16:56.314491791+00:00 stderr F I1212 16:16:56.313712 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:16:56.333045234+00:00 stderr F I1212 16:16:56.332956 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:16:56.333045234+00:00 stderr F I1212 16:16:56.332992 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:16:56.359331416+00:00 stderr F I1212 16:16:56.359271 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:16:56.359331416+00:00 stderr F I1212 16:16:56.359299 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:16:56.395851747+00:00 stderr F I1212 16:16:56.395719 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:16:56.395851747+00:00 stderr F I1212 16:16:56.395746 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:16:56.403260048+00:00 stderr F I1212 16:16:56.403131 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:16:56.403260048+00:00 stderr F I1212 16:16:56.403159 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:18:47.046592197+00:00 stderr F I1212 16:18:47.046499 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:18:47.046592197+00:00 stderr F I1212 16:18:47.046546 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:18:47.046812772+00:00 stderr F I1212 16:18:47.046695 1 
certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:18:47.046812772+00:00 stderr F I1212 16:18:47.046716 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:19:01.933311215+00:00 stderr F I1212 16:19:01.933253 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:19:01.933311215+00:00 stderr F I1212 16:19:01.933278 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:19:01.933415387+00:00 stderr F I1212 16:19:01.933386 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:19:01.933415387+00:00 stderr F I1212 16:19:01.933403 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:19:01.933474229+00:00 stderr F I1212 16:19:01.933451 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:19:01.933474229+00:00 stderr F I1212 16:19:01.933462 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:19:01.933525870+00:00 stderr F I1212 16:19:01.933508 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:19:01.933533460+00:00 stderr F I1212 16:19:01.933523 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:28:47.047824526+00:00 stderr F I1212 16:28:47.047684 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:28:47.047824526+00:00 stderr F I1212 16:28:47.047738 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:28:47.048511013+00:00 stderr F I1212 16:28:47.048417 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:28:47.048786400+00:00 stderr F I1212 16:28:47.048702 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:29:01.933559763+00:00 stderr F I1212 16:29:01.933481 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:29:01.933559763+00:00 stderr F I1212 16:29:01.933522 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:29:01.933719467+00:00 stderr F I1212 16:29:01.933685 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:29:01.933719467+00:00 stderr F I1212 16:29:01.933697 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:38:47.048017977+00:00 stderr F I1212 16:38:47.047855 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:38:47.048017977+00:00 stderr F I1212 16:38:47.047927 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:39:01.934349651+00:00 stderr F I1212 16:39:01.934166 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:39:01.934349651+00:00 stderr F I1212 16:39:01.934270 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] 2025-12-12T16:39:01.934564836+00:00 stderr F I1212 16:39:01.934499 1 certsync_controller.go:74] Syncing configmaps: [] 2025-12-12T16:39:01.934564836+00:00 stderr F I1212 16:39:01.934528 1 certsync_controller.go:178] Syncing secrets: [{kube-scheduler-client-cert-key false}] home/zuul/zuul-output/logs/ci-framework-data/logs/2025-12-12_16-40/0000775000175000017500000000000015117043103023103 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/logs/2025-12-12_16-40/ansible.log0000666000175000017500000046107715117041014025243 0ustar zuulzuul2025-12-12 16:23:25,733 p=30999 u=zuul n=ansible | Starting 
galaxy collection install process 2025-12-12 16:23:25,735 p=30999 u=zuul n=ansible | Process install dependency map 2025-12-12 16:23:42,274 p=30999 u=zuul n=ansible | Starting collection install process 2025-12-12 16:23:42,274 p=30999 u=zuul n=ansible | Installing 'cifmw.general:1.0.0+b9f05e2b' to '/home/zuul/.ansible/collections/ansible_collections/cifmw/general' 2025-12-12 16:23:42,762 p=30999 u=zuul n=ansible | Created collection for cifmw.general:1.0.0+b9f05e2b at /home/zuul/.ansible/collections/ansible_collections/cifmw/general 2025-12-12 16:23:42,762 p=30999 u=zuul n=ansible | cifmw.general:1.0.0+b9f05e2b was installed successfully 2025-12-12 16:23:42,762 p=30999 u=zuul n=ansible | Installing 'containers.podman:1.16.2' to '/home/zuul/.ansible/collections/ansible_collections/containers/podman' 2025-12-12 16:23:42,816 p=30999 u=zuul n=ansible | Created collection for containers.podman:1.16.2 at /home/zuul/.ansible/collections/ansible_collections/containers/podman 2025-12-12 16:23:42,816 p=30999 u=zuul n=ansible | containers.podman:1.16.2 was installed successfully 2025-12-12 16:23:42,816 p=30999 u=zuul n=ansible | Installing 'community.general:10.0.1' to '/home/zuul/.ansible/collections/ansible_collections/community/general' 2025-12-12 16:23:43,563 p=30999 u=zuul n=ansible | Created collection for community.general:10.0.1 at /home/zuul/.ansible/collections/ansible_collections/community/general 2025-12-12 16:23:43,563 p=30999 u=zuul n=ansible | community.general:10.0.1 was installed successfully 2025-12-12 16:23:43,563 p=30999 u=zuul n=ansible | Installing 'ansible.posix:1.6.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/posix' 2025-12-12 16:23:43,612 p=30999 u=zuul n=ansible | Created collection for ansible.posix:1.6.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/posix 2025-12-12 16:23:43,612 p=30999 u=zuul n=ansible | ansible.posix:1.6.2 was installed successfully 2025-12-12 16:23:43,612 p=30999 u=zuul n=ansible | Installing 'ansible.utils:5.1.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/utils' 2025-12-12 16:23:43,705 p=30999 u=zuul n=ansible | Created collection for ansible.utils:5.1.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/utils 2025-12-12 16:23:43,705 p=30999 u=zuul n=ansible | ansible.utils:5.1.2 was installed successfully 2025-12-12 16:23:43,705 p=30999 u=zuul n=ansible | Installing 'community.libvirt:1.3.0' to '/home/zuul/.ansible/collections/ansible_collections/community/libvirt' 2025-12-12 16:23:43,728 p=30999 u=zuul n=ansible | Created collection for community.libvirt:1.3.0 at /home/zuul/.ansible/collections/ansible_collections/community/libvirt 2025-12-12 16:23:43,728 p=30999 u=zuul n=ansible | community.libvirt:1.3.0 was installed successfully 2025-12-12 16:23:43,728 p=30999 u=zuul n=ansible | Installing 'community.crypto:2.22.3' to '/home/zuul/.ansible/collections/ansible_collections/community/crypto' 2025-12-12 16:23:43,864 p=30999 u=zuul n=ansible | Created collection for community.crypto:2.22.3 at /home/zuul/.ansible/collections/ansible_collections/community/crypto 2025-12-12 16:23:43,864 p=30999 u=zuul n=ansible | community.crypto:2.22.3 was installed successfully 2025-12-12 16:23:43,864 p=30999 u=zuul n=ansible | Installing 'kubernetes.core:5.0.0' to '/home/zuul/.ansible/collections/ansible_collections/kubernetes/core' 2025-12-12 16:23:43,977 p=30999 u=zuul n=ansible | Created collection for kubernetes.core:5.0.0 at /home/zuul/.ansible/collections/ansible_collections/kubernetes/core 
2025-12-12 16:23:43,977 p=30999 u=zuul n=ansible | kubernetes.core:5.0.0 was installed successfully 2025-12-12 16:23:43,977 p=30999 u=zuul n=ansible | Installing 'ansible.netcommon:7.1.0' to '/home/zuul/.ansible/collections/ansible_collections/ansible/netcommon' 2025-12-12 16:23:44,044 p=30999 u=zuul n=ansible | Created collection for ansible.netcommon:7.1.0 at /home/zuul/.ansible/collections/ansible_collections/ansible/netcommon 2025-12-12 16:23:44,044 p=30999 u=zuul n=ansible | ansible.netcommon:7.1.0 was installed successfully 2025-12-12 16:23:44,044 p=30999 u=zuul n=ansible | Installing 'openstack.config_template:2.1.1' to '/home/zuul/.ansible/collections/ansible_collections/openstack/config_template' 2025-12-12 16:23:44,061 p=30999 u=zuul n=ansible | Created collection for openstack.config_template:2.1.1 at /home/zuul/.ansible/collections/ansible_collections/openstack/config_template 2025-12-12 16:23:44,061 p=30999 u=zuul n=ansible | openstack.config_template:2.1.1 was installed successfully 2025-12-12 16:23:44,061 p=30999 u=zuul n=ansible | Installing 'junipernetworks.junos:9.1.0' to '/home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos' 2025-12-12 16:23:44,296 p=30999 u=zuul n=ansible | Created collection for junipernetworks.junos:9.1.0 at /home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos 2025-12-12 16:23:44,297 p=30999 u=zuul n=ansible | junipernetworks.junos:9.1.0 was installed successfully 2025-12-12 16:23:44,297 p=30999 u=zuul n=ansible | Installing 'cisco.ios:9.0.3' to '/home/zuul/.ansible/collections/ansible_collections/cisco/ios' 2025-12-12 16:23:44,578 p=30999 u=zuul n=ansible | Created collection for cisco.ios:9.0.3 at /home/zuul/.ansible/collections/ansible_collections/cisco/ios 2025-12-12 16:23:44,578 p=30999 u=zuul n=ansible | cisco.ios:9.0.3 was installed successfully 2025-12-12 16:23:44,578 p=30999 u=zuul n=ansible | Installing 'mellanox.onyx:1.0.0' to '/home/zuul/.ansible/collections/ansible_collections/mellanox/onyx' 2025-12-12 16:23:44,612 p=30999 u=zuul n=ansible | Created collection for mellanox.onyx:1.0.0 at /home/zuul/.ansible/collections/ansible_collections/mellanox/onyx 2025-12-12 16:23:44,612 p=30999 u=zuul n=ansible | mellanox.onyx:1.0.0 was installed successfully 2025-12-12 16:23:44,613 p=30999 u=zuul n=ansible | Installing 'community.okd:4.0.0' to '/home/zuul/.ansible/collections/ansible_collections/community/okd' 2025-12-12 16:23:44,647 p=30999 u=zuul n=ansible | Created collection for community.okd:4.0.0 at /home/zuul/.ansible/collections/ansible_collections/community/okd 2025-12-12 16:23:44,647 p=30999 u=zuul n=ansible | community.okd:4.0.0 was installed successfully 2025-12-12 16:23:44,647 p=30999 u=zuul n=ansible | Installing '@NAMESPACE@.@NAME@:3.1.4' to '/home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@' 2025-12-12 16:23:44,740 p=30999 u=zuul n=ansible | Created collection for @NAMESPACE@.@NAME@:3.1.4 at /home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@ 2025-12-12 16:23:44,740 p=30999 u=zuul n=ansible | @NAMESPACE@.@NAME@:3.1.4 was installed successfully 2025-12-12 16:23:52,472 p=31591 u=zuul n=ansible | PLAY [Bootstrap playbook] ****************************************************** 2025-12-12 16:23:52,489 p=31591 u=zuul n=ansible | TASK [Gathering Facts ] ******************************************************** 2025-12-12 16:23:52,489 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:52 +0000 (0:00:00.034) 0:00:00.034 ******* 2025-12-12 16:23:52,489 
p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:52 +0000 (0:00:00.033) 0:00:00.033 ******* 2025-12-12 16:23:53,532 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:53,551 p=31591 u=zuul n=ansible | TASK [Set custom cifmw PATH reusable fact cifmw_path={{ ansible_user_dir }}/.crc/bin:{{ ansible_user_dir }}/.crc/bin/oc:{{ ansible_user_dir }}/bin:{{ ansible_env.PATH }}, cacheable=True] *** 2025-12-12 16:23:53,551 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:01.062) 0:00:01.096 ******* 2025-12-12 16:23:53,551 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:01.062) 0:00:01.095 ******* 2025-12-12 16:23:53,579 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:53,586 p=31591 u=zuul n=ansible | TASK [Get customized parameters ci_framework_params={{ hostvars[inventory_hostname] | dict2items | selectattr("key", "match", "^(cifmw|pre|post)_(?!install_yamls|openshift_token|openshift_login|openshift_kubeconfig).*") | list | items2dict }}] *** 2025-12-12 16:23:53,587 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:00.035) 0:00:01.132 ******* 2025-12-12 16:23:53,587 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:00.035) 0:00:01.130 ******* 2025-12-12 16:23:53,650 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:53,657 p=31591 u=zuul n=ansible | TASK [install_ca : Ensure target directory exists path={{ cifmw_install_ca_trust_dir }}, state=directory, mode=0755] *** 2025-12-12 16:23:53,657 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:00.070) 0:00:01.202 ******* 2025-12-12 16:23:53,657 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:00.070) 0:00:01.201 ******* 2025-12-12 16:23:54,401 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:54,411 p=31591 u=zuul n=ansible | TASK [install_ca : Install internal CA from url url={{ cifmw_install_ca_url }}, dest={{ cifmw_install_ca_trust_dir }}, validate_certs={{ cifmw_install_ca_url_validate_certs | default(omit) }}, mode=0644] *** 2025-12-12 16:23:54,411 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.753) 0:00:01.956 ******* 2025-12-12 16:23:54,411 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.754) 0:00:01.955 ******* 2025-12-12 16:23:54,441 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:23:54,452 p=31591 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from inline dest={{ cifmw_install_ca_trust_dir }}/cifmw_inline_ca_bundle.crt, content={{ cifmw_install_ca_bundle_inline }}, mode=0644] *** 2025-12-12 16:23:54,452 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.041) 0:00:01.998 ******* 2025-12-12 16:23:54,453 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.041) 0:00:01.996 ******* 2025-12-12 16:23:54,490 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:23:54,502 p=31591 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from file dest={{ cifmw_install_ca_trust_dir }}/{{ cifmw_install_ca_bundle_src | basename }}, src={{ cifmw_install_ca_bundle_src }}, mode=0644] *** 2025-12-12 16:23:54,502 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.049) 0:00:02.048 ******* 2025-12-12 16:23:54,503 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.049) 0:00:02.046 ******* 2025-12-12 16:23:54,528 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:23:54,544 
p=31591 u=zuul n=ansible | TASK [install_ca : Update ca bundle _raw_params=update-ca-trust] *************** 2025-12-12 16:23:54,544 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.041) 0:00:02.089 ******* 2025-12-12 16:23:54,544 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.041) 0:00:02.088 ******* 2025-12-12 16:23:56,110 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:23:56,123 p=31591 u=zuul n=ansible | TASK [repo_setup : Ensure directories are present path={{ cifmw_repo_setup_basedir }}/{{ item }}, state=directory, mode=0755] *** 2025-12-12 16:23:56,123 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:56 +0000 (0:00:01.579) 0:00:03.668 ******* 2025-12-12 16:23:56,123 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:56 +0000 (0:00:01.579) 0:00:03.667 ******* 2025-12-12 16:23:56,315 p=31591 u=zuul n=ansible | changed: [localhost] => (item=tmp) 2025-12-12 16:23:56,501 p=31591 u=zuul n=ansible | changed: [localhost] => (item=artifacts/repositories) 2025-12-12 16:23:56,672 p=31591 u=zuul n=ansible | changed: [localhost] => (item=venv/repo_setup) 2025-12-12 16:23:56,687 p=31591 u=zuul n=ansible | TASK [repo_setup : Make sure git-core package is installed name=git-core, state=present] *** 2025-12-12 16:23:56,687 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:56 +0000 (0:00:00.563) 0:00:04.232 ******* 2025-12-12 16:23:56,687 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:56 +0000 (0:00:00.563) 0:00:04.231 ******* 2025-12-12 16:23:58,223 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:58,229 p=31591 u=zuul n=ansible | TASK [repo_setup : Get repo-setup repository accept_hostkey=True, dest={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, repo={{ cifmw_repo_setup_src }}] *** 2025-12-12 16:23:58,229 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:58 +0000 (0:00:01.541) 0:00:05.774 ******* 2025-12-12 16:23:58,229 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:58 +0000 (0:00:01.541) 0:00:05.773 ******* 2025-12-12 16:23:59,641 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:23:59,650 p=31591 u=zuul n=ansible | TASK [repo_setup : Initialize python venv and install requirements virtualenv={{ cifmw_repo_setup_venv }}, requirements={{ cifmw_repo_setup_basedir }}/tmp/repo-setup/requirements.txt, virtualenv_command=python3 -m venv --system-site-packages --upgrade-deps] *** 2025-12-12 16:23:59,650 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:59 +0000 (0:00:01.421) 0:00:07.195 ******* 2025-12-12 16:23:59,650 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:59 +0000 (0:00:01.421) 0:00:07.194 ******* 2025-12-12 16:24:08,242 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:08,257 p=31591 u=zuul n=ansible | TASK [repo_setup : Install repo-setup package chdir={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, creates={{ cifmw_repo_setup_venv }}/bin/repo-setup, _raw_params={{ cifmw_repo_setup_venv }}/bin/python setup.py install] *** 2025-12-12 16:24:08,257 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:08 +0000 (0:00:08.606) 0:00:15.802 ******* 2025-12-12 16:24:08,257 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:08 +0000 (0:00:08.606) 0:00:15.801 ******* 2025-12-12 16:24:09,064 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:09,072 p=31591 u=zuul n=ansible | TASK [repo_setup : Set cifmw_repo_setup_dlrn_hash_tag from content provider cifmw_repo_setup_dlrn_hash_tag={{ content_provider_dlrn_md5_hash }}] 
*** 2025-12-12 16:24:09,073 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.815) 0:00:16.618 ******* 2025-12-12 16:24:09,073 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.815) 0:00:16.616 ******* 2025-12-12 16:24:09,103 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:09,110 p=31591 u=zuul n=ansible | TASK [repo_setup : Run repo-setup _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup {{ cifmw_repo_setup_promotion }} {{ cifmw_repo_setup_additional_repos }} -d {{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} -b {{ cifmw_repo_setup_branch }} --rdo-mirror {{ cifmw_repo_setup_rdo_mirror }} {% if cifmw_repo_setup_dlrn_hash_tag | length > 0 %} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif %} -o {{ cifmw_repo_setup_output }}] *** 2025-12-12 16:24:09,110 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.037) 0:00:16.655 ******* 2025-12-12 16:24:09,110 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.037) 0:00:16.654 ******* 2025-12-12 16:24:09,757 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:09,771 p=31591 u=zuul n=ansible | TASK [repo_setup : Get component repo url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/component/{{ cifmw_repo_setup_component_name }}/{{ cifmw_repo_setup_component_promotion_tag }}/delorean.repo, dest={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, mode=0644] *** 2025-12-12 16:24:09,772 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.661) 0:00:17.317 ******* 2025-12-12 16:24:09,772 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.661) 0:00:17.315 ******* 2025-12-12 16:24:09,801 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:09,807 p=31591 u=zuul n=ansible | TASK [repo_setup : Rename component repo path={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, regexp=delorean-component-{{ cifmw_repo_setup_component_name }}, replace={{ cifmw_repo_setup_component_name }}-{{ cifmw_repo_setup_component_promotion_tag }}] *** 2025-12-12 16:24:09,808 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.036) 0:00:17.353 ******* 2025-12-12 16:24:09,808 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.036) 0:00:17.351 ******* 2025-12-12 16:24:09,834 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:09,840 p=31591 u=zuul n=ansible | TASK [repo_setup : Disable component repo in current-podified dlrn repo path={{ cifmw_repo_setup_output }}/delorean.repo, section=delorean-component-{{ cifmw_repo_setup_component_name }}, option=enabled, value=0, mode=0644] *** 2025-12-12 16:24:09,840 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.032) 0:00:17.385 ******* 2025-12-12 16:24:09,840 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.032) 0:00:17.384 ******* 2025-12-12 16:24:09,870 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:09,878 p=31591 u=zuul n=ansible | TASK [repo_setup : Run repo-setup-get-hash _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup-get-hash --dlrn-url {{ cifmw_repo_setup_dlrn_uri[:-1] }} --os-version {{ cifmw_repo_setup_os_release }}{{ 
cifmw_repo_setup_dist_major_version }} --release {{ cifmw_repo_setup_branch }} {% if cifmw_repo_setup_component_name | length > 0 -%} --component {{ cifmw_repo_setup_component_name }} --tag {{ cifmw_repo_setup_component_promotion_tag }} {% else -%} --tag {{cifmw_repo_setup_promotion }} {% endif -%} {% if (cifmw_repo_setup_dlrn_hash_tag | length > 0) and (cifmw_repo_setup_component_name | length <= 0) -%} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif -%} --json] *** 2025-12-12 16:24:09,878 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.038) 0:00:17.423 ******* 2025-12-12 16:24:09,878 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.038) 0:00:17.422 ******* 2025-12-12 16:24:10,353 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:10,368 p=31591 u=zuul n=ansible | TASK [repo_setup : Dump full hash in delorean.repo.md5 file content={{ _repo_setup_json['full_hash'] }} , dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2025-12-12 16:24:10,368 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:10 +0000 (0:00:00.490) 0:00:17.913 ******* 2025-12-12 16:24:10,369 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:10 +0000 (0:00:00.490) 0:00:17.912 ******* 2025-12-12 16:24:11,049 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:11,058 p=31591 u=zuul n=ansible | TASK [repo_setup : Dump current-podified hash url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/current-podified/delorean.repo.md5, dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2025-12-12 16:24:11,058 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.689) 0:00:18.603 ******* 2025-12-12 16:24:11,058 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.689) 0:00:18.602 ******* 2025-12-12 16:24:11,076 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,085 p=31591 u=zuul n=ansible | TASK [repo_setup : Slurp current podified hash src={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5] *** 2025-12-12 16:24:11,085 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.026) 0:00:18.630 ******* 2025-12-12 16:24:11,085 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.026) 0:00:18.629 ******* 2025-12-12 16:24:11,104 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,112 p=31591 u=zuul n=ansible | TASK [repo_setup : Update the value of full_hash _repo_setup_json={{ _repo_setup_json | combine({'full_hash': _hash}, recursive=true) }}] *** 2025-12-12 16:24:11,112 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.657 ******* 2025-12-12 16:24:11,112 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.656 ******* 2025-12-12 16:24:11,131 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,140 p=31591 u=zuul n=ansible | TASK [repo_setup : Export hashes facts for further use cifmw_repo_setup_full_hash={{ _repo_setup_json['full_hash'] }}, cifmw_repo_setup_commit_hash={{ _repo_setup_json['commit_hash'] }}, cifmw_repo_setup_distro_hash={{ _repo_setup_json['distro_hash'] }}, cifmw_repo_setup_extended_hash={{ _repo_setup_json['extended_hash'] }}, cifmw_repo_setup_dlrn_api_url={{ _repo_setup_json['dlrn_api_url'] }}, cifmw_repo_setup_dlrn_url={{ 
_repo_setup_json['dlrn_url'] }}, cifmw_repo_setup_release={{ _repo_setup_json['release'] }}, cacheable=True] *** 2025-12-12 16:24:11,140 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.685 ******* 2025-12-12 16:24:11,140 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.683 ******* 2025-12-12 16:24:11,172 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:11,179 p=31591 u=zuul n=ansible | TASK [repo_setup : Create download directory path={{ cifmw_repo_setup_rhos_release_path }}, state=directory, mode=0755] *** 2025-12-12 16:24:11,179 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.039) 0:00:18.725 ******* 2025-12-12 16:24:11,180 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.039) 0:00:18.723 ******* 2025-12-12 16:24:11,194 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,201 p=31591 u=zuul n=ansible | TASK [repo_setup : Print the URL to request msg={{ cifmw_repo_setup_rhos_release_rpm }}] *** 2025-12-12 16:24:11,201 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.021) 0:00:18.746 ******* 2025-12-12 16:24:11,201 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.021) 0:00:18.745 ******* 2025-12-12 16:24:11,215 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,222 p=31591 u=zuul n=ansible | TASK [Download the RPM name=krb_request] *************************************** 2025-12-12 16:24:11,222 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.020) 0:00:18.767 ******* 2025-12-12 16:24:11,222 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.020) 0:00:18.766 ******* 2025-12-12 16:24:11,239 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,248 p=31591 u=zuul n=ansible | TASK [repo_setup : Install RHOS Release tool name={{ cifmw_repo_setup_rhos_release_rpm if cifmw_repo_setup_rhos_release_rpm is not url else cifmw_krb_request_out.path }}, state=present, disable_gpg_check={{ cifmw_repo_setup_rhos_release_gpg_check | bool }}] *** 2025-12-12 16:24:11,248 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.026) 0:00:18.793 ******* 2025-12-12 16:24:11,248 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.026) 0:00:18.792 ******* 2025-12-12 16:24:11,268 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,275 p=31591 u=zuul n=ansible | TASK [repo_setup : Get rhos-release tool version _raw_params=rhos-release --version] *** 2025-12-12 16:24:11,276 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.821 ******* 2025-12-12 16:24:11,276 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.819 ******* 2025-12-12 16:24:11,289 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,295 p=31591 u=zuul n=ansible | TASK [repo_setup : Print rhos-release tool version msg={{ rr_version.stdout }}] *** 2025-12-12 16:24:11,295 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.019) 0:00:18.840 ******* 2025-12-12 16:24:11,295 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.019) 0:00:18.839 ******* 2025-12-12 16:24:11,308 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,316 p=31591 u=zuul n=ansible | TASK [repo_setup : Generate repos using rhos-release {{ cifmw_repo_setup_rhos_release_args }} 
_raw_params=rhos-release {{ cifmw_repo_setup_rhos_release_args }} \ -t {{ cifmw_repo_setup_output }}] *** 2025-12-12 16:24:11,316 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.020) 0:00:18.861 ******* 2025-12-12 16:24:11,316 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.020) 0:00:18.860 ******* 2025-12-12 16:24:11,329 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,334 p=31591 u=zuul n=ansible | TASK [repo_setup : Check for /etc/ci/mirror_info.sh path=/etc/ci/mirror_info.sh] *** 2025-12-12 16:24:11,335 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.018) 0:00:18.880 ******* 2025-12-12 16:24:11,335 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.018) 0:00:18.878 ******* 2025-12-12 16:24:11,534 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:11,548 p=31591 u=zuul n=ansible | TASK [repo_setup : Use RDO proxy mirrors chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|https://trunk.rdoproject.org|$NODEPOOL_RDO_PROXY|g" *.repo ] *** 2025-12-12 16:24:11,549 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.214) 0:00:19.094 ******* 2025-12-12 16:24:11,549 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.214) 0:00:19.092 ******* 2025-12-12 16:24:11,755 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:11,770 p=31591 u=zuul n=ansible | TASK [repo_setup : Use RDO CentOS mirrors (remove CentOS 10 conditional when Nodepool mirrors exist) chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|http://mirror.stream.centos.org|$NODEPOOL_CENTOS_MIRROR|g" *.repo ] *** 2025-12-12 16:24:11,771 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.221) 0:00:19.316 ******* 2025-12-12 16:24:11,771 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.222) 0:00:19.314 ******* 2025-12-12 16:24:12,015 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:12,021 p=31591 u=zuul n=ansible | TASK [repo_setup : Check for gating.repo file on content provider url=http://{{ content_provider_registry_ip }}:8766/gating.repo] *** 2025-12-12 16:24:12,021 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.250) 0:00:19.567 ******* 2025-12-12 16:24:12,022 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.250) 0:00:19.565 ******* 2025-12-12 16:24:12,046 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,053 p=31591 u=zuul n=ansible | TASK [repo_setup : Populate gating repo from content provider ip content=[gating-repo] baseurl=http://{{ content_provider_registry_ip }}:8766/ enabled=1 gpgcheck=0 priority=1 , dest={{ cifmw_repo_setup_output }}/gating.repo, mode=0644] *** 2025-12-12 16:24:12,053 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.031) 0:00:19.599 ******* 2025-12-12 16:24:12,053 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.031) 0:00:19.597 ******* 2025-12-12 16:24:12,080 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,089 p=31591 u=zuul n=ansible | TASK [repo_setup : Check for DLRN repo at the destination path={{ cifmw_repo_setup_output }}/delorean.repo] *** 2025-12-12 16:24:12,089 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.634 ******* 2025-12-12 16:24:12,089 p=31591 
u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.633 ******* 2025-12-12 16:24:12,117 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,125 p=31591 u=zuul n=ansible | TASK [repo_setup : Lower the priority of DLRN repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}/delorean.repo, regexp=priority=1, replace=priority=20] *** 2025-12-12 16:24:12,125 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.670 ******* 2025-12-12 16:24:12,125 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.668 ******* 2025-12-12 16:24:12,146 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,153 p=31591 u=zuul n=ansible | TASK [repo_setup : Check for DLRN component repo path={{ cifmw_repo_setup_output }}/{{ _comp_repo }}] *** 2025-12-12 16:24:12,153 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.028) 0:00:19.698 ******* 2025-12-12 16:24:12,153 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.028) 0:00:19.697 ******* 2025-12-12 16:24:12,179 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,188 p=31591 u=zuul n=ansible | TASK [repo_setup : Lower the priority of componennt repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}//{{ _comp_repo }}, regexp=priority=1, replace=priority=2] *** 2025-12-12 16:24:12,188 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.733 ******* 2025-12-12 16:24:12,188 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.732 ******* 2025-12-12 16:24:12,210 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,219 p=31591 u=zuul n=ansible | TASK [repo_setup : Find existing repos from /etc/yum.repos.d directory paths=/etc/yum.repos.d/, patterns=*.repo, recurse=False] *** 2025-12-12 16:24:12,219 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.031) 0:00:19.764 ******* 2025-12-12 16:24:12,219 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.031) 0:00:19.763 ******* 2025-12-12 16:24:12,535 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:12,544 p=31591 u=zuul n=ansible | TASK [repo_setup : Remove existing repos from /etc/yum.repos.d directory path={{ item }}, state=absent] *** 2025-12-12 16:24:12,544 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.324) 0:00:20.089 ******* 2025-12-12 16:24:12,544 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.324) 0:00:20.088 ******* 2025-12-12 16:24:12,793 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos-addons.repo) 2025-12-12 16:24:13,025 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos.repo) 2025-12-12 16:24:13,036 p=31591 u=zuul n=ansible | TASK [repo_setup : Cleanup existing metadata _raw_params=dnf clean metadata] *** 2025-12-12 16:24:13,036 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.492) 0:00:20.581 ******* 2025-12-12 16:24:13,036 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.492) 0:00:20.580 ******* 2025-12-12 16:24:13,488 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:13,496 p=31591 u=zuul n=ansible | TASK [repo_setup : Copy generated repos to /etc/yum.repos.d directory mode=0755, remote_src=True, src={{ cifmw_repo_setup_output }}/, 
dest=/etc/yum.repos.d] *** 2025-12-12 16:24:13,497 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.460) 0:00:21.042 ******* 2025-12-12 16:24:13,497 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.460) 0:00:21.040 ******* 2025-12-12 16:24:13,770 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:13,782 p=31591 u=zuul n=ansible | TASK [ci_setup : Gather variables for each operating system _raw_params={{ item }}] *** 2025-12-12 16:24:13,782 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.285) 0:00:21.327 ******* 2025-12-12 16:24:13,782 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.285) 0:00:21.326 ******* 2025-12-12 16:24:13,818 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_setup/vars/redhat.yml) 2025-12-12 16:24:13,829 p=31591 u=zuul n=ansible | TASK [ci_setup : List packages to install var=cifmw_ci_setup_packages] ********* 2025-12-12 16:24:13,829 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.047) 0:00:21.374 ******* 2025-12-12 16:24:13,829 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.047) 0:00:21.373 ******* 2025-12-12 16:24:13,850 p=31591 u=zuul n=ansible | ok: [localhost] => cifmw_ci_setup_packages: - bash-completion - ca-certificates - git-core - make - tar - tmux - python3-pip 2025-12-12 16:24:13,857 p=31591 u=zuul n=ansible | TASK [ci_setup : Install needed packages name={{ cifmw_ci_setup_packages }}, state=latest] *** 2025-12-12 16:24:13,857 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.027) 0:00:21.402 ******* 2025-12-12 16:24:13,857 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.027) 0:00:21.400 ******* 2025-12-12 16:24:44,229 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:44,238 p=31591 u=zuul n=ansible | TASK [ci_setup : Gather version of openshift client _raw_params=oc version --client -o yaml] *** 2025-12-12 16:24:44,238 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:30.381) 0:00:51.783 ******* 2025-12-12 16:24:44,238 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:30.381) 0:00:51.782 ******* 2025-12-12 16:24:44,462 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:44,477 p=31591 u=zuul n=ansible | TASK [ci_setup : Ensure openshift client install path is present path={{ cifmw_ci_setup_oc_install_path }}, state=directory, mode=0755] *** 2025-12-12 16:24:44,477 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:00.239) 0:00:52.022 ******* 2025-12-12 16:24:44,477 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:00.239) 0:00:52.021 ******* 2025-12-12 16:24:44,691 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:44,706 p=31591 u=zuul n=ansible | TASK [ci_setup : Install openshift client src={{ cifmw_ci_setup_openshift_client_download_uri }}/{{ cifmw_ci_setup_openshift_client_version }}/openshift-client-linux.tar.gz, dest={{ cifmw_ci_setup_oc_install_path }}, remote_src=True, mode=0755, creates={{ cifmw_ci_setup_oc_install_path }}/oc] *** 2025-12-12 16:24:44,706 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:00.229) 0:00:52.251 ******* 2025-12-12 16:24:44,706 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:00.229) 0:00:52.250 ******* 2025-12-12 16:24:50,033 p=31591 u=zuul n=ansible | 
changed: [localhost] 2025-12-12 16:24:50,042 p=31591 u=zuul n=ansible | TASK [ci_setup : Add the OC path to cifmw_path if needed cifmw_path={{ cifmw_ci_setup_oc_install_path }}:{{ ansible_env.PATH }}, cacheable=True] *** 2025-12-12 16:24:50,042 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:05.336) 0:00:57.587 ******* 2025-12-12 16:24:50,042 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:05.335) 0:00:57.586 ******* 2025-12-12 16:24:50,063 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,077 p=31591 u=zuul n=ansible | TASK [ci_setup : Create completion file] *************************************** 2025-12-12 16:24:50,077 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.034) 0:00:57.622 ******* 2025-12-12 16:24:50,077 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.034) 0:00:57.621 ******* 2025-12-12 16:24:50,411 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:50,420 p=31591 u=zuul n=ansible | TASK [ci_setup : Source completion from within .bashrc create=True, mode=0644, path={{ ansible_user_dir }}/.bashrc, block=if [ -f ~/.oc_completion ]; then source ~/.oc_completion fi] *** 2025-12-12 16:24:50,420 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.342) 0:00:57.965 ******* 2025-12-12 16:24:50,420 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.342) 0:00:57.964 ******* 2025-12-12 16:24:50,765 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:50,780 p=31591 u=zuul n=ansible | TASK [ci_setup : Check rhsm status _raw_params=subscription-manager status] **** 2025-12-12 16:24:50,780 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.360) 0:00:58.325 ******* 2025-12-12 16:24:50,780 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.360) 0:00:58.324 ******* 2025-12-12 16:24:50,799 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,813 p=31591 u=zuul n=ansible | TASK [ci_setup : Gather the repos to be enabled _repos={{ cifmw_ci_setup_rhel_rhsm_default_repos + (cifmw_ci_setup_rhel_rhsm_extra_repos | default([])) }}] *** 2025-12-12 16:24:50,813 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.033) 0:00:58.359 ******* 2025-12-12 16:24:50,814 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.033) 0:00:58.357 ******* 2025-12-12 16:24:50,835 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,849 p=31591 u=zuul n=ansible | TASK [ci_setup : Enabling the required repositories. 
name={{ item }}, state={{ rhsm_repo_state | default('enabled') }}] *** 2025-12-12 16:24:50,850 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.036) 0:00:58.395 ******* 2025-12-12 16:24:50,850 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.036) 0:00:58.393 ******* 2025-12-12 16:24:50,868 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,882 p=31591 u=zuul n=ansible | TASK [ci_setup : Get current /etc/redhat-release _raw_params=cat /etc/redhat-release] *** 2025-12-12 16:24:50,882 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.032) 0:00:58.427 ******* 2025-12-12 16:24:50,882 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.032) 0:00:58.426 ******* 2025-12-12 16:24:50,912 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,919 p=31591 u=zuul n=ansible | TASK [ci_setup : Print current /etc/redhat-release msg={{ _current_rh_release.stdout }}] *** 2025-12-12 16:24:50,919 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.037) 0:00:58.464 ******* 2025-12-12 16:24:50,919 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.037) 0:00:58.463 ******* 2025-12-12 16:24:50,938 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,945 p=31591 u=zuul n=ansible | TASK [ci_setup : Ensure the repos are enabled in the system using yum name={{ item.name }}, baseurl={{ item.baseurl }}, description={{ item.description | default(item.name) }}, gpgcheck={{ item.gpgcheck | default(false) }}, enabled=True, state={{ yum_repo_state | default('present') }}] *** 2025-12-12 16:24:50,946 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.026) 0:00:58.491 ******* 2025-12-12 16:24:50,946 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.026) 0:00:58.489 ******* 2025-12-12 16:24:50,967 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,978 p=31591 u=zuul n=ansible | TASK [ci_setup : Manage directories path={{ item }}, state={{ directory_state }}, mode=0755, owner={{ ansible_user_id }}, group={{ ansible_user_id }}] *** 2025-12-12 16:24:50,979 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.033) 0:00:58.524 ******* 2025-12-12 16:24:50,979 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.033) 0:00:58.522 ******* 2025-12-12 16:24:51,232 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/manifests/openstack/cr) 2025-12-12 16:24:51,436 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/logs) 2025-12-12 16:24:51,670 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/tmp) 2025-12-12 16:24:51,870 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/volumes) 2025-12-12 16:24:52,050 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2025-12-12 16:24:52,075 p=31591 u=zuul n=ansible | TASK [Prepare install_yamls make targets name=install_yamls, apply={'tags': ['bootstrap']}] *** 2025-12-12 16:24:52,075 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:01.096) 0:00:59.620 ******* 2025-12-12 16:24:52,075 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:01.096) 0:00:59.619 ******* 2025-12-12 16:24:52,188 p=31591 u=zuul n=ansible | TASK [install_yamls : Ensure directories exist path={{ 
item }}, state=directory, mode=0755] *** 2025-12-12 16:24:52,188 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.112) 0:00:59.733 ******* 2025-12-12 16:24:52,188 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.112) 0:00:59.732 ******* 2025-12-12 16:24:52,378 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts) 2025-12-12 16:24:52,558 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes/tasks) 2025-12-12 16:24:52,756 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2025-12-12 16:24:52,766 p=31591 u=zuul n=ansible | TASK [Create variables with local repos based on Zuul items name=install_yamls, tasks_from=zuul_set_operators_repo.yml] *** 2025-12-12 16:24:52,766 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.578) 0:01:00.311 ******* 2025-12-12 16:24:52,766 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.578) 0:01:00.310 ******* 2025-12-12 16:24:52,800 p=31591 u=zuul n=ansible | TASK [install_yamls : Set fact with local repos based on Zuul items cifmw_install_yamls_operators_repo={{ cifmw_install_yamls_operators_repo | default({}) | combine(_repo_operator_info | items2dict) }}] *** 2025-12-12 16:24:52,800 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.033) 0:01:00.345 ******* 2025-12-12 16:24:52,800 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.033) 0:01:00.344 ******* 2025-12-12 16:24:52,834 p=31591 u=zuul n=ansible | skipping: [localhost] => (item={'branch': 'master', 'change': '694', 'change_url': 'https://github.com/infrawatch/service-telemetry-operator/pull/694', 'commit_id': '3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14', 'patchset': '3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/infrawatch/service-telemetry-operator', 'name': 'infrawatch/service-telemetry-operator', 'short_name': 'service-telemetry-operator', 'src_dir': 'src/github.com/infrawatch/service-telemetry-operator'}, 'topic': None}) 2025-12-12 16:24:52,836 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:52,844 p=31591 u=zuul n=ansible | TASK [install_yamls : Print helpful data for debugging msg=_repo_operator_name: {{ _repo_operator_name }} _repo_operator_info: {{ _repo_operator_info }} cifmw_install_yamls_operators_repo: {{ cifmw_install_yamls_operators_repo }} ] *** 2025-12-12 16:24:52,844 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.043) 0:01:00.389 ******* 2025-12-12 16:24:52,844 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.043) 0:01:00.388 ******* 2025-12-12 16:24:52,876 p=31591 u=zuul n=ansible | skipping: [localhost] => (item={'branch': 'master', 'change': '694', 'change_url': 'https://github.com/infrawatch/service-telemetry-operator/pull/694', 'commit_id': '3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14', 'patchset': '3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/infrawatch/service-telemetry-operator', 'name': 'infrawatch/service-telemetry-operator', 'short_name': 'service-telemetry-operator', 'src_dir': 'src/github.com/infrawatch/service-telemetry-operator'}, 'topic': None}) 2025-12-12 16:24:52,878 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:52,891 p=31591 
u=zuul n=ansible | TASK [Customize install_yamls devsetup vars if needed name=install_yamls, tasks_from=customize_devsetup_vars.yml] *** 2025-12-12 16:24:52,891 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.047) 0:01:00.436 ******* 2025-12-12 16:24:52,891 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.047) 0:01:00.435 ******* 2025-12-12 16:24:52,935 p=31591 u=zuul n=ansible | TASK [install_yamls : Update opm_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^opm_version:, line=opm_version: {{ cifmw_install_yamls_opm_version }}, state=present] *** 2025-12-12 16:24:52,935 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.043) 0:01:00.480 ******* 2025-12-12 16:24:52,935 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.043) 0:01:00.479 ******* 2025-12-12 16:24:52,969 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:52,976 p=31591 u=zuul n=ansible | TASK [install_yamls : Update sdk_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^sdk_version:, line=sdk_version: {{ cifmw_install_yamls_sdk_version }}, state=present] *** 2025-12-12 16:24:52,976 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.041) 0:01:00.521 ******* 2025-12-12 16:24:52,976 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.041) 0:01:00.520 ******* 2025-12-12 16:24:53,000 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,008 p=31591 u=zuul n=ansible | TASK [install_yamls : Update go_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^go_version:, line=go_version: {{ cifmw_install_yamls_go_version }}, state=present] *** 2025-12-12 16:24:53,008 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.031) 0:01:00.553 ******* 2025-12-12 16:24:53,008 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.031) 0:01:00.552 ******* 2025-12-12 16:24:53,029 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,036 p=31591 u=zuul n=ansible | TASK [install_yamls : Update kustomize_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^kustomize_version:, line=kustomize_version: {{ cifmw_install_yamls_kustomize_version }}, state=present] *** 2025-12-12 16:24:53,036 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.028) 0:01:00.581 ******* 2025-12-12 16:24:53,036 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.028) 0:01:00.580 ******* 2025-12-12 16:24:53,060 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,071 p=31591 u=zuul n=ansible | TASK [install_yamls : Compute the cifmw_install_yamls_vars final value _install_yamls_override_vars={{ _install_yamls_override_vars | default({}) | combine(item, recursive=True) }}] *** 2025-12-12 16:24:53,071 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.035) 0:01:00.616 ******* 2025-12-12 16:24:53,071 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.035) 0:01:00.615 ******* 2025-12-12 16:24:53,129 p=31591 u=zuul n=ansible | ok: [localhost] => (item={}) 2025-12-12 16:24:53,137 p=31591 u=zuul n=ansible | TASK [install_yamls : Set environment override 
cifmw_install_yamls_environment fact cifmw_install_yamls_environment={{ _install_yamls_override_vars.keys() | map('upper') | zip(_install_yamls_override_vars.values()) | items2dict(key_name=0, value_name=1) | combine({ 'OUT': cifmw_install_yamls_manifests_dir, 'OUTPUT_DIR': cifmw_install_yamls_edpm_dir, 'CHECKOUT_FROM_OPENSTACK_REF': cifmw_install_yamls_checkout_openstack_ref, 'OPENSTACK_K8S_BRANCH': (zuul is defined and not zuul.branch |regex_search('master|antelope|rhos')) | ternary(zuul.branch, 'main') }) | combine(install_yamls_operators_repos) }}, cacheable=True] *** 2025-12-12 16:24:53,138 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.066) 0:01:00.683 ******* 2025-12-12 16:24:53,138 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.066) 0:01:00.681 ******* 2025-12-12 16:24:53,177 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:53,182 p=31591 u=zuul n=ansible | TASK [install_yamls : Get environment structure base_path={{ cifmw_install_yamls_repo }}] *** 2025-12-12 16:24:53,182 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.044) 0:01:00.728 ******* 2025-12-12 16:24:53,183 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.044) 0:01:00.726 ******* 2025-12-12 16:24:53,768 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:53,822 p=31591 u=zuul n=ansible | TASK [install_yamls : Ensure Output directory exists path={{ cifmw_install_yamls_out_dir }}, state=directory, mode=0755] *** 2025-12-12 16:24:53,823 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.640) 0:01:01.368 ******* 2025-12-12 16:24:53,823 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.640) 0:01:01.366 ******* 2025-12-12 16:24:53,849 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,859 p=31591 u=zuul n=ansible | TASK [install_yamls : Ensure user cifmw_install_yamls_vars contains existing Makefile variables that=_cifmw_install_yamls_unmatched_vars | length == 0, msg=cifmw_install_yamls_vars contains a variable that is not defined in install_yamls Makefile nor cifmw_install_yamls_whitelisted_vars: {{ _cifmw_install_yamls_unmatched_vars | join(', ')}}, quiet=True] *** 2025-12-12 16:24:53,859 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.036) 0:01:01.404 ******* 2025-12-12 16:24:53,859 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.036) 0:01:01.403 ******* 2025-12-12 16:24:53,878 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,891 p=31591 u=zuul n=ansible | TASK [install_yamls : Generate /home/zuul/ci-framework-data/artifacts/install_yamls.sh dest={{ cifmw_install_yamls_out_dir }}/{{ cifmw_install_yamls_envfile }}, content={% for k,v in cifmw_install_yamls_environment.items() %} export {{ k }}={{ v }} {% endfor %}, mode=0644] *** 2025-12-12 16:24:53,891 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.031) 0:01:01.436 ******* 2025-12-12 16:24:53,891 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.032) 0:01:01.435 ******* 2025-12-12 16:24:53,913 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,920 p=31591 u=zuul n=ansible | TASK [install_yamls : Set install_yamls default values cifmw_install_yamls_defaults={{ get_makefiles_env_output.makefiles_values | combine(cifmw_install_yamls_environment) }}, cacheable=True] *** 2025-12-12 16:24:53,920 p=31591 u=zuul n=ansible | Friday 12 December 2025 
16:24:53 +0000 (0:00:00.029) 0:01:01.465 ******* 2025-12-12 16:24:53,920 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.028) 0:01:01.464 ******* 2025-12-12 16:24:53,945 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:53,954 p=31591 u=zuul n=ansible | TASK [install_yamls : Show the env structure var=cifmw_install_yamls_environment] *** 2025-12-12 16:24:53,954 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.034) 0:01:01.500 ******* 2025-12-12 16:24:53,955 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.034) 0:01:01.498 ******* 2025-12-12 16:24:53,977 p=31591 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_environment: CHECKOUT_FROM_OPENSTACK_REF: 'true' OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm 2025-12-12 16:24:53,987 p=31591 u=zuul n=ansible | TASK [install_yamls : Show the env structure defaults var=cifmw_install_yamls_defaults] *** 2025-12-12 16:24:53,987 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.032) 0:01:01.532 ******* 2025-12-12 16:24:53,987 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.032) 0:01:01.531 ******* 2025-12-12 16:24:54,015 p=31591 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 
BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_CLEANUP: 'true' BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: '' BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest DATAPLANE_ANSIBLE_SECRET: 
dataplane-ansible-ssh-private-key-secret DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 767c3ed056cbaa3b9dfedb8c6f825bf0 HEAT_BRANCH: main 
HEAT_COMMIT_HASH: '' HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: 'true' INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE: quay.io/metal3-io/ironic IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '1234567842' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: 
octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12345678' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/playbooks/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: osp-secret SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: test/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' tripleo_deploy: 'export REGISTRY_USER:' 2025-12-12 16:24:54,022 p=31591 u=zuul n=ansible | TASK [install_yamls : Generate make targets install_yamls_path={{ cifmw_install_yamls_repo }}, output_directory={{ cifmw_install_yamls_tasks_out }}] *** 2025-12-12 16:24:54,023 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.035) 0:01:01.568 ******* 2025-12-12 16:24:54,023 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.035) 0:01:01.566 ******* 2025-12-12 16:24:54,328 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:54,337 p=31591 u=zuul n=ansible | TASK [install_yamls : Debug generate_make module var=cifmw_generate_makes] ***** 2025-12-12 16:24:54,337 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.314) 0:01:01.882 ******* 2025-12-12 16:24:54,337 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.314) 0:01:01.881 ******* 2025-12-12 16:24:54,360 p=31591 u=zuul n=ansible | ok: [localhost] => cifmw_generate_makes: changed: false debug: 
/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/Makefile: - all - help - cleanup - deploy_cleanup - wait - crc_storage - crc_storage_cleanup - crc_storage_release - crc_storage_with_retries - crc_storage_cleanup_with_retries - operator_namespace - namespace - namespace_cleanup - input - input_cleanup - crc_bmo_setup - crc_bmo_cleanup - openstack_prep - openstack - openstack_wait - openstack_init - openstack_cleanup - openstack_repo - openstack_deploy_prep - openstack_deploy - openstack_wait_deploy - openstack_deploy_cleanup - openstack_update_run - update_services - update_system - openstack_patch_version - edpm_deploy_generate_keys - edpm_patch_ansible_runner_image - edpm_deploy_prep - edpm_deploy_cleanup - edpm_deploy - edpm_deploy_baremetal_prep - edpm_deploy_baremetal - edpm_wait_deploy_baremetal - edpm_wait_deploy - edpm_register_dns - edpm_nova_discover_hosts - openstack_crds - openstack_crds_cleanup - edpm_deploy_networker_prep - edpm_deploy_networker_cleanup - edpm_deploy_networker - infra_prep - infra - infra_cleanup - dns_deploy_prep - dns_deploy - dns_deploy_cleanup - netconfig_deploy_prep - netconfig_deploy - netconfig_deploy_cleanup - memcached_deploy_prep - memcached_deploy - memcached_deploy_cleanup - keystone_prep - keystone - keystone_cleanup - keystone_deploy_prep - keystone_deploy - keystone_deploy_cleanup - barbican_prep - barbican - barbican_cleanup - barbican_deploy_prep - barbican_deploy - barbican_deploy_validate - barbican_deploy_cleanup - mariadb - mariadb_cleanup - mariadb_deploy_prep - mariadb_deploy - mariadb_deploy_cleanup - placement_prep - placement - placement_cleanup - placement_deploy_prep - placement_deploy - placement_deploy_cleanup - glance_prep - glance - glance_cleanup - glance_deploy_prep - glance_deploy - glance_deploy_cleanup - ovn_prep - ovn - ovn_cleanup - ovn_deploy_prep - ovn_deploy - ovn_deploy_cleanup - neutron_prep - neutron - neutron_cleanup - neutron_deploy_prep - neutron_deploy - neutron_deploy_cleanup - cinder_prep - cinder - cinder_cleanup - cinder_deploy_prep - cinder_deploy - cinder_deploy_cleanup - rabbitmq_prep - rabbitmq - rabbitmq_cleanup - rabbitmq_deploy_prep - rabbitmq_deploy - rabbitmq_deploy_cleanup - ironic_prep - ironic - ironic_cleanup - ironic_deploy_prep - ironic_deploy - ironic_deploy_cleanup - octavia_prep - octavia - octavia_cleanup - octavia_deploy_prep - octavia_deploy - octavia_deploy_cleanup - designate_prep - designate - designate_cleanup - designate_deploy_prep - designate_deploy - designate_deploy_cleanup - nova_prep - nova - nova_cleanup - nova_deploy_prep - nova_deploy - nova_deploy_cleanup - mariadb_kuttl_run - mariadb_kuttl - kuttl_db_prep - kuttl_db_cleanup - kuttl_common_prep - kuttl_common_cleanup - keystone_kuttl_run - keystone_kuttl - barbican_kuttl_run - barbican_kuttl - placement_kuttl_run - placement_kuttl - cinder_kuttl_run - cinder_kuttl - neutron_kuttl_run - neutron_kuttl - octavia_kuttl_run - octavia_kuttl - designate_kuttl - designate_kuttl_run - ovn_kuttl_run - ovn_kuttl - infra_kuttl_run - infra_kuttl - ironic_kuttl_run - ironic_kuttl - ironic_kuttl_crc - heat_kuttl_run - heat_kuttl - heat_kuttl_crc - ansibleee_kuttl_run - ansibleee_kuttl_cleanup - ansibleee_kuttl_prep - ansibleee_kuttl - glance_kuttl_run - glance_kuttl - manila_kuttl_run - manila_kuttl - swift_kuttl_run - swift_kuttl - horizon_kuttl_run - horizon_kuttl - openstack_kuttl_run - openstack_kuttl - mariadb_chainsaw_run - mariadb_chainsaw - horizon_prep - horizon - horizon_cleanup - horizon_deploy_prep - 
horizon_deploy - horizon_deploy_cleanup - heat_prep - heat - heat_cleanup - heat_deploy_prep - heat_deploy - heat_deploy_cleanup - ansibleee_prep - ansibleee - ansibleee_cleanup - baremetal_prep - baremetal - baremetal_cleanup - ceph_help - ceph - ceph_cleanup - rook_prep - rook - rook_deploy_prep - rook_deploy - rook_crc_disk - rook_cleanup - lvms - nmstate - nncp - nncp_cleanup - netattach - netattach_cleanup - metallb - metallb_config - metallb_config_cleanup - metallb_cleanup - loki - loki_cleanup - loki_deploy - loki_deploy_cleanup - netobserv - netobserv_cleanup - netobserv_deploy - netobserv_deploy_cleanup - manila_prep - manila - manila_cleanup - manila_deploy_prep - manila_deploy - manila_deploy_cleanup - telemetry_prep - telemetry - telemetry_cleanup - telemetry_deploy_prep - telemetry_deploy - telemetry_deploy_cleanup - telemetry_kuttl_run - telemetry_kuttl - swift_prep - swift - swift_cleanup - swift_deploy_prep - swift_deploy - swift_deploy_cleanup - certmanager - certmanager_cleanup - validate_marketplace - redis_deploy_prep - redis_deploy - redis_deploy_cleanup - set_slower_etcd_profile /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/Makefile: - help - download_tools - nfs - nfs_cleanup - crc - crc_cleanup - crc_scrub - crc_attach_default_interface - crc_attach_default_interface_cleanup - ipv6_lab_network - ipv6_lab_network_cleanup - ipv6_lab_nat64_router - ipv6_lab_nat64_router_cleanup - ipv6_lab_sno - ipv6_lab_sno_cleanup - ipv6_lab - ipv6_lab_cleanup - attach_default_interface - attach_default_interface_cleanup - network_isolation_bridge - network_isolation_bridge_cleanup - edpm_baremetal_compute - edpm_compute - edpm_compute_bootc - edpm_ansible_runner - edpm_computes_bgp - edpm_compute_repos - edpm_compute_cleanup - edpm_networker - edpm_networker_cleanup - edpm_deploy_instance - tripleo_deploy - standalone_deploy - standalone_sync - standalone - standalone_cleanup - standalone_snapshot - standalone_revert - cifmw_prepare - cifmw_cleanup - bmaas_network - bmaas_network_cleanup - bmaas_route_crc_and_crc_bmaas_networks - bmaas_route_crc_and_crc_bmaas_networks_cleanup - bmaas_crc_attach_network - bmaas_crc_attach_network_cleanup - bmaas_crc_baremetal_bridge - bmaas_crc_baremetal_bridge_cleanup - bmaas_baremetal_net_nad - bmaas_baremetal_net_nad_cleanup - bmaas_metallb - bmaas_metallb_cleanup - bmaas_virtual_bms - bmaas_virtual_bms_cleanup - bmaas_sushy_emulator - bmaas_sushy_emulator_cleanup - bmaas_sushy_emulator_wait - bmaas_generate_nodes_yaml - bmaas - bmaas_cleanup failed: false success: true 2025-12-12 16:24:54,368 p=31591 u=zuul n=ansible | TASK [install_yamls : Create the install_yamls parameters file dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml, content={{ { 'cifmw_install_yamls_environment': cifmw_install_yamls_environment, 'cifmw_install_yamls_defaults': cifmw_install_yamls_defaults } | to_nice_yaml }}, mode=0644] *** 2025-12-12 16:24:54,368 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.030) 0:01:01.913 ******* 2025-12-12 16:24:54,368 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.030) 0:01:01.912 ******* 2025-12-12 16:24:54,758 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:54,766 p=31591 u=zuul n=ansible | TASK [install_yamls : Create empty cifmw_install_yamls_environment if needed cifmw_install_yamls_environment={}] *** 2025-12-12 16:24:54,766 p=31591 u=zuul n=ansible | Friday 12 
December 2025 16:24:54 +0000 (0:00:00.397) 0:01:02.311 ******* 2025-12-12 16:24:54,766 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.397) 0:01:02.310 ******* 2025-12-12 16:24:54,784 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:54,800 p=31591 u=zuul n=ansible | TASK [discover_latest_image : Get latest image url={{ cifmw_discover_latest_image_base_url }}, image_prefix={{ cifmw_discover_latest_image_qcow_prefix }}, images_file={{ cifmw_discover_latest_image_images_file }}] *** 2025-12-12 16:24:54,800 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.034) 0:01:02.345 ******* 2025-12-12 16:24:54,800 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.034) 0:01:02.344 ******* 2025-12-12 16:24:55,338 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:55,344 p=31591 u=zuul n=ansible | TASK [discover_latest_image : Export facts accordingly cifmw_discovered_image_name={{ discovered_image['data']['image_name'] }}, cifmw_discovered_image_url={{ discovered_image['data']['image_url'] }}, cifmw_discovered_hash={{ discovered_image['data']['hash'] }}, cifmw_discovered_hash_algorithm={{ discovered_image['data']['hash_algorithm'] }}, cacheable=True] *** 2025-12-12 16:24:55,345 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.544) 0:01:02.890 ******* 2025-12-12 16:24:55,345 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.544) 0:01:02.888 ******* 2025-12-12 16:24:55,366 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:55,382 p=31591 u=zuul n=ansible | TASK [Create artifacts with custom params mode=0644, dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/custom-params.yml, content={{ ci_framework_params | to_nice_yaml }}] *** 2025-12-12 16:24:55,382 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.037) 0:01:02.927 ******* 2025-12-12 16:24:55,383 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.037) 0:01:02.926 ******* 2025-12-12 16:24:55,817 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | localhost : ok=43 changed=23 unreachable=0 failed=0 skipped=40 rescued=0 ignored=0 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.458) 0:01:03.386 ******* 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | =============================================================================== 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | ci_setup : Install needed packages ------------------------------------- 30.38s 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | repo_setup : Initialize python venv and install requirements ------------ 8.61s 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | ci_setup : Install openshift client ------------------------------------- 5.34s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_ca : Update ca bundle ------------------------------------------- 1.58s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Make sure git-core package is installed -------------------- 1.54s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Get repo-setup repository ---------------------------------- 1.42s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ci_setup : Manage 
directories ------------------------------------------- 1.10s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | Gathering Facts --------------------------------------------------------- 1.06s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Install repo-setup package --------------------------------- 0.82s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_ca : Ensure target directory exists ----------------------------- 0.75s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Dump full hash in delorean.repo.md5 file ------------------- 0.69s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Run repo-setup --------------------------------------------- 0.66s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_yamls : Get environment structure ------------------------------- 0.64s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_yamls : Ensure directories exist -------------------------------- 0.58s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Ensure directories are present ----------------------------- 0.56s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | discover_latest_image : Get latest image -------------------------------- 0.54s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Remove existing repos from /etc/yum.repos.d directory ------ 0.49s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Run repo-setup-get-hash ------------------------------------ 0.49s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Cleanup existing metadata ---------------------------------- 0.46s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | Create artifacts with custom params ------------------------------------- 0.46s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.459) 0:01:03.386 ******* 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | =============================================================================== 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ci_setup --------------------------------------------------------------- 38.29s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup ------------------------------------------------------------- 17.66s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_yamls ----------------------------------------------------------- 2.61s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_ca -------------------------------------------------------------- 2.47s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | gather_facts ------------------------------------------------------------ 1.06s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | discover_latest_image --------------------------------------------------- 0.58s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ansible.builtin.copy ---------------------------------------------------- 0.46s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ansible.builtin.include_role -------------------------------------------- 0.11s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ansible.builtin.set_fact ------------------------------------------------ 0.11s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | total ------------------------------------------------------------------ 63.35s 2025-12-12 16:24:57,434 p=32453 u=zuul n=ansible | PLAY [Run pre_infra hooks] 
***************************************************** 2025-12-12 16:24:57,470 p=32453 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-12-12 16:24:57,470 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.054) 0:00:00.054 ******* 2025-12-12 16:24:57,470 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.053) 0:00:00.053 ******* 2025-12-12 16:24:57,552 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:57,564 p=32453 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-12-12 16:24:57,564 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.094) 0:00:00.149 ******* 2025-12-12 16:24:57,564 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.094) 0:00:00.148 ******* 2025-12-12 16:24:57,622 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:57,633 p=32453 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_infra _raw_params={{ hook.type }}.yml] *** 2025-12-12 16:24:57,634 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.069) 0:00:00.218 ******* 2025-12-12 16:24:57,634 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.069) 0:00:00.217 ******* 2025-12-12 16:24:57,694 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:57,741 p=32453 u=zuul n=ansible | PLAY [Prepare host virtualization] ********************************************* 2025-12-12 16:24:57,775 p=32453 u=zuul n=ansible | TASK [Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] ****** 2025-12-12 16:24:57,775 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.141) 0:00:00.360 ******* 2025-12-12 16:24:57,775 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.141) 0:00:00.359 ******* 2025-12-12 16:24:57,906 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:57,925 p=32453 u=zuul n=ansible | TASK [Ensure libvirt is present/configured name=libvirt_manager] *************** 2025-12-12 16:24:57,925 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.150) 0:00:00.510 ******* 2025-12-12 16:24:57,925 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.150) 0:00:00.509 ******* 2025-12-12 16:24:57,945 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:57,955 p=32453 u=zuul n=ansible | TASK [Perpare OpenShift provisioner node name=openshift_provisioner_node] ****** 2025-12-12 16:24:57,955 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.029) 0:00:00.539 ******* 2025-12-12 16:24:57,955 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.029) 0:00:00.539 ******* 2025-12-12 16:24:57,973 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,010 p=32453 u=zuul n=ansible | PLAY [Prepare the platform] **************************************************** 2025-12-12 16:24:58,036 p=32453 u=zuul n=ansible | TASK [Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] ****** 2025-12-12 16:24:58,036 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.081) 0:00:00.621 
******* 2025-12-12 16:24:58,036 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.081) 0:00:00.620 ******* 2025-12-12 16:24:58,079 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:58,094 p=32453 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Environment Definition file existence path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2025-12-12 16:24:58,094 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.057) 0:00:00.679 ******* 2025-12-12 16:24:58,094 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.057) 0:00:00.678 ******* 2025-12-12 16:24:58,388 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:58,401 p=32453 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Definition file existance that=['_net_env_def_stat.stat.exists'], msg=Ensure that the Networking Environment Definition file exists in {{ cifmw_networking_mapper_networking_env_def_path }}, quiet=True] *** 2025-12-12 16:24:58,402 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.307) 0:00:00.986 ******* 2025-12-12 16:24:58,402 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.307) 0:00:00.985 ******* 2025-12-12 16:24:58,431 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,443 p=32453 u=zuul n=ansible | TASK [networking_mapper : Load the Networking Definition from file path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2025-12-12 16:24:58,444 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.042) 0:00:01.028 ******* 2025-12-12 16:24:58,444 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.042) 0:00:01.027 ******* 2025-12-12 16:24:58,466 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,479 p=32453 u=zuul n=ansible | TASK [networking_mapper : Set cifmw_networking_env_definition is present cifmw_networking_env_definition={{ _net_env_def_slurp['content'] | b64decode | from_yaml }}, cacheable=True] *** 2025-12-12 16:24:58,479 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.035) 0:00:01.063 ******* 2025-12-12 16:24:58,479 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.035) 0:00:01.062 ******* 2025-12-12 16:24:58,513 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,535 p=32453 u=zuul n=ansible | TASK [Deploy OCP using Hive name=hive] ***************************************** 2025-12-12 16:24:58,535 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.056) 0:00:01.120 ******* 2025-12-12 16:24:58,535 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.056) 0:00:01.119 ******* 2025-12-12 16:24:58,557 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,568 p=32453 u=zuul n=ansible | TASK [Prepare CRC name=rhol_crc] *********************************************** 2025-12-12 16:24:58,568 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.032) 0:00:01.152 ******* 2025-12-12 16:24:58,568 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.032) 0:00:01.152 ******* 2025-12-12 16:24:58,599 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,610 p=32453 u=zuul n=ansible | TASK [Deploy OpenShift cluster using dev-scripts name=devscripts] ************** 2025-12-12 16:24:58,610 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.042) 0:00:01.195 
******* 2025-12-12 16:24:58,610 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.042) 0:00:01.194 ******* 2025-12-12 16:24:58,632 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,644 p=32453 u=zuul n=ansible | TASK [openshift_login : Ensure output directory exists path={{ cifmw_openshift_login_basedir }}/artifacts, state=directory, mode=0755] *** 2025-12-12 16:24:58,644 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.034) 0:00:01.229 ******* 2025-12-12 16:24:58,644 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.034) 0:00:01.228 ******* 2025-12-12 16:24:59,049 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:59,061 p=32453 u=zuul n=ansible | TASK [openshift_login : OpenShift login _raw_params=login.yml] ***************** 2025-12-12 16:24:59,061 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.416) 0:00:01.645 ******* 2025-12-12 16:24:59,061 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.416) 0:00:01.645 ******* 2025-12-12 16:24:59,095 p=32453 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/login.yml for localhost 2025-12-12 16:24:59,108 p=32453 u=zuul n=ansible | TASK [openshift_login : Check if the password file is present path={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2025-12-12 16:24:59,109 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.047) 0:00:01.693 ******* 2025-12-12 16:24:59,109 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.047) 0:00:01.692 ******* 2025-12-12 16:24:59,132 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,141 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch user password content src={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2025-12-12 16:24:59,141 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:01.726 ******* 2025-12-12 16:24:59,141 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:01.725 ******* 2025-12-12 16:24:59,164 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,173 p=32453 u=zuul n=ansible | TASK [openshift_login : Set user password as a fact cifmw_openshift_login_password={{ cifmw_openshift_login_password_file_slurp.content | b64decode }}, cacheable=True] *** 2025-12-12 16:24:59,174 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:01.758 ******* 2025-12-12 16:24:59,174 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:01.757 ******* 2025-12-12 16:24:59,193 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,203 p=32453 u=zuul n=ansible | TASK [openshift_login : Set role variables cifmw_openshift_login_kubeconfig={{ cifmw_openshift_login_kubeconfig | default(cifmw_openshift_kubeconfig) | default( ansible_env.KUBECONFIG if 'KUBECONFIG' in ansible_env else cifmw_openshift_login_kubeconfig_default_path ) | trim }}, cifmw_openshift_login_user={{ cifmw_openshift_login_user | default(cifmw_openshift_user) | default(omit) }}, cifmw_openshift_login_password={{ cifmw_openshift_login_password | default(cifmw_openshift_password) | default(omit) }}, cifmw_openshift_login_api={{ cifmw_openshift_login_api | default(cifmw_openshift_api) | default(omit) }}, cifmw_openshift_login_cert_login={{ 
cifmw_openshift_login_cert_login | default(false)}}, cifmw_openshift_login_provided_token={{ cifmw_openshift_provided_token | default(omit) }}, cacheable=True] *** 2025-12-12 16:24:59,203 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.029) 0:00:01.788 ******* 2025-12-12 16:24:59,203 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.029) 0:00:01.787 ******* 2025-12-12 16:24:59,230 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:59,270 p=32453 u=zuul n=ansible | TASK [openshift_login : Check if kubeconfig exists path={{ cifmw_openshift_login_kubeconfig }}] *** 2025-12-12 16:24:59,270 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.066) 0:00:01.854 ******* 2025-12-12 16:24:59,270 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.066) 0:00:01.853 ******* 2025-12-12 16:24:59,440 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:59,453 p=32453 u=zuul n=ansible | TASK [openshift_login : Assert that enough data is provided to log in to OpenShift that=cifmw_openshift_login_kubeconfig_stat.stat.exists or (cifmw_openshift_login_provided_token is defined and cifmw_openshift_login_provided_token != '') or ( (cifmw_openshift_login_user is defined) and (cifmw_openshift_login_password is defined) and (cifmw_openshift_login_api is defined) ), msg=If an existing kubeconfig is not provided user/pwd or provided/initial token and API URL must be given] *** 2025-12-12 16:24:59,453 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.183) 0:00:02.038 ******* 2025-12-12 16:24:59,453 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.183) 0:00:02.037 ******* 2025-12-12 16:24:59,477 p=32453 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2025-12-12 16:24:59,488 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch kubeconfig content src={{ cifmw_openshift_login_kubeconfig }}] *** 2025-12-12 16:24:59,489 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.073 ******* 2025-12-12 16:24:59,489 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.072 ******* 2025-12-12 16:24:59,511 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,523 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch x509 key based users cifmw_openshift_login_key_based_users={{ ( cifmw_openshift_login_kubeconfig_content_b64.content | b64decode | from_yaml ). 
users | default([]) | selectattr('user.client-certificate-data', 'defined') | map(attribute="name") | map("split", "/") | map("first") }}, cacheable=True] *** 2025-12-12 16:24:59,524 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.034) 0:00:02.108 ******* 2025-12-12 16:24:59,524 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.034) 0:00:02.107 ******* 2025-12-12 16:24:59,546 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,559 p=32453 u=zuul n=ansible | TASK [openshift_login : Assign key based user if not provided and available cifmw_openshift_login_user={{ (cifmw_openshift_login_assume_cert_system_user | ternary('system:', '')) + (cifmw_openshift_login_key_based_users | map('replace', 'system:', '') | unique | first) }}, cifmw_openshift_login_cert_login=True, cacheable=True] *** 2025-12-12 16:24:59,559 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.144 ******* 2025-12-12 16:24:59,559 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.143 ******* 2025-12-12 16:24:59,580 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,591 p=32453 u=zuul n=ansible | TASK [openshift_login : Set the retry count cifmw_openshift_login_retries_cnt={{ 0 if cifmw_openshift_login_retries_cnt is undefined else cifmw_openshift_login_retries_cnt|int + 1 }}] *** 2025-12-12 16:24:59,591 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:02.176 ******* 2025-12-12 16:24:59,591 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:02.175 ******* 2025-12-12 16:24:59,614 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:59,627 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch token _raw_params=try_login.yml] ***************** 2025-12-12 16:24:59,627 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.212 ******* 2025-12-12 16:24:59,627 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.211 ******* 2025-12-12 16:24:59,659 p=32453 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/try_login.yml for localhost 2025-12-12 16:24:59,677 p=32453 u=zuul n=ansible | TASK [openshift_login : Try get OpenShift access token _raw_params=oc whoami -t] *** 2025-12-12 16:24:59,677 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.049) 0:00:02.262 ******* 2025-12-12 16:24:59,677 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.049) 0:00:02.261 ******* 2025-12-12 16:24:59,695 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,712 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift token output_dir={{ cifmw_openshift_login_basedir }}/artifacts, script=oc login {%- if cifmw_openshift_login_provided_token is not defined %} {%- if cifmw_openshift_login_user is defined %} -u {{ cifmw_openshift_login_user }} {%- endif %} {%- if cifmw_openshift_login_password is defined %} -p {{ cifmw_openshift_login_password }} {%- endif %} {% else %} --token={{ cifmw_openshift_login_provided_token }} {%- endif %} {%- if cifmw_openshift_login_skip_tls_verify|bool %} --insecure-skip-tls-verify=true {%- endif %} {%- if cifmw_openshift_login_api is defined %} {{ cifmw_openshift_login_api }} {%- endif %}] *** 2025-12-12 16:24:59,712 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 
(0:00:00.035) 0:00:02.297 ******* 2025-12-12 16:24:59,713 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.296 ******* 2025-12-12 16:24:59,778 p=32453 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_000_fetch_openshift.log 2025-12-12 16:25:00,188 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:00,198 p=32453 u=zuul n=ansible | TASK [openshift_login : Ensure kubeconfig is provided that=cifmw_openshift_login_kubeconfig != ""] *** 2025-12-12 16:25:00,198 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.486) 0:00:02.783 ******* 2025-12-12 16:25:00,199 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.486) 0:00:02.782 ******* 2025-12-12 16:25:00,219 p=32453 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2025-12-12 16:25:00,230 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch new OpenShift access token _raw_params=oc whoami -t] *** 2025-12-12 16:25:00,230 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.031) 0:00:02.815 ******* 2025-12-12 16:25:00,230 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.031) 0:00:02.814 ******* 2025-12-12 16:25:00,701 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:00,711 p=32453 u=zuul n=ansible | TASK [openshift_login : Set new OpenShift token cifmw_openshift_login_token={{ (not cifmw_openshift_login_new_token_out.skipped | default(false)) | ternary(cifmw_openshift_login_new_token_out.stdout, cifmw_openshift_login_whoami_out.stdout) }}, cacheable=True] *** 2025-12-12 16:25:00,712 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.481) 0:00:03.296 ******* 2025-12-12 16:25:00,712 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.481) 0:00:03.295 ******* 2025-12-12 16:25:00,738 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:00,748 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift API URL _raw_params=oc whoami --show-server=true] *** 2025-12-12 16:25:00,748 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.036) 0:00:03.333 ******* 2025-12-12 16:25:00,748 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.036) 0:00:03.332 ******* 2025-12-12 16:25:01,060 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:01,073 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift kubeconfig context _raw_params=oc whoami -c] *** 2025-12-12 16:25:01,074 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.325) 0:00:03.658 ******* 2025-12-12 16:25:01,074 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.325) 0:00:03.657 ******* 2025-12-12 16:25:01,379 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:01,389 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift current user _raw_params=oc whoami] **** 2025-12-12 16:25:01,389 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.315) 0:00:03.974 ******* 2025-12-12 16:25:01,390 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.315) 0:00:03.973 ******* 2025-12-12 16:25:01,668 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:01,732 p=32453 u=zuul n=ansible | TASK [openshift_login : Set OpenShift user, context and API facts cifmw_openshift_login_api={{ cifmw_openshift_login_api_out.stdout }}, 
cifmw_openshift_login_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_login_user={{ _oauth_user }}, cifmw_openshift_kubeconfig={{ cifmw_openshift_login_kubeconfig }}, cifmw_openshift_api={{ cifmw_openshift_login_api_out.stdout }}, cifmw_openshift_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_user={{ _oauth_user }}, cifmw_openshift_token={{ cifmw_openshift_login_token | default(omit) }}, cifmw_install_yamls_environment={{ ( cifmw_install_yamls_environment | combine({'KUBECONFIG': cifmw_openshift_login_kubeconfig}) ) if cifmw_install_yamls_environment is defined else omit }}, cacheable=True] *** 2025-12-12 16:25:01,733 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.343) 0:00:04.317 ******* 2025-12-12 16:25:01,733 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.343) 0:00:04.316 ******* 2025-12-12 16:25:01,786 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:01,797 p=32453 u=zuul n=ansible | TASK [openshift_login : Create the openshift_login parameters file dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/openshift-login-params.yml, content={{ cifmw_openshift_login_params_content | from_yaml | to_nice_yaml }}, mode=0600] *** 2025-12-12 16:25:01,798 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.064) 0:00:04.382 ******* 2025-12-12 16:25:01,798 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.064) 0:00:04.381 ******* 2025-12-12 16:25:02,365 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:02,376 p=32453 u=zuul n=ansible | TASK [openshift_login : Read the install yamls parameters file path={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml] *** 2025-12-12 16:25:02,376 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:02 +0000 (0:00:00.578) 0:00:04.960 ******* 2025-12-12 16:25:02,376 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:02 +0000 (0:00:00.578) 0:00:04.960 ******* 2025-12-12 16:25:02,690 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:02,703 p=32453 u=zuul n=ansible | TASK [openshift_login : Append the KUBECONFIG to the install yamls parameters content={{ cifmw_openshift_login_install_yamls_artifacts_slurp['content'] | b64decode | from_yaml | combine( { 'cifmw_install_yamls_environment': { 'KUBECONFIG': cifmw_openshift_login_kubeconfig } }, recursive=true) | to_nice_yaml }}, dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml, mode=0600] *** 2025-12-12 16:25:02,704 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:02 +0000 (0:00:00.327) 0:00:05.288 ******* 2025-12-12 16:25:02,704 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:02 +0000 (0:00:00.327) 0:00:05.287 ******* 2025-12-12 16:25:03,147 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:03,169 p=32453 u=zuul n=ansible | TASK [openshift_setup : Ensure output directory exists path={{ cifmw_openshift_setup_basedir }}/artifacts, state=directory, mode=0755] *** 2025-12-12 16:25:03,169 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.465) 0:00:05.753 ******* 2025-12-12 16:25:03,169 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.465) 0:00:05.753 ******* 2025-12-12 16:25:03,374 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:03,389 p=32453 u=zuul n=ansible | 
TASK [openshift_setup : Fetch namespaces to create cifmw_openshift_setup_namespaces={{ (( ([cifmw_install_yamls_defaults['NAMESPACE']] + ([cifmw_install_yamls_defaults['OPERATOR_NAMESPACE']] if 'OPERATOR_NAMESPACE' is in cifmw_install_yamls_defaults else []) ) if cifmw_install_yamls_defaults is defined else [] ) + cifmw_openshift_setup_create_namespaces) | unique }}] *** 2025-12-12 16:25:03,389 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.220) 0:00:05.974 ******* 2025-12-12 16:25:03,389 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.220) 0:00:05.973 ******* 2025-12-12 16:25:03,437 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:03,452 p=32453 u=zuul n=ansible | TASK [openshift_setup : Create required namespaces kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ item }}, kind=Namespace, state=present] *** 2025-12-12 16:25:03,452 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.062) 0:00:06.036 ******* 2025-12-12 16:25:03,452 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.062) 0:00:06.035 ******* 2025-12-12 16:25:04,407 p=32453 u=zuul n=ansible | changed: [localhost] => (item=openstack) 2025-12-12 16:25:05,092 p=32453 u=zuul n=ansible | changed: [localhost] => (item=openstack-operators) 2025-12-12 16:25:05,110 p=32453 u=zuul n=ansible | TASK [openshift_setup : Get internal OpenShift registry route kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, kind=Route, name=default-route, namespace=openshift-image-registry] *** 2025-12-12 16:25:05,110 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:05 +0000 (0:00:01.658) 0:00:07.695 ******* 2025-12-12 16:25:05,110 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:05 +0000 (0:00:01.658) 0:00:07.694 ******* 2025-12-12 16:25:06,268 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:06,280 p=32453 u=zuul n=ansible | TASK [openshift_setup : Allow anonymous image-pulls in CRC registry for targeted namespaces state=present, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'kind': 'RoleBinding', 'apiVersion': 'rbac.authorization.k8s.io/v1', 'metadata': {'name': 'system:image-puller', 'namespace': '{{ item }}'}, 'subjects': [{'kind': 'User', 'name': 'system:anonymous'}, {'kind': 'User', 'name': 'system:unauthenticated'}], 'roleRef': {'kind': 'ClusterRole', 'name': 'system:image-puller'}}] *** 2025-12-12 16:25:06,281 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:06 +0000 (0:00:01.170) 0:00:08.865 ******* 2025-12-12 16:25:06,281 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:06 +0000 (0:00:01.170) 0:00:08.864 ******* 2025-12-12 16:25:07,027 p=32453 u=zuul n=ansible | changed: [localhost] => (item=openstack) 2025-12-12 16:25:07,763 p=32453 u=zuul n=ansible | changed: [localhost] => (item=openstack-operators) 2025-12-12 16:25:07,781 p=32453 u=zuul n=ansible | TASK [openshift_setup : Wait for the image registry to be ready kind=Deployment, name=image-registry, namespace=openshift-image-registry, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, wait=True, wait_sleep=10, 
wait_timeout=600, wait_condition={'type': 'Available', 'status': 'True'}] *** 2025-12-12 16:25:07,781 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:07 +0000 (0:00:01.500) 0:00:10.366 ******* 2025-12-12 16:25:07,782 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:07 +0000 (0:00:01.500) 0:00:10.365 ******* 2025-12-12 16:25:08,718 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:08,729 p=32453 u=zuul n=ansible | TASK [openshift_setup : Login into OpenShift internal registry output_dir={{ cifmw_openshift_setup_basedir }}/artifacts, script=podman login -u {{ cifmw_openshift_user }} -p {{ cifmw_openshift_token }} {%- if cifmw_openshift_setup_skip_internal_registry_tls_verify|bool %} --tls-verify=false {%- endif %} {{ cifmw_openshift_setup_registry_default_route.resources[0].spec.host }}] *** 2025-12-12 16:25:08,729 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:08 +0000 (0:00:00.947) 0:00:11.313 ******* 2025-12-12 16:25:08,729 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:08 +0000 (0:00:00.947) 0:00:11.313 ******* 2025-12-12 16:25:08,783 p=32453 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_001_login_into_openshift_internal.log 2025-12-12 16:25:08,970 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:08,980 p=32453 u=zuul n=ansible | TASK [Ensure we have custom CA installed on host role=install_ca] ************** 2025-12-12 16:25:08,980 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:08 +0000 (0:00:00.250) 0:00:11.564 ******* 2025-12-12 16:25:08,980 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:08 +0000 (0:00:00.250) 0:00:11.563 ******* 2025-12-12 16:25:09,011 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,024 p=32453 u=zuul n=ansible | TASK [openshift_setup : Update ca bundle _raw_params=update-ca-trust extract] *** 2025-12-12 16:25:09,024 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.044) 0:00:11.609 ******* 2025-12-12 16:25:09,024 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.044) 0:00:11.608 ******* 2025-12-12 16:25:09,049 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,060 p=32453 u=zuul n=ansible | TASK [openshift_setup : Slurp CAs file src={{ cifmw_openshift_setup_ca_bundle_path }}] *** 2025-12-12 16:25:09,060 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.036) 0:00:11.645 ******* 2025-12-12 16:25:09,060 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.036) 0:00:11.644 ******* 2025-12-12 16:25:09,114 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,124 p=32453 u=zuul n=ansible | TASK [openshift_setup : Create config map with registry CAs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'v1', 'kind': 'ConfigMap', 'metadata': {'namespace': 'openshift-config', 'name': 'registry-cas'}, 'data': '{{ _config_map_data | items2dict }}'}] *** 2025-12-12 16:25:09,124 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.064) 0:00:11.709 ******* 2025-12-12 16:25:09,125 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.064) 0:00:11.708 ******* 2025-12-12 16:25:09,147 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,156 p=32453 u=zuul n=ansible | TASK [openshift_setup : Install Red Hat CA for pulling images 
from internal registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'additionalTrustedCA': {'name': 'registry-cas'}}}] *** 2025-12-12 16:25:09,157 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.032) 0:00:11.741 ******* 2025-12-12 16:25:09,157 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.032) 0:00:11.740 ******* 2025-12-12 16:25:09,178 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,188 p=32453 u=zuul n=ansible | TASK [openshift_setup : Add insecure registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'registrySources': {'insecureRegistries': ['{{ cifmw_update_containers_registry }}'], 'allowedRegistries': '{{ all_registries }}'}}}] *** 2025-12-12 16:25:09,188 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.031) 0:00:11.772 ******* 2025-12-12 16:25:09,188 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.031) 0:00:11.771 ******* 2025-12-12 16:25:09,208 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,218 p=32453 u=zuul n=ansible | TASK [openshift_setup : Create a ICSP with repository digest mirrors kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'operator.openshift.io/v1alpha1', 'kind': 'ImageContentSourcePolicy', 'metadata': {'name': 'registry-digest-mirrors'}, 'spec': {'repositoryDigestMirrors': '{{ cifmw_openshift_setup_digest_mirrors }}'}}] *** 2025-12-12 16:25:09,218 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.030) 0:00:11.802 ******* 2025-12-12 16:25:09,218 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.030) 0:00:11.801 ******* 2025-12-12 16:25:09,243 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,256 p=32453 u=zuul n=ansible | TASK [openshift_setup : Gather network.operator info kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=operator.openshift.io/v1, kind=Network, name=cluster] *** 2025-12-12 16:25:09,257 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.038) 0:00:11.841 ******* 2025-12-12 16:25:09,257 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.038) 0:00:11.840 ******* 2025-12-12 16:25:10,059 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:10,073 p=32453 u=zuul n=ansible | TASK [openshift_setup : Patch network operator api_version=operator.openshift.io/v1, kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Network, name=cluster, persist_config=True, patch=[{'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/routingViaHost', 'value': True, 'op': 'replace'}, {'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/ipForwarding', 'value': 'Global', 'op': 'replace'}]] *** 2025-12-12 16:25:10,073 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:10 +0000 
(0:00:00.816) 0:00:12.658 ******* 2025-12-12 16:25:10,073 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:10 +0000 (0:00:00.816) 0:00:12.657 ******* 2025-12-12 16:25:11,085 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:11,097 p=32453 u=zuul n=ansible | TASK [openshift_setup : Patch samples registry configuration kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=samples.operator.openshift.io/v1, kind=Config, name=cluster, patch=[{'op': 'replace', 'path': '/spec/samplesRegistry', 'value': 'registry.redhat.io'}]] *** 2025-12-12 16:25:11,097 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:01.023) 0:00:13.682 ******* 2025-12-12 16:25:11,097 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:01.023) 0:00:13.681 ******* 2025-12-12 16:25:11,815 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:11,828 p=32453 u=zuul n=ansible | TASK [openshift_setup : Delete the pods from openshift-marketplace namespace kind=Pod, state=absent, delete_all=True, kubeconfig={{ cifmw_openshift_kubeconfig }}, namespace=openshift-marketplace] *** 2025-12-12 16:25:11,828 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.731) 0:00:14.413 ******* 2025-12-12 16:25:11,829 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.731) 0:00:14.412 ******* 2025-12-12 16:25:11,844 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:11,865 p=32453 u=zuul n=ansible | TASK [openshift_setup : Wait for openshift-marketplace pods to be running _raw_params=oc wait pod --all --for=condition=Ready -n openshift-marketplace --timeout=1m] *** 2025-12-12 16:25:11,865 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.036) 0:00:14.450 ******* 2025-12-12 16:25:11,865 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.036) 0:00:14.449 ******* 2025-12-12 16:25:11,896 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:11,944 p=32453 u=zuul n=ansible | TASK [Deploy Observability operator. 
name=openshift_obs] *********************** 2025-12-12 16:25:11,944 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.078) 0:00:14.528 ******* 2025-12-12 16:25:11,944 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.078) 0:00:14.527 ******* 2025-12-12 16:25:11,976 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:11,989 p=32453 u=zuul n=ansible | TASK [Deploy Metal3 BMHs name=deploy_bmh] ************************************** 2025-12-12 16:25:11,989 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.045) 0:00:14.574 ******* 2025-12-12 16:25:11,989 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.045) 0:00:14.573 ******* 2025-12-12 16:25:12,020 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,034 p=32453 u=zuul n=ansible | TASK [Install certmanager operator role name=cert_manager] ********************* 2025-12-12 16:25:12,034 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.044) 0:00:14.618 ******* 2025-12-12 16:25:12,034 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.044) 0:00:14.618 ******* 2025-12-12 16:25:12,067 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,078 p=32453 u=zuul n=ansible | TASK [Configure hosts networking using nmstate name=ci_nmstate] **************** 2025-12-12 16:25:12,079 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.044) 0:00:14.663 ******* 2025-12-12 16:25:12,079 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.044) 0:00:14.662 ******* 2025-12-12 16:25:12,099 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,111 p=32453 u=zuul n=ansible | TASK [Configure multus networks name=ci_multus] ******************************** 2025-12-12 16:25:12,111 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.032) 0:00:14.696 ******* 2025-12-12 16:25:12,112 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.032) 0:00:14.695 ******* 2025-12-12 16:25:12,137 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,150 p=32453 u=zuul n=ansible | TASK [Deploy Sushy Emulator service pod name=sushy_emulator] ******************* 2025-12-12 16:25:12,150 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.038) 0:00:14.734 ******* 2025-12-12 16:25:12,150 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.038) 0:00:14.734 ******* 2025-12-12 16:25:12,175 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,191 p=32453 u=zuul n=ansible | TASK [Setup Libvirt on controller name=libvirt_manager] ************************ 2025-12-12 16:25:12,191 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.041) 0:00:14.776 ******* 2025-12-12 16:25:12,191 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.041) 0:00:14.775 ******* 2025-12-12 16:25:12,223 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,234 p=32453 u=zuul n=ansible | TASK [Prepare container package builder name=pkg_build] ************************ 2025-12-12 16:25:12,235 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.043) 0:00:14.819 ******* 2025-12-12 16:25:12,235 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.043) 0:00:14.818 ******* 2025-12-12 16:25:12,262 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 
16:25:12,274 p=32453 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-12-12 16:25:12,275 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.039) 0:00:14.859 ******* 2025-12-12 16:25:12,275 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.039) 0:00:14.858 ******* 2025-12-12 16:25:12,333 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:12,348 p=32453 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-12-12 16:25:12,348 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.073) 0:00:14.933 ******* 2025-12-12 16:25:12,348 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.073) 0:00:14.932 ******* 2025-12-12 16:25:12,420 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:12,431 p=32453 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_infra _raw_params={{ hook.type }}.yml] *** 2025-12-12 16:25:12,431 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.083) 0:00:15.016 ******* 2025-12-12 16:25:12,431 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.082) 0:00:15.015 ******* 2025-12-12 16:25:12,526 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,601 p=32453 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2025-12-12 16:25:12,601 p=32453 u=zuul n=ansible | localhost : ok=35 changed=12 unreachable=0 failed=0 skipped=34 rescued=0 ignored=0 2025-12-12 16:25:12,601 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.170) 0:00:15.186 ******* 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | =============================================================================== 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Create required namespaces ---------------------------- 1.66s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Allow anonymous image-pulls in CRC registry for targeted namespaces --- 1.50s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Get internal OpenShift registry route ----------------- 1.17s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Patch network operator -------------------------------- 1.02s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Wait for the image registry to be ready --------------- 0.95s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Gather network.operator info -------------------------- 0.82s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Patch samples registry configuration ------------------ 0.73s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Create the openshift_login parameters file ------------ 0.58s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch OpenShift token --------------------------------- 0.49s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch new OpenShift access token ---------------------- 0.48s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Append the 
KUBECONFIG to the install yamls parameters --- 0.47s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Ensure output directory exists ------------------------ 0.42s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch OpenShift current user -------------------------- 0.34s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Read the install yamls parameters file ---------------- 0.33s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch OpenShift API URL ------------------------------- 0.33s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch OpenShift kubeconfig context -------------------- 0.32s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | networking_mapper : Check for Networking Environment Definition file existence --- 0.31s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Login into OpenShift internal registry ---------------- 0.25s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Ensure output directory exists ------------------------ 0.22s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Check if kubeconfig exists ---------------------------- 0.18s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.171) 0:00:15.186 ******* 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | =============================================================================== 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup --------------------------------------------------------- 8.77s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login --------------------------------------------------------- 4.52s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | run_hook ---------------------------------------------------------------- 0.63s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | ansible.builtin.include_role -------------------------------------------- 0.55s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | networking_mapper ------------------------------------------------------- 0.44s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | ansible.builtin.include_vars -------------------------------------------- 0.21s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | total ------------------------------------------------------------------ 15.13s 2025-12-12 16:25:31,617 p=33051 u=zuul n=ansible | Starting galaxy collection install process 2025-12-12 16:25:31,637 p=33051 u=zuul n=ansible | Process install dependency map 2025-12-12 16:25:45,294 p=33051 u=zuul n=ansible | Starting collection install process 2025-12-12 16:25:45,294 p=33051 u=zuul n=ansible | Installing 'cifmw.general:1.0.0+b9f05e2b' to '/home/zuul/.ansible/collections/ansible_collections/cifmw/general' 2025-12-12 16:25:45,929 p=33051 u=zuul n=ansible | Created collection for cifmw.general:1.0.0+b9f05e2b at /home/zuul/.ansible/collections/ansible_collections/cifmw/general 2025-12-12 16:25:45,929 p=33051 u=zuul n=ansible | cifmw.general:1.0.0+b9f05e2b was installed successfully 2025-12-12 16:25:45,929 p=33051 u=zuul n=ansible | Installing 'containers.podman:1.16.2' to '/home/zuul/.ansible/collections/ansible_collections/containers/podman' 2025-12-12 16:25:46,006 p=33051 u=zuul n=ansible | Created collection for containers.podman:1.16.2 at 
/home/zuul/.ansible/collections/ansible_collections/containers/podman 2025-12-12 16:25:46,006 p=33051 u=zuul n=ansible | containers.podman:1.16.2 was installed successfully 2025-12-12 16:25:46,006 p=33051 u=zuul n=ansible | Installing 'community.general:10.0.1' to '/home/zuul/.ansible/collections/ansible_collections/community/general' 2025-12-12 16:25:46,994 p=33051 u=zuul n=ansible | Created collection for community.general:10.0.1 at /home/zuul/.ansible/collections/ansible_collections/community/general 2025-12-12 16:25:46,994 p=33051 u=zuul n=ansible | community.general:10.0.1 was installed successfully 2025-12-12 16:25:46,995 p=33051 u=zuul n=ansible | Installing 'ansible.posix:1.6.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/posix' 2025-12-12 16:25:47,068 p=33051 u=zuul n=ansible | Created collection for ansible.posix:1.6.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/posix 2025-12-12 16:25:47,068 p=33051 u=zuul n=ansible | ansible.posix:1.6.2 was installed successfully 2025-12-12 16:25:47,068 p=33051 u=zuul n=ansible | Installing 'ansible.utils:5.1.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/utils' 2025-12-12 16:25:47,197 p=33051 u=zuul n=ansible | Created collection for ansible.utils:5.1.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/utils 2025-12-12 16:25:47,198 p=33051 u=zuul n=ansible | ansible.utils:5.1.2 was installed successfully 2025-12-12 16:25:47,198 p=33051 u=zuul n=ansible | Installing 'community.libvirt:1.3.0' to '/home/zuul/.ansible/collections/ansible_collections/community/libvirt' 2025-12-12 16:25:47,229 p=33051 u=zuul n=ansible | Created collection for community.libvirt:1.3.0 at /home/zuul/.ansible/collections/ansible_collections/community/libvirt 2025-12-12 16:25:47,229 p=33051 u=zuul n=ansible | community.libvirt:1.3.0 was installed successfully 2025-12-12 16:25:47,229 p=33051 u=zuul n=ansible | Installing 'community.crypto:2.22.3' to '/home/zuul/.ansible/collections/ansible_collections/community/crypto' 2025-12-12 16:25:47,430 p=33051 u=zuul n=ansible | Created collection for community.crypto:2.22.3 at /home/zuul/.ansible/collections/ansible_collections/community/crypto 2025-12-12 16:25:47,430 p=33051 u=zuul n=ansible | community.crypto:2.22.3 was installed successfully 2025-12-12 16:25:47,431 p=33051 u=zuul n=ansible | Installing 'kubernetes.core:5.0.0' to '/home/zuul/.ansible/collections/ansible_collections/kubernetes/core' 2025-12-12 16:25:47,614 p=33051 u=zuul n=ansible | Created collection for kubernetes.core:5.0.0 at /home/zuul/.ansible/collections/ansible_collections/kubernetes/core 2025-12-12 16:25:47,614 p=33051 u=zuul n=ansible | kubernetes.core:5.0.0 was installed successfully 2025-12-12 16:25:47,614 p=33051 u=zuul n=ansible | Installing 'ansible.netcommon:7.1.0' to '/home/zuul/.ansible/collections/ansible_collections/ansible/netcommon' 2025-12-12 16:25:47,708 p=33051 u=zuul n=ansible | Created collection for ansible.netcommon:7.1.0 at /home/zuul/.ansible/collections/ansible_collections/ansible/netcommon 2025-12-12 16:25:47,708 p=33051 u=zuul n=ansible | ansible.netcommon:7.1.0 was installed successfully 2025-12-12 16:25:47,708 p=33051 u=zuul n=ansible | Installing 'openstack.config_template:2.1.1' to '/home/zuul/.ansible/collections/ansible_collections/openstack/config_template' 2025-12-12 16:25:47,736 p=33051 u=zuul n=ansible | Created collection for openstack.config_template:2.1.1 at /home/zuul/.ansible/collections/ansible_collections/openstack/config_template 2025-12-12 
16:25:47,736 p=33051 u=zuul n=ansible | openstack.config_template:2.1.1 was installed successfully 2025-12-12 16:25:47,736 p=33051 u=zuul n=ansible | Installing 'junipernetworks.junos:9.1.0' to '/home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos' 2025-12-12 16:25:48,052 p=33051 u=zuul n=ansible | Created collection for junipernetworks.junos:9.1.0 at /home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos 2025-12-12 16:25:48,052 p=33051 u=zuul n=ansible | junipernetworks.junos:9.1.0 was installed successfully 2025-12-12 16:25:48,052 p=33051 u=zuul n=ansible | Installing 'cisco.ios:9.0.3' to '/home/zuul/.ansible/collections/ansible_collections/cisco/ios' 2025-12-12 16:25:48,400 p=33051 u=zuul n=ansible | Created collection for cisco.ios:9.0.3 at /home/zuul/.ansible/collections/ansible_collections/cisco/ios 2025-12-12 16:25:48,400 p=33051 u=zuul n=ansible | cisco.ios:9.0.3 was installed successfully 2025-12-12 16:25:48,400 p=33051 u=zuul n=ansible | Installing 'mellanox.onyx:1.0.0' to '/home/zuul/.ansible/collections/ansible_collections/mellanox/onyx' 2025-12-12 16:25:48,448 p=33051 u=zuul n=ansible | Created collection for mellanox.onyx:1.0.0 at /home/zuul/.ansible/collections/ansible_collections/mellanox/onyx 2025-12-12 16:25:48,449 p=33051 u=zuul n=ansible | mellanox.onyx:1.0.0 was installed successfully 2025-12-12 16:25:48,449 p=33051 u=zuul n=ansible | Installing 'community.okd:4.0.0' to '/home/zuul/.ansible/collections/ansible_collections/community/okd' 2025-12-12 16:25:48,493 p=33051 u=zuul n=ansible | Created collection for community.okd:4.0.0 at /home/zuul/.ansible/collections/ansible_collections/community/okd 2025-12-12 16:25:48,493 p=33051 u=zuul n=ansible | community.okd:4.0.0 was installed successfully 2025-12-12 16:25:48,493 p=33051 u=zuul n=ansible | Installing '@NAMESPACE@.@NAME@:3.1.4' to '/home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@' 2025-12-12 16:25:48,624 p=33051 u=zuul n=ansible | Created collection for @NAMESPACE@.@NAME@:3.1.4 at /home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@ 2025-12-12 16:25:48,625 p=33051 u=zuul n=ansible | @NAMESPACE@.@NAME@:3.1.4 was installed successfully home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_000_fetch_openshift.log0000644000175000017500000000035215117040734027622 0ustar zuulzuulWARNING: Using insecure TLS client config. Setting this option is not supported! Login successful. You have access to 65 projects, the list has been suppressed. You can list all projects with 'oc projects' Using project "default". home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_001_login_into_openshift_internal.log0000644000175000017500000000002115117040744032561 0ustar zuulzuulLogin Succeeded! home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_000_run_openstack_must_gather.log0000644000175000017500000000713415117043022031725 0ustar zuulzuul[must-gather ] OUT 2025-12-12T16:40:38.139203278Z Using must-gather plug-in image: quay.io/openstack-k8s-operators/openstack-must-gather:latest When opening a support case, bugzilla, or issue please include the following summary data along with any other requested information: ClusterID: ClientVersion: 4.20.6 ClusterVersion: Stable at "4.20.1" ClusterOperators: clusteroperator/machine-config is degraded because Failed to resync 4.20.1 because: error during syncRequiredMachineConfigPools: [context deadline exceeded, error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"")] clusteroperator/cloud-credential is missing clusteroperator/cluster-autoscaler is missing clusteroperator/insights is missing clusteroperator/monitoring is missing clusteroperator/storage is missing [must-gather ] OUT 2025-12-12T16:40:38.18604624Z namespace/openshift-must-gather-2sjxj created [must-gather ] OUT 2025-12-12T16:40:38.195476451Z clusterrolebinding.rbac.authorization.k8s.io/must-gather-k5chq created [must-gather ] OUT 2025-12-12T16:40:38.21981086Z pod for plug-in image quay.io/openstack-k8s-operators/openstack-must-gather:latest created [must-gather-v4h5l] OUT 2025-12-12T16:40:48.231999094Z gather logs unavailable: Get "https://192.168.126.11:10250/containerLogs/openshift-must-gather-2sjxj/must-gather-v4h5l/gather?follow=true&timestamps=true": remote error: tls: internal error [must-gather-v4h5l] OUT 2025-12-12T16:40:48.23223174Z waiting for gather to complete [must-gather-v4h5l] OUT 2025-12-12T16:42:58.238364847Z downloading gather output WARNING: cannot use rsync: rsync not available in container WARNING: cannot use tar: tar not available in container WARNING: cannot use rsync: rsync not available in container WARNING: cannot use tar: tar not available in container [must-gather-v4h5l] OUT 2025-12-12T16:42:58.802953469Z gather output not downloaded: [Get "https://192.168.126.11:10250/containerLogs/openshift-must-gather-2sjxj/must-gather-v4h5l/gather?timestamps=true": remote error: tls: internal error, No available strategies to copy.] [must-gather-v4h5l] OUT 2025-12-12T16:42:58.80298743Z [must-gather ] OUT 2025-12-12T16:42:58.80745501Z namespace/openshift-must-gather-2sjxj deleted Reprinting Cluster State: When opening a support case, bugzilla, or issue please include the following summary data along with any other requested information: ClusterID: ClientVersion: 4.20.6 ClusterVersion: Stable at "4.20.1" ClusterOperators: clusteroperator/machine-config is degraded because Failed to resync 4.20.1 because: error during syncRequiredMachineConfigPools: [context deadline exceeded, error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1, reason: Node crc is reporting: "unexpected on-disk state validating against rendered-master-d582710c680b4cd4536e11249c7e09e9: content mismatch for file \"/var/lib/kubelet/config.json\"")] clusteroperator/cloud-credential is missing clusteroperator/cluster-autoscaler is missing clusteroperator/insights is missing clusteroperator/monitoring is missing clusteroperator/storage is missing error: unable to download output from pod must-gather-v4h5l: [Get "https://192.168.126.11:10250/containerLogs/openshift-must-gather-2sjxj/must-gather-v4h5l/gather?timestamps=true": remote error: tls: internal error, No available strategies to copy.] home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_000_prepare_root_ssh.log0000644000175000017500000034447015117043042030037 0ustar zuulzuulPseudo-terminal will not be allocated because stdin is not a terminal. Red Hat Enterprise Linux CoreOS 9.6.20251021-0 Part of OpenShift 4.20, RHCOS is a Kubernetes-native operating system managed by the Machine Config Operator (`clusteroperator/machine-config`).
WARNING: Direct SSH access to machines is not recommended; instead, make configuration changes via `machineconfig` objects: https://docs.openshift.com/container-platform/4.20/architecture/architecture-rhcos.html --- + test -d /etc/ssh/sshd_config.d/ + sudo sed -ri 's/PermitRootLogin no/PermitRootLogin prohibit-password/' '/etc/ssh/sshd_config.d/*' sed: can't read /etc/ssh/sshd_config.d/*: No such file or directory + true + sudo sed -i 's/PermitRootLogin no/PermitRootLogin prohibit-password/' /etc/ssh/sshd_config + sudo systemctl restart sshd + sudo cp -r .ssh /root/ + sudo chown -R root: /root/.ssh + mkdir -p /tmp/crc-logs-artifacts + sudo cp -av /ostree/deploy/rhcos/var/log/pods /tmp/crc-logs-artifacts/ '/ostree/deploy/rhcos/var/log/pods' -> '/tmp/crc-logs-artifacts/pods' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/dns' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/dns' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/dns/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/dns/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-565b79b866-krgxf_dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-565b79b866-krgxf_dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-565b79b866-krgxf_dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7/kube-storage-version-migrator-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-565b79b866-krgxf_dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7/kube-storage-version-migrator-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-565b79b866-krgxf_dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7/kube-storage-version-migrator-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-565b79b866-krgxf_dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7/kube-storage-version-migrator-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd' 
'/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-resources-copy' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-resources-copy' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-resources-copy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-resources-copy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-rev' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-rev' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-rev/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-rev/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/setup' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/setup' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/setup/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/setup/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-metrics' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-metrics' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-metrics/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-metrics/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcdctl' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcdctl' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcdctl/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcdctl/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-ensure-env-vars' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-ensure-env-vars' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-ensure-env-vars/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-ensure-env-vars/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-readyz' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-readyz' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-readyz/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-readyz/0.log' '/ostree/deploy/rhcos/var/log/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a' -> '/tmp/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a' 
'/ostree/deploy/rhcos/var/log/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elasticsearch' -> '/tmp/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elasticsearch' '/ostree/deploy/rhcos/var/log/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elasticsearch/0.log' -> '/tmp/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elasticsearch/0.log' '/ostree/deploy/rhcos/var/log/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-suspend' -> '/tmp/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-suspend' '/ostree/deploy/rhcos/var/log/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-suspend/0.log' -> '/tmp/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-suspend/0.log' '/ostree/deploy/rhcos/var/log/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-init-filesystem' -> '/tmp/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-init-filesystem' '/ostree/deploy/rhcos/var/log/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-init-filesystem/0.log' -> '/tmp/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-init-filesystem/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr_bc636fbb-cf50-4a1f-82f5-81db89bb0f5b' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr_bc636fbb-cf50-4a1f-82f5-81db89bb0f5b' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr_bc636fbb-cf50-4a1f-82f5-81db89bb0f5b/prometheus-operator-admission-webhook' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr_bc636fbb-cf50-4a1f-82f5-81db89bb0f5b/prometheus-operator-admission-webhook' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr_bc636fbb-cf50-4a1f-82f5-81db89bb0f5b/prometheus-operator-admission-webhook/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr_bc636fbb-cf50-4a1f-82f5-81db89bb0f5b/prometheus-operator-admission-webhook/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_perses-operator-68bdb49cbf-nqtp8_f38bca5c-15f3-4d63-9c03-a33ec7a5f22b' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_perses-operator-68bdb49cbf-nqtp8_f38bca5c-15f3-4d63-9c03-a33ec7a5f22b' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_perses-operator-68bdb49cbf-nqtp8_f38bca5c-15f3-4d63-9c03-a33ec7a5f22b/perses-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_perses-operator-68bdb49cbf-nqtp8_f38bca5c-15f3-4d63-9c03-a33ec7a5f22b/perses-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_perses-operator-68bdb49cbf-nqtp8_f38bca5c-15f3-4d63-9c03-a33ec7a5f22b/perses-operator/0.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-operators_perses-operator-68bdb49cbf-nqtp8_f38bca5c-15f3-4d63-9c03-a33ec7a5f22b/perses-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_network-operator-7bdcf4f5bd-7fjxv_34177974-8d82-49d2-a763-391d0df3bbd8' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-7bdcf4f5bd-7fjxv_34177974-8d82-49d2-a763-391d0df3bbd8' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_network-operator-7bdcf4f5bd-7fjxv_34177974-8d82-49d2-a763-391d0df3bbd8/network-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-7bdcf4f5bd-7fjxv_34177974-8d82-49d2-a763-391d0df3bbd8/network-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_network-operator-7bdcf4f5bd-7fjxv_34177974-8d82-49d2-a763-391d0df3bbd8/network-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-7bdcf4f5bd-7fjxv_34177974-8d82-49d2-a763-391d0df3bbd8/network-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/extract' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/extract' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/extract/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/extract/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/pull' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/pull' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/pull/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/pull/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/util' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/util' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/util/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/util/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29425935-7hkrm_19e81fea-065e-43b5-8e56-49bfcfa342f7' -> 
'/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425935-7hkrm_19e81fea-065e-43b5-8e56-49bfcfa342f7' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29425935-7hkrm_19e81fea-065e-43b5-8e56-49bfcfa342f7/collect-profiles' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425935-7hkrm_19e81fea-065e-43b5-8e56-49bfcfa342f7/collect-profiles' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29425935-7hkrm_19e81fea-065e-43b5-8e56-49bfcfa342f7/collect-profiles/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425935-7hkrm_19e81fea-065e-43b5-8e56-49bfcfa342f7/collect-profiles/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-cert-syncer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-cert-syncer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-cert-syncer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-cert-syncer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-recovery-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-recovery-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-recovery-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-recovery-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/cluster-policy-controller' -> 
'/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/cluster-policy-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/cluster-policy-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/cluster-policy-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_olm-operator-5cdf44d969-kcw92_124ec2f9-0e23-47da-b25f-66a13947465e' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-5cdf44d969-kcw92_124ec2f9-0e23-47da-b25f-66a13947465e' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_olm-operator-5cdf44d969-kcw92_124ec2f9-0e23-47da-b25f-66a13947465e/olm-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-5cdf44d969-kcw92_124ec2f9-0e23-47da-b25f-66a13947465e/olm-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_olm-operator-5cdf44d969-kcw92_124ec2f9-0e23-47da-b25f-66a13947465e/olm-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-5cdf44d969-kcw92_124ec2f9-0e23-47da-b25f-66a13947465e/olm-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_iptables-alerter-5jnd7_428b39f5-eb1c-4f65-b7a4-eeb6e84860cc' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-5jnd7_428b39f5-eb1c-4f65-b7a4-eeb6e84860cc' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_iptables-alerter-5jnd7_428b39f5-eb1c-4f65-b7a4-eeb6e84860cc/iptables-alerter' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-5jnd7_428b39f5-eb1c-4f65-b7a4-eeb6e84860cc/iptables-alerter' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_iptables-alerter-5jnd7_428b39f5-eb1c-4f65-b7a4-eeb6e84860cc/iptables-alerter/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-5jnd7_428b39f5-eb1c-4f65-b7a4-eeb6e84860cc/iptables-alerter/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0' -> '/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0' '/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0/approver' -> '/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0/approver' '/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0/approver/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0/approver/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0/webhook' -> '/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0/webhook' '/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0/webhook/0.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0/webhook/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-version_cluster-version-operator-7c9b9cfd6-d85ps_3be77ab3-0638-4ffa-960a-34823c8e08a1' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-7c9b9cfd6-d85ps_3be77ab3-0638-4ffa-960a-34823c8e08a1' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-version_cluster-version-operator-7c9b9cfd6-d85ps_3be77ab3-0638-4ffa-960a-34823c8e08a1/cluster-version-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-7c9b9cfd6-d85ps_3be77ab3-0638-4ffa-960a-34823c8e08a1/cluster-version-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-version_cluster-version-operator-7c9b9cfd6-d85ps_3be77ab3-0638-4ffa-960a-34823c8e08a1/cluster-version-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-7c9b9cfd6-d85ps_3be77ab3-0638-4ffa-960a-34823c8e08a1/cluster-version-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager_controller-manager-7b9f779b68-rhrzf_7313ab95-a89a-4df9-a791-1d048a6beba9' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-7b9f779b68-rhrzf_7313ab95-a89a-4df9-a791-1d048a6beba9' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager_controller-manager-7b9f779b68-rhrzf_7313ab95-a89a-4df9-a791-1d048a6beba9/controller-manager' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-7b9f779b68-rhrzf_7313ab95-a89a-4df9-a791-1d048a6beba9/controller-manager' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager_controller-manager-7b9f779b68-rhrzf_7313ab95-a89a-4df9-a791-1d048a6beba9/controller-manager/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-7b9f779b68-rhrzf_7313ab95-a89a-4df9-a791-1d048a6beba9/controller-manager/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/network-metrics-daemon' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/network-metrics-daemon' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/network-metrics-daemon/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/network-metrics-daemon/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9' -> 
'/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/ingress-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/ingress-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/ingress-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/ingress-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-cert-syncer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-cert-syncer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-cert-syncer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-cert-syncer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/wait-for-host-port' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/wait-for-host-port' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/wait-for-host-port/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/wait-for-host-port/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-recovery-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-recovery-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-recovery-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-recovery-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler' 
-> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler/0.log' '/ostree/deploy/rhcos/var/log/pods/service-telemetry_elastic-operator-6c994c654b-42tmw_1aa11df6-5c2b-4018-8146-09c5d79b9311' -> '/tmp/crc-logs-artifacts/pods/service-telemetry_elastic-operator-6c994c654b-42tmw_1aa11df6-5c2b-4018-8146-09c5d79b9311' '/ostree/deploy/rhcos/var/log/pods/service-telemetry_elastic-operator-6c994c654b-42tmw_1aa11df6-5c2b-4018-8146-09c5d79b9311/manager' -> '/tmp/crc-logs-artifacts/pods/service-telemetry_elastic-operator-6c994c654b-42tmw_1aa11df6-5c2b-4018-8146-09c5d79b9311/manager' '/ostree/deploy/rhcos/var/log/pods/service-telemetry_elastic-operator-6c994c654b-42tmw_1aa11df6-5c2b-4018-8146-09c5d79b9311/manager/0.log' -> '/tmp/crc-logs-artifacts/pods/service-telemetry_elastic-operator-6c994c654b-42tmw_1aa11df6-5c2b-4018-8146-09c5d79b9311/manager/0.log' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl_7f3690b6-63d7-48cc-9508-e016e3476a99' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl_7f3690b6-63d7-48cc-9508-e016e3476a99' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl_7f3690b6-63d7-48cc-9508-e016e3476a99/cert-manager-cainjector' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl_7f3690b6-63d7-48cc-9508-e016e3476a99/cert-manager-cainjector' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl_7f3690b6-63d7-48cc-9508-e016e3476a99/cert-manager-cainjector/0.log' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl_7f3690b6-63d7-48cc-9508-e016e3476a99/cert-manager-cainjector/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-686468bdd5-xknw6_9cc5b0f4-dc96-4a65-8404-f3d36ad70787' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-686468bdd5-xknw6_9cc5b0f4-dc96-4a65-8404-f3d36ad70787' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-686468bdd5-xknw6_9cc5b0f4-dc96-4a65-8404-f3d36ad70787/openshift-controller-manager-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-686468bdd5-xknw6_9cc5b0f4-dc96-4a65-8404-f3d36ad70787/openshift-controller-manager-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-686468bdd5-xknw6_9cc5b0f4-dc96-4a65-8404-f3d36ad70787/openshift-controller-manager-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-686468bdd5-xknw6_9cc5b0f4-dc96-4a65-8404-f3d36ad70787/openshift-controller-manager-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c' 
'/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/northd' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/northd' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/northd/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/northd/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/nbdb' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/nbdb' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/nbdb/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/nbdb/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kubecfg-setup' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kubecfg-setup' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kubecfg-setup/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kubecfg-setup/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/sbdb' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/sbdb' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/sbdb/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/sbdb/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovnkube-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovnkube-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovnkube-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovnkube-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-acl-logging' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-acl-logging' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-acl-logging/0.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-acl-logging/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-node' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-node' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-node/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-node/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-ovn-metrics' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-ovn-metrics' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-ovn-metrics/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-ovn-metrics/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c' '/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-config-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-config-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-config-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-config-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-api' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-api' '/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-api/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-api/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-check-endpoints' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-check-endpoints' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-check-endpoints/0.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-check-endpoints/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/setup' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/setup' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/setup/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/setup/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-syncer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-syncer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-syncer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-syncer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-regeneration-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-regeneration-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-regeneration-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-regeneration-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-insecure-readyz' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-insecure-readyz' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-insecure-readyz/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-insecure-readyz/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/registry-server' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/registry-server' 
'/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/registry-server/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/registry-server/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/extract-content' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/extract-content' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/extract-content/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/extract-content/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/extract-utilities' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/extract-utilities' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/extract-utilities/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/extract-utilities/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/package-server-manager' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/package-server-manager' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/package-server-manager/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/package-server-manager/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console_downloads-747b44746d-sm46g_f967d508-b683-4df4-9be0-3a7fb5afa7bb' -> '/tmp/crc-logs-artifacts/pods/openshift-console_downloads-747b44746d-sm46g_f967d508-b683-4df4-9be0-3a7fb5afa7bb' '/ostree/deploy/rhcos/var/log/pods/openshift-console_downloads-747b44746d-sm46g_f967d508-b683-4df4-9be0-3a7fb5afa7bb/download-server' -> '/tmp/crc-logs-artifacts/pods/openshift-console_downloads-747b44746d-sm46g_f967d508-b683-4df4-9be0-3a7fb5afa7bb/download-server' 
'/ostree/deploy/rhcos/var/log/pods/openshift-console_downloads-747b44746d-sm46g_f967d508-b683-4df4-9be0-3a7fb5afa7bb/download-server/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console_downloads-747b44746d-sm46g_f967d508-b683-4df4-9be0-3a7fb5afa7bb/download-server/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/fix-audit-permissions' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/fix-audit-permissions' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/fix-audit-permissions/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/fix-audit-permissions/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/openshift-apiserver-check-endpoints' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/openshift-apiserver-check-endpoints' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/openshift-apiserver-check-endpoints/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/openshift-apiserver-check-endpoints/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/openshift-apiserver' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/openshift-apiserver' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/openshift-apiserver/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/openshift-apiserver/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_image-registry-5d9d95bf5b-6md9w_b75bc011-274b-4fb1-8311-15ffa1b33366' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_image-registry-5d9d95bf5b-6md9w_b75bc011-274b-4fb1-8311-15ffa1b33366' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_image-registry-5d9d95bf5b-6md9w_b75bc011-274b-4fb1-8311-15ffa1b33366/registry' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_image-registry-5d9d95bf5b-6md9w_b75bc011-274b-4fb1-8311-15ffa1b33366/registry' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_image-registry-5d9d95bf5b-6md9w_b75bc011-274b-4fb1-8311-15ffa1b33366/registry/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_image-registry-5d9d95bf5b-6md9w_b75bc011-274b-4fb1-8311-15ffa1b33366/registry/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd-operator_etcd-operator-69b85846b6-mrrt5_a6c070b2-83ee-4c73-9201-3ab5dcc9aeca' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-69b85846b6-mrrt5_a6c070b2-83ee-4c73-9201-3ab5dcc9aeca' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd-operator_etcd-operator-69b85846b6-mrrt5_a6c070b2-83ee-4c73-9201-3ab5dcc9aeca/etcd-operator' -> 
'/tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-69b85846b6-mrrt5_a6c070b2-83ee-4c73-9201-3ab5dcc9aeca/etcd-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd-operator_etcd-operator-69b85846b6-mrrt5_a6c070b2-83ee-4c73-9201-3ab5dcc9aeca/etcd-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-69b85846b6-mrrt5_a6c070b2-83ee-4c73-9201-3ab5dcc9aeca/etcd-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e' -> '/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e' '/ostree/deploy/rhcos/var/log/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/dns-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/dns-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/dns-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/dns-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-source-5bb8f5cd97-xdvz5_f863fff9-286a-45fa-b8f0-8a86994b8440' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5bb8f5cd97-xdvz5_f863fff9-286a-45fa-b8f0-8a86994b8440' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-source-5bb8f5cd97-xdvz5_f863fff9-286a-45fa-b8f0-8a86994b8440/check-endpoints' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5bb8f5cd97-xdvz5_f863fff9-286a-45fa-b8f0-8a86994b8440/check-endpoints' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-source-5bb8f5cd97-xdvz5_f863fff9-286a-45fa-b8f0-8a86994b8440/check-endpoints/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5bb8f5cd97-xdvz5_f863fff9-286a-45fa-b8f0-8a86994b8440/check-endpoints/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-54f497555d-dcs9d_60d98f7f-99e4-4bb4-a7b6-48de2ff6071c' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-54f497555d-dcs9d_60d98f7f-99e4-4bb4-a7b6-48de2ff6071c' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-54f497555d-dcs9d_60d98f7f-99e4-4bb4-a7b6-48de2ff6071c/kube-scheduler-operator-container' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-54f497555d-dcs9d_60d98f7f-99e4-4bb4-a7b6-48de2ff6071c/kube-scheduler-operator-container' 
'/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-54f497555d-dcs9d_60d98f7f-99e4-4bb4-a7b6-48de2ff6071c/kube-scheduler-operator-container/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-54f497555d-dcs9d_60d98f7f-99e4-4bb4-a7b6-48de2ff6071c/kube-scheduler-operator-container/0.log' '/ostree/deploy/rhcos/var/log/pods/service-telemetry_infrawatch-operators-6bs58_6510d065-e486-4274-a8ca-4c2cdb8dd1ae' -> '/tmp/crc-logs-artifacts/pods/service-telemetry_infrawatch-operators-6bs58_6510d065-e486-4274-a8ca-4c2cdb8dd1ae' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_revision-pruner-12-crc_24732491-f54a-410e-a29e-c8fb26fd9cde' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-12-crc_24732491-f54a-410e-a29e-c8fb26fd9cde' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_revision-pruner-12-crc_24732491-f54a-410e-a29e-c8fb26fd9cde/pruner' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-12-crc_24732491-f54a-410e-a29e-c8fb26fd9cde/pruner' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_revision-pruner-12-crc_24732491-f54a-410e-a29e-c8fb26fd9cde/pruner/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-12-crc_24732491-f54a-410e-a29e-c8fb26fd9cde/pruner/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-target-fhkjl_17b87002-b798-480a-8e17-83053d698239' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-fhkjl_17b87002-b798-480a-8e17-83053d698239' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-target-fhkjl_17b87002-b798-480a-8e17-83053d698239/network-check-target-container' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-fhkjl_17b87002-b798-480a-8e17-83053d698239/network-check-target-container' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-target-fhkjl_17b87002-b798-480a-8e17-83053d698239/network-check-target-container/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-fhkjl_17b87002-b798-480a-8e17-83053d698239/network-check-target-container/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-75ffdb6fcd-m8gw7_9c49153e-af72-4d2f-8184-fa7ba43a5a3e' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-75ffdb6fcd-m8gw7_9c49153e-af72-4d2f-8184-fa7ba43a5a3e' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-75ffdb6fcd-m8gw7_9c49153e-af72-4d2f-8184-fa7ba43a5a3e/control-plane-machine-set-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-75ffdb6fcd-m8gw7_9c49153e-af72-4d2f-8184-fa7ba43a5a3e/control-plane-machine-set-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-75ffdb6fcd-m8gw7_9c49153e-af72-4d2f-8184-fa7ba43a5a3e/control-plane-machine-set-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-75ffdb6fcd-m8gw7_9c49153e-af72-4d2f-8184-fa7ba43a5a3e/control-plane-machine-set-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_packageserver-7d4fc7d867-lfwgk_6e354e82-d648-4680-b0c8-e901bfcfbd5f' -> 
'/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-7d4fc7d867-lfwgk_6e354e82-d648-4680-b0c8-e901bfcfbd5f' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_packageserver-7d4fc7d867-lfwgk_6e354e82-d648-4680-b0c8-e901bfcfbd5f/packageserver' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-7d4fc7d867-lfwgk_6e354e82-d648-4680-b0c8-e901bfcfbd5f/packageserver' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_packageserver-7d4fc7d867-lfwgk_6e354e82-d648-4680-b0c8-e901bfcfbd5f/packageserver/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-7d4fc7d867-lfwgk_6e354e82-d648-4680-b0c8-e901bfcfbd5f/packageserver/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_obo-prometheus-operator-86648f486b-wbj29_18744739-d26e-4056-a036-656151fcc824' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-86648f486b-wbj29_18744739-d26e-4056-a036-656151fcc824' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_obo-prometheus-operator-86648f486b-wbj29_18744739-d26e-4056-a036-656151fcc824/prometheus-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-86648f486b-wbj29_18744739-d26e-4056-a036-656151fcc824/prometheus-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_obo-prometheus-operator-86648f486b-wbj29_18744739-d26e-4056-a036-656151fcc824/prometheus-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-86648f486b-wbj29_18744739-d26e-4056-a036-656151fcc824/prometheus-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/graceful-termination' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/graceful-termination' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/graceful-termination/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/graceful-termination/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/migrator' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/migrator' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/migrator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/migrator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-console_networking-console-plugin-5ff7774fd9-nljh6_6a9ae5f6-97bd-46ac-bafa-ca1b4452a141' -> '/tmp/crc-logs-artifacts/pods/openshift-network-console_networking-console-plugin-5ff7774fd9-nljh6_6a9ae5f6-97bd-46ac-bafa-ca1b4452a141' 
'/ostree/deploy/rhcos/var/log/pods/openshift-network-console_networking-console-plugin-5ff7774fd9-nljh6_6a9ae5f6-97bd-46ac-bafa-ca1b4452a141/networking-console-plugin' -> '/tmp/crc-logs-artifacts/pods/openshift-network-console_networking-console-plugin-5ff7774fd9-nljh6_6a9ae5f6-97bd-46ac-bafa-ca1b4452a141/networking-console-plugin' '/ostree/deploy/rhcos/var/log/pods/openshift-network-console_networking-console-plugin-5ff7774fd9-nljh6_6a9ae5f6-97bd-46ac-bafa-ca1b4452a141/networking-console-plugin/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-console_networking-console-plugin-5ff7774fd9-nljh6_6a9ae5f6-97bd-46ac-bafa-ca1b4452a141/networking-console-plugin/0.log' '/ostree/deploy/rhcos/var/log/pods/service-telemetry_infrawatch-operators-cdpts_eeed1a9b-f386-4d11-b730-03bcb44f9a55' -> '/tmp/crc-logs-artifacts/pods/service-telemetry_infrawatch-operators-cdpts_eeed1a9b-f386-4d11-b730-03bcb44f9a55' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/multus-admission-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/multus-admission-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/multus-admission-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/multus-admission-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4/setup' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4/setup' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4/setup/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4/setup/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4/kube-rbac-proxy-crio' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4/kube-rbac-proxy-crio' 
'/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4/kube-rbac-proxy-crio/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4/kube-rbac-proxy-crio/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/extract' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/extract' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/extract/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/extract/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/util' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/util' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/util/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/util/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/pull' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/pull' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/pull/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/pull/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_installer-12-crc_214aeed8-f6a2-4251-b4d0-c81fd217c7c2' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_214aeed8-f6a2-4251-b4d0-c81fd217c7c2' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_installer-12-crc_214aeed8-f6a2-4251-b4d0-c81fd217c7c2/installer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_214aeed8-f6a2-4251-b4d0-c81fd217c7c2/installer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_installer-12-crc_214aeed8-f6a2-4251-b4d0-c81fd217c7c2/installer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_214aeed8-f6a2-4251-b4d0-c81fd217c7c2/installer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9' -> 
'/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9/machine-config-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9/machine-config-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9/machine-config-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9/machine-config-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/ovnkube-cluster-manager' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/ovnkube-cluster-manager' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/ovnkube-cluster-manager/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/ovnkube-cluster-manager/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/registry-server' -> 
'/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/registry-server' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/registry-server/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/registry-server/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/extract-utilities' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/extract-utilities' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/extract-utilities/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/extract-utilities/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/extract-content' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/extract-content' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/extract-content/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/extract-content/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_node-resolver-tddhh_72dbaca9-d010-46f5-a645-d2713a98f846' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_node-resolver-tddhh_72dbaca9-d010-46f5-a645-d2713a98f846' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_node-resolver-tddhh_72dbaca9-d010-46f5-a645-d2713a98f846/dns-node-resolver' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_node-resolver-tddhh_72dbaca9-d010-46f5-a645-d2713a98f846/dns-node-resolver' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_node-resolver-tddhh_72dbaca9-d010-46f5-a645-d2713a98f846/dns-node-resolver/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_node-resolver-tddhh_72dbaca9-d010-46f5-a645-d2713a98f846/dns-node-resolver/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication-operator_authentication-operator-7f5c659b84-6t92c_d55f43e2-46df-4460-b17f-0daa75b89154' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7f5c659b84-6t92c_d55f43e2-46df-4460-b17f-0daa75b89154' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication-operator_authentication-operator-7f5c659b84-6t92c_d55f43e2-46df-4460-b17f-0daa75b89154/authentication-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7f5c659b84-6t92c_d55f43e2-46df-4460-b17f-0daa75b89154/authentication-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication-operator_authentication-operator-7f5c659b84-6t92c_d55f43e2-46df-4460-b17f-0daa75b89154/authentication-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7f5c659b84-6t92c_d55f43e2-46df-4460-b17f-0daa75b89154/authentication-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress_router-default-68cf44c8b8-bqttx_1a9ac0b2-cad1-44fa-993c-0ae63193f086' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-68cf44c8b8-bqttx_1a9ac0b2-cad1-44fa-993c-0ae63193f086' 
'/ostree/deploy/rhcos/var/log/pods/openshift-ingress_router-default-68cf44c8b8-bqttx_1a9ac0b2-cad1-44fa-993c-0ae63193f086/router' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-68cf44c8b8-bqttx_1a9ac0b2-cad1-44fa-993c-0ae63193f086/router' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress_router-default-68cf44c8b8-bqttx_1a9ac0b2-cad1-44fa-993c-0ae63193f086/router/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-68cf44c8b8-bqttx_1a9ac0b2-cad1-44fa-993c-0ae63193f086/router/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication_oauth-openshift-6567f5ffdb-jrpfr_5b0a332f-52bd-409b-b5c0-f2723c617bed' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-6567f5ffdb-jrpfr_5b0a332f-52bd-409b-b5c0-f2723c617bed' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication_oauth-openshift-6567f5ffdb-jrpfr_5b0a332f-52bd-409b-b5c0-f2723c617bed/oauth-openshift' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-6567f5ffdb-jrpfr_5b0a332f-52bd-409b-b5c0-f2723c617bed/oauth-openshift' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication_oauth-openshift-6567f5ffdb-jrpfr_5b0a332f-52bd-409b-b5c0-f2723c617bed/oauth-openshift/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-6567f5ffdb-jrpfr_5b0a332f-52bd-409b-b5c0-f2723c617bed/oauth-openshift/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_revision-pruner-11-crc_0ad9be1e-b38d-4280-8a67-505c4461c55d' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-11-crc_0ad9be1e-b38d-4280-8a67-505c4461c55d' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_revision-pruner-11-crc_0ad9be1e-b38d-4280-8a67-505c4461c55d/pruner' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-11-crc_0ad9be1e-b38d-4280-8a67-505c4461c55d/pruner' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_revision-pruner-11-crc_0ad9be1e-b38d-4280-8a67-505c4461c55d/pruner/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-11-crc_0ad9be1e-b38d-4280-8a67-505c4461c55d/pruner/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g_c6b5aa8b-142f-4f74-a328-f0937a20672f' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g_c6b5aa8b-142f-4f74-a328-f0937a20672f' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g_c6b5aa8b-142f-4f74-a328-f0937a20672f/prometheus-operator-admission-webhook' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g_c6b5aa8b-142f-4f74-a328-f0937a20672f/prometheus-operator-admission-webhook' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g_c6b5aa8b-142f-4f74-a328-f0937a20672f/prometheus-operator-admission-webhook/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g_c6b5aa8b-142f-4f74-a328-f0937a20672f/prometheus-operator-admission-webhook/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75' 
'/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/routeoverride-cni' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/routeoverride-cni' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/routeoverride-cni/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/routeoverride-cni/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni-bincopy' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni-bincopy' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni-bincopy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni-bincopy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/egress-router-binary-copy' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/egress-router-binary-copy' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/egress-router-binary-copy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/egress-router-binary-copy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/kube-multus-additional-cni-plugins' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/kube-multus-additional-cni-plugins' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/kube-multus-additional-cni-plugins/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/kube-multus-additional-cni-plugins/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/cni-plugins' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/cni-plugins' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/cni-plugins/0.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/cni-plugins/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/bond-cni-plugin' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/bond-cni-plugin' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/bond-cni-plugin/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/bond-cni-plugin/0.log' '/ostree/deploy/rhcos/var/log/pods/cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt_50e025ff-2065-4156-844d-68d8587d7b6c' -> '/tmp/crc-logs-artifacts/pods/cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt_50e025ff-2065-4156-844d-68d8587d7b6c' '/ostree/deploy/rhcos/var/log/pods/cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt_50e025ff-2065-4156-844d-68d8587d7b6c/cert-manager-operator' -> '/tmp/crc-logs-artifacts/pods/cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt_50e025ff-2065-4156-844d-68d8587d7b6c/cert-manager-operator' '/ostree/deploy/rhcos/var/log/pods/cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt_50e025ff-2065-4156-844d-68d8587d7b6c/cert-manager-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt_50e025ff-2065-4156-844d-68d8587d7b6c/cert-manager-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/d25a5167e83c106fd6aae82bd4f1881d7b1012c90d8673c0eb50d806ecfe8a9d.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/d25a5167e83c106fd6aae82bd4f1881d7b1012c90d8673c0eb50d806ecfe8a9d.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator-watch' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator-watch' 
'/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator-watch/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator-watch/0.log' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-858d87f86b-r7f8q_7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-858d87f86b-r7f8q_7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-858d87f86b-r7f8q_7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc/cert-manager-controller' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-858d87f86b-r7f8q_7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc/cert-manager-controller' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-858d87f86b-r7f8q_7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc/cert-manager-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-858d87f86b-r7f8q_7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc/cert-manager-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-route-controller-manager_route-controller-manager-8fdcdbb66-mzfqh_952b1cf6-a983-4b00-bca6-24b95d6bff57' -> '/tmp/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-8fdcdbb66-mzfqh_952b1cf6-a983-4b00-bca6-24b95d6bff57' '/ostree/deploy/rhcos/var/log/pods/openshift-route-controller-manager_route-controller-manager-8fdcdbb66-mzfqh_952b1cf6-a983-4b00-bca6-24b95d6bff57/route-controller-manager' -> '/tmp/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-8fdcdbb66-mzfqh_952b1cf6-a983-4b00-bca6-24b95d6bff57/route-controller-manager' '/ostree/deploy/rhcos/var/log/pods/openshift-route-controller-manager_route-controller-manager-8fdcdbb66-mzfqh_952b1cf6-a983-4b00-bca6-24b95d6bff57/route-controller-manager/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-8fdcdbb66-mzfqh_952b1cf6-a983-4b00-bca6-24b95d6bff57/route-controller-manager/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097/machine-approver-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097/machine-approver-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097/machine-approver-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097/machine-approver-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097/kube-rbac-proxy' 
'/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console_console-64d44f6ddf-zhgm9_4651322b-9aec-4667-afa3-1602ad5176fe' -> '/tmp/crc-logs-artifacts/pods/openshift-console_console-64d44f6ddf-zhgm9_4651322b-9aec-4667-afa3-1602ad5176fe' '/ostree/deploy/rhcos/var/log/pods/openshift-console_console-64d44f6ddf-zhgm9_4651322b-9aec-4667-afa3-1602ad5176fe/console' -> '/tmp/crc-logs-artifacts/pods/openshift-console_console-64d44f6ddf-zhgm9_4651322b-9aec-4667-afa3-1602ad5176fe/console' '/ostree/deploy/rhcos/var/log/pods/openshift-console_console-64d44f6ddf-zhgm9_4651322b-9aec-4667-afa3-1602ad5176fe/console/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console_console-64d44f6ddf-zhgm9_4651322b-9aec-4667-afa3-1602ad5176fe/console/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_revision-pruner-6-crc_6e33370d-b952-4a48-a6cb-73e765546903' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_revision-pruner-6-crc_6e33370d-b952-4a48-a6cb-73e765546903' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_revision-pruner-6-crc_6e33370d-b952-4a48-a6cb-73e765546903/pruner' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_revision-pruner-6-crc_6e33370d-b952-4a48-a6cb-73e765546903/pruner' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_revision-pruner-6-crc_6e33370d-b952-4a48-a6cb-73e765546903/pruner/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_revision-pruner-6-crc_6e33370d-b952-4a48-a6cb-73e765546903/pruner/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/extract' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/extract' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/extract/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/extract/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/util' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/util' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/util/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/util/0.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/pull' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/pull' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/pull/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/pull/0.log' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt_c184b148-4467-4bd5-8204-6369360370ee' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt_c184b148-4467-4bd5-8204-6369360370ee' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt_c184b148-4467-4bd5-8204-6369360370ee/cert-manager-webhook' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt_c184b148-4467-4bd5-8204-6369360370ee/cert-manager-webhook' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt_c184b148-4467-4bd5-8204-6369360370ee/cert-manager-webhook/0.log' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt_c184b148-4467-4bd5-8204-6369360370ee/cert-manager-webhook/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console-operator_console-operator-67c89758df-5tw72_65efae24-6623-454c-b665-e5e407e86269' -> '/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-67c89758df-5tw72_65efae24-6623-454c-b665-e5e407e86269' '/ostree/deploy/rhcos/var/log/pods/openshift-console-operator_console-operator-67c89758df-5tw72_65efae24-6623-454c-b665-e5e407e86269/console-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-67c89758df-5tw72_65efae24-6623-454c-b665-e5e407e86269/console-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-console-operator_console-operator-67c89758df-5tw72_65efae24-6623-454c-b665-e5e407e86269/console-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-67c89758df-5tw72_65efae24-6623-454c-b665-e5e407e86269/console-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/machine-config-daemon' -> 
'/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/machine-config-daemon' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/machine-config-daemon/7.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/machine-config-daemon/7.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/machine-config-daemon/6.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/machine-config-daemon/6.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh_ab30f5e0-5097-4413-bb3e-fe8ca350378f' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh_ab30f5e0-5097-4413-bb3e-fe8ca350378f' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh_ab30f5e0-5097-4413-bb3e-fe8ca350378f/collect-profiles' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh_ab30f5e0-5097-4413-bb3e-fe8ca350378f/collect-profiles' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh_ab30f5e0-5097-4413-bb3e-fe8ca350378f/collect-profiles/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh_ab30f5e0-5097-4413-bb3e-fe8ca350378f/collect-profiles/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-server-nwxp2_62e07220-a49a-4989-8f0a-7eb7daf6fc61' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-nwxp2_62e07220-a49a-4989-8f0a-7eb7daf6fc61' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-server-nwxp2_62e07220-a49a-4989-8f0a-7eb7daf6fc61/machine-config-server' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-nwxp2_62e07220-a49a-4989-8f0a-7eb7daf6fc61/machine-config-server' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-server-nwxp2_62e07220-a49a-4989-8f0a-7eb7daf6fc61/machine-config-server/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-nwxp2_62e07220-a49a-4989-8f0a-7eb7daf6fc61/machine-config-server/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_marketplace-operator-547dbd544d-4vhrb_9a0e237f-ebef-42b0-ad96-926e15307914' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-547dbd544d-4vhrb_9a0e237f-ebef-42b0-ad96-926e15307914' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_marketplace-operator-547dbd544d-4vhrb_9a0e237f-ebef-42b0-ad96-926e15307914/marketplace-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-547dbd544d-4vhrb_9a0e237f-ebef-42b0-ad96-926e15307914/marketplace-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_marketplace-operator-547dbd544d-4vhrb_9a0e237f-ebef-42b0-ad96-926e15307914/marketplace-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-547dbd544d-4vhrb_9a0e237f-ebef-42b0-ad96-926e15307914/marketplace-operator/0.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_cluster-image-registry-operator-86c45576b9-sfm9v_5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-86c45576b9-sfm9v_5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_cluster-image-registry-operator-86c45576b9-sfm9v_5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9/cluster-image-registry-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-86c45576b9-sfm9v_5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9/cluster-image-registry-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_cluster-image-registry-operator-86c45576b9-sfm9v_5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9/cluster-image-registry-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-86c45576b9-sfm9v_5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9/cluster-image-registry-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca_service-ca-74545575db-gsm6t_6baa2db5-b688-47dd-8d81-7dadbbbd3759' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca_service-ca-74545575db-gsm6t_6baa2db5-b688-47dd-8d81-7dadbbbd3759' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca_service-ca-74545575db-gsm6t_6baa2db5-b688-47dd-8d81-7dadbbbd3759/service-ca-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca_service-ca-74545575db-gsm6t_6baa2db5-b688-47dd-8d81-7dadbbbd3759/service-ca-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca_service-ca-74545575db-gsm6t_6baa2db5-b688-47dd-8d81-7dadbbbd3759/service-ca-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca_service-ca-74545575db-gsm6t_6baa2db5-b688-47dd-8d81-7dadbbbd3759/service-ca-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/machine-api-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/machine-api-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/machine-api-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/machine-api-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8' -> 
'/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/extract-utilities' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/extract-utilities' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/extract-utilities/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/extract-utilities/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/extract-content' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/extract-content' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/extract-content/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/extract-content/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/registry-server' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/registry-server' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/registry-server/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/registry-server/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_node-ca-2xpcq_ab3d3198-2798-4180-aa5a-a0e495348125' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_node-ca-2xpcq_ab3d3198-2798-4180-aa5a-a0e495348125' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_node-ca-2xpcq_ab3d3198-2798-4180-aa5a-a0e495348125/node-ca' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_node-ca-2xpcq_ab3d3198-2798-4180-aa5a-a0e495348125/node-ca' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_node-ca-2xpcq_ab3d3198-2798-4180-aa5a-a0e495348125/node-ca/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_node-ca-2xpcq_ab3d3198-2798-4180-aa5a-a0e495348125/node-ca/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver-operator_openshift-apiserver-operator-846cbfc458-zf8cv_e0a1decf-4248-4f48-ba06-e9ec8fdbbea8' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-846cbfc458-zf8cv_e0a1decf-4248-4f48-ba06-e9ec8fdbbea8' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver-operator_openshift-apiserver-operator-846cbfc458-zf8cv_e0a1decf-4248-4f48-ba06-e9ec8fdbbea8/openshift-apiserver-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-846cbfc458-zf8cv_e0a1decf-4248-4f48-ba06-e9ec8fdbbea8/openshift-apiserver-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver-operator_openshift-apiserver-operator-846cbfc458-zf8cv_e0a1decf-4248-4f48-ba06-e9ec8fdbbea8/openshift-apiserver-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-846cbfc458-zf8cv_e0a1decf-4248-4f48-ba06-e9ec8fdbbea8/openshift-apiserver-operator/0.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/machine-config-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/machine-config-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/machine-config-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/machine-config-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-575994946d-wff8v_22a6a238-12c9-43ae-afbc-f9595d46e727' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-575994946d-wff8v_22a6a238-12c9-43ae-afbc-f9595d46e727' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-575994946d-wff8v_22a6a238-12c9-43ae-afbc-f9595d46e727/kube-apiserver-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-575994946d-wff8v_22a6a238-12c9-43ae-afbc-f9595d46e727/kube-apiserver-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-575994946d-wff8v_22a6a238-12c9-43ae-afbc-f9595d46e727/kube-apiserver-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-575994946d-wff8v_22a6a238-12c9-43ae-afbc-f9595d46e727/kube-apiserver-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-69d5f845f8-nsdgk_4c111429-5512-4d9c-898b-d3ec0bdb5d08' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-69d5f845f8-nsdgk_4c111429-5512-4d9c-898b-d3ec0bdb5d08' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-69d5f845f8-nsdgk_4c111429-5512-4d9c-898b-d3ec0bdb5d08/kube-controller-manager-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-69d5f845f8-nsdgk_4c111429-5512-4d9c-898b-d3ec0bdb5d08/kube-controller-manager-operator' 
'/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-69d5f845f8-nsdgk_4c111429-5512-4d9c-898b-d3ec0bdb5d08/kube-controller-manager-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-69d5f845f8-nsdgk_4c111429-5512-4d9c-898b-d3ec0bdb5d08/kube-controller-manager-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_catalog-operator-75ff9f647d-4v9cj_5a94df8d-2607-41a1-b1f9-21016895dcd6' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-75ff9f647d-4v9cj_5a94df8d-2607-41a1-b1f9-21016895dcd6' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_catalog-operator-75ff9f647d-4v9cj_5a94df8d-2607-41a1-b1f9-21016895dcd6/catalog-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-75ff9f647d-4v9cj_5a94df8d-2607-41a1-b1f9-21016895dcd6/catalog-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_catalog-operator-75ff9f647d-4v9cj_5a94df8d-2607-41a1-b1f9-21016895dcd6/catalog-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-75ff9f647d-4v9cj_5a94df8d-2607-41a1-b1f9-21016895dcd6/catalog-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca-operator_service-ca-operator-5b9c976747-9wbcx_8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-5b9c976747-9wbcx_8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca-operator_service-ca-operator-5b9c976747-9wbcx_8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7/service-ca-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-5b9c976747-9wbcx_8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7/service-ca-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca-operator_service-ca-operator-5b9c976747-9wbcx_8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7/service-ca-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-5b9c976747-9wbcx_8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7/service-ca-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2/kube-multus' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2/kube-multus' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2/kube-multus/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2/kube-multus/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2/kube-multus/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2/kube-multus/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27' -> '/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27' 
'/ostree/deploy/rhcos/var/log/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/oauth-apiserver' -> '/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/oauth-apiserver' '/ostree/deploy/rhcos/var/log/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/oauth-apiserver/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/oauth-apiserver/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/fix-audit-permissions' -> '/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/fix-audit-permissions' '/ostree/deploy/rhcos/var/log/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/fix-audit-permissions/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/fix-audit-permissions/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-canary_ingress-canary-tqcqf_47102097-389c-44ce-a25f-6b8d25a70e1d' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-tqcqf_47102097-389c-44ce-a25f-6b8d25a70e1d' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-canary_ingress-canary-tqcqf_47102097-389c-44ce-a25f-6b8d25a70e1d/serve-healthcheck-canary' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-tqcqf_47102097-389c-44ce-a25f-6b8d25a70e1d/serve-healthcheck-canary' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-canary_ingress-canary-tqcqf_47102097-389c-44ce-a25f-6b8d25a70e1d/serve-healthcheck-canary/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-tqcqf_47102097-389c-44ce-a25f-6b8d25a70e1d/serve-healthcheck-canary/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_observability-operator-78c97476f4-qxqmn_9425bd1f-c734-4ec0-9e2e-80b2d5ece709' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_observability-operator-78c97476f4-qxqmn_9425bd1f-c734-4ec0-9e2e-80b2d5ece709' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_observability-operator-78c97476f4-qxqmn_9425bd1f-c734-4ec0-9e2e-80b2d5ece709/operator' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_observability-operator-78c97476f4-qxqmn_9425bd1f-c734-4ec0-9e2e-80b2d5ece709/operator' '/ostree/deploy/rhcos/var/log/pods/openshift-operators_observability-operator-78c97476f4-qxqmn_9425bd1f-c734-4ec0-9e2e-80b2d5ece709/operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operators_observability-operator-78c97476f4-qxqmn_9425bd1f-c734-4ec0-9e2e-80b2d5ece709/operator/0.log' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/liveness-probe' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/liveness-probe' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/liveness-probe/0.log' -> 
'/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/liveness-probe/0.log' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/hostpath-provisioner' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/hostpath-provisioner' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/hostpath-provisioner/0.log' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/hostpath-provisioner/0.log' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/node-driver-registrar' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/node-driver-registrar' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/node-driver-registrar/0.log' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/node-driver-registrar/0.log' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/csi-provisioner' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/csi-provisioner' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/csi-provisioner/0.log' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/csi-provisioner/0.log' + sudo chown -R core:core /tmp/crc-logs-artifacts home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_000_copy_logs_from_crc.log0000644000175000017500000002436715117043044030333 0ustar zuulzuulExecuting: program /usr/bin/ssh host api.crc.testing, user core, command sftp OpenSSH_9.9p1, OpenSSL 3.5.1 1 Jul 2025 debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug1: re-parsing configuration debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: Connecting to api.crc.testing [38.102.83.180] port 22. debug1: Connection established. 
debug1: identity file /home/zuul/.ssh/id_cifw type 2 debug1: identity file /home/zuul/.ssh/id_cifw-cert type -1 debug1: Local version string SSH-2.0-OpenSSH_9.9 debug1: Remote protocol version 2.0, remote software version OpenSSH_8.7 debug1: compat_banner: match: OpenSSH_8.7 pat OpenSSH* compat 0x04000000 debug1: Authenticating to api.crc.testing:22 as 'core' debug1: load_hostkeys: fopen /home/zuul/.ssh/known_hosts2: No such file or directory debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory debug1: SSH2_MSG_KEXINIT sent debug1: SSH2_MSG_KEXINIT received debug1: kex: algorithm: curve25519-sha256 debug1: kex: host key algorithm: ssh-ed25519 debug1: kex: server->client cipher: aes256-gcm@openssh.com MAC: compression: none debug1: kex: client->server cipher: aes256-gcm@openssh.com MAC: compression: none debug1: kex: curve25519-sha256 need=32 dh_need=32 debug1: kex: curve25519-sha256 need=32 dh_need=32 debug1: expecting SSH2_MSG_KEX_ECDH_REPLY debug1: SSH2_MSG_KEX_ECDH_REPLY received debug1: Server host key: ssh-ed25519 SHA256:A0lb0O1JPp4Iq9jOMsE3TeugznZjTtLaq8wuHo7RgHg debug1: load_hostkeys: fopen /home/zuul/.ssh/known_hosts2: No such file or directory debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory debug1: Host 'api.crc.testing' is known and matches the ED25519 host key. debug1: Found key in /home/zuul/.ssh/known_hosts:22 debug1: ssh_packet_send2_wrapped: resetting send seqnr 3 debug1: rekey out after 4294967296 blocks debug1: SSH2_MSG_NEWKEYS sent debug1: expecting SSH2_MSG_NEWKEYS debug1: ssh_packet_read_poll2: resetting read seqnr 3 debug1: SSH2_MSG_NEWKEYS received debug1: rekey in after 4294967296 blocks debug1: SSH2_MSG_EXT_INFO received debug1: kex_ext_info_client_parse: server-sig-algs= debug1: SSH2_MSG_SERVICE_ACCEPT received debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic debug1: Next authentication method: gssapi-with-mic debug1: No credentials were supplied, or the credentials were unavailable or inaccessible No Kerberos credentials available (default cache: KCM:) debug1: No credentials were supplied, or the credentials were unavailable or inaccessible No Kerberos credentials available (default cache: KCM:) debug1: Next authentication method: publickey debug1: Will attempt key: /home/zuul/.ssh/id_cifw ECDSA SHA256:2V+JH/ejZX36R4k1D7YAgRx0/xTEZYlqiiujSZoZf2w explicit debug1: Offering public key: /home/zuul/.ssh/id_cifw ECDSA SHA256:2V+JH/ejZX36R4k1D7YAgRx0/xTEZYlqiiujSZoZf2w explicit debug1: Server accepts key: /home/zuul/.ssh/id_cifw ECDSA SHA256:2V+JH/ejZX36R4k1D7YAgRx0/xTEZYlqiiujSZoZf2w explicit Authenticated to api.crc.testing ([38.102.83.180]:22) using "publickey". debug1: pkcs11_del_provider: called, provider_id = (null) debug1: channel 0: new session [client-session] (inactive timeout: 0) debug1: Requesting no-more-sessions@openssh.com debug1: Entering interactive session. 
debug1: pledge: filesystem debug1: client_input_global_request: rtype hostkeys-00@openssh.com want_reply 0 debug1: client_input_hostkeys: searching /home/zuul/.ssh/known_hosts for api.crc.testing / (none) debug1: client_input_hostkeys: searching /home/zuul/.ssh/known_hosts2 for api.crc.testing / (none) debug1: client_input_hostkeys: hostkeys file /home/zuul/.ssh/known_hosts2 does not exist debug1: client_input_hostkeys: no new or deprecated keys from server debug1: Remote: /var/home/core/.ssh/authorized_keys:28: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding debug1: Remote: /var/home/core/.ssh/authorized_keys:28: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding debug1: Sending subsystem: sftp debug1: pledge: fork scp: debug1: Fetching /tmp/crc-logs-artifacts/ to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts scp: debug1: truncating at 108623 scp: debug1: truncating at 1504 scp: debug1: truncating at 45038 scp: debug1: truncating at 396 scp: debug1: truncating at 22494 scp: debug1: truncating at 2210 scp: debug1: truncating at 0 scp: debug1: truncating at 53910 scp: debug1: truncating at 123064 scp: debug1: truncating at 271271 scp: debug1: truncating at 48830 scp: debug1: truncating at 1061942 scp: debug1: truncating at 146104 scp: debug1: truncating at 265784 scp: debug1: truncating at 1212 scp: debug1: truncating at 706475 scp: debug1: truncating at 95225 scp: debug1: truncating at 16416 scp: debug1: truncating at 4358794 scp: debug1: truncating at 0 scp: debug1: truncating at 0 scp: debug1: truncating at 18275 scp: debug1: truncating at 8366 scp: debug1: truncating at 42871 scp: debug1: truncating at 44575 scp: debug1: truncating at 14467 scp: debug1: truncating at 1040 scp: debug1: truncating at 736 scp: debug1: truncating at 75448 scp: debug1: truncating at 67331 scp: debug1: truncating at 1212 scp: debug1: truncating at 122131 scp: debug1: truncating at 4017 scp: debug1: truncating at 2578 scp: debug1: truncating at 71 scp: debug1: truncating at 5285 scp: debug1: truncating at 1900 scp: debug1: truncating at 2901 scp: debug1: truncating at 8366 scp: debug1: truncating at 67429 scp: debug1: truncating at 22758 scp: debug1: truncating at 9393 scp: debug1: truncating at 664 scp: debug1: truncating at 0 scp: debug1: truncating at 83322 scp: debug1: truncating at 68096 scp: debug1: truncating at 392 scp: debug1: truncating at 404 scp: debug1: truncating at 0 scp: debug1: truncating at 80 scp: debug1: truncating at 414 scp: debug1: truncating at 408 scp: debug1: truncating at 411 scp: debug1: truncating at 1061 scp: debug1: truncating at 1902 scp: debug1: truncating at 16184 scp: debug1: truncating at 17092 scp: debug1: truncating at 445913 scp: debug1: truncating at 96 scp: debug1: truncating at 0 scp: debug1: truncating at 0 scp: debug1: truncating at 2349472 scp: debug1: truncating at 1183 scp: debug1: truncating at 40429 scp: debug1: truncating at 70432 scp: debug1: truncating at 1212 scp: debug1: truncating at 62917 scp: debug1: truncating at 1956 scp: debug1: truncating at 71 scp: debug1: truncating at 2197 scp: debug1: truncating at 1376 scp: debug1: truncating at 101 scp: debug1: truncating at 1276 scp: debug1: truncating at 1040 scp: debug1: truncating at 779 scp: debug1: truncating at 1875 scp: debug1: truncating at 72 scp: debug1: truncating at 19178 scp: debug1: truncating at 64362 scp: debug1: truncating at 17470 scp: debug1: truncating at 61 scp: debug1: truncating at 1959 scp: debug1: truncating at 133690 scp: 
debug1: truncating at 5847 scp: debug1: truncating at 14141 scp: debug1: truncating at 1040 scp: debug1: truncating at 123379 scp: debug1: truncating at 210780 scp: debug1: truncating at 121266 scp: debug1: truncating at 29932 scp: debug1: truncating at 0 scp: debug1: truncating at 35708 scp: debug1: truncating at 7997 scp: debug1: truncating at 1054 scp: debug1: truncating at 0 scp: debug1: truncating at 0 scp: debug1: truncating at 3920400 scp: debug1: truncating at 116 scp: debug1: truncating at 19611 scp: debug1: truncating at 8140 scp: debug1: truncating at 641292 scp: debug1: truncating at 265 scp: debug1: truncating at 17986 scp: debug1: truncating at 0 scp: debug1: truncating at 35930 scp: debug1: truncating at 4640 scp: debug1: truncating at 4680 scp: debug1: truncating at 30662 scp: debug1: truncating at 2311284 scp: debug1: truncating at 38714 scp: debug1: truncating at 2357 scp: debug1: truncating at 0 scp: debug1: truncating at 2425 scp: debug1: truncating at 4519 scp: debug1: truncating at 309969 scp: debug1: truncating at 13419 scp: debug1: truncating at 151481 scp: debug1: truncating at 150323 scp: debug1: truncating at 7803 scp: debug1: truncating at 85 scp: debug1: truncating at 9629 scp: debug1: truncating at 1040 scp: debug1: truncating at 48631 scp: debug1: truncating at 39907 scp: debug1: truncating at 1040 scp: debug1: truncating at 65290 scp: debug1: truncating at 4085430 scp: debug1: truncating at 3698 scp: debug1: truncating at 12433 scp: debug1: truncating at 120 scp: debug1: truncating at 295194 scp: debug1: truncating at 274009 scp: debug1: truncating at 16776 scp: debug1: truncating at 973677 scp: debug1: truncating at 1091373 scp: debug1: truncating at 15167 scp: debug1: truncating at 273 scp: debug1: truncating at 71 scp: debug1: truncating at 71478 scp: debug1: truncating at 2146 scp: debug1: truncating at 865650 scp: debug1: truncating at 3422 scp: debug1: truncating at 1060 scp: debug1: truncating at 10539 scp: debug1: truncating at 0 scp: debug1: truncating at 52753 scp: debug1: truncating at 240 scp: debug1: truncating at 0 scp: debug1: truncating at 0 scp: debug1: truncating at 17964 scp: debug1: truncating at 156 scp: debug1: truncating at 124 scp: debug1: truncating at 0 scp: debug1: truncating at 29005 scp: debug1: truncating at 48279 scp: debug1: truncating at 1040 scp: debug1: truncating at 32441 debug1: client_input_channel_req: channel 0 rtype exit-status reply 0 debug1: channel 0: free: client-session, nchannels 1 Transferred: sent 129540, received 27774408 bytes, in 1.2 seconds Bytes per second: sent 104526.3, received 22411266.6 debug1: Exit status 0 home/zuul/zuul-output/logs/ci-framework-data/logs/ansible.log0000644000175000017500000046102515117043062023456 0ustar zuulzuul2025-12-12 16:23:25,733 p=30999 u=zuul n=ansible | Starting galaxy collection install process 2025-12-12 16:23:25,735 p=30999 u=zuul n=ansible | Process install dependency map 2025-12-12 16:23:42,274 p=30999 u=zuul n=ansible | Starting collection install process 2025-12-12 16:23:42,274 p=30999 u=zuul n=ansible | Installing 'cifmw.general:1.0.0+b9f05e2b' to '/home/zuul/.ansible/collections/ansible_collections/cifmw/general' 2025-12-12 16:23:42,762 p=30999 u=zuul n=ansible | Created collection for cifmw.general:1.0.0+b9f05e2b at /home/zuul/.ansible/collections/ansible_collections/cifmw/general 2025-12-12 16:23:42,762 p=30999 u=zuul n=ansible | cifmw.general:1.0.0+b9f05e2b was installed successfully 2025-12-12 16:23:42,762 p=30999 u=zuul n=ansible | Installing 
'containers.podman:1.16.2' to '/home/zuul/.ansible/collections/ansible_collections/containers/podman' 2025-12-12 16:23:42,816 p=30999 u=zuul n=ansible | Created collection for containers.podman:1.16.2 at /home/zuul/.ansible/collections/ansible_collections/containers/podman 2025-12-12 16:23:42,816 p=30999 u=zuul n=ansible | containers.podman:1.16.2 was installed successfully 2025-12-12 16:23:42,816 p=30999 u=zuul n=ansible | Installing 'community.general:10.0.1' to '/home/zuul/.ansible/collections/ansible_collections/community/general' 2025-12-12 16:23:43,563 p=30999 u=zuul n=ansible | Created collection for community.general:10.0.1 at /home/zuul/.ansible/collections/ansible_collections/community/general 2025-12-12 16:23:43,563 p=30999 u=zuul n=ansible | community.general:10.0.1 was installed successfully 2025-12-12 16:23:43,563 p=30999 u=zuul n=ansible | Installing 'ansible.posix:1.6.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/posix' 2025-12-12 16:23:43,612 p=30999 u=zuul n=ansible | Created collection for ansible.posix:1.6.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/posix 2025-12-12 16:23:43,612 p=30999 u=zuul n=ansible | ansible.posix:1.6.2 was installed successfully 2025-12-12 16:23:43,612 p=30999 u=zuul n=ansible | Installing 'ansible.utils:5.1.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/utils' 2025-12-12 16:23:43,705 p=30999 u=zuul n=ansible | Created collection for ansible.utils:5.1.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/utils 2025-12-12 16:23:43,705 p=30999 u=zuul n=ansible | ansible.utils:5.1.2 was installed successfully 2025-12-12 16:23:43,705 p=30999 u=zuul n=ansible | Installing 'community.libvirt:1.3.0' to '/home/zuul/.ansible/collections/ansible_collections/community/libvirt' 2025-12-12 16:23:43,728 p=30999 u=zuul n=ansible | Created collection for community.libvirt:1.3.0 at /home/zuul/.ansible/collections/ansible_collections/community/libvirt 2025-12-12 16:23:43,728 p=30999 u=zuul n=ansible | community.libvirt:1.3.0 was installed successfully 2025-12-12 16:23:43,728 p=30999 u=zuul n=ansible | Installing 'community.crypto:2.22.3' to '/home/zuul/.ansible/collections/ansible_collections/community/crypto' 2025-12-12 16:23:43,864 p=30999 u=zuul n=ansible | Created collection for community.crypto:2.22.3 at /home/zuul/.ansible/collections/ansible_collections/community/crypto 2025-12-12 16:23:43,864 p=30999 u=zuul n=ansible | community.crypto:2.22.3 was installed successfully 2025-12-12 16:23:43,864 p=30999 u=zuul n=ansible | Installing 'kubernetes.core:5.0.0' to '/home/zuul/.ansible/collections/ansible_collections/kubernetes/core' 2025-12-12 16:23:43,977 p=30999 u=zuul n=ansible | Created collection for kubernetes.core:5.0.0 at /home/zuul/.ansible/collections/ansible_collections/kubernetes/core 2025-12-12 16:23:43,977 p=30999 u=zuul n=ansible | kubernetes.core:5.0.0 was installed successfully 2025-12-12 16:23:43,977 p=30999 u=zuul n=ansible | Installing 'ansible.netcommon:7.1.0' to '/home/zuul/.ansible/collections/ansible_collections/ansible/netcommon' 2025-12-12 16:23:44,044 p=30999 u=zuul n=ansible | Created collection for ansible.netcommon:7.1.0 at /home/zuul/.ansible/collections/ansible_collections/ansible/netcommon 2025-12-12 16:23:44,044 p=30999 u=zuul n=ansible | ansible.netcommon:7.1.0 was installed successfully 2025-12-12 16:23:44,044 p=30999 u=zuul n=ansible | Installing 'openstack.config_template:2.1.1' to 
'/home/zuul/.ansible/collections/ansible_collections/openstack/config_template' 2025-12-12 16:23:44,061 p=30999 u=zuul n=ansible | Created collection for openstack.config_template:2.1.1 at /home/zuul/.ansible/collections/ansible_collections/openstack/config_template 2025-12-12 16:23:44,061 p=30999 u=zuul n=ansible | openstack.config_template:2.1.1 was installed successfully 2025-12-12 16:23:44,061 p=30999 u=zuul n=ansible | Installing 'junipernetworks.junos:9.1.0' to '/home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos' 2025-12-12 16:23:44,296 p=30999 u=zuul n=ansible | Created collection for junipernetworks.junos:9.1.0 at /home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos 2025-12-12 16:23:44,297 p=30999 u=zuul n=ansible | junipernetworks.junos:9.1.0 was installed successfully 2025-12-12 16:23:44,297 p=30999 u=zuul n=ansible | Installing 'cisco.ios:9.0.3' to '/home/zuul/.ansible/collections/ansible_collections/cisco/ios' 2025-12-12 16:23:44,578 p=30999 u=zuul n=ansible | Created collection for cisco.ios:9.0.3 at /home/zuul/.ansible/collections/ansible_collections/cisco/ios 2025-12-12 16:23:44,578 p=30999 u=zuul n=ansible | cisco.ios:9.0.3 was installed successfully 2025-12-12 16:23:44,578 p=30999 u=zuul n=ansible | Installing 'mellanox.onyx:1.0.0' to '/home/zuul/.ansible/collections/ansible_collections/mellanox/onyx' 2025-12-12 16:23:44,612 p=30999 u=zuul n=ansible | Created collection for mellanox.onyx:1.0.0 at /home/zuul/.ansible/collections/ansible_collections/mellanox/onyx 2025-12-12 16:23:44,612 p=30999 u=zuul n=ansible | mellanox.onyx:1.0.0 was installed successfully 2025-12-12 16:23:44,613 p=30999 u=zuul n=ansible | Installing 'community.okd:4.0.0' to '/home/zuul/.ansible/collections/ansible_collections/community/okd' 2025-12-12 16:23:44,647 p=30999 u=zuul n=ansible | Created collection for community.okd:4.0.0 at /home/zuul/.ansible/collections/ansible_collections/community/okd 2025-12-12 16:23:44,647 p=30999 u=zuul n=ansible | community.okd:4.0.0 was installed successfully 2025-12-12 16:23:44,647 p=30999 u=zuul n=ansible | Installing '@NAMESPACE@.@NAME@:3.1.4' to '/home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@' 2025-12-12 16:23:44,740 p=30999 u=zuul n=ansible | Created collection for @NAMESPACE@.@NAME@:3.1.4 at /home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@ 2025-12-12 16:23:44,740 p=30999 u=zuul n=ansible | @NAMESPACE@.@NAME@:3.1.4 was installed successfully 2025-12-12 16:23:52,472 p=31591 u=zuul n=ansible | PLAY [Bootstrap playbook] ****************************************************** 2025-12-12 16:23:52,489 p=31591 u=zuul n=ansible | TASK [Gathering Facts ] ******************************************************** 2025-12-12 16:23:52,489 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:52 +0000 (0:00:00.034) 0:00:00.034 ******* 2025-12-12 16:23:52,489 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:52 +0000 (0:00:00.033) 0:00:00.033 ******* 2025-12-12 16:23:53,532 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:53,551 p=31591 u=zuul n=ansible | TASK [Set custom cifmw PATH reusable fact cifmw_path={{ ansible_user_dir }}/.crc/bin:{{ ansible_user_dir }}/.crc/bin/oc:{{ ansible_user_dir }}/bin:{{ ansible_env.PATH }}, cacheable=True] *** 2025-12-12 16:23:53,551 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:01.062) 0:00:01.096 ******* 2025-12-12 16:23:53,551 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:01.062) 
0:00:01.095 ******* 2025-12-12 16:23:53,579 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:53,586 p=31591 u=zuul n=ansible | TASK [Get customized parameters ci_framework_params={{ hostvars[inventory_hostname] | dict2items | selectattr("key", "match", "^(cifmw|pre|post)_(?!install_yamls|openshift_token|openshift_login|openshift_kubeconfig).*") | list | items2dict }}] *** 2025-12-12 16:23:53,587 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:00.035) 0:00:01.132 ******* 2025-12-12 16:23:53,587 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:00.035) 0:00:01.130 ******* 2025-12-12 16:23:53,650 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:53,657 p=31591 u=zuul n=ansible | TASK [install_ca : Ensure target directory exists path={{ cifmw_install_ca_trust_dir }}, state=directory, mode=0755] *** 2025-12-12 16:23:53,657 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:00.070) 0:00:01.202 ******* 2025-12-12 16:23:53,657 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:53 +0000 (0:00:00.070) 0:00:01.201 ******* 2025-12-12 16:23:54,401 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:54,411 p=31591 u=zuul n=ansible | TASK [install_ca : Install internal CA from url url={{ cifmw_install_ca_url }}, dest={{ cifmw_install_ca_trust_dir }}, validate_certs={{ cifmw_install_ca_url_validate_certs | default(omit) }}, mode=0644] *** 2025-12-12 16:23:54,411 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.753) 0:00:01.956 ******* 2025-12-12 16:23:54,411 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.754) 0:00:01.955 ******* 2025-12-12 16:23:54,441 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:23:54,452 p=31591 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from inline dest={{ cifmw_install_ca_trust_dir }}/cifmw_inline_ca_bundle.crt, content={{ cifmw_install_ca_bundle_inline }}, mode=0644] *** 2025-12-12 16:23:54,452 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.041) 0:00:01.998 ******* 2025-12-12 16:23:54,453 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.041) 0:00:01.996 ******* 2025-12-12 16:23:54,490 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:23:54,502 p=31591 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from file dest={{ cifmw_install_ca_trust_dir }}/{{ cifmw_install_ca_bundle_src | basename }}, src={{ cifmw_install_ca_bundle_src }}, mode=0644] *** 2025-12-12 16:23:54,502 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.049) 0:00:02.048 ******* 2025-12-12 16:23:54,503 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.049) 0:00:02.046 ******* 2025-12-12 16:23:54,528 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:23:54,544 p=31591 u=zuul n=ansible | TASK [install_ca : Update ca bundle _raw_params=update-ca-trust] *************** 2025-12-12 16:23:54,544 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.041) 0:00:02.089 ******* 2025-12-12 16:23:54,544 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:54 +0000 (0:00:00.041) 0:00:02.088 ******* 2025-12-12 16:23:56,110 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:23:56,123 p=31591 u=zuul n=ansible | TASK [repo_setup : Ensure directories are present path={{ cifmw_repo_setup_basedir }}/{{ item }}, state=directory, mode=0755] *** 2025-12-12 16:23:56,123 p=31591 
u=zuul n=ansible | Friday 12 December 2025 16:23:56 +0000 (0:00:01.579) 0:00:03.668 ******* 2025-12-12 16:23:56,123 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:56 +0000 (0:00:01.579) 0:00:03.667 ******* 2025-12-12 16:23:56,315 p=31591 u=zuul n=ansible | changed: [localhost] => (item=tmp) 2025-12-12 16:23:56,501 p=31591 u=zuul n=ansible | changed: [localhost] => (item=artifacts/repositories) 2025-12-12 16:23:56,672 p=31591 u=zuul n=ansible | changed: [localhost] => (item=venv/repo_setup) 2025-12-12 16:23:56,687 p=31591 u=zuul n=ansible | TASK [repo_setup : Make sure git-core package is installed name=git-core, state=present] *** 2025-12-12 16:23:56,687 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:56 +0000 (0:00:00.563) 0:00:04.232 ******* 2025-12-12 16:23:56,687 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:56 +0000 (0:00:00.563) 0:00:04.231 ******* 2025-12-12 16:23:58,223 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:23:58,229 p=31591 u=zuul n=ansible | TASK [repo_setup : Get repo-setup repository accept_hostkey=True, dest={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, repo={{ cifmw_repo_setup_src }}] *** 2025-12-12 16:23:58,229 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:58 +0000 (0:00:01.541) 0:00:05.774 ******* 2025-12-12 16:23:58,229 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:58 +0000 (0:00:01.541) 0:00:05.773 ******* 2025-12-12 16:23:59,641 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:23:59,650 p=31591 u=zuul n=ansible | TASK [repo_setup : Initialize python venv and install requirements virtualenv={{ cifmw_repo_setup_venv }}, requirements={{ cifmw_repo_setup_basedir }}/tmp/repo-setup/requirements.txt, virtualenv_command=python3 -m venv --system-site-packages --upgrade-deps] *** 2025-12-12 16:23:59,650 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:59 +0000 (0:00:01.421) 0:00:07.195 ******* 2025-12-12 16:23:59,650 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:23:59 +0000 (0:00:01.421) 0:00:07.194 ******* 2025-12-12 16:24:08,242 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:08,257 p=31591 u=zuul n=ansible | TASK [repo_setup : Install repo-setup package chdir={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, creates={{ cifmw_repo_setup_venv }}/bin/repo-setup, _raw_params={{ cifmw_repo_setup_venv }}/bin/python setup.py install] *** 2025-12-12 16:24:08,257 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:08 +0000 (0:00:08.606) 0:00:15.802 ******* 2025-12-12 16:24:08,257 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:08 +0000 (0:00:08.606) 0:00:15.801 ******* 2025-12-12 16:24:09,064 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:09,072 p=31591 u=zuul n=ansible | TASK [repo_setup : Set cifmw_repo_setup_dlrn_hash_tag from content provider cifmw_repo_setup_dlrn_hash_tag={{ content_provider_dlrn_md5_hash }}] *** 2025-12-12 16:24:09,073 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.815) 0:00:16.618 ******* 2025-12-12 16:24:09,073 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.815) 0:00:16.616 ******* 2025-12-12 16:24:09,103 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:09,110 p=31591 u=zuul n=ansible | TASK [repo_setup : Run repo-setup _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup {{ cifmw_repo_setup_promotion }} {{ cifmw_repo_setup_additional_repos }} -d {{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} -b {{ 
cifmw_repo_setup_branch }} --rdo-mirror {{ cifmw_repo_setup_rdo_mirror }} {% if cifmw_repo_setup_dlrn_hash_tag | length > 0 %} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif %} -o {{ cifmw_repo_setup_output }}] *** 2025-12-12 16:24:09,110 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.037) 0:00:16.655 ******* 2025-12-12 16:24:09,110 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.037) 0:00:16.654 ******* 2025-12-12 16:24:09,757 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:09,771 p=31591 u=zuul n=ansible | TASK [repo_setup : Get component repo url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/component/{{ cifmw_repo_setup_component_name }}/{{ cifmw_repo_setup_component_promotion_tag }}/delorean.repo, dest={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, mode=0644] *** 2025-12-12 16:24:09,772 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.661) 0:00:17.317 ******* 2025-12-12 16:24:09,772 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.661) 0:00:17.315 ******* 2025-12-12 16:24:09,801 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:09,807 p=31591 u=zuul n=ansible | TASK [repo_setup : Rename component repo path={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, regexp=delorean-component-{{ cifmw_repo_setup_component_name }}, replace={{ cifmw_repo_setup_component_name }}-{{ cifmw_repo_setup_component_promotion_tag }}] *** 2025-12-12 16:24:09,808 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.036) 0:00:17.353 ******* 2025-12-12 16:24:09,808 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.036) 0:00:17.351 ******* 2025-12-12 16:24:09,834 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:09,840 p=31591 u=zuul n=ansible | TASK [repo_setup : Disable component repo in current-podified dlrn repo path={{ cifmw_repo_setup_output }}/delorean.repo, section=delorean-component-{{ cifmw_repo_setup_component_name }}, option=enabled, value=0, mode=0644] *** 2025-12-12 16:24:09,840 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.032) 0:00:17.385 ******* 2025-12-12 16:24:09,840 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.032) 0:00:17.384 ******* 2025-12-12 16:24:09,870 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:09,878 p=31591 u=zuul n=ansible | TASK [repo_setup : Run repo-setup-get-hash _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup-get-hash --dlrn-url {{ cifmw_repo_setup_dlrn_uri[:-1] }} --os-version {{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} --release {{ cifmw_repo_setup_branch }} {% if cifmw_repo_setup_component_name | length > 0 -%} --component {{ cifmw_repo_setup_component_name }} --tag {{ cifmw_repo_setup_component_promotion_tag }} {% else -%} --tag {{cifmw_repo_setup_promotion }} {% endif -%} {% if (cifmw_repo_setup_dlrn_hash_tag | length > 0) and (cifmw_repo_setup_component_name | length <= 0) -%} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif -%} --json] *** 2025-12-12 16:24:09,878 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.038) 0:00:17.423 ******* 2025-12-12 
16:24:09,878 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:09 +0000 (0:00:00.038) 0:00:17.422 ******* 2025-12-12 16:24:10,353 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:10,368 p=31591 u=zuul n=ansible | TASK [repo_setup : Dump full hash in delorean.repo.md5 file content={{ _repo_setup_json['full_hash'] }} , dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2025-12-12 16:24:10,368 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:10 +0000 (0:00:00.490) 0:00:17.913 ******* 2025-12-12 16:24:10,369 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:10 +0000 (0:00:00.490) 0:00:17.912 ******* 2025-12-12 16:24:11,049 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:11,058 p=31591 u=zuul n=ansible | TASK [repo_setup : Dump current-podified hash url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/current-podified/delorean.repo.md5, dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2025-12-12 16:24:11,058 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.689) 0:00:18.603 ******* 2025-12-12 16:24:11,058 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.689) 0:00:18.602 ******* 2025-12-12 16:24:11,076 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,085 p=31591 u=zuul n=ansible | TASK [repo_setup : Slurp current podified hash src={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5] *** 2025-12-12 16:24:11,085 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.026) 0:00:18.630 ******* 2025-12-12 16:24:11,085 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.026) 0:00:18.629 ******* 2025-12-12 16:24:11,104 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,112 p=31591 u=zuul n=ansible | TASK [repo_setup : Update the value of full_hash _repo_setup_json={{ _repo_setup_json | combine({'full_hash': _hash}, recursive=true) }}] *** 2025-12-12 16:24:11,112 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.657 ******* 2025-12-12 16:24:11,112 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.656 ******* 2025-12-12 16:24:11,131 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,140 p=31591 u=zuul n=ansible | TASK [repo_setup : Export hashes facts for further use cifmw_repo_setup_full_hash={{ _repo_setup_json['full_hash'] }}, cifmw_repo_setup_commit_hash={{ _repo_setup_json['commit_hash'] }}, cifmw_repo_setup_distro_hash={{ _repo_setup_json['distro_hash'] }}, cifmw_repo_setup_extended_hash={{ _repo_setup_json['extended_hash'] }}, cifmw_repo_setup_dlrn_api_url={{ _repo_setup_json['dlrn_api_url'] }}, cifmw_repo_setup_dlrn_url={{ _repo_setup_json['dlrn_url'] }}, cifmw_repo_setup_release={{ _repo_setup_json['release'] }}, cacheable=True] *** 2025-12-12 16:24:11,140 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.685 ******* 2025-12-12 16:24:11,140 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.683 ******* 2025-12-12 16:24:11,172 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:11,179 p=31591 u=zuul n=ansible | TASK [repo_setup : Create download directory path={{ cifmw_repo_setup_rhos_release_path }}, state=directory, mode=0755] *** 2025-12-12 
16:24:11,179 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.039) 0:00:18.725 ******* 2025-12-12 16:24:11,180 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.039) 0:00:18.723 ******* 2025-12-12 16:24:11,194 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,201 p=31591 u=zuul n=ansible | TASK [repo_setup : Print the URL to request msg={{ cifmw_repo_setup_rhos_release_rpm }}] *** 2025-12-12 16:24:11,201 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.021) 0:00:18.746 ******* 2025-12-12 16:24:11,201 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.021) 0:00:18.745 ******* 2025-12-12 16:24:11,215 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,222 p=31591 u=zuul n=ansible | TASK [Download the RPM name=krb_request] *************************************** 2025-12-12 16:24:11,222 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.020) 0:00:18.767 ******* 2025-12-12 16:24:11,222 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.020) 0:00:18.766 ******* 2025-12-12 16:24:11,239 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,248 p=31591 u=zuul n=ansible | TASK [repo_setup : Install RHOS Release tool name={{ cifmw_repo_setup_rhos_release_rpm if cifmw_repo_setup_rhos_release_rpm is not url else cifmw_krb_request_out.path }}, state=present, disable_gpg_check={{ cifmw_repo_setup_rhos_release_gpg_check | bool }}] *** 2025-12-12 16:24:11,248 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.026) 0:00:18.793 ******* 2025-12-12 16:24:11,248 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.026) 0:00:18.792 ******* 2025-12-12 16:24:11,268 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,275 p=31591 u=zuul n=ansible | TASK [repo_setup : Get rhos-release tool version _raw_params=rhos-release --version] *** 2025-12-12 16:24:11,276 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.821 ******* 2025-12-12 16:24:11,276 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.027) 0:00:18.819 ******* 2025-12-12 16:24:11,289 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,295 p=31591 u=zuul n=ansible | TASK [repo_setup : Print rhos-release tool version msg={{ rr_version.stdout }}] *** 2025-12-12 16:24:11,295 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.019) 0:00:18.840 ******* 2025-12-12 16:24:11,295 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.019) 0:00:18.839 ******* 2025-12-12 16:24:11,308 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,316 p=31591 u=zuul n=ansible | TASK [repo_setup : Generate repos using rhos-release {{ cifmw_repo_setup_rhos_release_args }} _raw_params=rhos-release {{ cifmw_repo_setup_rhos_release_args }} \ -t {{ cifmw_repo_setup_output }}] *** 2025-12-12 16:24:11,316 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.020) 0:00:18.861 ******* 2025-12-12 16:24:11,316 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.020) 0:00:18.860 ******* 2025-12-12 16:24:11,329 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:11,334 p=31591 u=zuul n=ansible | TASK [repo_setup : Check for /etc/ci/mirror_info.sh path=/etc/ci/mirror_info.sh] *** 2025-12-12 16:24:11,335 p=31591 u=zuul n=ansible | Friday 12 
December 2025 16:24:11 +0000 (0:00:00.018) 0:00:18.880 ******* 2025-12-12 16:24:11,335 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.018) 0:00:18.878 ******* 2025-12-12 16:24:11,534 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:11,548 p=31591 u=zuul n=ansible | TASK [repo_setup : Use RDO proxy mirrors chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|https://trunk.rdoproject.org|$NODEPOOL_RDO_PROXY|g" *.repo ] *** 2025-12-12 16:24:11,549 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.214) 0:00:19.094 ******* 2025-12-12 16:24:11,549 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.214) 0:00:19.092 ******* 2025-12-12 16:24:11,755 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:11,770 p=31591 u=zuul n=ansible | TASK [repo_setup : Use RDO CentOS mirrors (remove CentOS 10 conditional when Nodepool mirrors exist) chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|http://mirror.stream.centos.org|$NODEPOOL_CENTOS_MIRROR|g" *.repo ] *** 2025-12-12 16:24:11,771 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.221) 0:00:19.316 ******* 2025-12-12 16:24:11,771 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:11 +0000 (0:00:00.222) 0:00:19.314 ******* 2025-12-12 16:24:12,015 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:12,021 p=31591 u=zuul n=ansible | TASK [repo_setup : Check for gating.repo file on content provider url=http://{{ content_provider_registry_ip }}:8766/gating.repo] *** 2025-12-12 16:24:12,021 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.250) 0:00:19.567 ******* 2025-12-12 16:24:12,022 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.250) 0:00:19.565 ******* 2025-12-12 16:24:12,046 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,053 p=31591 u=zuul n=ansible | TASK [repo_setup : Populate gating repo from content provider ip content=[gating-repo] baseurl=http://{{ content_provider_registry_ip }}:8766/ enabled=1 gpgcheck=0 priority=1 , dest={{ cifmw_repo_setup_output }}/gating.repo, mode=0644] *** 2025-12-12 16:24:12,053 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.031) 0:00:19.599 ******* 2025-12-12 16:24:12,053 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.031) 0:00:19.597 ******* 2025-12-12 16:24:12,080 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,089 p=31591 u=zuul n=ansible | TASK [repo_setup : Check for DLRN repo at the destination path={{ cifmw_repo_setup_output }}/delorean.repo] *** 2025-12-12 16:24:12,089 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.634 ******* 2025-12-12 16:24:12,089 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.633 ******* 2025-12-12 16:24:12,117 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,125 p=31591 u=zuul n=ansible | TASK [repo_setup : Lower the priority of DLRN repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}/delorean.repo, regexp=priority=1, replace=priority=20] *** 2025-12-12 16:24:12,125 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.670 ******* 2025-12-12 16:24:12,125 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 
(0:00:00.035) 0:00:19.668 ******* 2025-12-12 16:24:12,146 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,153 p=31591 u=zuul n=ansible | TASK [repo_setup : Check for DLRN component repo path={{ cifmw_repo_setup_output }}/{{ _comp_repo }}] *** 2025-12-12 16:24:12,153 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.028) 0:00:19.698 ******* 2025-12-12 16:24:12,153 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.028) 0:00:19.697 ******* 2025-12-12 16:24:12,179 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,188 p=31591 u=zuul n=ansible | TASK [repo_setup : Lower the priority of componennt repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}//{{ _comp_repo }}, regexp=priority=1, replace=priority=2] *** 2025-12-12 16:24:12,188 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.733 ******* 2025-12-12 16:24:12,188 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.035) 0:00:19.732 ******* 2025-12-12 16:24:12,210 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:12,219 p=31591 u=zuul n=ansible | TASK [repo_setup : Find existing repos from /etc/yum.repos.d directory paths=/etc/yum.repos.d/, patterns=*.repo, recurse=False] *** 2025-12-12 16:24:12,219 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.031) 0:00:19.764 ******* 2025-12-12 16:24:12,219 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.031) 0:00:19.763 ******* 2025-12-12 16:24:12,535 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:12,544 p=31591 u=zuul n=ansible | TASK [repo_setup : Remove existing repos from /etc/yum.repos.d directory path={{ item }}, state=absent] *** 2025-12-12 16:24:12,544 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.324) 0:00:20.089 ******* 2025-12-12 16:24:12,544 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:12 +0000 (0:00:00.324) 0:00:20.088 ******* 2025-12-12 16:24:12,793 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos-addons.repo) 2025-12-12 16:24:13,025 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos.repo) 2025-12-12 16:24:13,036 p=31591 u=zuul n=ansible | TASK [repo_setup : Cleanup existing metadata _raw_params=dnf clean metadata] *** 2025-12-12 16:24:13,036 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.492) 0:00:20.581 ******* 2025-12-12 16:24:13,036 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.492) 0:00:20.580 ******* 2025-12-12 16:24:13,488 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:13,496 p=31591 u=zuul n=ansible | TASK [repo_setup : Copy generated repos to /etc/yum.repos.d directory mode=0755, remote_src=True, src={{ cifmw_repo_setup_output }}/, dest=/etc/yum.repos.d] *** 2025-12-12 16:24:13,497 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.460) 0:00:21.042 ******* 2025-12-12 16:24:13,497 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.460) 0:00:21.040 ******* 2025-12-12 16:24:13,770 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:13,782 p=31591 u=zuul n=ansible | TASK [ci_setup : Gather variables for each operating system _raw_params={{ item }}] *** 2025-12-12 16:24:13,782 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.285) 0:00:21.327 ******* 2025-12-12 
16:24:13,782 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.285) 0:00:21.326 ******* 2025-12-12 16:24:13,818 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_setup/vars/redhat.yml) 2025-12-12 16:24:13,829 p=31591 u=zuul n=ansible | TASK [ci_setup : List packages to install var=cifmw_ci_setup_packages] ********* 2025-12-12 16:24:13,829 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.047) 0:00:21.374 ******* 2025-12-12 16:24:13,829 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.047) 0:00:21.373 ******* 2025-12-12 16:24:13,850 p=31591 u=zuul n=ansible | ok: [localhost] => cifmw_ci_setup_packages: - bash-completion - ca-certificates - git-core - make - tar - tmux - python3-pip 2025-12-12 16:24:13,857 p=31591 u=zuul n=ansible | TASK [ci_setup : Install needed packages name={{ cifmw_ci_setup_packages }}, state=latest] *** 2025-12-12 16:24:13,857 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.027) 0:00:21.402 ******* 2025-12-12 16:24:13,857 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:13 +0000 (0:00:00.027) 0:00:21.400 ******* 2025-12-12 16:24:44,229 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:44,238 p=31591 u=zuul n=ansible | TASK [ci_setup : Gather version of openshift client _raw_params=oc version --client -o yaml] *** 2025-12-12 16:24:44,238 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:30.381) 0:00:51.783 ******* 2025-12-12 16:24:44,238 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:30.381) 0:00:51.782 ******* 2025-12-12 16:24:44,462 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:44,477 p=31591 u=zuul n=ansible | TASK [ci_setup : Ensure openshift client install path is present path={{ cifmw_ci_setup_oc_install_path }}, state=directory, mode=0755] *** 2025-12-12 16:24:44,477 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:00.239) 0:00:52.022 ******* 2025-12-12 16:24:44,477 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:00.239) 0:00:52.021 ******* 2025-12-12 16:24:44,691 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:44,706 p=31591 u=zuul n=ansible | TASK [ci_setup : Install openshift client src={{ cifmw_ci_setup_openshift_client_download_uri }}/{{ cifmw_ci_setup_openshift_client_version }}/openshift-client-linux.tar.gz, dest={{ cifmw_ci_setup_oc_install_path }}, remote_src=True, mode=0755, creates={{ cifmw_ci_setup_oc_install_path }}/oc] *** 2025-12-12 16:24:44,706 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:00.229) 0:00:52.251 ******* 2025-12-12 16:24:44,706 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:44 +0000 (0:00:00.229) 0:00:52.250 ******* 2025-12-12 16:24:50,033 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:50,042 p=31591 u=zuul n=ansible | TASK [ci_setup : Add the OC path to cifmw_path if needed cifmw_path={{ cifmw_ci_setup_oc_install_path }}:{{ ansible_env.PATH }}, cacheable=True] *** 2025-12-12 16:24:50,042 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:05.336) 0:00:57.587 ******* 2025-12-12 16:24:50,042 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:05.335) 0:00:57.586 ******* 2025-12-12 16:24:50,063 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,077 p=31591 u=zuul n=ansible | TASK [ci_setup : Create 
completion file] *************************************** 2025-12-12 16:24:50,077 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.034) 0:00:57.622 ******* 2025-12-12 16:24:50,077 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.034) 0:00:57.621 ******* 2025-12-12 16:24:50,411 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:50,420 p=31591 u=zuul n=ansible | TASK [ci_setup : Source completion from within .bashrc create=True, mode=0644, path={{ ansible_user_dir }}/.bashrc, block=if [ -f ~/.oc_completion ]; then source ~/.oc_completion fi] *** 2025-12-12 16:24:50,420 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.342) 0:00:57.965 ******* 2025-12-12 16:24:50,420 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.342) 0:00:57.964 ******* 2025-12-12 16:24:50,765 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:50,780 p=31591 u=zuul n=ansible | TASK [ci_setup : Check rhsm status _raw_params=subscription-manager status] **** 2025-12-12 16:24:50,780 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.360) 0:00:58.325 ******* 2025-12-12 16:24:50,780 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.360) 0:00:58.324 ******* 2025-12-12 16:24:50,799 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,813 p=31591 u=zuul n=ansible | TASK [ci_setup : Gather the repos to be enabled _repos={{ cifmw_ci_setup_rhel_rhsm_default_repos + (cifmw_ci_setup_rhel_rhsm_extra_repos | default([])) }}] *** 2025-12-12 16:24:50,813 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.033) 0:00:58.359 ******* 2025-12-12 16:24:50,814 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.033) 0:00:58.357 ******* 2025-12-12 16:24:50,835 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,849 p=31591 u=zuul n=ansible | TASK [ci_setup : Enabling the required repositories. 
name={{ item }}, state={{ rhsm_repo_state | default('enabled') }}] *** 2025-12-12 16:24:50,850 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.036) 0:00:58.395 ******* 2025-12-12 16:24:50,850 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.036) 0:00:58.393 ******* 2025-12-12 16:24:50,868 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,882 p=31591 u=zuul n=ansible | TASK [ci_setup : Get current /etc/redhat-release _raw_params=cat /etc/redhat-release] *** 2025-12-12 16:24:50,882 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.032) 0:00:58.427 ******* 2025-12-12 16:24:50,882 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.032) 0:00:58.426 ******* 2025-12-12 16:24:50,912 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,919 p=31591 u=zuul n=ansible | TASK [ci_setup : Print current /etc/redhat-release msg={{ _current_rh_release.stdout }}] *** 2025-12-12 16:24:50,919 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.037) 0:00:58.464 ******* 2025-12-12 16:24:50,919 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.037) 0:00:58.463 ******* 2025-12-12 16:24:50,938 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,945 p=31591 u=zuul n=ansible | TASK [ci_setup : Ensure the repos are enabled in the system using yum name={{ item.name }}, baseurl={{ item.baseurl }}, description={{ item.description | default(item.name) }}, gpgcheck={{ item.gpgcheck | default(false) }}, enabled=True, state={{ yum_repo_state | default('present') }}] *** 2025-12-12 16:24:50,946 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.026) 0:00:58.491 ******* 2025-12-12 16:24:50,946 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.026) 0:00:58.489 ******* 2025-12-12 16:24:50,967 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:50,978 p=31591 u=zuul n=ansible | TASK [ci_setup : Manage directories path={{ item }}, state={{ directory_state }}, mode=0755, owner={{ ansible_user_id }}, group={{ ansible_user_id }}] *** 2025-12-12 16:24:50,979 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.033) 0:00:58.524 ******* 2025-12-12 16:24:50,979 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:50 +0000 (0:00:00.033) 0:00:58.522 ******* 2025-12-12 16:24:51,232 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/manifests/openstack/cr) 2025-12-12 16:24:51,436 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/logs) 2025-12-12 16:24:51,670 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/tmp) 2025-12-12 16:24:51,870 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/volumes) 2025-12-12 16:24:52,050 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2025-12-12 16:24:52,075 p=31591 u=zuul n=ansible | TASK [Prepare install_yamls make targets name=install_yamls, apply={'tags': ['bootstrap']}] *** 2025-12-12 16:24:52,075 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:01.096) 0:00:59.620 ******* 2025-12-12 16:24:52,075 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:01.096) 0:00:59.619 ******* 2025-12-12 16:24:52,188 p=31591 u=zuul n=ansible | TASK [install_yamls : Ensure directories exist path={{ 
item }}, state=directory, mode=0755] *** 2025-12-12 16:24:52,188 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.112) 0:00:59.733 ******* 2025-12-12 16:24:52,188 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.112) 0:00:59.732 ******* 2025-12-12 16:24:52,378 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts) 2025-12-12 16:24:52,558 p=31591 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes/tasks) 2025-12-12 16:24:52,756 p=31591 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2025-12-12 16:24:52,766 p=31591 u=zuul n=ansible | TASK [Create variables with local repos based on Zuul items name=install_yamls, tasks_from=zuul_set_operators_repo.yml] *** 2025-12-12 16:24:52,766 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.578) 0:01:00.311 ******* 2025-12-12 16:24:52,766 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.578) 0:01:00.310 ******* 2025-12-12 16:24:52,800 p=31591 u=zuul n=ansible | TASK [install_yamls : Set fact with local repos based on Zuul items cifmw_install_yamls_operators_repo={{ cifmw_install_yamls_operators_repo | default({}) | combine(_repo_operator_info | items2dict) }}] *** 2025-12-12 16:24:52,800 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.033) 0:01:00.345 ******* 2025-12-12 16:24:52,800 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.033) 0:01:00.344 ******* 2025-12-12 16:24:52,834 p=31591 u=zuul n=ansible | skipping: [localhost] => (item={'branch': 'master', 'change': '694', 'change_url': 'https://github.com/infrawatch/service-telemetry-operator/pull/694', 'commit_id': '3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14', 'patchset': '3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/infrawatch/service-telemetry-operator', 'name': 'infrawatch/service-telemetry-operator', 'short_name': 'service-telemetry-operator', 'src_dir': 'src/github.com/infrawatch/service-telemetry-operator'}, 'topic': None}) 2025-12-12 16:24:52,836 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:52,844 p=31591 u=zuul n=ansible | TASK [install_yamls : Print helpful data for debugging msg=_repo_operator_name: {{ _repo_operator_name }} _repo_operator_info: {{ _repo_operator_info }} cifmw_install_yamls_operators_repo: {{ cifmw_install_yamls_operators_repo }} ] *** 2025-12-12 16:24:52,844 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.043) 0:01:00.389 ******* 2025-12-12 16:24:52,844 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.043) 0:01:00.388 ******* 2025-12-12 16:24:52,876 p=31591 u=zuul n=ansible | skipping: [localhost] => (item={'branch': 'master', 'change': '694', 'change_url': 'https://github.com/infrawatch/service-telemetry-operator/pull/694', 'commit_id': '3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14', 'patchset': '3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/infrawatch/service-telemetry-operator', 'name': 'infrawatch/service-telemetry-operator', 'short_name': 'service-telemetry-operator', 'src_dir': 'src/github.com/infrawatch/service-telemetry-operator'}, 'topic': None}) 2025-12-12 16:24:52,878 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:52,891 p=31591 
u=zuul n=ansible | TASK [Customize install_yamls devsetup vars if needed name=install_yamls, tasks_from=customize_devsetup_vars.yml] *** 2025-12-12 16:24:52,891 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.047) 0:01:00.436 ******* 2025-12-12 16:24:52,891 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.047) 0:01:00.435 ******* 2025-12-12 16:24:52,935 p=31591 u=zuul n=ansible | TASK [install_yamls : Update opm_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^opm_version:, line=opm_version: {{ cifmw_install_yamls_opm_version }}, state=present] *** 2025-12-12 16:24:52,935 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.043) 0:01:00.480 ******* 2025-12-12 16:24:52,935 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.043) 0:01:00.479 ******* 2025-12-12 16:24:52,969 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:52,976 p=31591 u=zuul n=ansible | TASK [install_yamls : Update sdk_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^sdk_version:, line=sdk_version: {{ cifmw_install_yamls_sdk_version }}, state=present] *** 2025-12-12 16:24:52,976 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.041) 0:01:00.521 ******* 2025-12-12 16:24:52,976 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:52 +0000 (0:00:00.041) 0:01:00.520 ******* 2025-12-12 16:24:53,000 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,008 p=31591 u=zuul n=ansible | TASK [install_yamls : Update go_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^go_version:, line=go_version: {{ cifmw_install_yamls_go_version }}, state=present] *** 2025-12-12 16:24:53,008 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.031) 0:01:00.553 ******* 2025-12-12 16:24:53,008 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.031) 0:01:00.552 ******* 2025-12-12 16:24:53,029 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,036 p=31591 u=zuul n=ansible | TASK [install_yamls : Update kustomize_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^kustomize_version:, line=kustomize_version: {{ cifmw_install_yamls_kustomize_version }}, state=present] *** 2025-12-12 16:24:53,036 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.028) 0:01:00.581 ******* 2025-12-12 16:24:53,036 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.028) 0:01:00.580 ******* 2025-12-12 16:24:53,060 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,071 p=31591 u=zuul n=ansible | TASK [install_yamls : Compute the cifmw_install_yamls_vars final value _install_yamls_override_vars={{ _install_yamls_override_vars | default({}) | combine(item, recursive=True) }}] *** 2025-12-12 16:24:53,071 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.035) 0:01:00.616 ******* 2025-12-12 16:24:53,071 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.035) 0:01:00.615 ******* 2025-12-12 16:24:53,129 p=31591 u=zuul n=ansible | ok: [localhost] => (item={}) 2025-12-12 16:24:53,137 p=31591 u=zuul n=ansible | TASK [install_yamls : Set environment override 
cifmw_install_yamls_environment fact cifmw_install_yamls_environment={{ _install_yamls_override_vars.keys() | map('upper') | zip(_install_yamls_override_vars.values()) | items2dict(key_name=0, value_name=1) | combine({ 'OUT': cifmw_install_yamls_manifests_dir, 'OUTPUT_DIR': cifmw_install_yamls_edpm_dir, 'CHECKOUT_FROM_OPENSTACK_REF': cifmw_install_yamls_checkout_openstack_ref, 'OPENSTACK_K8S_BRANCH': (zuul is defined and not zuul.branch |regex_search('master|antelope|rhos')) | ternary(zuul.branch, 'main') }) | combine(install_yamls_operators_repos) }}, cacheable=True] *** 2025-12-12 16:24:53,138 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.066) 0:01:00.683 ******* 2025-12-12 16:24:53,138 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.066) 0:01:00.681 ******* 2025-12-12 16:24:53,177 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:53,182 p=31591 u=zuul n=ansible | TASK [install_yamls : Get environment structure base_path={{ cifmw_install_yamls_repo }}] *** 2025-12-12 16:24:53,182 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.044) 0:01:00.728 ******* 2025-12-12 16:24:53,183 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.044) 0:01:00.726 ******* 2025-12-12 16:24:53,768 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:53,822 p=31591 u=zuul n=ansible | TASK [install_yamls : Ensure Output directory exists path={{ cifmw_install_yamls_out_dir }}, state=directory, mode=0755] *** 2025-12-12 16:24:53,823 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.640) 0:01:01.368 ******* 2025-12-12 16:24:53,823 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.640) 0:01:01.366 ******* 2025-12-12 16:24:53,849 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,859 p=31591 u=zuul n=ansible | TASK [install_yamls : Ensure user cifmw_install_yamls_vars contains existing Makefile variables that=_cifmw_install_yamls_unmatched_vars | length == 0, msg=cifmw_install_yamls_vars contains a variable that is not defined in install_yamls Makefile nor cifmw_install_yamls_whitelisted_vars: {{ _cifmw_install_yamls_unmatched_vars | join(', ')}}, quiet=True] *** 2025-12-12 16:24:53,859 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.036) 0:01:01.404 ******* 2025-12-12 16:24:53,859 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.036) 0:01:01.403 ******* 2025-12-12 16:24:53,878 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,891 p=31591 u=zuul n=ansible | TASK [install_yamls : Generate /home/zuul/ci-framework-data/artifacts/install_yamls.sh dest={{ cifmw_install_yamls_out_dir }}/{{ cifmw_install_yamls_envfile }}, content={% for k,v in cifmw_install_yamls_environment.items() %} export {{ k }}={{ v }} {% endfor %}, mode=0644] *** 2025-12-12 16:24:53,891 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.031) 0:01:01.436 ******* 2025-12-12 16:24:53,891 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.032) 0:01:01.435 ******* 2025-12-12 16:24:53,913 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:53,920 p=31591 u=zuul n=ansible | TASK [install_yamls : Set install_yamls default values cifmw_install_yamls_defaults={{ get_makefiles_env_output.makefiles_values | combine(cifmw_install_yamls_environment) }}, cacheable=True] *** 2025-12-12 16:24:53,920 p=31591 u=zuul n=ansible | Friday 12 December 2025 
16:24:53 +0000 (0:00:00.029) 0:01:01.465 ******* 2025-12-12 16:24:53,920 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.028) 0:01:01.464 ******* 2025-12-12 16:24:53,945 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:53,954 p=31591 u=zuul n=ansible | TASK [install_yamls : Show the env structure var=cifmw_install_yamls_environment] *** 2025-12-12 16:24:53,954 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.034) 0:01:01.500 ******* 2025-12-12 16:24:53,955 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.034) 0:01:01.498 ******* 2025-12-12 16:24:53,977 p=31591 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_environment: CHECKOUT_FROM_OPENSTACK_REF: 'true' OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm 2025-12-12 16:24:53,987 p=31591 u=zuul n=ansible | TASK [install_yamls : Show the env structure defaults var=cifmw_install_yamls_defaults] *** 2025-12-12 16:24:53,987 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.032) 0:01:01.532 ******* 2025-12-12 16:24:53,987 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:53 +0000 (0:00:00.032) 0:01:01.531 ******* 2025-12-12 16:24:54,015 p=31591 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sE**********U= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: 
'64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_CLEANUP: 'true' BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: '' BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret 
DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 76**********f0 HEAT_BRANCH: main HEAT_COMMIT_HASH: '' HEAT_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: 'true' INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE: quay.io/metal3-io/ironic IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: CO**********6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '12**********42' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: 
octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12**********78' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/playbooks/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: os**********et SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: test/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' tripleo_deploy: 'export REGISTRY_USER:' 2025-12-12 16:24:54,022 p=31591 u=zuul n=ansible | TASK [install_yamls : Generate make targets install_yamls_path={{ cifmw_install_yamls_repo }}, output_directory={{ cifmw_install_yamls_tasks_out }}] *** 2025-12-12 16:24:54,023 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.035) 0:01:01.568 ******* 2025-12-12 16:24:54,023 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.035) 0:01:01.566 ******* 2025-12-12 16:24:54,328 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:54,337 p=31591 u=zuul n=ansible | TASK [install_yamls : Debug generate_make module var=cifmw_generate_makes] ***** 2025-12-12 16:24:54,337 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.314) 0:01:01.882 ******* 2025-12-12 16:24:54,337 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.314) 0:01:01.881 ******* 2025-12-12 16:24:54,360 p=31591 u=zuul n=ansible | ok: [localhost] => cifmw_generate_makes: changed: false debug: 
/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/Makefile: - all - help - cleanup - deploy_cleanup - wait - crc_storage - crc_storage_cleanup - crc_storage_release - crc_storage_with_retries - crc_storage_cleanup_with_retries - operator_namespace - namespace - namespace_cleanup - input - input_cleanup - crc_bmo_setup - crc_bmo_cleanup - openstack_prep - openstack - openstack_wait - openstack_init - openstack_cleanup - openstack_repo - openstack_deploy_prep - openstack_deploy - openstack_wait_deploy - openstack_deploy_cleanup - openstack_update_run - update_services - update_system - openstack_patch_version - edpm_deploy_generate_keys - edpm_patch_ansible_runner_image - edpm_deploy_prep - edpm_deploy_cleanup - edpm_deploy - edpm_deploy_baremetal_prep - edpm_deploy_baremetal - edpm_wait_deploy_baremetal - edpm_wait_deploy - edpm_register_dns - edpm_nova_discover_hosts - openstack_crds - openstack_crds_cleanup - edpm_deploy_networker_prep - edpm_deploy_networker_cleanup - edpm_deploy_networker - infra_prep - infra - infra_cleanup - dns_deploy_prep - dns_deploy - dns_deploy_cleanup - netconfig_deploy_prep - netconfig_deploy - netconfig_deploy_cleanup - memcached_deploy_prep - memcached_deploy - memcached_deploy_cleanup - keystone_prep - keystone - keystone_cleanup - keystone_deploy_prep - keystone_deploy - keystone_deploy_cleanup - barbican_prep - barbican - barbican_cleanup - barbican_deploy_prep - barbican_deploy - barbican_deploy_validate - barbican_deploy_cleanup - mariadb - mariadb_cleanup - mariadb_deploy_prep - mariadb_deploy - mariadb_deploy_cleanup - placement_prep - placement - placement_cleanup - placement_deploy_prep - placement_deploy - placement_deploy_cleanup - glance_prep - glance - glance_cleanup - glance_deploy_prep - glance_deploy - glance_deploy_cleanup - ovn_prep - ovn - ovn_cleanup - ovn_deploy_prep - ovn_deploy - ovn_deploy_cleanup - neutron_prep - neutron - neutron_cleanup - neutron_deploy_prep - neutron_deploy - neutron_deploy_cleanup - cinder_prep - cinder - cinder_cleanup - cinder_deploy_prep - cinder_deploy - cinder_deploy_cleanup - rabbitmq_prep - rabbitmq - rabbitmq_cleanup - rabbitmq_deploy_prep - rabbitmq_deploy - rabbitmq_deploy_cleanup - ironic_prep - ironic - ironic_cleanup - ironic_deploy_prep - ironic_deploy - ironic_deploy_cleanup - octavia_prep - octavia - octavia_cleanup - octavia_deploy_prep - octavia_deploy - octavia_deploy_cleanup - designate_prep - designate - designate_cleanup - designate_deploy_prep - designate_deploy - designate_deploy_cleanup - nova_prep - nova - nova_cleanup - nova_deploy_prep - nova_deploy - nova_deploy_cleanup - mariadb_kuttl_run - mariadb_kuttl - kuttl_db_prep - kuttl_db_cleanup - kuttl_common_prep - kuttl_common_cleanup - keystone_kuttl_run - keystone_kuttl - barbican_kuttl_run - barbican_kuttl - placement_kuttl_run - placement_kuttl - cinder_kuttl_run - cinder_kuttl - neutron_kuttl_run - neutron_kuttl - octavia_kuttl_run - octavia_kuttl - designate_kuttl - designate_kuttl_run - ovn_kuttl_run - ovn_kuttl - infra_kuttl_run - infra_kuttl - ironic_kuttl_run - ironic_kuttl - ironic_kuttl_crc - heat_kuttl_run - heat_kuttl - heat_kuttl_crc - ansibleee_kuttl_run - ansibleee_kuttl_cleanup - ansibleee_kuttl_prep - ansibleee_kuttl - glance_kuttl_run - glance_kuttl - manila_kuttl_run - manila_kuttl - swift_kuttl_run - swift_kuttl - horizon_kuttl_run - horizon_kuttl - openstack_kuttl_run - openstack_kuttl - mariadb_chainsaw_run - mariadb_chainsaw - horizon_prep - horizon - horizon_cleanup - horizon_deploy_prep - 
horizon_deploy - horizon_deploy_cleanup - heat_prep - heat - heat_cleanup - heat_deploy_prep - heat_deploy - heat_deploy_cleanup - ansibleee_prep - ansibleee - ansibleee_cleanup - baremetal_prep - baremetal - baremetal_cleanup - ceph_help - ceph - ceph_cleanup - rook_prep - rook - rook_deploy_prep - rook_deploy - rook_crc_disk - rook_cleanup - lvms - nmstate - nncp - nncp_cleanup - netattach - netattach_cleanup - metallb - metallb_config - metallb_config_cleanup - metallb_cleanup - loki - loki_cleanup - loki_deploy - loki_deploy_cleanup - netobserv - netobserv_cleanup - netobserv_deploy - netobserv_deploy_cleanup - manila_prep - manila - manila_cleanup - manila_deploy_prep - manila_deploy - manila_deploy_cleanup - telemetry_prep - telemetry - telemetry_cleanup - telemetry_deploy_prep - telemetry_deploy - telemetry_deploy_cleanup - telemetry_kuttl_run - telemetry_kuttl - swift_prep - swift - swift_cleanup - swift_deploy_prep - swift_deploy - swift_deploy_cleanup - certmanager - certmanager_cleanup - validate_marketplace - redis_deploy_prep - redis_deploy - redis_deploy_cleanup - set_slower_etcd_profile /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/Makefile: - help - download_tools - nfs - nfs_cleanup - crc - crc_cleanup - crc_scrub - crc_attach_default_interface - crc_attach_default_interface_cleanup - ipv6_lab_network - ipv6_lab_network_cleanup - ipv6_lab_nat64_router - ipv6_lab_nat64_router_cleanup - ipv6_lab_sno - ipv6_lab_sno_cleanup - ipv6_lab - ipv6_lab_cleanup - attach_default_interface - attach_default_interface_cleanup - network_isolation_bridge - network_isolation_bridge_cleanup - edpm_baremetal_compute - edpm_compute - edpm_compute_bootc - edpm_ansible_runner - edpm_computes_bgp - edpm_compute_repos - edpm_compute_cleanup - edpm_networker - edpm_networker_cleanup - edpm_deploy_instance - tripleo_deploy - standalone_deploy - standalone_sync - standalone - standalone_cleanup - standalone_snapshot - standalone_revert - cifmw_prepare - cifmw_cleanup - bmaas_network - bmaas_network_cleanup - bmaas_route_crc_and_crc_bmaas_networks - bmaas_route_crc_and_crc_bmaas_networks_cleanup - bmaas_crc_attach_network - bmaas_crc_attach_network_cleanup - bmaas_crc_baremetal_bridge - bmaas_crc_baremetal_bridge_cleanup - bmaas_baremetal_net_nad - bmaas_baremetal_net_nad_cleanup - bmaas_metallb - bmaas_metallb_cleanup - bmaas_virtual_bms - bmaas_virtual_bms_cleanup - bmaas_sushy_emulator - bmaas_sushy_emulator_cleanup - bmaas_sushy_emulator_wait - bmaas_generate_nodes_yaml - bmaas - bmaas_cleanup failed: false success: true 2025-12-12 16:24:54,368 p=31591 u=zuul n=ansible | TASK [install_yamls : Create the install_yamls parameters file dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml, content={{ { 'cifmw_install_yamls_environment': cifmw_install_yamls_environment, 'cifmw_install_yamls_defaults': cifmw_install_yamls_defaults } | to_nice_yaml }}, mode=0644] *** 2025-12-12 16:24:54,368 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.030) 0:01:01.913 ******* 2025-12-12 16:24:54,368 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.030) 0:01:01.912 ******* 2025-12-12 16:24:54,758 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:54,766 p=31591 u=zuul n=ansible | TASK [install_yamls : Create empty cifmw_install_yamls_environment if needed cifmw_install_yamls_environment={}] *** 2025-12-12 16:24:54,766 p=31591 u=zuul n=ansible | Friday 12 
December 2025 16:24:54 +0000 (0:00:00.397) 0:01:02.311 ******* 2025-12-12 16:24:54,766 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.397) 0:01:02.310 ******* 2025-12-12 16:24:54,784 p=31591 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:54,800 p=31591 u=zuul n=ansible | TASK [discover_latest_image : Get latest image url={{ cifmw_discover_latest_image_base_url }}, image_prefix={{ cifmw_discover_latest_image_qcow_prefix }}, images_file={{ cifmw_discover_latest_image_images_file }}] *** 2025-12-12 16:24:54,800 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.034) 0:01:02.345 ******* 2025-12-12 16:24:54,800 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:54 +0000 (0:00:00.034) 0:01:02.344 ******* 2025-12-12 16:24:55,338 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:55,344 p=31591 u=zuul n=ansible | TASK [discover_latest_image : Export facts accordingly cifmw_discovered_image_name={{ discovered_image['data']['image_name'] }}, cifmw_discovered_image_url={{ discovered_image['data']['image_url'] }}, cifmw_discovered_hash={{ discovered_image['data']['hash'] }}, cifmw_discovered_hash_algorithm={{ discovered_image['data']['hash_algorithm'] }}, cacheable=True] *** 2025-12-12 16:24:55,345 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.544) 0:01:02.890 ******* 2025-12-12 16:24:55,345 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.544) 0:01:02.888 ******* 2025-12-12 16:24:55,366 p=31591 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:55,382 p=31591 u=zuul n=ansible | TASK [Create artifacts with custom params mode=0644, dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/custom-params.yml, content={{ ci_framework_params | to_nice_yaml }}] *** 2025-12-12 16:24:55,382 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.037) 0:01:02.927 ******* 2025-12-12 16:24:55,383 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.037) 0:01:02.926 ******* 2025-12-12 16:24:55,817 p=31591 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | localhost : ok=43 changed=23 unreachable=0 failed=0 skipped=40 rescued=0 ignored=0 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.458) 0:01:03.386 ******* 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | =============================================================================== 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | ci_setup : Install needed packages ------------------------------------- 30.38s 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | repo_setup : Initialize python venv and install requirements ------------ 8.61s 2025-12-12 16:24:55,841 p=31591 u=zuul n=ansible | ci_setup : Install openshift client ------------------------------------- 5.34s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_ca : Update ca bundle ------------------------------------------- 1.58s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Make sure git-core package is installed -------------------- 1.54s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Get repo-setup repository ---------------------------------- 1.42s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ci_setup : Manage 
directories ------------------------------------------- 1.10s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | Gathering Facts --------------------------------------------------------- 1.06s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Install repo-setup package --------------------------------- 0.82s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_ca : Ensure target directory exists ----------------------------- 0.75s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Dump full hash in delorean.repo.md5 file ------------------- 0.69s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Run repo-setup --------------------------------------------- 0.66s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_yamls : Get environment structure ------------------------------- 0.64s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_yamls : Ensure directories exist -------------------------------- 0.58s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Ensure directories are present ----------------------------- 0.56s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | discover_latest_image : Get latest image -------------------------------- 0.54s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Remove existing repos from /etc/yum.repos.d directory ------ 0.49s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Run repo-setup-get-hash ------------------------------------ 0.49s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup : Cleanup existing metadata ---------------------------------- 0.46s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | Create artifacts with custom params ------------------------------------- 0.46s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | Friday 12 December 2025 16:24:55 +0000 (0:00:00.459) 0:01:03.386 ******* 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | =============================================================================== 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ci_setup --------------------------------------------------------------- 38.29s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | repo_setup ------------------------------------------------------------- 17.66s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_yamls ----------------------------------------------------------- 2.61s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | install_ca -------------------------------------------------------------- 2.47s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | gather_facts ------------------------------------------------------------ 1.06s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | discover_latest_image --------------------------------------------------- 0.58s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ansible.builtin.copy ---------------------------------------------------- 0.46s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ansible.builtin.include_role -------------------------------------------- 0.11s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ansible.builtin.set_fact ------------------------------------------------ 0.11s 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2025-12-12 16:24:55,842 p=31591 u=zuul n=ansible | total ------------------------------------------------------------------ 63.35s 2025-12-12 16:24:57,434 p=32453 u=zuul n=ansible | PLAY [Run pre_infra hooks] 
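The environment dump and make-target listing above come from the install_yamls checkout that this first playbook prepared. As a rough hand-run sketch only (the job drives these targets through the ci-framework; the variable values below are taken from the dump above, the invocation itself is an assumption), the same defaults could be exercised with:
    # assumption: install_yamls is checked out at the path recorded in the dump above
    cd ~/src/github.com/openstack-k8s-operators/install_yamls
    make NAMESPACE=openstack \
         OPENSTACK_IMG=quay.io/openstack-k8s-operators/openstack-operator-index:latest \
         openstack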
***************************************************** 2025-12-12 16:24:57,470 p=32453 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-12-12 16:24:57,470 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.054) 0:00:00.054 ******* 2025-12-12 16:24:57,470 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.053) 0:00:00.053 ******* 2025-12-12 16:24:57,552 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:57,564 p=32453 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-12-12 16:24:57,564 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.094) 0:00:00.149 ******* 2025-12-12 16:24:57,564 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.094) 0:00:00.148 ******* 2025-12-12 16:24:57,622 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:57,633 p=32453 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_infra _raw_params={{ hook.type }}.yml] *** 2025-12-12 16:24:57,634 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.069) 0:00:00.218 ******* 2025-12-12 16:24:57,634 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.069) 0:00:00.217 ******* 2025-12-12 16:24:57,694 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:57,741 p=32453 u=zuul n=ansible | PLAY [Prepare host virtualization] ********************************************* 2025-12-12 16:24:57,775 p=32453 u=zuul n=ansible | TASK [Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] ****** 2025-12-12 16:24:57,775 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.141) 0:00:00.360 ******* 2025-12-12 16:24:57,775 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.141) 0:00:00.359 ******* 2025-12-12 16:24:57,906 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:57,925 p=32453 u=zuul n=ansible | TASK [Ensure libvirt is present/configured name=libvirt_manager] *************** 2025-12-12 16:24:57,925 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.150) 0:00:00.510 ******* 2025-12-12 16:24:57,925 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.150) 0:00:00.509 ******* 2025-12-12 16:24:57,945 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:57,955 p=32453 u=zuul n=ansible | TASK [Perpare OpenShift provisioner node name=openshift_provisioner_node] ****** 2025-12-12 16:24:57,955 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.029) 0:00:00.539 ******* 2025-12-12 16:24:57,955 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:57 +0000 (0:00:00.029) 0:00:00.539 ******* 2025-12-12 16:24:57,973 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,010 p=32453 u=zuul n=ansible | PLAY [Prepare the platform] **************************************************** 2025-12-12 16:24:58,036 p=32453 u=zuul n=ansible | TASK [Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] ****** 2025-12-12 16:24:58,036 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.081) 0:00:00.621 
******* 2025-12-12 16:24:58,036 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.081) 0:00:00.620 ******* 2025-12-12 16:24:58,079 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:58,094 p=32453 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Environment Definition file existence path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2025-12-12 16:24:58,094 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.057) 0:00:00.679 ******* 2025-12-12 16:24:58,094 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.057) 0:00:00.678 ******* 2025-12-12 16:24:58,388 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:58,401 p=32453 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Definition file existance that=['_net_env_def_stat.stat.exists'], msg=Ensure that the Networking Environment Definition file exists in {{ cifmw_networking_mapper_networking_env_def_path }}, quiet=True] *** 2025-12-12 16:24:58,402 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.307) 0:00:00.986 ******* 2025-12-12 16:24:58,402 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.307) 0:00:00.985 ******* 2025-12-12 16:24:58,431 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,443 p=32453 u=zuul n=ansible | TASK [networking_mapper : Load the Networking Definition from file path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2025-12-12 16:24:58,444 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.042) 0:00:01.028 ******* 2025-12-12 16:24:58,444 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.042) 0:00:01.027 ******* 2025-12-12 16:24:58,466 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,479 p=32453 u=zuul n=ansible | TASK [networking_mapper : Set cifmw_networking_env_definition is present cifmw_networking_env_definition={{ _net_env_def_slurp['content'] | b64decode | from_yaml }}, cacheable=True] *** 2025-12-12 16:24:58,479 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.035) 0:00:01.063 ******* 2025-12-12 16:24:58,479 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.035) 0:00:01.062 ******* 2025-12-12 16:24:58,513 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,535 p=32453 u=zuul n=ansible | TASK [Deploy OCP using Hive name=hive] ***************************************** 2025-12-12 16:24:58,535 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.056) 0:00:01.120 ******* 2025-12-12 16:24:58,535 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.056) 0:00:01.119 ******* 2025-12-12 16:24:58,557 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,568 p=32453 u=zuul n=ansible | TASK [Prepare CRC name=rhol_crc] *********************************************** 2025-12-12 16:24:58,568 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.032) 0:00:01.152 ******* 2025-12-12 16:24:58,568 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.032) 0:00:01.152 ******* 2025-12-12 16:24:58,599 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,610 p=32453 u=zuul n=ansible | TASK [Deploy OpenShift cluster using dev-scripts name=devscripts] ************** 2025-12-12 16:24:58,610 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.042) 0:00:01.195 
******* 2025-12-12 16:24:58,610 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.042) 0:00:01.194 ******* 2025-12-12 16:24:58,632 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:58,644 p=32453 u=zuul n=ansible | TASK [openshift_login : Ensure output directory exists path={{ cifmw_openshift_login_basedir }}/artifacts, state=directory, mode=0755] *** 2025-12-12 16:24:58,644 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.034) 0:00:01.229 ******* 2025-12-12 16:24:58,644 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:58 +0000 (0:00:00.034) 0:00:01.228 ******* 2025-12-12 16:24:59,049 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:59,061 p=32453 u=zuul n=ansible | TASK [openshift_login : OpenShift login _raw_params=login.yml] ***************** 2025-12-12 16:24:59,061 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.416) 0:00:01.645 ******* 2025-12-12 16:24:59,061 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.416) 0:00:01.645 ******* 2025-12-12 16:24:59,095 p=32453 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/login.yml for localhost 2025-12-12 16:24:59,108 p=32453 u=zuul n=ansible | TASK [openshift_login : Check if the password file is present path={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2025-12-12 16:24:59,109 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.047) 0:00:01.693 ******* 2025-12-12 16:24:59,109 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.047) 0:00:01.692 ******* 2025-12-12 16:24:59,132 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,141 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch user password content src={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2025-12-12 16:24:59,141 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:01.726 ******* 2025-12-12 16:24:59,141 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:01.725 ******* 2025-12-12 16:24:59,164 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,173 p=32453 u=zuul n=ansible | TASK [openshift_login : Set user password as a fact cifmw_openshift_login_password={{ cifmw_openshift_login_password_file_slurp.content | b64decode }}, cacheable=True] *** 2025-12-12 16:24:59,174 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:01.758 ******* 2025-12-12 16:24:59,174 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:01.757 ******* 2025-12-12 16:24:59,193 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,203 p=32453 u=zuul n=ansible | TASK [openshift_login : Set role variables cifmw_openshift_login_kubeconfig={{ cifmw_openshift_login_kubeconfig | default(cifmw_openshift_kubeconfig) | default( ansible_env.KUBECONFIG if 'KUBECONFIG' in ansible_env else cifmw_openshift_login_kubeconfig_default_path ) | trim }}, cifmw_openshift_login_user={{ cifmw_openshift_login_user | default(cifmw_openshift_user) | default(omit) }}, cifmw_openshift_login_password={{********** cifmw_openshift_login_password | default(cifmw_openshift_password) | default(omit) }}, cifmw_openshift_login_api={{ cifmw_openshift_login_api | default(cifmw_openshift_api) | default(omit) }}, 
cifmw_openshift_login_cert_login={{ cifmw_openshift_login_cert_login | default(false)}}, cifmw_openshift_login_provided_token={{ cifmw_openshift_provided_token | default(omit) }}, cacheable=True] *** 2025-12-12 16:24:59,203 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.029) 0:00:01.788 ******* 2025-12-12 16:24:59,203 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.029) 0:00:01.787 ******* 2025-12-12 16:24:59,230 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:59,270 p=32453 u=zuul n=ansible | TASK [openshift_login : Check if kubeconfig exists path={{ cifmw_openshift_login_kubeconfig }}] *** 2025-12-12 16:24:59,270 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.066) 0:00:01.854 ******* 2025-12-12 16:24:59,270 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.066) 0:00:01.853 ******* 2025-12-12 16:24:59,440 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:59,453 p=32453 u=zuul n=ansible | TASK [openshift_login : Assert that enough data is provided to log in to OpenShift that=cifmw_openshift_login_kubeconfig_stat.stat.exists or (cifmw_openshift_login_provided_token is defined and cifmw_openshift_login_provided_token != '') or ( (cifmw_openshift_login_user is defined) and (cifmw_openshift_login_password is defined) and (cifmw_openshift_login_api is defined) ), msg=If an existing kubeconfig is not provided user/pwd or provided/initial token and API URL must be given] *** 2025-12-12 16:24:59,453 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.183) 0:00:02.038 ******* 2025-12-12 16:24:59,453 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.183) 0:00:02.037 ******* 2025-12-12 16:24:59,477 p=32453 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2025-12-12 16:24:59,488 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch kubeconfig content src={{ cifmw_openshift_login_kubeconfig }}] *** 2025-12-12 16:24:59,489 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.073 ******* 2025-12-12 16:24:59,489 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.072 ******* 2025-12-12 16:24:59,511 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,523 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch x509 key based users cifmw_openshift_login_key_based_users={{ ( cifmw_openshift_login_kubeconfig_content_b64.content | b64decode | from_yaml ). 
users | default([]) | selectattr('user.client-certificate-data', 'defined') | map(attribute="name") | map("split", "/") | map("first") }}, cacheable=True] *** 2025-12-12 16:24:59,524 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.034) 0:00:02.108 ******* 2025-12-12 16:24:59,524 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.034) 0:00:02.107 ******* 2025-12-12 16:24:59,546 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,559 p=32453 u=zuul n=ansible | TASK [openshift_login : Assign key based user if not provided and available cifmw_openshift_login_user={{ (cifmw_openshift_login_assume_cert_system_user | ternary('system:', '')) + (cifmw_openshift_login_key_based_users | map('replace', 'system:', '') | unique | first) }}, cifmw_openshift_login_cert_login=True, cacheable=True] *** 2025-12-12 16:24:59,559 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.144 ******* 2025-12-12 16:24:59,559 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.143 ******* 2025-12-12 16:24:59,580 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,591 p=32453 u=zuul n=ansible | TASK [openshift_login : Set the retry count cifmw_openshift_login_retries_cnt={{ 0 if cifmw_openshift_login_retries_cnt is undefined else cifmw_openshift_login_retries_cnt|int + 1 }}] *** 2025-12-12 16:24:59,591 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:02.176 ******* 2025-12-12 16:24:59,591 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.032) 0:00:02.175 ******* 2025-12-12 16:24:59,614 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:24:59,627 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch token _raw_params=try_login.yml] ***************** 2025-12-12 16:24:59,627 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.212 ******* 2025-12-12 16:24:59,627 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.211 ******* 2025-12-12 16:24:59,659 p=32453 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/try_login.yml for localhost 2025-12-12 16:24:59,677 p=32453 u=zuul n=ansible | TASK [openshift_login : Try get OpenShift access token _raw_params=oc whoami -t] *** 2025-12-12 16:24:59,677 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.049) 0:00:02.262 ******* 2025-12-12 16:24:59,677 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.049) 0:00:02.261 ******* 2025-12-12 16:24:59,695 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:24:59,712 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift token output_dir={{ cifmw_openshift_login_basedir }}/artifacts, script=oc login {%- if cifmw_openshift_login_provided_token is not defined %} {%- if cifmw_openshift_login_user is defined %} -u {{ cifmw_openshift_login_user }} {%- endif %} {%- if cifmw_openshift_login_password is defined %} -p {{ cifmw_openshift_login_password }} {%- endif %} {% else %} --token={{ cifmw_openshift_login_provided_token }} {%- endif %} {%- if cifmw_openshift_login_skip_tls_verify|bool %} --insecure-skip-tls-verify=true {%- endif %} {%- if cifmw_openshift_login_api is defined %} {{ cifmw_openshift_login_api }} {%- endif %}] *** 2025-12-12 16:24:59,712 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 
(0:00:00.035) 0:00:02.297 ******* 2025-12-12 16:24:59,713 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:24:59 +0000 (0:00:00.035) 0:00:02.296 ******* 2025-12-12 16:24:59,778 p=32453 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_000_fetch_openshift.log 2025-12-12 16:25:00,188 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:00,198 p=32453 u=zuul n=ansible | TASK [openshift_login : Ensure kubeconfig is provided that=cifmw_openshift_login_kubeconfig != ""] *** 2025-12-12 16:25:00,198 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.486) 0:00:02.783 ******* 2025-12-12 16:25:00,199 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.486) 0:00:02.782 ******* 2025-12-12 16:25:00,219 p=32453 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2025-12-12 16:25:00,230 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch new OpenShift access token _raw_params=oc whoami -t] *** 2025-12-12 16:25:00,230 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.031) 0:00:02.815 ******* 2025-12-12 16:25:00,230 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.031) 0:00:02.814 ******* 2025-12-12 16:25:00,701 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:00,711 p=32453 u=zuul n=ansible | TASK [openshift_login : Set new OpenShift token cifmw_openshift_login_token={{ (not cifmw_openshift_login_new_token_out.skipped | default(false)) | ternary(cifmw_openshift_login_new_token_out.stdout, cifmw_openshift_login_whoami_out.stdout) }}, cacheable=True] *** 2025-12-12 16:25:00,712 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.481) 0:00:03.296 ******* 2025-12-12 16:25:00,712 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.481) 0:00:03.295 ******* 2025-12-12 16:25:00,738 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:00,748 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift API URL _raw_params=oc whoami --show-server=true] *** 2025-12-12 16:25:00,748 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.036) 0:00:03.333 ******* 2025-12-12 16:25:00,748 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:00 +0000 (0:00:00.036) 0:00:03.332 ******* 2025-12-12 16:25:01,060 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:01,073 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift kubeconfig context _raw_params=oc whoami -c] *** 2025-12-12 16:25:01,074 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.325) 0:00:03.658 ******* 2025-12-12 16:25:01,074 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.325) 0:00:03.657 ******* 2025-12-12 16:25:01,379 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:01,389 p=32453 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift current user _raw_params=oc whoami] **** 2025-12-12 16:25:01,389 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.315) 0:00:03.974 ******* 2025-12-12 16:25:01,390 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.315) 0:00:03.973 ******* 2025-12-12 16:25:01,668 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:01,732 p=32453 u=zuul n=ansible | TASK [openshift_login : Set OpenShift user, context and API facts cifmw_openshift_login_api={{ cifmw_openshift_login_api_out.stdout }}, 
cifmw_openshift_login_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_login_user={{ _oauth_user }}, cifmw_openshift_kubeconfig={{ cifmw_openshift_login_kubeconfig }}, cifmw_openshift_api={{ cifmw_openshift_login_api_out.stdout }}, cifmw_openshift_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_user={{ _oauth_user }}, cifmw_openshift_token={{ cifmw_openshift_login_token | default(omit) }}, cifmw_install_yamls_environment={{ ( cifmw_install_yamls_environment | combine({'KUBECONFIG': cifmw_openshift_login_kubeconfig}) ) if cifmw_install_yamls_environment is defined else omit }}, cacheable=True] *** 2025-12-12 16:25:01,733 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.343) 0:00:04.317 ******* 2025-12-12 16:25:01,733 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.343) 0:00:04.316 ******* 2025-12-12 16:25:01,786 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:01,797 p=32453 u=zuul n=ansible | TASK [openshift_login : Create the openshift_login parameters file dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/openshift-login-params.yml, content={{ cifmw_openshift_login_params_content | from_yaml | to_nice_yaml }}, mode=0600] *** 2025-12-12 16:25:01,798 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.064) 0:00:04.382 ******* 2025-12-12 16:25:01,798 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:01 +0000 (0:00:00.064) 0:00:04.381 ******* 2025-12-12 16:25:02,365 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:02,376 p=32453 u=zuul n=ansible | TASK [openshift_login : Read the install yamls parameters file path={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml] *** 2025-12-12 16:25:02,376 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:02 +0000 (0:00:00.578) 0:00:04.960 ******* 2025-12-12 16:25:02,376 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:02 +0000 (0:00:00.578) 0:00:04.960 ******* 2025-12-12 16:25:02,690 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:02,703 p=32453 u=zuul n=ansible | TASK [openshift_login : Append the KUBECONFIG to the install yamls parameters content={{ cifmw_openshift_login_install_yamls_artifacts_slurp['content'] | b64decode | from_yaml | combine( { 'cifmw_install_yamls_environment': { 'KUBECONFIG': cifmw_openshift_login_kubeconfig } }, recursive=true) | to_nice_yaml }}, dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml, mode=0600] *** 2025-12-12 16:25:02,704 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:02 +0000 (0:00:00.327) 0:00:05.288 ******* 2025-12-12 16:25:02,704 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:02 +0000 (0:00:00.327) 0:00:05.287 ******* 2025-12-12 16:25:03,147 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:03,169 p=32453 u=zuul n=ansible | TASK [openshift_setup : Ensure output directory exists path={{ cifmw_openshift_setup_basedir }}/artifacts, state=directory, mode=0755] *** 2025-12-12 16:25:03,169 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.465) 0:00:05.753 ******* 2025-12-12 16:25:03,169 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.465) 0:00:05.753 ******* 2025-12-12 16:25:03,374 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:03,389 p=32453 u=zuul n=ansible | 
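The openshift_login sequence above is essentially an `oc login` followed by a few `oc whoami` queries whose output is cached as facts (token, API URL, context, user). A minimal manual equivalent, using placeholder credentials and API URL rather than values from this job, would be:
    # placeholders: user, password and API endpoint are assumptions, not job data
    oc login -u kubeadmin -p "$PASSWORD" --insecure-skip-tls-verify=true https://api.crc.testing:6443
    oc whoami -t                  # access token, recorded as cifmw_openshift_token
    oc whoami --show-server=true  # API URL
    oc whoami -c                  # kubeconfig context
    oc whoami                     # current user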
TASK [openshift_setup : Fetch namespaces to create cifmw_openshift_setup_namespaces={{ (( ([cifmw_install_yamls_defaults['NAMESPACE']] + ([cifmw_install_yamls_defaults['OPERATOR_NAMESPACE']] if 'OPERATOR_NAMESPACE' is in cifmw_install_yamls_defaults else []) ) if cifmw_install_yamls_defaults is defined else [] ) + cifmw_openshift_setup_create_namespaces) | unique }}] *** 2025-12-12 16:25:03,389 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.220) 0:00:05.974 ******* 2025-12-12 16:25:03,389 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.220) 0:00:05.973 ******* 2025-12-12 16:25:03,437 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:03,452 p=32453 u=zuul n=ansible | TASK [openshift_setup : Create required namespaces kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ item }}, kind=Namespace, state=present] *** 2025-12-12 16:25:03,452 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.062) 0:00:06.036 ******* 2025-12-12 16:25:03,452 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:03 +0000 (0:00:00.062) 0:00:06.035 ******* 2025-12-12 16:25:04,407 p=32453 u=zuul n=ansible | changed: [localhost] => (item=openstack) 2025-12-12 16:25:05,092 p=32453 u=zuul n=ansible | changed: [localhost] => (item=openstack-operators) 2025-12-12 16:25:05,110 p=32453 u=zuul n=ansible | TASK [openshift_setup : Get internal OpenShift registry route kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, kind=Route, name=default-route, namespace=openshift-image-registry] *** 2025-12-12 16:25:05,110 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:05 +0000 (0:00:01.658) 0:00:07.695 ******* 2025-12-12 16:25:05,110 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:05 +0000 (0:00:01.658) 0:00:07.694 ******* 2025-12-12 16:25:06,268 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:06,280 p=32453 u=zuul n=ansible | TASK [openshift_setup : Allow anonymous image-pulls in CRC registry for targeted namespaces state=present, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'kind': 'RoleBinding', 'apiVersion': 'rbac.authorization.k8s.io/v1', 'metadata': {'name': 'system:image-puller', 'namespace': '{{ item }}'}, 'subjects': [{'kind': 'User', 'name': 'system:anonymous'}, {'kind': 'User', 'name': 'system:unauthenticated'}], 'roleRef': {'kind': 'ClusterRole', 'name': 'system:image-puller'}}] *** 2025-12-12 16:25:06,281 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:06 +0000 (0:00:01.170) 0:00:08.865 ******* 2025-12-12 16:25:06,281 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:06 +0000 (0:00:01.170) 0:00:08.864 ******* 2025-12-12 16:25:07,027 p=32453 u=zuul n=ansible | changed: [localhost] => (item=openstack) 2025-12-12 16:25:07,763 p=32453 u=zuul n=ansible | changed: [localhost] => (item=openstack-operators) 2025-12-12 16:25:07,781 p=32453 u=zuul n=ansible | TASK [openshift_setup : Wait for the image registry to be ready kind=Deployment, name=image-registry, namespace=openshift-image-registry, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, wait=True, wait_sleep=10, 
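The namespace-creation and image-puller tasks above are done through kubernetes.core modules; a sketch of the equivalent plain oc commands, using the same namespaces and subjects as logged, would be:
    for ns in openstack openstack-operators; do
      oc create namespace "$ns" --dry-run=client -o yaml | oc apply -f -
      # allow anonymous/unauthenticated image pulls from the internal registry in this namespace
      oc -n "$ns" create rolebinding system:image-puller \
         --clusterrole=system:image-puller \
         --user=system:anonymous --user=system:unauthenticated
    done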
wait_timeout=600, wait_condition={'type': 'Available', 'status': 'True'}] *** 2025-12-12 16:25:07,781 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:07 +0000 (0:00:01.500) 0:00:10.366 ******* 2025-12-12 16:25:07,782 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:07 +0000 (0:00:01.500) 0:00:10.365 ******* 2025-12-12 16:25:08,718 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:08,729 p=32453 u=zuul n=ansible | TASK [openshift_setup : Login into OpenShift internal registry output_dir={{ cifmw_openshift_setup_basedir }}/artifacts, script=podman login -u {{ cifmw_openshift_user }} -p {{ cifmw_openshift_token }} {%- if cifmw_openshift_setup_skip_internal_registry_tls_verify|bool %} --tls-verify=false {%- endif %} {{ cifmw_openshift_setup_registry_default_route.resources[0].spec.host }}] *** 2025-12-12 16:25:08,729 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:08 +0000 (0:00:00.947) 0:00:11.313 ******* 2025-12-12 16:25:08,729 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:08 +0000 (0:00:00.947) 0:00:11.313 ******* 2025-12-12 16:25:08,783 p=32453 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_001_login_into_openshift_internal.log 2025-12-12 16:25:08,970 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:08,980 p=32453 u=zuul n=ansible | TASK [Ensure we have custom CA installed on host role=install_ca] ************** 2025-12-12 16:25:08,980 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:08 +0000 (0:00:00.250) 0:00:11.564 ******* 2025-12-12 16:25:08,980 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:08 +0000 (0:00:00.250) 0:00:11.563 ******* 2025-12-12 16:25:09,011 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,024 p=32453 u=zuul n=ansible | TASK [openshift_setup : Update ca bundle _raw_params=update-ca-trust extract] *** 2025-12-12 16:25:09,024 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.044) 0:00:11.609 ******* 2025-12-12 16:25:09,024 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.044) 0:00:11.608 ******* 2025-12-12 16:25:09,049 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,060 p=32453 u=zuul n=ansible | TASK [openshift_setup : Slurp CAs file src={{ cifmw_openshift_setup_ca_bundle_path }}] *** 2025-12-12 16:25:09,060 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.036) 0:00:11.645 ******* 2025-12-12 16:25:09,060 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.036) 0:00:11.644 ******* 2025-12-12 16:25:09,114 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,124 p=32453 u=zuul n=ansible | TASK [openshift_setup : Create config map with registry CAs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'v1', 'kind': 'ConfigMap', 'metadata': {'namespace': 'openshift-config', 'name': 'registry-cas'}, 'data': '{{ _config_map_data | items2dict }}'}] *** 2025-12-12 16:25:09,124 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.064) 0:00:11.709 ******* 2025-12-12 16:25:09,125 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.064) 0:00:11.708 ******* 2025-12-12 16:25:09,147 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,156 p=32453 u=zuul n=ansible | TASK [openshift_setup : Install Red Hat CA for pulling images 
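The registry login task above resolves the default-route host of the internal image registry and feeds the current user and token to podman; roughly equivalent, as a sketch with TLS verification relaxed as in the logged script:
    REGISTRY=$(oc -n openshift-image-registry get route default-route -o jsonpath='{.spec.host}')
    podman login -u "$(oc whoami)" -p "$(oc whoami -t)" --tls-verify=false "$REGISTRY"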
from internal registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'additionalTrustedCA': {'name': 'registry-cas'}}}] *** 2025-12-12 16:25:09,157 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.032) 0:00:11.741 ******* 2025-12-12 16:25:09,157 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.032) 0:00:11.740 ******* 2025-12-12 16:25:09,178 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,188 p=32453 u=zuul n=ansible | TASK [openshift_setup : Add insecure registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'registrySources': {'insecureRegistries': ['{{ cifmw_update_containers_registry }}'], 'allowedRegistries': '{{ all_registries }}'}}}] *** 2025-12-12 16:25:09,188 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.031) 0:00:11.772 ******* 2025-12-12 16:25:09,188 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.031) 0:00:11.771 ******* 2025-12-12 16:25:09,208 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,218 p=32453 u=zuul n=ansible | TASK [openshift_setup : Create a ICSP with repository digest mirrors kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'operator.openshift.io/v1alpha1', 'kind': 'ImageContentSourcePolicy', 'metadata': {'name': 'registry-digest-mirrors'}, 'spec': {'repositoryDigestMirrors': '{{ cifmw_openshift_setup_digest_mirrors }}'}}] *** 2025-12-12 16:25:09,218 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.030) 0:00:11.802 ******* 2025-12-12 16:25:09,218 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.030) 0:00:11.801 ******* 2025-12-12 16:25:09,243 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:09,256 p=32453 u=zuul n=ansible | TASK [openshift_setup : Gather network.operator info kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=operator.openshift.io/v1, kind=Network, name=cluster] *** 2025-12-12 16:25:09,257 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.038) 0:00:11.841 ******* 2025-12-12 16:25:09,257 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:09 +0000 (0:00:00.038) 0:00:11.840 ******* 2025-12-12 16:25:10,059 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:10,073 p=32453 u=zuul n=ansible | TASK [openshift_setup : Patch network operator api_version=operator.openshift.io/v1, kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Network, name=cluster, persist_config=True, patch=[{'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/routingViaHost', 'value': True, 'op': 'replace'}, {'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/ipForwarding', 'value': 'Global', 'op': 'replace'}]] *** 2025-12-12 16:25:10,073 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:10 +0000 
(0:00:00.816) 0:00:12.658 ******* 2025-12-12 16:25:10,073 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:10 +0000 (0:00:00.816) 0:00:12.657 ******* 2025-12-12 16:25:11,085 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:11,097 p=32453 u=zuul n=ansible | TASK [openshift_setup : Patch samples registry configuration kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=samples.operator.openshift.io/v1, kind=Config, name=cluster, patch=[{'op': 'replace', 'path': '/spec/samplesRegistry', 'value': 'registry.redhat.io'}]] *** 2025-12-12 16:25:11,097 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:01.023) 0:00:13.682 ******* 2025-12-12 16:25:11,097 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:01.023) 0:00:13.681 ******* 2025-12-12 16:25:11,815 p=32453 u=zuul n=ansible | changed: [localhost] 2025-12-12 16:25:11,828 p=32453 u=zuul n=ansible | TASK [openshift_setup : Delete the pods from openshift-marketplace namespace kind=Pod, state=absent, delete_all=True, kubeconfig={{ cifmw_openshift_kubeconfig }}, namespace=openshift-marketplace] *** 2025-12-12 16:25:11,828 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.731) 0:00:14.413 ******* 2025-12-12 16:25:11,829 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.731) 0:00:14.412 ******* 2025-12-12 16:25:11,844 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:11,865 p=32453 u=zuul n=ansible | TASK [openshift_setup : Wait for openshift-marketplace pods to be running _raw_params=oc wait pod --all --for=condition=Ready -n openshift-marketplace --timeout=1m] *** 2025-12-12 16:25:11,865 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.036) 0:00:14.450 ******* 2025-12-12 16:25:11,865 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.036) 0:00:14.449 ******* 2025-12-12 16:25:11,896 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:11,944 p=32453 u=zuul n=ansible | TASK [Deploy Observability operator. 
name=openshift_obs] *********************** 2025-12-12 16:25:11,944 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.078) 0:00:14.528 ******* 2025-12-12 16:25:11,944 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.078) 0:00:14.527 ******* 2025-12-12 16:25:11,976 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:11,989 p=32453 u=zuul n=ansible | TASK [Deploy Metal3 BMHs name=deploy_bmh] ************************************** 2025-12-12 16:25:11,989 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.045) 0:00:14.574 ******* 2025-12-12 16:25:11,989 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:11 +0000 (0:00:00.045) 0:00:14.573 ******* 2025-12-12 16:25:12,020 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,034 p=32453 u=zuul n=ansible | TASK [Install certmanager operator role name=cert_manager] ********************* 2025-12-12 16:25:12,034 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.044) 0:00:14.618 ******* 2025-12-12 16:25:12,034 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.044) 0:00:14.618 ******* 2025-12-12 16:25:12,067 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,078 p=32453 u=zuul n=ansible | TASK [Configure hosts networking using nmstate name=ci_nmstate] **************** 2025-12-12 16:25:12,079 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.044) 0:00:14.663 ******* 2025-12-12 16:25:12,079 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.044) 0:00:14.662 ******* 2025-12-12 16:25:12,099 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,111 p=32453 u=zuul n=ansible | TASK [Configure multus networks name=ci_multus] ******************************** 2025-12-12 16:25:12,111 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.032) 0:00:14.696 ******* 2025-12-12 16:25:12,112 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.032) 0:00:14.695 ******* 2025-12-12 16:25:12,137 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,150 p=32453 u=zuul n=ansible | TASK [Deploy Sushy Emulator service pod name=sushy_emulator] ******************* 2025-12-12 16:25:12,150 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.038) 0:00:14.734 ******* 2025-12-12 16:25:12,150 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.038) 0:00:14.734 ******* 2025-12-12 16:25:12,175 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,191 p=32453 u=zuul n=ansible | TASK [Setup Libvirt on controller name=libvirt_manager] ************************ 2025-12-12 16:25:12,191 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.041) 0:00:14.776 ******* 2025-12-12 16:25:12,191 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.041) 0:00:14.775 ******* 2025-12-12 16:25:12,223 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,234 p=32453 u=zuul n=ansible | TASK [Prepare container package builder name=pkg_build] ************************ 2025-12-12 16:25:12,235 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.043) 0:00:14.819 ******* 2025-12-12 16:25:12,235 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.043) 0:00:14.818 ******* 2025-12-12 16:25:12,262 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 
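The two cluster patches applied earlier in this play (network operator and samples registry) use the values shown in the task parameters; as an oc-level sketch of the same changes:
    # gatewayConfig: route traffic via the host and enable global IP forwarding
    oc patch networks.operator.openshift.io cluster --type=json -p='[
      {"op": "replace", "path": "/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/routingViaHost", "value": true},
      {"op": "replace", "path": "/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/ipForwarding", "value": "Global"}
    ]'
    # point the samples operator at registry.redhat.io
    oc patch configs.samples.operator.openshift.io cluster --type=json \
       -p='[{"op": "replace", "path": "/spec/samplesRegistry", "value": "registry.redhat.io"}]'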
16:25:12,274 p=32453 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-12-12 16:25:12,275 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.039) 0:00:14.859 ******* 2025-12-12 16:25:12,275 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.039) 0:00:14.858 ******* 2025-12-12 16:25:12,333 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:12,348 p=32453 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-12-12 16:25:12,348 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.073) 0:00:14.933 ******* 2025-12-12 16:25:12,348 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.073) 0:00:14.932 ******* 2025-12-12 16:25:12,420 p=32453 u=zuul n=ansible | ok: [localhost] 2025-12-12 16:25:12,431 p=32453 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_infra _raw_params={{ hook.type }}.yml] *** 2025-12-12 16:25:12,431 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.083) 0:00:15.016 ******* 2025-12-12 16:25:12,431 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.082) 0:00:15.015 ******* 2025-12-12 16:25:12,526 p=32453 u=zuul n=ansible | skipping: [localhost] 2025-12-12 16:25:12,601 p=32453 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2025-12-12 16:25:12,601 p=32453 u=zuul n=ansible | localhost : ok=35 changed=12 unreachable=0 failed=0 skipped=34 rescued=0 ignored=0 2025-12-12 16:25:12,601 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.170) 0:00:15.186 ******* 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | =============================================================================== 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Create required namespaces ---------------------------- 1.66s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Allow anonymous image-pulls in CRC registry for targeted namespaces --- 1.50s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Get internal OpenShift registry route ----------------- 1.17s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Patch network operator -------------------------------- 1.02s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Wait for the image registry to be ready --------------- 0.95s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Gather network.operator info -------------------------- 0.82s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Patch samples registry configuration ------------------ 0.73s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Create the openshift_login parameters file ------------ 0.58s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch OpenShift token --------------------------------- 0.49s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch new OpenShift access token ---------------------- 0.48s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Append the 
KUBECONFIG to the install yamls parameters --- 0.47s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Ensure output directory exists ------------------------ 0.42s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch OpenShift current user -------------------------- 0.34s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Read the install yamls parameters file ---------------- 0.33s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch OpenShift API URL ------------------------------- 0.33s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Fetch OpenShift kubeconfig context -------------------- 0.32s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | networking_mapper : Check for Networking Environment Definition file existence --- 0.31s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Login into OpenShift internal registry ---------------- 0.25s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup : Ensure output directory exists ------------------------ 0.22s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login : Check if kubeconfig exists ---------------------------- 0.18s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | Friday 12 December 2025 16:25:12 +0000 (0:00:00.171) 0:00:15.186 ******* 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | =============================================================================== 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_setup --------------------------------------------------------- 8.77s 2025-12-12 16:25:12,602 p=32453 u=zuul n=ansible | openshift_login --------------------------------------------------------- 4.52s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | run_hook ---------------------------------------------------------------- 0.63s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | ansible.builtin.include_role -------------------------------------------- 0.55s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | networking_mapper ------------------------------------------------------- 0.44s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | ansible.builtin.include_vars -------------------------------------------- 0.21s 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2025-12-12 16:25:12,603 p=32453 u=zuul n=ansible | total ------------------------------------------------------------------ 15.13s 2025-12-12 16:25:31,617 p=33051 u=zuul n=ansible | Starting galaxy collection install process 2025-12-12 16:25:31,637 p=33051 u=zuul n=ansible | Process install dependency map 2025-12-12 16:25:45,294 p=33051 u=zuul n=ansible | Starting collection install process 2025-12-12 16:25:45,294 p=33051 u=zuul n=ansible | Installing 'cifmw.general:1.0.0+b9f05e2b' to '/home/zuul/.ansible/collections/ansible_collections/cifmw/general' 2025-12-12 16:25:45,929 p=33051 u=zuul n=ansible | Created collection for cifmw.general:1.0.0+b9f05e2b at /home/zuul/.ansible/collections/ansible_collections/cifmw/general 2025-12-12 16:25:45,929 p=33051 u=zuul n=ansible | cifmw.general:1.0.0+b9f05e2b was installed successfully 2025-12-12 16:25:45,929 p=33051 u=zuul n=ansible | Installing 'containers.podman:1.16.2' to '/home/zuul/.ansible/collections/ansible_collections/containers/podman' 2025-12-12 16:25:46,006 p=33051 u=zuul n=ansible | Created collection for containers.podman:1.16.2 at 
/home/zuul/.ansible/collections/ansible_collections/containers/podman 2025-12-12 16:25:46,006 p=33051 u=zuul n=ansible | containers.podman:1.16.2 was installed successfully 2025-12-12 16:25:46,006 p=33051 u=zuul n=ansible | Installing 'community.general:10.0.1' to '/home/zuul/.ansible/collections/ansible_collections/community/general' 2025-12-12 16:25:46,994 p=33051 u=zuul n=ansible | Created collection for community.general:10.0.1 at /home/zuul/.ansible/collections/ansible_collections/community/general 2025-12-12 16:25:46,994 p=33051 u=zuul n=ansible | community.general:10.0.1 was installed successfully 2025-12-12 16:25:46,995 p=33051 u=zuul n=ansible | Installing 'ansible.posix:1.6.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/posix' 2025-12-12 16:25:47,068 p=33051 u=zuul n=ansible | Created collection for ansible.posix:1.6.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/posix 2025-12-12 16:25:47,068 p=33051 u=zuul n=ansible | ansible.posix:1.6.2 was installed successfully 2025-12-12 16:25:47,068 p=33051 u=zuul n=ansible | Installing 'ansible.utils:5.1.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/utils' 2025-12-12 16:25:47,197 p=33051 u=zuul n=ansible | Created collection for ansible.utils:5.1.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/utils 2025-12-12 16:25:47,198 p=33051 u=zuul n=ansible | ansible.utils:5.1.2 was installed successfully 2025-12-12 16:25:47,198 p=33051 u=zuul n=ansible | Installing 'community.libvirt:1.3.0' to '/home/zuul/.ansible/collections/ansible_collections/community/libvirt' 2025-12-12 16:25:47,229 p=33051 u=zuul n=ansible | Created collection for community.libvirt:1.3.0 at /home/zuul/.ansible/collections/ansible_collections/community/libvirt 2025-12-12 16:25:47,229 p=33051 u=zuul n=ansible | community.libvirt:1.3.0 was installed successfully 2025-12-12 16:25:47,229 p=33051 u=zuul n=ansible | Installing 'community.crypto:2.22.3' to '/home/zuul/.ansible/collections/ansible_collections/community/crypto' 2025-12-12 16:25:47,430 p=33051 u=zuul n=ansible | Created collection for community.crypto:2.22.3 at /home/zuul/.ansible/collections/ansible_collections/community/crypto 2025-12-12 16:25:47,430 p=33051 u=zuul n=ansible | community.crypto:2.22.3 was installed successfully 2025-12-12 16:25:47,431 p=33051 u=zuul n=ansible | Installing 'kubernetes.core:5.0.0' to '/home/zuul/.ansible/collections/ansible_collections/kubernetes/core' 2025-12-12 16:25:47,614 p=33051 u=zuul n=ansible | Created collection for kubernetes.core:5.0.0 at /home/zuul/.ansible/collections/ansible_collections/kubernetes/core 2025-12-12 16:25:47,614 p=33051 u=zuul n=ansible | kubernetes.core:5.0.0 was installed successfully 2025-12-12 16:25:47,614 p=33051 u=zuul n=ansible | Installing 'ansible.netcommon:7.1.0' to '/home/zuul/.ansible/collections/ansible_collections/ansible/netcommon' 2025-12-12 16:25:47,708 p=33051 u=zuul n=ansible | Created collection for ansible.netcommon:7.1.0 at /home/zuul/.ansible/collections/ansible_collections/ansible/netcommon 2025-12-12 16:25:47,708 p=33051 u=zuul n=ansible | ansible.netcommon:7.1.0 was installed successfully 2025-12-12 16:25:47,708 p=33051 u=zuul n=ansible | Installing 'openstack.config_template:2.1.1' to '/home/zuul/.ansible/collections/ansible_collections/openstack/config_template' 2025-12-12 16:25:47,736 p=33051 u=zuul n=ansible | Created collection for openstack.config_template:2.1.1 at /home/zuul/.ansible/collections/ansible_collections/openstack/config_template 2025-12-12 
16:25:47,736 p=33051 u=zuul n=ansible | openstack.config_template:2.1.1 was installed successfully 2025-12-12 16:25:47,736 p=33051 u=zuul n=ansible | Installing 'junipernetworks.junos:9.1.0' to '/home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos' 2025-12-12 16:25:48,052 p=33051 u=zuul n=ansible | Created collection for junipernetworks.junos:9.1.0 at /home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos 2025-12-12 16:25:48,052 p=33051 u=zuul n=ansible | junipernetworks.junos:9.1.0 was installed successfully 2025-12-12 16:25:48,052 p=33051 u=zuul n=ansible | Installing 'cisco.ios:9.0.3' to '/home/zuul/.ansible/collections/ansible_collections/cisco/ios' 2025-12-12 16:25:48,400 p=33051 u=zuul n=ansible | Created collection for cisco.ios:9.0.3 at /home/zuul/.ansible/collections/ansible_collections/cisco/ios 2025-12-12 16:25:48,400 p=33051 u=zuul n=ansible | cisco.ios:9.0.3 was installed successfully 2025-12-12 16:25:48,400 p=33051 u=zuul n=ansible | Installing 'mellanox.onyx:1.0.0' to '/home/zuul/.ansible/collections/ansible_collections/mellanox/onyx' 2025-12-12 16:25:48,448 p=33051 u=zuul n=ansible | Created collection for mellanox.onyx:1.0.0 at /home/zuul/.ansible/collections/ansible_collections/mellanox/onyx 2025-12-12 16:25:48,449 p=33051 u=zuul n=ansible | mellanox.onyx:1.0.0 was installed successfully 2025-12-12 16:25:48,449 p=33051 u=zuul n=ansible | Installing 'community.okd:4.0.0' to '/home/zuul/.ansible/collections/ansible_collections/community/okd' 2025-12-12 16:25:48,493 p=33051 u=zuul n=ansible | Created collection for community.okd:4.0.0 at /home/zuul/.ansible/collections/ansible_collections/community/okd 2025-12-12 16:25:48,493 p=33051 u=zuul n=ansible | community.okd:4.0.0 was installed successfully 2025-12-12 16:25:48,493 p=33051 u=zuul n=ansible | Installing '@NAMESPACE@.@NAME@:3.1.4' to '/home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@' 2025-12-12 16:25:48,624 p=33051 u=zuul n=ansible | Created collection for @NAMESPACE@.@NAME@:3.1.4 at /home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@ 2025-12-12 16:25:48,625 p=33051 u=zuul n=ansible | @NAMESPACE@.@NAME@:3.1.4 was installed successfully home/zuul/zuul-output/logs/ci-framework-data/artifacts/0000755000175000017500000000000015117043104022337 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci-env/0000755000175000017500000000000015117043064023525 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci-env/networking-info.yml0000644000175000017500000000231215117042566027374 0ustar zuulzuulcrc_ci_bootstrap_networks_out: controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:e6:21:77 mtu: 1500 crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:a8:13:c5 mtu: 1500 internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:b2:c5:bf mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:c0:69:a0 mtu: '1496' parent_iface: ens7 vlan: 21 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:e1:5c:e6 mtu: '1496' parent_iface: ens7 vlan: 22 crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 home/zuul/zuul-output/logs/ci-framework-data/artifacts/installed-packages.yml0000644000175000017500000022647715117042602026641 0ustar 
zuulzuulNetworkManager: - arch: x86_64 epoch: 1 name: NetworkManager release: 1.el9 source: rpm version: 1.54.2 NetworkManager-libnm: - arch: x86_64 epoch: 1 name: NetworkManager-libnm release: 1.el9 source: rpm version: 1.54.2 NetworkManager-team: - arch: x86_64 epoch: 1 name: NetworkManager-team release: 1.el9 source: rpm version: 1.54.2 NetworkManager-tui: - arch: x86_64 epoch: 1 name: NetworkManager-tui release: 1.el9 source: rpm version: 1.54.2 aardvark-dns: - arch: x86_64 epoch: 2 name: aardvark-dns release: 1.el9 source: rpm version: 1.17.0 abattis-cantarell-fonts: - arch: noarch epoch: null name: abattis-cantarell-fonts release: 4.el9 source: rpm version: '0.301' acl: - arch: x86_64 epoch: null name: acl release: 4.el9 source: rpm version: 2.3.1 adobe-source-code-pro-fonts: - arch: noarch epoch: null name: adobe-source-code-pro-fonts release: 12.el9.1 source: rpm version: 2.030.1.050 alternatives: - arch: x86_64 epoch: null name: alternatives release: 2.el9 source: rpm version: '1.24' annobin: - arch: x86_64 epoch: null name: annobin release: 1.el9 source: rpm version: '12.98' ansible-core: - arch: x86_64 epoch: 1 name: ansible-core release: 2.el9 source: rpm version: 2.14.18 attr: - arch: x86_64 epoch: null name: attr release: 3.el9 source: rpm version: 2.5.1 audit: - arch: x86_64 epoch: null name: audit release: 7.el9 source: rpm version: 3.1.5 audit-libs: - arch: x86_64 epoch: null name: audit-libs release: 7.el9 source: rpm version: 3.1.5 authselect: - arch: x86_64 epoch: null name: authselect release: 3.el9 source: rpm version: 1.2.6 authselect-compat: - arch: x86_64 epoch: null name: authselect-compat release: 3.el9 source: rpm version: 1.2.6 authselect-libs: - arch: x86_64 epoch: null name: authselect-libs release: 3.el9 source: rpm version: 1.2.6 avahi-libs: - arch: x86_64 epoch: null name: avahi-libs release: 23.el9 source: rpm version: '0.8' basesystem: - arch: noarch epoch: null name: basesystem release: 13.el9 source: rpm version: '11' bash: - arch: x86_64 epoch: null name: bash release: 9.el9 source: rpm version: 5.1.8 bash-completion: - arch: noarch epoch: 1 name: bash-completion release: 5.el9 source: rpm version: '2.11' binutils: - arch: x86_64 epoch: null name: binutils release: 69.el9 source: rpm version: 2.35.2 binutils-gold: - arch: x86_64 epoch: null name: binutils-gold release: 69.el9 source: rpm version: 2.35.2 buildah: - arch: x86_64 epoch: 2 name: buildah release: 1.el9 source: rpm version: 1.41.3 bzip2: - arch: x86_64 epoch: null name: bzip2 release: 10.el9 source: rpm version: 1.0.8 bzip2-libs: - arch: x86_64 epoch: null name: bzip2-libs release: 10.el9 source: rpm version: 1.0.8 c-ares: - arch: x86_64 epoch: null name: c-ares release: 2.el9 source: rpm version: 1.19.1 ca-certificates: - arch: noarch epoch: null name: ca-certificates release: 91.el9 source: rpm version: 2025.2.80_v9.0.305 centos-gpg-keys: - arch: noarch epoch: null name: centos-gpg-keys release: 30.el9 source: rpm version: '9.0' centos-logos: - arch: x86_64 epoch: null name: centos-logos release: 3.el9 source: rpm version: '90.8' centos-stream-release: - arch: noarch epoch: null name: centos-stream-release release: 30.el9 source: rpm version: '9.0' centos-stream-repos: - arch: noarch epoch: null name: centos-stream-repos release: 30.el9 source: rpm version: '9.0' checkpolicy: - arch: x86_64 epoch: null name: checkpolicy release: 1.el9 source: rpm version: '3.6' chrony: - arch: x86_64 epoch: null name: chrony release: 1.el9 source: rpm version: '4.8' cloud-init: - arch: noarch epoch: null 
name: cloud-init release: 7.el9 source: rpm version: '24.4' cloud-utils-growpart: - arch: x86_64 epoch: null name: cloud-utils-growpart release: 1.el9 source: rpm version: '0.33' cmake-filesystem: - arch: x86_64 epoch: null name: cmake-filesystem release: 3.el9 source: rpm version: 3.31.8 cockpit-bridge: - arch: noarch epoch: null name: cockpit-bridge release: 1.el9 source: rpm version: '348' cockpit-system: - arch: noarch epoch: null name: cockpit-system release: 1.el9 source: rpm version: '348' cockpit-ws: - arch: x86_64 epoch: null name: cockpit-ws release: 1.el9 source: rpm version: '348' cockpit-ws-selinux: - arch: x86_64 epoch: null name: cockpit-ws-selinux release: 1.el9 source: rpm version: '348' conmon: - arch: x86_64 epoch: 3 name: conmon release: 1.el9 source: rpm version: 2.1.13 container-selinux: - arch: noarch epoch: 4 name: container-selinux release: 1.el9 source: rpm version: 2.242.0 containers-common: - arch: x86_64 epoch: 4 name: containers-common release: 134.el9 source: rpm version: '1' containers-common-extra: - arch: x86_64 epoch: 4 name: containers-common-extra release: 134.el9 source: rpm version: '1' coreutils: - arch: x86_64 epoch: null name: coreutils release: 39.el9 source: rpm version: '8.32' coreutils-common: - arch: x86_64 epoch: null name: coreutils-common release: 39.el9 source: rpm version: '8.32' cpio: - arch: x86_64 epoch: null name: cpio release: 16.el9 source: rpm version: '2.13' cpp: - arch: x86_64 epoch: null name: cpp release: 14.el9 source: rpm version: 11.5.0 cracklib: - arch: x86_64 epoch: null name: cracklib release: 27.el9 source: rpm version: 2.9.6 cracklib-dicts: - arch: x86_64 epoch: null name: cracklib-dicts release: 27.el9 source: rpm version: 2.9.6 createrepo_c: - arch: x86_64 epoch: null name: createrepo_c release: 4.el9 source: rpm version: 0.20.1 createrepo_c-libs: - arch: x86_64 epoch: null name: createrepo_c-libs release: 4.el9 source: rpm version: 0.20.1 criu: - arch: x86_64 epoch: null name: criu release: 3.el9 source: rpm version: '3.19' criu-libs: - arch: x86_64 epoch: null name: criu-libs release: 3.el9 source: rpm version: '3.19' cronie: - arch: x86_64 epoch: null name: cronie release: 14.el9 source: rpm version: 1.5.7 cronie-anacron: - arch: x86_64 epoch: null name: cronie-anacron release: 14.el9 source: rpm version: 1.5.7 crontabs: - arch: noarch epoch: null name: crontabs release: 26.20190603git.el9 source: rpm version: '1.11' crun: - arch: x86_64 epoch: null name: crun release: 1.el9 source: rpm version: '1.24' crypto-policies: - arch: noarch epoch: null name: crypto-policies release: 1.gite9c4db2.el9 source: rpm version: '20251126' crypto-policies-scripts: - arch: noarch epoch: null name: crypto-policies-scripts release: 1.gite9c4db2.el9 source: rpm version: '20251126' cryptsetup-libs: - arch: x86_64 epoch: null name: cryptsetup-libs release: 2.el9 source: rpm version: 2.8.1 curl: - arch: x86_64 epoch: null name: curl release: 38.el9 source: rpm version: 7.76.1 cyrus-sasl: - arch: x86_64 epoch: null name: cyrus-sasl release: 21.el9 source: rpm version: 2.1.27 cyrus-sasl-devel: - arch: x86_64 epoch: null name: cyrus-sasl-devel release: 21.el9 source: rpm version: 2.1.27 cyrus-sasl-gssapi: - arch: x86_64 epoch: null name: cyrus-sasl-gssapi release: 21.el9 source: rpm version: 2.1.27 cyrus-sasl-lib: - arch: x86_64 epoch: null name: cyrus-sasl-lib release: 21.el9 source: rpm version: 2.1.27 dbus: - arch: x86_64 epoch: 1 name: dbus release: 8.el9 source: rpm version: 1.12.20 dbus-broker: - arch: x86_64 epoch: null name: 
dbus-broker release: 7.el9 source: rpm version: '28' dbus-common: - arch: noarch epoch: 1 name: dbus-common release: 8.el9 source: rpm version: 1.12.20 dbus-libs: - arch: x86_64 epoch: 1 name: dbus-libs release: 8.el9 source: rpm version: 1.12.20 dbus-tools: - arch: x86_64 epoch: 1 name: dbus-tools release: 8.el9 source: rpm version: 1.12.20 debugedit: - arch: x86_64 epoch: null name: debugedit release: 11.el9 source: rpm version: '5.0' dejavu-sans-fonts: - arch: noarch epoch: null name: dejavu-sans-fonts release: 18.el9 source: rpm version: '2.37' desktop-file-utils: - arch: x86_64 epoch: null name: desktop-file-utils release: 6.el9 source: rpm version: '0.26' device-mapper: - arch: x86_64 epoch: 9 name: device-mapper release: 2.el9 source: rpm version: 1.02.206 device-mapper-libs: - arch: x86_64 epoch: 9 name: device-mapper-libs release: 2.el9 source: rpm version: 1.02.206 dhcp-client: - arch: x86_64 epoch: 12 name: dhcp-client release: 19.b1.el9 source: rpm version: 4.4.2 dhcp-common: - arch: noarch epoch: 12 name: dhcp-common release: 19.b1.el9 source: rpm version: 4.4.2 diffutils: - arch: x86_64 epoch: null name: diffutils release: 12.el9 source: rpm version: '3.7' dnf: - arch: noarch epoch: null name: dnf release: 31.el9 source: rpm version: 4.14.0 dnf-data: - arch: noarch epoch: null name: dnf-data release: 31.el9 source: rpm version: 4.14.0 dnf-plugins-core: - arch: noarch epoch: null name: dnf-plugins-core release: 24.el9 source: rpm version: 4.3.0 dracut: - arch: x86_64 epoch: null name: dracut release: 102.git20250818.el9 source: rpm version: '057' dracut-config-generic: - arch: x86_64 epoch: null name: dracut-config-generic release: 102.git20250818.el9 source: rpm version: '057' dracut-network: - arch: x86_64 epoch: null name: dracut-network release: 102.git20250818.el9 source: rpm version: '057' dracut-squash: - arch: x86_64 epoch: null name: dracut-squash release: 102.git20250818.el9 source: rpm version: '057' dwz: - arch: x86_64 epoch: null name: dwz release: 1.el9 source: rpm version: '0.16' e2fsprogs: - arch: x86_64 epoch: null name: e2fsprogs release: 8.el9 source: rpm version: 1.46.5 e2fsprogs-libs: - arch: x86_64 epoch: null name: e2fsprogs-libs release: 8.el9 source: rpm version: 1.46.5 ed: - arch: x86_64 epoch: null name: ed release: 12.el9 source: rpm version: 1.14.2 efi-srpm-macros: - arch: noarch epoch: null name: efi-srpm-macros release: 4.el9 source: rpm version: '6' elfutils: - arch: x86_64 epoch: null name: elfutils release: 1.el9 source: rpm version: '0.194' elfutils-debuginfod-client: - arch: x86_64 epoch: null name: elfutils-debuginfod-client release: 1.el9 source: rpm version: '0.194' elfutils-default-yama-scope: - arch: noarch epoch: null name: elfutils-default-yama-scope release: 1.el9 source: rpm version: '0.194' elfutils-libelf: - arch: x86_64 epoch: null name: elfutils-libelf release: 1.el9 source: rpm version: '0.194' elfutils-libs: - arch: x86_64 epoch: null name: elfutils-libs release: 1.el9 source: rpm version: '0.194' emacs-filesystem: - arch: noarch epoch: 1 name: emacs-filesystem release: 18.el9 source: rpm version: '27.2' enchant: - arch: x86_64 epoch: 1 name: enchant release: 30.el9 source: rpm version: 1.6.0 ethtool: - arch: x86_64 epoch: 2 name: ethtool release: 2.el9 source: rpm version: '6.15' expat: - arch: x86_64 epoch: null name: expat release: 5.el9 source: rpm version: 2.5.0 expect: - arch: x86_64 epoch: null name: expect release: 16.el9 source: rpm version: 5.45.4 file: - arch: x86_64 epoch: null name: file release: 16.el9 source: 
rpm version: '5.39' file-libs: - arch: x86_64 epoch: null name: file-libs release: 16.el9 source: rpm version: '5.39' filesystem: - arch: x86_64 epoch: null name: filesystem release: 5.el9 source: rpm version: '3.16' findutils: - arch: x86_64 epoch: 1 name: findutils release: 7.el9 source: rpm version: 4.8.0 fonts-filesystem: - arch: noarch epoch: 1 name: fonts-filesystem release: 7.el9.1 source: rpm version: 2.0.5 fonts-srpm-macros: - arch: noarch epoch: 1 name: fonts-srpm-macros release: 7.el9.1 source: rpm version: 2.0.5 fuse-common: - arch: x86_64 epoch: null name: fuse-common release: 9.el9 source: rpm version: 3.10.2 fuse-libs: - arch: x86_64 epoch: null name: fuse-libs release: 17.el9 source: rpm version: 2.9.9 fuse-overlayfs: - arch: x86_64 epoch: null name: fuse-overlayfs release: 1.el9 source: rpm version: '1.16' fuse3: - arch: x86_64 epoch: null name: fuse3 release: 9.el9 source: rpm version: 3.10.2 fuse3-libs: - arch: x86_64 epoch: null name: fuse3-libs release: 9.el9 source: rpm version: 3.10.2 gawk: - arch: x86_64 epoch: null name: gawk release: 6.el9 source: rpm version: 5.1.0 gawk-all-langpacks: - arch: x86_64 epoch: null name: gawk-all-langpacks release: 6.el9 source: rpm version: 5.1.0 gcc: - arch: x86_64 epoch: null name: gcc release: 14.el9 source: rpm version: 11.5.0 gcc-c++: - arch: x86_64 epoch: null name: gcc-c++ release: 14.el9 source: rpm version: 11.5.0 gcc-plugin-annobin: - arch: x86_64 epoch: null name: gcc-plugin-annobin release: 14.el9 source: rpm version: 11.5.0 gdb-minimal: - arch: x86_64 epoch: null name: gdb-minimal release: 2.el9 source: rpm version: '16.3' gdbm-libs: - arch: x86_64 epoch: 1 name: gdbm-libs release: 1.el9 source: rpm version: '1.23' gdisk: - arch: x86_64 epoch: null name: gdisk release: 5.el9 source: rpm version: 1.0.7 gdk-pixbuf2: - arch: x86_64 epoch: null name: gdk-pixbuf2 release: 6.el9 source: rpm version: 2.42.6 geolite2-city: - arch: noarch epoch: null name: geolite2-city release: 6.el9 source: rpm version: '20191217' geolite2-country: - arch: noarch epoch: null name: geolite2-country release: 6.el9 source: rpm version: '20191217' gettext: - arch: x86_64 epoch: null name: gettext release: 8.el9 source: rpm version: '0.21' gettext-libs: - arch: x86_64 epoch: null name: gettext-libs release: 8.el9 source: rpm version: '0.21' ghc-srpm-macros: - arch: noarch epoch: null name: ghc-srpm-macros release: 6.el9 source: rpm version: 1.5.0 git: - arch: x86_64 epoch: null name: git release: 1.el9 source: rpm version: 2.47.3 git-core: - arch: x86_64 epoch: null name: git-core release: 1.el9 source: rpm version: 2.47.3 git-core-doc: - arch: noarch epoch: null name: git-core-doc release: 1.el9 source: rpm version: 2.47.3 glib-networking: - arch: x86_64 epoch: null name: glib-networking release: 3.el9 source: rpm version: 2.68.3 glib2: - arch: x86_64 epoch: null name: glib2 release: 18.el9 source: rpm version: 2.68.4 glibc: - arch: x86_64 epoch: null name: glibc release: 244.el9 source: rpm version: '2.34' glibc-common: - arch: x86_64 epoch: null name: glibc-common release: 244.el9 source: rpm version: '2.34' glibc-devel: - arch: x86_64 epoch: null name: glibc-devel release: 244.el9 source: rpm version: '2.34' glibc-gconv-extra: - arch: x86_64 epoch: null name: glibc-gconv-extra release: 244.el9 source: rpm version: '2.34' glibc-headers: - arch: x86_64 epoch: null name: glibc-headers release: 244.el9 source: rpm version: '2.34' glibc-langpack-en: - arch: x86_64 epoch: null name: glibc-langpack-en release: 244.el9 source: rpm version: '2.34' gmp: 
- arch: x86_64 epoch: 1 name: gmp release: 13.el9 source: rpm version: 6.2.0 gnupg2: - arch: x86_64 epoch: null name: gnupg2 release: 4.el9 source: rpm version: 2.3.3 gnutls: - arch: x86_64 epoch: null name: gnutls release: 1.el9 source: rpm version: 3.8.10 go-srpm-macros: - arch: noarch epoch: null name: go-srpm-macros release: 1.el9 source: rpm version: 3.8.1 gobject-introspection: - arch: x86_64 epoch: null name: gobject-introspection release: 11.el9 source: rpm version: 1.68.0 gpg-pubkey: - arch: null epoch: null name: gpg-pubkey release: 5ccc5b19 source: rpm version: 8483c65d gpgme: - arch: x86_64 epoch: null name: gpgme release: 6.el9 source: rpm version: 1.15.1 grep: - arch: x86_64 epoch: null name: grep release: 5.el9 source: rpm version: '3.6' groff-base: - arch: x86_64 epoch: null name: groff-base release: 10.el9 source: rpm version: 1.22.4 grub2-common: - arch: noarch epoch: 1 name: grub2-common release: 120.el9 source: rpm version: '2.06' grub2-pc: - arch: x86_64 epoch: 1 name: grub2-pc release: 120.el9 source: rpm version: '2.06' grub2-pc-modules: - arch: noarch epoch: 1 name: grub2-pc-modules release: 120.el9 source: rpm version: '2.06' grub2-tools: - arch: x86_64 epoch: 1 name: grub2-tools release: 120.el9 source: rpm version: '2.06' grub2-tools-minimal: - arch: x86_64 epoch: 1 name: grub2-tools-minimal release: 120.el9 source: rpm version: '2.06' grubby: - arch: x86_64 epoch: null name: grubby release: 69.el9 source: rpm version: '8.40' gsettings-desktop-schemas: - arch: x86_64 epoch: null name: gsettings-desktop-schemas release: 8.el9 source: rpm version: '40.0' gssproxy: - arch: x86_64 epoch: null name: gssproxy release: 7.el9 source: rpm version: 0.8.4 gzip: - arch: x86_64 epoch: null name: gzip release: 1.el9 source: rpm version: '1.12' hostname: - arch: x86_64 epoch: null name: hostname release: 6.el9 source: rpm version: '3.23' hunspell: - arch: x86_64 epoch: null name: hunspell release: 11.el9 source: rpm version: 1.7.0 hunspell-en-GB: - arch: noarch epoch: null name: hunspell-en-GB release: 20.el9 source: rpm version: 0.20140811.1 hunspell-en-US: - arch: noarch epoch: null name: hunspell-en-US release: 20.el9 source: rpm version: 0.20140811.1 hunspell-filesystem: - arch: x86_64 epoch: null name: hunspell-filesystem release: 11.el9 source: rpm version: 1.7.0 hwdata: - arch: noarch epoch: null name: hwdata release: 9.20.el9 source: rpm version: '0.348' ima-evm-utils: - arch: x86_64 epoch: null name: ima-evm-utils release: 2.el9 source: rpm version: 1.6.2 info: - arch: x86_64 epoch: null name: info release: 15.el9 source: rpm version: '6.7' inih: - arch: x86_64 epoch: null name: inih release: 6.el9 source: rpm version: '49' initscripts-rename-device: - arch: x86_64 epoch: null name: initscripts-rename-device release: 4.el9 source: rpm version: 10.11.8 initscripts-service: - arch: noarch epoch: null name: initscripts-service release: 4.el9 source: rpm version: 10.11.8 ipcalc: - arch: x86_64 epoch: null name: ipcalc release: 5.el9 source: rpm version: 1.0.0 iproute: - arch: x86_64 epoch: null name: iproute release: 1.el9 source: rpm version: 6.17.0 iproute-tc: - arch: x86_64 epoch: null name: iproute-tc release: 1.el9 source: rpm version: 6.17.0 iptables-libs: - arch: x86_64 epoch: null name: iptables-libs release: 11.el9 source: rpm version: 1.8.10 iptables-nft: - arch: x86_64 epoch: null name: iptables-nft release: 11.el9 source: rpm version: 1.8.10 iptables-nft-services: - arch: noarch epoch: null name: iptables-nft-services release: 11.el9 source: rpm version: 1.8.10 
iputils: - arch: x86_64 epoch: null name: iputils release: 15.el9 source: rpm version: '20210202' irqbalance: - arch: x86_64 epoch: 2 name: irqbalance release: 5.el9 source: rpm version: 1.9.4 jansson: - arch: x86_64 epoch: null name: jansson release: 1.el9 source: rpm version: '2.14' jq: - arch: x86_64 epoch: null name: jq release: 19.el9 source: rpm version: '1.6' json-c: - arch: x86_64 epoch: null name: json-c release: 11.el9 source: rpm version: '0.14' json-glib: - arch: x86_64 epoch: null name: json-glib release: 1.el9 source: rpm version: 1.6.6 kbd: - arch: x86_64 epoch: null name: kbd release: 11.el9 source: rpm version: 2.4.0 kbd-legacy: - arch: noarch epoch: null name: kbd-legacy release: 11.el9 source: rpm version: 2.4.0 kbd-misc: - arch: noarch epoch: null name: kbd-misc release: 11.el9 source: rpm version: 2.4.0 kernel: - arch: x86_64 epoch: null name: kernel release: 648.el9 source: rpm version: 5.14.0 kernel-core: - arch: x86_64 epoch: null name: kernel-core release: 648.el9 source: rpm version: 5.14.0 kernel-headers: - arch: x86_64 epoch: null name: kernel-headers release: 648.el9 source: rpm version: 5.14.0 kernel-modules: - arch: x86_64 epoch: null name: kernel-modules release: 648.el9 source: rpm version: 5.14.0 kernel-modules-core: - arch: x86_64 epoch: null name: kernel-modules-core release: 648.el9 source: rpm version: 5.14.0 kernel-srpm-macros: - arch: noarch epoch: null name: kernel-srpm-macros release: 14.el9 source: rpm version: '1.0' kernel-tools: - arch: x86_64 epoch: null name: kernel-tools release: 648.el9 source: rpm version: 5.14.0 kernel-tools-libs: - arch: x86_64 epoch: null name: kernel-tools-libs release: 648.el9 source: rpm version: 5.14.0 kexec-tools: - arch: x86_64 epoch: null name: kexec-tools release: 12.el9 source: rpm version: 2.0.29 keyutils: - arch: x86_64 epoch: null name: keyutils release: 1.el9 source: rpm version: 1.6.3 keyutils-libs: - arch: x86_64 epoch: null name: keyutils-libs release: 1.el9 source: rpm version: 1.6.3 kmod: - arch: x86_64 epoch: null name: kmod release: 11.el9 source: rpm version: '28' kmod-libs: - arch: x86_64 epoch: null name: kmod-libs release: 11.el9 source: rpm version: '28' kpartx: - arch: x86_64 epoch: null name: kpartx release: 39.el9 source: rpm version: 0.8.7 krb5-libs: - arch: x86_64 epoch: null name: krb5-libs release: 8.el9 source: rpm version: 1.21.1 langpacks-core-en_GB: - arch: noarch epoch: null name: langpacks-core-en_GB release: 16.el9 source: rpm version: '3.0' langpacks-core-font-en: - arch: noarch epoch: null name: langpacks-core-font-en release: 16.el9 source: rpm version: '3.0' langpacks-en_GB: - arch: noarch epoch: null name: langpacks-en_GB release: 16.el9 source: rpm version: '3.0' less: - arch: x86_64 epoch: null name: less release: 6.el9 source: rpm version: '590' libacl: - arch: x86_64 epoch: null name: libacl release: 4.el9 source: rpm version: 2.3.1 libappstream-glib: - arch: x86_64 epoch: null name: libappstream-glib release: 5.el9 source: rpm version: 0.7.18 libarchive: - arch: x86_64 epoch: null name: libarchive release: 6.el9 source: rpm version: 3.5.3 libassuan: - arch: x86_64 epoch: null name: libassuan release: 3.el9 source: rpm version: 2.5.5 libattr: - arch: x86_64 epoch: null name: libattr release: 3.el9 source: rpm version: 2.5.1 libbasicobjects: - arch: x86_64 epoch: null name: libbasicobjects release: 53.el9 source: rpm version: 0.1.1 libblkid: - arch: x86_64 epoch: null name: libblkid release: 21.el9 source: rpm version: 2.37.4 libbpf: - arch: x86_64 epoch: 2 name: libbpf 
release: 2.el9 source: rpm version: 1.5.0 libbrotli: - arch: x86_64 epoch: null name: libbrotli release: 7.el9 source: rpm version: 1.0.9 libcap: - arch: x86_64 epoch: null name: libcap release: 10.el9 source: rpm version: '2.48' libcap-ng: - arch: x86_64 epoch: null name: libcap-ng release: 7.el9 source: rpm version: 0.8.2 libcbor: - arch: x86_64 epoch: null name: libcbor release: 5.el9 source: rpm version: 0.7.0 libcollection: - arch: x86_64 epoch: null name: libcollection release: 53.el9 source: rpm version: 0.7.0 libcom_err: - arch: x86_64 epoch: null name: libcom_err release: 8.el9 source: rpm version: 1.46.5 libcomps: - arch: x86_64 epoch: null name: libcomps release: 1.el9 source: rpm version: 0.1.18 libcurl: - arch: x86_64 epoch: null name: libcurl release: 38.el9 source: rpm version: 7.76.1 libdaemon: - arch: x86_64 epoch: null name: libdaemon release: 23.el9 source: rpm version: '0.14' libdb: - arch: x86_64 epoch: null name: libdb release: 57.el9 source: rpm version: 5.3.28 libdhash: - arch: x86_64 epoch: null name: libdhash release: 53.el9 source: rpm version: 0.5.0 libdnf: - arch: x86_64 epoch: null name: libdnf release: 16.el9 source: rpm version: 0.69.0 libeconf: - arch: x86_64 epoch: null name: libeconf release: 4.el9 source: rpm version: 0.4.1 libedit: - arch: x86_64 epoch: null name: libedit release: 38.20210216cvs.el9 source: rpm version: '3.1' libestr: - arch: x86_64 epoch: null name: libestr release: 4.el9 source: rpm version: 0.1.11 libev: - arch: x86_64 epoch: null name: libev release: 6.el9 source: rpm version: '4.33' libevent: - arch: x86_64 epoch: null name: libevent release: 8.el9 source: rpm version: 2.1.12 libfastjson: - arch: x86_64 epoch: null name: libfastjson release: 5.el9 source: rpm version: 0.99.9 libfdisk: - arch: x86_64 epoch: null name: libfdisk release: 21.el9 source: rpm version: 2.37.4 libffi: - arch: x86_64 epoch: null name: libffi release: 8.el9 source: rpm version: 3.4.2 libffi-devel: - arch: x86_64 epoch: null name: libffi-devel release: 8.el9 source: rpm version: 3.4.2 libfido2: - arch: x86_64 epoch: null name: libfido2 release: 2.el9 source: rpm version: 1.13.0 libgcc: - arch: x86_64 epoch: null name: libgcc release: 14.el9 source: rpm version: 11.5.0 libgcrypt: - arch: x86_64 epoch: null name: libgcrypt release: 11.el9 source: rpm version: 1.10.0 libgomp: - arch: x86_64 epoch: null name: libgomp release: 14.el9 source: rpm version: 11.5.0 libgpg-error: - arch: x86_64 epoch: null name: libgpg-error release: 5.el9 source: rpm version: '1.42' libgpg-error-devel: - arch: x86_64 epoch: null name: libgpg-error-devel release: 5.el9 source: rpm version: '1.42' libibverbs: - arch: x86_64 epoch: null name: libibverbs release: 2.el9 source: rpm version: '57.0' libicu: - arch: x86_64 epoch: null name: libicu release: 10.el9 source: rpm version: '67.1' libidn2: - arch: x86_64 epoch: null name: libidn2 release: 7.el9 source: rpm version: 2.3.0 libini_config: - arch: x86_64 epoch: null name: libini_config release: 53.el9 source: rpm version: 1.3.1 libjpeg-turbo: - arch: x86_64 epoch: null name: libjpeg-turbo release: 7.el9 source: rpm version: 2.0.90 libkcapi: - arch: x86_64 epoch: null name: libkcapi release: 2.el9 source: rpm version: 1.4.0 libkcapi-hmaccalc: - arch: x86_64 epoch: null name: libkcapi-hmaccalc release: 2.el9 source: rpm version: 1.4.0 libksba: - arch: x86_64 epoch: null name: libksba release: 7.el9 source: rpm version: 1.5.1 libldb: - arch: x86_64 epoch: 0 name: libldb release: 1.el9 source: rpm version: 4.23.3 libmaxminddb: - arch: 
x86_64 epoch: null name: libmaxminddb release: 4.el9 source: rpm version: 1.5.2 libmnl: - arch: x86_64 epoch: null name: libmnl release: 16.el9 source: rpm version: 1.0.4 libmodulemd: - arch: x86_64 epoch: null name: libmodulemd release: 2.el9 source: rpm version: 2.13.0 libmount: - arch: x86_64 epoch: null name: libmount release: 21.el9 source: rpm version: 2.37.4 libmpc: - arch: x86_64 epoch: null name: libmpc release: 4.el9 source: rpm version: 1.2.1 libndp: - arch: x86_64 epoch: null name: libndp release: 1.el9 source: rpm version: '1.9' libnet: - arch: x86_64 epoch: null name: libnet release: 7.el9 source: rpm version: '1.2' libnetfilter_conntrack: - arch: x86_64 epoch: null name: libnetfilter_conntrack release: 1.el9 source: rpm version: 1.0.9 libnfnetlink: - arch: x86_64 epoch: null name: libnfnetlink release: 23.el9 source: rpm version: 1.0.1 libnfsidmap: - arch: x86_64 epoch: 1 name: libnfsidmap release: 39.el9 source: rpm version: 2.5.4 libnftnl: - arch: x86_64 epoch: null name: libnftnl release: 4.el9 source: rpm version: 1.2.6 libnghttp2: - arch: x86_64 epoch: null name: libnghttp2 release: 6.el9 source: rpm version: 1.43.0 libnl3: - arch: x86_64 epoch: null name: libnl3 release: 1.el9 source: rpm version: 3.11.0 libnl3-cli: - arch: x86_64 epoch: null name: libnl3-cli release: 1.el9 source: rpm version: 3.11.0 libnsl2: - arch: x86_64 epoch: null name: libnsl2 release: 1.el9 source: rpm version: 2.0.0 libpath_utils: - arch: x86_64 epoch: null name: libpath_utils release: 53.el9 source: rpm version: 0.2.1 libpcap: - arch: x86_64 epoch: 14 name: libpcap release: 4.el9 source: rpm version: 1.10.0 libpipeline: - arch: x86_64 epoch: null name: libpipeline release: 4.el9 source: rpm version: 1.5.3 libpkgconf: - arch: x86_64 epoch: null name: libpkgconf release: 10.el9 source: rpm version: 1.7.3 libpng: - arch: x86_64 epoch: 2 name: libpng release: 12.el9 source: rpm version: 1.6.37 libproxy: - arch: x86_64 epoch: null name: libproxy release: 35.el9 source: rpm version: 0.4.15 libproxy-webkitgtk4: - arch: x86_64 epoch: null name: libproxy-webkitgtk4 release: 35.el9 source: rpm version: 0.4.15 libpsl: - arch: x86_64 epoch: null name: libpsl release: 5.el9 source: rpm version: 0.21.1 libpwquality: - arch: x86_64 epoch: null name: libpwquality release: 8.el9 source: rpm version: 1.4.4 libref_array: - arch: x86_64 epoch: null name: libref_array release: 53.el9 source: rpm version: 0.1.5 librepo: - arch: x86_64 epoch: null name: librepo release: 3.el9 source: rpm version: 1.14.5 libreport-filesystem: - arch: noarch epoch: null name: libreport-filesystem release: 6.el9 source: rpm version: 2.15.2 libseccomp: - arch: x86_64 epoch: null name: libseccomp release: 2.el9 source: rpm version: 2.5.2 libselinux: - arch: x86_64 epoch: null name: libselinux release: 3.el9 source: rpm version: '3.6' libselinux-utils: - arch: x86_64 epoch: null name: libselinux-utils release: 3.el9 source: rpm version: '3.6' libsemanage: - arch: x86_64 epoch: null name: libsemanage release: 5.el9 source: rpm version: '3.6' libsepol: - arch: x86_64 epoch: null name: libsepol release: 3.el9 source: rpm version: '3.6' libsigsegv: - arch: x86_64 epoch: null name: libsigsegv release: 4.el9 source: rpm version: '2.13' libslirp: - arch: x86_64 epoch: null name: libslirp release: 8.el9 source: rpm version: 4.4.0 libsmartcols: - arch: x86_64 epoch: null name: libsmartcols release: 21.el9 source: rpm version: 2.37.4 libsolv: - arch: x86_64 epoch: null name: libsolv release: 3.el9 source: rpm version: 0.7.24 libsoup: - arch: 
x86_64 epoch: null name: libsoup release: 10.el9 source: rpm version: 2.72.0 libss: - arch: x86_64 epoch: null name: libss release: 8.el9 source: rpm version: 1.46.5 libssh: - arch: x86_64 epoch: null name: libssh release: 15.el9 source: rpm version: 0.10.4 libssh-config: - arch: noarch epoch: null name: libssh-config release: 15.el9 source: rpm version: 0.10.4 libsss_certmap: - arch: x86_64 epoch: null name: libsss_certmap release: 5.el9 source: rpm version: 2.9.7 libsss_idmap: - arch: x86_64 epoch: null name: libsss_idmap release: 5.el9 source: rpm version: 2.9.7 libsss_nss_idmap: - arch: x86_64 epoch: null name: libsss_nss_idmap release: 5.el9 source: rpm version: 2.9.7 libsss_sudo: - arch: x86_64 epoch: null name: libsss_sudo release: 5.el9 source: rpm version: 2.9.7 libstdc++: - arch: x86_64 epoch: null name: libstdc++ release: 14.el9 source: rpm version: 11.5.0 libstdc++-devel: - arch: x86_64 epoch: null name: libstdc++-devel release: 14.el9 source: rpm version: 11.5.0 libstemmer: - arch: x86_64 epoch: null name: libstemmer release: 18.585svn.el9 source: rpm version: '0' libsysfs: - arch: x86_64 epoch: null name: libsysfs release: 11.el9 source: rpm version: 2.1.1 libtalloc: - arch: x86_64 epoch: null name: libtalloc release: 1.el9 source: rpm version: 2.4.3 libtasn1: - arch: x86_64 epoch: null name: libtasn1 release: 9.el9 source: rpm version: 4.16.0 libtdb: - arch: x86_64 epoch: null name: libtdb release: 1.el9 source: rpm version: 1.4.14 libteam: - arch: x86_64 epoch: null name: libteam release: 16.el9 source: rpm version: '1.31' libtevent: - arch: x86_64 epoch: null name: libtevent release: 1.el9 source: rpm version: 0.17.1 libtirpc: - arch: x86_64 epoch: null name: libtirpc release: 9.el9 source: rpm version: 1.3.3 libtool-ltdl: - arch: x86_64 epoch: null name: libtool-ltdl release: 46.el9 source: rpm version: 2.4.6 libunistring: - arch: x86_64 epoch: null name: libunistring release: 15.el9 source: rpm version: 0.9.10 liburing: - arch: x86_64 epoch: null name: liburing release: 1.el9 source: rpm version: '2.12' libuser: - arch: x86_64 epoch: null name: libuser release: 17.el9 source: rpm version: '0.63' libutempter: - arch: x86_64 epoch: null name: libutempter release: 6.el9 source: rpm version: 1.2.1 libuuid: - arch: x86_64 epoch: null name: libuuid release: 21.el9 source: rpm version: 2.37.4 libverto: - arch: x86_64 epoch: null name: libverto release: 3.el9 source: rpm version: 0.3.2 libverto-libev: - arch: x86_64 epoch: null name: libverto-libev release: 3.el9 source: rpm version: 0.3.2 libvirt-libs: - arch: x86_64 epoch: null name: libvirt-libs release: 1.el9 source: rpm version: 11.9.0 libwbclient: - arch: x86_64 epoch: 0 name: libwbclient release: 1.el9 source: rpm version: 4.23.3 libxcrypt: - arch: x86_64 epoch: null name: libxcrypt release: 3.el9 source: rpm version: 4.4.18 libxcrypt-compat: - arch: x86_64 epoch: null name: libxcrypt-compat release: 3.el9 source: rpm version: 4.4.18 libxcrypt-devel: - arch: x86_64 epoch: null name: libxcrypt-devel release: 3.el9 source: rpm version: 4.4.18 libxml2: - arch: x86_64 epoch: null name: libxml2 release: 14.el9 source: rpm version: 2.9.13 libxml2-devel: - arch: x86_64 epoch: null name: libxml2-devel release: 14.el9 source: rpm version: 2.9.13 libxslt: - arch: x86_64 epoch: null name: libxslt release: 12.el9 source: rpm version: 1.1.34 libxslt-devel: - arch: x86_64 epoch: null name: libxslt-devel release: 12.el9 source: rpm version: 1.1.34 libyaml: - arch: x86_64 epoch: null name: libyaml release: 7.el9 source: rpm version: 
0.2.5 libzstd: - arch: x86_64 epoch: null name: libzstd release: 1.el9 source: rpm version: 1.5.5 llvm-filesystem: - arch: x86_64 epoch: null name: llvm-filesystem release: 1.el9 source: rpm version: 21.1.3 llvm-libs: - arch: x86_64 epoch: null name: llvm-libs release: 1.el9 source: rpm version: 21.1.3 lmdb-libs: - arch: x86_64 epoch: null name: lmdb-libs release: 3.el9 source: rpm version: 0.9.29 logrotate: - arch: x86_64 epoch: null name: logrotate release: 12.el9 source: rpm version: 3.18.0 lshw: - arch: x86_64 epoch: null name: lshw release: 3.el9 source: rpm version: B.02.20 lsscsi: - arch: x86_64 epoch: null name: lsscsi release: 6.el9 source: rpm version: '0.32' lua-libs: - arch: x86_64 epoch: null name: lua-libs release: 4.el9 source: rpm version: 5.4.4 lua-srpm-macros: - arch: noarch epoch: null name: lua-srpm-macros release: 6.el9 source: rpm version: '1' lz4-libs: - arch: x86_64 epoch: null name: lz4-libs release: 5.el9 source: rpm version: 1.9.3 lzo: - arch: x86_64 epoch: null name: lzo release: 7.el9 source: rpm version: '2.10' make: - arch: x86_64 epoch: 1 name: make release: 8.el9 source: rpm version: '4.3' man-db: - arch: x86_64 epoch: null name: man-db release: 9.el9 source: rpm version: 2.9.3 microcode_ctl: - arch: noarch epoch: 4 name: microcode_ctl release: 1.el9 source: rpm version: '20251111' mpdecimal: - arch: x86_64 epoch: null name: mpdecimal release: 3.el9 source: rpm version: 2.5.1 mpfr: - arch: x86_64 epoch: null name: mpfr release: 7.el9 source: rpm version: 4.1.0 ncurses: - arch: x86_64 epoch: null name: ncurses release: 12.20210508.el9 source: rpm version: '6.2' ncurses-base: - arch: noarch epoch: null name: ncurses-base release: 12.20210508.el9 source: rpm version: '6.2' ncurses-c++-libs: - arch: x86_64 epoch: null name: ncurses-c++-libs release: 12.20210508.el9 source: rpm version: '6.2' ncurses-devel: - arch: x86_64 epoch: null name: ncurses-devel release: 12.20210508.el9 source: rpm version: '6.2' ncurses-libs: - arch: x86_64 epoch: null name: ncurses-libs release: 12.20210508.el9 source: rpm version: '6.2' netavark: - arch: x86_64 epoch: 2 name: netavark release: 1.el9 source: rpm version: 1.16.0 nettle: - arch: x86_64 epoch: null name: nettle release: 1.el9 source: rpm version: 3.10.1 newt: - arch: x86_64 epoch: null name: newt release: 11.el9 source: rpm version: 0.52.21 nfs-utils: - arch: x86_64 epoch: 1 name: nfs-utils release: 39.el9 source: rpm version: 2.5.4 nftables: - arch: x86_64 epoch: 1 name: nftables release: 5.el9 source: rpm version: 1.0.9 npth: - arch: x86_64 epoch: null name: npth release: 8.el9 source: rpm version: '1.6' numactl-libs: - arch: x86_64 epoch: null name: numactl-libs release: 3.el9 source: rpm version: 2.0.19 ocaml-srpm-macros: - arch: noarch epoch: null name: ocaml-srpm-macros release: 6.el9 source: rpm version: '6' oddjob: - arch: x86_64 epoch: null name: oddjob release: 7.el9 source: rpm version: 0.34.7 oddjob-mkhomedir: - arch: x86_64 epoch: null name: oddjob-mkhomedir release: 7.el9 source: rpm version: 0.34.7 oniguruma: - arch: x86_64 epoch: null name: oniguruma release: 1.el9.6 source: rpm version: 6.9.6 openblas-srpm-macros: - arch: noarch epoch: null name: openblas-srpm-macros release: 11.el9 source: rpm version: '2' openldap: - arch: x86_64 epoch: null name: openldap release: 4.el9 source: rpm version: 2.6.8 openldap-devel: - arch: x86_64 epoch: null name: openldap-devel release: 4.el9 source: rpm version: 2.6.8 openssh: - arch: x86_64 epoch: null name: openssh release: 2.el9 source: rpm version: 9.9p1 
openssh-clients: - arch: x86_64 epoch: null name: openssh-clients release: 2.el9 source: rpm version: 9.9p1 openssh-server: - arch: x86_64 epoch: null name: openssh-server release: 2.el9 source: rpm version: 9.9p1 openssl: - arch: x86_64 epoch: 1 name: openssl release: 6.el9 source: rpm version: 3.5.1 openssl-devel: - arch: x86_64 epoch: 1 name: openssl-devel release: 6.el9 source: rpm version: 3.5.1 openssl-fips-provider: - arch: x86_64 epoch: 1 name: openssl-fips-provider release: 6.el9 source: rpm version: 3.5.1 openssl-libs: - arch: x86_64 epoch: 1 name: openssl-libs release: 6.el9 source: rpm version: 3.5.1 os-prober: - arch: x86_64 epoch: null name: os-prober release: 12.el9 source: rpm version: '1.77' p11-kit: - arch: x86_64 epoch: null name: p11-kit release: 1.el9 source: rpm version: 0.25.10 p11-kit-trust: - arch: x86_64 epoch: null name: p11-kit-trust release: 1.el9 source: rpm version: 0.25.10 pam: - arch: x86_64 epoch: null name: pam release: 26.el9 source: rpm version: 1.5.1 parted: - arch: x86_64 epoch: null name: parted release: 3.el9 source: rpm version: '3.5' passt: - arch: x86_64 epoch: null name: passt release: 2.el9 source: rpm version: 0^20250512.g8ec1341 passt-selinux: - arch: noarch epoch: null name: passt-selinux release: 2.el9 source: rpm version: 0^20250512.g8ec1341 passwd: - arch: x86_64 epoch: null name: passwd release: 12.el9 source: rpm version: '0.80' patch: - arch: x86_64 epoch: null name: patch release: 16.el9 source: rpm version: 2.7.6 pciutils-libs: - arch: x86_64 epoch: null name: pciutils-libs release: 7.el9 source: rpm version: 3.7.0 pcre: - arch: x86_64 epoch: null name: pcre release: 4.el9 source: rpm version: '8.44' pcre2: - arch: x86_64 epoch: null name: pcre2 release: 6.el9 source: rpm version: '10.40' pcre2-syntax: - arch: noarch epoch: null name: pcre2-syntax release: 6.el9 source: rpm version: '10.40' perl-AutoLoader: - arch: noarch epoch: 0 name: perl-AutoLoader release: 483.el9 source: rpm version: '5.74' perl-B: - arch: x86_64 epoch: 0 name: perl-B release: 483.el9 source: rpm version: '1.80' perl-Carp: - arch: noarch epoch: null name: perl-Carp release: 460.el9 source: rpm version: '1.50' perl-Class-Struct: - arch: noarch epoch: 0 name: perl-Class-Struct release: 483.el9 source: rpm version: '0.66' perl-Data-Dumper: - arch: x86_64 epoch: null name: perl-Data-Dumper release: 462.el9 source: rpm version: '2.174' perl-Digest: - arch: noarch epoch: null name: perl-Digest release: 4.el9 source: rpm version: '1.19' perl-Digest-MD5: - arch: x86_64 epoch: null name: perl-Digest-MD5 release: 4.el9 source: rpm version: '2.58' perl-DynaLoader: - arch: x86_64 epoch: 0 name: perl-DynaLoader release: 483.el9 source: rpm version: '1.47' perl-Encode: - arch: x86_64 epoch: 4 name: perl-Encode release: 462.el9 source: rpm version: '3.08' perl-Errno: - arch: x86_64 epoch: 0 name: perl-Errno release: 483.el9 source: rpm version: '1.30' perl-Error: - arch: noarch epoch: 1 name: perl-Error release: 7.el9 source: rpm version: '0.17029' perl-Exporter: - arch: noarch epoch: null name: perl-Exporter release: 461.el9 source: rpm version: '5.74' perl-Fcntl: - arch: x86_64 epoch: 0 name: perl-Fcntl release: 483.el9 source: rpm version: '1.13' perl-File-Basename: - arch: noarch epoch: 0 name: perl-File-Basename release: 483.el9 source: rpm version: '2.85' perl-File-Find: - arch: noarch epoch: 0 name: perl-File-Find release: 483.el9 source: rpm version: '1.37' perl-File-Path: - arch: noarch epoch: null name: perl-File-Path release: 4.el9 source: rpm version: '2.18' 
perl-File-Temp: - arch: noarch epoch: 1 name: perl-File-Temp release: 4.el9 source: rpm version: 0.231.100 perl-File-stat: - arch: noarch epoch: 0 name: perl-File-stat release: 483.el9 source: rpm version: '1.09' perl-FileHandle: - arch: noarch epoch: 0 name: perl-FileHandle release: 483.el9 source: rpm version: '2.03' perl-Getopt-Long: - arch: noarch epoch: 1 name: perl-Getopt-Long release: 4.el9 source: rpm version: '2.52' perl-Getopt-Std: - arch: noarch epoch: 0 name: perl-Getopt-Std release: 483.el9 source: rpm version: '1.12' perl-Git: - arch: noarch epoch: null name: perl-Git release: 1.el9 source: rpm version: 2.47.3 perl-HTTP-Tiny: - arch: noarch epoch: null name: perl-HTTP-Tiny release: 462.el9 source: rpm version: '0.076' perl-IO: - arch: x86_64 epoch: 0 name: perl-IO release: 483.el9 source: rpm version: '1.43' perl-IO-Socket-IP: - arch: noarch epoch: null name: perl-IO-Socket-IP release: 5.el9 source: rpm version: '0.41' perl-IO-Socket-SSL: - arch: noarch epoch: null name: perl-IO-Socket-SSL release: 2.el9 source: rpm version: '2.073' perl-IPC-Open3: - arch: noarch epoch: 0 name: perl-IPC-Open3 release: 483.el9 source: rpm version: '1.21' perl-MIME-Base64: - arch: x86_64 epoch: null name: perl-MIME-Base64 release: 4.el9 source: rpm version: '3.16' perl-Mozilla-CA: - arch: noarch epoch: null name: perl-Mozilla-CA release: 6.el9 source: rpm version: '20200520' perl-NDBM_File: - arch: x86_64 epoch: 0 name: perl-NDBM_File release: 483.el9 source: rpm version: '1.15' perl-Net-SSLeay: - arch: x86_64 epoch: null name: perl-Net-SSLeay release: 3.el9 source: rpm version: '1.94' perl-POSIX: - arch: x86_64 epoch: 0 name: perl-POSIX release: 483.el9 source: rpm version: '1.94' perl-PathTools: - arch: x86_64 epoch: null name: perl-PathTools release: 461.el9 source: rpm version: '3.78' perl-Pod-Escapes: - arch: noarch epoch: 1 name: perl-Pod-Escapes release: 460.el9 source: rpm version: '1.07' perl-Pod-Perldoc: - arch: noarch epoch: null name: perl-Pod-Perldoc release: 461.el9 source: rpm version: 3.28.01 perl-Pod-Simple: - arch: noarch epoch: 1 name: perl-Pod-Simple release: 4.el9 source: rpm version: '3.42' perl-Pod-Usage: - arch: noarch epoch: 4 name: perl-Pod-Usage release: 4.el9 source: rpm version: '2.01' perl-Scalar-List-Utils: - arch: x86_64 epoch: 4 name: perl-Scalar-List-Utils release: 462.el9 source: rpm version: '1.56' perl-SelectSaver: - arch: noarch epoch: 0 name: perl-SelectSaver release: 483.el9 source: rpm version: '1.02' perl-Socket: - arch: x86_64 epoch: 4 name: perl-Socket release: 4.el9 source: rpm version: '2.031' perl-Storable: - arch: x86_64 epoch: 1 name: perl-Storable release: 460.el9 source: rpm version: '3.21' perl-Symbol: - arch: noarch epoch: 0 name: perl-Symbol release: 483.el9 source: rpm version: '1.08' perl-Term-ANSIColor: - arch: noarch epoch: null name: perl-Term-ANSIColor release: 461.el9 source: rpm version: '5.01' perl-Term-Cap: - arch: noarch epoch: null name: perl-Term-Cap release: 460.el9 source: rpm version: '1.17' perl-TermReadKey: - arch: x86_64 epoch: null name: perl-TermReadKey release: 11.el9 source: rpm version: '2.38' perl-Text-ParseWords: - arch: noarch epoch: null name: perl-Text-ParseWords release: 460.el9 source: rpm version: '3.30' perl-Text-Tabs+Wrap: - arch: noarch epoch: null name: perl-Text-Tabs+Wrap release: 460.el9 source: rpm version: '2013.0523' perl-Time-Local: - arch: noarch epoch: 2 name: perl-Time-Local release: 7.el9 source: rpm version: '1.300' perl-URI: - arch: noarch epoch: null name: perl-URI release: 3.el9 source: rpm 
version: '5.09' perl-base: - arch: noarch epoch: 0 name: perl-base release: 483.el9 source: rpm version: '2.27' perl-constant: - arch: noarch epoch: null name: perl-constant release: 461.el9 source: rpm version: '1.33' perl-if: - arch: noarch epoch: 0 name: perl-if release: 483.el9 source: rpm version: 0.60.800 perl-interpreter: - arch: x86_64 epoch: 4 name: perl-interpreter release: 483.el9 source: rpm version: 5.32.1 perl-lib: - arch: x86_64 epoch: 0 name: perl-lib release: 483.el9 source: rpm version: '0.65' perl-libnet: - arch: noarch epoch: null name: perl-libnet release: 4.el9 source: rpm version: '3.13' perl-libs: - arch: x86_64 epoch: 4 name: perl-libs release: 483.el9 source: rpm version: 5.32.1 perl-mro: - arch: x86_64 epoch: 0 name: perl-mro release: 483.el9 source: rpm version: '1.23' perl-overload: - arch: noarch epoch: 0 name: perl-overload release: 483.el9 source: rpm version: '1.31' perl-overloading: - arch: noarch epoch: 0 name: perl-overloading release: 483.el9 source: rpm version: '0.02' perl-parent: - arch: noarch epoch: 1 name: perl-parent release: 460.el9 source: rpm version: '0.238' perl-podlators: - arch: noarch epoch: 1 name: perl-podlators release: 460.el9 source: rpm version: '4.14' perl-srpm-macros: - arch: noarch epoch: null name: perl-srpm-macros release: 41.el9 source: rpm version: '1' perl-subs: - arch: noarch epoch: 0 name: perl-subs release: 483.el9 source: rpm version: '1.03' perl-vars: - arch: noarch epoch: 0 name: perl-vars release: 483.el9 source: rpm version: '1.05' pigz: - arch: x86_64 epoch: null name: pigz release: 4.el9 source: rpm version: '2.5' pkgconf: - arch: x86_64 epoch: null name: pkgconf release: 10.el9 source: rpm version: 1.7.3 pkgconf-m4: - arch: noarch epoch: null name: pkgconf-m4 release: 10.el9 source: rpm version: 1.7.3 pkgconf-pkg-config: - arch: x86_64 epoch: null name: pkgconf-pkg-config release: 10.el9 source: rpm version: 1.7.3 podman: - arch: x86_64 epoch: 6 name: podman release: 2.el9 source: rpm version: 5.6.0 policycoreutils: - arch: x86_64 epoch: null name: policycoreutils release: 3.el9 source: rpm version: '3.6' policycoreutils-python-utils: - arch: noarch epoch: null name: policycoreutils-python-utils release: 3.el9 source: rpm version: '3.6' polkit: - arch: x86_64 epoch: null name: polkit release: 14.el9 source: rpm version: '0.117' polkit-libs: - arch: x86_64 epoch: null name: polkit-libs release: 14.el9 source: rpm version: '0.117' polkit-pkla-compat: - arch: x86_64 epoch: null name: polkit-pkla-compat release: 21.el9 source: rpm version: '0.1' popt: - arch: x86_64 epoch: null name: popt release: 8.el9 source: rpm version: '1.18' prefixdevname: - arch: x86_64 epoch: null name: prefixdevname release: 8.el9 source: rpm version: 0.1.0 procps-ng: - arch: x86_64 epoch: null name: procps-ng release: 14.el9 source: rpm version: 3.3.17 protobuf-c: - arch: x86_64 epoch: null name: protobuf-c release: 13.el9 source: rpm version: 1.3.3 psmisc: - arch: x86_64 epoch: null name: psmisc release: 3.el9 source: rpm version: '23.4' publicsuffix-list-dafsa: - arch: noarch epoch: null name: publicsuffix-list-dafsa release: 3.el9 source: rpm version: '20210518' pyproject-srpm-macros: - arch: noarch epoch: null name: pyproject-srpm-macros release: 1.el9 source: rpm version: 1.16.2 python-rpm-macros: - arch: noarch epoch: null name: python-rpm-macros release: 54.el9 source: rpm version: '3.9' python-srpm-macros: - arch: noarch epoch: null name: python-srpm-macros release: 54.el9 source: rpm version: '3.9' python-unversioned-command: - 
arch: noarch epoch: null name: python-unversioned-command release: 2.el9 source: rpm version: 3.9.25 python3: - arch: x86_64 epoch: null name: python3 release: 2.el9 source: rpm version: 3.9.25 python3-attrs: - arch: noarch epoch: null name: python3-attrs release: 7.el9 source: rpm version: 20.3.0 python3-audit: - arch: x86_64 epoch: null name: python3-audit release: 7.el9 source: rpm version: 3.1.5 python3-babel: - arch: noarch epoch: null name: python3-babel release: 2.el9 source: rpm version: 2.9.1 python3-cffi: - arch: x86_64 epoch: null name: python3-cffi release: 5.el9 source: rpm version: 1.14.5 python3-chardet: - arch: noarch epoch: null name: python3-chardet release: 5.el9 source: rpm version: 4.0.0 python3-configobj: - arch: noarch epoch: null name: python3-configobj release: 25.el9 source: rpm version: 5.0.6 python3-cryptography: - arch: x86_64 epoch: null name: python3-cryptography release: 5.el9 source: rpm version: 36.0.1 python3-dasbus: - arch: noarch epoch: null name: python3-dasbus release: 1.el9 source: rpm version: '1.7' python3-dateutil: - arch: noarch epoch: 1 name: python3-dateutil release: 1.el9 source: rpm version: 2.9.0.post0 python3-dbus: - arch: x86_64 epoch: null name: python3-dbus release: 2.el9 source: rpm version: 1.2.18 python3-devel: - arch: x86_64 epoch: null name: python3-devel release: 2.el9 source: rpm version: 3.9.25 python3-distro: - arch: noarch epoch: null name: python3-distro release: 7.el9 source: rpm version: 1.5.0 python3-dnf: - arch: noarch epoch: null name: python3-dnf release: 31.el9 source: rpm version: 4.14.0 python3-dnf-plugins-core: - arch: noarch epoch: null name: python3-dnf-plugins-core release: 24.el9 source: rpm version: 4.3.0 python3-enchant: - arch: noarch epoch: null name: python3-enchant release: 5.el9 source: rpm version: 3.2.0 python3-file-magic: - arch: noarch epoch: null name: python3-file-magic release: 16.el9 source: rpm version: '5.39' python3-gobject-base: - arch: x86_64 epoch: null name: python3-gobject-base release: 6.el9 source: rpm version: 3.40.1 python3-gobject-base-noarch: - arch: noarch epoch: null name: python3-gobject-base-noarch release: 6.el9 source: rpm version: 3.40.1 python3-gpg: - arch: x86_64 epoch: null name: python3-gpg release: 6.el9 source: rpm version: 1.15.1 python3-hawkey: - arch: x86_64 epoch: null name: python3-hawkey release: 16.el9 source: rpm version: 0.69.0 python3-idna: - arch: noarch epoch: null name: python3-idna release: 7.el9.1 source: rpm version: '2.10' python3-jinja2: - arch: noarch epoch: null name: python3-jinja2 release: 8.el9 source: rpm version: 2.11.3 python3-jmespath: - arch: noarch epoch: null name: python3-jmespath release: 1.el9 source: rpm version: 1.0.1 python3-jsonpatch: - arch: noarch epoch: null name: python3-jsonpatch release: 16.el9 source: rpm version: '1.21' python3-jsonpointer: - arch: noarch epoch: null name: python3-jsonpointer release: 4.el9 source: rpm version: '2.0' python3-jsonschema: - arch: noarch epoch: null name: python3-jsonschema release: 13.el9 source: rpm version: 3.2.0 python3-libcomps: - arch: x86_64 epoch: null name: python3-libcomps release: 1.el9 source: rpm version: 0.1.18 python3-libdnf: - arch: x86_64 epoch: null name: python3-libdnf release: 16.el9 source: rpm version: 0.69.0 python3-libs: - arch: x86_64 epoch: null name: python3-libs release: 2.el9 source: rpm version: 3.9.25 python3-libselinux: - arch: x86_64 epoch: null name: python3-libselinux release: 3.el9 source: rpm version: '3.6' python3-libsemanage: - arch: x86_64 epoch: null name: 
python3-libsemanage release: 5.el9 source: rpm version: '3.6' python3-libvirt: - arch: x86_64 epoch: null name: python3-libvirt release: 1.el9 source: rpm version: 11.9.0 python3-libxml2: - arch: x86_64 epoch: null name: python3-libxml2 release: 14.el9 source: rpm version: 2.9.13 python3-lxml: - arch: x86_64 epoch: null name: python3-lxml release: 3.el9 source: rpm version: 4.6.5 python3-markupsafe: - arch: x86_64 epoch: null name: python3-markupsafe release: 12.el9 source: rpm version: 1.1.1 python3-netaddr: - arch: noarch epoch: null name: python3-netaddr release: 3.el9 source: rpm version: 0.10.1 python3-netifaces: - arch: x86_64 epoch: null name: python3-netifaces release: 15.el9 source: rpm version: 0.10.6 python3-oauthlib: - arch: noarch epoch: null name: python3-oauthlib release: 5.el9 source: rpm version: 3.1.1 python3-packaging: - arch: noarch epoch: null name: python3-packaging release: 5.el9 source: rpm version: '20.9' python3-pexpect: - arch: noarch epoch: null name: python3-pexpect release: 7.el9 source: rpm version: 4.8.0 python3-pip: - arch: noarch epoch: null name: python3-pip release: 1.el9 source: rpm version: 21.3.1 python3-pip-wheel: - arch: noarch epoch: null name: python3-pip-wheel release: 1.el9 source: rpm version: 21.3.1 python3-ply: - arch: noarch epoch: null name: python3-ply release: 14.el9 source: rpm version: '3.11' python3-policycoreutils: - arch: noarch epoch: null name: python3-policycoreutils release: 3.el9 source: rpm version: '3.6' python3-prettytable: - arch: noarch epoch: null name: python3-prettytable release: 27.el9 source: rpm version: 0.7.2 python3-ptyprocess: - arch: noarch epoch: null name: python3-ptyprocess release: 12.el9 source: rpm version: 0.6.0 python3-pycparser: - arch: noarch epoch: null name: python3-pycparser release: 6.el9 source: rpm version: '2.20' python3-pyparsing: - arch: noarch epoch: null name: python3-pyparsing release: 9.el9 source: rpm version: 2.4.7 python3-pyrsistent: - arch: x86_64 epoch: null name: python3-pyrsistent release: 8.el9 source: rpm version: 0.17.3 python3-pyserial: - arch: noarch epoch: null name: python3-pyserial release: 12.el9 source: rpm version: '3.4' python3-pysocks: - arch: noarch epoch: null name: python3-pysocks release: 12.el9 source: rpm version: 1.7.1 python3-pytz: - arch: noarch epoch: null name: python3-pytz release: 5.el9 source: rpm version: '2021.1' python3-pyyaml: - arch: x86_64 epoch: null name: python3-pyyaml release: 6.el9 source: rpm version: 5.4.1 python3-requests: - arch: noarch epoch: null name: python3-requests release: 10.el9 source: rpm version: 2.25.1 python3-resolvelib: - arch: noarch epoch: null name: python3-resolvelib release: 5.el9 source: rpm version: 0.5.4 python3-rpm: - arch: x86_64 epoch: null name: python3-rpm release: 40.el9 source: rpm version: 4.16.1.3 python3-rpm-generators: - arch: noarch epoch: null name: python3-rpm-generators release: 9.el9 source: rpm version: '12' python3-rpm-macros: - arch: noarch epoch: null name: python3-rpm-macros release: 54.el9 source: rpm version: '3.9' python3-setools: - arch: x86_64 epoch: null name: python3-setools release: 1.el9 source: rpm version: 4.4.4 python3-setuptools: - arch: noarch epoch: null name: python3-setuptools release: 15.el9 source: rpm version: 53.0.0 python3-setuptools-wheel: - arch: noarch epoch: null name: python3-setuptools-wheel release: 15.el9 source: rpm version: 53.0.0 python3-six: - arch: noarch epoch: null name: python3-six release: 9.el9 source: rpm version: 1.15.0 python3-systemd: - arch: x86_64 epoch: 
null name: python3-systemd release: 19.el9 source: rpm version: '234' python3-urllib3: - arch: noarch epoch: null name: python3-urllib3 release: 6.el9 source: rpm version: 1.26.5 python3.12: - arch: x86_64 epoch: null name: python3.12 release: 1.el9 source: rpm version: 3.12.12 python3.12-libs: - arch: x86_64 epoch: null name: python3.12-libs release: 1.el9 source: rpm version: 3.12.12 python3.12-pip: - arch: noarch epoch: null name: python3.12-pip release: 5.el9 source: rpm version: 23.2.1 python3.12-pip-wheel: - arch: noarch epoch: null name: python3.12-pip-wheel release: 5.el9 source: rpm version: 23.2.1 python3.12-setuptools: - arch: noarch epoch: null name: python3.12-setuptools release: 5.el9 source: rpm version: 68.2.2 qemu-guest-agent: - arch: x86_64 epoch: 17 name: qemu-guest-agent release: 7.el9 source: rpm version: 10.1.0 qt5-srpm-macros: - arch: noarch epoch: null name: qt5-srpm-macros release: 1.el9 source: rpm version: 5.15.9 quota: - arch: x86_64 epoch: 1 name: quota release: 4.el9 source: rpm version: '4.09' quota-nls: - arch: noarch epoch: 1 name: quota-nls release: 4.el9 source: rpm version: '4.09' readline: - arch: x86_64 epoch: null name: readline release: 4.el9 source: rpm version: '8.1' readline-devel: - arch: x86_64 epoch: null name: readline-devel release: 4.el9 source: rpm version: '8.1' redhat-rpm-config: - arch: noarch epoch: null name: redhat-rpm-config release: 1.el9 source: rpm version: '210' rootfiles: - arch: noarch epoch: null name: rootfiles release: 35.el9 source: rpm version: '8.1' rpcbind: - arch: x86_64 epoch: null name: rpcbind release: 7.el9 source: rpm version: 1.2.6 rpm: - arch: x86_64 epoch: null name: rpm release: 40.el9 source: rpm version: 4.16.1.3 rpm-build: - arch: x86_64 epoch: null name: rpm-build release: 40.el9 source: rpm version: 4.16.1.3 rpm-build-libs: - arch: x86_64 epoch: null name: rpm-build-libs release: 40.el9 source: rpm version: 4.16.1.3 rpm-libs: - arch: x86_64 epoch: null name: rpm-libs release: 40.el9 source: rpm version: 4.16.1.3 rpm-plugin-audit: - arch: x86_64 epoch: null name: rpm-plugin-audit release: 40.el9 source: rpm version: 4.16.1.3 rpm-plugin-selinux: - arch: x86_64 epoch: null name: rpm-plugin-selinux release: 40.el9 source: rpm version: 4.16.1.3 rpm-plugin-systemd-inhibit: - arch: x86_64 epoch: null name: rpm-plugin-systemd-inhibit release: 40.el9 source: rpm version: 4.16.1.3 rpm-sign: - arch: x86_64 epoch: null name: rpm-sign release: 40.el9 source: rpm version: 4.16.1.3 rpm-sign-libs: - arch: x86_64 epoch: null name: rpm-sign-libs release: 40.el9 source: rpm version: 4.16.1.3 rpmlint: - arch: noarch epoch: null name: rpmlint release: 19.el9 source: rpm version: '1.11' rsync: - arch: x86_64 epoch: null name: rsync release: 4.el9 source: rpm version: 3.2.5 rsyslog: - arch: x86_64 epoch: null name: rsyslog release: 2.el9 source: rpm version: 8.2510.0 rsyslog-logrotate: - arch: x86_64 epoch: null name: rsyslog-logrotate release: 2.el9 source: rpm version: 8.2510.0 ruby: - arch: x86_64 epoch: null name: ruby release: 165.el9 source: rpm version: 3.0.7 ruby-default-gems: - arch: noarch epoch: null name: ruby-default-gems release: 165.el9 source: rpm version: 3.0.7 ruby-devel: - arch: x86_64 epoch: null name: ruby-devel release: 165.el9 source: rpm version: 3.0.7 ruby-libs: - arch: x86_64 epoch: null name: ruby-libs release: 165.el9 source: rpm version: 3.0.7 rubygem-bigdecimal: - arch: x86_64 epoch: null name: rubygem-bigdecimal release: 165.el9 source: rpm version: 3.0.0 rubygem-bundler: - arch: noarch epoch: null 
name: rubygem-bundler release: 165.el9 source: rpm version: 2.2.33 rubygem-io-console: - arch: x86_64 epoch: null name: rubygem-io-console release: 165.el9 source: rpm version: 0.5.7 rubygem-json: - arch: x86_64 epoch: null name: rubygem-json release: 165.el9 source: rpm version: 2.5.1 rubygem-psych: - arch: x86_64 epoch: null name: rubygem-psych release: 165.el9 source: rpm version: 3.3.2 rubygem-rdoc: - arch: noarch epoch: null name: rubygem-rdoc release: 165.el9 source: rpm version: 6.3.4.1 rubygems: - arch: noarch epoch: null name: rubygems release: 165.el9 source: rpm version: 3.2.33 rust-srpm-macros: - arch: noarch epoch: null name: rust-srpm-macros release: 4.el9 source: rpm version: '17' samba-client-libs: - arch: x86_64 epoch: 0 name: samba-client-libs release: 1.el9 source: rpm version: 4.23.3 samba-common: - arch: noarch epoch: 0 name: samba-common release: 1.el9 source: rpm version: 4.23.3 samba-common-libs: - arch: x86_64 epoch: 0 name: samba-common-libs release: 1.el9 source: rpm version: 4.23.3 sed: - arch: x86_64 epoch: null name: sed release: 9.el9 source: rpm version: '4.8' selinux-policy: - arch: noarch epoch: null name: selinux-policy release: 1.el9 source: rpm version: 38.1.69 selinux-policy-targeted: - arch: noarch epoch: null name: selinux-policy-targeted release: 1.el9 source: rpm version: 38.1.69 setroubleshoot-plugins: - arch: noarch epoch: null name: setroubleshoot-plugins release: 4.el9 source: rpm version: 3.3.14 setroubleshoot-server: - arch: x86_64 epoch: null name: setroubleshoot-server release: 2.el9 source: rpm version: 3.3.35 setup: - arch: noarch epoch: null name: setup release: 10.el9 source: rpm version: 2.13.7 sg3_utils: - arch: x86_64 epoch: null name: sg3_utils release: 10.el9 source: rpm version: '1.47' sg3_utils-libs: - arch: x86_64 epoch: null name: sg3_utils-libs release: 10.el9 source: rpm version: '1.47' shadow-utils: - arch: x86_64 epoch: 2 name: shadow-utils release: 15.el9 source: rpm version: '4.9' shadow-utils-subid: - arch: x86_64 epoch: 2 name: shadow-utils-subid release: 15.el9 source: rpm version: '4.9' shared-mime-info: - arch: x86_64 epoch: null name: shared-mime-info release: 5.el9 source: rpm version: '2.1' slang: - arch: x86_64 epoch: null name: slang release: 11.el9 source: rpm version: 2.3.2 slirp4netns: - arch: x86_64 epoch: null name: slirp4netns release: 1.el9 source: rpm version: 1.3.3 snappy: - arch: x86_64 epoch: null name: snappy release: 8.el9 source: rpm version: 1.1.8 sos: - arch: noarch epoch: null name: sos release: 1.el9 source: rpm version: 4.10.1 sqlite-libs: - arch: x86_64 epoch: null name: sqlite-libs release: 9.el9 source: rpm version: 3.34.1 squashfs-tools: - arch: x86_64 epoch: null name: squashfs-tools release: 10.git1.el9 source: rpm version: '4.4' sscg: - arch: x86_64 epoch: null name: sscg release: 2.el9 source: rpm version: 4.0.3 sshpass: - arch: x86_64 epoch: null name: sshpass release: 4.el9 source: rpm version: '1.09' sssd-client: - arch: x86_64 epoch: null name: sssd-client release: 5.el9 source: rpm version: 2.9.7 sssd-common: - arch: x86_64 epoch: null name: sssd-common release: 5.el9 source: rpm version: 2.9.7 sssd-kcm: - arch: x86_64 epoch: null name: sssd-kcm release: 5.el9 source: rpm version: 2.9.7 sssd-nfs-idmap: - arch: x86_64 epoch: null name: sssd-nfs-idmap release: 5.el9 source: rpm version: 2.9.7 sudo: - arch: x86_64 epoch: null name: sudo release: 13.el9 source: rpm version: 1.9.5p2 systemd: - arch: x86_64 epoch: null name: systemd release: 59.el9 source: rpm version: '252' 
systemd-devel: - arch: x86_64 epoch: null name: systemd-devel release: 59.el9 source: rpm version: '252' systemd-libs: - arch: x86_64 epoch: null name: systemd-libs release: 59.el9 source: rpm version: '252' systemd-pam: - arch: x86_64 epoch: null name: systemd-pam release: 59.el9 source: rpm version: '252' systemd-rpm-macros: - arch: noarch epoch: null name: systemd-rpm-macros release: 59.el9 source: rpm version: '252' systemd-udev: - arch: x86_64 epoch: null name: systemd-udev release: 59.el9 source: rpm version: '252' tar: - arch: x86_64 epoch: 2 name: tar release: 7.el9 source: rpm version: '1.34' tcl: - arch: x86_64 epoch: 1 name: tcl release: 7.el9 source: rpm version: 8.6.10 tcpdump: - arch: x86_64 epoch: 14 name: tcpdump release: 9.el9 source: rpm version: 4.99.0 teamd: - arch: x86_64 epoch: null name: teamd release: 16.el9 source: rpm version: '1.31' time: - arch: x86_64 epoch: null name: time release: 18.el9 source: rpm version: '1.9' tmux: - arch: x86_64 epoch: null name: tmux release: 5.el9 source: rpm version: 3.2a tpm2-tss: - arch: x86_64 epoch: null name: tpm2-tss release: 1.el9 source: rpm version: 3.2.3 traceroute: - arch: x86_64 epoch: 3 name: traceroute release: 1.el9 source: rpm version: 2.1.1 tzdata: - arch: noarch epoch: null name: tzdata release: 2.el9 source: rpm version: 2025b unzip: - arch: x86_64 epoch: null name: unzip release: 59.el9 source: rpm version: '6.0' userspace-rcu: - arch: x86_64 epoch: null name: userspace-rcu release: 6.el9 source: rpm version: 0.12.1 util-linux: - arch: x86_64 epoch: null name: util-linux release: 21.el9 source: rpm version: 2.37.4 util-linux-core: - arch: x86_64 epoch: null name: util-linux-core release: 21.el9 source: rpm version: 2.37.4 vim-minimal: - arch: x86_64 epoch: 2 name: vim-minimal release: 23.el9 source: rpm version: 8.2.2637 webkit2gtk3-jsc: - arch: x86_64 epoch: null name: webkit2gtk3-jsc release: 1.el9 source: rpm version: 2.50.3 wget: - arch: x86_64 epoch: null name: wget release: 8.el9 source: rpm version: 1.21.1 which: - arch: x86_64 epoch: null name: which release: 30.el9 source: rpm version: '2.21' xfsprogs: - arch: x86_64 epoch: null name: xfsprogs release: 7.el9 source: rpm version: 6.4.0 xz: - arch: x86_64 epoch: null name: xz release: 8.el9 source: rpm version: 5.2.5 xz-devel: - arch: x86_64 epoch: null name: xz-devel release: 8.el9 source: rpm version: 5.2.5 xz-libs: - arch: x86_64 epoch: null name: xz-libs release: 8.el9 source: rpm version: 5.2.5 yajl: - arch: x86_64 epoch: null name: yajl release: 25.el9 source: rpm version: 2.1.0 yum: - arch: noarch epoch: null name: yum release: 31.el9 source: rpm version: 4.14.0 yum-utils: - arch: noarch epoch: null name: yum-utils release: 24.el9 source: rpm version: 4.3.0 zip: - arch: x86_64 epoch: null name: zip release: 35.el9 source: rpm version: '3.0' zlib: - arch: x86_64 epoch: null name: zlib release: 41.el9 source: rpm version: 1.2.11 zlib-devel: - arch: x86_64 epoch: null name: zlib-devel release: 41.el9 source: rpm version: 1.2.11 zstd: - arch: x86_64 epoch: null name: zstd release: 1.el9 source: rpm version: 1.5.5 home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/0000755000175000017500000000000015117040724023470 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/0000755000175000017500000000000015117040724027523 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/0000755000175000017500000000000015117043064030650 5ustar 
zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_all.yml0000644000175000017500000000142615117040726033145 0ustar zuulzuul--- - name: Debug make_all_env when: make_all_env is defined ansible.builtin.debug: var: make_all_env - name: Debug make_all_params when: make_all_params is defined ansible.builtin.debug: var: make_all_params - name: Run all retries: "{{ make_all_retries | default(omit) }}" delay: "{{ make_all_delay | default(omit) }}" until: "{{ make_all_until | default(true) }}" register: "make_all_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make all" dry_run: "{{ make_all_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_all_env|default({})), **(make_all_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_help.yml0000644000175000017500000000145615117040726033330 0ustar zuulzuul--- - name: Debug make_help_env when: make_help_env is defined ansible.builtin.debug: var: make_help_env - name: Debug make_help_params when: make_help_params is defined ansible.builtin.debug: var: make_help_params - name: Run help retries: "{{ make_help_retries | default(omit) }}" delay: "{{ make_help_delay | default(omit) }}" until: "{{ make_help_until | default(true) }}" register: "make_help_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make help" dry_run: "{{ make_help_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_help_env|default({})), **(make_help_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cleanup.0000644000175000017500000000152215117040726033277 0ustar zuulzuul--- - name: Debug make_cleanup_env when: make_cleanup_env is defined ansible.builtin.debug: var: make_cleanup_env - name: Debug make_cleanup_params when: make_cleanup_params is defined ansible.builtin.debug: var: make_cleanup_params - name: Run cleanup retries: "{{ make_cleanup_retries | default(omit) }}" delay: "{{ make_cleanup_delay | default(omit) }}" until: "{{ make_cleanup_until | default(true) }}" register: "make_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cleanup" dry_run: "{{ make_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cleanup_env|default({})), **(make_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_deploy_c0000644000175000017500000000167315117040726033377 0ustar zuulzuul--- - name: Debug make_deploy_cleanup_env when: make_deploy_cleanup_env is defined ansible.builtin.debug: var: make_deploy_cleanup_env - name: Debug make_deploy_cleanup_params when: 
make_deploy_cleanup_params is defined ansible.builtin.debug: var: make_deploy_cleanup_params - name: Run deploy_cleanup retries: "{{ make_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_deploy_cleanup_delay | default(omit) }}" until: "{{ make_deploy_cleanup_until | default(true) }}" register: "make_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make deploy_cleanup" dry_run: "{{ make_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_deploy_cleanup_env|default({})), **(make_deploy_cleanup_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_wait.yml0000644000175000017500000000144515117040726033342 0ustar zuulzuul--- - name: Debug make_wait_env when: make_wait_env is defined ansible.builtin.debug: var: make_wait_env - name: Debug make_wait_params when: make_wait_params is defined ansible.builtin.debug: var: make_wait_params - name: Run wait retries: "{{ make_wait_retries | default(omit) }}" delay: "{{ make_wait_delay | default(omit) }}" until: "{{ make_wait_until | default(true) }}" register: "make_wait_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make wait" dry_run: "{{ make_wait_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_wait_env|default({})), **(make_wait_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000161615117040726033414 0ustar zuulzuul--- - name: Debug make_crc_storage_env when: make_crc_storage_env is defined ansible.builtin.debug: var: make_crc_storage_env - name: Debug make_crc_storage_params when: make_crc_storage_params is defined ansible.builtin.debug: var: make_crc_storage_params - name: Run crc_storage retries: "{{ make_crc_storage_retries | default(omit) }}" delay: "{{ make_crc_storage_delay | default(omit) }}" until: "{{ make_crc_storage_until | default(true) }}" register: "make_crc_storage_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage" dry_run: "{{ make_crc_storage_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_env|default({})), **(make_crc_storage_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000200615117040726033406 0ustar zuulzuul--- - name: Debug make_crc_storage_cleanup_env when: make_crc_storage_cleanup_env is defined ansible.builtin.debug: var: make_crc_storage_cleanup_env - name: Debug make_crc_storage_cleanup_params when: make_crc_storage_cleanup_params is defined ansible.builtin.debug: var: make_crc_storage_cleanup_params - name: Run 
crc_storage_cleanup retries: "{{ make_crc_storage_cleanup_retries | default(omit) }}" delay: "{{ make_crc_storage_cleanup_delay | default(omit) }}" until: "{{ make_crc_storage_cleanup_until | default(true) }}" register: "make_crc_storage_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage_cleanup" dry_run: "{{ make_crc_storage_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_cleanup_env|default({})), **(make_crc_storage_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage_release.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000200615117040726033406 0ustar zuulzuul--- - name: Debug make_crc_storage_release_env when: make_crc_storage_release_env is defined ansible.builtin.debug: var: make_crc_storage_release_env - name: Debug make_crc_storage_release_params when: make_crc_storage_release_params is defined ansible.builtin.debug: var: make_crc_storage_release_params - name: Run crc_storage_release retries: "{{ make_crc_storage_release_retries | default(omit) }}" delay: "{{ make_crc_storage_release_delay | default(omit) }}" until: "{{ make_crc_storage_release_until | default(true) }}" register: "make_crc_storage_release_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage_release" dry_run: "{{ make_crc_storage_release_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_release_env|default({})), **(make_crc_storage_release_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage_with_retries.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000212115117040726033404 0ustar zuulzuul--- - name: Debug make_crc_storage_with_retries_env when: make_crc_storage_with_retries_env is defined ansible.builtin.debug: var: make_crc_storage_with_retries_env - name: Debug make_crc_storage_with_retries_params when: make_crc_storage_with_retries_params is defined ansible.builtin.debug: var: make_crc_storage_with_retries_params - name: Run crc_storage_with_retries retries: "{{ make_crc_storage_with_retries_retries | default(omit) }}" delay: "{{ make_crc_storage_with_retries_delay | default(omit) }}" until: "{{ make_crc_storage_with_retries_until | default(true) }}" register: "make_crc_storage_with_retries_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage_with_retries" dry_run: "{{ make_crc_storage_with_retries_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_with_retries_env|default({})), **(make_crc_storage_with_retries_params|default({}))) }}" ././@LongLink0000644000000000000000000000020100000000000011574 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage_cleanup_with_retries.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000231115117040726033405 0ustar zuulzuul--- - name: Debug make_crc_storage_cleanup_with_retries_env when: make_crc_storage_cleanup_with_retries_env is defined ansible.builtin.debug: var: make_crc_storage_cleanup_with_retries_env - name: Debug make_crc_storage_cleanup_with_retries_params when: make_crc_storage_cleanup_with_retries_params is defined ansible.builtin.debug: var: make_crc_storage_cleanup_with_retries_params - name: Run crc_storage_cleanup_with_retries retries: "{{ make_crc_storage_cleanup_with_retries_retries | default(omit) }}" delay: "{{ make_crc_storage_cleanup_with_retries_delay | default(omit) }}" until: "{{ make_crc_storage_cleanup_with_retries_until | default(true) }}" register: "make_crc_storage_cleanup_with_retries_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage_cleanup_with_retries" dry_run: "{{ make_crc_storage_cleanup_with_retries_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_cleanup_with_retries_env|default({})), **(make_crc_storage_cleanup_with_retries_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_operator_namespace.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_operator0000644000175000017500000000176715117040726033440 0ustar zuulzuul--- - name: Debug make_operator_namespace_env when: make_operator_namespace_env is defined ansible.builtin.debug: var: make_operator_namespace_env - name: Debug make_operator_namespace_params when: make_operator_namespace_params is defined ansible.builtin.debug: var: make_operator_namespace_params - name: Run operator_namespace retries: "{{ make_operator_namespace_retries | default(omit) }}" delay: "{{ make_operator_namespace_delay | default(omit) }}" until: "{{ make_operator_namespace_until | default(true) }}" register: "make_operator_namespace_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make operator_namespace" dry_run: "{{ make_operator_namespace_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_operator_namespace_env|default({})), **(make_operator_namespace_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_namespace.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_namespac0000644000175000017500000000156015117040726033363 0ustar zuulzuul--- - name: Debug make_namespace_env when: make_namespace_env is defined ansible.builtin.debug: var: make_namespace_env - name: Debug make_namespace_params when: make_namespace_params is defined ansible.builtin.debug: var: make_namespace_params - name: Run namespace retries: "{{ make_namespace_retries | default(omit) }}" delay: "{{ make_namespace_delay | default(omit) }}" until: "{{ 
make_namespace_until | default(true) }}" register: "make_namespace_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make namespace" dry_run: "{{ make_namespace_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_namespace_env|default({})), **(make_namespace_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_namespace_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_namespac0000644000175000017500000000175015117040726033364 0ustar zuulzuul--- - name: Debug make_namespace_cleanup_env when: make_namespace_cleanup_env is defined ansible.builtin.debug: var: make_namespace_cleanup_env - name: Debug make_namespace_cleanup_params when: make_namespace_cleanup_params is defined ansible.builtin.debug: var: make_namespace_cleanup_params - name: Run namespace_cleanup retries: "{{ make_namespace_cleanup_retries | default(omit) }}" delay: "{{ make_namespace_cleanup_delay | default(omit) }}" until: "{{ make_namespace_cleanup_until | default(true) }}" register: "make_namespace_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make namespace_cleanup" dry_run: "{{ make_namespace_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_namespace_cleanup_env|default({})), **(make_namespace_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_input.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_input.ym0000644000175000017500000000146415117040726033362 0ustar zuulzuul--- - name: Debug make_input_env when: make_input_env is defined ansible.builtin.debug: var: make_input_env - name: Debug make_input_params when: make_input_params is defined ansible.builtin.debug: var: make_input_params - name: Run input retries: "{{ make_input_retries | default(omit) }}" delay: "{{ make_input_delay | default(omit) }}" until: "{{ make_input_until | default(true) }}" register: "make_input_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make input" dry_run: "{{ make_input_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_input_env|default({})), **(make_input_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_input_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_input_cl0000644000175000017500000000165415117040726033415 0ustar zuulzuul--- - name: Debug make_input_cleanup_env when: make_input_cleanup_env is defined ansible.builtin.debug: var: make_input_cleanup_env - name: Debug make_input_cleanup_params when: make_input_cleanup_params is defined ansible.builtin.debug: var: make_input_cleanup_params - name: Run input_cleanup retries: "{{ 
make_input_cleanup_retries | default(omit) }}" delay: "{{ make_input_cleanup_delay | default(omit) }}" until: "{{ make_input_cleanup_until | default(true) }}" register: "make_input_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make input_cleanup" dry_run: "{{ make_input_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_input_cleanup_env|default({})), **(make_input_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_bmo_setup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_bmo_0000644000175000017500000000165415117040726033343 0ustar zuulzuul--- - name: Debug make_crc_bmo_setup_env when: make_crc_bmo_setup_env is defined ansible.builtin.debug: var: make_crc_bmo_setup_env - name: Debug make_crc_bmo_setup_params when: make_crc_bmo_setup_params is defined ansible.builtin.debug: var: make_crc_bmo_setup_params - name: Run crc_bmo_setup retries: "{{ make_crc_bmo_setup_retries | default(omit) }}" delay: "{{ make_crc_bmo_setup_delay | default(omit) }}" until: "{{ make_crc_bmo_setup_until | default(true) }}" register: "make_crc_bmo_setup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_bmo_setup" dry_run: "{{ make_crc_bmo_setup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_bmo_setup_env|default({})), **(make_crc_bmo_setup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_bmo_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_bmo_0000644000175000017500000000171215117040726033336 0ustar zuulzuul--- - name: Debug make_crc_bmo_cleanup_env when: make_crc_bmo_cleanup_env is defined ansible.builtin.debug: var: make_crc_bmo_cleanup_env - name: Debug make_crc_bmo_cleanup_params when: make_crc_bmo_cleanup_params is defined ansible.builtin.debug: var: make_crc_bmo_cleanup_params - name: Run crc_bmo_cleanup retries: "{{ make_crc_bmo_cleanup_retries | default(omit) }}" delay: "{{ make_crc_bmo_cleanup_delay | default(omit) }}" until: "{{ make_crc_bmo_cleanup_until | default(true) }}" register: "make_crc_bmo_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_bmo_cleanup" dry_run: "{{ make_crc_bmo_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_bmo_cleanup_env|default({})), **(make_crc_bmo_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000167315117040726033415 0ustar zuulzuul--- - name: Debug make_openstack_prep_env when: make_openstack_prep_env is defined 
ansible.builtin.debug: var: make_openstack_prep_env - name: Debug make_openstack_prep_params when: make_openstack_prep_params is defined ansible.builtin.debug: var: make_openstack_prep_params - name: Run openstack_prep retries: "{{ make_openstack_prep_retries | default(omit) }}" delay: "{{ make_openstack_prep_delay | default(omit) }}" until: "{{ make_openstack_prep_until | default(true) }}" register: "make_openstack_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_prep" dry_run: "{{ make_openstack_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_prep_env|default({})), **(make_openstack_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000156015117040726033410 0ustar zuulzuul--- - name: Debug make_openstack_env when: make_openstack_env is defined ansible.builtin.debug: var: make_openstack_env - name: Debug make_openstack_params when: make_openstack_params is defined ansible.builtin.debug: var: make_openstack_params - name: Run openstack retries: "{{ make_openstack_retries | default(omit) }}" delay: "{{ make_openstack_delay | default(omit) }}" until: "{{ make_openstack_until | default(true) }}" register: "make_openstack_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack" dry_run: "{{ make_openstack_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_env|default({})), **(make_openstack_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_wait.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000167315117040726033415 0ustar zuulzuul--- - name: Debug make_openstack_wait_env when: make_openstack_wait_env is defined ansible.builtin.debug: var: make_openstack_wait_env - name: Debug make_openstack_wait_params when: make_openstack_wait_params is defined ansible.builtin.debug: var: make_openstack_wait_params - name: Run openstack_wait retries: "{{ make_openstack_wait_retries | default(omit) }}" delay: "{{ make_openstack_wait_delay | default(omit) }}" until: "{{ make_openstack_wait_until | default(true) }}" register: "make_openstack_wait_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_wait" dry_run: "{{ make_openstack_wait_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_wait_env|default({})), **(make_openstack_wait_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_init.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000167315117040726033415 0ustar zuulzuul--- - name: Debug make_openstack_init_env when: make_openstack_init_env is defined ansible.builtin.debug: var: make_openstack_init_env - name: Debug make_openstack_init_params when: make_openstack_init_params is defined ansible.builtin.debug: var: make_openstack_init_params - name: Run openstack_init retries: "{{ make_openstack_init_retries | default(omit) }}" delay: "{{ make_openstack_init_delay | default(omit) }}" until: "{{ make_openstack_init_until | default(true) }}" register: "make_openstack_init_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_init" dry_run: "{{ make_openstack_init_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_init_env|default({})), **(make_openstack_init_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000175015117040726033411 0ustar zuulzuul--- - name: Debug make_openstack_cleanup_env when: make_openstack_cleanup_env is defined ansible.builtin.debug: var: make_openstack_cleanup_env - name: Debug make_openstack_cleanup_params when: make_openstack_cleanup_params is defined ansible.builtin.debug: var: make_openstack_cleanup_params - name: Run openstack_cleanup retries: "{{ make_openstack_cleanup_retries | default(omit) }}" delay: "{{ make_openstack_cleanup_delay | default(omit) }}" until: "{{ make_openstack_cleanup_until | default(true) }}" register: "make_openstack_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_cleanup" dry_run: "{{ make_openstack_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_cleanup_env|default({})), **(make_openstack_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_repo.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000167315117040726033415 0ustar zuulzuul--- - name: Debug make_openstack_repo_env when: make_openstack_repo_env is defined ansible.builtin.debug: var: make_openstack_repo_env - name: Debug make_openstack_repo_params when: make_openstack_repo_params is defined ansible.builtin.debug: var: make_openstack_repo_params - name: Run openstack_repo retries: "{{ make_openstack_repo_retries | default(omit) }}" delay: "{{ make_openstack_repo_delay | default(omit) }}" until: "{{ make_openstack_repo_until | default(true) }}" register: "make_openstack_repo_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_repo" dry_run: "{{ make_openstack_repo_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_repo_env|default({})), **(make_openstack_repo_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000204415117040726033406 0ustar zuulzuul--- - name: Debug make_openstack_deploy_prep_env when: make_openstack_deploy_prep_env is defined ansible.builtin.debug: var: make_openstack_deploy_prep_env - name: Debug make_openstack_deploy_prep_params when: make_openstack_deploy_prep_params is defined ansible.builtin.debug: var: make_openstack_deploy_prep_params - name: Run openstack_deploy_prep retries: "{{ make_openstack_deploy_prep_retries | default(omit) }}" delay: "{{ make_openstack_deploy_prep_delay | default(omit) }}" until: "{{ make_openstack_deploy_prep_until | default(true) }}" register: "make_openstack_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_deploy_prep" dry_run: "{{ make_openstack_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_deploy_prep_env|default({})), **(make_openstack_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000173115117040726033410 0ustar zuulzuul--- - name: Debug make_openstack_deploy_env when: make_openstack_deploy_env is defined ansible.builtin.debug: var: make_openstack_deploy_env - name: Debug make_openstack_deploy_params when: make_openstack_deploy_params is defined ansible.builtin.debug: var: make_openstack_deploy_params - name: Run openstack_deploy retries: "{{ make_openstack_deploy_retries | default(omit) }}" delay: "{{ make_openstack_deploy_delay | default(omit) }}" until: "{{ make_openstack_deploy_until | default(true) }}" register: "make_openstack_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_deploy" dry_run: "{{ make_openstack_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_deploy_env|default({})), **(make_openstack_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_wait_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000204415117040726033406 0ustar zuulzuul--- - name: Debug make_openstack_wait_deploy_env when: make_openstack_wait_deploy_env is defined ansible.builtin.debug: var: make_openstack_wait_deploy_env - name: Debug make_openstack_wait_deploy_params when: make_openstack_wait_deploy_params is defined 
ansible.builtin.debug: var: make_openstack_wait_deploy_params - name: Run openstack_wait_deploy retries: "{{ make_openstack_wait_deploy_retries | default(omit) }}" delay: "{{ make_openstack_wait_deploy_delay | default(omit) }}" until: "{{ make_openstack_wait_deploy_until | default(true) }}" register: "make_openstack_wait_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_wait_deploy" dry_run: "{{ make_openstack_wait_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_wait_deploy_env|default({})), **(make_openstack_wait_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000212115117040726033402 0ustar zuulzuul--- - name: Debug make_openstack_deploy_cleanup_env when: make_openstack_deploy_cleanup_env is defined ansible.builtin.debug: var: make_openstack_deploy_cleanup_env - name: Debug make_openstack_deploy_cleanup_params when: make_openstack_deploy_cleanup_params is defined ansible.builtin.debug: var: make_openstack_deploy_cleanup_params - name: Run openstack_deploy_cleanup retries: "{{ make_openstack_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_openstack_deploy_cleanup_delay | default(omit) }}" until: "{{ make_openstack_deploy_cleanup_until | default(true) }}" register: "make_openstack_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_deploy_cleanup" dry_run: "{{ make_openstack_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_deploy_cleanup_env|default({})), **(make_openstack_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_update_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000202515117040726033405 0ustar zuulzuul--- - name: Debug make_openstack_update_run_env when: make_openstack_update_run_env is defined ansible.builtin.debug: var: make_openstack_update_run_env - name: Debug make_openstack_update_run_params when: make_openstack_update_run_params is defined ansible.builtin.debug: var: make_openstack_update_run_params - name: Run openstack_update_run retries: "{{ make_openstack_update_run_retries | default(omit) }}" delay: "{{ make_openstack_update_run_delay | default(omit) }}" until: "{{ make_openstack_update_run_until | default(true) }}" register: "make_openstack_update_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_update_run" dry_run: "{{ make_openstack_update_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_update_run_env|default({})), **(make_openstack_update_run_params|default({}))) }}" 
././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_update_services.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_update_s0000644000175000017500000000171215117040726033377 0ustar zuulzuul--- - name: Debug make_update_services_env when: make_update_services_env is defined ansible.builtin.debug: var: make_update_services_env - name: Debug make_update_services_params when: make_update_services_params is defined ansible.builtin.debug: var: make_update_services_params - name: Run update_services retries: "{{ make_update_services_retries | default(omit) }}" delay: "{{ make_update_services_delay | default(omit) }}" until: "{{ make_update_services_until | default(true) }}" register: "make_update_services_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make update_services" dry_run: "{{ make_update_services_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_update_services_env|default({})), **(make_update_services_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_update_system.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_update_s0000644000175000017500000000165415117040726033404 0ustar zuulzuul--- - name: Debug make_update_system_env when: make_update_system_env is defined ansible.builtin.debug: var: make_update_system_env - name: Debug make_update_system_params when: make_update_system_params is defined ansible.builtin.debug: var: make_update_system_params - name: Run update_system retries: "{{ make_update_system_retries | default(omit) }}" delay: "{{ make_update_system_delay | default(omit) }}" until: "{{ make_update_system_until | default(true) }}" register: "make_update_system_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make update_system" dry_run: "{{ make_update_system_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_update_system_env|default({})), **(make_update_system_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_patch_version.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000210215117040726033401 0ustar zuulzuul--- - name: Debug make_openstack_patch_version_env when: make_openstack_patch_version_env is defined ansible.builtin.debug: var: make_openstack_patch_version_env - name: Debug make_openstack_patch_version_params when: make_openstack_patch_version_params is defined ansible.builtin.debug: var: make_openstack_patch_version_params - name: Run openstack_patch_version retries: "{{ make_openstack_patch_version_retries | default(omit) }}" delay: "{{ make_openstack_patch_version_delay | default(omit) }}" until: "{{ make_openstack_patch_version_until | default(true) }}" register: "make_openstack_patch_version_status" cifmw.general.ci_script: output_dir: "{{ 
cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_patch_version" dry_run: "{{ make_openstack_patch_version_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_patch_version_env|default({})), **(make_openstack_patch_version_params|default({}))) }}" ././@LongLink0000644000000000000000000000017200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_generate_keys.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000214015117040726033344 0ustar zuulzuul--- - name: Debug make_edpm_deploy_generate_keys_env when: make_edpm_deploy_generate_keys_env is defined ansible.builtin.debug: var: make_edpm_deploy_generate_keys_env - name: Debug make_edpm_deploy_generate_keys_params when: make_edpm_deploy_generate_keys_params is defined ansible.builtin.debug: var: make_edpm_deploy_generate_keys_params - name: Run edpm_deploy_generate_keys retries: "{{ make_edpm_deploy_generate_keys_retries | default(omit) }}" delay: "{{ make_edpm_deploy_generate_keys_delay | default(omit) }}" until: "{{ make_edpm_deploy_generate_keys_until | default(true) }}" register: "make_edpm_deploy_generate_keys_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_generate_keys" dry_run: "{{ make_edpm_deploy_generate_keys_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_generate_keys_env|default({})), **(make_edpm_deploy_generate_keys_params|default({}))) }}" ././@LongLink0000644000000000000000000000020000000000000011573 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_patch_ansible_runner_image.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_pat0000644000175000017500000000227215117040726033366 0ustar zuulzuul--- - name: Debug make_edpm_patch_ansible_runner_image_env when: make_edpm_patch_ansible_runner_image_env is defined ansible.builtin.debug: var: make_edpm_patch_ansible_runner_image_env - name: Debug make_edpm_patch_ansible_runner_image_params when: make_edpm_patch_ansible_runner_image_params is defined ansible.builtin.debug: var: make_edpm_patch_ansible_runner_image_params - name: Run edpm_patch_ansible_runner_image retries: "{{ make_edpm_patch_ansible_runner_image_retries | default(omit) }}" delay: "{{ make_edpm_patch_ansible_runner_image_delay | default(omit) }}" until: "{{ make_edpm_patch_ansible_runner_image_until | default(true) }}" register: "make_edpm_patch_ansible_runner_image_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_patch_ansible_runner_image" dry_run: "{{ make_edpm_patch_ansible_runner_image_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_patch_ansible_runner_image_env|default({})), **(make_edpm_patch_ansible_runner_image_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000173115117040726033351 0ustar zuulzuul--- - name: Debug make_edpm_deploy_prep_env when: make_edpm_deploy_prep_env is defined ansible.builtin.debug: var: make_edpm_deploy_prep_env - name: Debug make_edpm_deploy_prep_params when: make_edpm_deploy_prep_params is defined ansible.builtin.debug: var: make_edpm_deploy_prep_params - name: Run edpm_deploy_prep retries: "{{ make_edpm_deploy_prep_retries | default(omit) }}" delay: "{{ make_edpm_deploy_prep_delay | default(omit) }}" until: "{{ make_edpm_deploy_prep_until | default(true) }}" register: "make_edpm_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_prep" dry_run: "{{ make_edpm_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_prep_env|default({})), **(make_edpm_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000200615117040726033345 0ustar zuulzuul--- - name: Debug make_edpm_deploy_cleanup_env when: make_edpm_deploy_cleanup_env is defined ansible.builtin.debug: var: make_edpm_deploy_cleanup_env - name: Debug make_edpm_deploy_cleanup_params when: make_edpm_deploy_cleanup_params is defined ansible.builtin.debug: var: make_edpm_deploy_cleanup_params - name: Run edpm_deploy_cleanup retries: "{{ make_edpm_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_edpm_deploy_cleanup_delay | default(omit) }}" until: "{{ make_edpm_deploy_cleanup_until | default(true) }}" register: "make_edpm_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_cleanup" dry_run: "{{ make_edpm_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_cleanup_env|default({})), **(make_edpm_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000161615117040726033353 0ustar zuulzuul--- - name: Debug make_edpm_deploy_env when: make_edpm_deploy_env is defined ansible.builtin.debug: var: make_edpm_deploy_env - name: Debug make_edpm_deploy_params when: make_edpm_deploy_params is defined ansible.builtin.debug: var: make_edpm_deploy_params - name: Run edpm_deploy retries: "{{ make_edpm_deploy_retries | default(omit) }}" delay: "{{ make_edpm_deploy_delay | default(omit) }}" until: "{{ make_edpm_deploy_until | default(true) }}" register: "make_edpm_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy" dry_run: "{{ make_edpm_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_env|default({})), **(make_edpm_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017300000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_baremetal_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000215715117040726033354 0ustar zuulzuul--- - name: Debug make_edpm_deploy_baremetal_prep_env when: make_edpm_deploy_baremetal_prep_env is defined ansible.builtin.debug: var: make_edpm_deploy_baremetal_prep_env - name: Debug make_edpm_deploy_baremetal_prep_params when: make_edpm_deploy_baremetal_prep_params is defined ansible.builtin.debug: var: make_edpm_deploy_baremetal_prep_params - name: Run edpm_deploy_baremetal_prep retries: "{{ make_edpm_deploy_baremetal_prep_retries | default(omit) }}" delay: "{{ make_edpm_deploy_baremetal_prep_delay | default(omit) }}" until: "{{ make_edpm_deploy_baremetal_prep_until | default(true) }}" register: "make_edpm_deploy_baremetal_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_baremetal_prep" dry_run: "{{ make_edpm_deploy_baremetal_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_baremetal_prep_env|default({})), **(make_edpm_deploy_baremetal_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_baremetal.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000204415117040726033347 0ustar zuulzuul--- - name: Debug make_edpm_deploy_baremetal_env when: make_edpm_deploy_baremetal_env is defined ansible.builtin.debug: var: make_edpm_deploy_baremetal_env - name: Debug make_edpm_deploy_baremetal_params when: make_edpm_deploy_baremetal_params is defined ansible.builtin.debug: var: make_edpm_deploy_baremetal_params - name: Run edpm_deploy_baremetal retries: "{{ make_edpm_deploy_baremetal_retries | default(omit) }}" delay: "{{ make_edpm_deploy_baremetal_delay | default(omit) }}" until: "{{ make_edpm_deploy_baremetal_until | default(true) }}" register: "make_edpm_deploy_baremetal_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_baremetal" dry_run: "{{ make_edpm_deploy_baremetal_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_baremetal_env|default({})), **(make_edpm_deploy_baremetal_params|default({}))) }}" ././@LongLink0000644000000000000000000000017300000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_wait_deploy_baremetal.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_wai0000644000175000017500000000215715117040726033364 0ustar zuulzuul--- - name: Debug make_edpm_wait_deploy_baremetal_env when: make_edpm_wait_deploy_baremetal_env is 
defined ansible.builtin.debug: var: make_edpm_wait_deploy_baremetal_env - name: Debug make_edpm_wait_deploy_baremetal_params when: make_edpm_wait_deploy_baremetal_params is defined ansible.builtin.debug: var: make_edpm_wait_deploy_baremetal_params - name: Run edpm_wait_deploy_baremetal retries: "{{ make_edpm_wait_deploy_baremetal_retries | default(omit) }}" delay: "{{ make_edpm_wait_deploy_baremetal_delay | default(omit) }}" until: "{{ make_edpm_wait_deploy_baremetal_until | default(true) }}" register: "make_edpm_wait_deploy_baremetal_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_wait_deploy_baremetal" dry_run: "{{ make_edpm_wait_deploy_baremetal_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_wait_deploy_baremetal_env|default({})), **(make_edpm_wait_deploy_baremetal_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_wait_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_wai0000644000175000017500000000173115117040726033361 0ustar zuulzuul--- - name: Debug make_edpm_wait_deploy_env when: make_edpm_wait_deploy_env is defined ansible.builtin.debug: var: make_edpm_wait_deploy_env - name: Debug make_edpm_wait_deploy_params when: make_edpm_wait_deploy_params is defined ansible.builtin.debug: var: make_edpm_wait_deploy_params - name: Run edpm_wait_deploy retries: "{{ make_edpm_wait_deploy_retries | default(omit) }}" delay: "{{ make_edpm_wait_deploy_delay | default(omit) }}" until: "{{ make_edpm_wait_deploy_until | default(true) }}" register: "make_edpm_wait_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_wait_deploy" dry_run: "{{ make_edpm_wait_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_wait_deploy_env|default({})), **(make_edpm_wait_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_register_dns.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_reg0000644000175000017500000000175015117040726033357 0ustar zuulzuul--- - name: Debug make_edpm_register_dns_env when: make_edpm_register_dns_env is defined ansible.builtin.debug: var: make_edpm_register_dns_env - name: Debug make_edpm_register_dns_params when: make_edpm_register_dns_params is defined ansible.builtin.debug: var: make_edpm_register_dns_params - name: Run edpm_register_dns retries: "{{ make_edpm_register_dns_retries | default(omit) }}" delay: "{{ make_edpm_register_dns_delay | default(omit) }}" until: "{{ make_edpm_register_dns_until | default(true) }}" register: "make_edpm_register_dns_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_register_dns" dry_run: "{{ make_edpm_register_dns_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_register_dns_env|default({})), 
**(make_edpm_register_dns_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_nova_discover_hosts.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_nov0000644000175000017500000000212115117040726033375 0ustar zuulzuul--- - name: Debug make_edpm_nova_discover_hosts_env when: make_edpm_nova_discover_hosts_env is defined ansible.builtin.debug: var: make_edpm_nova_discover_hosts_env - name: Debug make_edpm_nova_discover_hosts_params when: make_edpm_nova_discover_hosts_params is defined ansible.builtin.debug: var: make_edpm_nova_discover_hosts_params - name: Run edpm_nova_discover_hosts retries: "{{ make_edpm_nova_discover_hosts_retries | default(omit) }}" delay: "{{ make_edpm_nova_discover_hosts_delay | default(omit) }}" until: "{{ make_edpm_nova_discover_hosts_until | default(true) }}" register: "make_edpm_nova_discover_hosts_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_nova_discover_hosts" dry_run: "{{ make_edpm_nova_discover_hosts_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_nova_discover_hosts_env|default({})), **(make_edpm_nova_discover_hosts_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_crds.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000167315117040726033415 0ustar zuulzuul--- - name: Debug make_openstack_crds_env when: make_openstack_crds_env is defined ansible.builtin.debug: var: make_openstack_crds_env - name: Debug make_openstack_crds_params when: make_openstack_crds_params is defined ansible.builtin.debug: var: make_openstack_crds_params - name: Run openstack_crds retries: "{{ make_openstack_crds_retries | default(omit) }}" delay: "{{ make_openstack_crds_delay | default(omit) }}" until: "{{ make_openstack_crds_until | default(true) }}" register: "make_openstack_crds_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_crds" dry_run: "{{ make_openstack_crds_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_crds_env|default({})), **(make_openstack_crds_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_crds_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000206315117040726033407 0ustar zuulzuul--- - name: Debug make_openstack_crds_cleanup_env when: make_openstack_crds_cleanup_env is defined ansible.builtin.debug: var: make_openstack_crds_cleanup_env - name: Debug make_openstack_crds_cleanup_params when: make_openstack_crds_cleanup_params is defined ansible.builtin.debug: var: make_openstack_crds_cleanup_params - name: Run openstack_crds_cleanup retries: "{{ make_openstack_crds_cleanup_retries | default(omit) }}" delay: "{{ 
make_openstack_crds_cleanup_delay | default(omit) }}" until: "{{ make_openstack_crds_cleanup_until | default(true) }}" register: "make_openstack_crds_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_crds_cleanup" dry_run: "{{ make_openstack_crds_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_crds_cleanup_env|default({})), **(make_openstack_crds_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017300000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_networker_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000215715117040726033354 0ustar zuulzuul--- - name: Debug make_edpm_deploy_networker_prep_env when: make_edpm_deploy_networker_prep_env is defined ansible.builtin.debug: var: make_edpm_deploy_networker_prep_env - name: Debug make_edpm_deploy_networker_prep_params when: make_edpm_deploy_networker_prep_params is defined ansible.builtin.debug: var: make_edpm_deploy_networker_prep_params - name: Run edpm_deploy_networker_prep retries: "{{ make_edpm_deploy_networker_prep_retries | default(omit) }}" delay: "{{ make_edpm_deploy_networker_prep_delay | default(omit) }}" until: "{{ make_edpm_deploy_networker_prep_until | default(true) }}" register: "make_edpm_deploy_networker_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_networker_prep" dry_run: "{{ make_edpm_deploy_networker_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_networker_prep_env|default({})), **(make_edpm_deploy_networker_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000017600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_networker_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000223415117040726033350 0ustar zuulzuul--- - name: Debug make_edpm_deploy_networker_cleanup_env when: make_edpm_deploy_networker_cleanup_env is defined ansible.builtin.debug: var: make_edpm_deploy_networker_cleanup_env - name: Debug make_edpm_deploy_networker_cleanup_params when: make_edpm_deploy_networker_cleanup_params is defined ansible.builtin.debug: var: make_edpm_deploy_networker_cleanup_params - name: Run edpm_deploy_networker_cleanup retries: "{{ make_edpm_deploy_networker_cleanup_retries | default(omit) }}" delay: "{{ make_edpm_deploy_networker_cleanup_delay | default(omit) }}" until: "{{ make_edpm_deploy_networker_cleanup_until | default(true) }}" register: "make_edpm_deploy_networker_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_networker_cleanup" dry_run: "{{ make_edpm_deploy_networker_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_networker_cleanup_env|default({})), **(make_edpm_deploy_networker_cleanup_params|default({}))) }}" 
././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_networker.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000204415117040726033347 0ustar zuulzuul--- - name: Debug make_edpm_deploy_networker_env when: make_edpm_deploy_networker_env is defined ansible.builtin.debug: var: make_edpm_deploy_networker_env - name: Debug make_edpm_deploy_networker_params when: make_edpm_deploy_networker_params is defined ansible.builtin.debug: var: make_edpm_deploy_networker_params - name: Run edpm_deploy_networker retries: "{{ make_edpm_deploy_networker_retries | default(omit) }}" delay: "{{ make_edpm_deploy_networker_delay | default(omit) }}" until: "{{ make_edpm_deploy_networker_until | default(true) }}" register: "make_edpm_deploy_networker_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_networker" dry_run: "{{ make_edpm_deploy_networker_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_networker_env|default({})), **(make_edpm_deploy_networker_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_pr0000644000175000017500000000157715117040726033404 0ustar zuulzuul--- - name: Debug make_infra_prep_env when: make_infra_prep_env is defined ansible.builtin.debug: var: make_infra_prep_env - name: Debug make_infra_prep_params when: make_infra_prep_params is defined ansible.builtin.debug: var: make_infra_prep_params - name: Run infra_prep retries: "{{ make_infra_prep_retries | default(omit) }}" delay: "{{ make_infra_prep_delay | default(omit) }}" until: "{{ make_infra_prep_until | default(true) }}" register: "make_infra_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra_prep" dry_run: "{{ make_infra_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_prep_env|default({})), **(make_infra_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra.ym0000644000175000017500000000146415117040726033322 0ustar zuulzuul--- - name: Debug make_infra_env when: make_infra_env is defined ansible.builtin.debug: var: make_infra_env - name: Debug make_infra_params when: make_infra_params is defined ansible.builtin.debug: var: make_infra_params - name: Run infra retries: "{{ make_infra_retries | default(omit) }}" delay: "{{ make_infra_delay | default(omit) }}" until: "{{ make_infra_until | default(true) }}" register: "make_infra_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra" dry_run: 
"{{ make_infra_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_env|default({})), **(make_infra_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_cl0000644000175000017500000000165415117040726033355 0ustar zuulzuul--- - name: Debug make_infra_cleanup_env when: make_infra_cleanup_env is defined ansible.builtin.debug: var: make_infra_cleanup_env - name: Debug make_infra_cleanup_params when: make_infra_cleanup_params is defined ansible.builtin.debug: var: make_infra_cleanup_params - name: Run infra_cleanup retries: "{{ make_infra_cleanup_retries | default(omit) }}" delay: "{{ make_infra_cleanup_delay | default(omit) }}" until: "{{ make_infra_cleanup_until | default(true) }}" register: "make_infra_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra_cleanup" dry_run: "{{ make_infra_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_cleanup_env|default({})), **(make_infra_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_depl0000644000175000017500000000171215117040726033363 0ustar zuulzuul--- - name: Debug make_dns_deploy_prep_env when: make_dns_deploy_prep_env is defined ansible.builtin.debug: var: make_dns_deploy_prep_env - name: Debug make_dns_deploy_prep_params when: make_dns_deploy_prep_params is defined ansible.builtin.debug: var: make_dns_deploy_prep_params - name: Run dns_deploy_prep retries: "{{ make_dns_deploy_prep_retries | default(omit) }}" delay: "{{ make_dns_deploy_prep_delay | default(omit) }}" until: "{{ make_dns_deploy_prep_until | default(true) }}" register: "make_dns_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make dns_deploy_prep" dry_run: "{{ make_dns_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_dns_deploy_prep_env|default({})), **(make_dns_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_depl0000644000175000017500000000157715117040726033374 0ustar zuulzuul--- - name: Debug make_dns_deploy_env when: make_dns_deploy_env is defined ansible.builtin.debug: var: make_dns_deploy_env - name: Debug make_dns_deploy_params when: make_dns_deploy_params is defined ansible.builtin.debug: var: make_dns_deploy_params - name: Run dns_deploy retries: "{{ make_dns_deploy_retries | default(omit) }}" delay: "{{ make_dns_deploy_delay | default(omit) }}" until: "{{ make_dns_deploy_until | default(true) }}" register: "make_dns_deploy_status" cifmw.general.ci_script: output_dir: "{{ 
cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make dns_deploy" dry_run: "{{ make_dns_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_dns_deploy_env|default({})), **(make_dns_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_depl0000644000175000017500000000176715117040726033375 0ustar zuulzuul--- - name: Debug make_dns_deploy_cleanup_env when: make_dns_deploy_cleanup_env is defined ansible.builtin.debug: var: make_dns_deploy_cleanup_env - name: Debug make_dns_deploy_cleanup_params when: make_dns_deploy_cleanup_params is defined ansible.builtin.debug: var: make_dns_deploy_cleanup_params - name: Run dns_deploy_cleanup retries: "{{ make_dns_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_dns_deploy_cleanup_delay | default(omit) }}" until: "{{ make_dns_deploy_cleanup_until | default(true) }}" register: "make_dns_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make dns_deploy_cleanup" dry_run: "{{ make_dns_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_dns_deploy_cleanup_env|default({})), **(make_dns_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfig_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfi0000644000175000017500000000204415117040726033377 0ustar zuulzuul--- - name: Debug make_netconfig_deploy_prep_env when: make_netconfig_deploy_prep_env is defined ansible.builtin.debug: var: make_netconfig_deploy_prep_env - name: Debug make_netconfig_deploy_prep_params when: make_netconfig_deploy_prep_params is defined ansible.builtin.debug: var: make_netconfig_deploy_prep_params - name: Run netconfig_deploy_prep retries: "{{ make_netconfig_deploy_prep_retries | default(omit) }}" delay: "{{ make_netconfig_deploy_prep_delay | default(omit) }}" until: "{{ make_netconfig_deploy_prep_until | default(true) }}" register: "make_netconfig_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netconfig_deploy_prep" dry_run: "{{ make_netconfig_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netconfig_deploy_prep_env|default({})), **(make_netconfig_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfig_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfi0000644000175000017500000000173115117040726033401 0ustar zuulzuul--- - name: Debug make_netconfig_deploy_env when: make_netconfig_deploy_env is defined ansible.builtin.debug: var: make_netconfig_deploy_env - name: Debug 
make_netconfig_deploy_params when: make_netconfig_deploy_params is defined ansible.builtin.debug: var: make_netconfig_deploy_params - name: Run netconfig_deploy retries: "{{ make_netconfig_deploy_retries | default(omit) }}" delay: "{{ make_netconfig_deploy_delay | default(omit) }}" until: "{{ make_netconfig_deploy_until | default(true) }}" register: "make_netconfig_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netconfig_deploy" dry_run: "{{ make_netconfig_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netconfig_deploy_env|default({})), **(make_netconfig_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfig_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfi0000644000175000017500000000212115117040726033373 0ustar zuulzuul--- - name: Debug make_netconfig_deploy_cleanup_env when: make_netconfig_deploy_cleanup_env is defined ansible.builtin.debug: var: make_netconfig_deploy_cleanup_env - name: Debug make_netconfig_deploy_cleanup_params when: make_netconfig_deploy_cleanup_params is defined ansible.builtin.debug: var: make_netconfig_deploy_cleanup_params - name: Run netconfig_deploy_cleanup retries: "{{ make_netconfig_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_netconfig_deploy_cleanup_delay | default(omit) }}" until: "{{ make_netconfig_deploy_cleanup_until | default(true) }}" register: "make_netconfig_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netconfig_deploy_cleanup" dry_run: "{{ make_netconfig_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netconfig_deploy_cleanup_env|default({})), **(make_netconfig_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcached_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcache0000644000175000017500000000204415117040726033334 0ustar zuulzuul--- - name: Debug make_memcached_deploy_prep_env when: make_memcached_deploy_prep_env is defined ansible.builtin.debug: var: make_memcached_deploy_prep_env - name: Debug make_memcached_deploy_prep_params when: make_memcached_deploy_prep_params is defined ansible.builtin.debug: var: make_memcached_deploy_prep_params - name: Run memcached_deploy_prep retries: "{{ make_memcached_deploy_prep_retries | default(omit) }}" delay: "{{ make_memcached_deploy_prep_delay | default(omit) }}" until: "{{ make_memcached_deploy_prep_until | default(true) }}" register: "make_memcached_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make memcached_deploy_prep" dry_run: "{{ make_memcached_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_memcached_deploy_prep_env|default({})), 
**(make_memcached_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcached_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcache0000644000175000017500000000173115117040726033336 0ustar zuulzuul--- - name: Debug make_memcached_deploy_env when: make_memcached_deploy_env is defined ansible.builtin.debug: var: make_memcached_deploy_env - name: Debug make_memcached_deploy_params when: make_memcached_deploy_params is defined ansible.builtin.debug: var: make_memcached_deploy_params - name: Run memcached_deploy retries: "{{ make_memcached_deploy_retries | default(omit) }}" delay: "{{ make_memcached_deploy_delay | default(omit) }}" until: "{{ make_memcached_deploy_until | default(true) }}" register: "make_memcached_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make memcached_deploy" dry_run: "{{ make_memcached_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_memcached_deploy_env|default({})), **(make_memcached_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcached_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcache0000644000175000017500000000212115117040726033330 0ustar zuulzuul--- - name: Debug make_memcached_deploy_cleanup_env when: make_memcached_deploy_cleanup_env is defined ansible.builtin.debug: var: make_memcached_deploy_cleanup_env - name: Debug make_memcached_deploy_cleanup_params when: make_memcached_deploy_cleanup_params is defined ansible.builtin.debug: var: make_memcached_deploy_cleanup_params - name: Run memcached_deploy_cleanup retries: "{{ make_memcached_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_memcached_deploy_cleanup_delay | default(omit) }}" until: "{{ make_memcached_deploy_cleanup_until | default(true) }}" register: "make_memcached_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make memcached_deploy_cleanup" dry_run: "{{ make_memcached_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_memcached_deploy_cleanup_env|default({})), **(make_memcached_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000165415117040726033441 0ustar zuulzuul--- - name: Debug make_keystone_prep_env when: make_keystone_prep_env is defined ansible.builtin.debug: var: make_keystone_prep_env - name: Debug make_keystone_prep_params when: make_keystone_prep_params is defined ansible.builtin.debug: var: make_keystone_prep_params - name: Run keystone_prep retries: "{{ make_keystone_prep_retries | default(omit) }}" delay: "{{ make_keystone_prep_delay | default(omit) }}" until: "{{ 
make_keystone_prep_until | default(true) }}" register: "make_keystone_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_prep" dry_run: "{{ make_keystone_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_prep_env|default({})), **(make_keystone_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000154115117040726033434 0ustar zuulzuul--- - name: Debug make_keystone_env when: make_keystone_env is defined ansible.builtin.debug: var: make_keystone_env - name: Debug make_keystone_params when: make_keystone_params is defined ansible.builtin.debug: var: make_keystone_params - name: Run keystone retries: "{{ make_keystone_retries | default(omit) }}" delay: "{{ make_keystone_delay | default(omit) }}" until: "{{ make_keystone_until | default(true) }}" register: "make_keystone_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone" dry_run: "{{ make_keystone_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_env|default({})), **(make_keystone_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000173115117040726033435 0ustar zuulzuul--- - name: Debug make_keystone_cleanup_env when: make_keystone_cleanup_env is defined ansible.builtin.debug: var: make_keystone_cleanup_env - name: Debug make_keystone_cleanup_params when: make_keystone_cleanup_params is defined ansible.builtin.debug: var: make_keystone_cleanup_params - name: Run keystone_cleanup retries: "{{ make_keystone_cleanup_retries | default(omit) }}" delay: "{{ make_keystone_cleanup_delay | default(omit) }}" until: "{{ make_keystone_cleanup_until | default(true) }}" register: "make_keystone_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_cleanup" dry_run: "{{ make_keystone_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_cleanup_env|default({})), **(make_keystone_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000202515117040726033432 0ustar zuulzuul--- - name: Debug make_keystone_deploy_prep_env when: make_keystone_deploy_prep_env is defined ansible.builtin.debug: var: make_keystone_deploy_prep_env - name: Debug make_keystone_deploy_prep_params when: make_keystone_deploy_prep_params is defined 
ansible.builtin.debug: var: make_keystone_deploy_prep_params - name: Run keystone_deploy_prep retries: "{{ make_keystone_deploy_prep_retries | default(omit) }}" delay: "{{ make_keystone_deploy_prep_delay | default(omit) }}" until: "{{ make_keystone_deploy_prep_until | default(true) }}" register: "make_keystone_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_deploy_prep" dry_run: "{{ make_keystone_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_deploy_prep_env|default({})), **(make_keystone_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000171215117040726033434 0ustar zuulzuul--- - name: Debug make_keystone_deploy_env when: make_keystone_deploy_env is defined ansible.builtin.debug: var: make_keystone_deploy_env - name: Debug make_keystone_deploy_params when: make_keystone_deploy_params is defined ansible.builtin.debug: var: make_keystone_deploy_params - name: Run keystone_deploy retries: "{{ make_keystone_deploy_retries | default(omit) }}" delay: "{{ make_keystone_deploy_delay | default(omit) }}" until: "{{ make_keystone_deploy_until | default(true) }}" register: "make_keystone_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_deploy" dry_run: "{{ make_keystone_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_deploy_env|default({})), **(make_keystone_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000210215117040726033426 0ustar zuulzuul--- - name: Debug make_keystone_deploy_cleanup_env when: make_keystone_deploy_cleanup_env is defined ansible.builtin.debug: var: make_keystone_deploy_cleanup_env - name: Debug make_keystone_deploy_cleanup_params when: make_keystone_deploy_cleanup_params is defined ansible.builtin.debug: var: make_keystone_deploy_cleanup_params - name: Run keystone_deploy_cleanup retries: "{{ make_keystone_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_keystone_deploy_cleanup_delay | default(omit) }}" until: "{{ make_keystone_deploy_cleanup_until | default(true) }}" register: "make_keystone_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_deploy_cleanup" dry_run: "{{ make_keystone_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_deploy_cleanup_env|default({})), **(make_keystone_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican0000644000175000017500000000165415117040726033341 0ustar zuulzuul--- - name: Debug make_barbican_prep_env when: make_barbican_prep_env is defined ansible.builtin.debug: var: make_barbican_prep_env - name: Debug make_barbican_prep_params when: make_barbican_prep_params is defined ansible.builtin.debug: var: make_barbican_prep_params - name: Run barbican_prep retries: "{{ make_barbican_prep_retries | default(omit) }}" delay: "{{ make_barbican_prep_delay | default(omit) }}" until: "{{ make_barbican_prep_until | default(true) }}" register: "make_barbican_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make barbican_prep" dry_run: "{{ make_barbican_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_barbican_prep_env|default({})), **(make_barbican_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican0000644000175000017500000000154115117040726033334 0ustar zuulzuul--- - name: Debug make_barbican_env when: make_barbican_env is defined ansible.builtin.debug: var: make_barbican_env - name: Debug make_barbican_params when: make_barbican_params is defined ansible.builtin.debug: var: make_barbican_params - name: Run barbican retries: "{{ make_barbican_retries | default(omit) }}" delay: "{{ make_barbican_delay | default(omit) }}" until: "{{ make_barbican_until | default(true) }}" register: "make_barbican_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make barbican" dry_run: "{{ make_barbican_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_barbican_env|default({})), **(make_barbican_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican0000644000175000017500000000173115117040726033335 0ustar zuulzuul--- - name: Debug make_barbican_cleanup_env when: make_barbican_cleanup_env is defined ansible.builtin.debug: var: make_barbican_cleanup_env - name: Debug make_barbican_cleanup_params when: make_barbican_cleanup_params is defined ansible.builtin.debug: var: make_barbican_cleanup_params - name: Run barbican_cleanup retries: "{{ make_barbican_cleanup_retries | default(omit) }}" delay: "{{ make_barbican_cleanup_delay | default(omit) }}" until: "{{ make_barbican_cleanup_until | default(true) }}" register: "make_barbican_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make barbican_cleanup" dry_run: "{{ make_barbican_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ 
dict((make_barbican_cleanup_env|default({})), **(make_barbican_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican0000644000175000017500000000202515117040726033332 0ustar zuulzuul--- - name: Debug make_barbican_deploy_prep_env when: make_barbican_deploy_prep_env is defined ansible.builtin.debug: var: make_barbican_deploy_prep_env - name: Debug make_barbican_deploy_prep_params when: make_barbican_deploy_prep_params is defined ansible.builtin.debug: var: make_barbican_deploy_prep_params - name: Run barbican_deploy_prep retries: "{{ make_barbican_deploy_prep_retries | default(omit) }}" delay: "{{ make_barbican_deploy_prep_delay | default(omit) }}" until: "{{ make_barbican_deploy_prep_until | default(true) }}" register: "make_barbican_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make barbican_deploy_prep" dry_run: "{{ make_barbican_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_barbican_deploy_prep_env|default({})), **(make_barbican_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican0000644000175000017500000000171215117040726033334 0ustar zuulzuul--- - name: Debug make_barbican_deploy_env when: make_barbican_deploy_env is defined ansible.builtin.debug: var: make_barbican_deploy_env - name: Debug make_barbican_deploy_params when: make_barbican_deploy_params is defined ansible.builtin.debug: var: make_barbican_deploy_params - name: Run barbican_deploy retries: "{{ make_barbican_deploy_retries | default(omit) }}" delay: "{{ make_barbican_deploy_delay | default(omit) }}" until: "{{ make_barbican_deploy_until | default(true) }}" register: "make_barbican_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make barbican_deploy" dry_run: "{{ make_barbican_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_barbican_deploy_env|default({})), **(make_barbican_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_deploy_validate.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican0000644000175000017500000000212115117040726033327 0ustar zuulzuul--- - name: Debug make_barbican_deploy_validate_env when: make_barbican_deploy_validate_env is defined ansible.builtin.debug: var: make_barbican_deploy_validate_env - name: Debug make_barbican_deploy_validate_params when: make_barbican_deploy_validate_params is defined ansible.builtin.debug: var: make_barbican_deploy_validate_params - name: Run barbican_deploy_validate retries: "{{ make_barbican_deploy_validate_retries | default(omit) }}" delay: "{{ 
make_barbican_deploy_validate_delay | default(omit) }}" until: "{{ make_barbican_deploy_validate_until | default(true) }}" register: "make_barbican_deploy_validate_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make barbican_deploy_validate" dry_run: "{{ make_barbican_deploy_validate_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_barbican_deploy_validate_env|default({})), **(make_barbican_deploy_validate_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican0000644000175000017500000000210215117040726033326 0ustar zuulzuul--- - name: Debug make_barbican_deploy_cleanup_env when: make_barbican_deploy_cleanup_env is defined ansible.builtin.debug: var: make_barbican_deploy_cleanup_env - name: Debug make_barbican_deploy_cleanup_params when: make_barbican_deploy_cleanup_params is defined ansible.builtin.debug: var: make_barbican_deploy_cleanup_params - name: Run barbican_deploy_cleanup retries: "{{ make_barbican_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_barbican_deploy_cleanup_delay | default(omit) }}" until: "{{ make_barbican_deploy_cleanup_until | default(true) }}" register: "make_barbican_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make barbican_deploy_cleanup" dry_run: "{{ make_barbican_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_barbican_deploy_cleanup_env|default({})), **(make_barbican_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb.0000644000175000017500000000152215117040726033247 0ustar zuulzuul--- - name: Debug make_mariadb_env when: make_mariadb_env is defined ansible.builtin.debug: var: make_mariadb_env - name: Debug make_mariadb_params when: make_mariadb_params is defined ansible.builtin.debug: var: make_mariadb_params - name: Run mariadb retries: "{{ make_mariadb_retries | default(omit) }}" delay: "{{ make_mariadb_delay | default(omit) }}" until: "{{ make_mariadb_until | default(true) }}" register: "make_mariadb_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb" dry_run: "{{ make_mariadb_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_env|default({})), **(make_mariadb_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000171215117040726033331 0ustar zuulzuul--- - name: Debug 
make_mariadb_cleanup_env when: make_mariadb_cleanup_env is defined ansible.builtin.debug: var: make_mariadb_cleanup_env - name: Debug make_mariadb_cleanup_params when: make_mariadb_cleanup_params is defined ansible.builtin.debug: var: make_mariadb_cleanup_params - name: Run mariadb_cleanup retries: "{{ make_mariadb_cleanup_retries | default(omit) }}" delay: "{{ make_mariadb_cleanup_delay | default(omit) }}" until: "{{ make_mariadb_cleanup_until | default(true) }}" register: "make_mariadb_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_cleanup" dry_run: "{{ make_mariadb_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_cleanup_env|default({})), **(make_mariadb_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000200615117040726033326 0ustar zuulzuul--- - name: Debug make_mariadb_deploy_prep_env when: make_mariadb_deploy_prep_env is defined ansible.builtin.debug: var: make_mariadb_deploy_prep_env - name: Debug make_mariadb_deploy_prep_params when: make_mariadb_deploy_prep_params is defined ansible.builtin.debug: var: make_mariadb_deploy_prep_params - name: Run mariadb_deploy_prep retries: "{{ make_mariadb_deploy_prep_retries | default(omit) }}" delay: "{{ make_mariadb_deploy_prep_delay | default(omit) }}" until: "{{ make_mariadb_deploy_prep_until | default(true) }}" register: "make_mariadb_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_deploy_prep" dry_run: "{{ make_mariadb_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_deploy_prep_env|default({})), **(make_mariadb_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000167315117040726033337 0ustar zuulzuul--- - name: Debug make_mariadb_deploy_env when: make_mariadb_deploy_env is defined ansible.builtin.debug: var: make_mariadb_deploy_env - name: Debug make_mariadb_deploy_params when: make_mariadb_deploy_params is defined ansible.builtin.debug: var: make_mariadb_deploy_params - name: Run mariadb_deploy retries: "{{ make_mariadb_deploy_retries | default(omit) }}" delay: "{{ make_mariadb_deploy_delay | default(omit) }}" until: "{{ make_mariadb_deploy_until | default(true) }}" register: "make_mariadb_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_deploy" dry_run: "{{ make_mariadb_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_deploy_env|default({})), **(make_mariadb_deploy_params|default({}))) }}" 
././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000206315117040726033331 0ustar zuulzuul--- - name: Debug make_mariadb_deploy_cleanup_env when: make_mariadb_deploy_cleanup_env is defined ansible.builtin.debug: var: make_mariadb_deploy_cleanup_env - name: Debug make_mariadb_deploy_cleanup_params when: make_mariadb_deploy_cleanup_params is defined ansible.builtin.debug: var: make_mariadb_deploy_cleanup_params - name: Run mariadb_deploy_cleanup retries: "{{ make_mariadb_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_mariadb_deploy_cleanup_delay | default(omit) }}" until: "{{ make_mariadb_deploy_cleanup_until | default(true) }}" register: "make_mariadb_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_deploy_cleanup" dry_run: "{{ make_mariadb_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_deploy_cleanup_env|default({})), **(make_mariadb_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000167315117040726033365 0ustar zuulzuul--- - name: Debug make_placement_prep_env when: make_placement_prep_env is defined ansible.builtin.debug: var: make_placement_prep_env - name: Debug make_placement_prep_params when: make_placement_prep_params is defined ansible.builtin.debug: var: make_placement_prep_params - name: Run placement_prep retries: "{{ make_placement_prep_retries | default(omit) }}" delay: "{{ make_placement_prep_delay | default(omit) }}" until: "{{ make_placement_prep_until | default(true) }}" register: "make_placement_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_prep" dry_run: "{{ make_placement_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_prep_env|default({})), **(make_placement_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000156015117040726033360 0ustar zuulzuul--- - name: Debug make_placement_env when: make_placement_env is defined ansible.builtin.debug: var: make_placement_env - name: Debug make_placement_params when: make_placement_params is defined ansible.builtin.debug: var: make_placement_params - name: Run placement retries: "{{ make_placement_retries | default(omit) }}" delay: "{{ make_placement_delay | default(omit) }}" until: "{{ make_placement_until | default(true) }}" register: "make_placement_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ 
'/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement" dry_run: "{{ make_placement_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_env|default({})), **(make_placement_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000175015117040726033361 0ustar zuulzuul--- - name: Debug make_placement_cleanup_env when: make_placement_cleanup_env is defined ansible.builtin.debug: var: make_placement_cleanup_env - name: Debug make_placement_cleanup_params when: make_placement_cleanup_params is defined ansible.builtin.debug: var: make_placement_cleanup_params - name: Run placement_cleanup retries: "{{ make_placement_cleanup_retries | default(omit) }}" delay: "{{ make_placement_cleanup_delay | default(omit) }}" until: "{{ make_placement_cleanup_until | default(true) }}" register: "make_placement_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_cleanup" dry_run: "{{ make_placement_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_cleanup_env|default({})), **(make_placement_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000204415117040726033356 0ustar zuulzuul--- - name: Debug make_placement_deploy_prep_env when: make_placement_deploy_prep_env is defined ansible.builtin.debug: var: make_placement_deploy_prep_env - name: Debug make_placement_deploy_prep_params when: make_placement_deploy_prep_params is defined ansible.builtin.debug: var: make_placement_deploy_prep_params - name: Run placement_deploy_prep retries: "{{ make_placement_deploy_prep_retries | default(omit) }}" delay: "{{ make_placement_deploy_prep_delay | default(omit) }}" until: "{{ make_placement_deploy_prep_until | default(true) }}" register: "make_placement_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_deploy_prep" dry_run: "{{ make_placement_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_deploy_prep_env|default({})), **(make_placement_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000173115117040726033360 0ustar zuulzuul--- - name: Debug make_placement_deploy_env when: make_placement_deploy_env is defined ansible.builtin.debug: var: make_placement_deploy_env - name: Debug make_placement_deploy_params when: make_placement_deploy_params is 
defined ansible.builtin.debug: var: make_placement_deploy_params - name: Run placement_deploy retries: "{{ make_placement_deploy_retries | default(omit) }}" delay: "{{ make_placement_deploy_delay | default(omit) }}" until: "{{ make_placement_deploy_until | default(true) }}" register: "make_placement_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_deploy" dry_run: "{{ make_placement_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_deploy_env|default({})), **(make_placement_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000212115117040726033352 0ustar zuulzuul--- - name: Debug make_placement_deploy_cleanup_env when: make_placement_deploy_cleanup_env is defined ansible.builtin.debug: var: make_placement_deploy_cleanup_env - name: Debug make_placement_deploy_cleanup_params when: make_placement_deploy_cleanup_params is defined ansible.builtin.debug: var: make_placement_deploy_cleanup_params - name: Run placement_deploy_cleanup retries: "{{ make_placement_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_placement_deploy_cleanup_delay | default(omit) }}" until: "{{ make_placement_deploy_cleanup_until | default(true) }}" register: "make_placement_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_deploy_cleanup" dry_run: "{{ make_placement_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_deploy_cleanup_env|default({})), **(make_placement_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_p0000644000175000017500000000161615117040726033346 0ustar zuulzuul--- - name: Debug make_glance_prep_env when: make_glance_prep_env is defined ansible.builtin.debug: var: make_glance_prep_env - name: Debug make_glance_prep_params when: make_glance_prep_params is defined ansible.builtin.debug: var: make_glance_prep_params - name: Run glance_prep retries: "{{ make_glance_prep_retries | default(omit) }}" delay: "{{ make_glance_prep_delay | default(omit) }}" until: "{{ make_glance_prep_until | default(true) }}" register: "make_glance_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance_prep" dry_run: "{{ make_glance_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_prep_env|default({})), **(make_glance_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014700000000000011605 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance.y0000644000175000017500000000150315117040726033271 0ustar zuulzuul--- - name: Debug make_glance_env when: make_glance_env is defined ansible.builtin.debug: var: make_glance_env - name: Debug make_glance_params when: make_glance_params is defined ansible.builtin.debug: var: make_glance_params - name: Run glance retries: "{{ make_glance_retries | default(omit) }}" delay: "{{ make_glance_delay | default(omit) }}" until: "{{ make_glance_until | default(true) }}" register: "make_glance_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance" dry_run: "{{ make_glance_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_env|default({})), **(make_glance_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_c0000644000175000017500000000167315117040726033334 0ustar zuulzuul--- - name: Debug make_glance_cleanup_env when: make_glance_cleanup_env is defined ansible.builtin.debug: var: make_glance_cleanup_env - name: Debug make_glance_cleanup_params when: make_glance_cleanup_params is defined ansible.builtin.debug: var: make_glance_cleanup_params - name: Run glance_cleanup retries: "{{ make_glance_cleanup_retries | default(omit) }}" delay: "{{ make_glance_cleanup_delay | default(omit) }}" until: "{{ make_glance_cleanup_until | default(true) }}" register: "make_glance_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance_cleanup" dry_run: "{{ make_glance_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_cleanup_env|default({})), **(make_glance_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_d0000644000175000017500000000176715117040726033341 0ustar zuulzuul--- - name: Debug make_glance_deploy_prep_env when: make_glance_deploy_prep_env is defined ansible.builtin.debug: var: make_glance_deploy_prep_env - name: Debug make_glance_deploy_prep_params when: make_glance_deploy_prep_params is defined ansible.builtin.debug: var: make_glance_deploy_prep_params - name: Run glance_deploy_prep retries: "{{ make_glance_deploy_prep_retries | default(omit) }}" delay: "{{ make_glance_deploy_prep_delay | default(omit) }}" until: "{{ make_glance_deploy_prep_until | default(true) }}" register: "make_glance_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance_deploy_prep" dry_run: "{{ make_glance_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ 
dict((make_glance_deploy_prep_env|default({})), **(make_glance_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_d0000644000175000017500000000165415117040726033334 0ustar zuulzuul--- - name: Debug make_glance_deploy_env when: make_glance_deploy_env is defined ansible.builtin.debug: var: make_glance_deploy_env - name: Debug make_glance_deploy_params when: make_glance_deploy_params is defined ansible.builtin.debug: var: make_glance_deploy_params - name: Run glance_deploy retries: "{{ make_glance_deploy_retries | default(omit) }}" delay: "{{ make_glance_deploy_delay | default(omit) }}" until: "{{ make_glance_deploy_until | default(true) }}" register: "make_glance_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance_deploy" dry_run: "{{ make_glance_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_deploy_env|default({})), **(make_glance_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_d0000644000175000017500000000204415117040726033326 0ustar zuulzuul--- - name: Debug make_glance_deploy_cleanup_env when: make_glance_deploy_cleanup_env is defined ansible.builtin.debug: var: make_glance_deploy_cleanup_env - name: Debug make_glance_deploy_cleanup_params when: make_glance_deploy_cleanup_params is defined ansible.builtin.debug: var: make_glance_deploy_cleanup_params - name: Run glance_deploy_cleanup retries: "{{ make_glance_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_glance_deploy_cleanup_delay | default(omit) }}" until: "{{ make_glance_deploy_cleanup_until | default(true) }}" register: "make_glance_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance_deploy_cleanup" dry_run: "{{ make_glance_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_deploy_cleanup_env|default({})), **(make_glance_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_prep0000644000175000017500000000154115117040726033423 0ustar zuulzuul--- - name: Debug make_ovn_prep_env when: make_ovn_prep_env is defined ansible.builtin.debug: var: make_ovn_prep_env - name: Debug make_ovn_prep_params when: make_ovn_prep_params is defined ansible.builtin.debug: var: make_ovn_prep_params - name: Run ovn_prep retries: "{{ make_ovn_prep_retries | default(omit) }}" delay: "{{ make_ovn_prep_delay | default(omit) }}" until: "{{ make_ovn_prep_until | default(true) }}" register: "make_ovn_prep_status" cifmw.general.ci_script: output_dir: 
"{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn_prep" dry_run: "{{ make_ovn_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_prep_env|default({})), **(make_ovn_prep_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn.yml0000644000175000017500000000142615117040726033177 0ustar zuulzuul--- - name: Debug make_ovn_env when: make_ovn_env is defined ansible.builtin.debug: var: make_ovn_env - name: Debug make_ovn_params when: make_ovn_params is defined ansible.builtin.debug: var: make_ovn_params - name: Run ovn retries: "{{ make_ovn_retries | default(omit) }}" delay: "{{ make_ovn_delay | default(omit) }}" until: "{{ make_ovn_until | default(true) }}" register: "make_ovn_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn" dry_run: "{{ make_ovn_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_env|default({})), **(make_ovn_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_clea0000644000175000017500000000161615117040726033364 0ustar zuulzuul--- - name: Debug make_ovn_cleanup_env when: make_ovn_cleanup_env is defined ansible.builtin.debug: var: make_ovn_cleanup_env - name: Debug make_ovn_cleanup_params when: make_ovn_cleanup_params is defined ansible.builtin.debug: var: make_ovn_cleanup_params - name: Run ovn_cleanup retries: "{{ make_ovn_cleanup_retries | default(omit) }}" delay: "{{ make_ovn_cleanup_delay | default(omit) }}" until: "{{ make_ovn_cleanup_until | default(true) }}" register: "make_ovn_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn_cleanup" dry_run: "{{ make_ovn_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_cleanup_env|default({})), **(make_ovn_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_depl0000644000175000017500000000171215117040726033401 0ustar zuulzuul--- - name: Debug make_ovn_deploy_prep_env when: make_ovn_deploy_prep_env is defined ansible.builtin.debug: var: make_ovn_deploy_prep_env - name: Debug make_ovn_deploy_prep_params when: make_ovn_deploy_prep_params is defined ansible.builtin.debug: var: make_ovn_deploy_prep_params - name: Run ovn_deploy_prep retries: "{{ make_ovn_deploy_prep_retries | default(omit) }}" delay: "{{ make_ovn_deploy_prep_delay | default(omit) }}" until: "{{ make_ovn_deploy_prep_until | default(true) }}" register: "make_ovn_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make 
ovn_deploy_prep" dry_run: "{{ make_ovn_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_deploy_prep_env|default({})), **(make_ovn_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_depl0000644000175000017500000000157715117040726033412 0ustar zuulzuul--- - name: Debug make_ovn_deploy_env when: make_ovn_deploy_env is defined ansible.builtin.debug: var: make_ovn_deploy_env - name: Debug make_ovn_deploy_params when: make_ovn_deploy_params is defined ansible.builtin.debug: var: make_ovn_deploy_params - name: Run ovn_deploy retries: "{{ make_ovn_deploy_retries | default(omit) }}" delay: "{{ make_ovn_deploy_delay | default(omit) }}" until: "{{ make_ovn_deploy_until | default(true) }}" register: "make_ovn_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn_deploy" dry_run: "{{ make_ovn_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_deploy_env|default({})), **(make_ovn_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_depl0000644000175000017500000000176715117040726033413 0ustar zuulzuul--- - name: Debug make_ovn_deploy_cleanup_env when: make_ovn_deploy_cleanup_env is defined ansible.builtin.debug: var: make_ovn_deploy_cleanup_env - name: Debug make_ovn_deploy_cleanup_params when: make_ovn_deploy_cleanup_params is defined ansible.builtin.debug: var: make_ovn_deploy_cleanup_params - name: Run ovn_deploy_cleanup retries: "{{ make_ovn_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_ovn_deploy_cleanup_delay | default(omit) }}" until: "{{ make_ovn_deploy_cleanup_until | default(true) }}" register: "make_ovn_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn_deploy_cleanup" dry_run: "{{ make_ovn_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_deploy_cleanup_env|default({})), **(make_ovn_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000163515117040726033430 0ustar zuulzuul--- - name: Debug make_neutron_prep_env when: make_neutron_prep_env is defined ansible.builtin.debug: var: make_neutron_prep_env - name: Debug make_neutron_prep_params when: make_neutron_prep_params is defined ansible.builtin.debug: var: make_neutron_prep_params - name: Run neutron_prep retries: "{{ make_neutron_prep_retries | default(omit) }}" delay: "{{ make_neutron_prep_delay | default(omit) }}" until: "{{ make_neutron_prep_until | default(true) }}" register: 
"make_neutron_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_prep" dry_run: "{{ make_neutron_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_prep_env|default({})), **(make_neutron_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron.0000644000175000017500000000152215117040726033342 0ustar zuulzuul--- - name: Debug make_neutron_env when: make_neutron_env is defined ansible.builtin.debug: var: make_neutron_env - name: Debug make_neutron_params when: make_neutron_params is defined ansible.builtin.debug: var: make_neutron_params - name: Run neutron retries: "{{ make_neutron_retries | default(omit) }}" delay: "{{ make_neutron_delay | default(omit) }}" until: "{{ make_neutron_until | default(true) }}" register: "make_neutron_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron" dry_run: "{{ make_neutron_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_env|default({})), **(make_neutron_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000171215117040726033424 0ustar zuulzuul--- - name: Debug make_neutron_cleanup_env when: make_neutron_cleanup_env is defined ansible.builtin.debug: var: make_neutron_cleanup_env - name: Debug make_neutron_cleanup_params when: make_neutron_cleanup_params is defined ansible.builtin.debug: var: make_neutron_cleanup_params - name: Run neutron_cleanup retries: "{{ make_neutron_cleanup_retries | default(omit) }}" delay: "{{ make_neutron_cleanup_delay | default(omit) }}" until: "{{ make_neutron_cleanup_until | default(true) }}" register: "make_neutron_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_cleanup" dry_run: "{{ make_neutron_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_cleanup_env|default({})), **(make_neutron_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000200615117040726033421 0ustar zuulzuul--- - name: Debug make_neutron_deploy_prep_env when: make_neutron_deploy_prep_env is defined ansible.builtin.debug: var: make_neutron_deploy_prep_env - name: Debug make_neutron_deploy_prep_params when: make_neutron_deploy_prep_params is defined ansible.builtin.debug: var: make_neutron_deploy_prep_params - name: Run neutron_deploy_prep retries: "{{ 
make_neutron_deploy_prep_retries | default(omit) }}" delay: "{{ make_neutron_deploy_prep_delay | default(omit) }}" until: "{{ make_neutron_deploy_prep_until | default(true) }}" register: "make_neutron_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_deploy_prep" dry_run: "{{ make_neutron_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_deploy_prep_env|default({})), **(make_neutron_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000167315117040726033432 0ustar zuulzuul--- - name: Debug make_neutron_deploy_env when: make_neutron_deploy_env is defined ansible.builtin.debug: var: make_neutron_deploy_env - name: Debug make_neutron_deploy_params when: make_neutron_deploy_params is defined ansible.builtin.debug: var: make_neutron_deploy_params - name: Run neutron_deploy retries: "{{ make_neutron_deploy_retries | default(omit) }}" delay: "{{ make_neutron_deploy_delay | default(omit) }}" until: "{{ make_neutron_deploy_until | default(true) }}" register: "make_neutron_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_deploy" dry_run: "{{ make_neutron_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_deploy_env|default({})), **(make_neutron_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000206315117040726033424 0ustar zuulzuul--- - name: Debug make_neutron_deploy_cleanup_env when: make_neutron_deploy_cleanup_env is defined ansible.builtin.debug: var: make_neutron_deploy_cleanup_env - name: Debug make_neutron_deploy_cleanup_params when: make_neutron_deploy_cleanup_params is defined ansible.builtin.debug: var: make_neutron_deploy_cleanup_params - name: Run neutron_deploy_cleanup retries: "{{ make_neutron_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_neutron_deploy_cleanup_delay | default(omit) }}" until: "{{ make_neutron_deploy_cleanup_until | default(true) }}" register: "make_neutron_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_deploy_cleanup" dry_run: "{{ make_neutron_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_deploy_cleanup_env|default({})), **(make_neutron_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_p0000644000175000017500000000161615117040726033361 0ustar zuulzuul--- - name: Debug make_cinder_prep_env when: make_cinder_prep_env is defined ansible.builtin.debug: var: make_cinder_prep_env - name: Debug make_cinder_prep_params when: make_cinder_prep_params is defined ansible.builtin.debug: var: make_cinder_prep_params - name: Run cinder_prep retries: "{{ make_cinder_prep_retries | default(omit) }}" delay: "{{ make_cinder_prep_delay | default(omit) }}" until: "{{ make_cinder_prep_until | default(true) }}" register: "make_cinder_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_prep" dry_run: "{{ make_cinder_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_prep_env|default({})), **(make_cinder_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder.y0000644000175000017500000000150315117040726033304 0ustar zuulzuul--- - name: Debug make_cinder_env when: make_cinder_env is defined ansible.builtin.debug: var: make_cinder_env - name: Debug make_cinder_params when: make_cinder_params is defined ansible.builtin.debug: var: make_cinder_params - name: Run cinder retries: "{{ make_cinder_retries | default(omit) }}" delay: "{{ make_cinder_delay | default(omit) }}" until: "{{ make_cinder_until | default(true) }}" register: "make_cinder_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder" dry_run: "{{ make_cinder_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_env|default({})), **(make_cinder_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_c0000644000175000017500000000167315117040726033347 0ustar zuulzuul--- - name: Debug make_cinder_cleanup_env when: make_cinder_cleanup_env is defined ansible.builtin.debug: var: make_cinder_cleanup_env - name: Debug make_cinder_cleanup_params when: make_cinder_cleanup_params is defined ansible.builtin.debug: var: make_cinder_cleanup_params - name: Run cinder_cleanup retries: "{{ make_cinder_cleanup_retries | default(omit) }}" delay: "{{ make_cinder_cleanup_delay | default(omit) }}" until: "{{ make_cinder_cleanup_until | default(true) }}" register: "make_cinder_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_cleanup" dry_run: "{{ make_cinder_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_cleanup_env|default({})), **(make_cinder_cleanup_params|default({}))) }}" 
././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_d0000644000175000017500000000176715117040726033354 0ustar zuulzuul--- - name: Debug make_cinder_deploy_prep_env when: make_cinder_deploy_prep_env is defined ansible.builtin.debug: var: make_cinder_deploy_prep_env - name: Debug make_cinder_deploy_prep_params when: make_cinder_deploy_prep_params is defined ansible.builtin.debug: var: make_cinder_deploy_prep_params - name: Run cinder_deploy_prep retries: "{{ make_cinder_deploy_prep_retries | default(omit) }}" delay: "{{ make_cinder_deploy_prep_delay | default(omit) }}" until: "{{ make_cinder_deploy_prep_until | default(true) }}" register: "make_cinder_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_deploy_prep" dry_run: "{{ make_cinder_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_deploy_prep_env|default({})), **(make_cinder_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_d0000644000175000017500000000165415117040726033347 0ustar zuulzuul--- - name: Debug make_cinder_deploy_env when: make_cinder_deploy_env is defined ansible.builtin.debug: var: make_cinder_deploy_env - name: Debug make_cinder_deploy_params when: make_cinder_deploy_params is defined ansible.builtin.debug: var: make_cinder_deploy_params - name: Run cinder_deploy retries: "{{ make_cinder_deploy_retries | default(omit) }}" delay: "{{ make_cinder_deploy_delay | default(omit) }}" until: "{{ make_cinder_deploy_until | default(true) }}" register: "make_cinder_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_deploy" dry_run: "{{ make_cinder_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_deploy_env|default({})), **(make_cinder_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_d0000644000175000017500000000204415117040726033341 0ustar zuulzuul--- - name: Debug make_cinder_deploy_cleanup_env when: make_cinder_deploy_cleanup_env is defined ansible.builtin.debug: var: make_cinder_deploy_cleanup_env - name: Debug make_cinder_deploy_cleanup_params when: make_cinder_deploy_cleanup_params is defined ansible.builtin.debug: var: make_cinder_deploy_cleanup_params - name: Run cinder_deploy_cleanup retries: "{{ make_cinder_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_cinder_deploy_cleanup_delay | default(omit) }}" until: "{{ make_cinder_deploy_cleanup_until | default(true) }}" register: "make_cinder_deploy_cleanup_status" cifmw.general.ci_script: 
output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_deploy_cleanup" dry_run: "{{ make_cinder_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_deploy_cleanup_env|default({})), **(make_cinder_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq0000644000175000017500000000165415117040726033401 0ustar zuulzuul--- - name: Debug make_rabbitmq_prep_env when: make_rabbitmq_prep_env is defined ansible.builtin.debug: var: make_rabbitmq_prep_env - name: Debug make_rabbitmq_prep_params when: make_rabbitmq_prep_params is defined ansible.builtin.debug: var: make_rabbitmq_prep_params - name: Run rabbitmq_prep retries: "{{ make_rabbitmq_prep_retries | default(omit) }}" delay: "{{ make_rabbitmq_prep_delay | default(omit) }}" until: "{{ make_rabbitmq_prep_until | default(true) }}" register: "make_rabbitmq_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rabbitmq_prep" dry_run: "{{ make_rabbitmq_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rabbitmq_prep_env|default({})), **(make_rabbitmq_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq0000644000175000017500000000154115117040726033374 0ustar zuulzuul--- - name: Debug make_rabbitmq_env when: make_rabbitmq_env is defined ansible.builtin.debug: var: make_rabbitmq_env - name: Debug make_rabbitmq_params when: make_rabbitmq_params is defined ansible.builtin.debug: var: make_rabbitmq_params - name: Run rabbitmq retries: "{{ make_rabbitmq_retries | default(omit) }}" delay: "{{ make_rabbitmq_delay | default(omit) }}" until: "{{ make_rabbitmq_until | default(true) }}" register: "make_rabbitmq_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rabbitmq" dry_run: "{{ make_rabbitmq_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rabbitmq_env|default({})), **(make_rabbitmq_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq0000644000175000017500000000173115117040726033375 0ustar zuulzuul--- - name: Debug make_rabbitmq_cleanup_env when: make_rabbitmq_cleanup_env is defined ansible.builtin.debug: var: make_rabbitmq_cleanup_env - name: Debug make_rabbitmq_cleanup_params when: make_rabbitmq_cleanup_params is defined ansible.builtin.debug: var: make_rabbitmq_cleanup_params - name: Run rabbitmq_cleanup retries: "{{ make_rabbitmq_cleanup_retries | default(omit) }}" delay: "{{ 
make_rabbitmq_cleanup_delay | default(omit) }}" until: "{{ make_rabbitmq_cleanup_until | default(true) }}" register: "make_rabbitmq_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rabbitmq_cleanup" dry_run: "{{ make_rabbitmq_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rabbitmq_cleanup_env|default({})), **(make_rabbitmq_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq0000644000175000017500000000202515117040726033372 0ustar zuulzuul--- - name: Debug make_rabbitmq_deploy_prep_env when: make_rabbitmq_deploy_prep_env is defined ansible.builtin.debug: var: make_rabbitmq_deploy_prep_env - name: Debug make_rabbitmq_deploy_prep_params when: make_rabbitmq_deploy_prep_params is defined ansible.builtin.debug: var: make_rabbitmq_deploy_prep_params - name: Run rabbitmq_deploy_prep retries: "{{ make_rabbitmq_deploy_prep_retries | default(omit) }}" delay: "{{ make_rabbitmq_deploy_prep_delay | default(omit) }}" until: "{{ make_rabbitmq_deploy_prep_until | default(true) }}" register: "make_rabbitmq_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rabbitmq_deploy_prep" dry_run: "{{ make_rabbitmq_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rabbitmq_deploy_prep_env|default({})), **(make_rabbitmq_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq0000644000175000017500000000171215117040726033374 0ustar zuulzuul--- - name: Debug make_rabbitmq_deploy_env when: make_rabbitmq_deploy_env is defined ansible.builtin.debug: var: make_rabbitmq_deploy_env - name: Debug make_rabbitmq_deploy_params when: make_rabbitmq_deploy_params is defined ansible.builtin.debug: var: make_rabbitmq_deploy_params - name: Run rabbitmq_deploy retries: "{{ make_rabbitmq_deploy_retries | default(omit) }}" delay: "{{ make_rabbitmq_deploy_delay | default(omit) }}" until: "{{ make_rabbitmq_deploy_until | default(true) }}" register: "make_rabbitmq_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rabbitmq_deploy" dry_run: "{{ make_rabbitmq_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rabbitmq_deploy_env|default({})), **(make_rabbitmq_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq0000644000175000017500000000210215117040726033366 0ustar zuulzuul--- - 
name: Debug make_rabbitmq_deploy_cleanup_env when: make_rabbitmq_deploy_cleanup_env is defined ansible.builtin.debug: var: make_rabbitmq_deploy_cleanup_env - name: Debug make_rabbitmq_deploy_cleanup_params when: make_rabbitmq_deploy_cleanup_params is defined ansible.builtin.debug: var: make_rabbitmq_deploy_cleanup_params - name: Run rabbitmq_deploy_cleanup retries: "{{ make_rabbitmq_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_rabbitmq_deploy_cleanup_delay | default(omit) }}" until: "{{ make_rabbitmq_deploy_cleanup_until | default(true) }}" register: "make_rabbitmq_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rabbitmq_deploy_cleanup" dry_run: "{{ make_rabbitmq_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rabbitmq_deploy_cleanup_env|default({})), **(make_rabbitmq_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_p0000644000175000017500000000161615117040726033400 0ustar zuulzuul--- - name: Debug make_ironic_prep_env when: make_ironic_prep_env is defined ansible.builtin.debug: var: make_ironic_prep_env - name: Debug make_ironic_prep_params when: make_ironic_prep_params is defined ansible.builtin.debug: var: make_ironic_prep_params - name: Run ironic_prep retries: "{{ make_ironic_prep_retries | default(omit) }}" delay: "{{ make_ironic_prep_delay | default(omit) }}" until: "{{ make_ironic_prep_until | default(true) }}" register: "make_ironic_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_prep" dry_run: "{{ make_ironic_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_prep_env|default({})), **(make_ironic_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic.y0000644000175000017500000000150315117040726033323 0ustar zuulzuul--- - name: Debug make_ironic_env when: make_ironic_env is defined ansible.builtin.debug: var: make_ironic_env - name: Debug make_ironic_params when: make_ironic_params is defined ansible.builtin.debug: var: make_ironic_params - name: Run ironic retries: "{{ make_ironic_retries | default(omit) }}" delay: "{{ make_ironic_delay | default(omit) }}" until: "{{ make_ironic_until | default(true) }}" register: "make_ironic_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic" dry_run: "{{ make_ironic_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_env|default({})), **(make_ironic_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_c0000644000175000017500000000167315117040726033366 0ustar zuulzuul--- - name: Debug make_ironic_cleanup_env when: make_ironic_cleanup_env is defined ansible.builtin.debug: var: make_ironic_cleanup_env - name: Debug make_ironic_cleanup_params when: make_ironic_cleanup_params is defined ansible.builtin.debug: var: make_ironic_cleanup_params - name: Run ironic_cleanup retries: "{{ make_ironic_cleanup_retries | default(omit) }}" delay: "{{ make_ironic_cleanup_delay | default(omit) }}" until: "{{ make_ironic_cleanup_until | default(true) }}" register: "make_ironic_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_cleanup" dry_run: "{{ make_ironic_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_cleanup_env|default({})), **(make_ironic_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_d0000644000175000017500000000176715117040726033373 0ustar zuulzuul--- - name: Debug make_ironic_deploy_prep_env when: make_ironic_deploy_prep_env is defined ansible.builtin.debug: var: make_ironic_deploy_prep_env - name: Debug make_ironic_deploy_prep_params when: make_ironic_deploy_prep_params is defined ansible.builtin.debug: var: make_ironic_deploy_prep_params - name: Run ironic_deploy_prep retries: "{{ make_ironic_deploy_prep_retries | default(omit) }}" delay: "{{ make_ironic_deploy_prep_delay | default(omit) }}" until: "{{ make_ironic_deploy_prep_until | default(true) }}" register: "make_ironic_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_deploy_prep" dry_run: "{{ make_ironic_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_deploy_prep_env|default({})), **(make_ironic_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_d0000644000175000017500000000165415117040726033366 0ustar zuulzuul--- - name: Debug make_ironic_deploy_env when: make_ironic_deploy_env is defined ansible.builtin.debug: var: make_ironic_deploy_env - name: Debug make_ironic_deploy_params when: make_ironic_deploy_params is defined ansible.builtin.debug: var: make_ironic_deploy_params - name: Run ironic_deploy retries: "{{ make_ironic_deploy_retries | default(omit) }}" delay: "{{ make_ironic_deploy_delay | default(omit) }}" until: "{{ make_ironic_deploy_until | default(true) }}" register: "make_ironic_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_deploy" dry_run: "{{ make_ironic_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_deploy_env|default({})), **(make_ironic_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_d0000644000175000017500000000204415117040726033360 0ustar zuulzuul--- - name: Debug make_ironic_deploy_cleanup_env when: make_ironic_deploy_cleanup_env is defined ansible.builtin.debug: var: make_ironic_deploy_cleanup_env - name: Debug make_ironic_deploy_cleanup_params when: make_ironic_deploy_cleanup_params is defined ansible.builtin.debug: var: make_ironic_deploy_cleanup_params - name: Run ironic_deploy_cleanup retries: "{{ make_ironic_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_ironic_deploy_cleanup_delay | default(omit) }}" until: "{{ make_ironic_deploy_cleanup_until | default(true) }}" register: "make_ironic_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_deploy_cleanup" dry_run: "{{ make_ironic_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_deploy_cleanup_env|default({})), **(make_ironic_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000163515117040726033364 0ustar zuulzuul--- - name: Debug make_octavia_prep_env when: make_octavia_prep_env is defined ansible.builtin.debug: var: make_octavia_prep_env - name: Debug make_octavia_prep_params when: make_octavia_prep_params is defined ansible.builtin.debug: var: make_octavia_prep_params - name: Run octavia_prep retries: "{{ make_octavia_prep_retries | default(omit) }}" delay: "{{ make_octavia_prep_delay | default(omit) }}" until: "{{ make_octavia_prep_until | default(true) }}" register: "make_octavia_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_prep" dry_run: "{{ make_octavia_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_prep_env|default({})), **(make_octavia_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia.0000644000175000017500000000152215117040726033276 0ustar zuulzuul--- - name: Debug make_octavia_env when: make_octavia_env is defined ansible.builtin.debug: var: make_octavia_env - name: Debug make_octavia_params when: make_octavia_params is defined ansible.builtin.debug: var: make_octavia_params - name: Run octavia retries: "{{ make_octavia_retries | default(omit) }}" delay: "{{ make_octavia_delay | 
default(omit) }}" until: "{{ make_octavia_until | default(true) }}" register: "make_octavia_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia" dry_run: "{{ make_octavia_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_env|default({})), **(make_octavia_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000171215117040726033360 0ustar zuulzuul--- - name: Debug make_octavia_cleanup_env when: make_octavia_cleanup_env is defined ansible.builtin.debug: var: make_octavia_cleanup_env - name: Debug make_octavia_cleanup_params when: make_octavia_cleanup_params is defined ansible.builtin.debug: var: make_octavia_cleanup_params - name: Run octavia_cleanup retries: "{{ make_octavia_cleanup_retries | default(omit) }}" delay: "{{ make_octavia_cleanup_delay | default(omit) }}" until: "{{ make_octavia_cleanup_until | default(true) }}" register: "make_octavia_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_cleanup" dry_run: "{{ make_octavia_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_cleanup_env|default({})), **(make_octavia_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000200615117040726033355 0ustar zuulzuul--- - name: Debug make_octavia_deploy_prep_env when: make_octavia_deploy_prep_env is defined ansible.builtin.debug: var: make_octavia_deploy_prep_env - name: Debug make_octavia_deploy_prep_params when: make_octavia_deploy_prep_params is defined ansible.builtin.debug: var: make_octavia_deploy_prep_params - name: Run octavia_deploy_prep retries: "{{ make_octavia_deploy_prep_retries | default(omit) }}" delay: "{{ make_octavia_deploy_prep_delay | default(omit) }}" until: "{{ make_octavia_deploy_prep_until | default(true) }}" register: "make_octavia_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_deploy_prep" dry_run: "{{ make_octavia_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_deploy_prep_env|default({})), **(make_octavia_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000167315117040726033366 0ustar zuulzuul--- - name: Debug make_octavia_deploy_env when: make_octavia_deploy_env is defined ansible.builtin.debug: var: 
make_octavia_deploy_env - name: Debug make_octavia_deploy_params when: make_octavia_deploy_params is defined ansible.builtin.debug: var: make_octavia_deploy_params - name: Run octavia_deploy retries: "{{ make_octavia_deploy_retries | default(omit) }}" delay: "{{ make_octavia_deploy_delay | default(omit) }}" until: "{{ make_octavia_deploy_until | default(true) }}" register: "make_octavia_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_deploy" dry_run: "{{ make_octavia_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_deploy_env|default({})), **(make_octavia_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000206315117040726033360 0ustar zuulzuul--- - name: Debug make_octavia_deploy_cleanup_env when: make_octavia_deploy_cleanup_env is defined ansible.builtin.debug: var: make_octavia_deploy_cleanup_env - name: Debug make_octavia_deploy_cleanup_params when: make_octavia_deploy_cleanup_params is defined ansible.builtin.debug: var: make_octavia_deploy_cleanup_params - name: Run octavia_deploy_cleanup retries: "{{ make_octavia_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_octavia_deploy_cleanup_delay | default(omit) }}" until: "{{ make_octavia_deploy_cleanup_until | default(true) }}" register: "make_octavia_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_deploy_cleanup" dry_run: "{{ make_octavia_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_deploy_cleanup_env|default({})), **(make_octavia_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000167315117040726033377 0ustar zuulzuul--- - name: Debug make_designate_prep_env when: make_designate_prep_env is defined ansible.builtin.debug: var: make_designate_prep_env - name: Debug make_designate_prep_params when: make_designate_prep_params is defined ansible.builtin.debug: var: make_designate_prep_params - name: Run designate_prep retries: "{{ make_designate_prep_retries | default(omit) }}" delay: "{{ make_designate_prep_delay | default(omit) }}" until: "{{ make_designate_prep_until | default(true) }}" register: "make_designate_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_prep" dry_run: "{{ make_designate_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_prep_env|default({})), **(make_designate_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000156015117040726033372 0ustar zuulzuul--- - name: Debug make_designate_env when: make_designate_env is defined ansible.builtin.debug: var: make_designate_env - name: Debug make_designate_params when: make_designate_params is defined ansible.builtin.debug: var: make_designate_params - name: Run designate retries: "{{ make_designate_retries | default(omit) }}" delay: "{{ make_designate_delay | default(omit) }}" until: "{{ make_designate_until | default(true) }}" register: "make_designate_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate" dry_run: "{{ make_designate_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_env|default({})), **(make_designate_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000175015117040726033373 0ustar zuulzuul--- - name: Debug make_designate_cleanup_env when: make_designate_cleanup_env is defined ansible.builtin.debug: var: make_designate_cleanup_env - name: Debug make_designate_cleanup_params when: make_designate_cleanup_params is defined ansible.builtin.debug: var: make_designate_cleanup_params - name: Run designate_cleanup retries: "{{ make_designate_cleanup_retries | default(omit) }}" delay: "{{ make_designate_cleanup_delay | default(omit) }}" until: "{{ make_designate_cleanup_until | default(true) }}" register: "make_designate_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_cleanup" dry_run: "{{ make_designate_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_cleanup_env|default({})), **(make_designate_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000204415117040726033370 0ustar zuulzuul--- - name: Debug make_designate_deploy_prep_env when: make_designate_deploy_prep_env is defined ansible.builtin.debug: var: make_designate_deploy_prep_env - name: Debug make_designate_deploy_prep_params when: make_designate_deploy_prep_params is defined ansible.builtin.debug: var: make_designate_deploy_prep_params - name: Run designate_deploy_prep retries: "{{ make_designate_deploy_prep_retries | default(omit) }}" delay: "{{ make_designate_deploy_prep_delay | default(omit) }}" until: "{{ make_designate_deploy_prep_until | default(true) }}" register: "make_designate_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_deploy_prep" dry_run: "{{ make_designate_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_deploy_prep_env|default({})), **(make_designate_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000173115117040726033372 0ustar zuulzuul--- - name: Debug make_designate_deploy_env when: make_designate_deploy_env is defined ansible.builtin.debug: var: make_designate_deploy_env - name: Debug make_designate_deploy_params when: make_designate_deploy_params is defined ansible.builtin.debug: var: make_designate_deploy_params - name: Run designate_deploy retries: "{{ make_designate_deploy_retries | default(omit) }}" delay: "{{ make_designate_deploy_delay | default(omit) }}" until: "{{ make_designate_deploy_until | default(true) }}" register: "make_designate_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_deploy" dry_run: "{{ make_designate_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_deploy_env|default({})), **(make_designate_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000212115117040726033364 0ustar zuulzuul--- - name: Debug make_designate_deploy_cleanup_env when: make_designate_deploy_cleanup_env is defined ansible.builtin.debug: var: make_designate_deploy_cleanup_env - name: Debug make_designate_deploy_cleanup_params when: make_designate_deploy_cleanup_params is defined ansible.builtin.debug: var: make_designate_deploy_cleanup_params - name: Run designate_deploy_cleanup retries: "{{ make_designate_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_designate_deploy_cleanup_delay | default(omit) }}" until: "{{ make_designate_deploy_cleanup_until | default(true) }}" register: "make_designate_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_deploy_cleanup" dry_run: "{{ make_designate_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_deploy_cleanup_env|default({})), **(make_designate_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_pre0000644000175000017500000000156015117040726033405 0ustar zuulzuul--- - name: Debug make_nova_prep_env when: make_nova_prep_env is defined ansible.builtin.debug: var: make_nova_prep_env - name: Debug make_nova_prep_params when: make_nova_prep_params is defined 
ansible.builtin.debug: var: make_nova_prep_params - name: Run nova_prep retries: "{{ make_nova_prep_retries | default(omit) }}" delay: "{{ make_nova_prep_delay | default(omit) }}" until: "{{ make_nova_prep_until | default(true) }}" register: "make_nova_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_prep" dry_run: "{{ make_nova_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_prep_env|default({})), **(make_nova_prep_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova.yml0000644000175000017500000000144515117040726033341 0ustar zuulzuul--- - name: Debug make_nova_env when: make_nova_env is defined ansible.builtin.debug: var: make_nova_env - name: Debug make_nova_params when: make_nova_params is defined ansible.builtin.debug: var: make_nova_params - name: Run nova retries: "{{ make_nova_retries | default(omit) }}" delay: "{{ make_nova_delay | default(omit) }}" until: "{{ make_nova_until | default(true) }}" register: "make_nova_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova" dry_run: "{{ make_nova_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_env|default({})), **(make_nova_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_cle0000644000175000017500000000163515117040726033365 0ustar zuulzuul--- - name: Debug make_nova_cleanup_env when: make_nova_cleanup_env is defined ansible.builtin.debug: var: make_nova_cleanup_env - name: Debug make_nova_cleanup_params when: make_nova_cleanup_params is defined ansible.builtin.debug: var: make_nova_cleanup_params - name: Run nova_cleanup retries: "{{ make_nova_cleanup_retries | default(omit) }}" delay: "{{ make_nova_cleanup_delay | default(omit) }}" until: "{{ make_nova_cleanup_until | default(true) }}" register: "make_nova_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_cleanup" dry_run: "{{ make_nova_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_cleanup_env|default({})), **(make_nova_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_dep0000644000175000017500000000173115117040726033367 0ustar zuulzuul--- - name: Debug make_nova_deploy_prep_env when: make_nova_deploy_prep_env is defined ansible.builtin.debug: var: make_nova_deploy_prep_env - name: Debug make_nova_deploy_prep_params when: make_nova_deploy_prep_params is defined ansible.builtin.debug: var: make_nova_deploy_prep_params - name: Run nova_deploy_prep retries: "{{ make_nova_deploy_prep_retries | default(omit) }}" delay: "{{ 
make_nova_deploy_prep_delay | default(omit) }}" until: "{{ make_nova_deploy_prep_until | default(true) }}" register: "make_nova_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_deploy_prep" dry_run: "{{ make_nova_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_deploy_prep_env|default({})), **(make_nova_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_dep0000644000175000017500000000161615117040726033371 0ustar zuulzuul--- - name: Debug make_nova_deploy_env when: make_nova_deploy_env is defined ansible.builtin.debug: var: make_nova_deploy_env - name: Debug make_nova_deploy_params when: make_nova_deploy_params is defined ansible.builtin.debug: var: make_nova_deploy_params - name: Run nova_deploy retries: "{{ make_nova_deploy_retries | default(omit) }}" delay: "{{ make_nova_deploy_delay | default(omit) }}" until: "{{ make_nova_deploy_until | default(true) }}" register: "make_nova_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_deploy" dry_run: "{{ make_nova_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_deploy_env|default({})), **(make_nova_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_dep0000644000175000017500000000200615117040726033363 0ustar zuulzuul--- - name: Debug make_nova_deploy_cleanup_env when: make_nova_deploy_cleanup_env is defined ansible.builtin.debug: var: make_nova_deploy_cleanup_env - name: Debug make_nova_deploy_cleanup_params when: make_nova_deploy_cleanup_params is defined ansible.builtin.debug: var: make_nova_deploy_cleanup_params - name: Run nova_deploy_cleanup retries: "{{ make_nova_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_nova_deploy_cleanup_delay | default(omit) }}" until: "{{ make_nova_deploy_cleanup_until | default(true) }}" register: "make_nova_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_deploy_cleanup" dry_run: "{{ make_nova_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_deploy_cleanup_env|default({})), **(make_nova_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000175015117040726033333 0ustar zuulzuul--- - name: Debug make_mariadb_kuttl_run_env when: make_mariadb_kuttl_run_env is defined 
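# ---------------------------------------------------------------------------
# [Editor's note - illustrative sketch, not part of the captured job output]
# The extra_args expression used by all of these generated tasks,
#   dict((make_<target>_env|default({})), **(make_<target>_params|default({}))),
# uses the dict() global that Jinja2 exposes (Python's dict): it starts from
# the *_env dictionary and overlays the *_params dictionary, so a key defined
# in both ends up with the *_params value. With made-up values:
#
#   make_mariadb_kuttl_run_env:    {NAMESPACE: openstack, TIMEOUT: 300s}
#   make_mariadb_kuttl_run_params: {NAMESPACE: mariadb-kuttl-tests}
#   # resulting extra_args -> {NAMESPACE: mariadb-kuttl-tests, TIMEOUT: 300s}
#
# The retries/delay/until triple is inert unless a caller overrides it: the
# until expression falls back to true, so each "make <target>" attempt is
# made only once by default.
# ---------------------------------------------------------------------------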
ansible.builtin.debug: var: make_mariadb_kuttl_run_env - name: Debug make_mariadb_kuttl_run_params when: make_mariadb_kuttl_run_params is defined ansible.builtin.debug: var: make_mariadb_kuttl_run_params - name: Run mariadb_kuttl_run retries: "{{ make_mariadb_kuttl_run_retries | default(omit) }}" delay: "{{ make_mariadb_kuttl_run_delay | default(omit) }}" until: "{{ make_mariadb_kuttl_run_until | default(true) }}" register: "make_mariadb_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_kuttl_run" dry_run: "{{ make_mariadb_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_kuttl_run_env|default({})), **(make_mariadb_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000165415117040726033336 0ustar zuulzuul--- - name: Debug make_mariadb_kuttl_env when: make_mariadb_kuttl_env is defined ansible.builtin.debug: var: make_mariadb_kuttl_env - name: Debug make_mariadb_kuttl_params when: make_mariadb_kuttl_params is defined ansible.builtin.debug: var: make_mariadb_kuttl_params - name: Run mariadb_kuttl retries: "{{ make_mariadb_kuttl_retries | default(omit) }}" delay: "{{ make_mariadb_kuttl_delay | default(omit) }}" until: "{{ make_mariadb_kuttl_until | default(true) }}" register: "make_mariadb_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_kuttl" dry_run: "{{ make_mariadb_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_kuttl_env|default({})), **(make_mariadb_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_db_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_db0000644000175000017500000000165415117040726033410 0ustar zuulzuul--- - name: Debug make_kuttl_db_prep_env when: make_kuttl_db_prep_env is defined ansible.builtin.debug: var: make_kuttl_db_prep_env - name: Debug make_kuttl_db_prep_params when: make_kuttl_db_prep_params is defined ansible.builtin.debug: var: make_kuttl_db_prep_params - name: Run kuttl_db_prep retries: "{{ make_kuttl_db_prep_retries | default(omit) }}" delay: "{{ make_kuttl_db_prep_delay | default(omit) }}" until: "{{ make_kuttl_db_prep_until | default(true) }}" register: "make_kuttl_db_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make kuttl_db_prep" dry_run: "{{ make_kuttl_db_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_kuttl_db_prep_env|default({})), **(make_kuttl_db_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_db_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_db0000644000175000017500000000173115117040726033404 0ustar zuulzuul--- - name: Debug make_kuttl_db_cleanup_env when: make_kuttl_db_cleanup_env is defined ansible.builtin.debug: var: make_kuttl_db_cleanup_env - name: Debug make_kuttl_db_cleanup_params when: make_kuttl_db_cleanup_params is defined ansible.builtin.debug: var: make_kuttl_db_cleanup_params - name: Run kuttl_db_cleanup retries: "{{ make_kuttl_db_cleanup_retries | default(omit) }}" delay: "{{ make_kuttl_db_cleanup_delay | default(omit) }}" until: "{{ make_kuttl_db_cleanup_until | default(true) }}" register: "make_kuttl_db_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make kuttl_db_cleanup" dry_run: "{{ make_kuttl_db_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_kuttl_db_cleanup_env|default({})), **(make_kuttl_db_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_common_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_co0000644000175000017500000000175015117040726033421 0ustar zuulzuul--- - name: Debug make_kuttl_common_prep_env when: make_kuttl_common_prep_env is defined ansible.builtin.debug: var: make_kuttl_common_prep_env - name: Debug make_kuttl_common_prep_params when: make_kuttl_common_prep_params is defined ansible.builtin.debug: var: make_kuttl_common_prep_params - name: Run kuttl_common_prep retries: "{{ make_kuttl_common_prep_retries | default(omit) }}" delay: "{{ make_kuttl_common_prep_delay | default(omit) }}" until: "{{ make_kuttl_common_prep_until | default(true) }}" register: "make_kuttl_common_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make kuttl_common_prep" dry_run: "{{ make_kuttl_common_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_kuttl_common_prep_env|default({})), **(make_kuttl_common_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_common_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_co0000644000175000017500000000202515117040726033415 0ustar zuulzuul--- - name: Debug make_kuttl_common_cleanup_env when: make_kuttl_common_cleanup_env is defined ansible.builtin.debug: var: make_kuttl_common_cleanup_env - name: Debug make_kuttl_common_cleanup_params when: make_kuttl_common_cleanup_params is defined ansible.builtin.debug: var: make_kuttl_common_cleanup_params - name: Run kuttl_common_cleanup retries: "{{ make_kuttl_common_cleanup_retries | default(omit) }}" delay: "{{ make_kuttl_common_cleanup_delay | default(omit) }}" until: "{{ make_kuttl_common_cleanup_until | default(true) }}" register: "make_kuttl_common_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir 
~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make kuttl_common_cleanup" dry_run: "{{ make_kuttl_common_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_kuttl_common_cleanup_env|default({})), **(make_kuttl_common_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000176715117040726033446 0ustar zuulzuul--- - name: Debug make_keystone_kuttl_run_env when: make_keystone_kuttl_run_env is defined ansible.builtin.debug: var: make_keystone_kuttl_run_env - name: Debug make_keystone_kuttl_run_params when: make_keystone_kuttl_run_params is defined ansible.builtin.debug: var: make_keystone_kuttl_run_params - name: Run keystone_kuttl_run retries: "{{ make_keystone_kuttl_run_retries | default(omit) }}" delay: "{{ make_keystone_kuttl_run_delay | default(omit) }}" until: "{{ make_keystone_kuttl_run_until | default(true) }}" register: "make_keystone_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_kuttl_run" dry_run: "{{ make_keystone_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_kuttl_run_env|default({})), **(make_keystone_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000167315117040726033442 0ustar zuulzuul--- - name: Debug make_keystone_kuttl_env when: make_keystone_kuttl_env is defined ansible.builtin.debug: var: make_keystone_kuttl_env - name: Debug make_keystone_kuttl_params when: make_keystone_kuttl_params is defined ansible.builtin.debug: var: make_keystone_kuttl_params - name: Run keystone_kuttl retries: "{{ make_keystone_kuttl_retries | default(omit) }}" delay: "{{ make_keystone_kuttl_delay | default(omit) }}" until: "{{ make_keystone_kuttl_until | default(true) }}" register: "make_keystone_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_kuttl" dry_run: "{{ make_keystone_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_kuttl_env|default({})), **(make_keystone_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican0000644000175000017500000000176715117040726033346 0ustar zuulzuul--- - name: Debug make_barbican_kuttl_run_env when: make_barbican_kuttl_run_env is defined ansible.builtin.debug: var: make_barbican_kuttl_run_env - name: Debug make_barbican_kuttl_run_params when: make_barbican_kuttl_run_params is defined ansible.builtin.debug: var: 
make_barbican_kuttl_run_params - name: Run barbican_kuttl_run retries: "{{ make_barbican_kuttl_run_retries | default(omit) }}" delay: "{{ make_barbican_kuttl_run_delay | default(omit) }}" until: "{{ make_barbican_kuttl_run_until | default(true) }}" register: "make_barbican_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make barbican_kuttl_run" dry_run: "{{ make_barbican_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_barbican_kuttl_run_env|default({})), **(make_barbican_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican0000644000175000017500000000167315117040726033342 0ustar zuulzuul--- - name: Debug make_barbican_kuttl_env when: make_barbican_kuttl_env is defined ansible.builtin.debug: var: make_barbican_kuttl_env - name: Debug make_barbican_kuttl_params when: make_barbican_kuttl_params is defined ansible.builtin.debug: var: make_barbican_kuttl_params - name: Run barbican_kuttl retries: "{{ make_barbican_kuttl_retries | default(omit) }}" delay: "{{ make_barbican_kuttl_delay | default(omit) }}" until: "{{ make_barbican_kuttl_until | default(true) }}" register: "make_barbican_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make barbican_kuttl" dry_run: "{{ make_barbican_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_barbican_kuttl_env|default({})), **(make_barbican_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000200615117040726033354 0ustar zuulzuul--- - name: Debug make_placement_kuttl_run_env when: make_placement_kuttl_run_env is defined ansible.builtin.debug: var: make_placement_kuttl_run_env - name: Debug make_placement_kuttl_run_params when: make_placement_kuttl_run_params is defined ansible.builtin.debug: var: make_placement_kuttl_run_params - name: Run placement_kuttl_run retries: "{{ make_placement_kuttl_run_retries | default(omit) }}" delay: "{{ make_placement_kuttl_run_delay | default(omit) }}" until: "{{ make_placement_kuttl_run_until | default(true) }}" register: "make_placement_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_kuttl_run" dry_run: "{{ make_placement_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_kuttl_run_env|default({})), **(make_placement_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000171215117040726033357 0ustar zuulzuul--- - name: Debug make_placement_kuttl_env when: make_placement_kuttl_env is defined ansible.builtin.debug: var: make_placement_kuttl_env - name: Debug make_placement_kuttl_params when: make_placement_kuttl_params is defined ansible.builtin.debug: var: make_placement_kuttl_params - name: Run placement_kuttl retries: "{{ make_placement_kuttl_retries | default(omit) }}" delay: "{{ make_placement_kuttl_delay | default(omit) }}" until: "{{ make_placement_kuttl_until | default(true) }}" register: "make_placement_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_kuttl" dry_run: "{{ make_placement_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_kuttl_env|default({})), **(make_placement_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_k0000644000175000017500000000173115117040726033352 0ustar zuulzuul--- - name: Debug make_cinder_kuttl_run_env when: make_cinder_kuttl_run_env is defined ansible.builtin.debug: var: make_cinder_kuttl_run_env - name: Debug make_cinder_kuttl_run_params when: make_cinder_kuttl_run_params is defined ansible.builtin.debug: var: make_cinder_kuttl_run_params - name: Run cinder_kuttl_run retries: "{{ make_cinder_kuttl_run_retries | default(omit) }}" delay: "{{ make_cinder_kuttl_run_delay | default(omit) }}" until: "{{ make_cinder_kuttl_run_until | default(true) }}" register: "make_cinder_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_kuttl_run" dry_run: "{{ make_cinder_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_kuttl_run_env|default({})), **(make_cinder_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_k0000644000175000017500000000163515117040726033355 0ustar zuulzuul--- - name: Debug make_cinder_kuttl_env when: make_cinder_kuttl_env is defined ansible.builtin.debug: var: make_cinder_kuttl_env - name: Debug make_cinder_kuttl_params when: make_cinder_kuttl_params is defined ansible.builtin.debug: var: make_cinder_kuttl_params - name: Run cinder_kuttl retries: "{{ make_cinder_kuttl_retries | default(omit) }}" delay: "{{ make_cinder_kuttl_delay | default(omit) }}" until: "{{ make_cinder_kuttl_until | default(true) }}" register: "make_cinder_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make 
cinder_kuttl" dry_run: "{{ make_cinder_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_kuttl_env|default({})), **(make_cinder_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000175015117040726033426 0ustar zuulzuul--- - name: Debug make_neutron_kuttl_run_env when: make_neutron_kuttl_run_env is defined ansible.builtin.debug: var: make_neutron_kuttl_run_env - name: Debug make_neutron_kuttl_run_params when: make_neutron_kuttl_run_params is defined ansible.builtin.debug: var: make_neutron_kuttl_run_params - name: Run neutron_kuttl_run retries: "{{ make_neutron_kuttl_run_retries | default(omit) }}" delay: "{{ make_neutron_kuttl_run_delay | default(omit) }}" until: "{{ make_neutron_kuttl_run_until | default(true) }}" register: "make_neutron_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_kuttl_run" dry_run: "{{ make_neutron_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_kuttl_run_env|default({})), **(make_neutron_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000165415117040726033431 0ustar zuulzuul--- - name: Debug make_neutron_kuttl_env when: make_neutron_kuttl_env is defined ansible.builtin.debug: var: make_neutron_kuttl_env - name: Debug make_neutron_kuttl_params when: make_neutron_kuttl_params is defined ansible.builtin.debug: var: make_neutron_kuttl_params - name: Run neutron_kuttl retries: "{{ make_neutron_kuttl_retries | default(omit) }}" delay: "{{ make_neutron_kuttl_delay | default(omit) }}" until: "{{ make_neutron_kuttl_until | default(true) }}" register: "make_neutron_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_kuttl" dry_run: "{{ make_neutron_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_kuttl_env|default({})), **(make_neutron_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000175015117040726033362 0ustar zuulzuul--- - name: Debug make_octavia_kuttl_run_env when: make_octavia_kuttl_run_env is defined ansible.builtin.debug: var: make_octavia_kuttl_run_env - name: Debug make_octavia_kuttl_run_params when: make_octavia_kuttl_run_params is defined ansible.builtin.debug: var: make_octavia_kuttl_run_params - name: Run octavia_kuttl_run retries: "{{ make_octavia_kuttl_run_retries | default(omit) }}" delay: "{{ make_octavia_kuttl_run_delay | default(omit) }}" until: "{{ 
make_octavia_kuttl_run_until | default(true) }}" register: "make_octavia_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_kuttl_run" dry_run: "{{ make_octavia_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_kuttl_run_env|default({})), **(make_octavia_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000165415117040726033365 0ustar zuulzuul--- - name: Debug make_octavia_kuttl_env when: make_octavia_kuttl_env is defined ansible.builtin.debug: var: make_octavia_kuttl_env - name: Debug make_octavia_kuttl_params when: make_octavia_kuttl_params is defined ansible.builtin.debug: var: make_octavia_kuttl_params - name: Run octavia_kuttl retries: "{{ make_octavia_kuttl_retries | default(omit) }}" delay: "{{ make_octavia_kuttl_delay | default(omit) }}" until: "{{ make_octavia_kuttl_until | default(true) }}" register: "make_octavia_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_kuttl" dry_run: "{{ make_octavia_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_kuttl_env|default({})), **(make_octavia_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000171215117040726033371 0ustar zuulzuul--- - name: Debug make_designate_kuttl_env when: make_designate_kuttl_env is defined ansible.builtin.debug: var: make_designate_kuttl_env - name: Debug make_designate_kuttl_params when: make_designate_kuttl_params is defined ansible.builtin.debug: var: make_designate_kuttl_params - name: Run designate_kuttl retries: "{{ make_designate_kuttl_retries | default(omit) }}" delay: "{{ make_designate_kuttl_delay | default(omit) }}" until: "{{ make_designate_kuttl_until | default(true) }}" register: "make_designate_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_kuttl" dry_run: "{{ make_designate_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_kuttl_env|default({})), **(make_designate_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000200615117040726033366 0ustar zuulzuul--- - name: Debug make_designate_kuttl_run_env when: make_designate_kuttl_run_env is defined ansible.builtin.debug: var: make_designate_kuttl_run_env - name: Debug 
make_designate_kuttl_run_params when: make_designate_kuttl_run_params is defined ansible.builtin.debug: var: make_designate_kuttl_run_params - name: Run designate_kuttl_run retries: "{{ make_designate_kuttl_run_retries | default(omit) }}" delay: "{{ make_designate_kuttl_run_delay | default(omit) }}" until: "{{ make_designate_kuttl_run_until | default(true) }}" register: "make_designate_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_kuttl_run" dry_run: "{{ make_designate_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_kuttl_run_env|default({})), **(make_designate_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_kutt0000644000175000017500000000165415117040726033451 0ustar zuulzuul--- - name: Debug make_ovn_kuttl_run_env when: make_ovn_kuttl_run_env is defined ansible.builtin.debug: var: make_ovn_kuttl_run_env - name: Debug make_ovn_kuttl_run_params when: make_ovn_kuttl_run_params is defined ansible.builtin.debug: var: make_ovn_kuttl_run_params - name: Run ovn_kuttl_run retries: "{{ make_ovn_kuttl_run_retries | default(omit) }}" delay: "{{ make_ovn_kuttl_run_delay | default(omit) }}" until: "{{ make_ovn_kuttl_run_until | default(true) }}" register: "make_ovn_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn_kuttl_run" dry_run: "{{ make_ovn_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_kuttl_run_env|default({})), **(make_ovn_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_kutt0000644000175000017500000000156015117040726033445 0ustar zuulzuul--- - name: Debug make_ovn_kuttl_env when: make_ovn_kuttl_env is defined ansible.builtin.debug: var: make_ovn_kuttl_env - name: Debug make_ovn_kuttl_params when: make_ovn_kuttl_params is defined ansible.builtin.debug: var: make_ovn_kuttl_params - name: Run ovn_kuttl retries: "{{ make_ovn_kuttl_retries | default(omit) }}" delay: "{{ make_ovn_kuttl_delay | default(omit) }}" until: "{{ make_ovn_kuttl_until | default(true) }}" register: "make_ovn_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn_kuttl" dry_run: "{{ make_ovn_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_kuttl_env|default({})), **(make_ovn_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_ku0000644000175000017500000000171215117040726033371 0ustar zuulzuul--- - name: Debug make_infra_kuttl_run_env when: make_infra_kuttl_run_env is defined ansible.builtin.debug: var: make_infra_kuttl_run_env - name: Debug make_infra_kuttl_run_params when: make_infra_kuttl_run_params is defined ansible.builtin.debug: var: make_infra_kuttl_run_params - name: Run infra_kuttl_run retries: "{{ make_infra_kuttl_run_retries | default(omit) }}" delay: "{{ make_infra_kuttl_run_delay | default(omit) }}" until: "{{ make_infra_kuttl_run_until | default(true) }}" register: "make_infra_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra_kuttl_run" dry_run: "{{ make_infra_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_kuttl_run_env|default({})), **(make_infra_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_ku0000644000175000017500000000161615117040726033374 0ustar zuulzuul--- - name: Debug make_infra_kuttl_env when: make_infra_kuttl_env is defined ansible.builtin.debug: var: make_infra_kuttl_env - name: Debug make_infra_kuttl_params when: make_infra_kuttl_params is defined ansible.builtin.debug: var: make_infra_kuttl_params - name: Run infra_kuttl retries: "{{ make_infra_kuttl_retries | default(omit) }}" delay: "{{ make_infra_kuttl_delay | default(omit) }}" until: "{{ make_infra_kuttl_until | default(true) }}" register: "make_infra_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra_kuttl" dry_run: "{{ make_infra_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_kuttl_env|default({})), **(make_infra_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_k0000644000175000017500000000173115117040726033371 0ustar zuulzuul--- - name: Debug make_ironic_kuttl_run_env when: make_ironic_kuttl_run_env is defined ansible.builtin.debug: var: make_ironic_kuttl_run_env - name: Debug make_ironic_kuttl_run_params when: make_ironic_kuttl_run_params is defined ansible.builtin.debug: var: make_ironic_kuttl_run_params - name: Run ironic_kuttl_run retries: "{{ make_ironic_kuttl_run_retries | default(omit) }}" delay: "{{ make_ironic_kuttl_run_delay | default(omit) }}" until: "{{ make_ironic_kuttl_run_until | default(true) }}" register: "make_ironic_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_kuttl_run" dry_run: "{{ 
make_ironic_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_kuttl_run_env|default({})), **(make_ironic_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_k0000644000175000017500000000163515117040726033374 0ustar zuulzuul--- - name: Debug make_ironic_kuttl_env when: make_ironic_kuttl_env is defined ansible.builtin.debug: var: make_ironic_kuttl_env - name: Debug make_ironic_kuttl_params when: make_ironic_kuttl_params is defined ansible.builtin.debug: var: make_ironic_kuttl_params - name: Run ironic_kuttl retries: "{{ make_ironic_kuttl_retries | default(omit) }}" delay: "{{ make_ironic_kuttl_delay | default(omit) }}" until: "{{ make_ironic_kuttl_until | default(true) }}" register: "make_ironic_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_kuttl" dry_run: "{{ make_ironic_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_kuttl_env|default({})), **(make_ironic_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_kuttl_crc.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_k0000644000175000017500000000173115117040726033371 0ustar zuulzuul--- - name: Debug make_ironic_kuttl_crc_env when: make_ironic_kuttl_crc_env is defined ansible.builtin.debug: var: make_ironic_kuttl_crc_env - name: Debug make_ironic_kuttl_crc_params when: make_ironic_kuttl_crc_params is defined ansible.builtin.debug: var: make_ironic_kuttl_crc_params - name: Run ironic_kuttl_crc retries: "{{ make_ironic_kuttl_crc_retries | default(omit) }}" delay: "{{ make_ironic_kuttl_crc_delay | default(omit) }}" until: "{{ make_ironic_kuttl_crc_until | default(true) }}" register: "make_ironic_kuttl_crc_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_kuttl_crc" dry_run: "{{ make_ironic_kuttl_crc_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_kuttl_crc_env|default({})), **(make_ironic_kuttl_crc_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_kut0000644000175000017500000000167315117040726033405 0ustar zuulzuul--- - name: Debug make_heat_kuttl_run_env when: make_heat_kuttl_run_env is defined ansible.builtin.debug: var: make_heat_kuttl_run_env - name: Debug make_heat_kuttl_run_params when: make_heat_kuttl_run_params is defined ansible.builtin.debug: var: make_heat_kuttl_run_params - name: Run heat_kuttl_run retries: "{{ make_heat_kuttl_run_retries | default(omit) }}" delay: "{{ make_heat_kuttl_run_delay | default(omit) }}" until: "{{ make_heat_kuttl_run_until | default(true) }}" register: 
"make_heat_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_kuttl_run" dry_run: "{{ make_heat_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_kuttl_run_env|default({})), **(make_heat_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_kut0000644000175000017500000000157715117040726033410 0ustar zuulzuul--- - name: Debug make_heat_kuttl_env when: make_heat_kuttl_env is defined ansible.builtin.debug: var: make_heat_kuttl_env - name: Debug make_heat_kuttl_params when: make_heat_kuttl_params is defined ansible.builtin.debug: var: make_heat_kuttl_params - name: Run heat_kuttl retries: "{{ make_heat_kuttl_retries | default(omit) }}" delay: "{{ make_heat_kuttl_delay | default(omit) }}" until: "{{ make_heat_kuttl_until | default(true) }}" register: "make_heat_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_kuttl" dry_run: "{{ make_heat_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_kuttl_env|default({})), **(make_heat_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_kuttl_crc.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_kut0000644000175000017500000000167315117040726033405 0ustar zuulzuul--- - name: Debug make_heat_kuttl_crc_env when: make_heat_kuttl_crc_env is defined ansible.builtin.debug: var: make_heat_kuttl_crc_env - name: Debug make_heat_kuttl_crc_params when: make_heat_kuttl_crc_params is defined ansible.builtin.debug: var: make_heat_kuttl_crc_params - name: Run heat_kuttl_crc retries: "{{ make_heat_kuttl_crc_retries | default(omit) }}" delay: "{{ make_heat_kuttl_crc_delay | default(omit) }}" until: "{{ make_heat_kuttl_crc_until | default(true) }}" register: "make_heat_kuttl_crc_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_kuttl_crc" dry_run: "{{ make_heat_kuttl_crc_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_kuttl_crc_env|default({})), **(make_heat_kuttl_crc_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000200615117040726033352 0ustar zuulzuul--- - name: Debug make_ansibleee_kuttl_run_env when: make_ansibleee_kuttl_run_env is defined ansible.builtin.debug: var: make_ansibleee_kuttl_run_env - name: Debug make_ansibleee_kuttl_run_params when: make_ansibleee_kuttl_run_params is defined ansible.builtin.debug: var: make_ansibleee_kuttl_run_params - name: 
Run ansibleee_kuttl_run retries: "{{ make_ansibleee_kuttl_run_retries | default(omit) }}" delay: "{{ make_ansibleee_kuttl_run_delay | default(omit) }}" until: "{{ make_ansibleee_kuttl_run_until | default(true) }}" register: "make_ansibleee_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_kuttl_run" dry_run: "{{ make_ansibleee_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_kuttl_run_env|default({})), **(make_ansibleee_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_kuttl_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000210215117040726033347 0ustar zuulzuul--- - name: Debug make_ansibleee_kuttl_cleanup_env when: make_ansibleee_kuttl_cleanup_env is defined ansible.builtin.debug: var: make_ansibleee_kuttl_cleanup_env - name: Debug make_ansibleee_kuttl_cleanup_params when: make_ansibleee_kuttl_cleanup_params is defined ansible.builtin.debug: var: make_ansibleee_kuttl_cleanup_params - name: Run ansibleee_kuttl_cleanup retries: "{{ make_ansibleee_kuttl_cleanup_retries | default(omit) }}" delay: "{{ make_ansibleee_kuttl_cleanup_delay | default(omit) }}" until: "{{ make_ansibleee_kuttl_cleanup_until | default(true) }}" register: "make_ansibleee_kuttl_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_kuttl_cleanup" dry_run: "{{ make_ansibleee_kuttl_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_kuttl_cleanup_env|default({})), **(make_ansibleee_kuttl_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_kuttl_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000202515117040726033353 0ustar zuulzuul--- - name: Debug make_ansibleee_kuttl_prep_env when: make_ansibleee_kuttl_prep_env is defined ansible.builtin.debug: var: make_ansibleee_kuttl_prep_env - name: Debug make_ansibleee_kuttl_prep_params when: make_ansibleee_kuttl_prep_params is defined ansible.builtin.debug: var: make_ansibleee_kuttl_prep_params - name: Run ansibleee_kuttl_prep retries: "{{ make_ansibleee_kuttl_prep_retries | default(omit) }}" delay: "{{ make_ansibleee_kuttl_prep_delay | default(omit) }}" until: "{{ make_ansibleee_kuttl_prep_until | default(true) }}" register: "make_ansibleee_kuttl_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_kuttl_prep" dry_run: "{{ make_ansibleee_kuttl_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_kuttl_prep_env|default({})), **(make_ansibleee_kuttl_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000171215117040726033355 0ustar zuulzuul--- - name: Debug make_ansibleee_kuttl_env when: make_ansibleee_kuttl_env is defined ansible.builtin.debug: var: make_ansibleee_kuttl_env - name: Debug make_ansibleee_kuttl_params when: make_ansibleee_kuttl_params is defined ansible.builtin.debug: var: make_ansibleee_kuttl_params - name: Run ansibleee_kuttl retries: "{{ make_ansibleee_kuttl_retries | default(omit) }}" delay: "{{ make_ansibleee_kuttl_delay | default(omit) }}" until: "{{ make_ansibleee_kuttl_until | default(true) }}" register: "make_ansibleee_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_kuttl" dry_run: "{{ make_ansibleee_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_kuttl_env|default({})), **(make_ansibleee_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_k0000644000175000017500000000173115117040726033337 0ustar zuulzuul--- - name: Debug make_glance_kuttl_run_env when: make_glance_kuttl_run_env is defined ansible.builtin.debug: var: make_glance_kuttl_run_env - name: Debug make_glance_kuttl_run_params when: make_glance_kuttl_run_params is defined ansible.builtin.debug: var: make_glance_kuttl_run_params - name: Run glance_kuttl_run retries: "{{ make_glance_kuttl_run_retries | default(omit) }}" delay: "{{ make_glance_kuttl_run_delay | default(omit) }}" until: "{{ make_glance_kuttl_run_until | default(true) }}" register: "make_glance_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance_kuttl_run" dry_run: "{{ make_glance_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_kuttl_run_env|default({})), **(make_glance_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_k0000644000175000017500000000163515117040726033342 0ustar zuulzuul--- - name: Debug make_glance_kuttl_env when: make_glance_kuttl_env is defined ansible.builtin.debug: var: make_glance_kuttl_env - name: Debug make_glance_kuttl_params when: make_glance_kuttl_params is defined ansible.builtin.debug: var: make_glance_kuttl_params - name: Run glance_kuttl retries: "{{ make_glance_kuttl_retries | default(omit) }}" delay: "{{ make_glance_kuttl_delay | default(omit) }}" until: "{{ make_glance_kuttl_until | default(true) }}" register: "make_glance_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make 
glance_kuttl" dry_run: "{{ make_glance_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_kuttl_env|default({})), **(make_glance_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_k0000644000175000017500000000173115117040726033347 0ustar zuulzuul--- - name: Debug make_manila_kuttl_run_env when: make_manila_kuttl_run_env is defined ansible.builtin.debug: var: make_manila_kuttl_run_env - name: Debug make_manila_kuttl_run_params when: make_manila_kuttl_run_params is defined ansible.builtin.debug: var: make_manila_kuttl_run_params - name: Run manila_kuttl_run retries: "{{ make_manila_kuttl_run_retries | default(omit) }}" delay: "{{ make_manila_kuttl_run_delay | default(omit) }}" until: "{{ make_manila_kuttl_run_until | default(true) }}" register: "make_manila_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_kuttl_run" dry_run: "{{ make_manila_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_kuttl_run_env|default({})), **(make_manila_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_k0000644000175000017500000000163515117040726033352 0ustar zuulzuul--- - name: Debug make_manila_kuttl_env when: make_manila_kuttl_env is defined ansible.builtin.debug: var: make_manila_kuttl_env - name: Debug make_manila_kuttl_params when: make_manila_kuttl_params is defined ansible.builtin.debug: var: make_manila_kuttl_params - name: Run manila_kuttl retries: "{{ make_manila_kuttl_retries | default(omit) }}" delay: "{{ make_manila_kuttl_delay | default(omit) }}" until: "{{ make_manila_kuttl_until | default(true) }}" register: "make_manila_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_kuttl" dry_run: "{{ make_manila_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_kuttl_env|default({})), **(make_manila_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_ku0000644000175000017500000000171215117040726033426 0ustar zuulzuul--- - name: Debug make_swift_kuttl_run_env when: make_swift_kuttl_run_env is defined ansible.builtin.debug: var: make_swift_kuttl_run_env - name: Debug make_swift_kuttl_run_params when: make_swift_kuttl_run_params is defined ansible.builtin.debug: var: make_swift_kuttl_run_params - name: Run swift_kuttl_run retries: "{{ make_swift_kuttl_run_retries | default(omit) }}" delay: "{{ make_swift_kuttl_run_delay | default(omit) }}" until: "{{ make_swift_kuttl_run_until | default(true) }}" register: 
"make_swift_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_kuttl_run" dry_run: "{{ make_swift_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_kuttl_run_env|default({})), **(make_swift_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_ku0000644000175000017500000000161615117040726033431 0ustar zuulzuul--- - name: Debug make_swift_kuttl_env when: make_swift_kuttl_env is defined ansible.builtin.debug: var: make_swift_kuttl_env - name: Debug make_swift_kuttl_params when: make_swift_kuttl_params is defined ansible.builtin.debug: var: make_swift_kuttl_params - name: Run swift_kuttl retries: "{{ make_swift_kuttl_retries | default(omit) }}" delay: "{{ make_swift_kuttl_delay | default(omit) }}" until: "{{ make_swift_kuttl_until | default(true) }}" register: "make_swift_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_kuttl" dry_run: "{{ make_swift_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_kuttl_env|default({})), **(make_swift_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000175015117040726033424 0ustar zuulzuul--- - name: Debug make_horizon_kuttl_run_env when: make_horizon_kuttl_run_env is defined ansible.builtin.debug: var: make_horizon_kuttl_run_env - name: Debug make_horizon_kuttl_run_params when: make_horizon_kuttl_run_params is defined ansible.builtin.debug: var: make_horizon_kuttl_run_params - name: Run horizon_kuttl_run retries: "{{ make_horizon_kuttl_run_retries | default(omit) }}" delay: "{{ make_horizon_kuttl_run_delay | default(omit) }}" until: "{{ make_horizon_kuttl_run_until | default(true) }}" register: "make_horizon_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_kuttl_run" dry_run: "{{ make_horizon_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_kuttl_run_env|default({})), **(make_horizon_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000165415117040726033427 0ustar zuulzuul--- - name: Debug make_horizon_kuttl_env when: make_horizon_kuttl_env is defined ansible.builtin.debug: var: make_horizon_kuttl_env - name: Debug make_horizon_kuttl_params when: make_horizon_kuttl_params is defined ansible.builtin.debug: var: 
make_horizon_kuttl_params - name: Run horizon_kuttl retries: "{{ make_horizon_kuttl_retries | default(omit) }}" delay: "{{ make_horizon_kuttl_delay | default(omit) }}" until: "{{ make_horizon_kuttl_until | default(true) }}" register: "make_horizon_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_kuttl" dry_run: "{{ make_horizon_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_kuttl_env|default({})), **(make_horizon_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000200615117040726033404 0ustar zuulzuul--- - name: Debug make_openstack_kuttl_run_env when: make_openstack_kuttl_run_env is defined ansible.builtin.debug: var: make_openstack_kuttl_run_env - name: Debug make_openstack_kuttl_run_params when: make_openstack_kuttl_run_params is defined ansible.builtin.debug: var: make_openstack_kuttl_run_params - name: Run openstack_kuttl_run retries: "{{ make_openstack_kuttl_run_retries | default(omit) }}" delay: "{{ make_openstack_kuttl_run_delay | default(omit) }}" until: "{{ make_openstack_kuttl_run_until | default(true) }}" register: "make_openstack_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_kuttl_run" dry_run: "{{ make_openstack_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_kuttl_run_env|default({})), **(make_openstack_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000171215117040726033407 0ustar zuulzuul--- - name: Debug make_openstack_kuttl_env when: make_openstack_kuttl_env is defined ansible.builtin.debug: var: make_openstack_kuttl_env - name: Debug make_openstack_kuttl_params when: make_openstack_kuttl_params is defined ansible.builtin.debug: var: make_openstack_kuttl_params - name: Run openstack_kuttl retries: "{{ make_openstack_kuttl_retries | default(omit) }}" delay: "{{ make_openstack_kuttl_delay | default(omit) }}" until: "{{ make_openstack_kuttl_until | default(true) }}" register: "make_openstack_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_kuttl" dry_run: "{{ make_openstack_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_kuttl_env|default({})), **(make_openstack_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_chainsaw_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000202515117040726033327 0ustar zuulzuul--- - name: Debug make_mariadb_chainsaw_run_env when: make_mariadb_chainsaw_run_env is defined ansible.builtin.debug: var: make_mariadb_chainsaw_run_env - name: Debug make_mariadb_chainsaw_run_params when: make_mariadb_chainsaw_run_params is defined ansible.builtin.debug: var: make_mariadb_chainsaw_run_params - name: Run mariadb_chainsaw_run retries: "{{ make_mariadb_chainsaw_run_retries | default(omit) }}" delay: "{{ make_mariadb_chainsaw_run_delay | default(omit) }}" until: "{{ make_mariadb_chainsaw_run_until | default(true) }}" register: "make_mariadb_chainsaw_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_chainsaw_run" dry_run: "{{ make_mariadb_chainsaw_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_chainsaw_run_env|default({})), **(make_mariadb_chainsaw_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_chainsaw.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000173115117040726033332 0ustar zuulzuul--- - name: Debug make_mariadb_chainsaw_env when: make_mariadb_chainsaw_env is defined ansible.builtin.debug: var: make_mariadb_chainsaw_env - name: Debug make_mariadb_chainsaw_params when: make_mariadb_chainsaw_params is defined ansible.builtin.debug: var: make_mariadb_chainsaw_params - name: Run mariadb_chainsaw retries: "{{ make_mariadb_chainsaw_retries | default(omit) }}" delay: "{{ make_mariadb_chainsaw_delay | default(omit) }}" until: "{{ make_mariadb_chainsaw_until | default(true) }}" register: "make_mariadb_chainsaw_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_chainsaw" dry_run: "{{ make_mariadb_chainsaw_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_chainsaw_env|default({})), **(make_mariadb_chainsaw_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000163515117040726033426 0ustar zuulzuul--- - name: Debug make_horizon_prep_env when: make_horizon_prep_env is defined ansible.builtin.debug: var: make_horizon_prep_env - name: Debug make_horizon_prep_params when: make_horizon_prep_params is defined ansible.builtin.debug: var: make_horizon_prep_params - name: Run horizon_prep retries: "{{ make_horizon_prep_retries | default(omit) }}" delay: "{{ make_horizon_prep_delay | default(omit) }}" until: "{{ make_horizon_prep_until | default(true) }}" register: "make_horizon_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_prep" dry_run: "{{ make_horizon_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_prep_env|default({})), **(make_horizon_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon.0000644000175000017500000000152215117040726033340 0ustar zuulzuul--- - name: Debug make_horizon_env when: make_horizon_env is defined ansible.builtin.debug: var: make_horizon_env - name: Debug make_horizon_params when: make_horizon_params is defined ansible.builtin.debug: var: make_horizon_params - name: Run horizon retries: "{{ make_horizon_retries | default(omit) }}" delay: "{{ make_horizon_delay | default(omit) }}" until: "{{ make_horizon_until | default(true) }}" register: "make_horizon_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon" dry_run: "{{ make_horizon_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_env|default({})), **(make_horizon_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000171215117040726033422 0ustar zuulzuul--- - name: Debug make_horizon_cleanup_env when: make_horizon_cleanup_env is defined ansible.builtin.debug: var: make_horizon_cleanup_env - name: Debug make_horizon_cleanup_params when: make_horizon_cleanup_params is defined ansible.builtin.debug: var: make_horizon_cleanup_params - name: Run horizon_cleanup retries: "{{ make_horizon_cleanup_retries | default(omit) }}" delay: "{{ make_horizon_cleanup_delay | default(omit) }}" until: "{{ make_horizon_cleanup_until | default(true) }}" register: "make_horizon_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_cleanup" dry_run: "{{ make_horizon_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_cleanup_env|default({})), **(make_horizon_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000200615117040726033417 0ustar zuulzuul--- - name: Debug make_horizon_deploy_prep_env when: make_horizon_deploy_prep_env is defined ansible.builtin.debug: var: make_horizon_deploy_prep_env - name: Debug make_horizon_deploy_prep_params when: make_horizon_deploy_prep_params is defined ansible.builtin.debug: var: make_horizon_deploy_prep_params - name: Run horizon_deploy_prep retries: "{{ make_horizon_deploy_prep_retries | default(omit) }}" delay: "{{ make_horizon_deploy_prep_delay | default(omit) }}" until: "{{ make_horizon_deploy_prep_until | 
default(true) }}" register: "make_horizon_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_deploy_prep" dry_run: "{{ make_horizon_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_deploy_prep_env|default({})), **(make_horizon_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000167315117040726033430 0ustar zuulzuul--- - name: Debug make_horizon_deploy_env when: make_horizon_deploy_env is defined ansible.builtin.debug: var: make_horizon_deploy_env - name: Debug make_horizon_deploy_params when: make_horizon_deploy_params is defined ansible.builtin.debug: var: make_horizon_deploy_params - name: Run horizon_deploy retries: "{{ make_horizon_deploy_retries | default(omit) }}" delay: "{{ make_horizon_deploy_delay | default(omit) }}" until: "{{ make_horizon_deploy_until | default(true) }}" register: "make_horizon_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_deploy" dry_run: "{{ make_horizon_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_deploy_env|default({})), **(make_horizon_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000206315117040726033422 0ustar zuulzuul--- - name: Debug make_horizon_deploy_cleanup_env when: make_horizon_deploy_cleanup_env is defined ansible.builtin.debug: var: make_horizon_deploy_cleanup_env - name: Debug make_horizon_deploy_cleanup_params when: make_horizon_deploy_cleanup_params is defined ansible.builtin.debug: var: make_horizon_deploy_cleanup_params - name: Run horizon_deploy_cleanup retries: "{{ make_horizon_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_horizon_deploy_cleanup_delay | default(omit) }}" until: "{{ make_horizon_deploy_cleanup_until | default(true) }}" register: "make_horizon_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_deploy_cleanup" dry_run: "{{ make_horizon_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_deploy_cleanup_env|default({})), **(make_horizon_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_pre0000644000175000017500000000156015117040726033363 0ustar zuulzuul--- - name: Debug make_heat_prep_env when: make_heat_prep_env is defined 
ansible.builtin.debug: var: make_heat_prep_env - name: Debug make_heat_prep_params when: make_heat_prep_params is defined ansible.builtin.debug: var: make_heat_prep_params - name: Run heat_prep retries: "{{ make_heat_prep_retries | default(omit) }}" delay: "{{ make_heat_prep_delay | default(omit) }}" until: "{{ make_heat_prep_until | default(true) }}" register: "make_heat_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_prep" dry_run: "{{ make_heat_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_prep_env|default({})), **(make_heat_prep_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat.yml0000644000175000017500000000144515117040726033317 0ustar zuulzuul--- - name: Debug make_heat_env when: make_heat_env is defined ansible.builtin.debug: var: make_heat_env - name: Debug make_heat_params when: make_heat_params is defined ansible.builtin.debug: var: make_heat_params - name: Run heat retries: "{{ make_heat_retries | default(omit) }}" delay: "{{ make_heat_delay | default(omit) }}" until: "{{ make_heat_until | default(true) }}" register: "make_heat_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat" dry_run: "{{ make_heat_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_env|default({})), **(make_heat_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_cle0000644000175000017500000000163515117040726033343 0ustar zuulzuul--- - name: Debug make_heat_cleanup_env when: make_heat_cleanup_env is defined ansible.builtin.debug: var: make_heat_cleanup_env - name: Debug make_heat_cleanup_params when: make_heat_cleanup_params is defined ansible.builtin.debug: var: make_heat_cleanup_params - name: Run heat_cleanup retries: "{{ make_heat_cleanup_retries | default(omit) }}" delay: "{{ make_heat_cleanup_delay | default(omit) }}" until: "{{ make_heat_cleanup_until | default(true) }}" register: "make_heat_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_cleanup" dry_run: "{{ make_heat_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_cleanup_env|default({})), **(make_heat_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_dep0000644000175000017500000000173115117040726033345 0ustar zuulzuul--- - name: Debug make_heat_deploy_prep_env when: make_heat_deploy_prep_env is defined ansible.builtin.debug: var: make_heat_deploy_prep_env - name: Debug make_heat_deploy_prep_params when: make_heat_deploy_prep_params is defined ansible.builtin.debug: var: 
make_heat_deploy_prep_params - name: Run heat_deploy_prep retries: "{{ make_heat_deploy_prep_retries | default(omit) }}" delay: "{{ make_heat_deploy_prep_delay | default(omit) }}" until: "{{ make_heat_deploy_prep_until | default(true) }}" register: "make_heat_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_deploy_prep" dry_run: "{{ make_heat_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_deploy_prep_env|default({})), **(make_heat_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_dep0000644000175000017500000000161615117040726033347 0ustar zuulzuul--- - name: Debug make_heat_deploy_env when: make_heat_deploy_env is defined ansible.builtin.debug: var: make_heat_deploy_env - name: Debug make_heat_deploy_params when: make_heat_deploy_params is defined ansible.builtin.debug: var: make_heat_deploy_params - name: Run heat_deploy retries: "{{ make_heat_deploy_retries | default(omit) }}" delay: "{{ make_heat_deploy_delay | default(omit) }}" until: "{{ make_heat_deploy_until | default(true) }}" register: "make_heat_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_deploy" dry_run: "{{ make_heat_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_deploy_env|default({})), **(make_heat_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_dep0000644000175000017500000000200615117040726033341 0ustar zuulzuul--- - name: Debug make_heat_deploy_cleanup_env when: make_heat_deploy_cleanup_env is defined ansible.builtin.debug: var: make_heat_deploy_cleanup_env - name: Debug make_heat_deploy_cleanup_params when: make_heat_deploy_cleanup_params is defined ansible.builtin.debug: var: make_heat_deploy_cleanup_params - name: Run heat_deploy_cleanup retries: "{{ make_heat_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_heat_deploy_cleanup_delay | default(omit) }}" until: "{{ make_heat_deploy_cleanup_until | default(true) }}" register: "make_heat_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_deploy_cleanup" dry_run: "{{ make_heat_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_deploy_cleanup_env|default({})), **(make_heat_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000167315117040726033363 0ustar zuulzuul--- - name: Debug make_ansibleee_prep_env when: make_ansibleee_prep_env is defined ansible.builtin.debug: var: make_ansibleee_prep_env - name: Debug make_ansibleee_prep_params when: make_ansibleee_prep_params is defined ansible.builtin.debug: var: make_ansibleee_prep_params - name: Run ansibleee_prep retries: "{{ make_ansibleee_prep_retries | default(omit) }}" delay: "{{ make_ansibleee_prep_delay | default(omit) }}" until: "{{ make_ansibleee_prep_until | default(true) }}" register: "make_ansibleee_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_prep" dry_run: "{{ make_ansibleee_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_prep_env|default({})), **(make_ansibleee_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000156015117040726033356 0ustar zuulzuul--- - name: Debug make_ansibleee_env when: make_ansibleee_env is defined ansible.builtin.debug: var: make_ansibleee_env - name: Debug make_ansibleee_params when: make_ansibleee_params is defined ansible.builtin.debug: var: make_ansibleee_params - name: Run ansibleee retries: "{{ make_ansibleee_retries | default(omit) }}" delay: "{{ make_ansibleee_delay | default(omit) }}" until: "{{ make_ansibleee_until | default(true) }}" register: "make_ansibleee_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee" dry_run: "{{ make_ansibleee_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_env|default({})), **(make_ansibleee_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000175015117040726033357 0ustar zuulzuul--- - name: Debug make_ansibleee_cleanup_env when: make_ansibleee_cleanup_env is defined ansible.builtin.debug: var: make_ansibleee_cleanup_env - name: Debug make_ansibleee_cleanup_params when: make_ansibleee_cleanup_params is defined ansible.builtin.debug: var: make_ansibleee_cleanup_params - name: Run ansibleee_cleanup retries: "{{ make_ansibleee_cleanup_retries | default(omit) }}" delay: "{{ make_ansibleee_cleanup_delay | default(omit) }}" until: "{{ make_ansibleee_cleanup_until | default(true) }}" register: "make_ansibleee_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_cleanup" dry_run: "{{ 
make_ansibleee_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_cleanup_env|default({})), **(make_ansibleee_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_baremetal_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_baremeta0000644000175000017500000000167315117040726033361 0ustar zuulzuul--- - name: Debug make_baremetal_prep_env when: make_baremetal_prep_env is defined ansible.builtin.debug: var: make_baremetal_prep_env - name: Debug make_baremetal_prep_params when: make_baremetal_prep_params is defined ansible.builtin.debug: var: make_baremetal_prep_params - name: Run baremetal_prep retries: "{{ make_baremetal_prep_retries | default(omit) }}" delay: "{{ make_baremetal_prep_delay | default(omit) }}" until: "{{ make_baremetal_prep_until | default(true) }}" register: "make_baremetal_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make baremetal_prep" dry_run: "{{ make_baremetal_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_baremetal_prep_env|default({})), **(make_baremetal_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_baremetal.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_baremeta0000644000175000017500000000156015117040726033354 0ustar zuulzuul--- - name: Debug make_baremetal_env when: make_baremetal_env is defined ansible.builtin.debug: var: make_baremetal_env - name: Debug make_baremetal_params when: make_baremetal_params is defined ansible.builtin.debug: var: make_baremetal_params - name: Run baremetal retries: "{{ make_baremetal_retries | default(omit) }}" delay: "{{ make_baremetal_delay | default(omit) }}" until: "{{ make_baremetal_until | default(true) }}" register: "make_baremetal_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make baremetal" dry_run: "{{ make_baremetal_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_baremetal_env|default({})), **(make_baremetal_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_baremetal_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_baremeta0000644000175000017500000000175015117040726033355 0ustar zuulzuul--- - name: Debug make_baremetal_cleanup_env when: make_baremetal_cleanup_env is defined ansible.builtin.debug: var: make_baremetal_cleanup_env - name: Debug make_baremetal_cleanup_params when: make_baremetal_cleanup_params is defined ansible.builtin.debug: var: make_baremetal_cleanup_params - name: Run baremetal_cleanup retries: "{{ make_baremetal_cleanup_retries | default(omit) }}" delay: "{{ make_baremetal_cleanup_delay | default(omit) }}" until: "{{ make_baremetal_cleanup_until | default(true) }}" register: "make_baremetal_cleanup_status" cifmw.general.ci_script: output_dir: 
"{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make baremetal_cleanup" dry_run: "{{ make_baremetal_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_baremetal_cleanup_env|default({})), **(make_baremetal_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ceph_help.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ceph_hel0000644000175000017500000000156015117040726033343 0ustar zuulzuul--- - name: Debug make_ceph_help_env when: make_ceph_help_env is defined ansible.builtin.debug: var: make_ceph_help_env - name: Debug make_ceph_help_params when: make_ceph_help_params is defined ansible.builtin.debug: var: make_ceph_help_params - name: Run ceph_help retries: "{{ make_ceph_help_retries | default(omit) }}" delay: "{{ make_ceph_help_delay | default(omit) }}" until: "{{ make_ceph_help_until | default(true) }}" register: "make_ceph_help_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ceph_help" dry_run: "{{ make_ceph_help_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ceph_help_env|default({})), **(make_ceph_help_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ceph.yml0000644000175000017500000000144515117040726033315 0ustar zuulzuul--- - name: Debug make_ceph_env when: make_ceph_env is defined ansible.builtin.debug: var: make_ceph_env - name: Debug make_ceph_params when: make_ceph_params is defined ansible.builtin.debug: var: make_ceph_params - name: Run ceph retries: "{{ make_ceph_retries | default(omit) }}" delay: "{{ make_ceph_delay | default(omit) }}" until: "{{ make_ceph_until | default(true) }}" register: "make_ceph_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ceph" dry_run: "{{ make_ceph_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ceph_env|default({})), **(make_ceph_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ceph_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ceph_cle0000644000175000017500000000163515117040726033341 0ustar zuulzuul--- - name: Debug make_ceph_cleanup_env when: make_ceph_cleanup_env is defined ansible.builtin.debug: var: make_ceph_cleanup_env - name: Debug make_ceph_cleanup_params when: make_ceph_cleanup_params is defined ansible.builtin.debug: var: make_ceph_cleanup_params - name: Run ceph_cleanup retries: "{{ make_ceph_cleanup_retries | default(omit) }}" delay: "{{ make_ceph_cleanup_delay | default(omit) }}" until: "{{ make_ceph_cleanup_until | default(true) }}" register: "make_ceph_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ceph_cleanup" dry_run: "{{ 
make_ceph_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ceph_cleanup_env|default({})), **(make_ceph_cleanup_params|default({}))) }}"
home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_prep.yml:
---
- name: Debug make_rook_prep_env
  when: make_rook_prep_env is defined
  ansible.builtin.debug:
    var: make_rook_prep_env

- name: Debug make_rook_prep_params
  when: make_rook_prep_params is defined
  ansible.builtin.debug:
    var: make_rook_prep_params

- name: Run rook_prep
  retries: "{{ make_rook_prep_retries | default(omit) }}"
  delay: "{{ make_rook_prep_delay | default(omit) }}"
  until: "{{ make_rook_prep_until | default(true) }}"
  register: "make_rook_prep_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make rook_prep"
    dry_run: "{{ make_rook_prep_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_rook_prep_env|default({})), **(make_rook_prep_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook.yml:
---
- name: Debug make_rook_env
  when: make_rook_env is defined
  ansible.builtin.debug:
    var: make_rook_env

- name: Debug make_rook_params
  when: make_rook_params is defined
  ansible.builtin.debug:
    var: make_rook_params

- name: Run rook
  retries: "{{ make_rook_retries | default(omit) }}"
  delay: "{{ make_rook_delay | default(omit) }}"
  until: "{{ make_rook_until | default(true) }}"
  register: "make_rook_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make rook"
    dry_run: "{{ make_rook_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_rook_env|default({})), **(make_rook_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_deploy_prep.yml:
---
- name: Debug make_rook_deploy_prep_env
  when: make_rook_deploy_prep_env is defined
  ansible.builtin.debug:
    var: make_rook_deploy_prep_env

- name: Debug make_rook_deploy_prep_params
  when: make_rook_deploy_prep_params is defined
  ansible.builtin.debug:
    var: make_rook_deploy_prep_params

- name: Run rook_deploy_prep
  retries: "{{ make_rook_deploy_prep_retries | default(omit) }}"
  delay: "{{ make_rook_deploy_prep_delay | default(omit) }}"
  until: "{{ make_rook_deploy_prep_until | default(true) }}"
  register: "make_rook_deploy_prep_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make rook_deploy_prep"
    dry_run: "{{ make_rook_deploy_prep_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_rook_deploy_prep_env|default({})), **(make_rook_deploy_prep_params|default({}))) }}"
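Each of these generated task files follows the same pattern: two optional debug tasks for make_<target>_env and make_<target>_params, then a cifmw.general.ci_script call whose extra_args merges the two dicts with dict(env, **params), so a key set in *_params overrides the same key in *_env. A minimal sketch of how a playbook might drive one of these files follows; the play name, hosts value, include path and the NAMESPACE/TIMEOUT values are illustrative assumptions, while the make_rook_deploy_prep_* variable names come from the task file above.

---
# Illustrative sketch only: hosts, the include path and the NAMESPACE/TIMEOUT
# values are assumptions; the make_rook_deploy_prep_* variables are the ones
# consumed by the generated task file shown above.
- name: Drive "make rook_deploy_prep" through the generated wrapper
  hosts: controller
  gather_facts: false
  tasks:
    - name: Include the generated make_rook_deploy_prep tasks
      vars:
        # Environment-style inputs merged into extra_args
        make_rook_deploy_prep_env:
          NAMESPACE: openstack
        # Explicit parameters; these win over *_env keys because of dict(env, **params)
        make_rook_deploy_prep_params:
          TIMEOUT: 600s
        # Optional retry knobs consumed by the wrapper task
        make_rook_deploy_prep_retries: 3
        make_rook_deploy_prep_delay: 10
      ansible.builtin.include_tasks: make_rook_deploy_prep.yml

Note that until defaults to true in these wrappers, so the retries/delay knobs only cause re-runs when an explicit make_*_until condition is supplied that can initially evaluate to false.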
home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_deploy.yml:
---
- name: Debug make_rook_deploy_env
  when: make_rook_deploy_env is defined
  ansible.builtin.debug:
    var: make_rook_deploy_env

- name: Debug make_rook_deploy_params
  when: make_rook_deploy_params is defined
  ansible.builtin.debug:
    var: make_rook_deploy_params

- name: Run rook_deploy
  retries: "{{ make_rook_deploy_retries | default(omit) }}"
  delay: "{{ make_rook_deploy_delay | default(omit) }}"
  until: "{{ make_rook_deploy_until | default(true) }}"
  register: "make_rook_deploy_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make rook_deploy"
    dry_run: "{{ make_rook_deploy_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_rook_deploy_env|default({})), **(make_rook_deploy_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_crc_disk.yml:
---
- name: Debug make_rook_crc_disk_env
  when: make_rook_crc_disk_env is defined
  ansible.builtin.debug:
    var: make_rook_crc_disk_env

- name: Debug make_rook_crc_disk_params
  when: make_rook_crc_disk_params is defined
  ansible.builtin.debug:
    var: make_rook_crc_disk_params

- name: Run rook_crc_disk
  retries: "{{ make_rook_crc_disk_retries | default(omit) }}"
  delay: "{{ make_rook_crc_disk_delay | default(omit) }}"
  until: "{{ make_rook_crc_disk_until | default(true) }}"
  register: "make_rook_crc_disk_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make rook_crc_disk"
    dry_run: "{{ make_rook_crc_disk_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_rook_crc_disk_env|default({})), **(make_rook_crc_disk_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_cleanup.yml:
--- - name: Debug make_rook_cleanup_env when: make_rook_cleanup_env is defined ansible.builtin.debug: var: make_rook_cleanup_env - name: Debug make_rook_cleanup_params when: make_rook_cleanup_params is defined ansible.builtin.debug: var: make_rook_cleanup_params - name: Run rook_cleanup retries: "{{ make_rook_cleanup_retries | default(omit) }}" delay: "{{ make_rook_cleanup_delay | default(omit) }}" until: "{{ make_rook_cleanup_until | default(true) }}" register: "make_rook_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rook_cleanup" dry_run: "{{ 
make_rook_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rook_cleanup_env|default({})), **(make_rook_cleanup_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_lvms.yml0000644000175000017500000000144515117040726033357 0ustar zuulzuul--- - name: Debug make_lvms_env when: make_lvms_env is defined ansible.builtin.debug: var: make_lvms_env - name: Debug make_lvms_params when: make_lvms_params is defined ansible.builtin.debug: var: make_lvms_params - name: Run lvms retries: "{{ make_lvms_retries | default(omit) }}" delay: "{{ make_lvms_delay | default(omit) }}" until: "{{ make_lvms_until | default(true) }}" register: "make_lvms_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make lvms" dry_run: "{{ make_lvms_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_lvms_env|default({})), **(make_lvms_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nmstate.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nmstate.0000644000175000017500000000152215117040726033323 0ustar zuulzuul--- - name: Debug make_nmstate_env when: make_nmstate_env is defined ansible.builtin.debug: var: make_nmstate_env - name: Debug make_nmstate_params when: make_nmstate_params is defined ansible.builtin.debug: var: make_nmstate_params - name: Run nmstate retries: "{{ make_nmstate_retries | default(omit) }}" delay: "{{ make_nmstate_delay | default(omit) }}" until: "{{ make_nmstate_until | default(true) }}" register: "make_nmstate_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nmstate" dry_run: "{{ make_nmstate_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nmstate_env|default({})), **(make_nmstate_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nncp.yml0000644000175000017500000000144515117040726033334 0ustar zuulzuul--- - name: Debug make_nncp_env when: make_nncp_env is defined ansible.builtin.debug: var: make_nncp_env - name: Debug make_nncp_params when: make_nncp_params is defined ansible.builtin.debug: var: make_nncp_params - name: Run nncp retries: "{{ make_nncp_retries | default(omit) }}" delay: "{{ make_nncp_delay | default(omit) }}" until: "{{ make_nncp_until | default(true) }}" register: "make_nncp_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nncp" dry_run: "{{ make_nncp_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nncp_env|default({})), **(make_nncp_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nncp_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nncp_cle0000644000175000017500000000163515117040726033360 0ustar zuulzuul--- - name: Debug make_nncp_cleanup_env when: 
make_nncp_cleanup_env is defined ansible.builtin.debug: var: make_nncp_cleanup_env - name: Debug make_nncp_cleanup_params when: make_nncp_cleanup_params is defined ansible.builtin.debug: var: make_nncp_cleanup_params - name: Run nncp_cleanup retries: "{{ make_nncp_cleanup_retries | default(omit) }}" delay: "{{ make_nncp_cleanup_delay | default(omit) }}" until: "{{ make_nncp_cleanup_until | default(true) }}" register: "make_nncp_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nncp_cleanup" dry_run: "{{ make_nncp_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nncp_cleanup_env|default({})), **(make_nncp_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netattach.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netattac0000644000175000017500000000156015117040726033377 0ustar zuulzuul--- - name: Debug make_netattach_env when: make_netattach_env is defined ansible.builtin.debug: var: make_netattach_env - name: Debug make_netattach_params when: make_netattach_params is defined ansible.builtin.debug: var: make_netattach_params - name: Run netattach retries: "{{ make_netattach_retries | default(omit) }}" delay: "{{ make_netattach_delay | default(omit) }}" until: "{{ make_netattach_until | default(true) }}" register: "make_netattach_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netattach" dry_run: "{{ make_netattach_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netattach_env|default({})), **(make_netattach_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netattach_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netattac0000644000175000017500000000175015117040726033400 0ustar zuulzuul--- - name: Debug make_netattach_cleanup_env when: make_netattach_cleanup_env is defined ansible.builtin.debug: var: make_netattach_cleanup_env - name: Debug make_netattach_cleanup_params when: make_netattach_cleanup_params is defined ansible.builtin.debug: var: make_netattach_cleanup_params - name: Run netattach_cleanup retries: "{{ make_netattach_cleanup_retries | default(omit) }}" delay: "{{ make_netattach_cleanup_delay | default(omit) }}" until: "{{ make_netattach_cleanup_until | default(true) }}" register: "make_netattach_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netattach_cleanup" dry_run: "{{ make_netattach_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netattach_cleanup_env|default({})), **(make_netattach_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb.0000644000175000017500000000152215117040726033270 0ustar zuulzuul--- - name: Debug make_metallb_env when: make_metallb_env is defined ansible.builtin.debug: var: make_metallb_env - name: Debug make_metallb_params when: make_metallb_params is defined ansible.builtin.debug: var: make_metallb_params - name: Run metallb retries: "{{ make_metallb_retries | default(omit) }}" delay: "{{ make_metallb_delay | default(omit) }}" until: "{{ make_metallb_until | default(true) }}" register: "make_metallb_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make metallb" dry_run: "{{ make_metallb_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_metallb_env|default({})), **(make_metallb_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_config.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_0000644000175000017500000000167315117040726033360 0ustar zuulzuul--- - name: Debug make_metallb_config_env when: make_metallb_config_env is defined ansible.builtin.debug: var: make_metallb_config_env - name: Debug make_metallb_config_params when: make_metallb_config_params is defined ansible.builtin.debug: var: make_metallb_config_params - name: Run metallb_config retries: "{{ make_metallb_config_retries | default(omit) }}" delay: "{{ make_metallb_config_delay | default(omit) }}" until: "{{ make_metallb_config_until | default(true) }}" register: "make_metallb_config_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make metallb_config" dry_run: "{{ make_metallb_config_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_metallb_config_env|default({})), **(make_metallb_config_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_config_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_0000644000175000017500000000206315117040726033352 0ustar zuulzuul--- - name: Debug make_metallb_config_cleanup_env when: make_metallb_config_cleanup_env is defined ansible.builtin.debug: var: make_metallb_config_cleanup_env - name: Debug make_metallb_config_cleanup_params when: make_metallb_config_cleanup_params is defined ansible.builtin.debug: var: make_metallb_config_cleanup_params - name: Run metallb_config_cleanup retries: "{{ make_metallb_config_cleanup_retries | default(omit) }}" delay: "{{ make_metallb_config_cleanup_delay | default(omit) }}" until: "{{ make_metallb_config_cleanup_until | default(true) }}" register: "make_metallb_config_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make metallb_config_cleanup" dry_run: "{{ 
make_metallb_config_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_metallb_config_cleanup_env|default({})), **(make_metallb_config_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_0000644000175000017500000000171215117040726033352 0ustar zuulzuul--- - name: Debug make_metallb_cleanup_env when: make_metallb_cleanup_env is defined ansible.builtin.debug: var: make_metallb_cleanup_env - name: Debug make_metallb_cleanup_params when: make_metallb_cleanup_params is defined ansible.builtin.debug: var: make_metallb_cleanup_params - name: Run metallb_cleanup retries: "{{ make_metallb_cleanup_retries | default(omit) }}" delay: "{{ make_metallb_cleanup_delay | default(omit) }}" until: "{{ make_metallb_cleanup_until | default(true) }}" register: "make_metallb_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make metallb_cleanup" dry_run: "{{ make_metallb_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_metallb_cleanup_env|default({})), **(make_metallb_cleanup_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki.yml0000644000175000017500000000144515117040726033334 0ustar zuulzuul--- - name: Debug make_loki_env when: make_loki_env is defined ansible.builtin.debug: var: make_loki_env - name: Debug make_loki_params when: make_loki_params is defined ansible.builtin.debug: var: make_loki_params - name: Run loki retries: "{{ make_loki_retries | default(omit) }}" delay: "{{ make_loki_delay | default(omit) }}" until: "{{ make_loki_until | default(true) }}" register: "make_loki_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make loki" dry_run: "{{ make_loki_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_loki_env|default({})), **(make_loki_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_cle0000644000175000017500000000163515117040726033360 0ustar zuulzuul--- - name: Debug make_loki_cleanup_env when: make_loki_cleanup_env is defined ansible.builtin.debug: var: make_loki_cleanup_env - name: Debug make_loki_cleanup_params when: make_loki_cleanup_params is defined ansible.builtin.debug: var: make_loki_cleanup_params - name: Run loki_cleanup retries: "{{ make_loki_cleanup_retries | default(omit) }}" delay: "{{ make_loki_cleanup_delay | default(omit) }}" until: "{{ make_loki_cleanup_until | default(true) }}" register: "make_loki_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make loki_cleanup" dry_run: "{{ make_loki_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ 
dict((make_loki_cleanup_env|default({})), **(make_loki_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_dep0000644000175000017500000000161615117040726033364 0ustar zuulzuul--- - name: Debug make_loki_deploy_env when: make_loki_deploy_env is defined ansible.builtin.debug: var: make_loki_deploy_env - name: Debug make_loki_deploy_params when: make_loki_deploy_params is defined ansible.builtin.debug: var: make_loki_deploy_params - name: Run loki_deploy retries: "{{ make_loki_deploy_retries | default(omit) }}" delay: "{{ make_loki_deploy_delay | default(omit) }}" until: "{{ make_loki_deploy_until | default(true) }}" register: "make_loki_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make loki_deploy" dry_run: "{{ make_loki_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_loki_deploy_env|default({})), **(make_loki_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_dep0000644000175000017500000000200615117040726033356 0ustar zuulzuul--- - name: Debug make_loki_deploy_cleanup_env when: make_loki_deploy_cleanup_env is defined ansible.builtin.debug: var: make_loki_deploy_cleanup_env - name: Debug make_loki_deploy_cleanup_params when: make_loki_deploy_cleanup_params is defined ansible.builtin.debug: var: make_loki_deploy_cleanup_params - name: Run loki_deploy_cleanup retries: "{{ make_loki_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_loki_deploy_cleanup_delay | default(omit) }}" until: "{{ make_loki_deploy_cleanup_until | default(true) }}" register: "make_loki_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make loki_deploy_cleanup" dry_run: "{{ make_loki_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_loki_deploy_cleanup_env|default({})), **(make_loki_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobserv.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobser0000644000175000017500000000156015117040726033415 0ustar zuulzuul--- - name: Debug make_netobserv_env when: make_netobserv_env is defined ansible.builtin.debug: var: make_netobserv_env - name: Debug make_netobserv_params when: make_netobserv_params is defined ansible.builtin.debug: var: make_netobserv_params - name: Run netobserv retries: "{{ make_netobserv_retries | default(omit) }}" delay: "{{ make_netobserv_delay | default(omit) }}" until: "{{ make_netobserv_until | default(true) }}" register: "make_netobserv_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ 
'/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netobserv" dry_run: "{{ make_netobserv_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netobserv_env|default({})), **(make_netobserv_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobserv_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobser0000644000175000017500000000175015117040726033416 0ustar zuulzuul--- - name: Debug make_netobserv_cleanup_env when: make_netobserv_cleanup_env is defined ansible.builtin.debug: var: make_netobserv_cleanup_env - name: Debug make_netobserv_cleanup_params when: make_netobserv_cleanup_params is defined ansible.builtin.debug: var: make_netobserv_cleanup_params - name: Run netobserv_cleanup retries: "{{ make_netobserv_cleanup_retries | default(omit) }}" delay: "{{ make_netobserv_cleanup_delay | default(omit) }}" until: "{{ make_netobserv_cleanup_until | default(true) }}" register: "make_netobserv_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netobserv_cleanup" dry_run: "{{ make_netobserv_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netobserv_cleanup_env|default({})), **(make_netobserv_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobserv_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobser0000644000175000017500000000173115117040726033415 0ustar zuulzuul--- - name: Debug make_netobserv_deploy_env when: make_netobserv_deploy_env is defined ansible.builtin.debug: var: make_netobserv_deploy_env - name: Debug make_netobserv_deploy_params when: make_netobserv_deploy_params is defined ansible.builtin.debug: var: make_netobserv_deploy_params - name: Run netobserv_deploy retries: "{{ make_netobserv_deploy_retries | default(omit) }}" delay: "{{ make_netobserv_deploy_delay | default(omit) }}" until: "{{ make_netobserv_deploy_until | default(true) }}" register: "make_netobserv_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netobserv_deploy" dry_run: "{{ make_netobserv_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netobserv_deploy_env|default({})), **(make_netobserv_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobserv_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobser0000644000175000017500000000212115117040726033407 0ustar zuulzuul--- - name: Debug make_netobserv_deploy_cleanup_env when: make_netobserv_deploy_cleanup_env is defined ansible.builtin.debug: var: make_netobserv_deploy_cleanup_env - name: Debug make_netobserv_deploy_cleanup_params when: make_netobserv_deploy_cleanup_params is defined ansible.builtin.debug: 
var: make_netobserv_deploy_cleanup_params - name: Run netobserv_deploy_cleanup retries: "{{ make_netobserv_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_netobserv_deploy_cleanup_delay | default(omit) }}" until: "{{ make_netobserv_deploy_cleanup_until | default(true) }}" register: "make_netobserv_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netobserv_deploy_cleanup" dry_run: "{{ make_netobserv_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netobserv_deploy_cleanup_env|default({})), **(make_netobserv_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_p0000644000175000017500000000161615117040726033356 0ustar zuulzuul--- - name: Debug make_manila_prep_env when: make_manila_prep_env is defined ansible.builtin.debug: var: make_manila_prep_env - name: Debug make_manila_prep_params when: make_manila_prep_params is defined ansible.builtin.debug: var: make_manila_prep_params - name: Run manila_prep retries: "{{ make_manila_prep_retries | default(omit) }}" delay: "{{ make_manila_prep_delay | default(omit) }}" until: "{{ make_manila_prep_until | default(true) }}" register: "make_manila_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_prep" dry_run: "{{ make_manila_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_prep_env|default({})), **(make_manila_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila.y0000644000175000017500000000150315117040726033301 0ustar zuulzuul--- - name: Debug make_manila_env when: make_manila_env is defined ansible.builtin.debug: var: make_manila_env - name: Debug make_manila_params when: make_manila_params is defined ansible.builtin.debug: var: make_manila_params - name: Run manila retries: "{{ make_manila_retries | default(omit) }}" delay: "{{ make_manila_delay | default(omit) }}" until: "{{ make_manila_until | default(true) }}" register: "make_manila_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila" dry_run: "{{ make_manila_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_env|default({})), **(make_manila_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_c0000644000175000017500000000167315117040726033344 0ustar zuulzuul--- - name: Debug make_manila_cleanup_env when: make_manila_cleanup_env is defined 
ansible.builtin.debug: var: make_manila_cleanup_env - name: Debug make_manila_cleanup_params when: make_manila_cleanup_params is defined ansible.builtin.debug: var: make_manila_cleanup_params - name: Run manila_cleanup retries: "{{ make_manila_cleanup_retries | default(omit) }}" delay: "{{ make_manila_cleanup_delay | default(omit) }}" until: "{{ make_manila_cleanup_until | default(true) }}" register: "make_manila_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_cleanup" dry_run: "{{ make_manila_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_cleanup_env|default({})), **(make_manila_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_d0000644000175000017500000000176715117040726033351 0ustar zuulzuul--- - name: Debug make_manila_deploy_prep_env when: make_manila_deploy_prep_env is defined ansible.builtin.debug: var: make_manila_deploy_prep_env - name: Debug make_manila_deploy_prep_params when: make_manila_deploy_prep_params is defined ansible.builtin.debug: var: make_manila_deploy_prep_params - name: Run manila_deploy_prep retries: "{{ make_manila_deploy_prep_retries | default(omit) }}" delay: "{{ make_manila_deploy_prep_delay | default(omit) }}" until: "{{ make_manila_deploy_prep_until | default(true) }}" register: "make_manila_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_deploy_prep" dry_run: "{{ make_manila_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_deploy_prep_env|default({})), **(make_manila_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_d0000644000175000017500000000165415117040726033344 0ustar zuulzuul--- - name: Debug make_manila_deploy_env when: make_manila_deploy_env is defined ansible.builtin.debug: var: make_manila_deploy_env - name: Debug make_manila_deploy_params when: make_manila_deploy_params is defined ansible.builtin.debug: var: make_manila_deploy_params - name: Run manila_deploy retries: "{{ make_manila_deploy_retries | default(omit) }}" delay: "{{ make_manila_deploy_delay | default(omit) }}" until: "{{ make_manila_deploy_until | default(true) }}" register: "make_manila_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_deploy" dry_run: "{{ make_manila_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_deploy_env|default({})), **(make_manila_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_d0000644000175000017500000000204415117040726033336 0ustar zuulzuul--- - name: Debug make_manila_deploy_cleanup_env when: make_manila_deploy_cleanup_env is defined ansible.builtin.debug: var: make_manila_deploy_cleanup_env - name: Debug make_manila_deploy_cleanup_params when: make_manila_deploy_cleanup_params is defined ansible.builtin.debug: var: make_manila_deploy_cleanup_params - name: Run manila_deploy_cleanup retries: "{{ make_manila_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_manila_deploy_cleanup_delay | default(omit) }}" until: "{{ make_manila_deploy_cleanup_until | default(true) }}" register: "make_manila_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_deploy_cleanup" dry_run: "{{ make_manila_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_deploy_cleanup_env|default({})), **(make_manila_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000167315117040726033422 0ustar zuulzuul--- - name: Debug make_telemetry_prep_env when: make_telemetry_prep_env is defined ansible.builtin.debug: var: make_telemetry_prep_env - name: Debug make_telemetry_prep_params when: make_telemetry_prep_params is defined ansible.builtin.debug: var: make_telemetry_prep_params - name: Run telemetry_prep retries: "{{ make_telemetry_prep_retries | default(omit) }}" delay: "{{ make_telemetry_prep_delay | default(omit) }}" until: "{{ make_telemetry_prep_until | default(true) }}" register: "make_telemetry_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_prep" dry_run: "{{ make_telemetry_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_prep_env|default({})), **(make_telemetry_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000156015117040726033415 0ustar zuulzuul--- - name: Debug make_telemetry_env when: make_telemetry_env is defined ansible.builtin.debug: var: make_telemetry_env - name: Debug make_telemetry_params when: make_telemetry_params is defined ansible.builtin.debug: var: make_telemetry_params - name: Run telemetry retries: "{{ make_telemetry_retries | default(omit) }}" delay: "{{ make_telemetry_delay | default(omit) }}" until: "{{ make_telemetry_until | default(true) }}" register: "make_telemetry_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry" dry_run: "{{ make_telemetry_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_env|default({})), **(make_telemetry_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000175015117040726033416 0ustar zuulzuul--- - name: Debug make_telemetry_cleanup_env when: make_telemetry_cleanup_env is defined ansible.builtin.debug: var: make_telemetry_cleanup_env - name: Debug make_telemetry_cleanup_params when: make_telemetry_cleanup_params is defined ansible.builtin.debug: var: make_telemetry_cleanup_params - name: Run telemetry_cleanup retries: "{{ make_telemetry_cleanup_retries | default(omit) }}" delay: "{{ make_telemetry_cleanup_delay | default(omit) }}" until: "{{ make_telemetry_cleanup_until | default(true) }}" register: "make_telemetry_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_cleanup" dry_run: "{{ make_telemetry_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_cleanup_env|default({})), **(make_telemetry_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000204415117040726033413 0ustar zuulzuul--- - name: Debug make_telemetry_deploy_prep_env when: make_telemetry_deploy_prep_env is defined ansible.builtin.debug: var: make_telemetry_deploy_prep_env - name: Debug make_telemetry_deploy_prep_params when: make_telemetry_deploy_prep_params is defined ansible.builtin.debug: var: make_telemetry_deploy_prep_params - name: Run telemetry_deploy_prep retries: "{{ make_telemetry_deploy_prep_retries | default(omit) }}" delay: "{{ make_telemetry_deploy_prep_delay | default(omit) }}" until: "{{ make_telemetry_deploy_prep_until | default(true) }}" register: "make_telemetry_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_deploy_prep" dry_run: "{{ make_telemetry_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_deploy_prep_env|default({})), **(make_telemetry_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000173115117040726033415 0ustar zuulzuul--- - name: Debug make_telemetry_deploy_env when: make_telemetry_deploy_env is defined ansible.builtin.debug: var: make_telemetry_deploy_env - name: Debug make_telemetry_deploy_params when: make_telemetry_deploy_params is defined ansible.builtin.debug: var: 
make_telemetry_deploy_params - name: Run telemetry_deploy retries: "{{ make_telemetry_deploy_retries | default(omit) }}" delay: "{{ make_telemetry_deploy_delay | default(omit) }}" until: "{{ make_telemetry_deploy_until | default(true) }}" register: "make_telemetry_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_deploy" dry_run: "{{ make_telemetry_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_deploy_env|default({})), **(make_telemetry_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000212115117040726033407 0ustar zuulzuul--- - name: Debug make_telemetry_deploy_cleanup_env when: make_telemetry_deploy_cleanup_env is defined ansible.builtin.debug: var: make_telemetry_deploy_cleanup_env - name: Debug make_telemetry_deploy_cleanup_params when: make_telemetry_deploy_cleanup_params is defined ansible.builtin.debug: var: make_telemetry_deploy_cleanup_params - name: Run telemetry_deploy_cleanup retries: "{{ make_telemetry_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_telemetry_deploy_cleanup_delay | default(omit) }}" until: "{{ make_telemetry_deploy_cleanup_until | default(true) }}" register: "make_telemetry_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_deploy_cleanup" dry_run: "{{ make_telemetry_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_deploy_cleanup_env|default({})), **(make_telemetry_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000200615117040726033411 0ustar zuulzuul--- - name: Debug make_telemetry_kuttl_run_env when: make_telemetry_kuttl_run_env is defined ansible.builtin.debug: var: make_telemetry_kuttl_run_env - name: Debug make_telemetry_kuttl_run_params when: make_telemetry_kuttl_run_params is defined ansible.builtin.debug: var: make_telemetry_kuttl_run_params - name: Run telemetry_kuttl_run retries: "{{ make_telemetry_kuttl_run_retries | default(omit) }}" delay: "{{ make_telemetry_kuttl_run_delay | default(omit) }}" until: "{{ make_telemetry_kuttl_run_until | default(true) }}" register: "make_telemetry_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_kuttl_run" dry_run: "{{ make_telemetry_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_kuttl_run_env|default({})), **(make_telemetry_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000171215117040726033414 0ustar zuulzuul--- - name: Debug make_telemetry_kuttl_env when: make_telemetry_kuttl_env is defined ansible.builtin.debug: var: make_telemetry_kuttl_env - name: Debug make_telemetry_kuttl_params when: make_telemetry_kuttl_params is defined ansible.builtin.debug: var: make_telemetry_kuttl_params - name: Run telemetry_kuttl retries: "{{ make_telemetry_kuttl_retries | default(omit) }}" delay: "{{ make_telemetry_kuttl_delay | default(omit) }}" until: "{{ make_telemetry_kuttl_until | default(true) }}" register: "make_telemetry_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_kuttl" dry_run: "{{ make_telemetry_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_kuttl_env|default({})), **(make_telemetry_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_pr0000644000175000017500000000157715117040726033441 0ustar zuulzuul--- - name: Debug make_swift_prep_env when: make_swift_prep_env is defined ansible.builtin.debug: var: make_swift_prep_env - name: Debug make_swift_prep_params when: make_swift_prep_params is defined ansible.builtin.debug: var: make_swift_prep_params - name: Run swift_prep retries: "{{ make_swift_prep_retries | default(omit) }}" delay: "{{ make_swift_prep_delay | default(omit) }}" until: "{{ make_swift_prep_until | default(true) }}" register: "make_swift_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_prep" dry_run: "{{ make_swift_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_prep_env|default({})), **(make_swift_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift.ym0000644000175000017500000000146415117040726033357 0ustar zuulzuul--- - name: Debug make_swift_env when: make_swift_env is defined ansible.builtin.debug: var: make_swift_env - name: Debug make_swift_params when: make_swift_params is defined ansible.builtin.debug: var: make_swift_params - name: Run swift retries: "{{ make_swift_retries | default(omit) }}" delay: "{{ make_swift_delay | default(omit) }}" until: "{{ make_swift_until | default(true) }}" register: "make_swift_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift" dry_run: "{{ make_swift_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_env|default({})), **(make_swift_params|default({}))) }}" 
././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_cl0000644000175000017500000000165415117040726033412 0ustar zuulzuul--- - name: Debug make_swift_cleanup_env when: make_swift_cleanup_env is defined ansible.builtin.debug: var: make_swift_cleanup_env - name: Debug make_swift_cleanup_params when: make_swift_cleanup_params is defined ansible.builtin.debug: var: make_swift_cleanup_params - name: Run swift_cleanup retries: "{{ make_swift_cleanup_retries | default(omit) }}" delay: "{{ make_swift_cleanup_delay | default(omit) }}" until: "{{ make_swift_cleanup_until | default(true) }}" register: "make_swift_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_cleanup" dry_run: "{{ make_swift_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_cleanup_env|default({})), **(make_swift_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_de0000644000175000017500000000175015117040726033401 0ustar zuulzuul--- - name: Debug make_swift_deploy_prep_env when: make_swift_deploy_prep_env is defined ansible.builtin.debug: var: make_swift_deploy_prep_env - name: Debug make_swift_deploy_prep_params when: make_swift_deploy_prep_params is defined ansible.builtin.debug: var: make_swift_deploy_prep_params - name: Run swift_deploy_prep retries: "{{ make_swift_deploy_prep_retries | default(omit) }}" delay: "{{ make_swift_deploy_prep_delay | default(omit) }}" until: "{{ make_swift_deploy_prep_until | default(true) }}" register: "make_swift_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_deploy_prep" dry_run: "{{ make_swift_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_deploy_prep_env|default({})), **(make_swift_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_de0000644000175000017500000000163515117040726033403 0ustar zuulzuul--- - name: Debug make_swift_deploy_env when: make_swift_deploy_env is defined ansible.builtin.debug: var: make_swift_deploy_env - name: Debug make_swift_deploy_params when: make_swift_deploy_params is defined ansible.builtin.debug: var: make_swift_deploy_params - name: Run swift_deploy retries: "{{ make_swift_deploy_retries | default(omit) }}" delay: "{{ make_swift_deploy_delay | default(omit) }}" until: "{{ make_swift_deploy_until | default(true) }}" register: "make_swift_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_deploy" dry_run: "{{ make_swift_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_deploy_env|default({})), **(make_swift_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_de0000644000175000017500000000202515117040726033375 0ustar zuulzuul--- - name: Debug make_swift_deploy_cleanup_env when: make_swift_deploy_cleanup_env is defined ansible.builtin.debug: var: make_swift_deploy_cleanup_env - name: Debug make_swift_deploy_cleanup_params when: make_swift_deploy_cleanup_params is defined ansible.builtin.debug: var: make_swift_deploy_cleanup_params - name: Run swift_deploy_cleanup retries: "{{ make_swift_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_swift_deploy_cleanup_delay | default(omit) }}" until: "{{ make_swift_deploy_cleanup_until | default(true) }}" register: "make_swift_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_deploy_cleanup" dry_run: "{{ make_swift_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_deploy_cleanup_env|default({})), **(make_swift_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_certmanager.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_certmana0000644000175000017500000000161615117040726033370 0ustar zuulzuul--- - name: Debug make_certmanager_env when: make_certmanager_env is defined ansible.builtin.debug: var: make_certmanager_env - name: Debug make_certmanager_params when: make_certmanager_params is defined ansible.builtin.debug: var: make_certmanager_params - name: Run certmanager retries: "{{ make_certmanager_retries | default(omit) }}" delay: "{{ make_certmanager_delay | default(omit) }}" until: "{{ make_certmanager_until | default(true) }}" register: "make_certmanager_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make certmanager" dry_run: "{{ make_certmanager_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_certmanager_env|default({})), **(make_certmanager_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_certmanager_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_certmana0000644000175000017500000000200615117040726033362 0ustar zuulzuul--- - name: Debug make_certmanager_cleanup_env when: make_certmanager_cleanup_env is defined ansible.builtin.debug: var: make_certmanager_cleanup_env - name: Debug make_certmanager_cleanup_params when: make_certmanager_cleanup_params is defined ansible.builtin.debug: var: make_certmanager_cleanup_params - name: Run certmanager_cleanup retries: "{{ 
make_certmanager_cleanup_retries | default(omit) }}" delay: "{{ make_certmanager_cleanup_delay | default(omit) }}" until: "{{ make_certmanager_cleanup_until | default(true) }}" register: "make_certmanager_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make certmanager_cleanup" dry_run: "{{ make_certmanager_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_certmanager_cleanup_env|default({})), **(make_certmanager_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_validate_marketplace.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_validate0000644000175000017500000000202515117040726033362 0ustar zuulzuul--- - name: Debug make_validate_marketplace_env when: make_validate_marketplace_env is defined ansible.builtin.debug: var: make_validate_marketplace_env - name: Debug make_validate_marketplace_params when: make_validate_marketplace_params is defined ansible.builtin.debug: var: make_validate_marketplace_params - name: Run validate_marketplace retries: "{{ make_validate_marketplace_retries | default(omit) }}" delay: "{{ make_validate_marketplace_delay | default(omit) }}" until: "{{ make_validate_marketplace_until | default(true) }}" register: "make_validate_marketplace_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make validate_marketplace" dry_run: "{{ make_validate_marketplace_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_validate_marketplace_env|default({})), **(make_validate_marketplace_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_redis_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_redis_de0000644000175000017500000000175015117040726033353 0ustar zuulzuul--- - name: Debug make_redis_deploy_prep_env when: make_redis_deploy_prep_env is defined ansible.builtin.debug: var: make_redis_deploy_prep_env - name: Debug make_redis_deploy_prep_params when: make_redis_deploy_prep_params is defined ansible.builtin.debug: var: make_redis_deploy_prep_params - name: Run redis_deploy_prep retries: "{{ make_redis_deploy_prep_retries | default(omit) }}" delay: "{{ make_redis_deploy_prep_delay | default(omit) }}" until: "{{ make_redis_deploy_prep_until | default(true) }}" register: "make_redis_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make redis_deploy_prep" dry_run: "{{ make_redis_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_redis_deploy_prep_env|default({})), **(make_redis_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_redis_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_redis_de0000644000175000017500000000163515117040726033355 0ustar zuulzuul--- - name: Debug make_redis_deploy_env when: make_redis_deploy_env is defined ansible.builtin.debug: var: make_redis_deploy_env - name: Debug make_redis_deploy_params when: make_redis_deploy_params is defined ansible.builtin.debug: var: make_redis_deploy_params - name: Run redis_deploy retries: "{{ make_redis_deploy_retries | default(omit) }}" delay: "{{ make_redis_deploy_delay | default(omit) }}" until: "{{ make_redis_deploy_until | default(true) }}" register: "make_redis_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make redis_deploy" dry_run: "{{ make_redis_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_redis_deploy_env|default({})), **(make_redis_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_redis_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_redis_de0000644000175000017500000000202515117040726033347 0ustar zuulzuul--- - name: Debug make_redis_deploy_cleanup_env when: make_redis_deploy_cleanup_env is defined ansible.builtin.debug: var: make_redis_deploy_cleanup_env - name: Debug make_redis_deploy_cleanup_params when: make_redis_deploy_cleanup_params is defined ansible.builtin.debug: var: make_redis_deploy_cleanup_params - name: Run redis_deploy_cleanup retries: "{{ make_redis_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_redis_deploy_cleanup_delay | default(omit) }}" until: "{{ make_redis_deploy_cleanup_until | default(true) }}" register: "make_redis_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make redis_deploy_cleanup" dry_run: "{{ make_redis_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_redis_deploy_cleanup_env|default({})), **(make_redis_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_set_slower_etcd_profile.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_set_slow0000644000175000017500000000210215117040726033424 0ustar zuulzuul--- - name: Debug make_set_slower_etcd_profile_env when: make_set_slower_etcd_profile_env is defined ansible.builtin.debug: var: make_set_slower_etcd_profile_env - name: Debug make_set_slower_etcd_profile_params when: make_set_slower_etcd_profile_params is defined ansible.builtin.debug: var: make_set_slower_etcd_profile_params - name: Run set_slower_etcd_profile retries: "{{ make_set_slower_etcd_profile_retries | default(omit) }}" delay: "{{ make_set_slower_etcd_profile_delay | default(omit) }}" until: "{{ make_set_slower_etcd_profile_until | default(true) }}" register: "make_set_slower_etcd_profile_status" cifmw.general.ci_script: output_dir: "{{ 
cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make set_slower_etcd_profile" dry_run: "{{ make_set_slower_etcd_profile_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_set_slower_etcd_profile_env|default({})), **(make_set_slower_etcd_profile_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_download_tools.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_download0000644000175000017500000000170415117040726033403 0ustar zuulzuul--- - name: Debug make_download_tools_env when: make_download_tools_env is defined ansible.builtin.debug: var: make_download_tools_env - name: Debug make_download_tools_params when: make_download_tools_params is defined ansible.builtin.debug: var: make_download_tools_params - name: Run download_tools retries: "{{ make_download_tools_retries | default(omit) }}" delay: "{{ make_download_tools_delay | default(omit) }}" until: "{{ make_download_tools_until | default(true) }}" register: "make_download_tools_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make download_tools" dry_run: "{{ make_download_tools_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_download_tools_env|default({})), **(make_download_tools_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nfs.yml0000644000175000017500000000143715117040726033165 0ustar zuulzuul--- - name: Debug make_nfs_env when: make_nfs_env is defined ansible.builtin.debug: var: make_nfs_env - name: Debug make_nfs_params when: make_nfs_params is defined ansible.builtin.debug: var: make_nfs_params - name: Run nfs retries: "{{ make_nfs_retries | default(omit) }}" delay: "{{ make_nfs_delay | default(omit) }}" until: "{{ make_nfs_until | default(true) }}" register: "make_nfs_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make nfs" dry_run: "{{ make_nfs_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nfs_env|default({})), **(make_nfs_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nfs_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nfs_clea0000644000175000017500000000162715117040726033352 0ustar zuulzuul--- - name: Debug make_nfs_cleanup_env when: make_nfs_cleanup_env is defined ansible.builtin.debug: var: make_nfs_cleanup_env - name: Debug make_nfs_cleanup_params when: make_nfs_cleanup_params is defined ansible.builtin.debug: var: make_nfs_cleanup_params - name: Run nfs_cleanup retries: "{{ make_nfs_cleanup_retries | default(omit) }}" delay: "{{ make_nfs_cleanup_delay | default(omit) }}" until: "{{ make_nfs_cleanup_until | default(true) }}" register: "make_nfs_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make nfs_cleanup" dry_run: "{{ make_nfs_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nfs_cleanup_env|default({})), **(make_nfs_cleanup_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc.yml0000644000175000017500000000143715117040726033146 0ustar zuulzuul--- - name: Debug make_crc_env when: make_crc_env is defined ansible.builtin.debug: var: make_crc_env - name: Debug make_crc_params when: make_crc_params is defined ansible.builtin.debug: var: make_crc_params - name: Run crc retries: "{{ make_crc_retries | default(omit) }}" delay: "{{ make_crc_delay | default(omit) }}" until: "{{ make_crc_until | default(true) }}" register: "make_crc_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc" dry_run: "{{ make_crc_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_env|default({})), **(make_crc_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_clea0000644000175000017500000000162715117040726033333 0ustar zuulzuul--- - name: Debug make_crc_cleanup_env when: make_crc_cleanup_env is defined ansible.builtin.debug: var: make_crc_cleanup_env - name: Debug make_crc_cleanup_params when: make_crc_cleanup_params is defined ansible.builtin.debug: var: make_crc_cleanup_params - name: Run crc_cleanup retries: "{{ make_crc_cleanup_retries | default(omit) }}" delay: "{{ make_crc_cleanup_delay | default(omit) }}" until: "{{ make_crc_cleanup_until | default(true) }}" register: "make_crc_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc_cleanup" dry_run: "{{ make_crc_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_cleanup_env|default({})), **(make_crc_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_scrub.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_scru0000644000175000017500000000157115117040726033401 0ustar zuulzuul--- - name: Debug make_crc_scrub_env when: make_crc_scrub_env is defined ansible.builtin.debug: var: make_crc_scrub_env - name: Debug make_crc_scrub_params when: make_crc_scrub_params is defined ansible.builtin.debug: var: make_crc_scrub_params - name: Run crc_scrub retries: "{{ make_crc_scrub_retries | default(omit) }}" delay: "{{ make_crc_scrub_delay | default(omit) }}" until: "{{ make_crc_scrub_until | default(true) }}" register: "make_crc_scrub_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc_scrub" dry_run: "{{ make_crc_scrub_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_scrub_env|default({})), 
**(make_crc_scrub_params|default({}))) }}" ././@LongLink0000644000000000000000000000017500000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_attach_default_interface.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_atta0000644000175000017500000000222615117040726033354 0ustar zuulzuul--- - name: Debug make_crc_attach_default_interface_env when: make_crc_attach_default_interface_env is defined ansible.builtin.debug: var: make_crc_attach_default_interface_env - name: Debug make_crc_attach_default_interface_params when: make_crc_attach_default_interface_params is defined ansible.builtin.debug: var: make_crc_attach_default_interface_params - name: Run crc_attach_default_interface retries: "{{ make_crc_attach_default_interface_retries | default(omit) }}" delay: "{{ make_crc_attach_default_interface_delay | default(omit) }}" until: "{{ make_crc_attach_default_interface_until | default(true) }}" register: "make_crc_attach_default_interface_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc_attach_default_interface" dry_run: "{{ make_crc_attach_default_interface_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_attach_default_interface_env|default({})), **(make_crc_attach_default_interface_params|default({}))) }}" ././@LongLink0000644000000000000000000000020500000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_attach_default_interface_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_atta0000644000175000017500000000241615117040726033355 0ustar zuulzuul--- - name: Debug make_crc_attach_default_interface_cleanup_env when: make_crc_attach_default_interface_cleanup_env is defined ansible.builtin.debug: var: make_crc_attach_default_interface_cleanup_env - name: Debug make_crc_attach_default_interface_cleanup_params when: make_crc_attach_default_interface_cleanup_params is defined ansible.builtin.debug: var: make_crc_attach_default_interface_cleanup_params - name: Run crc_attach_default_interface_cleanup retries: "{{ make_crc_attach_default_interface_cleanup_retries | default(omit) }}" delay: "{{ make_crc_attach_default_interface_cleanup_delay | default(omit) }}" until: "{{ make_crc_attach_default_interface_cleanup_until | default(true) }}" register: "make_crc_attach_default_interface_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc_attach_default_interface_cleanup" dry_run: "{{ make_crc_attach_default_interface_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_attach_default_interface_cleanup_env|default({})), **(make_crc_attach_default_interface_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_network.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000174215117040726033300 0ustar zuulzuul--- - name: Debug 
make_ipv6_lab_network_env when: make_ipv6_lab_network_env is defined ansible.builtin.debug: var: make_ipv6_lab_network_env - name: Debug make_ipv6_lab_network_params when: make_ipv6_lab_network_params is defined ansible.builtin.debug: var: make_ipv6_lab_network_params - name: Run ipv6_lab_network retries: "{{ make_ipv6_lab_network_retries | default(omit) }}" delay: "{{ make_ipv6_lab_network_delay | default(omit) }}" until: "{{ make_ipv6_lab_network_until | default(true) }}" register: "make_ipv6_lab_network_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_network" dry_run: "{{ make_ipv6_lab_network_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_network_env|default({})), **(make_ipv6_lab_network_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_network_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000213215117040726033272 0ustar zuulzuul--- - name: Debug make_ipv6_lab_network_cleanup_env when: make_ipv6_lab_network_cleanup_env is defined ansible.builtin.debug: var: make_ipv6_lab_network_cleanup_env - name: Debug make_ipv6_lab_network_cleanup_params when: make_ipv6_lab_network_cleanup_params is defined ansible.builtin.debug: var: make_ipv6_lab_network_cleanup_params - name: Run ipv6_lab_network_cleanup retries: "{{ make_ipv6_lab_network_cleanup_retries | default(omit) }}" delay: "{{ make_ipv6_lab_network_cleanup_delay | default(omit) }}" until: "{{ make_ipv6_lab_network_cleanup_until | default(true) }}" register: "make_ipv6_lab_network_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_network_cleanup" dry_run: "{{ make_ipv6_lab_network_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_network_cleanup_env|default({})), **(make_ipv6_lab_network_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_nat64_router.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000205515117040726033276 0ustar zuulzuul--- - name: Debug make_ipv6_lab_nat64_router_env when: make_ipv6_lab_nat64_router_env is defined ansible.builtin.debug: var: make_ipv6_lab_nat64_router_env - name: Debug make_ipv6_lab_nat64_router_params when: make_ipv6_lab_nat64_router_params is defined ansible.builtin.debug: var: make_ipv6_lab_nat64_router_params - name: Run ipv6_lab_nat64_router retries: "{{ make_ipv6_lab_nat64_router_retries | default(omit) }}" delay: "{{ make_ipv6_lab_nat64_router_delay | default(omit) }}" until: "{{ make_ipv6_lab_nat64_router_until | default(true) }}" register: "make_ipv6_lab_nat64_router_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_nat64_router" 
dry_run: "{{ make_ipv6_lab_nat64_router_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_nat64_router_env|default({})), **(make_ipv6_lab_nat64_router_params|default({}))) }}" ././@LongLink0000644000000000000000000000017600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_nat64_router_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000224515117040726033277 0ustar zuulzuul--- - name: Debug make_ipv6_lab_nat64_router_cleanup_env when: make_ipv6_lab_nat64_router_cleanup_env is defined ansible.builtin.debug: var: make_ipv6_lab_nat64_router_cleanup_env - name: Debug make_ipv6_lab_nat64_router_cleanup_params when: make_ipv6_lab_nat64_router_cleanup_params is defined ansible.builtin.debug: var: make_ipv6_lab_nat64_router_cleanup_params - name: Run ipv6_lab_nat64_router_cleanup retries: "{{ make_ipv6_lab_nat64_router_cleanup_retries | default(omit) }}" delay: "{{ make_ipv6_lab_nat64_router_cleanup_delay | default(omit) }}" until: "{{ make_ipv6_lab_nat64_router_cleanup_until | default(true) }}" register: "make_ipv6_lab_nat64_router_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_nat64_router_cleanup" dry_run: "{{ make_ipv6_lab_nat64_router_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_nat64_router_cleanup_env|default({})), **(make_ipv6_lab_nat64_router_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_sno.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000164615117040726033303 0ustar zuulzuul--- - name: Debug make_ipv6_lab_sno_env when: make_ipv6_lab_sno_env is defined ansible.builtin.debug: var: make_ipv6_lab_sno_env - name: Debug make_ipv6_lab_sno_params when: make_ipv6_lab_sno_params is defined ansible.builtin.debug: var: make_ipv6_lab_sno_params - name: Run ipv6_lab_sno retries: "{{ make_ipv6_lab_sno_retries | default(omit) }}" delay: "{{ make_ipv6_lab_sno_delay | default(omit) }}" until: "{{ make_ipv6_lab_sno_until | default(true) }}" register: "make_ipv6_lab_sno_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_sno" dry_run: "{{ make_ipv6_lab_sno_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_sno_env|default({})), **(make_ipv6_lab_sno_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_sno_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000203615117040726033275 0ustar zuulzuul--- - name: Debug make_ipv6_lab_sno_cleanup_env when: make_ipv6_lab_sno_cleanup_env is defined ansible.builtin.debug: var: make_ipv6_lab_sno_cleanup_env - name: Debug make_ipv6_lab_sno_cleanup_params when: make_ipv6_lab_sno_cleanup_params is defined 
ansible.builtin.debug: var: make_ipv6_lab_sno_cleanup_params - name: Run ipv6_lab_sno_cleanup retries: "{{ make_ipv6_lab_sno_cleanup_retries | default(omit) }}" delay: "{{ make_ipv6_lab_sno_cleanup_delay | default(omit) }}" until: "{{ make_ipv6_lab_sno_cleanup_until | default(true) }}" register: "make_ipv6_lab_sno_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_sno_cleanup" dry_run: "{{ make_ipv6_lab_sno_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_sno_cleanup_env|default({})), **(make_ipv6_lab_sno_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000155215117040726033277 0ustar zuulzuul--- - name: Debug make_ipv6_lab_env when: make_ipv6_lab_env is defined ansible.builtin.debug: var: make_ipv6_lab_env - name: Debug make_ipv6_lab_params when: make_ipv6_lab_params is defined ansible.builtin.debug: var: make_ipv6_lab_params - name: Run ipv6_lab retries: "{{ make_ipv6_lab_retries | default(omit) }}" delay: "{{ make_ipv6_lab_delay | default(omit) }}" until: "{{ make_ipv6_lab_until | default(true) }}" register: "make_ipv6_lab_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab" dry_run: "{{ make_ipv6_lab_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_env|default({})), **(make_ipv6_lab_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000174215117040726033300 0ustar zuulzuul--- - name: Debug make_ipv6_lab_cleanup_env when: make_ipv6_lab_cleanup_env is defined ansible.builtin.debug: var: make_ipv6_lab_cleanup_env - name: Debug make_ipv6_lab_cleanup_params when: make_ipv6_lab_cleanup_params is defined ansible.builtin.debug: var: make_ipv6_lab_cleanup_params - name: Run ipv6_lab_cleanup retries: "{{ make_ipv6_lab_cleanup_retries | default(omit) }}" delay: "{{ make_ipv6_lab_cleanup_delay | default(omit) }}" until: "{{ make_ipv6_lab_cleanup_until | default(true) }}" register: "make_ipv6_lab_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_cleanup" dry_run: "{{ make_ipv6_lab_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_cleanup_env|default({})), **(make_ipv6_lab_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_attach_default_interface.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_attach_d0000644000175000017500000000213215117040726033337 0ustar zuulzuul--- - name: Debug make_attach_default_interface_env when: make_attach_default_interface_env is defined ansible.builtin.debug: var: make_attach_default_interface_env - name: Debug make_attach_default_interface_params when: make_attach_default_interface_params is defined ansible.builtin.debug: var: make_attach_default_interface_params - name: Run attach_default_interface retries: "{{ make_attach_default_interface_retries | default(omit) }}" delay: "{{ make_attach_default_interface_delay | default(omit) }}" until: "{{ make_attach_default_interface_until | default(true) }}" register: "make_attach_default_interface_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make attach_default_interface" dry_run: "{{ make_attach_default_interface_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_attach_default_interface_env|default({})), **(make_attach_default_interface_params|default({}))) }}" ././@LongLink0000644000000000000000000000020100000000000011574 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_attach_default_interface_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_attach_d0000644000175000017500000000232215117040726033340 0ustar zuulzuul--- - name: Debug make_attach_default_interface_cleanup_env when: make_attach_default_interface_cleanup_env is defined ansible.builtin.debug: var: make_attach_default_interface_cleanup_env - name: Debug make_attach_default_interface_cleanup_params when: make_attach_default_interface_cleanup_params is defined ansible.builtin.debug: var: make_attach_default_interface_cleanup_params - name: Run attach_default_interface_cleanup retries: "{{ make_attach_default_interface_cleanup_retries | default(omit) }}" delay: "{{ make_attach_default_interface_cleanup_delay | default(omit) }}" until: "{{ make_attach_default_interface_cleanup_until | default(true) }}" register: "make_attach_default_interface_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make attach_default_interface_cleanup" dry_run: "{{ make_attach_default_interface_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_attach_default_interface_cleanup_env|default({})), **(make_attach_default_interface_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_network_isolation_bridge.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_network_0000644000175000017500000000213215117040726033420 0ustar zuulzuul--- - name: Debug make_network_isolation_bridge_env when: make_network_isolation_bridge_env is defined ansible.builtin.debug: var: make_network_isolation_bridge_env - name: Debug make_network_isolation_bridge_params when: make_network_isolation_bridge_params is defined 
ansible.builtin.debug: var: make_network_isolation_bridge_params - name: Run network_isolation_bridge retries: "{{ make_network_isolation_bridge_retries | default(omit) }}" delay: "{{ make_network_isolation_bridge_delay | default(omit) }}" until: "{{ make_network_isolation_bridge_until | default(true) }}" register: "make_network_isolation_bridge_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make network_isolation_bridge" dry_run: "{{ make_network_isolation_bridge_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_network_isolation_bridge_env|default({})), **(make_network_isolation_bridge_params|default({}))) }}" ././@LongLink0000644000000000000000000000020100000000000011574 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_network_isolation_bridge_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_network_0000644000175000017500000000232215117040726033421 0ustar zuulzuul--- - name: Debug make_network_isolation_bridge_cleanup_env when: make_network_isolation_bridge_cleanup_env is defined ansible.builtin.debug: var: make_network_isolation_bridge_cleanup_env - name: Debug make_network_isolation_bridge_cleanup_params when: make_network_isolation_bridge_cleanup_params is defined ansible.builtin.debug: var: make_network_isolation_bridge_cleanup_params - name: Run network_isolation_bridge_cleanup retries: "{{ make_network_isolation_bridge_cleanup_retries | default(omit) }}" delay: "{{ make_network_isolation_bridge_cleanup_delay | default(omit) }}" until: "{{ make_network_isolation_bridge_cleanup_until | default(true) }}" register: "make_network_isolation_bridge_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make network_isolation_bridge_cleanup" dry_run: "{{ make_network_isolation_bridge_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_network_isolation_bridge_cleanup_env|default({})), **(make_network_isolation_bridge_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_baremetal_compute.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_bar0000644000175000017500000000207415117040726033346 0ustar zuulzuul--- - name: Debug make_edpm_baremetal_compute_env when: make_edpm_baremetal_compute_env is defined ansible.builtin.debug: var: make_edpm_baremetal_compute_env - name: Debug make_edpm_baremetal_compute_params when: make_edpm_baremetal_compute_params is defined ansible.builtin.debug: var: make_edpm_baremetal_compute_params - name: Run edpm_baremetal_compute retries: "{{ make_edpm_baremetal_compute_retries | default(omit) }}" delay: "{{ make_edpm_baremetal_compute_delay | default(omit) }}" until: "{{ make_edpm_baremetal_compute_until | default(true) }}" register: "make_edpm_baremetal_compute_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make 
edpm_baremetal_compute" dry_run: "{{ make_edpm_baremetal_compute_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_baremetal_compute_env|default({})), **(make_edpm_baremetal_compute_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_compute.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000164615117040726033364 0ustar zuulzuul--- - name: Debug make_edpm_compute_env when: make_edpm_compute_env is defined ansible.builtin.debug: var: make_edpm_compute_env - name: Debug make_edpm_compute_params when: make_edpm_compute_params is defined ansible.builtin.debug: var: make_edpm_compute_params - name: Run edpm_compute retries: "{{ make_edpm_compute_retries | default(omit) }}" delay: "{{ make_edpm_compute_delay | default(omit) }}" until: "{{ make_edpm_compute_until | default(true) }}" register: "make_edpm_compute_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_compute" dry_run: "{{ make_edpm_compute_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_compute_env|default({})), **(make_edpm_compute_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_compute_bootc.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000200015117040726033345 0ustar zuulzuul--- - name: Debug make_edpm_compute_bootc_env when: make_edpm_compute_bootc_env is defined ansible.builtin.debug: var: make_edpm_compute_bootc_env - name: Debug make_edpm_compute_bootc_params when: make_edpm_compute_bootc_params is defined ansible.builtin.debug: var: make_edpm_compute_bootc_params - name: Run edpm_compute_bootc retries: "{{ make_edpm_compute_bootc_retries | default(omit) }}" delay: "{{ make_edpm_compute_bootc_delay | default(omit) }}" until: "{{ make_edpm_compute_bootc_until | default(true) }}" register: "make_edpm_compute_bootc_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_compute_bootc" dry_run: "{{ make_edpm_compute_bootc_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_compute_bootc_env|default({})), **(make_edpm_compute_bootc_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_ansible_runner.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_ans0000644000175000017500000000201715117040726033360 0ustar zuulzuul--- - name: Debug make_edpm_ansible_runner_env when: make_edpm_ansible_runner_env is defined ansible.builtin.debug: var: make_edpm_ansible_runner_env - name: Debug make_edpm_ansible_runner_params when: make_edpm_ansible_runner_params is defined ansible.builtin.debug: var: make_edpm_ansible_runner_params - name: Run edpm_ansible_runner retries: "{{ make_edpm_ansible_runner_retries | default(omit) }}" 
delay: "{{ make_edpm_ansible_runner_delay | default(omit) }}" until: "{{ make_edpm_ansible_runner_until | default(true) }}" register: "make_edpm_ansible_runner_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_ansible_runner" dry_run: "{{ make_edpm_ansible_runner_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_ansible_runner_env|default({})), **(make_edpm_ansible_runner_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_computes_bgp.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000176115117040726033362 0ustar zuulzuul--- - name: Debug make_edpm_computes_bgp_env when: make_edpm_computes_bgp_env is defined ansible.builtin.debug: var: make_edpm_computes_bgp_env - name: Debug make_edpm_computes_bgp_params when: make_edpm_computes_bgp_params is defined ansible.builtin.debug: var: make_edpm_computes_bgp_params - name: Run edpm_computes_bgp retries: "{{ make_edpm_computes_bgp_retries | default(omit) }}" delay: "{{ make_edpm_computes_bgp_delay | default(omit) }}" until: "{{ make_edpm_computes_bgp_until | default(true) }}" register: "make_edpm_computes_bgp_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_computes_bgp" dry_run: "{{ make_edpm_computes_bgp_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_computes_bgp_env|default({})), **(make_edpm_computes_bgp_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_compute_repos.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000200015117040726033345 0ustar zuulzuul--- - name: Debug make_edpm_compute_repos_env when: make_edpm_compute_repos_env is defined ansible.builtin.debug: var: make_edpm_compute_repos_env - name: Debug make_edpm_compute_repos_params when: make_edpm_compute_repos_params is defined ansible.builtin.debug: var: make_edpm_compute_repos_params - name: Run edpm_compute_repos retries: "{{ make_edpm_compute_repos_retries | default(omit) }}" delay: "{{ make_edpm_compute_repos_delay | default(omit) }}" until: "{{ make_edpm_compute_repos_until | default(true) }}" register: "make_edpm_compute_repos_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_compute_repos" dry_run: "{{ make_edpm_compute_repos_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_compute_repos_env|default({})), **(make_edpm_compute_repos_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_compute_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000203615117040726033356 0ustar zuulzuul--- - name: Debug make_edpm_compute_cleanup_env when: make_edpm_compute_cleanup_env is defined ansible.builtin.debug: var: make_edpm_compute_cleanup_env - name: Debug make_edpm_compute_cleanup_params when: make_edpm_compute_cleanup_params is defined ansible.builtin.debug: var: make_edpm_compute_cleanup_params - name: Run edpm_compute_cleanup retries: "{{ make_edpm_compute_cleanup_retries | default(omit) }}" delay: "{{ make_edpm_compute_cleanup_delay | default(omit) }}" until: "{{ make_edpm_compute_cleanup_until | default(true) }}" register: "make_edpm_compute_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_compute_cleanup" dry_run: "{{ make_edpm_compute_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_compute_cleanup_env|default({})), **(make_edpm_compute_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_networker.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_net0000644000175000017500000000170415117040726033367 0ustar zuulzuul--- - name: Debug make_edpm_networker_env when: make_edpm_networker_env is defined ansible.builtin.debug: var: make_edpm_networker_env - name: Debug make_edpm_networker_params when: make_edpm_networker_params is defined ansible.builtin.debug: var: make_edpm_networker_params - name: Run edpm_networker retries: "{{ make_edpm_networker_retries | default(omit) }}" delay: "{{ make_edpm_networker_delay | default(omit) }}" until: "{{ make_edpm_networker_until | default(true) }}" register: "make_edpm_networker_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_networker" dry_run: "{{ make_edpm_networker_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_networker_env|default({})), **(make_edpm_networker_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_networker_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_net0000644000175000017500000000207415117040726033370 0ustar zuulzuul--- - name: Debug make_edpm_networker_cleanup_env when: make_edpm_networker_cleanup_env is defined ansible.builtin.debug: var: make_edpm_networker_cleanup_env - name: Debug make_edpm_networker_cleanup_params when: make_edpm_networker_cleanup_params is defined ansible.builtin.debug: var: make_edpm_networker_cleanup_params - name: Run edpm_networker_cleanup retries: "{{ make_edpm_networker_cleanup_retries | default(omit) }}" delay: "{{ make_edpm_networker_cleanup_delay | default(omit) }}" until: "{{ make_edpm_networker_cleanup_until | default(true) }}" register: "make_edpm_networker_cleanup_status" 
cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_networker_cleanup" dry_run: "{{ make_edpm_networker_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_networker_cleanup_env|default({})), **(make_edpm_networker_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_instance.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000203615117040726033350 0ustar zuulzuul--- - name: Debug make_edpm_deploy_instance_env when: make_edpm_deploy_instance_env is defined ansible.builtin.debug: var: make_edpm_deploy_instance_env - name: Debug make_edpm_deploy_instance_params when: make_edpm_deploy_instance_params is defined ansible.builtin.debug: var: make_edpm_deploy_instance_params - name: Run edpm_deploy_instance retries: "{{ make_edpm_deploy_instance_retries | default(omit) }}" delay: "{{ make_edpm_deploy_instance_delay | default(omit) }}" until: "{{ make_edpm_deploy_instance_until | default(true) }}" register: "make_edpm_deploy_instance_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_deploy_instance" dry_run: "{{ make_edpm_deploy_instance_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_instance_env|default({})), **(make_edpm_deploy_instance_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_tripleo_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_tripleo_0000644000175000017500000000170415117040726033411 0ustar zuulzuul--- - name: Debug make_tripleo_deploy_env when: make_tripleo_deploy_env is defined ansible.builtin.debug: var: make_tripleo_deploy_env - name: Debug make_tripleo_deploy_params when: make_tripleo_deploy_params is defined ansible.builtin.debug: var: make_tripleo_deploy_params - name: Run tripleo_deploy retries: "{{ make_tripleo_deploy_retries | default(omit) }}" delay: "{{ make_tripleo_deploy_delay | default(omit) }}" until: "{{ make_tripleo_deploy_until | default(true) }}" register: "make_tripleo_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make tripleo_deploy" dry_run: "{{ make_tripleo_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_tripleo_deploy_env|default({})), **(make_tripleo_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000176115117040726033404 0ustar zuulzuul--- - name: Debug make_standalone_deploy_env when: make_standalone_deploy_env is defined ansible.builtin.debug: var: 
make_standalone_deploy_env - name: Debug make_standalone_deploy_params when: make_standalone_deploy_params is defined ansible.builtin.debug: var: make_standalone_deploy_params - name: Run standalone_deploy retries: "{{ make_standalone_deploy_retries | default(omit) }}" delay: "{{ make_standalone_deploy_delay | default(omit) }}" until: "{{ make_standalone_deploy_until | default(true) }}" register: "make_standalone_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_deploy" dry_run: "{{ make_standalone_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_deploy_env|default({})), **(make_standalone_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_sync.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000172315117040726033402 0ustar zuulzuul--- - name: Debug make_standalone_sync_env when: make_standalone_sync_env is defined ansible.builtin.debug: var: make_standalone_sync_env - name: Debug make_standalone_sync_params when: make_standalone_sync_params is defined ansible.builtin.debug: var: make_standalone_sync_params - name: Run standalone_sync retries: "{{ make_standalone_sync_retries | default(omit) }}" delay: "{{ make_standalone_sync_delay | default(omit) }}" until: "{{ make_standalone_sync_until | default(true) }}" register: "make_standalone_sync_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_sync" dry_run: "{{ make_standalone_sync_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_sync_env|default({})), **(make_standalone_sync_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000161015117040726033375 0ustar zuulzuul--- - name: Debug make_standalone_env when: make_standalone_env is defined ansible.builtin.debug: var: make_standalone_env - name: Debug make_standalone_params when: make_standalone_params is defined ansible.builtin.debug: var: make_standalone_params - name: Run standalone retries: "{{ make_standalone_retries | default(omit) }}" delay: "{{ make_standalone_delay | default(omit) }}" until: "{{ make_standalone_until | default(true) }}" register: "make_standalone_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone" dry_run: "{{ make_standalone_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_env|default({})), **(make_standalone_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000200015117040726033367 0ustar zuulzuul--- - name: Debug make_standalone_cleanup_env when: make_standalone_cleanup_env is defined ansible.builtin.debug: var: make_standalone_cleanup_env - name: Debug make_standalone_cleanup_params when: make_standalone_cleanup_params is defined ansible.builtin.debug: var: make_standalone_cleanup_params - name: Run standalone_cleanup retries: "{{ make_standalone_cleanup_retries | default(omit) }}" delay: "{{ make_standalone_cleanup_delay | default(omit) }}" until: "{{ make_standalone_cleanup_until | default(true) }}" register: "make_standalone_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_cleanup" dry_run: "{{ make_standalone_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_cleanup_env|default({})), **(make_standalone_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_snapshot.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000201715117040726033377 0ustar zuulzuul--- - name: Debug make_standalone_snapshot_env when: make_standalone_snapshot_env is defined ansible.builtin.debug: var: make_standalone_snapshot_env - name: Debug make_standalone_snapshot_params when: make_standalone_snapshot_params is defined ansible.builtin.debug: var: make_standalone_snapshot_params - name: Run standalone_snapshot retries: "{{ make_standalone_snapshot_retries | default(omit) }}" delay: "{{ make_standalone_snapshot_delay | default(omit) }}" until: "{{ make_standalone_snapshot_until | default(true) }}" register: "make_standalone_snapshot_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_snapshot" dry_run: "{{ make_standalone_snapshot_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_snapshot_env|default({})), **(make_standalone_snapshot_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_revert.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000176115117040726033404 0ustar zuulzuul--- - name: Debug make_standalone_revert_env when: make_standalone_revert_env is defined ansible.builtin.debug: var: make_standalone_revert_env - name: Debug make_standalone_revert_params when: make_standalone_revert_params is defined ansible.builtin.debug: var: make_standalone_revert_params - name: Run standalone_revert retries: "{{ make_standalone_revert_retries | default(omit) }}" delay: "{{ make_standalone_revert_delay | default(omit) }}" until: "{{ make_standalone_revert_until | default(true) }}" register: "make_standalone_revert_status" cifmw.general.ci_script: 
output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_revert" dry_run: "{{ make_standalone_revert_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_revert_env|default({})), **(make_standalone_revert_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cifmw_prepare.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cifmw_pr0000644000175000017500000000166515117040726033410 0ustar zuulzuul--- - name: Debug make_cifmw_prepare_env when: make_cifmw_prepare_env is defined ansible.builtin.debug: var: make_cifmw_prepare_env - name: Debug make_cifmw_prepare_params when: make_cifmw_prepare_params is defined ansible.builtin.debug: var: make_cifmw_prepare_params - name: Run cifmw_prepare retries: "{{ make_cifmw_prepare_retries | default(omit) }}" delay: "{{ make_cifmw_prepare_delay | default(omit) }}" until: "{{ make_cifmw_prepare_until | default(true) }}" register: "make_cifmw_prepare_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make cifmw_prepare" dry_run: "{{ make_cifmw_prepare_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cifmw_prepare_env|default({})), **(make_cifmw_prepare_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cifmw_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cifmw_cl0000644000175000017500000000166515117040726033365 0ustar zuulzuul--- - name: Debug make_cifmw_cleanup_env when: make_cifmw_cleanup_env is defined ansible.builtin.debug: var: make_cifmw_cleanup_env - name: Debug make_cifmw_cleanup_params when: make_cifmw_cleanup_params is defined ansible.builtin.debug: var: make_cifmw_cleanup_params - name: Run cifmw_cleanup retries: "{{ make_cifmw_cleanup_retries | default(omit) }}" delay: "{{ make_cifmw_cleanup_delay | default(omit) }}" until: "{{ make_cifmw_cleanup_until | default(true) }}" register: "make_cifmw_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make cifmw_cleanup" dry_run: "{{ make_cifmw_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cifmw_cleanup_env|default({})), **(make_cifmw_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_network.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ne0000644000175000017500000000166515117040726033347 0ustar zuulzuul--- - name: Debug make_bmaas_network_env when: make_bmaas_network_env is defined ansible.builtin.debug: var: make_bmaas_network_env - name: Debug make_bmaas_network_params when: make_bmaas_network_params is defined ansible.builtin.debug: var: make_bmaas_network_params - name: Run bmaas_network retries: 
"{{ make_bmaas_network_retries | default(omit) }}" delay: "{{ make_bmaas_network_delay | default(omit) }}" until: "{{ make_bmaas_network_until | default(true) }}" register: "make_bmaas_network_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_network" dry_run: "{{ make_bmaas_network_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_network_env|default({})), **(make_bmaas_network_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_network_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ne0000644000175000017500000000205515117040726033341 0ustar zuulzuul--- - name: Debug make_bmaas_network_cleanup_env when: make_bmaas_network_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_network_cleanup_env - name: Debug make_bmaas_network_cleanup_params when: make_bmaas_network_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_network_cleanup_params - name: Run bmaas_network_cleanup retries: "{{ make_bmaas_network_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_network_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_network_cleanup_until | default(true) }}" register: "make_bmaas_network_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_network_cleanup" dry_run: "{{ make_bmaas_network_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_network_cleanup_env|default({})), **(make_bmaas_network_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000020700000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_route_crc_and_crc_bmaas_networks.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ro0000644000175000017500000000245415117040726033362 0ustar zuulzuul--- - name: Debug make_bmaas_route_crc_and_crc_bmaas_networks_env when: make_bmaas_route_crc_and_crc_bmaas_networks_env is defined ansible.builtin.debug: var: make_bmaas_route_crc_and_crc_bmaas_networks_env - name: Debug make_bmaas_route_crc_and_crc_bmaas_networks_params when: make_bmaas_route_crc_and_crc_bmaas_networks_params is defined ansible.builtin.debug: var: make_bmaas_route_crc_and_crc_bmaas_networks_params - name: Run bmaas_route_crc_and_crc_bmaas_networks retries: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_retries | default(omit) }}" delay: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_delay | default(omit) }}" until: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_until | default(true) }}" register: "make_bmaas_route_crc_and_crc_bmaas_networks_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_route_crc_and_crc_bmaas_networks" dry_run: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_dryrun|default(false)|bool }}" extra_args: "{{ 
dict((make_bmaas_route_crc_and_crc_bmaas_networks_env|default({})), **(make_bmaas_route_crc_and_crc_bmaas_networks_params|default({}))) }}" ././@LongLink0000644000000000000000000000021700000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_route_crc_and_crc_bmaas_networks_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ro0000644000175000017500000000264415117040726033363 0ustar zuulzuul--- - name: Debug make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_env when: make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_env - name: Debug make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_params when: make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_params - name: Run bmaas_route_crc_and_crc_bmaas_networks_cleanup retries: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_until | default(true) }}" register: "make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_route_crc_and_crc_bmaas_networks_cleanup" dry_run: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_env|default({})), **(make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_metallb.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_me0000644000175000017500000000166515117040726033346 0ustar zuulzuul--- - name: Debug make_bmaas_metallb_env when: make_bmaas_metallb_env is defined ansible.builtin.debug: var: make_bmaas_metallb_env - name: Debug make_bmaas_metallb_params when: make_bmaas_metallb_params is defined ansible.builtin.debug: var: make_bmaas_metallb_params - name: Run bmaas_metallb retries: "{{ make_bmaas_metallb_retries | default(omit) }}" delay: "{{ make_bmaas_metallb_delay | default(omit) }}" until: "{{ make_bmaas_metallb_until | default(true) }}" register: "make_bmaas_metallb_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_metallb" dry_run: "{{ make_bmaas_metallb_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_metallb_env|default({})), **(make_bmaas_metallb_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_crc_attach_network.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cr0000644000175000017500000000213215117040726033337 0ustar zuulzuul--- - name: 
Debug make_bmaas_crc_attach_network_env when: make_bmaas_crc_attach_network_env is defined ansible.builtin.debug: var: make_bmaas_crc_attach_network_env - name: Debug make_bmaas_crc_attach_network_params when: make_bmaas_crc_attach_network_params is defined ansible.builtin.debug: var: make_bmaas_crc_attach_network_params - name: Run bmaas_crc_attach_network retries: "{{ make_bmaas_crc_attach_network_retries | default(omit) }}" delay: "{{ make_bmaas_crc_attach_network_delay | default(omit) }}" until: "{{ make_bmaas_crc_attach_network_until | default(true) }}" register: "make_bmaas_crc_attach_network_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_crc_attach_network" dry_run: "{{ make_bmaas_crc_attach_network_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_crc_attach_network_env|default({})), **(make_bmaas_crc_attach_network_params|default({}))) }}" ././@LongLink0000644000000000000000000000020100000000000011574 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_crc_attach_network_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cr0000644000175000017500000000232215117040726033340 0ustar zuulzuul--- - name: Debug make_bmaas_crc_attach_network_cleanup_env when: make_bmaas_crc_attach_network_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_crc_attach_network_cleanup_env - name: Debug make_bmaas_crc_attach_network_cleanup_params when: make_bmaas_crc_attach_network_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_crc_attach_network_cleanup_params - name: Run bmaas_crc_attach_network_cleanup retries: "{{ make_bmaas_crc_attach_network_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_crc_attach_network_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_crc_attach_network_cleanup_until | default(true) }}" register: "make_bmaas_crc_attach_network_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_crc_attach_network_cleanup" dry_run: "{{ make_bmaas_crc_attach_network_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_crc_attach_network_cleanup_env|default({})), **(make_bmaas_crc_attach_network_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017300000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_crc_baremetal_bridge.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cr0000644000175000017500000000217015117040726033341 0ustar zuulzuul--- - name: Debug make_bmaas_crc_baremetal_bridge_env when: make_bmaas_crc_baremetal_bridge_env is defined ansible.builtin.debug: var: make_bmaas_crc_baremetal_bridge_env - name: Debug make_bmaas_crc_baremetal_bridge_params when: make_bmaas_crc_baremetal_bridge_params is defined ansible.builtin.debug: var: make_bmaas_crc_baremetal_bridge_params - name: Run bmaas_crc_baremetal_bridge retries: "{{ make_bmaas_crc_baremetal_bridge_retries | default(omit) }}" delay: "{{ make_bmaas_crc_baremetal_bridge_delay | default(omit) }}" until: "{{ 
make_bmaas_crc_baremetal_bridge_until | default(true) }}" register: "make_bmaas_crc_baremetal_bridge_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_crc_baremetal_bridge" dry_run: "{{ make_bmaas_crc_baremetal_bridge_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_crc_baremetal_bridge_env|default({})), **(make_bmaas_crc_baremetal_bridge_params|default({}))) }}" ././@LongLink0000644000000000000000000000020300000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_crc_baremetal_bridge_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cr0000644000175000017500000000236015117040726033342 0ustar zuulzuul--- - name: Debug make_bmaas_crc_baremetal_bridge_cleanup_env when: make_bmaas_crc_baremetal_bridge_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_crc_baremetal_bridge_cleanup_env - name: Debug make_bmaas_crc_baremetal_bridge_cleanup_params when: make_bmaas_crc_baremetal_bridge_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_crc_baremetal_bridge_cleanup_params - name: Run bmaas_crc_baremetal_bridge_cleanup retries: "{{ make_bmaas_crc_baremetal_bridge_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_crc_baremetal_bridge_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_crc_baremetal_bridge_cleanup_until | default(true) }}" register: "make_bmaas_crc_baremetal_bridge_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_crc_baremetal_bridge_cleanup" dry_run: "{{ make_bmaas_crc_baremetal_bridge_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_crc_baremetal_bridge_cleanup_env|default({})), **(make_bmaas_crc_baremetal_bridge_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_baremetal_net_nad.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ba0000644000175000017500000000211315117040726033314 0ustar zuulzuul--- - name: Debug make_bmaas_baremetal_net_nad_env when: make_bmaas_baremetal_net_nad_env is defined ansible.builtin.debug: var: make_bmaas_baremetal_net_nad_env - name: Debug make_bmaas_baremetal_net_nad_params when: make_bmaas_baremetal_net_nad_params is defined ansible.builtin.debug: var: make_bmaas_baremetal_net_nad_params - name: Run bmaas_baremetal_net_nad retries: "{{ make_bmaas_baremetal_net_nad_retries | default(omit) }}" delay: "{{ make_bmaas_baremetal_net_nad_delay | default(omit) }}" until: "{{ make_bmaas_baremetal_net_nad_until | default(true) }}" register: "make_bmaas_baremetal_net_nad_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_baremetal_net_nad" dry_run: "{{ make_bmaas_baremetal_net_nad_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_baremetal_net_nad_env|default({})), 
**(make_bmaas_baremetal_net_nad_params|default({}))) }}" ././@LongLink0000644000000000000000000000020000000000000011573 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_baremetal_net_nad_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ba0000644000175000017500000000230315117040726033315 0ustar zuulzuul--- - name: Debug make_bmaas_baremetal_net_nad_cleanup_env when: make_bmaas_baremetal_net_nad_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_baremetal_net_nad_cleanup_env - name: Debug make_bmaas_baremetal_net_nad_cleanup_params when: make_bmaas_baremetal_net_nad_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_baremetal_net_nad_cleanup_params - name: Run bmaas_baremetal_net_nad_cleanup retries: "{{ make_bmaas_baremetal_net_nad_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_baremetal_net_nad_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_baremetal_net_nad_cleanup_until | default(true) }}" register: "make_bmaas_baremetal_net_nad_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_baremetal_net_nad_cleanup" dry_run: "{{ make_bmaas_baremetal_net_nad_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_baremetal_net_nad_cleanup_env|default({})), **(make_bmaas_baremetal_net_nad_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_metallb_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_me0000644000175000017500000000205515117040726033340 0ustar zuulzuul--- - name: Debug make_bmaas_metallb_cleanup_env when: make_bmaas_metallb_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_metallb_cleanup_env - name: Debug make_bmaas_metallb_cleanup_params when: make_bmaas_metallb_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_metallb_cleanup_params - name: Run bmaas_metallb_cleanup retries: "{{ make_bmaas_metallb_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_metallb_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_metallb_cleanup_until | default(true) }}" register: "make_bmaas_metallb_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_metallb_cleanup" dry_run: "{{ make_bmaas_metallb_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_metallb_cleanup_env|default({})), **(make_bmaas_metallb_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_virtual_bms.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_vi0000644000175000017500000000176115117040726033360 0ustar zuulzuul--- - name: Debug make_bmaas_virtual_bms_env when: make_bmaas_virtual_bms_env is defined ansible.builtin.debug: var: make_bmaas_virtual_bms_env - name: Debug make_bmaas_virtual_bms_params when: 
make_bmaas_virtual_bms_params is defined ansible.builtin.debug: var: make_bmaas_virtual_bms_params - name: Run bmaas_virtual_bms retries: "{{ make_bmaas_virtual_bms_retries | default(omit) }}" delay: "{{ make_bmaas_virtual_bms_delay | default(omit) }}" until: "{{ make_bmaas_virtual_bms_until | default(true) }}" register: "make_bmaas_virtual_bms_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_virtual_bms" dry_run: "{{ make_bmaas_virtual_bms_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_virtual_bms_env|default({})), **(make_bmaas_virtual_bms_params|default({}))) }}" ././@LongLink0000644000000000000000000000017200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_virtual_bms_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_vi0000644000175000017500000000215115117040726033352 0ustar zuulzuul--- - name: Debug make_bmaas_virtual_bms_cleanup_env when: make_bmaas_virtual_bms_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_virtual_bms_cleanup_env - name: Debug make_bmaas_virtual_bms_cleanup_params when: make_bmaas_virtual_bms_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_virtual_bms_cleanup_params - name: Run bmaas_virtual_bms_cleanup retries: "{{ make_bmaas_virtual_bms_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_virtual_bms_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_virtual_bms_cleanup_until | default(true) }}" register: "make_bmaas_virtual_bms_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_virtual_bms_cleanup" dry_run: "{{ make_bmaas_virtual_bms_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_virtual_bms_cleanup_env|default({})), **(make_bmaas_virtual_bms_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_sushy_emulator.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_su0000644000175000017500000000203615117040726033365 0ustar zuulzuul--- - name: Debug make_bmaas_sushy_emulator_env when: make_bmaas_sushy_emulator_env is defined ansible.builtin.debug: var: make_bmaas_sushy_emulator_env - name: Debug make_bmaas_sushy_emulator_params when: make_bmaas_sushy_emulator_params is defined ansible.builtin.debug: var: make_bmaas_sushy_emulator_params - name: Run bmaas_sushy_emulator retries: "{{ make_bmaas_sushy_emulator_retries | default(omit) }}" delay: "{{ make_bmaas_sushy_emulator_delay | default(omit) }}" until: "{{ make_bmaas_sushy_emulator_until | default(true) }}" register: "make_bmaas_sushy_emulator_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_sushy_emulator" dry_run: "{{ make_bmaas_sushy_emulator_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_sushy_emulator_env|default({})), 
**(make_bmaas_sushy_emulator_params|default({}))) }}" ././@LongLink0000644000000000000000000000017500000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_sushy_emulator_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_su0000644000175000017500000000222615117040726033366 0ustar zuulzuul--- - name: Debug make_bmaas_sushy_emulator_cleanup_env when: make_bmaas_sushy_emulator_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_sushy_emulator_cleanup_env - name: Debug make_bmaas_sushy_emulator_cleanup_params when: make_bmaas_sushy_emulator_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_sushy_emulator_cleanup_params - name: Run bmaas_sushy_emulator_cleanup retries: "{{ make_bmaas_sushy_emulator_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_sushy_emulator_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_sushy_emulator_cleanup_until | default(true) }}" register: "make_bmaas_sushy_emulator_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_sushy_emulator_cleanup" dry_run: "{{ make_bmaas_sushy_emulator_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_sushy_emulator_cleanup_env|default({})), **(make_bmaas_sushy_emulator_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_sushy_emulator_wait.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_su0000644000175000017500000000215115117040726033363 0ustar zuulzuul--- - name: Debug make_bmaas_sushy_emulator_wait_env when: make_bmaas_sushy_emulator_wait_env is defined ansible.builtin.debug: var: make_bmaas_sushy_emulator_wait_env - name: Debug make_bmaas_sushy_emulator_wait_params when: make_bmaas_sushy_emulator_wait_params is defined ansible.builtin.debug: var: make_bmaas_sushy_emulator_wait_params - name: Run bmaas_sushy_emulator_wait retries: "{{ make_bmaas_sushy_emulator_wait_retries | default(omit) }}" delay: "{{ make_bmaas_sushy_emulator_wait_delay | default(omit) }}" until: "{{ make_bmaas_sushy_emulator_wait_until | default(true) }}" register: "make_bmaas_sushy_emulator_wait_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_sushy_emulator_wait" dry_run: "{{ make_bmaas_sushy_emulator_wait_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_sushy_emulator_wait_env|default({})), **(make_bmaas_sushy_emulator_wait_params|default({}))) }}" ././@LongLink0000644000000000000000000000017200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_generate_nodes_yaml.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ge0000644000175000017500000000215115117040726033327 0ustar zuulzuul--- - name: Debug make_bmaas_generate_nodes_yaml_env when: make_bmaas_generate_nodes_yaml_env is defined ansible.builtin.debug: var: make_bmaas_generate_nodes_yaml_env - name: Debug 
make_bmaas_generate_nodes_yaml_params when: make_bmaas_generate_nodes_yaml_params is defined ansible.builtin.debug: var: make_bmaas_generate_nodes_yaml_params - name: Run bmaas_generate_nodes_yaml retries: "{{ make_bmaas_generate_nodes_yaml_retries | default(omit) }}" delay: "{{ make_bmaas_generate_nodes_yaml_delay | default(omit) }}" until: "{{ make_bmaas_generate_nodes_yaml_until | default(true) }}" register: "make_bmaas_generate_nodes_yaml_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_generate_nodes_yaml" dry_run: "{{ make_bmaas_generate_nodes_yaml_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_generate_nodes_yaml_env|default({})), **(make_bmaas_generate_nodes_yaml_params|default({}))) }}" ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas.ym0000644000175000017500000000147515117040726033310 0ustar zuulzuul--- - name: Debug make_bmaas_env when: make_bmaas_env is defined ansible.builtin.debug: var: make_bmaas_env - name: Debug make_bmaas_params when: make_bmaas_params is defined ansible.builtin.debug: var: make_bmaas_params - name: Run bmaas retries: "{{ make_bmaas_retries | default(omit) }}" delay: "{{ make_bmaas_delay | default(omit) }}" until: "{{ make_bmaas_until | default(true) }}" register: "make_bmaas_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas" dry_run: "{{ make_bmaas_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_env|default({})), **(make_bmaas_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cl0000644000175000017500000000166515117040726033343 0ustar zuulzuul--- - name: Debug make_bmaas_cleanup_env when: make_bmaas_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_cleanup_env - name: Debug make_bmaas_cleanup_params when: make_bmaas_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_cleanup_params - name: Run bmaas_cleanup retries: "{{ make_bmaas_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_cleanup_until | default(true) }}" register: "make_bmaas_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_cleanup" dry_run: "{{ make_bmaas_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_cleanup_env|default({})), **(make_bmaas_cleanup_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/0000755000175000017500000000000015117043064025073 5ustar 
zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/delorean-antelope-testing.repo0000644000175000017500000000317215117040653033037 0ustar zuulzuul[delorean-antelope-testing] name=dlrn-antelope-testing baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/deps/latest/ enabled=1 gpgcheck=0 module_hotfixes=1 [delorean-antelope-build-deps] name=dlrn-antelope-build-deps baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/build-deps/latest/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-rabbitmq] name=centos9-rabbitmq baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/messaging/$basearch/rabbitmq-38/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-storage] name=centos9-storage baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/storage/$basearch/ceph-reef/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-opstools] name=centos9-opstools baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/opstools/$basearch/collectd-5/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-nfv-ovs] name=NFV SIG OpenvSwitch baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/nfv/$basearch/openvswitch-2/ gpgcheck=0 enabled=1 module_hotfixes=1 # epel is required for Ceph Reef [epel-low-priority] name=Extra Packages for Enterprise Linux $releasever - $basearch metalink=https://mirrors.fedoraproject.org/metalink?repo=epel-$releasever&arch=$basearch&infra=$infra&content=$contentdir enabled=1 gpgcheck=0 countme=1 priority=100 includepkgs=libarrow*,parquet*,python3-asyncssh,re2,python3-grpcio,grpc*,abseil*,thrift*,blake3 home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/delorean.repo0000644000175000017500000001341515117040653027560 0ustar zuulzuul[delorean-component-barbican] name=delorean-openstack-barbican-42b4c41831408a8e323fec3c8983b5c793b64874 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/barbican/42/b4/42b4c41831408a8e323fec3c8983b5c793b64874_08052e9d enabled=1 gpgcheck=0 priority=1 [delorean-component-baremetal] name=delorean-python-glean-10df0bd91b9bc5c9fd9cc02d75c0084cd4da29a7 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/baremetal/10/df/10df0bd91b9bc5c9fd9cc02d75c0084cd4da29a7_36137eb3 enabled=1 gpgcheck=0 priority=1 [delorean-component-cinder] name=delorean-openstack-cinder-1c00d6490d88e436f26efb71f2ac96e75252e97c baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/cinder/1c/00/1c00d6490d88e436f26efb71f2ac96e75252e97c_f716f000 enabled=1 gpgcheck=0 priority=1 [delorean-component-clients] name=delorean-python-stevedore-c4acc5639fd2329372142e39464fcca0209b0018 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/clients/c4/ac/c4acc5639fd2329372142e39464fcca0209b0018_d3ef8337 enabled=1 gpgcheck=0 priority=1 [delorean-component-cloudops] name=delorean-python-cloudkitty-tests-tempest-2c80f80e02c5accd099187ea762c8f8389bd7905 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/cloudops/2c/80/2c80f80e02c5accd099187ea762c8f8389bd7905_33e4dd93 enabled=1 gpgcheck=0 priority=1 [delorean-component-common] 
name=delorean-os-refresh-config-9bfc52b5049be2d8de6134d662fdde9dfa48960f baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/common/9b/fc/9bfc52b5049be2d8de6134d662fdde9dfa48960f_b85780e6 enabled=1 gpgcheck=0 priority=1 [delorean-component-compute] name=delorean-openstack-nova-6f8decf0b4f1aa2e96292b6a2ffc28249fe4af5e baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/compute/6f/8d/6f8decf0b4f1aa2e96292b6a2ffc28249fe4af5e_dc05b899 enabled=1 gpgcheck=0 priority=1 [delorean-component-designate] name=delorean-python-designate-tests-tempest-347fdbc9b4595a10b726526b3c0b5928e5b7fcf2 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/designate/34/7f/347fdbc9b4595a10b726526b3c0b5928e5b7fcf2_3fd39337 enabled=1 gpgcheck=0 priority=1 [delorean-component-glance] name=delorean-openstack-glance-1fd12c29b339f30fe823e2b5beba14b5f241e52a baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/glance/1f/d1/1fd12c29b339f30fe823e2b5beba14b5f241e52a_0d693729 enabled=1 gpgcheck=0 priority=1 [delorean-component-keystone] name=delorean-openstack-keystone-e4b40af0ae3698fbbbbfb8c22468b33aae80e6d7 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/keystone/e4/b4/e4b40af0ae3698fbbbbfb8c22468b33aae80e6d7_264c03cc enabled=1 gpgcheck=0 priority=1 [delorean-component-manila] name=delorean-openstack-manila-3c01b7181572c95dac462eb19c3121e36cb0fe95 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/manila/3c/01/3c01b7181572c95dac462eb19c3121e36cb0fe95_912dfd18 enabled=1 gpgcheck=0 priority=1 [delorean-component-network] name=delorean-python-whitebox-neutron-tests-tempest-12cf06ce36a79a584fc757f4c25ff96845573c93 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/network/12/cf/12cf06ce36a79a584fc757f4c25ff96845573c93_3ed3aba3 enabled=1 gpgcheck=0 priority=1 [delorean-component-octavia] name=delorean-openstack-octavia-ba397f07a7331190208c93368ee23826ac4e2707 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/octavia/ba/39/ba397f07a7331190208c93368ee23826ac4e2707_9d6e596a enabled=1 gpgcheck=0 priority=1 [delorean-component-optimize] name=delorean-openstack-watcher-c014f81a8647287f6dcc339321c1256f5a2e82d5 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/optimize/c0/14/c014f81a8647287f6dcc339321c1256f5a2e82d5_bcbfdccc enabled=1 gpgcheck=0 priority=1 [delorean-component-podified] name=delorean-ansible-config_template-5ccaa22121a7ff05620975540d81f6efb077d8db baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/podified/5c/ca/5ccaa22121a7ff05620975540d81f6efb077d8db_83eb7cc2 enabled=1 gpgcheck=0 priority=1 [delorean-component-puppet] name=delorean-puppet-ceph-7352068d7b8c84ded636ab3158dafa6f3851951e baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/puppet/73/52/7352068d7b8c84ded636ab3158dafa6f3851951e_7cde1ad1 enabled=1 gpgcheck=0 priority=1 [delorean-component-swift] name=delorean-openstack-swift-dc98a8463506ac520c469adb0ef47d0f7753905a 
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/swift/dc/98/dc98a8463506ac520c469adb0ef47d0f7753905a_9d02f069 enabled=1 gpgcheck=0 priority=1 [delorean-component-tempest] name=delorean-python-tempestconf-8515371b7cceebd4282e09f1d8f0cc842df82855 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/tempest/85/15/8515371b7cceebd4282e09f1d8f0cc842df82855_a1e336c7 enabled=1 gpgcheck=0 priority=1 [delorean-component-ui] name=delorean-openstack-heat-ui-013accbfd179753bc3f0d1f4e5bed07a4fd9f771 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/ui/01/3a/013accbfd179753bc3f0d1f4e5bed07a4fd9f771_0c88e467 enabled=1 gpgcheck=0 priority=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-appstream.repo0000644000175000017500000000031615117040653033351 0ustar zuulzuul [repo-setup-centos-appstream] name=repo-setup-centos-appstream baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/AppStream/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-baseos.repo0000644000175000017500000000030415117040653032626 0ustar zuulzuul [repo-setup-centos-baseos] name=repo-setup-centos-baseos baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/BaseOS/$basearch/os/ gpgcheck=0 enabled=1 ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-highavailability.repohome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-highavailabili0000644000175000017500000000034215117040653033345 0ustar zuulzuul [repo-setup-centos-highavailability] name=repo-setup-centos-highavailability baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/HighAvailability/$basearch/os/ gpgcheck=0 enabled=1 ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-powertools.repohome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-powertools.rep0000644000175000017500000000031115117040653033406 0ustar zuulzuul [repo-setup-centos-powertools] name=repo-setup-centos-powertools baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/CRB/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/delorean.repo.md50000644000175000017500000000004115117040652030232 0ustar zuulzuulc3923531bcda0b0811b2d5053f189beb home/zuul/zuul-output/logs/ci-framework-data/artifacts/resolv.conf0000644000175000017500000000015215117042566024531 0ustar zuulzuul# Generated by NetworkManager nameserver 192.168.122.10 nameserver 199.204.44.24 nameserver 199.204.47.54 home/zuul/zuul-output/logs/ci-framework-data/artifacts/hosts0000644000175000017500000000023715117042566023437 0ustar zuulzuul127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4 ::1 localhost localhost.localdomain localhost6 localhost6.localdomain6 home/zuul/zuul-output/logs/ci-framework-data/artifacts/ip-network.txt0000644000175000017500000000315415117042566025215 0ustar zuulzuuldefault via 38.102.83.1 dev eth0 proto dhcp src 38.102.83.97 metric 
100 38.102.83.0/24 dev eth0 proto kernel scope link src 38.102.83.97 metric 100 169.254.169.254 via 38.102.83.126 dev eth0 proto dhcp src 38.102.83.97 metric 100 192.168.122.0/24 dev eth1 proto kernel scope link src 192.168.122.11 metric 101 0: from all lookup local 32766: from all lookup main 32767: from all lookup default [ { "ifindex": 1, "ifname": "lo", "flags": [ "LOOPBACK","UP","LOWER_UP" ], "mtu": 65536, "qdisc": "noqueue", "operstate": "UNKNOWN", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "loopback", "address": "00:00:00:00:00:00", "broadcast": "00:00:00:00:00:00" },{ "ifindex": 2, "ifname": "eth0", "flags": [ "BROADCAST","MULTICAST","UP","LOWER_UP" ], "mtu": 1500, "qdisc": "fq_codel", "operstate": "UP", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "ether", "address": "fa:16:3e:aa:3c:f0", "broadcast": "ff:ff:ff:ff:ff:ff", "altnames": [ "enp0s3","ens3" ] },{ "ifindex": 3, "ifname": "eth1", "flags": [ "BROADCAST","MULTICAST","UP","LOWER_UP" ], "mtu": 1500, "qdisc": "fq_codel", "operstate": "UP", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "ether", "address": "fa:16:3e:e6:21:77", "broadcast": "ff:ff:ff:ff:ff:ff", "altnames": [ "enp0s7","ens7" ] } ] home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_000_check_for_oc.sh0000644000175000017500000000020715117042604027720 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_000_check_for_oc.log) 2>&1 command -v oc home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_000_run_openstack_must_gather.sh0000644000175000017500000000132215117042605032571 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_000_run_openstack_must_gather.log) 2>&1 timeout 2700.0 oc adm must-gather --image quay.io/openstack-k8s-operators/openstack-must-gather:latest --timeout 30m --host-network=False --dest-dir /home/zuul/ci-framework-data/logs/openstack-must-gather -- ADDITIONAL_NAMESPACES=kuttl,openshift-storage,openshift-marketplace,openshift-operators,sushy-emulator,tobiko OPENSTACK_DATABASES=$OPENSTACK_DATABASES SOS_EDPM=$SOS_EDPM SOS_DECOMPRESS=$SOS_DECOMPRESS gather 2>&1 || { rc=$? if [ $rc -eq 124 ]; then echo "The must gather command did not finish on time!" echo "2700.0 seconds was not enough to finish the task." 
fi } home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_000_prepare_root_ssh.sh0000644000175000017500000000122315117043037030672 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_000_prepare_root_ssh.log) 2>&1 ssh -i ~/.ssh/id_cifw core@api.crc.testing < >(tee -i /home/zuul/ci-framework-data/logs/ci_script_000_copy_logs_from_crc.log) 2>&1 scp -v -r -i ~/.ssh/id_cifw core@api.crc.testing:/tmp/crc-logs-artifacts /home/zuul/ci-framework-data/logs/crc/ home/zuul/zuul-output/logs/ci-framework-data/artifacts/zuul_inventory.yml0000644000175000017500000007327415117043064026220 0ustar zuulzuulall: children: zuul_unreachable: hosts: {} hosts: controller: ansible_connection: ssh ansible_host: 38.102.83.97 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_deploy_edpm: false cifmw_dlrn_report_result: false cifmw_extras: - '@scenarios/centos-9/multinode-ci.yml' - '@scenarios/centos-9/horizon.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_run_tests: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: 1500 range: 192.168.122.0/24 internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 3e8fa343-2893-4e1e-9f7a-ef822bd01638 host_id: 144026dc3a6267aa9593dce15cccee121a718779ba75fb6ce23b65e7 interface_ip: 38.102.83.97 label: cloud-centos-9-stream-tripleo-vexxhost private_ipv4: 38.102.83.97 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.97 public_ipv6: '' region: RegionOne slot: null podified_validation: true push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true scenario: nightly_bundles-index_deploy zuul_log_collection: true crc: ansible_connection: ssh ansible_host: 38.102.83.180 ansible_port: 22 ansible_python_interpreter: auto ansible_user: core cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_deploy_edpm: false cifmw_dlrn_report_result: false cifmw_extras: - '@scenarios/centos-9/multinode-ci.yml' - '@scenarios/centos-9/horizon.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_run_tests: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: 1500 range: 192.168.122.0/24 internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 
22 enable_ramdisk: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 6868605f-6684-4979-9a48-308ed352f6d0 host_id: bdb78bf25a270582fae0ca49d447ffffc4c7a50a772a0a4c0593588a interface_ip: 38.102.83.180 label: crc-cloud-ocp-4-20-1-3xl private_ipv4: 38.102.83.180 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.180 public_ipv6: '' region: RegionOne slot: null podified_validation: true push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true scenario: nightly_bundles-index_deploy zuul_log_collection: true localhost: ansible_connection: local vars: cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_deploy_edpm: false cifmw_dlrn_report_result: false cifmw_extras: - '@scenarios/centos-9/multinode-ci.yml' - '@scenarios/centos-9/horizon.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_run_tests: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: 1500 range: 192.168.122.0/24 internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true podified_validation: true push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true scenario: nightly_bundles-index_deploy zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: master build: 9e06bd9ec9c1456eb80ff4a509f0548e build_refs: - branch: master change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null buildset: 5e8ab5d5989f4e03b4743e980e08eed9 buildset_refs: - branch: master change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 child_jobs: [] commit_id: 
3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 event_id: bf7cf120-d774-11f0-9b66-056cd24cb52f executor: hostname: ze01.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/inventory.yaml log_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/logs result_data_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/results.json src_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/src work_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work items: - branch: master change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null job: stf-crc-ocp_420-nightly_bundles-index_deploy jobtags: [] max_attempts: 1 message: QWRkIE9DUCA0LjIwIGpvYnMKCkFkZCBqb2IgZGVmaW5pdGlvbnMgdXNpbmcgY3JjLWNsb3VkLW9jcC00LTIwLTEtM3hsIGFzIGJhc2UgaW1hZ2UNCg0KQ2xvc2VzOiBPU1BSSC0yMTg4MQ== patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 672a220823fac36a8965fa0d3dca764739bb46c0 trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: 9df4e7d5b028e976203d64479f9b7a76c1c95a24 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 672a220823fac36a8965fa0d3dca764739bb46c0 untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: 9df4e7d5b028e976203d64479f9b7a76c1c95a24 untrusted/project_4/github.com/infrawatch/service-telemetry-operator: canonical_name: github.com/infrawatch/service-telemetry-operator checkout: master commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 playbooks: - path: untrusted/project_4/github.com/infrawatch/service-telemetry-operator/ci/deploy_stf.yml roles: - checkout: master checkout_description: playbook branch link_name: ansible/playbook_0/role_0/service-telemetry-operator link_target: untrusted/project_4/github.com/infrawatch/service-telemetry-operator role_path: ansible/playbook_0/role_0/service-telemetry-operator/roles - checkout: main checkout_description: project override ref link_name: ansible/playbook_0/role_1/ci-framework 
link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_1/ci-framework/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_2/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_2/config/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_3/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_3/zuul-jobs/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_4/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_4/rdo-jobs/roles - path: untrusted/project_4/github.com/infrawatch/service-telemetry-operator/ci/test_stf.yml roles: - checkout: master checkout_description: playbook branch link_name: ansible/playbook_1/role_0/service-telemetry-operator link_target: untrusted/project_4/github.com/infrawatch/service-telemetry-operator role_path: ansible/playbook_1/role_0/service-telemetry-operator/roles - checkout: main checkout_description: project override ref link_name: ansible/playbook_1/role_1/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_1/role_1/ci-framework/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_2/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_1/role_2/config/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_3/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_1/role_3/zuul-jobs/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_4/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_1/role_4/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: 42957126d9d9b9d1372615db325b82bd992fa335 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/infrawatch/prometheus-webhook-snmp: canonical_hostname: github.com canonical_name: github.com/infrawatch/prometheus-webhook-snmp checkout: master checkout_description: zuul branch commit: 3959c53b2613d03d066cb1b2fe5bdae8633ae895 name: infrawatch/prometheus-webhook-snmp required: true short_name: prometheus-webhook-snmp src_dir: src/github.com/infrawatch/prometheus-webhook-snmp github.com/infrawatch/service-telemetry-operator: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator checkout: master checkout_description: zuul branch commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 name: infrawatch/service-telemetry-operator required: true short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator github.com/infrawatch/sg-bridge: canonical_hostname: github.com canonical_name: github.com/infrawatch/sg-bridge checkout: master 
checkout_description: zuul branch commit: bab11fba86ad0c21cb35e12b56bf086a3332f1d2 name: infrawatch/sg-bridge required: true short_name: sg-bridge src_dir: src/github.com/infrawatch/sg-bridge github.com/infrawatch/sg-core: canonical_hostname: github.com canonical_name: github.com/infrawatch/sg-core checkout: master checkout_description: zuul branch commit: 5a4aece11fea9f71ce7515d11e1e7f0eae97eea6 name: infrawatch/sg-core required: true short_name: sg-core src_dir: src/github.com/infrawatch/sg-core github.com/infrawatch/smart-gateway-operator: canonical_hostname: github.com canonical_name: github.com/infrawatch/smart-gateway-operator checkout: master checkout_description: zuul branch commit: 9e0945fe8a0e74be8bc9449318446eeb74336986 name: infrawatch/smart-gateway-operator required: true short_name: smart-gateway-operator src_dir: src/github.com/infrawatch/smart-gateway-operator github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: project override ref commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/dataplane-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/dataplane-operator checkout: main checkout_description: project override ref commit: c98b51bcd7fe14b85ed4cf3f5f76552b3455c5f2 name: openstack-k8s-operators/dataplane-operator required: true short_name: dataplane-operator src_dir: src/github.com/openstack-k8s-operators/dataplane-operator github.com/openstack-k8s-operators/edpm-ansible: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/edpm-ansible checkout: main checkout_description: project default branch commit: 78f305a7f43e4024d260a64119f250386daa6420 name: openstack-k8s-operators/edpm-ansible required: true short_name: edpm-ansible src_dir: src/github.com/openstack-k8s-operators/edpm-ansible github.com/openstack-k8s-operators/infra-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/infra-operator checkout: main checkout_description: project override ref commit: 786269345f996bd262360738a1e3c6b09171f370 name: openstack-k8s-operators/infra-operator required: true short_name: infra-operator src_dir: src/github.com/openstack-k8s-operators/infra-operator github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: project default branch commit: 2f838b62fe50aacff3d514af4b502264e0a276a5 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls github.com/openstack-k8s-operators/openstack-baremetal-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator checkout: master checkout_description: zuul branch commit: a333e57066b1d48e41f93af68be81188290a96b3 name: openstack-k8s-operators/openstack-baremetal-operator required: true short_name: openstack-baremetal-operator src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator github.com/openstack-k8s-operators/openstack-must-gather: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-must-gather checkout: 
main checkout_description: project override ref commit: 2da49819dd6af6036aede5e4e9a080ff2c6457de name: openstack-k8s-operators/openstack-must-gather required: true short_name: openstack-must-gather src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather github.com/openstack-k8s-operators/openstack-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-operator checkout: main checkout_description: project override ref commit: 9a923a3e438c4f66834894bfa59207197cf3daea name: openstack-k8s-operators/openstack-operator required: true short_name: openstack-operator src_dir: src/github.com/openstack-k8s-operators/openstack-operator github.com/openstack-k8s-operators/repo-setup: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/repo-setup checkout: main checkout_description: project default branch commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f name: openstack-k8s-operators/repo-setup required: true short_name: repo-setup src_dir: src/github.com/openstack-k8s-operators/repo-setup opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: zuul branch commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: zuul branch commit: 672a220823fac36a8965fa0d3dca764739bb46c0 name: config required: true short_name: config src_dir: src/review.rdoproject.org/config ref: refs/pull/694/head resources: {} tenant: rdoproject.org timeout: 3600 topic: null voting: true zuul_log_collection: true home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible-vars.yml0000644000175000017500000125473515117043064025476 0ustar zuulzuul_included_dir: changed: false failed: false stat: atime: 1765556697.7910533 attr_flags: '' attributes: [] block_size: 4096 blocks: 0 charset: binary ctime: 1765556703.10918 dev: 64513 device_type: 0 executable: true exists: true gid: 1000 gr_name: zuul inode: 121677537 isblk: false ischr: false isdir: true isfifo: false isgid: false islnk: false isreg: false issock: false isuid: false mimetype: inode/directory mode: '0755' mtime: 1765556703.10918 nlink: 2 path: /home/zuul/ci-framework-data/artifacts/parameters pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 120 uid: 1000 version: '3322351157' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true _included_file: changed: false failed: false stat: atime: 1765556702.3251612 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: us-ascii checksum: 9a42881cca85ab4410bd5c98ae90106689995b7e ctime: 1765556702.3281612 dev: 64513 device_type: 0 executable: false exists: true gid: 1000 gr_name: zuul inode: 159430636 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: text/plain mode: '0600' mtime: 1765556702.0311542 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml pw_name: zuul readable: true rgrp: false roth: false rusr: true size: 288 uid: 1000 version: '3779196854' wgrp: false woth: false writeable: true wusr: true xgrp: false xoth: false xusr: false _parsed_vars: changed: false content: 
Y2lmbXdfb3BlbnNoaWZ0X2FwaTogaHR0cHM6Ly9hcGkuY3JjLnRlc3Rpbmc6NjQ0MwpjaWZtd19vcGVuc2hpZnRfY29udGV4dDogZGVmYXVsdC9hcGktY3JjLXRlc3Rpbmc6NjQ0My9rdWJlYWRtaW4KY2lmbXdfb3BlbnNoaWZ0X2t1YmVjb25maWc6IC9ob21lL3p1dWwvLmNyYy9tYWNoaW5lcy9jcmMva3ViZWNvbmZpZwpjaWZtd19vcGVuc2hpZnRfdG9rZW46IHNoYTI1Nn5FRzVwWm1Fdk10bEtiRDk2eHJGOUprM0pvQzd3SWh5aFhCQm1jT09QdlhvCmNpZm13X29wZW5zaGlmdF91c2VyOiBrdWJlYWRtaW4K encoding: base64 failed: false source: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml _tmp_dir: changed: true failed: false gid: 10001 group: zuul mode: '0700' owner: zuul path: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/tmp/ansible.luo3f_1g size: 40 state: directory uid: 10001 _yaml_files: changed: false examined: 4 failed: false files: - atime: 1765556632.2534938 ctime: 1765556630.285447 dev: 64513 gid: 1000 gr_name: zuul inode: 138457755 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0644' mtime: 1765556629.7164333 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml pw_name: zuul rgrp: true roth: true rusr: true size: 20283 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false - atime: 1765556703.10918 ctime: 1765556703.11218 dev: 64513 gid: 1000 gr_name: zuul inode: 4327976 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0600' mtime: 1765556702.9531763 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml pw_name: zuul rgrp: false roth: false rusr: true size: 28065 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false - atime: 1765556697.7920535 ctime: 1765556695.7820055 dev: 64513 gid: 1000 gr_name: zuul inode: 79714367 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0644' mtime: 1765556695.6070013 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml pw_name: zuul rgrp: true roth: true rusr: true size: 1126 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false - atime: 1765556702.3251612 ctime: 1765556702.3281612 dev: 64513 gid: 1000 gr_name: zuul inode: 159430636 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0600' mtime: 1765556702.0311542 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml pw_name: zuul rgrp: false roth: false rusr: true size: 288 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false matched: 4 msg: All paths examined skipped_paths: {} ansible_all_ipv4_addresses: - 38.102.83.97 ansible_all_ipv6_addresses: - fe80::f816:3eff:feaa:3cf0 ansible_apparmor: status: disabled ansible_architecture: x86_64 ansible_bios_date: 04/01/2014 ansible_bios_vendor: SeaBIOS ansible_bios_version: 1.15.0-1 ansible_board_asset_tag: NA ansible_board_name: NA ansible_board_serial: NA ansible_board_vendor: NA ansible_board_version: NA ansible_chassis_asset_tag: NA ansible_chassis_serial: NA ansible_chassis_vendor: QEMU ansible_chassis_version: pc-i440fx-6.2 ansible_check_mode: false ansible_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266 
ansible_collection_name: null ansible_config_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/post_playbook_1/ansible.cfg ansible_connection: ssh ansible_date_time: date: '2025-12-12' day: '12' epoch: '1765557618' epoch_int: '1765557618' hour: '16' iso8601: '2025-12-12T16:40:18Z' iso8601_basic: 20251212T164018962300 iso8601_basic_short: 20251212T164018 iso8601_micro: '2025-12-12T16:40:18.962300Z' minute: '40' month: '12' second: '18' time: '16:40:18' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Friday weekday_number: '5' weeknumber: '49' year: '2025' ansible_default_ipv4: address: 38.102.83.97 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:aa:3c:f0 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether ansible_default_ipv6: {} ansible_dependent_role_names: [] ansible_device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-12-12-16-08-48-00 vda1: - cbdedf45-ed1d-4952-82a8-33a12c0ba266 ansible_devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-12-12-16-08-48-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - cbdedf45-ed1d-4952-82a8-33a12c0ba266 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: cbdedf45-ed1d-4952-82a8-33a12c0ba266 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 ansible_diff_mode: false ansible_distribution: CentOS ansible_distribution_file_parsed: true ansible_distribution_file_path: /etc/centos-release ansible_distribution_file_variety: CentOS ansible_distribution_major_version: '9' ansible_distribution_release: Stream ansible_distribution_version: '9' ansible_dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 ansible_domain: '' ansible_effective_group_id: 1000 ansible_effective_user_id: 1000 ansible_env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 40548 22 SSH_CONNECTION: 38.102.83.114 40548 38.102.83.97 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '16' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f ansible_eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' 
generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.97 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:feaa:3cf0 prefix: '64' scope: link macaddress: fa:16:3e:aa:3c:f0 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether ansible_facts: _ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.97 all_ipv6_addresses: - fe80::f816:3eff:feaa:3cf0 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266 crc_ci_bootstrap_instance_default_net_config: mtu: 1500 range: 192.168.122.0/24 crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: ip: 172.17.0.5 - key: storage value: ip: 172.18.0.5 - key: tenant value: ip: 172.19.0.5 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2025-12-12T16:12:20Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-10.openstacklocal. 
hostname: host-192-168-122-10 ip_address: 192.168.122.10 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.10 subnet_id: c980ef95-579b-4bba-a2cf-aaa697eb56ed hardware_offload_type: null hints: '' id: 5c6b1861-0ebb-4b1f-b057-c128a69c2f2b ip_allocation: immediate mac_address: fa:16:3e:a8:13:c5 name: crc-6868605f-6684-4979-9a48-308ed352f6d0 network_id: fe296b7d-f858-415e-b30f-04968989d58d numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2025-12-12T16:12:20Z' crc_ci_bootstrap_network_name: zuul-ci-net-9e06bd9e crc_ci_bootstrap_networks_out: controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:e6:21:77 mtu: 1500 crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:a8:13:c5 mtu: 1500 internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:b2:c5:bf mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:c0:69:a0 mtu: '1496' parent_iface: ens7 vlan: 21 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:e1:5c:e6 mtu: '1496' parent_iface: ens7 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-12-12T16:11:33Z' description: '' dns_domain: '' id: fe296b7d-f858-415e-b30f-04968989d58d ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: false l2_adjacency: true mtu: 1500 name: zuul-ci-net-9e06bd9e port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2025-12-12T16:11:34Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-12-12T16:11:42Z' description: '' enable_ndp_proxy: null external_gateway_info: enable_snat: true external_fixed_ips: - ip_address: 38.102.83.158 subnet_id: 3169b11b-94b1-4bc9-9727-4fdbbe15e56e network_id: 7abff1a9-a103-46d0-979a-1f1e599f4f41 flavor_id: null id: 00620b58-8061-48ff-affe-fc214a7e9cb5 name: zuul-ci-subnet-router-9e06bd9e project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 3 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2025-12-12T16:11:44Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2025-12-12T16:11:38Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: c980ef95-579b-4bba-a2cf-aaa697eb56ed ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-9e06bd9e network_id: fe296b7d-f858-415e-b30f-04968989d58d project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2025-12-12T16:11:38Z' 
crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-9e06bd9e crc_ci_bootstrap_subnet_name: zuul-ci-subnet-9e06bd9e date_time: date: '2025-12-12' day: '12' epoch: '1765557618' epoch_int: '1765557618' hour: '16' iso8601: '2025-12-12T16:40:18Z' iso8601_basic: 20251212T164018962300 iso8601_basic_short: 20251212T164018 iso8601_micro: '2025-12-12T16:40:18.962300Z' minute: '40' month: '12' second: '18' time: '16:40:18' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Friday weekday_number: '5' weeknumber: '49' year: '2025' default_ipv4: address: 38.102.83.97 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:aa:3c:f0 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-12-12-16-08-48-00 vda1: - cbdedf45-ed1d-4952-82a8-33a12c0ba266 devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-12-12-16-08-48-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - cbdedf45-ed1d-4952-82a8-33a12c0ba266 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: cbdedf45-ed1d-4952-82a8-33a12c0ba266 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: CentOS distribution_file_parsed: true distribution_file_path: /etc/centos-release distribution_file_variety: CentOS distribution_major_version: '9' distribution_release: Stream distribution_version: '9' dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 domain: '' effective_group_id: 1000 effective_user_id: 1000 env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 40548 22 SSH_CONNECTION: 38.102.83.114 40548 38.102.83.97 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '16' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] 
hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.97 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:feaa:3cf0 prefix: '64' scope: link macaddress: fa:16:3e:aa:3c:f0 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: controller gather_subset: - min hostname: controller hostnqn: nqn.2014-08.org.nvmexpress:uuid:e61ebeb9-32de-4b3b-b463-d59237136be4 interfaces: - eth0 - lo is_chroot: false iscsi_iqn: '' kernel: 5.14.0-648.el9.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Dec 5 11:18:23 UTC 2025' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] 
tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.06 1m: 0.24 5m: 0.16 locally_reachable_ips: ipv4: - 38.102.83.97 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:feaa:3cf0 lsb: {} lvm: N/A machine: x86_64 machine_id: 64f1d6692049d8be5e8b216cc203502c memfree_mb: 7120 memory_mb: nocache: free: 7331 used: 348 real: free: 7120 total: 7679 used: 559 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 7679 module_setup: true mounts: - block_available: 20336140 block_size: 4096 block_total: 20954875 block_used: 618735 device: /dev/vda1 fstype: xfs inode_available: 41888385 inode_total: 41942512 inode_used: 54127 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83296829440 size_total: 85831168000 uuid: cbdedf45-ed1d-4952-82a8-33a12c0ba266 nodename: controller os_family: RedHat pkg_mgr: dnf proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266 processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 8 processor_nproc: 8 processor_threads_per_core: 1 processor_vcpus: 8 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.3.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 25 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 25 - final - 0 python_version: 3.9.25 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPIOW+VtnF/Fu/RuR24zXRchiOz7hR0QYB5AEr3y+Rog8EefZdE7lrEycvzxm3PNImSfgERgMwSA4vjDok0fKEQ= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIBB8eIzEKga2gBA89cbnEvfEvFoGpchZEHh+uW/JueDO ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: 
AAAAB3NzaC1yc2EAAAADAQABAAABgQC+ncO1XqWbLlBF6ck1nTNn2UrTbaFBcVm+kh6XtCVAHkrjiGUF0j+0iuO0/3XBmR2RP+HhbYoMqcST7u0uMw8z6z7q2k7USnS6L3SauEdgu6yT4a3OSKeFnHjLxLwEqnQ2+aEfy+ApcIlyKiTGgSSZ89yni637VO2jMD1mU91RfBmVRzmxakK0OQOAZwte7UTK0PtXcjC8ws/x/iaGeAEJOKRDEBmJSXZkI9c/u9fDOuM7I36+syNIdmBUhk9kpvfJaeVPyCPHeyEBbIhCNdO8m1vo4n8/JYLvzIzg+3sIBVWYtTLYCVyEsb7Ecq7+dGmOR9ShqlxeA9bMM19/nChXHNky1WO4qPpgAO4yY6jG+4cYaUtiwsbS6K2wtgLhibqgQp8w3Md31vdcnhVmxEUtfM0vM1ynuRKDZ3jTwBa6ap8HnZ1GIgyhyAT/XHp4agpBbuP3/DbozPGEUDXmIMGRVLca0sLcjOL3w/PUx8oD7i//dbhZ6ymrCfTGuOmikuk= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 108 user_dir: /home/zuul user_gecos: '' user_gid: 1000 user_id: zuul user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack zuul_change_list: - service-telemetry-operator ansible_fibre_channel_wwn: [] ansible_fips: false ansible_forks: 5 ansible_form_factor: Other ansible_fqdn: controller ansible_host: 38.102.83.97 ansible_hostname: controller ansible_hostnqn: nqn.2014-08.org.nvmexpress:uuid:e61ebeb9-32de-4b3b-b463-d59237136be4 ansible_interfaces: - eth0 - lo ansible_inventory_sources: - /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/post_playbook_1/inventory.yaml ansible_is_chroot: false ansible_iscsi_iqn: '' ansible_kernel: 5.14.0-648.el9.x86_64 ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Dec 5 11:18:23 UTC 2025' ansible_lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] 
vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback ansible_loadavg: 15m: 0.06 1m: 0.24 5m: 0.16 ansible_local: {} ansible_locally_reachable_ips: ipv4: - 38.102.83.97 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:feaa:3cf0 ansible_lsb: {} ansible_lvm: N/A ansible_machine: x86_64 ansible_machine_id: 64f1d6692049d8be5e8b216cc203502c ansible_memfree_mb: 7120 ansible_memory_mb: nocache: free: 7331 used: 348 real: free: 7120 total: 7679 used: 559 swap: cached: 0 free: 0 total: 0 used: 0 ansible_memtotal_mb: 7679 ansible_mounts: - block_available: 20336140 block_size: 4096 block_total: 20954875 block_used: 618735 device: /dev/vda1 fstype: xfs inode_available: 41888385 inode_total: 41942512 inode_used: 54127 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83296829440 size_total: 85831168000 uuid: cbdedf45-ed1d-4952-82a8-33a12c0ba266 ansible_nodename: controller ansible_os_family: RedHat ansible_parent_role_names: - cifmw_setup ansible_parent_role_paths: - /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/roles/cifmw_setup ansible_pkg_mgr: dnf ansible_play_batch: &id002 - controller ansible_play_hosts: - controller ansible_play_hosts_all: - controller - crc ansible_play_name: Run ci/playbooks/e2e-collect-logs.yml ansible_play_role_names: &id003 - run_hook - os_must_gather - artifacts - env_op_images - run_hook - cifmw_setup ansible_playbook_python: /usr/lib/zuul/ansible/8/bin/python ansible_port: 22 ansible_proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266 ansible_processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor ansible_processor_cores: 1 ansible_processor_count: 8 ansible_processor_nproc: 8 ansible_processor_threads_per_core: 1 ansible_processor_vcpus: 8 ansible_product_name: OpenStack Nova ansible_product_serial: NA ansible_product_uuid: NA ansible_product_version: 26.3.1 ansible_python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 25 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 25 - final - 0 ansible_python_interpreter: auto ansible_python_version: 3.9.25 ansible_real_group_id: 1000 ansible_real_user_id: 1000 ansible_role_name: artifacts ansible_role_names: - cifmw_setup - run_hook - artifacts - env_op_images - os_must_gather ansible_run_tags: - all ansible_scp_extra_args: -o PermitLocalCommand=no ansible_selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted ansible_selinux_python_present: true ansible_service_mgr: systemd ansible_sftp_extra_args: -o PermitLocalCommand=no ansible_skip_tags: [] ansible_ssh_common_args: -o PermitLocalCommand=no ansible_ssh_executable: ssh ansible_ssh_extra_args: -o PermitLocalCommand=no ansible_ssh_host_key_ecdsa_public: 
AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPIOW+VtnF/Fu/RuR24zXRchiOz7hR0QYB5AEr3y+Rog8EefZdE7lrEycvzxm3PNImSfgERgMwSA4vjDok0fKEQ= ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIBB8eIzEKga2gBA89cbnEvfEvFoGpchZEHh+uW/JueDO ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519 ansible_ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQC+ncO1XqWbLlBF6ck1nTNn2UrTbaFBcVm+kh6XtCVAHkrjiGUF0j+0iuO0/3XBmR2RP+HhbYoMqcST7u0uMw8z6z7q2k7USnS6L3SauEdgu6yT4a3OSKeFnHjLxLwEqnQ2+aEfy+ApcIlyKiTGgSSZ89yni637VO2jMD1mU91RfBmVRzmxakK0OQOAZwte7UTK0PtXcjC8ws/x/iaGeAEJOKRDEBmJSXZkI9c/u9fDOuM7I36+syNIdmBUhk9kpvfJaeVPyCPHeyEBbIhCNdO8m1vo4n8/JYLvzIzg+3sIBVWYtTLYCVyEsb7Ecq7+dGmOR9ShqlxeA9bMM19/nChXHNky1WO4qPpgAO4yY6jG+4cYaUtiwsbS6K2wtgLhibqgQp8w3Md31vdcnhVmxEUtfM0vM1ynuRKDZ3jTwBa6ap8HnZ1GIgyhyAT/XHp4agpBbuP3/DbozPGEUDXmIMGRVLca0sLcjOL3w/PUx8oD7i//dbhZ6ymrCfTGuOmikuk= ansible_ssh_host_key_rsa_public_keytype: ssh-rsa ansible_swapfree_mb: 0 ansible_swaptotal_mb: 0 ansible_system: Linux ansible_system_capabilities: - '' ansible_system_capabilities_enforced: 'True' ansible_system_vendor: OpenStack Foundation ansible_uptime_seconds: 108 ansible_user: zuul ansible_user_dir: /home/zuul ansible_user_gecos: '' ansible_user_gid: 1000 ansible_user_id: zuul ansible_user_shell: /bin/bash ansible_user_uid: 1000 ansible_userspace_architecture: x86_64 ansible_userspace_bits: '64' ansible_verbosity: 1 ansible_version: full: 2.15.12 major: 2 minor: 15 revision: 12 string: 2.15.12 ansible_virtualization_role: guest ansible_virtualization_tech_guest: - openstack ansible_virtualization_tech_host: - kvm ansible_virtualization_type: openstack cifmw_architecture_repo: /home/zuul/src/github.com/openstack-k8s-operators/architecture cifmw_architecture_repo_relative: src/github.com/openstack-k8s-operators/architecture cifmw_artifacts_basedir: '{{ cifmw_basedir | default(ansible_user_dir ~ ''/ci-framework-data'') }}' cifmw_artifacts_crc_host: api.crc.testing cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_artifacts_crc_sshkey_ed25519: ~/.crc/machines/crc/id_ed25519 cifmw_artifacts_crc_user: core cifmw_artifacts_gather_logs: true cifmw_artifacts_mask_logs: true cifmw_basedir: /home/zuul/ci-framework-data cifmw_deploy_edpm: false cifmw_dlrn_report_result: false cifmw_env_op_images_dir: '{{ cifmw_basedir | default(ansible_user_dir ~ ''/ci-framework-data'') }}' cifmw_env_op_images_dryrun: false cifmw_env_op_images_file: operator_images.yaml cifmw_extras: - '@scenarios/centos-9/multinode-ci.yml' - '@scenarios/centos-9/horizon.yml' cifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' 
BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sE**********U= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_CLEANUP: 'true' BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: '' BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: 
https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' 
EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 76**********f0 HEAT_BRANCH: main HEAT_COMMIT_HASH: '' HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: 'true' INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 
IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE: quay.io/metal3-io/ironic IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: CO**********6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests 
MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '12**********42' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 
NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: 
https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12**********78' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/playbooks/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: os**********et SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: test/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' tripleo_deploy: 'export REGISTRY_USER:' 
cifmw_install_yamls_environment: CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm cifmw_installyamls_repos: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls cifmw_installyamls_repos_relative: src/github.com/openstack-k8s-operators/install_yamls cifmw_openshift_api: https://api.crc.testing:6443 cifmw_openshift_context: default/api-crc-testing:6443/kubeadmin cifmw_openshift_kubeconfig: /home/zuul/.crc/machines/crc/kubeconfig cifmw_openshift_password: '12**********89' cifmw_openshift_skip_tls_verify: true cifmw_openshift_token: sha256~EG5pZmEvMtlKbD96xrF9Jk3JoC7wIhyhXBBmcOOPvXo cifmw_openshift_user: kubeadmin cifmw_os_must_gather_additional_namespaces: kuttl,openshift-storage,openshift-marketplace,openshift-operators,sushy-emulator,tobiko cifmw_os_must_gather_dump_db: ALL cifmw_os_must_gather_host_network: false cifmw_os_must_gather_image: quay.io/openstack-k8s-operators/openstack-must-gather:latest cifmw_os_must_gather_image_push: true cifmw_os_must_gather_image_registry: quay.rdoproject.org/openstack-k8s-operators cifmw_os_must_gather_kubeconfig: '{{ ansible_user_dir }}/.kube/config' cifmw_os_must_gather_namespaces: - openstack-operators - openstack - baremetal-operator-system - openshift-machine-api - cert-manager - openshift-nmstate - openshift-marketplace - metallb-system - crc-storage cifmw_os_must_gather_output_dir: '{{ cifmw_basedir | default(ansible_user_dir ~ ''/ci-framework-data'') }}' cifmw_os_must_gather_output_log_dir: '{{ cifmw_os_must_gather_output_dir }}/logs/openstack-must-gather' cifmw_os_must_gather_repo_path: '{{ ansible_user_dir }}/src/github.com/openstack-k8s-operators/openstack-must-gather' cifmw_os_must_gather_timeout: 30m cifmw_path: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:~/.crc/bin:~/.crc/bin/oc:~/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin cifmw_project_dir: src/github.com/openstack-k8s-operators/ci-framework cifmw_project_dir_absolute: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework cifmw_run_hook_debug: '{{ (ansible_verbosity | int) >= 2 | bool }}' cifmw_run_tests: false cifmw_status: changed: false failed: false stat: atime: 1765556745.4361868 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: binary ctime: 1765556749.1652756 dev: 64513 device_type: 0 executable: true exists: true gid: 1000 gr_name: zuul inode: 8715506 isblk: false ischr: false isdir: true isfifo: false isgid: false islnk: false isreg: false issock: false isuid: false mimetype: inode/directory mode: '0755' mtime: 1765556749.1652756 nlink: 21 path: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 4096 uid: 1000 version: '1470163367' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true cifmw_success_flag: changed: false failed: false stat: exists: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_instance_default_net_config: mtu: 1500 range: 192.168.122.0/24 crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: ip: 172.17.0.5 - key: storage value: ip: 172.18.0.5 - key: tenant value: ip: 172.19.0.5 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true 
allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2025-12-12T16:12:20Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-10.openstacklocal. hostname: host-192-168-122-10 ip_address: 192.168.122.10 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.10 subnet_id: c980ef95-579b-4bba-a2cf-aaa697eb56ed hardware_offload_type: null hints: '' id: 5c6b1861-0ebb-4b1f-b057-c128a69c2f2b ip_allocation: immediate mac_address: fa:16:3e:a8:13:c5 name: crc-6868605f-6684-4979-9a48-308ed352f6d0 network_id: fe296b7d-f858-415e-b30f-04968989d58d numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2025-12-12T16:12:20Z' crc_ci_bootstrap_network_name: zuul-ci-net-9e06bd9e crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: 1500 range: 192.168.122.0/24 internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 crc_ci_bootstrap_networks_out: controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:e6:21:77 mtu: 1500 crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:a8:13:c5 mtu: 1500 internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:b2:c5:bf mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:c0:69:a0 mtu: '1496' parent_iface: ens7 vlan: 21 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:e1:5c:e6 mtu: '1496' parent_iface: ens7 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-12-12T16:11:33Z' description: '' dns_domain: '' id: fe296b7d-f858-415e-b30f-04968989d58d ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: false l2_adjacency: true mtu: 1500 name: zuul-ci-net-9e06bd9e port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2025-12-12T16:11:34Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-12-12T16:11:42Z' description: '' enable_ndp_proxy: null external_gateway_info: enable_snat: true external_fixed_ips: - ip_address: 38.102.83.158 subnet_id: 3169b11b-94b1-4bc9-9727-4fdbbe15e56e network_id: 7abff1a9-a103-46d0-979a-1f1e599f4f41 flavor_id: null id: 00620b58-8061-48ff-affe-fc214a7e9cb5 name: zuul-ci-subnet-router-9e06bd9e project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 3 routes: [] status: ACTIVE tags: [] tenant_id: 
4b633c451ac74233be3721a3635275e5 updated_at: '2025-12-12T16:11:44Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2025-12-12T16:11:38Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: c980ef95-579b-4bba-a2cf-aaa697eb56ed ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-9e06bd9e network_id: fe296b7d-f858-415e-b30f-04968989d58d project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2025-12-12T16:11:38Z' crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-9e06bd9e crc_ci_bootstrap_subnet_name: zuul-ci-subnet-9e06bd9e discovered_interpreter_python: /usr/bin/python3 enable_ramdisk: true environment: - ANSIBLE_LOG_PATH: '{{ ansible_user_dir }}/ci-framework-data/logs/e2e-collect-logs-must-gather.log' gather_subset: - min group_names: - ungrouped groups: all: - controller - crc ungrouped: &id001 - controller - crc zuul_unreachable: [] hostvars: controller: _included_dir: changed: false failed: false stat: atime: 1765556697.7910533 attr_flags: '' attributes: [] block_size: 4096 blocks: 0 charset: binary ctime: 1765556703.10918 dev: 64513 device_type: 0 executable: true exists: true gid: 1000 gr_name: zuul inode: 121677537 isblk: false ischr: false isdir: true isfifo: false isgid: false islnk: false isreg: false issock: false isuid: false mimetype: inode/directory mode: '0755' mtime: 1765556703.10918 nlink: 2 path: /home/zuul/ci-framework-data/artifacts/parameters pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 120 uid: 1000 version: '3322351157' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true _included_file: changed: false failed: false stat: atime: 1765556702.3251612 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: us-ascii checksum: 9a42881cca85ab4410bd5c98ae90106689995b7e ctime: 1765556702.3281612 dev: 64513 device_type: 0 executable: false exists: true gid: 1000 gr_name: zuul inode: 159430636 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: text/plain mode: '0600' mtime: 1765556702.0311542 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml pw_name: zuul readable: true rgrp: false roth: false rusr: true size: 288 uid: 1000 version: '3779196854' wgrp: false woth: false writeable: true wusr: true xgrp: false xoth: false xusr: false _parsed_vars: changed: false content: Y2lmbXdfb3BlbnNoaWZ0X2FwaTogaHR0cHM6Ly9hcGkuY3JjLnRlc3Rpbmc6NjQ0MwpjaWZtd19vcGVuc2hpZnRfY29udGV4dDogZGVmYXVsdC9hcGktY3JjLXRlc3Rpbmc6NjQ0My9rdWJlYWRtaW4KY2lmbXdfb3BlbnNoaWZ0X2t1YmVjb25maWc6IC9ob21lL3p1dWwvLmNyYy9tYWNoaW5lcy9jcmMva3ViZWNvbmZpZwpjaWZtd19vcGVuc2hpZnRfdG9rZW46IHNoYTI1Nn5FRzVwWm1Fdk10bEtiRDk2eHJGOUprM0pvQzd3SWh5aFhCQm1jT09QdlhvCmNpZm13X29wZW5zaGlmdF91c2VyOiBrdWJlYWRtaW4K encoding: base64 failed: false source: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml _tmp_dir: changed: true failed: false gid: 10001 group: zuul mode: '0700' owner: zuul path: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/tmp/ansible.luo3f_1g size: 40 state: directory uid: 10001 _yaml_files: changed: false examined: 4 failed: false files: - atime: 1765556632.2534938 ctime: 
1765556630.285447 dev: 64513 gid: 1000 gr_name: zuul inode: 138457755 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0644' mtime: 1765556629.7164333 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml pw_name: zuul rgrp: true roth: true rusr: true size: 20283 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false - atime: 1765556703.10918 ctime: 1765556703.11218 dev: 64513 gid: 1000 gr_name: zuul inode: 4327976 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0600' mtime: 1765556702.9531763 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml pw_name: zuul rgrp: false roth: false rusr: true size: 28065 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false - atime: 1765556697.7920535 ctime: 1765556695.7820055 dev: 64513 gid: 1000 gr_name: zuul inode: 79714367 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0644' mtime: 1765556695.6070013 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml pw_name: zuul rgrp: true roth: true rusr: true size: 1126 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false - atime: 1765556702.3251612 ctime: 1765556702.3281612 dev: 64513 gid: 1000 gr_name: zuul inode: 159430636 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0600' mtime: 1765556702.0311542 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml pw_name: zuul rgrp: false roth: false rusr: true size: 288 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false matched: 4 msg: All paths examined skipped_paths: {} ansible_all_ipv4_addresses: - 38.102.83.97 ansible_all_ipv6_addresses: - fe80::f816:3eff:feaa:3cf0 ansible_apparmor: status: disabled ansible_architecture: x86_64 ansible_bios_date: 04/01/2014 ansible_bios_vendor: SeaBIOS ansible_bios_version: 1.15.0-1 ansible_board_asset_tag: NA ansible_board_name: NA ansible_board_serial: NA ansible_board_vendor: NA ansible_board_version: NA ansible_chassis_asset_tag: NA ansible_chassis_serial: NA ansible_chassis_vendor: QEMU ansible_chassis_version: pc-i440fx-6.2 ansible_check_mode: false ansible_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266 ansible_config_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/post_playbook_1/ansible.cfg ansible_connection: ssh ansible_date_time: date: '2025-12-12' day: '12' epoch: '1765557618' epoch_int: '1765557618' hour: '16' iso8601: '2025-12-12T16:40:18Z' iso8601_basic: 20251212T164018962300 iso8601_basic_short: 20251212T164018 iso8601_micro: '2025-12-12T16:40:18.962300Z' minute: '40' month: '12' second: '18' time: '16:40:18' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Friday weekday_number: '5' weeknumber: '49' year: '2025' ansible_default_ipv4: address: 38.102.83.97 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:aa:3c:f0 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether ansible_default_ipv6: {} 
ansible_device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-12-12-16-08-48-00 vda1: - cbdedf45-ed1d-4952-82a8-33a12c0ba266 ansible_devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-12-12-16-08-48-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - cbdedf45-ed1d-4952-82a8-33a12c0ba266 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: cbdedf45-ed1d-4952-82a8-33a12c0ba266 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 ansible_diff_mode: false ansible_distribution: CentOS ansible_distribution_file_parsed: true ansible_distribution_file_path: /etc/centos-release ansible_distribution_file_variety: CentOS ansible_distribution_major_version: '9' ansible_distribution_release: Stream ansible_distribution_version: '9' ansible_dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 ansible_domain: '' ansible_effective_group_id: 1000 ansible_effective_user_id: 1000 ansible_env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 40548 22 SSH_CONNECTION: 38.102.83.114 40548 38.102.83.97 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '16' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f ansible_eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 
'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.97 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:feaa:3cf0 prefix: '64' scope: link macaddress: fa:16:3e:aa:3c:f0 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether ansible_facts: _ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.97 all_ipv6_addresses: - fe80::f816:3eff:feaa:3cf0 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266 crc_ci_bootstrap_instance_default_net_config: mtu: 1500 range: 192.168.122.0/24 crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: ip: 172.17.0.5 - key: storage value: ip: 172.18.0.5 - key: tenant value: ip: 172.19.0.5 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2025-12-12T16:12:20Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-10.openstacklocal. 
hostname: host-192-168-122-10 ip_address: 192.168.122.10 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.10 subnet_id: c980ef95-579b-4bba-a2cf-aaa697eb56ed hardware_offload_type: null hints: '' id: 5c6b1861-0ebb-4b1f-b057-c128a69c2f2b ip_allocation: immediate mac_address: fa:16:3e:a8:13:c5 name: crc-6868605f-6684-4979-9a48-308ed352f6d0 network_id: fe296b7d-f858-415e-b30f-04968989d58d numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2025-12-12T16:12:20Z' crc_ci_bootstrap_network_name: zuul-ci-net-9e06bd9e crc_ci_bootstrap_networks_out: controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:e6:21:77 mtu: 1500 crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:a8:13:c5 mtu: 1500 internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:b2:c5:bf mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:c0:69:a0 mtu: '1496' parent_iface: ens7 vlan: 21 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:e1:5c:e6 mtu: '1496' parent_iface: ens7 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-12-12T16:11:33Z' description: '' dns_domain: '' id: fe296b7d-f858-415e-b30f-04968989d58d ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: false l2_adjacency: true mtu: 1500 name: zuul-ci-net-9e06bd9e port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2025-12-12T16:11:34Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-12-12T16:11:42Z' description: '' enable_ndp_proxy: null external_gateway_info: enable_snat: true external_fixed_ips: - ip_address: 38.102.83.158 subnet_id: 3169b11b-94b1-4bc9-9727-4fdbbe15e56e network_id: 7abff1a9-a103-46d0-979a-1f1e599f4f41 flavor_id: null id: 00620b58-8061-48ff-affe-fc214a7e9cb5 name: zuul-ci-subnet-router-9e06bd9e project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 3 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2025-12-12T16:11:44Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2025-12-12T16:11:38Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: c980ef95-579b-4bba-a2cf-aaa697eb56ed ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-9e06bd9e network_id: fe296b7d-f858-415e-b30f-04968989d58d project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2025-12-12T16:11:38Z' 
crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-9e06bd9e crc_ci_bootstrap_subnet_name: zuul-ci-subnet-9e06bd9e date_time: date: '2025-12-12' day: '12' epoch: '1765557618' epoch_int: '1765557618' hour: '16' iso8601: '2025-12-12T16:40:18Z' iso8601_basic: 20251212T164018962300 iso8601_basic_short: 20251212T164018 iso8601_micro: '2025-12-12T16:40:18.962300Z' minute: '40' month: '12' second: '18' time: '16:40:18' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Friday weekday_number: '5' weeknumber: '49' year: '2025' default_ipv4: address: 38.102.83.97 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:aa:3c:f0 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-12-12-16-08-48-00 vda1: - cbdedf45-ed1d-4952-82a8-33a12c0ba266 devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-12-12-16-08-48-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - cbdedf45-ed1d-4952-82a8-33a12c0ba266 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: cbdedf45-ed1d-4952-82a8-33a12c0ba266 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: CentOS distribution_file_parsed: true distribution_file_path: /etc/centos-release distribution_file_variety: CentOS distribution_major_version: '9' distribution_release: Stream distribution_version: '9' dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 domain: '' effective_group_id: 1000 effective_user_id: 1000 env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 40548 22 SSH_CONNECTION: 38.102.83.114 40548 38.102.83.97 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '16' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] 
hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.97 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:feaa:3cf0 prefix: '64' scope: link macaddress: fa:16:3e:aa:3c:f0 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: controller gather_subset: - min hostname: controller hostnqn: nqn.2014-08.org.nvmexpress:uuid:e61ebeb9-32de-4b3b-b463-d59237136be4 interfaces: - eth0 - lo is_chroot: false iscsi_iqn: '' kernel: 5.14.0-648.el9.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Dec 5 11:18:23 UTC 2025' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] 
tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.06 1m: 0.24 5m: 0.16 locally_reachable_ips: ipv4: - 38.102.83.97 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:feaa:3cf0 lsb: {} lvm: N/A machine: x86_64 machine_id: 64f1d6692049d8be5e8b216cc203502c memfree_mb: 7120 memory_mb: nocache: free: 7331 used: 348 real: free: 7120 total: 7679 used: 559 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 7679 module_setup: true mounts: - block_available: 20336140 block_size: 4096 block_total: 20954875 block_used: 618735 device: /dev/vda1 fstype: xfs inode_available: 41888385 inode_total: 41942512 inode_used: 54127 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83296829440 size_total: 85831168000 uuid: cbdedf45-ed1d-4952-82a8-33a12c0ba266 nodename: controller os_family: RedHat pkg_mgr: dnf proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266 processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 8 processor_nproc: 8 processor_threads_per_core: 1 processor_vcpus: 8 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.3.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 25 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 25 - final - 0 python_version: 3.9.25 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPIOW+VtnF/Fu/RuR24zXRchiOz7hR0QYB5AEr3y+Rog8EefZdE7lrEycvzxm3PNImSfgERgMwSA4vjDok0fKEQ= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIBB8eIzEKga2gBA89cbnEvfEvFoGpchZEHh+uW/JueDO ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: 
AAAAB3NzaC1yc2EAAAADAQABAAABgQC+ncO1XqWbLlBF6ck1nTNn2UrTbaFBcVm+kh6XtCVAHkrjiGUF0j+0iuO0/3XBmR2RP+HhbYoMqcST7u0uMw8z6z7q2k7USnS6L3SauEdgu6yT4a3OSKeFnHjLxLwEqnQ2+aEfy+ApcIlyKiTGgSSZ89yni637VO2jMD1mU91RfBmVRzmxakK0OQOAZwte7UTK0PtXcjC8ws/x/iaGeAEJOKRDEBmJSXZkI9c/u9fDOuM7I36+syNIdmBUhk9kpvfJaeVPyCPHeyEBbIhCNdO8m1vo4n8/JYLvzIzg+3sIBVWYtTLYCVyEsb7Ecq7+dGmOR9ShqlxeA9bMM19/nChXHNky1WO4qPpgAO4yY6jG+4cYaUtiwsbS6K2wtgLhibqgQp8w3Md31vdcnhVmxEUtfM0vM1ynuRKDZ3jTwBa6ap8HnZ1GIgyhyAT/XHp4agpBbuP3/DbozPGEUDXmIMGRVLca0sLcjOL3w/PUx8oD7i//dbhZ6ymrCfTGuOmikuk= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 108 user_dir: /home/zuul user_gecos: '' user_gid: 1000 user_id: zuul user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack zuul_change_list: - service-telemetry-operator ansible_fibre_channel_wwn: [] ansible_fips: false ansible_forks: 5 ansible_form_factor: Other ansible_fqdn: controller ansible_host: 38.102.83.97 ansible_hostname: controller ansible_hostnqn: nqn.2014-08.org.nvmexpress:uuid:e61ebeb9-32de-4b3b-b463-d59237136be4 ansible_interfaces: - eth0 - lo ansible_inventory_sources: - /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/post_playbook_1/inventory.yaml ansible_is_chroot: false ansible_iscsi_iqn: '' ansible_kernel: 5.14.0-648.el9.x86_64 ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Dec 5 11:18:23 UTC 2025' ansible_lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] 
vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback ansible_loadavg: 15m: 0.06 1m: 0.24 5m: 0.16 ansible_local: {} ansible_locally_reachable_ips: ipv4: - 38.102.83.97 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:feaa:3cf0 ansible_lsb: {} ansible_lvm: N/A ansible_machine: x86_64 ansible_machine_id: 64f1d6692049d8be5e8b216cc203502c ansible_memfree_mb: 7120 ansible_memory_mb: nocache: free: 7331 used: 348 real: free: 7120 total: 7679 used: 559 swap: cached: 0 free: 0 total: 0 used: 0 ansible_memtotal_mb: 7679 ansible_mounts: - block_available: 20336140 block_size: 4096 block_total: 20954875 block_used: 618735 device: /dev/vda1 fstype: xfs inode_available: 41888385 inode_total: 41942512 inode_used: 54127 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83296829440 size_total: 85831168000 uuid: cbdedf45-ed1d-4952-82a8-33a12c0ba266 ansible_nodename: controller ansible_os_family: RedHat ansible_pkg_mgr: dnf ansible_playbook_python: /usr/lib/zuul/ansible/8/bin/python ansible_port: 22 ansible_proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266 ansible_processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor ansible_processor_cores: 1 ansible_processor_count: 8 ansible_processor_nproc: 8 ansible_processor_threads_per_core: 1 ansible_processor_vcpus: 8 ansible_product_name: OpenStack Nova ansible_product_serial: NA ansible_product_uuid: NA ansible_product_version: 26.3.1 ansible_python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 25 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 25 - final - 0 ansible_python_interpreter: auto ansible_python_version: 3.9.25 ansible_real_group_id: 1000 ansible_real_user_id: 1000 ansible_run_tags: - all ansible_scp_extra_args: -o PermitLocalCommand=no ansible_selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted ansible_selinux_python_present: true ansible_service_mgr: systemd ansible_sftp_extra_args: -o PermitLocalCommand=no ansible_skip_tags: [] ansible_ssh_common_args: -o PermitLocalCommand=no ansible_ssh_executable: ssh ansible_ssh_extra_args: -o PermitLocalCommand=no ansible_ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPIOW+VtnF/Fu/RuR24zXRchiOz7hR0QYB5AEr3y+Rog8EefZdE7lrEycvzxm3PNImSfgERgMwSA4vjDok0fKEQ= ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIBB8eIzEKga2gBA89cbnEvfEvFoGpchZEHh+uW/JueDO ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519 ansible_ssh_host_key_rsa_public: 
AAAAB3NzaC1yc2EAAAADAQABAAABgQC+ncO1XqWbLlBF6ck1nTNn2UrTbaFBcVm+kh6XtCVAHkrjiGUF0j+0iuO0/3XBmR2RP+HhbYoMqcST7u0uMw8z6z7q2k7USnS6L3SauEdgu6yT4a3OSKeFnHjLxLwEqnQ2+aEfy+ApcIlyKiTGgSSZ89yni637VO2jMD1mU91RfBmVRzmxakK0OQOAZwte7UTK0PtXcjC8ws/x/iaGeAEJOKRDEBmJSXZkI9c/u9fDOuM7I36+syNIdmBUhk9kpvfJaeVPyCPHeyEBbIhCNdO8m1vo4n8/JYLvzIzg+3sIBVWYtTLYCVyEsb7Ecq7+dGmOR9ShqlxeA9bMM19/nChXHNky1WO4qPpgAO4yY6jG+4cYaUtiwsbS6K2wtgLhibqgQp8w3Md31vdcnhVmxEUtfM0vM1ynuRKDZ3jTwBa6ap8HnZ1GIgyhyAT/XHp4agpBbuP3/DbozPGEUDXmIMGRVLca0sLcjOL3w/PUx8oD7i//dbhZ6ymrCfTGuOmikuk= ansible_ssh_host_key_rsa_public_keytype: ssh-rsa ansible_swapfree_mb: 0 ansible_swaptotal_mb: 0 ansible_system: Linux ansible_system_capabilities: - '' ansible_system_capabilities_enforced: 'True' ansible_system_vendor: OpenStack Foundation ansible_uptime_seconds: 108 ansible_user: zuul ansible_user_dir: /home/zuul ansible_user_gecos: '' ansible_user_gid: 1000 ansible_user_id: zuul ansible_user_shell: /bin/bash ansible_user_uid: 1000 ansible_userspace_architecture: x86_64 ansible_userspace_bits: '64' ansible_verbosity: 1 ansible_version: full: 2.15.12 major: 2 minor: 15 revision: 12 string: 2.15.12 ansible_virtualization_role: guest ansible_virtualization_tech_guest: - openstack ansible_virtualization_tech_host: - kvm ansible_virtualization_type: openstack cifmw_architecture_repo: /home/zuul/src/github.com/openstack-k8s-operators/architecture cifmw_architecture_repo_relative: src/github.com/openstack-k8s-operators/architecture cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_basedir: /home/zuul/ci-framework-data cifmw_deploy_edpm: false cifmw_dlrn_report_result: false cifmw_extras: - '@scenarios/centos-9/multinode-ci.yml' - '@scenarios/centos-9/horizon.yml' cifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sE**********U= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: 
quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_CLEANUP: 'true' BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: '' BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: 
'52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 76**********f0 HEAT_BRANCH: main HEAT_COMMIT_HASH: '' HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: 'true' INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE: quay.io/metal3-io/ironic IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: CO**********6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '12**********42' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12**********78' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests 
PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/playbooks/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: os**********et SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: test/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' tripleo_deploy: 'export REGISTRY_USER:' cifmw_install_yamls_environment: CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm cifmw_installyamls_repos: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls cifmw_installyamls_repos_relative: src/github.com/openstack-k8s-operators/install_yamls cifmw_openshift_api: https://api.crc.testing:6443 cifmw_openshift_context: default/api-crc-testing:6443/kubeadmin cifmw_openshift_kubeconfig: /home/zuul/.crc/machines/crc/kubeconfig cifmw_openshift_password: '12**********89' 
cifmw_openshift_skip_tls_verify: true cifmw_openshift_token: sha256~EG5pZmEvMtlKbD96xrF9Jk3JoC7wIhyhXBBmcOOPvXo cifmw_openshift_user: kubeadmin cifmw_path: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:~/.crc/bin:~/.crc/bin/oc:~/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin cifmw_project_dir: src/github.com/openstack-k8s-operators/ci-framework cifmw_project_dir_absolute: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework cifmw_run_tests: false cifmw_status: changed: false failed: false stat: atime: 1765556745.4361868 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: binary ctime: 1765556749.1652756 dev: 64513 device_type: 0 executable: true exists: true gid: 1000 gr_name: zuul inode: 8715506 isblk: false ischr: false isdir: true isfifo: false isgid: false islnk: false isreg: false issock: false isuid: false mimetype: inode/directory mode: '0755' mtime: 1765556749.1652756 nlink: 21 path: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 4096 uid: 1000 version: '1470163367' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true cifmw_success_flag: changed: false failed: false stat: exists: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller crc_ci_bootstrap_cloud_name: vexxhost crc_ci_bootstrap_instance_default_net_config: mtu: 1500 range: 192.168.122.0/24 crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: ip: 172.17.0.5 - key: storage value: ip: 172.18.0.5 - key: tenant value: ip: 172.19.0.5 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2025-12-12T16:12:20Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-10.openstacklocal. 
hostname: host-192-168-122-10 ip_address: 192.168.122.10 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.10 subnet_id: c980ef95-579b-4bba-a2cf-aaa697eb56ed hardware_offload_type: null hints: '' id: 5c6b1861-0ebb-4b1f-b057-c128a69c2f2b ip_allocation: immediate mac_address: fa:16:3e:a8:13:c5 name: crc-6868605f-6684-4979-9a48-308ed352f6d0 network_id: fe296b7d-f858-415e-b30f-04968989d58d numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2025-12-12T16:12:20Z' crc_ci_bootstrap_network_name: zuul-ci-net-9e06bd9e crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: 1500 range: 192.168.122.0/24 internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 crc_ci_bootstrap_networks_out: controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:e6:21:77 mtu: 1500 crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:a8:13:c5 mtu: 1500 internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:b2:c5:bf mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:c0:69:a0 mtu: '1496' parent_iface: ens7 vlan: 21 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:e1:5c:e6 mtu: '1496' parent_iface: ens7 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-12-12T16:11:33Z' description: '' dns_domain: '' id: fe296b7d-f858-415e-b30f-04968989d58d ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: false l2_adjacency: true mtu: 1500 name: zuul-ci-net-9e06bd9e port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2025-12-12T16:11:34Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-12-12T16:11:42Z' description: '' enable_ndp_proxy: null external_gateway_info: enable_snat: true external_fixed_ips: - ip_address: 38.102.83.158 subnet_id: 3169b11b-94b1-4bc9-9727-4fdbbe15e56e network_id: 7abff1a9-a103-46d0-979a-1f1e599f4f41 flavor_id: null id: 00620b58-8061-48ff-affe-fc214a7e9cb5 name: zuul-ci-subnet-router-9e06bd9e project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 3 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2025-12-12T16:11:44Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2025-12-12T16:11:38Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 
192.168.122.1 host_routes: [] id: c980ef95-579b-4bba-a2cf-aaa697eb56ed ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-9e06bd9e network_id: fe296b7d-f858-415e-b30f-04968989d58d project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2025-12-12T16:11:38Z' crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-9e06bd9e crc_ci_bootstrap_subnet_name: zuul-ci-subnet-9e06bd9e discovered_interpreter_python: /usr/bin/python3 enable_ramdisk: true gather_subset: - min group_names: - ungrouped groups: all: - controller - crc ungrouped: *id001 zuul_unreachable: [] inventory_dir: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/post_playbook_1 inventory_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/post_playbook_1/inventory.yaml inventory_hostname: controller inventory_hostname_short: controller logfiles_dest_dir: /home/zuul/ci-framework-data/logs/2025-12-12_16-40 module_setup: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 3e8fa343-2893-4e1e-9f7a-ef822bd01638 host_id: 144026dc3a6267aa9593dce15cccee121a718779ba75fb6ce23b65e7 interface_ip: 38.102.83.97 label: cloud-centos-9-stream-tripleo-vexxhost private_ipv4: 38.102.83.97 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.97 public_ipv6: '' region: RegionOne slot: null omit: __omit_place_holder__15d3e4796d68b09ae734195d14f5ca09ccdadd05 playbook_dir: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks podified_validation: true push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true scenario: nightly_bundles-index_deploy unsafe_vars: ansible_connection: ssh ansible_host: 38.102.83.97 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_deploy_edpm: false cifmw_dlrn_report_result: false cifmw_extras: - '@scenarios/centos-9/multinode-ci.yml' - '@scenarios/centos-9/horizon.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_run_tests: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: 1500 range: 192.168.122.0/24 internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 3e8fa343-2893-4e1e-9f7a-ef822bd01638 host_id: 144026dc3a6267aa9593dce15cccee121a718779ba75fb6ce23b65e7 interface_ip: 38.102.83.97 label: cloud-centos-9-stream-tripleo-vexxhost private_ipv4: 38.102.83.97 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.97 public_ipv6: '' region: RegionOne slot: null podified_validation: true push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true 
scenario: nightly_bundles-index_deploy zuul_log_collection: true zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: master build: 9e06bd9ec9c1456eb80ff4a509f0548e build_refs: - branch: master change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null buildset: 5e8ab5d5989f4e03b4743e980e08eed9 buildset_refs: - branch: master change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 child_jobs: [] commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 event_id: bf7cf120-d774-11f0-9b66-056cd24cb52f executor: hostname: ze01.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/inventory.yaml log_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/logs result_data_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/results.json src_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/src work_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work items: - branch: master change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null job: stf-crc-ocp_420-nightly_bundles-index_deploy jobtags: [] max_attempts: 1 message: QWRkIE9DUCA0LjIwIGpvYnMKCkFkZCBqb2IgZGVmaW5pdGlvbnMgdXNpbmcgY3JjLWNsb3VkLW9jcC00LTIwLTEtM3hsIGFzIGJhc2UgaW1hZ2UNCg0KQ2xvc2VzOiBPU1BSSC0yMTg4MQ== patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 672a220823fac36a8965fa0d3dca764739bb46c0 trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 
935cfd422c2237f4863cdcdf5fb201bce8c32a67 trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: 9df4e7d5b028e976203d64479f9b7a76c1c95a24 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 672a220823fac36a8965fa0d3dca764739bb46c0 untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: 9df4e7d5b028e976203d64479f9b7a76c1c95a24 untrusted/project_4/github.com/infrawatch/service-telemetry-operator: canonical_name: github.com/infrawatch/service-telemetry-operator checkout: master commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 playbooks: - path: untrusted/project_4/github.com/infrawatch/service-telemetry-operator/ci/deploy_stf.yml roles: - checkout: master checkout_description: playbook branch link_name: ansible/playbook_0/role_0/service-telemetry-operator link_target: untrusted/project_4/github.com/infrawatch/service-telemetry-operator role_path: ansible/playbook_0/role_0/service-telemetry-operator/roles - checkout: main checkout_description: project override ref link_name: ansible/playbook_0/role_1/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_1/ci-framework/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_2/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_2/config/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_3/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_3/zuul-jobs/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_4/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_4/rdo-jobs/roles - path: untrusted/project_4/github.com/infrawatch/service-telemetry-operator/ci/test_stf.yml roles: - checkout: master checkout_description: playbook branch link_name: ansible/playbook_1/role_0/service-telemetry-operator link_target: untrusted/project_4/github.com/infrawatch/service-telemetry-operator role_path: ansible/playbook_1/role_0/service-telemetry-operator/roles - checkout: main checkout_description: project override ref link_name: ansible/playbook_1/role_1/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_1/role_1/ci-framework/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_2/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_1/role_2/config/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_3/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: 
ansible/playbook_1/role_3/zuul-jobs/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_4/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_1/role_4/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: 42957126d9d9b9d1372615db325b82bd992fa335 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/infrawatch/prometheus-webhook-snmp: canonical_hostname: github.com canonical_name: github.com/infrawatch/prometheus-webhook-snmp checkout: master checkout_description: zuul branch commit: 3959c53b2613d03d066cb1b2fe5bdae8633ae895 name: infrawatch/prometheus-webhook-snmp required: true short_name: prometheus-webhook-snmp src_dir: src/github.com/infrawatch/prometheus-webhook-snmp github.com/infrawatch/service-telemetry-operator: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator checkout: master checkout_description: zuul branch commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 name: infrawatch/service-telemetry-operator required: true short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator github.com/infrawatch/sg-bridge: canonical_hostname: github.com canonical_name: github.com/infrawatch/sg-bridge checkout: master checkout_description: zuul branch commit: bab11fba86ad0c21cb35e12b56bf086a3332f1d2 name: infrawatch/sg-bridge required: true short_name: sg-bridge src_dir: src/github.com/infrawatch/sg-bridge github.com/infrawatch/sg-core: canonical_hostname: github.com canonical_name: github.com/infrawatch/sg-core checkout: master checkout_description: zuul branch commit: 5a4aece11fea9f71ce7515d11e1e7f0eae97eea6 name: infrawatch/sg-core required: true short_name: sg-core src_dir: src/github.com/infrawatch/sg-core github.com/infrawatch/smart-gateway-operator: canonical_hostname: github.com canonical_name: github.com/infrawatch/smart-gateway-operator checkout: master checkout_description: zuul branch commit: 9e0945fe8a0e74be8bc9449318446eeb74336986 name: infrawatch/smart-gateway-operator required: true short_name: smart-gateway-operator src_dir: src/github.com/infrawatch/smart-gateway-operator github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: project override ref commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/dataplane-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/dataplane-operator checkout: main checkout_description: project override ref commit: c98b51bcd7fe14b85ed4cf3f5f76552b3455c5f2 name: openstack-k8s-operators/dataplane-operator required: true short_name: dataplane-operator src_dir: src/github.com/openstack-k8s-operators/dataplane-operator github.com/openstack-k8s-operators/edpm-ansible: 
canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/edpm-ansible checkout: main checkout_description: project default branch commit: 78f305a7f43e4024d260a64119f250386daa6420 name: openstack-k8s-operators/edpm-ansible required: true short_name: edpm-ansible src_dir: src/github.com/openstack-k8s-operators/edpm-ansible github.com/openstack-k8s-operators/infra-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/infra-operator checkout: main checkout_description: project override ref commit: 786269345f996bd262360738a1e3c6b09171f370 name: openstack-k8s-operators/infra-operator required: true short_name: infra-operator src_dir: src/github.com/openstack-k8s-operators/infra-operator github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: project default branch commit: 2f838b62fe50aacff3d514af4b502264e0a276a5 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls github.com/openstack-k8s-operators/openstack-baremetal-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator checkout: master checkout_description: zuul branch commit: a333e57066b1d48e41f93af68be81188290a96b3 name: openstack-k8s-operators/openstack-baremetal-operator required: true short_name: openstack-baremetal-operator src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator github.com/openstack-k8s-operators/openstack-must-gather: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-must-gather checkout: main checkout_description: project override ref commit: 2da49819dd6af6036aede5e4e9a080ff2c6457de name: openstack-k8s-operators/openstack-must-gather required: true short_name: openstack-must-gather src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather github.com/openstack-k8s-operators/openstack-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-operator checkout: main checkout_description: project override ref commit: 9a923a3e438c4f66834894bfa59207197cf3daea name: openstack-k8s-operators/openstack-operator required: true short_name: openstack-operator src_dir: src/github.com/openstack-k8s-operators/openstack-operator github.com/openstack-k8s-operators/repo-setup: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/repo-setup checkout: main checkout_description: project default branch commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f name: openstack-k8s-operators/repo-setup required: true short_name: repo-setup src_dir: src/github.com/openstack-k8s-operators/repo-setup opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: zuul branch commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: zuul branch commit: 672a220823fac36a8965fa0d3dca764739bb46c0 name: config required: true short_name: config src_dir: src/review.rdoproject.org/config ref: refs/pull/694/head resources: {} tenant: rdoproject.org timeout: 
3600 topic: null voting: true zuul_change_list: - service-telemetry-operator zuul_execution_branch: main zuul_execution_canonical_name_and_path: github.com/openstack-k8s-operators/ci-framework/ci/playbooks/e2e-collect-logs.yml zuul_execution_phase: post zuul_execution_phase_index: '1' zuul_execution_trusted: 'False' zuul_log_collection: true zuul_success: 'False' zuul_will_retry: 'False' crc: ansible_all_ipv4_addresses: - 192.168.126.11 - 38.102.83.180 ansible_all_ipv6_addresses: - fe80::ef27:a36c:b84d:9be0 ansible_apparmor: status: disabled ansible_architecture: x86_64 ansible_bios_date: 04/01/2014 ansible_bios_vendor: SeaBIOS ansible_bios_version: 1.15.0-1 ansible_board_asset_tag: NA ansible_board_name: NA ansible_board_serial: NA ansible_board_vendor: NA ansible_board_version: NA ansible_br_int: active: false device: br-int features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: b2:a9:9f:57:07:84 mtu: 1400 promisc: true timestamping: [] type: ether ansible_chassis_asset_tag: NA ansible_chassis_serial: NA ansible_chassis_vendor: QEMU ansible_chassis_version: pc-i440fx-6.2 ansible_check_mode: false ansible_cmdline: BOOT_IMAGE: (hd0,gpt3)/boot/ostree/rhcos-12a61ee52bd2826a8183af75be3fde40ba3ac3c6861f00f5f1ec8b26ded7ec8a/vmlinuz-5.14.0-570.57.1.el9_6.x86_64 boot: UUID=19e76f87-96b8-4794-9744-0b33dca22d5b cgroup_no_v1: all console: ttyS0 ignition.platform.id: metal ostree: /ostree/boot.1/rhcos/12a61ee52bd2826a8183af75be3fde40ba3ac3c6861f00f5f1ec8b26ded7ec8a/0 psi: '0' root: UUID=5eb7c122-420e-4494-80ec-41664070d7b6 rootflags: prjquota rw: true systemd.unified_cgroup_hierarchy: '1' ansible_config_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/post_playbook_1/ansible.cfg ansible_connection: ssh ansible_date_time: date: '2025-12-12' 
day: '12' epoch: '1765555845' epoch_int: '1765555845' hour: '16' iso8601: '2025-12-12T16:10:45Z' iso8601_basic: 20251212T161045785544 iso8601_basic_short: 20251212T161045 iso8601_micro: '2025-12-12T16:10:45.785544Z' minute: '10' month: '12' second: '45' time: '16:10:45' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Friday weekday_number: '5' weeknumber: '49' year: '2025' ansible_default_ipv4: address: 38.102.83.180 alias: ens3 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: ens3 macaddress: fa:16:3e:ac:f8:35 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether ansible_default_ipv6: {} ansible_device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 vda2: - EFI-SYSTEM vda3: - boot vda4: - root masters: {} uuids: sr0: - 2025-12-12-16-09-07-00 vda2: - 7B77-95E7 vda3: - 19e76f87-96b8-4794-9744-0b33dca22d5b vda4: - 5eb7c122-420e-4494-80ec-41664070d7b6 ansible_devices: loop0: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '12784' sectorsize: '4096' size: 6.24 MB support_discard: '4096' vendor: null virtual: 1 sr0: holders: [] host: 'IDE interface: Intel Corporation 82371SB PIIX3 IDE [Natoma/Triton II]' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-12-12-16-09-07-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: 'SCSI storage controller: Red Hat, Inc. Virtio block device' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: [] sectors: '2048' sectorsize: 512 size: 1.00 MB start: '2048' uuid: null vda2: holders: [] links: ids: [] labels: - EFI-SYSTEM masters: [] uuids: - 7B77-95E7 sectors: '260096' sectorsize: 512 size: 127.00 MB start: '4096' uuid: 7B77-95E7 vda3: holders: [] links: ids: [] labels: - boot masters: [] uuids: - 19e76f87-96b8-4794-9744-0b33dca22d5b sectors: '786432' sectorsize: 512 size: 384.00 MB start: '264192' uuid: 19e76f87-96b8-4794-9744-0b33dca22d5b vda4: holders: [] links: ids: [] labels: - root masters: [] uuids: - 5eb7c122-420e-4494-80ec-41664070d7b6 sectors: '418379743' sectorsize: 512 size: 199.50 GB start: '1050624' uuid: 5eb7c122-420e-4494-80ec-41664070d7b6 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '419430400' sectorsize: '512' size: 200.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 ansible_diff_mode: false ansible_distribution: RedHat ansible_distribution_file_parsed: true ansible_distribution_file_path: /etc/redhat-release ansible_distribution_file_search_string: Red Hat ansible_distribution_file_variety: RedHat ansible_distribution_major_version: '9' ansible_distribution_release: Plow ansible_distribution_version: '9.6' ansible_dns: nameservers: - 199.204.44.24 - 199.204.47.54 ansible_domain: '' ansible_effective_group_id: 1000 ansible_effective_user_id: 1000 ansible_ens3: active: true device: ens3 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off 
[fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.180 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::ef27:a36c:b84d:9be0 prefix: '64' scope: link macaddress: fa:16:3e:ac:f8:35 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether ansible_env: BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' HOME: /var/home/core LANG: C.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: core MOTD_SHOWN: pam PATH: /var/home/core/.local/bin:/var/home/core/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /var/home/core SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 55408 22 SSH_CONNECTION: 38.102.83.114 55408 38.102.83.180 22 USER: core XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '2' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f ansible_eth10: active: true device: eth10 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off 
[fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 192.168.126.11 broadcast: 192.168.126.255 netmask: 255.255.255.0 network: 192.168.126.0 prefix: '24' macaddress: f6:89:b0:26:63:00 mtu: 1500 promisc: false timestamping: [] type: ether ansible_facts: _ansible_facts_gathered: true all_ipv4_addresses: - 192.168.126.11 - 38.102.83.180 all_ipv6_addresses: - fe80::ef27:a36c:b84d:9be0 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA br_int: active: false device: br-int features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: b2:a9:9f:57:07:84 mtu: 1400 
promisc: true timestamping: [] type: ether chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cmdline: BOOT_IMAGE: (hd0,gpt3)/boot/ostree/rhcos-12a61ee52bd2826a8183af75be3fde40ba3ac3c6861f00f5f1ec8b26ded7ec8a/vmlinuz-5.14.0-570.57.1.el9_6.x86_64 boot: UUID=19e76f87-96b8-4794-9744-0b33dca22d5b cgroup_no_v1: all console: ttyS0 ignition.platform.id: metal ostree: /ostree/boot.1/rhcos/12a61ee52bd2826a8183af75be3fde40ba3ac3c6861f00f5f1ec8b26ded7ec8a/0 psi: '0' root: UUID=5eb7c122-420e-4494-80ec-41664070d7b6 rootflags: prjquota rw: true systemd.unified_cgroup_hierarchy: '1' date_time: date: '2025-12-12' day: '12' epoch: '1765555845' epoch_int: '1765555845' hour: '16' iso8601: '2025-12-12T16:10:45Z' iso8601_basic: 20251212T161045785544 iso8601_basic_short: 20251212T161045 iso8601_micro: '2025-12-12T16:10:45.785544Z' minute: '10' month: '12' second: '45' time: '16:10:45' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Friday weekday_number: '5' weeknumber: '49' year: '2025' default_ipv4: address: 38.102.83.180 alias: ens3 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: ens3 macaddress: fa:16:3e:ac:f8:35 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 vda2: - EFI-SYSTEM vda3: - boot vda4: - root masters: {} uuids: sr0: - 2025-12-12-16-09-07-00 vda2: - 7B77-95E7 vda3: - 19e76f87-96b8-4794-9744-0b33dca22d5b vda4: - 5eb7c122-420e-4494-80ec-41664070d7b6 devices: loop0: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: {} removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '12784' sectorsize: '4096' size: 6.24 MB support_discard: '4096' vendor: null virtual: 1 sr0: holders: [] host: 'IDE interface: Intel Corporation 82371SB PIIX3 IDE [Natoma/Triton II]' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-12-12-16-09-07-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: 'SCSI storage controller: Red Hat, Inc. 
Virtio block device' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: [] sectors: '2048' sectorsize: 512 size: 1.00 MB start: '2048' uuid: null vda2: holders: [] links: ids: [] labels: - EFI-SYSTEM masters: [] uuids: - 7B77-95E7 sectors: '260096' sectorsize: 512 size: 127.00 MB start: '4096' uuid: 7B77-95E7 vda3: holders: [] links: ids: [] labels: - boot masters: [] uuids: - 19e76f87-96b8-4794-9744-0b33dca22d5b sectors: '786432' sectorsize: 512 size: 384.00 MB start: '264192' uuid: 19e76f87-96b8-4794-9744-0b33dca22d5b vda4: holders: [] links: ids: [] labels: - root masters: [] uuids: - 5eb7c122-420e-4494-80ec-41664070d7b6 sectors: '418379743' sectorsize: 512 size: 199.50 GB start: '1050624' uuid: 5eb7c122-420e-4494-80ec-41664070d7b6 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '419430400' sectorsize: '512' size: 200.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: RedHat distribution_file_parsed: true distribution_file_path: /etc/redhat-release distribution_file_search_string: Red Hat distribution_file_variety: RedHat distribution_major_version: '9' distribution_release: Plow distribution_version: '9.6' dns: nameservers: - 199.204.44.24 - 199.204.47.54 domain: '' effective_group_id: 1000 effective_user_id: 1000 ens3: active: true device: ens3 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.180 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::ef27:a36c:b84d:9be0 prefix: '64' scope: link 
macaddress: fa:16:3e:ac:f8:35 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether env: BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' HOME: /var/home/core LANG: C.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: core MOTD_SHOWN: pam PATH: /var/home/core/.local/bin:/var/home/core/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /var/home/core SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 55408 22 SSH_CONNECTION: 38.102.83.114 55408 38.102.83.180 22 USER: core XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '2' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth10: active: true device: eth10 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 192.168.126.11 broadcast: 192.168.126.255 netmask: 255.255.255.0 network: 192.168.126.0 prefix: '24' macaddress: f6:89:b0:26:63:00 mtu: 1500 promisc: false timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: crc gather_subset: - all hostname: crc hostnqn: nqn.2014-08.org.nvmexpress:uuid:61c3a10d-83f9-474d-8347-456fea156b65 interfaces: - ovs-system - eth10 - tap0 - lo - ovn-k8s-mp0 - br-int - ens3 is_chroot: true iscsi_iqn: '' kernel: 5.14.0-570.57.1.el9_6.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Sun Oct 19 22:05:48 EDT 2025' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' 
generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.04 1m: 0.34 5m: 0.12 locally_reachable_ips: ipv4: - 38.102.83.180 - 127.0.0.0/8 - 127.0.0.1 - 192.168.126.11 ipv6: - ::1 - fe80::ef27:a36c:b84d:9be0 lsb: {} lvm: N/A machine: x86_64 machine_id: 80bc4fba336e4ca1bc9d28a8be52a356 memfree_mb: 31414 memory_mb: nocache: free: 31594 used: 497 real: free: 31414 total: 32091 used: 677 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 32091 module_setup: true mounts: - block_available: 14988975 block_size: 4096 block_total: 20823472 block_used: 5834497 device: /dev/vda4 fstype: xfs inode_available: 41549689 inode_total: 41679680 inode_used: 129991 mount: /sysroot options: ro,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota size_available: 61394841600 size_total: 85292941312 uuid: 5eb7c122-420e-4494-80ec-41664070d7b6 - block_available: 14988975 block_size: 4096 block_total: 20823472 block_used: 5834497 device: /dev/vda4 fstype: xfs inode_available: 41549689 inode_total: 41679680 inode_used: 129991 mount: /etc options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 61394841600 size_total: 85292941312 uuid: 5eb7c122-420e-4494-80ec-41664070d7b6 - block_available: 14988975 block_size: 4096 block_total: 20823472 block_used: 5834497 device: /dev/vda4 fstype: xfs inode_available: 41549689 inode_total: 41679680 inode_used: 129991 mount: /sysroot/ostree/deploy/rhcos/var options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 61394841600 size_total: 85292941312 uuid: 
5eb7c122-420e-4494-80ec-41664070d7b6 - block_available: 14988975 block_size: 4096 block_total: 20823472 block_used: 5834497 device: /dev/vda4 fstype: xfs inode_available: 41549689 inode_total: 41679680 inode_used: 129991 mount: /var options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 61394841600 size_total: 85292941312 uuid: 5eb7c122-420e-4494-80ec-41664070d7b6 - block_available: 205820 block_size: 1024 block_total: 358271 block_used: 152451 device: /dev/vda3 fstype: ext4 inode_available: 97936 inode_total: 98304 inode_used: 368 mount: /boot options: ro,seclabel,nosuid,nodev,relatime size_available: 210759680 size_total: 366869504 uuid: 19e76f87-96b8-4794-9744-0b33dca22d5b - block_available: 0 block_size: 2048 block_total: 241 block_used: 241 device: /dev/sr0 fstype: iso9660 inode_available: 0 inode_total: 0 inode_used: 0 mount: /tmp/openstack-config-drive options: ro,relatime,nojoliet,check=s,map=n,blocksize=2048 size_available: 0 size_total: 493568 uuid: 2025-12-12-16-09-07-00 nodename: crc os_family: RedHat ovn_k8s_mp0: active: false device: ovn-k8s-mp0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: 0a:58:0a:d9:00:02 mtu: 1400 promisc: true timestamping: [] type: ether ovs_system: active: false device: ovs-system features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off 
[fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: 86:67:3f:f5:d0:37 mtu: 1500 promisc: true timestamping: [] type: ether pkg_mgr: atomic_container proc_cmdline: BOOT_IMAGE: (hd0,gpt3)/boot/ostree/rhcos-12a61ee52bd2826a8183af75be3fde40ba3ac3c6861f00f5f1ec8b26ded7ec8a/vmlinuz-5.14.0-570.57.1.el9_6.x86_64 boot: UUID=19e76f87-96b8-4794-9744-0b33dca22d5b cgroup_no_v1: all console: - hvc0 - ttyS0 ignition.platform.id: metal ostree: /ostree/boot.1/rhcos/12a61ee52bd2826a8183af75be3fde40ba3ac3c6861f00f5f1ec8b26ded7ec8a/0 psi: '0' root: UUID=5eb7c122-420e-4494-80ec-41664070d7b6 rootflags: prjquota rw: true systemd.unified_cgroup_hierarchy: '1' processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor - '8' - AuthenticAMD - AMD EPYC-Rome Processor - '9' - AuthenticAMD - AMD EPYC-Rome Processor - '10' - AuthenticAMD - AMD EPYC-Rome Processor - '11' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 12 processor_nproc: 12 processor_threads_per_core: 1 processor_vcpus: 12 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.3.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 21 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 21 - final - 0 python_version: 3.9.21 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBNOi6aia0Vzi/G61OmUyR+FuLN3apRFqwgibK6Ieig/gswBwzh9Tl+tL2JMHgu7Zwnwszh4eyOqhnAQIn5Dsi/Y= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIHEiEZvE6yiZWiZi2HStEyrlmAZU/lmQ3KUh/7/SECAb ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: 
AAAAB3NzaC1yc2EAAAADAQABAAABgQD6TjspZjZs7EOk5r0Sj0Rd7tV+bp2vreqf2XgKvDEnPgSbYhfzpUp7iniKunKuxRYPbIBeXLvOgMAcrsXX75zD0x/5wbLkufUuZ3UANLYhM76n9l44pMonbyof2S0TZAEAKre/5KuTrunRX/zKxH3BldOPszTIUPihQfSzfV64BqwPm9tWeNdxGjuFBJuGNx3ZB4V+iSYnsYMrQHElXzVBiNVShGKiYhiP8FNlBY/5lFqv0vRGizNYtJEQnf5rH02bJIXvM4msHyEh3nDFE5jdjvZusKPA4dAFZ/FJSjPT1Hi8mr1qbKHCbhUWCYCboU+N5qF43Z4w0gpQKKraX8kTBTmLCl5JW3dm1nfkOJLr7hrvQpQe1QyBnARYdEqxXTaSsjGIUN2bWuy49paXNXUDOXO944D+XWBJsz8VUUzWuv1nLcH05NGmt5leVAvd+n2Ya2NkmviYsoNgv/2hSwAmVPrpTvQ6gx+6iKWHfHHWepRXcQ/ydNoV2a3GaBz49T0= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation tap0: active: false device: tap0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: off [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'off' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: off [requested on] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'off' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: off [requested on] tx_tcp_ecn_segmentation: off [requested on] tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: off [requested on] tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: 5a:94:ef:e4:0c:ee mtu: 1500 promisc: false speed: 10 timestamping: [] type: ether uptime_seconds: 90 user_dir: /var/home/core user_gecos: CoreOS Admin user_gid: 1000 user_id: core user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack ansible_fibre_channel_wwn: [] ansible_fips: false ansible_forks: 5 ansible_form_factor: Other ansible_fqdn: crc ansible_host: 38.102.83.180 ansible_hostname: crc ansible_hostnqn: nqn.2014-08.org.nvmexpress:uuid:61c3a10d-83f9-474d-8347-456fea156b65 ansible_interfaces: - ovs-system - eth10 - tap0 - lo - ovn-k8s-mp0 - br-int - ens3 ansible_inventory_sources: - 
/var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/post_playbook_1/inventory.yaml ansible_is_chroot: true ansible_iscsi_iqn: '' ansible_kernel: 5.14.0-570.57.1.el9_6.x86_64 ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Sun Oct 19 22:05:48 EDT 2025' ansible_lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback ansible_loadavg: 15m: 0.04 1m: 0.34 5m: 0.12 ansible_local: {} ansible_locally_reachable_ips: ipv4: - 38.102.83.180 - 127.0.0.0/8 - 127.0.0.1 - 192.168.126.11 ipv6: - ::1 - fe80::ef27:a36c:b84d:9be0 ansible_lsb: {} ansible_lvm: N/A ansible_machine: x86_64 ansible_machine_id: 80bc4fba336e4ca1bc9d28a8be52a356 ansible_memfree_mb: 31414 ansible_memory_mb: nocache: free: 31594 used: 497 real: free: 31414 total: 32091 used: 677 swap: cached: 0 free: 0 total: 0 used: 0 ansible_memtotal_mb: 32091 ansible_mounts: - block_available: 14988975 block_size: 4096 block_total: 20823472 block_used: 5834497 device: /dev/vda4 fstype: xfs inode_available: 41549689 inode_total: 41679680 inode_used: 129991 mount: /sysroot options: ro,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota size_available: 61394841600 size_total: 85292941312 uuid: 5eb7c122-420e-4494-80ec-41664070d7b6 - block_available: 14988975 block_size: 4096 block_total: 20823472 block_used: 5834497 device: /dev/vda4 fstype: xfs inode_available: 41549689 inode_total: 41679680 inode_used: 129991 mount: /etc options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind 
size_available: 61394841600 size_total: 85292941312 uuid: 5eb7c122-420e-4494-80ec-41664070d7b6 - block_available: 14988975 block_size: 4096 block_total: 20823472 block_used: 5834497 device: /dev/vda4 fstype: xfs inode_available: 41549689 inode_total: 41679680 inode_used: 129991 mount: /sysroot/ostree/deploy/rhcos/var options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 61394841600 size_total: 85292941312 uuid: 5eb7c122-420e-4494-80ec-41664070d7b6 - block_available: 14988975 block_size: 4096 block_total: 20823472 block_used: 5834497 device: /dev/vda4 fstype: xfs inode_available: 41549689 inode_total: 41679680 inode_used: 129991 mount: /var options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 61394841600 size_total: 85292941312 uuid: 5eb7c122-420e-4494-80ec-41664070d7b6 - block_available: 205820 block_size: 1024 block_total: 358271 block_used: 152451 device: /dev/vda3 fstype: ext4 inode_available: 97936 inode_total: 98304 inode_used: 368 mount: /boot options: ro,seclabel,nosuid,nodev,relatime size_available: 210759680 size_total: 366869504 uuid: 19e76f87-96b8-4794-9744-0b33dca22d5b - block_available: 0 block_size: 2048 block_total: 241 block_used: 241 device: /dev/sr0 fstype: iso9660 inode_available: 0 inode_total: 0 inode_used: 0 mount: /tmp/openstack-config-drive options: ro,relatime,nojoliet,check=s,map=n,blocksize=2048 size_available: 0 size_total: 493568 uuid: 2025-12-12-16-09-07-00 ansible_nodename: crc ansible_os_family: RedHat ansible_ovn_k8s_mp0: active: false device: ovn-k8s-mp0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: 0a:58:0a:d9:00:02 mtu: 1400 promisc: true timestamping: [] type: ether ansible_ovs_system: active: false device: ovs-system 
features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: 86:67:3f:f5:d0:37 mtu: 1500 promisc: true timestamping: [] type: ether ansible_pkg_mgr: atomic_container ansible_playbook_python: /usr/lib/zuul/ansible/8/bin/python ansible_port: 22 ansible_proc_cmdline: BOOT_IMAGE: (hd0,gpt3)/boot/ostree/rhcos-12a61ee52bd2826a8183af75be3fde40ba3ac3c6861f00f5f1ec8b26ded7ec8a/vmlinuz-5.14.0-570.57.1.el9_6.x86_64 boot: UUID=19e76f87-96b8-4794-9744-0b33dca22d5b cgroup_no_v1: all console: - hvc0 - ttyS0 ignition.platform.id: metal ostree: /ostree/boot.1/rhcos/12a61ee52bd2826a8183af75be3fde40ba3ac3c6861f00f5f1ec8b26ded7ec8a/0 psi: '0' root: UUID=5eb7c122-420e-4494-80ec-41664070d7b6 rootflags: prjquota rw: true systemd.unified_cgroup_hierarchy: '1' ansible_processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor - '8' - AuthenticAMD - AMD EPYC-Rome Processor - '9' - AuthenticAMD - AMD EPYC-Rome Processor - '10' - AuthenticAMD - AMD EPYC-Rome Processor - '11' - AuthenticAMD - AMD EPYC-Rome Processor ansible_processor_cores: 1 ansible_processor_count: 12 ansible_processor_nproc: 12 ansible_processor_threads_per_core: 1 ansible_processor_vcpus: 12 ansible_product_name: OpenStack Nova ansible_product_serial: NA ansible_product_uuid: NA ansible_product_version: 26.3.1 ansible_python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 21 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 21 - final - 0 
ansible_python_interpreter: auto ansible_python_version: 3.9.21 ansible_real_group_id: 1000 ansible_real_user_id: 1000 ansible_run_tags: - all ansible_scp_extra_args: -o PermitLocalCommand=no ansible_selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted ansible_selinux_python_present: true ansible_service_mgr: systemd ansible_sftp_extra_args: -o PermitLocalCommand=no ansible_skip_tags: [] ansible_ssh_common_args: -o PermitLocalCommand=no ansible_ssh_executable: ssh ansible_ssh_extra_args: -o PermitLocalCommand=no ansible_ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBNOi6aia0Vzi/G61OmUyR+FuLN3apRFqwgibK6Ieig/gswBwzh9Tl+tL2JMHgu7Zwnwszh4eyOqhnAQIn5Dsi/Y= ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIHEiEZvE6yiZWiZi2HStEyrlmAZU/lmQ3KUh/7/SECAb ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519 ansible_ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQD6TjspZjZs7EOk5r0Sj0Rd7tV+bp2vreqf2XgKvDEnPgSbYhfzpUp7iniKunKuxRYPbIBeXLvOgMAcrsXX75zD0x/5wbLkufUuZ3UANLYhM76n9l44pMonbyof2S0TZAEAKre/5KuTrunRX/zKxH3BldOPszTIUPihQfSzfV64BqwPm9tWeNdxGjuFBJuGNx3ZB4V+iSYnsYMrQHElXzVBiNVShGKiYhiP8FNlBY/5lFqv0vRGizNYtJEQnf5rH02bJIXvM4msHyEh3nDFE5jdjvZusKPA4dAFZ/FJSjPT1Hi8mr1qbKHCbhUWCYCboU+N5qF43Z4w0gpQKKraX8kTBTmLCl5JW3dm1nfkOJLr7hrvQpQe1QyBnARYdEqxXTaSsjGIUN2bWuy49paXNXUDOXO944D+XWBJsz8VUUzWuv1nLcH05NGmt5leVAvd+n2Ya2NkmviYsoNgv/2hSwAmVPrpTvQ6gx+6iKWHfHHWepRXcQ/ydNoV2a3GaBz49T0= ansible_ssh_host_key_rsa_public_keytype: ssh-rsa ansible_swapfree_mb: 0 ansible_swaptotal_mb: 0 ansible_system: Linux ansible_system_capabilities: - '' ansible_system_capabilities_enforced: 'True' ansible_system_vendor: OpenStack Foundation ansible_tap0: active: false device: tap0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: off [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'off' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: off [requested on] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'off' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: off [requested on] tx_tcp_ecn_segmentation: off [requested on] tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: off [requested on] tx_tunnel_remcsum_segmentation: off [fixed] 
tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: 5a:94:ef:e4:0c:ee mtu: 1500 promisc: false speed: 10 timestamping: [] type: ether ansible_uptime_seconds: 90 ansible_user: core ansible_user_dir: /var/home/core ansible_user_gecos: CoreOS Admin ansible_user_gid: 1000 ansible_user_id: core ansible_user_shell: /bin/bash ansible_user_uid: 1000 ansible_userspace_architecture: x86_64 ansible_userspace_bits: '64' ansible_verbosity: 1 ansible_version: full: 2.15.12 major: 2 minor: 15 revision: 12 string: 2.15.12 ansible_virtualization_role: guest ansible_virtualization_tech_guest: - openstack ansible_virtualization_tech_host: - kvm ansible_virtualization_type: openstack cifmw_architecture_repo: /var/home/core/src/github.com/openstack-k8s-operators/architecture cifmw_architecture_repo_relative: src/github.com/openstack-k8s-operators/architecture cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_deploy_edpm: false cifmw_dlrn_report_result: false cifmw_extras: - '@scenarios/centos-9/multinode-ci.yml' - '@scenarios/centos-9/horizon.yml' cifmw_installyamls_repos: /var/home/core/src/github.com/openstack-k8s-operators/install_yamls cifmw_installyamls_repos_relative: src/github.com/openstack-k8s-operators/install_yamls cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: /var/home/core/.crc/machines/crc/kubeconfig cifmw_openshift_password: '12**********89' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_project_dir: src/github.com/openstack-k8s-operators/ci-framework cifmw_project_dir_absolute: /var/home/core/src/github.com/openstack-k8s-operators/ci-framework cifmw_run_tests: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller crc_ci_bootstrap_cloud_name: vexxhost crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: 1500 range: 192.168.122.0/24 internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 discovered_interpreter_python: /usr/bin/python3 enable_ramdisk: true gather_subset: - all group_names: - ungrouped groups: all: - controller - crc ungrouped: *id001 zuul_unreachable: [] inventory_dir: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/post_playbook_1 inventory_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/post_playbook_1/inventory.yaml inventory_hostname: crc inventory_hostname_short: crc module_setup: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 6868605f-6684-4979-9a48-308ed352f6d0 host_id: bdb78bf25a270582fae0ca49d447ffffc4c7a50a772a0a4c0593588a interface_ip: 38.102.83.180 label: crc-cloud-ocp-4-20-1-3xl private_ipv4: 38.102.83.180 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.180 public_ipv6: '' region: RegionOne slot: null omit: __omit_place_holder__15d3e4796d68b09ae734195d14f5ca09ccdadd05 playbook_dir: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks podified_validation: true push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true scenario: nightly_bundles-index_deploy unsafe_vars: 
ansible_connection: ssh ansible_host: 38.102.83.180 ansible_port: 22 ansible_python_interpreter: auto ansible_user: core cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_deploy_edpm: false cifmw_dlrn_report_result: false cifmw_extras: - '@scenarios/centos-9/multinode-ci.yml' - '@scenarios/centos-9/horizon.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_run_tests: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: 1500 range: 192.168.122.0/24 internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 6868605f-6684-4979-9a48-308ed352f6d0 host_id: bdb78bf25a270582fae0ca49d447ffffc4c7a50a772a0a4c0593588a interface_ip: 38.102.83.180 label: crc-cloud-ocp-4-20-1-3xl private_ipv4: 38.102.83.180 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.180 public_ipv6: '' region: RegionOne slot: null podified_validation: true push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true scenario: nightly_bundles-index_deploy zuul_log_collection: true zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: master build: 9e06bd9ec9c1456eb80ff4a509f0548e build_refs: - branch: master change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null buildset: 5e8ab5d5989f4e03b4743e980e08eed9 buildset_refs: - branch: master change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 child_jobs: [] commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 event_id: bf7cf120-d774-11f0-9b66-056cd24cb52f executor: hostname: ze01.softwarefactory-project.io 
inventory_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/inventory.yaml log_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/logs result_data_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/results.json src_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/src work_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work items: - branch: master change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null job: stf-crc-ocp_420-nightly_bundles-index_deploy jobtags: [] max_attempts: 1 message: QWRkIE9DUCA0LjIwIGpvYnMKCkFkZCBqb2IgZGVmaW5pdGlvbnMgdXNpbmcgY3JjLWNsb3VkLW9jcC00LTIwLTEtM3hsIGFzIGJhc2UgaW1hZ2UNCg0KQ2xvc2VzOiBPU1BSSC0yMTg4MQ== patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 672a220823fac36a8965fa0d3dca764739bb46c0 trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: 9df4e7d5b028e976203d64479f9b7a76c1c95a24 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 672a220823fac36a8965fa0d3dca764739bb46c0 untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: 9df4e7d5b028e976203d64479f9b7a76c1c95a24 untrusted/project_4/github.com/infrawatch/service-telemetry-operator: canonical_name: github.com/infrawatch/service-telemetry-operator checkout: master commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 playbooks: - path: untrusted/project_4/github.com/infrawatch/service-telemetry-operator/ci/deploy_stf.yml roles: - checkout: master checkout_description: playbook branch link_name: ansible/playbook_0/role_0/service-telemetry-operator link_target: untrusted/project_4/github.com/infrawatch/service-telemetry-operator role_path: ansible/playbook_0/role_0/service-telemetry-operator/roles - checkout: main checkout_description: project override ref link_name: ansible/playbook_0/role_1/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_1/ci-framework/roles - checkout: 
master checkout_description: zuul branch link_name: ansible/playbook_0/role_2/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_2/config/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_3/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_3/zuul-jobs/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_4/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_4/rdo-jobs/roles - path: untrusted/project_4/github.com/infrawatch/service-telemetry-operator/ci/test_stf.yml roles: - checkout: master checkout_description: playbook branch link_name: ansible/playbook_1/role_0/service-telemetry-operator link_target: untrusted/project_4/github.com/infrawatch/service-telemetry-operator role_path: ansible/playbook_1/role_0/service-telemetry-operator/roles - checkout: main checkout_description: project override ref link_name: ansible/playbook_1/role_1/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_1/role_1/ci-framework/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_2/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_1/role_2/config/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_3/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_1/role_3/zuul-jobs/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_4/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_1/role_4/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: 42957126d9d9b9d1372615db325b82bd992fa335 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/infrawatch/prometheus-webhook-snmp: canonical_hostname: github.com canonical_name: github.com/infrawatch/prometheus-webhook-snmp checkout: master checkout_description: zuul branch commit: 3959c53b2613d03d066cb1b2fe5bdae8633ae895 name: infrawatch/prometheus-webhook-snmp required: true short_name: prometheus-webhook-snmp src_dir: src/github.com/infrawatch/prometheus-webhook-snmp github.com/infrawatch/service-telemetry-operator: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator checkout: master checkout_description: zuul branch commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 name: infrawatch/service-telemetry-operator required: true short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator github.com/infrawatch/sg-bridge: canonical_hostname: github.com canonical_name: github.com/infrawatch/sg-bridge checkout: master checkout_description: zuul branch commit: bab11fba86ad0c21cb35e12b56bf086a3332f1d2 name: infrawatch/sg-bridge required: true short_name: sg-bridge 
src_dir: src/github.com/infrawatch/sg-bridge github.com/infrawatch/sg-core: canonical_hostname: github.com canonical_name: github.com/infrawatch/sg-core checkout: master checkout_description: zuul branch commit: 5a4aece11fea9f71ce7515d11e1e7f0eae97eea6 name: infrawatch/sg-core required: true short_name: sg-core src_dir: src/github.com/infrawatch/sg-core github.com/infrawatch/smart-gateway-operator: canonical_hostname: github.com canonical_name: github.com/infrawatch/smart-gateway-operator checkout: master checkout_description: zuul branch commit: 9e0945fe8a0e74be8bc9449318446eeb74336986 name: infrawatch/smart-gateway-operator required: true short_name: smart-gateway-operator src_dir: src/github.com/infrawatch/smart-gateway-operator github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: project override ref commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/dataplane-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/dataplane-operator checkout: main checkout_description: project override ref commit: c98b51bcd7fe14b85ed4cf3f5f76552b3455c5f2 name: openstack-k8s-operators/dataplane-operator required: true short_name: dataplane-operator src_dir: src/github.com/openstack-k8s-operators/dataplane-operator github.com/openstack-k8s-operators/edpm-ansible: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/edpm-ansible checkout: main checkout_description: project default branch commit: 78f305a7f43e4024d260a64119f250386daa6420 name: openstack-k8s-operators/edpm-ansible required: true short_name: edpm-ansible src_dir: src/github.com/openstack-k8s-operators/edpm-ansible github.com/openstack-k8s-operators/infra-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/infra-operator checkout: main checkout_description: project override ref commit: 786269345f996bd262360738a1e3c6b09171f370 name: openstack-k8s-operators/infra-operator required: true short_name: infra-operator src_dir: src/github.com/openstack-k8s-operators/infra-operator github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: project default branch commit: 2f838b62fe50aacff3d514af4b502264e0a276a5 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls github.com/openstack-k8s-operators/openstack-baremetal-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator checkout: master checkout_description: zuul branch commit: a333e57066b1d48e41f93af68be81188290a96b3 name: openstack-k8s-operators/openstack-baremetal-operator required: true short_name: openstack-baremetal-operator src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator github.com/openstack-k8s-operators/openstack-must-gather: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-must-gather checkout: main checkout_description: project override ref commit: 2da49819dd6af6036aede5e4e9a080ff2c6457de name: openstack-k8s-operators/openstack-must-gather 
required: true short_name: openstack-must-gather src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather github.com/openstack-k8s-operators/openstack-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-operator checkout: main checkout_description: project override ref commit: 9a923a3e438c4f66834894bfa59207197cf3daea name: openstack-k8s-operators/openstack-operator required: true short_name: openstack-operator src_dir: src/github.com/openstack-k8s-operators/openstack-operator github.com/openstack-k8s-operators/repo-setup: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/repo-setup checkout: main checkout_description: project default branch commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f name: openstack-k8s-operators/repo-setup required: true short_name: repo-setup src_dir: src/github.com/openstack-k8s-operators/repo-setup opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: zuul branch commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: zuul branch commit: 672a220823fac36a8965fa0d3dca764739bb46c0 name: config required: true short_name: config src_dir: src/review.rdoproject.org/config ref: refs/pull/694/head resources: {} tenant: rdoproject.org timeout: 3600 topic: null voting: true zuul_execution_branch: main zuul_execution_canonical_name_and_path: github.com/openstack-k8s-operators/ci-framework/ci/playbooks/e2e-collect-logs.yml zuul_execution_phase: post zuul_execution_phase_index: '1' zuul_execution_trusted: 'False' zuul_log_collection: true zuul_success: 'False' zuul_will_retry: 'False' inventory_dir: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/post_playbook_1 inventory_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/post_playbook_1/inventory.yaml inventory_hostname: controller inventory_hostname_short: controller logfiles_dest_dir: /home/zuul/ci-framework-data/logs/2025-12-12_16-40 module_setup: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 3e8fa343-2893-4e1e-9f7a-ef822bd01638 host_id: 144026dc3a6267aa9593dce15cccee121a718779ba75fb6ce23b65e7 interface_ip: 38.102.83.97 label: cloud-centos-9-stream-tripleo-vexxhost private_ipv4: 38.102.83.97 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.97 public_ipv6: '' region: RegionOne slot: null omit: __omit_place_holder__15d3e4796d68b09ae734195d14f5ca09ccdadd05 openstack_namespace: openstack play_hosts: *id002 playbook_dir: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks podified_validation: true push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true role_name: artifacts role_names: *id003 role_path: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/roles/artifacts role_uuid: fa163ec2-ffbe-94ba-bee7-00000000002e scenario: nightly_bundles-index_deploy unsafe_vars: ansible_connection: ssh ansible_host: 38.102.83.97 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: 
~/.ssh/id_cifw cifmw_deploy_edpm: false cifmw_dlrn_report_result: false cifmw_extras: - '@scenarios/centos-9/multinode-ci.yml' - '@scenarios/centos-9/horizon.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_run_tests: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: 1500 range: 192.168.122.0/24 internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 3e8fa343-2893-4e1e-9f7a-ef822bd01638 host_id: 144026dc3a6267aa9593dce15cccee121a718779ba75fb6ce23b65e7 interface_ip: 38.102.83.97 label: cloud-centos-9-stream-tripleo-vexxhost private_ipv4: 38.102.83.97 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.97 public_ipv6: '' region: RegionOne slot: null podified_validation: true push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true scenario: nightly_bundles-index_deploy zuul_log_collection: true zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: master build: 9e06bd9ec9c1456eb80ff4a509f0548e build_refs: - branch: master change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null buildset: 5e8ab5d5989f4e03b4743e980e08eed9 buildset_refs: - branch: master change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 child_jobs: [] commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 event_id: bf7cf120-d774-11f0-9b66-056cd24cb52f executor: hostname: ze01.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/inventory.yaml log_root: 
/var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/logs result_data_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/results.json src_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/src work_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work items: - branch: master change: '694' change_message: "Add OCP 4.20 jobs\n\nAdd job definitions using crc-cloud-ocp-4-20-1-3xl as base image\r\n\r\nCloses: OSPRH-21881" change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null job: stf-crc-ocp_420-nightly_bundles-index_deploy jobtags: [] max_attempts: 1 message: QWRkIE9DUCA0LjIwIGpvYnMKCkFkZCBqb2IgZGVmaW5pdGlvbnMgdXNpbmcgY3JjLWNsb3VkLW9jcC00LTIwLTEtM3hsIGFzIGJhc2UgaW1hZ2UNCg0KQ2xvc2VzOiBPU1BSSC0yMTg4MQ== patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 672a220823fac36a8965fa0d3dca764739bb46c0 trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: 9df4e7d5b028e976203d64479f9b7a76c1c95a24 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 672a220823fac36a8965fa0d3dca764739bb46c0 untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: 9df4e7d5b028e976203d64479f9b7a76c1c95a24 untrusted/project_4/github.com/infrawatch/service-telemetry-operator: canonical_name: github.com/infrawatch/service-telemetry-operator checkout: master commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 playbooks: - path: untrusted/project_4/github.com/infrawatch/service-telemetry-operator/ci/deploy_stf.yml roles: - checkout: master checkout_description: playbook branch link_name: ansible/playbook_0/role_0/service-telemetry-operator link_target: untrusted/project_4/github.com/infrawatch/service-telemetry-operator role_path: ansible/playbook_0/role_0/service-telemetry-operator/roles - checkout: main checkout_description: project override ref link_name: ansible/playbook_0/role_1/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_1/ci-framework/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_2/config link_target: 
untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_2/config/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_3/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_3/zuul-jobs/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_4/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_4/rdo-jobs/roles - path: untrusted/project_4/github.com/infrawatch/service-telemetry-operator/ci/test_stf.yml roles: - checkout: master checkout_description: playbook branch link_name: ansible/playbook_1/role_0/service-telemetry-operator link_target: untrusted/project_4/github.com/infrawatch/service-telemetry-operator role_path: ansible/playbook_1/role_0/service-telemetry-operator/roles - checkout: main checkout_description: project override ref link_name: ansible/playbook_1/role_1/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_1/role_1/ci-framework/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_2/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_1/role_2/config/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_3/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_1/role_3/zuul-jobs/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_4/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_1/role_4/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: 42957126d9d9b9d1372615db325b82bd992fa335 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/infrawatch/prometheus-webhook-snmp: canonical_hostname: github.com canonical_name: github.com/infrawatch/prometheus-webhook-snmp checkout: master checkout_description: zuul branch commit: 3959c53b2613d03d066cb1b2fe5bdae8633ae895 name: infrawatch/prometheus-webhook-snmp required: true short_name: prometheus-webhook-snmp src_dir: src/github.com/infrawatch/prometheus-webhook-snmp github.com/infrawatch/service-telemetry-operator: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator checkout: master checkout_description: zuul branch commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 name: infrawatch/service-telemetry-operator required: true short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator github.com/infrawatch/sg-bridge: canonical_hostname: github.com canonical_name: github.com/infrawatch/sg-bridge checkout: master checkout_description: zuul branch commit: bab11fba86ad0c21cb35e12b56bf086a3332f1d2 name: infrawatch/sg-bridge required: true short_name: sg-bridge src_dir: src/github.com/infrawatch/sg-bridge github.com/infrawatch/sg-core: canonical_hostname: 
github.com canonical_name: github.com/infrawatch/sg-core checkout: master checkout_description: zuul branch commit: 5a4aece11fea9f71ce7515d11e1e7f0eae97eea6 name: infrawatch/sg-core required: true short_name: sg-core src_dir: src/github.com/infrawatch/sg-core github.com/infrawatch/smart-gateway-operator: canonical_hostname: github.com canonical_name: github.com/infrawatch/smart-gateway-operator checkout: master checkout_description: zuul branch commit: 9e0945fe8a0e74be8bc9449318446eeb74336986 name: infrawatch/smart-gateway-operator required: true short_name: smart-gateway-operator src_dir: src/github.com/infrawatch/smart-gateway-operator github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: project override ref commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/dataplane-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/dataplane-operator checkout: main checkout_description: project override ref commit: c98b51bcd7fe14b85ed4cf3f5f76552b3455c5f2 name: openstack-k8s-operators/dataplane-operator required: true short_name: dataplane-operator src_dir: src/github.com/openstack-k8s-operators/dataplane-operator github.com/openstack-k8s-operators/edpm-ansible: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/edpm-ansible checkout: main checkout_description: project default branch commit: 78f305a7f43e4024d260a64119f250386daa6420 name: openstack-k8s-operators/edpm-ansible required: true short_name: edpm-ansible src_dir: src/github.com/openstack-k8s-operators/edpm-ansible github.com/openstack-k8s-operators/infra-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/infra-operator checkout: main checkout_description: project override ref commit: 786269345f996bd262360738a1e3c6b09171f370 name: openstack-k8s-operators/infra-operator required: true short_name: infra-operator src_dir: src/github.com/openstack-k8s-operators/infra-operator github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: project default branch commit: 2f838b62fe50aacff3d514af4b502264e0a276a5 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls github.com/openstack-k8s-operators/openstack-baremetal-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator checkout: master checkout_description: zuul branch commit: a333e57066b1d48e41f93af68be81188290a96b3 name: openstack-k8s-operators/openstack-baremetal-operator required: true short_name: openstack-baremetal-operator src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator github.com/openstack-k8s-operators/openstack-must-gather: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-must-gather checkout: main checkout_description: project override ref commit: 2da49819dd6af6036aede5e4e9a080ff2c6457de name: openstack-k8s-operators/openstack-must-gather required: true short_name: openstack-must-gather src_dir: 
src/github.com/openstack-k8s-operators/openstack-must-gather github.com/openstack-k8s-operators/openstack-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-operator checkout: main checkout_description: project override ref commit: 9a923a3e438c4f66834894bfa59207197cf3daea name: openstack-k8s-operators/openstack-operator required: true short_name: openstack-operator src_dir: src/github.com/openstack-k8s-operators/openstack-operator github.com/openstack-k8s-operators/repo-setup: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/repo-setup checkout: main checkout_description: project default branch commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f name: openstack-k8s-operators/repo-setup required: true short_name: repo-setup src_dir: src/github.com/openstack-k8s-operators/repo-setup opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: zuul branch commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: zuul branch commit: 672a220823fac36a8965fa0d3dca764739bb46c0 name: config required: true short_name: config src_dir: src/review.rdoproject.org/config ref: refs/pull/694/head resources: {} tenant: rdoproject.org timeout: 3600 topic: null voting: true zuul_change_list: - service-telemetry-operator zuul_execution_branch: main zuul_execution_canonical_name_and_path: github.com/openstack-k8s-operators/ci-framework/ci/playbooks/e2e-collect-logs.yml zuul_execution_phase: post zuul_execution_phase_index: '1' zuul_execution_trusted: 'False' zuul_log_collection: true zuul_success: 'False' zuul_will_retry: 'False' home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_000_fetch_openshift.sh0000644000175000017500000000032515117040733030466 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_000_fetch_openshift.log) 2>&1 oc login -u kubeadmin -p 123456789 --insecure-skip-tls-verify=true api.crc.testing:6443 ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_001_login_into_openshift_internal.shhome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_001_login_into_openshift_internal.s0000644000175000017500000000044515117040744033270 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_001_login_into_openshift_internal.log) 2>&1 podman login -u kubeadmin -p sha256~EG5pZmEvMtlKbD96xrF9Jk3JoC7wIhyhXBBmcOOPvXo --tls-verify=false default-route-openshift-image-registry.apps-crc.testing home/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/0000755000175000017500000000000015117043064024507 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/install-yamls-params.yml0000644000175000017500000006655515117043064031325 0ustar zuulzuulcifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sE**********U= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_CLEANUP: 'true' BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: '' BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused 
CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 76**********f0 HEAT_BRANCH: main HEAT_COMMIT_HASH: '' HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: 'true' INSTALL_NMSTATE: true || false 
INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE: quay.io/metal3-io/ironic IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: CO**********6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '12**********42' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: 
neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12**********78' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/playbooks/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: os**********et SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: 
quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: test/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' tripleo_deploy: 'export REGISTRY_USER:' cifmw_install_yamls_environment: CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm home/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/custom-params.yml0000644000175000017500000000215315117043064030026 0ustar zuulzuulcifmw_architecture_repo: /home/zuul/src/github.com/openstack-k8s-operators/architecture cifmw_architecture_repo_relative: src/github.com/openstack-k8s-operators/architecture cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_deploy_edpm: false cifmw_dlrn_report_result: false cifmw_extras: - '@scenarios/centos-9/multinode-ci.yml' - '@scenarios/centos-9/horizon.yml' cifmw_installyamls_repos: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls cifmw_installyamls_repos_relative: src/github.com/openstack-k8s-operators/install_yamls cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_password: '12**********89' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_path: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:~/.crc/bin:~/.crc/bin/oc:~/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin cifmw_project_dir: src/github.com/openstack-k8s-operators/ci-framework cifmw_project_dir_absolute: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework cifmw_run_tests: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller home/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/zuul-params.yml0000644000175000017500000004750015117043064027520 0ustar zuulzuulcifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_deploy_edpm: false cifmw_dlrn_report_result: false cifmw_extras: - '@scenarios/centos-9/multinode-ci.yml' - '@scenarios/centos-9/horizon.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_run_tests: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: 1500 range: 192.168.122.0/24 internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true podified_validation: true push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true scenario: nightly_bundles-index_deploy zuul: 
_inheritance_path: - '' - '' - '' - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: master build: 9e06bd9ec9c1456eb80ff4a509f0548e build_refs: - branch: master change: '694' change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null buildset: 5e8ab5d5989f4e03b4743e980e08eed9 buildset_refs: - branch: master change: '694' change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null change: '694' change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 child_jobs: [] commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 event_id: bf7cf120-d774-11f0-9b66-056cd24cb52f executor: hostname: ze01.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/ansible/inventory.yaml log_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/logs result_data_file: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/results.json src_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work/src work_root: /var/lib/zuul/builds/9e06bd9ec9c1456eb80ff4a509f0548e/work items: - branch: master change: '694' change_url: https://github.com/infrawatch/service-telemetry-operator/pull/694 commit_id: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator topic: null job: stf-crc-ocp_420-nightly_bundles-index_deploy jobtags: [] max_attempts: 1 message: QWRkIE9DUCA0LjIwIGpvYnMKCkFkZCBqb2IgZGVmaW5pdGlvbnMgdXNpbmcgY3JjLWNsb3VkLW9jcC00LTIwLTEtM3hsIGFzIGJhc2UgaW1hZ2UNCg0KQ2xvc2VzOiBPU1BSSC0yMTg4MQ== patchset: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 672a220823fac36a8965fa0d3dca764739bb46c0 trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: 9df4e7d5b028e976203d64479f9b7a76c1c95a24 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 
untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 672a220823fac36a8965fa0d3dca764739bb46c0 untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: 9df4e7d5b028e976203d64479f9b7a76c1c95a24 untrusted/project_4/github.com/infrawatch/service-telemetry-operator: canonical_name: github.com/infrawatch/service-telemetry-operator checkout: master commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 playbooks: - path: untrusted/project_4/github.com/infrawatch/service-telemetry-operator/ci/deploy_stf.yml roles: - checkout: master checkout_description: playbook branch link_name: ansible/playbook_0/role_0/service-telemetry-operator link_target: untrusted/project_4/github.com/infrawatch/service-telemetry-operator role_path: ansible/playbook_0/role_0/service-telemetry-operator/roles - checkout: main checkout_description: project override ref link_name: ansible/playbook_0/role_1/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_1/ci-framework/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_2/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_2/config/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_3/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_3/zuul-jobs/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_0/role_4/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_4/rdo-jobs/roles - path: untrusted/project_4/github.com/infrawatch/service-telemetry-operator/ci/test_stf.yml roles: - checkout: master checkout_description: playbook branch link_name: ansible/playbook_1/role_0/service-telemetry-operator link_target: untrusted/project_4/github.com/infrawatch/service-telemetry-operator role_path: ansible/playbook_1/role_0/service-telemetry-operator/roles - checkout: main checkout_description: project override ref link_name: ansible/playbook_1/role_1/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_1/role_1/ci-framework/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_2/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_1/role_2/config/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_3/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_1/role_3/zuul-jobs/roles - checkout: master checkout_description: zuul branch link_name: ansible/playbook_1/role_4/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_1/role_4/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator name: infrawatch/service-telemetry-operator short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com 
canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: 42957126d9d9b9d1372615db325b82bd992fa335 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/infrawatch/prometheus-webhook-snmp: canonical_hostname: github.com canonical_name: github.com/infrawatch/prometheus-webhook-snmp checkout: master checkout_description: zuul branch commit: 3959c53b2613d03d066cb1b2fe5bdae8633ae895 name: infrawatch/prometheus-webhook-snmp required: true short_name: prometheus-webhook-snmp src_dir: src/github.com/infrawatch/prometheus-webhook-snmp github.com/infrawatch/service-telemetry-operator: canonical_hostname: github.com canonical_name: github.com/infrawatch/service-telemetry-operator checkout: master checkout_description: zuul branch commit: 3eef2b73e4f47c90a6bce6a8b87d5fbaffc39c14 name: infrawatch/service-telemetry-operator required: true short_name: service-telemetry-operator src_dir: src/github.com/infrawatch/service-telemetry-operator github.com/infrawatch/sg-bridge: canonical_hostname: github.com canonical_name: github.com/infrawatch/sg-bridge checkout: master checkout_description: zuul branch commit: bab11fba86ad0c21cb35e12b56bf086a3332f1d2 name: infrawatch/sg-bridge required: true short_name: sg-bridge src_dir: src/github.com/infrawatch/sg-bridge github.com/infrawatch/sg-core: canonical_hostname: github.com canonical_name: github.com/infrawatch/sg-core checkout: master checkout_description: zuul branch commit: 5a4aece11fea9f71ce7515d11e1e7f0eae97eea6 name: infrawatch/sg-core required: true short_name: sg-core src_dir: src/github.com/infrawatch/sg-core github.com/infrawatch/smart-gateway-operator: canonical_hostname: github.com canonical_name: github.com/infrawatch/smart-gateway-operator checkout: master checkout_description: zuul branch commit: 9e0945fe8a0e74be8bc9449318446eeb74336986 name: infrawatch/smart-gateway-operator required: true short_name: smart-gateway-operator src_dir: src/github.com/infrawatch/smart-gateway-operator github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: project override ref commit: b9f05e2b6eff8ddb76fcb7c45350db75c6af9b72 name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/dataplane-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/dataplane-operator checkout: main checkout_description: project override ref commit: c98b51bcd7fe14b85ed4cf3f5f76552b3455c5f2 name: openstack-k8s-operators/dataplane-operator required: true short_name: dataplane-operator src_dir: src/github.com/openstack-k8s-operators/dataplane-operator github.com/openstack-k8s-operators/edpm-ansible: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/edpm-ansible checkout: main checkout_description: project default branch commit: 78f305a7f43e4024d260a64119f250386daa6420 name: openstack-k8s-operators/edpm-ansible required: true short_name: edpm-ansible src_dir: src/github.com/openstack-k8s-operators/edpm-ansible github.com/openstack-k8s-operators/infra-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/infra-operator checkout: main checkout_description: project override ref commit: 786269345f996bd262360738a1e3c6b09171f370 name: 
openstack-k8s-operators/infra-operator required: true short_name: infra-operator src_dir: src/github.com/openstack-k8s-operators/infra-operator github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: project default branch commit: 2f838b62fe50aacff3d514af4b502264e0a276a5 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls github.com/openstack-k8s-operators/openstack-baremetal-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator checkout: master checkout_description: zuul branch commit: a333e57066b1d48e41f93af68be81188290a96b3 name: openstack-k8s-operators/openstack-baremetal-operator required: true short_name: openstack-baremetal-operator src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator github.com/openstack-k8s-operators/openstack-must-gather: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-must-gather checkout: main checkout_description: project override ref commit: 2da49819dd6af6036aede5e4e9a080ff2c6457de name: openstack-k8s-operators/openstack-must-gather required: true short_name: openstack-must-gather src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather github.com/openstack-k8s-operators/openstack-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-operator checkout: main checkout_description: project override ref commit: 9a923a3e438c4f66834894bfa59207197cf3daea name: openstack-k8s-operators/openstack-operator required: true short_name: openstack-operator src_dir: src/github.com/openstack-k8s-operators/openstack-operator github.com/openstack-k8s-operators/repo-setup: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/repo-setup checkout: main checkout_description: project default branch commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f name: openstack-k8s-operators/repo-setup required: true short_name: repo-setup src_dir: src/github.com/openstack-k8s-operators/repo-setup opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: zuul branch commit: 935cfd422c2237f4863cdcdf5fb201bce8c32a67 name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: zuul branch commit: 672a220823fac36a8965fa0d3dca764739bb46c0 name: config required: true short_name: config src_dir: src/review.rdoproject.org/config ref: refs/pull/694/head resources: {} tenant: rdoproject.org timeout: 3600 topic: null voting: true zuul_log_collection: true home/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/openshift-login-params.yml0000644000175000017500000000044015117040736031621 0ustar zuulzuulcifmw_openshift_api: https://api.crc.testing:6443 cifmw_openshift_context: default/api-crc-testing:6443/kubeadmin cifmw_openshift_kubeconfig: /home/zuul/.crc/machines/crc/kubeconfig cifmw_openshift_token: sha256~EG5pZmEvMtlKbD96xrF9Jk3JoC7wIhyhXBBmcOOPvXo cifmw_openshift_user: kubeadmin 
home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-12-12_16-43/0000777000175000017500000000000015117043104026721 5ustar zuulzuul././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-12-12_16-43/ansible_facts_cache/home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-12-12_16-43/ansible_facts_0000755000175000017500000000000015117043104031572 5ustar zuulzuul././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-12-12_16-43/ansible_facts_cache/localhosthome/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-12-12_16-43/ansible_facts_0000644000175000017500000016060015117043104031577 0ustar zuulzuul{ "_ansible_facts_gathered": true, "ansible_all_ipv4_addresses": [ "38.102.83.97", "192.168.122.11" ], "ansible_all_ipv6_addresses": [ "fe80::f816:3eff:feaa:3cf0" ], "ansible_apparmor": { "status": "disabled" }, "ansible_architecture": "x86_64", "ansible_bios_date": "04/01/2014", "ansible_bios_vendor": "SeaBIOS", "ansible_bios_version": "1.15.0-1", "ansible_board_asset_tag": "NA", "ansible_board_name": "NA", "ansible_board_serial": "NA", "ansible_board_vendor": "NA", "ansible_board_version": "NA", "ansible_chassis_asset_tag": "NA", "ansible_chassis_serial": "NA", "ansible_chassis_vendor": "QEMU", "ansible_chassis_version": "pc-i440fx-6.2", "ansible_cmdline": { "BOOT_IMAGE": "(hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64", "console": "ttyS0,115200n8", "crashkernel": "1G-2G:192M,2G-64G:256M,64G-:512M", "net.ifnames": "0", "no_timer_check": true, "ro": true, "root": "UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266" }, "ansible_date_time": { "date": "2025-12-12", "day": "12", "epoch": "1765556633", "epoch_int": "1765556633", "hour": "16", "iso8601": "2025-12-12T16:23:53Z", "iso8601_basic": "20251212T162353102294", "iso8601_basic_short": "20251212T162353", "iso8601_micro": "2025-12-12T16:23:53.102294Z", "minute": "23", "month": "12", "second": "53", "time": "16:23:53", "tz": "UTC", "tz_dst": "UTC", "tz_offset": "+0000", "weekday": "Friday", "weekday_number": "5", "weeknumber": "49", "year": "2025" }, "ansible_default_ipv4": { "address": "38.102.83.97", "alias": "eth0", "broadcast": "38.102.83.255", "gateway": "38.102.83.1", "interface": "eth0", "macaddress": "fa:16:3e:aa:3c:f0", "mtu": 1500, "netmask": "255.255.255.0", "network": "38.102.83.0", "prefix": "24", "type": "ether" }, "ansible_default_ipv6": {}, "ansible_device_links": { "ids": { "sr0": [ "ata-QEMU_DVD-ROM_QM00001" ] }, "labels": { "sr0": [ "config-2" ] }, "masters": {}, "uuids": { "sr0": [ "2025-12-12-16-08-48-00" ], "vda1": [ "cbdedf45-ed1d-4952-82a8-33a12c0ba266" ] } }, "ansible_devices": { "sr0": { "holders": [], "host": "", "links": { "ids": [ "ata-QEMU_DVD-ROM_QM00001" ], "labels": [ "config-2" ], "masters": [], "uuids": [ "2025-12-12-16-08-48-00" ] }, "model": "QEMU DVD-ROM", "partitions": {}, "removable": "1", "rotational": "1", "sas_address": null, "sas_device_handle": null, "scheduler_mode": "mq-deadline", "sectors": "964", "sectorsize": "2048", "size": "482.00 KB", "support_discard": "2048", "vendor": "QEMU", "virtual": 1 }, "vda": { "holders": [], "host": "", "links": { "ids": [], "labels": [], "masters": [], "uuids": [] }, "model": null, "partitions": { "vda1": { "holders": [], "links": { "ids": [], "labels": [], "masters": [], "uuids": [ "cbdedf45-ed1d-4952-82a8-33a12c0ba266" ] }, 
"sectors": "167770079", "sectorsize": 512, "size": "80.00 GB", "start": "2048", "uuid": "cbdedf45-ed1d-4952-82a8-33a12c0ba266" } }, "removable": "0", "rotational": "1", "sas_address": null, "sas_device_handle": null, "scheduler_mode": "none", "sectors": "167772160", "sectorsize": "512", "size": "80.00 GB", "support_discard": "512", "vendor": "0x1af4", "virtual": 1 } }, "ansible_distribution": "CentOS", "ansible_distribution_file_parsed": true, "ansible_distribution_file_path": "/etc/centos-release", "ansible_distribution_file_variety": "CentOS", "ansible_distribution_major_version": "9", "ansible_distribution_release": "Stream", "ansible_distribution_version": "9", "ansible_dns": { "nameservers": [ "192.168.122.10", "199.204.44.24", "199.204.47.54" ] }, "ansible_domain": "", "ansible_effective_group_id": 1000, "ansible_effective_user_id": 1000, "ansible_env": { "BASH_FUNC_which%%": "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}", "DBUS_SESSION_BUS_ADDRESS": "unix:path=/run/user/1000/bus", "DEBUGINFOD_IMA_CERT_PATH": "/etc/keys/ima:", "DEBUGINFOD_URLS": "https://debuginfod.centos.org/ ", "HOME": "/home/zuul", "LANG": "en_US.UTF-8", "LESSOPEN": "||/usr/bin/lesspipe.sh %s", "LOGNAME": "zuul", "MOTD_SHOWN": "pam", "PATH": "~/.crc/bin:~/.crc/bin/oc:~/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin", "PWD": "/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/playbooks", "SELINUX_LEVEL_REQUESTED": "", "SELINUX_ROLE_REQUESTED": "", "SELINUX_USE_CURRENT_RANGE": "", "SHELL": "/bin/bash", "SHLVL": "2", "SSH_CLIENT": "38.102.83.114 41690 22", "SSH_CONNECTION": "38.102.83.114 41690 38.102.83.97 22", "USER": "zuul", "XDG_RUNTIME_DIR": "/run/user/1000", "XDG_SESSION_CLASS": "user", "XDG_SESSION_ID": "9", "XDG_SESSION_TYPE": "tty", "_": "/usr/bin/python3", "which_declare": "declare -f" }, "ansible_eth0": { "active": true, "device": "eth0", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "on", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "on [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "off [fixed]", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "on [fixed]", "tx_ipxip4_segmentation": "off 
[fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [fixed]", "tx_sctp_segmentation": "off [fixed]", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "off", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "off [fixed]", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "38.102.83.97", "broadcast": "38.102.83.255", "netmask": "255.255.255.0", "network": "38.102.83.0", "prefix": "24" }, "ipv6": [ { "address": "fe80::f816:3eff:feaa:3cf0", "prefix": "64", "scope": "link" } ], "macaddress": "fa:16:3e:aa:3c:f0", "module": "virtio_net", "mtu": 1500, "pciid": "virtio1", "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_eth1": { "active": true, "device": "eth1", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "on", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "on [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "off [fixed]", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "on [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [fixed]", "tx_sctp_segmentation": "off [fixed]", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "off", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "off [fixed]", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "192.168.122.11", "broadcast": "192.168.122.255", "netmask": "255.255.255.0", "network": "192.168.122.0", "prefix": "24" }, "macaddress": "fa:16:3e:e6:21:77", "module": "virtio_net", "mtu": 1500, "pciid": "virtio5", "promisc": false, "speed": -1, "timestamping": [], 
"type": "ether" }, "ansible_fibre_channel_wwn": [], "ansible_fips": false, "ansible_form_factor": "Other", "ansible_fqdn": "controller", "ansible_hostname": "controller", "ansible_hostnqn": "nqn.2014-08.org.nvmexpress:uuid:e61ebeb9-32de-4b3b-b463-d59237136be4", "ansible_interfaces": [ "lo", "eth1", "eth0" ], "ansible_is_chroot": false, "ansible_iscsi_iqn": "", "ansible_kernel": "5.14.0-648.el9.x86_64", "ansible_kernel_version": "#1 SMP PREEMPT_DYNAMIC Fri Dec 5 11:18:23 UTC 2025", "ansible_lo": { "active": true, "device": "lo", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "on [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "off [fixed]", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "off [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on [fixed]", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "on [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "on", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "off [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off [fixed]", "tx_scatter_gather": "on [fixed]", "tx_scatter_gather_fraglist": "on [fixed]", "tx_sctp_segmentation": "on", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "on", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "on", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "on [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "127.0.0.1", "broadcast": "", "netmask": "255.0.0.0", "network": "127.0.0.0", "prefix": "8" }, "ipv6": [ { "address": "::1", "prefix": "128", "scope": "host" } ], "mtu": 65536, "promisc": false, "timestamping": [], "type": "loopback" }, "ansible_loadavg": { "15m": 0.41, "1m": 1.53, "5m": 0.8 }, "ansible_local": {}, "ansible_locally_reachable_ips": { "ipv4": [ "38.102.83.97", "127.0.0.0/8", "127.0.0.1", "192.168.122.11" ], "ipv6": [ "::1", "fe80::f816:3eff:feaa:3cf0" ] }, "ansible_lsb": {}, "ansible_lvm": "N/A", "ansible_machine": "x86_64", "ansible_machine_id": "64f1d6692049d8be5e8b216cc203502c", "ansible_memfree_mb": 5190, "ansible_memory_mb": { "nocache": { "free": 6680, "used": 999 }, "real": { "free": 5190, "total": 7679, "used": 2489 }, "swap": { "cached": 0, "free": 0, "total": 0, "used": 0 } 
}, "ansible_memtotal_mb": 7679, "ansible_mounts": [ { "block_available": 19892255, "block_size": 4096, "block_total": 20954875, "block_used": 1062620, "device": "/dev/vda1", "fstype": "xfs", "inode_available": 41790508, "inode_total": 41942512, "inode_used": 152004, "mount": "/", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 81478676480, "size_total": 85831168000, "uuid": "cbdedf45-ed1d-4952-82a8-33a12c0ba266" } ], "ansible_nodename": "controller", "ansible_os_family": "RedHat", "ansible_pkg_mgr": "dnf", "ansible_proc_cmdline": { "BOOT_IMAGE": "(hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64", "console": "ttyS0,115200n8", "crashkernel": "1G-2G:192M,2G-64G:256M,64G-:512M", "net.ifnames": "0", "no_timer_check": true, "ro": true, "root": "UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266" }, "ansible_processor": [ "0", "AuthenticAMD", "AMD EPYC-Rome Processor", "1", "AuthenticAMD", "AMD EPYC-Rome Processor", "2", "AuthenticAMD", "AMD EPYC-Rome Processor", "3", "AuthenticAMD", "AMD EPYC-Rome Processor", "4", "AuthenticAMD", "AMD EPYC-Rome Processor", "5", "AuthenticAMD", "AMD EPYC-Rome Processor", "6", "AuthenticAMD", "AMD EPYC-Rome Processor", "7", "AuthenticAMD", "AMD EPYC-Rome Processor" ], "ansible_processor_cores": 1, "ansible_processor_count": 8, "ansible_processor_nproc": 8, "ansible_processor_threads_per_core": 1, "ansible_processor_vcpus": 8, "ansible_product_name": "OpenStack Nova", "ansible_product_serial": "NA", "ansible_product_uuid": "NA", "ansible_product_version": "26.3.1", "ansible_python": { "executable": "/usr/bin/python3", "has_sslcontext": true, "type": "cpython", "version": { "major": 3, "micro": 25, "minor": 9, "releaselevel": "final", "serial": 0 }, "version_info": [ 3, 9, 25, "final", 0 ] }, "ansible_python_version": "3.9.25", "ansible_real_group_id": 1000, "ansible_real_user_id": 1000, "ansible_selinux": { "config_mode": "enforcing", "mode": "enforcing", "policyvers": 33, "status": "enabled", "type": "targeted" }, "ansible_selinux_python_present": true, "ansible_service_mgr": "systemd", "ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPIOW+VtnF/Fu/RuR24zXRchiOz7hR0QYB5AEr3y+Rog8EefZdE7lrEycvzxm3PNImSfgERgMwSA4vjDok0fKEQ=", "ansible_ssh_host_key_ecdsa_public_keytype": "ecdsa-sha2-nistp256", "ansible_ssh_host_key_ed25519_public": "AAAAC3NzaC1lZDI1NTE5AAAAIBB8eIzEKga2gBA89cbnEvfEvFoGpchZEHh+uW/JueDO", "ansible_ssh_host_key_ed25519_public_keytype": "ssh-ed25519", "ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABgQC+ncO1XqWbLlBF6ck1nTNn2UrTbaFBcVm+kh6XtCVAHkrjiGUF0j+0iuO0/3XBmR2RP+HhbYoMqcST7u0uMw8z6z7q2k7USnS6L3SauEdgu6yT4a3OSKeFnHjLxLwEqnQ2+aEfy+ApcIlyKiTGgSSZ89yni637VO2jMD1mU91RfBmVRzmxakK0OQOAZwte7UTK0PtXcjC8ws/x/iaGeAEJOKRDEBmJSXZkI9c/u9fDOuM7I36+syNIdmBUhk9kpvfJaeVPyCPHeyEBbIhCNdO8m1vo4n8/JYLvzIzg+3sIBVWYtTLYCVyEsb7Ecq7+dGmOR9ShqlxeA9bMM19/nChXHNky1WO4qPpgAO4yY6jG+4cYaUtiwsbS6K2wtgLhibqgQp8w3Md31vdcnhVmxEUtfM0vM1ynuRKDZ3jTwBa6ap8HnZ1GIgyhyAT/XHp4agpBbuP3/DbozPGEUDXmIMGRVLca0sLcjOL3w/PUx8oD7i//dbhZ6ymrCfTGuOmikuk=", "ansible_ssh_host_key_rsa_public_keytype": "ssh-rsa", "ansible_swapfree_mb": 0, "ansible_swaptotal_mb": 0, "ansible_system": "Linux", "ansible_system_capabilities": [ "" ], "ansible_system_capabilities_enforced": "True", "ansible_system_vendor": "OpenStack Foundation", "ansible_uptime_seconds": 895, "ansible_user_dir": "/home/zuul", "ansible_user_gecos": "", "ansible_user_gid": 1000, "ansible_user_id": "zuul", "ansible_user_shell": "/bin/bash", 
"ansible_user_uid": 1000, "ansible_userspace_architecture": "x86_64", "ansible_userspace_bits": "64", "ansible_virtualization_role": "guest", "ansible_virtualization_tech_guest": [ "openstack" ], "ansible_virtualization_tech_host": [ "kvm" ], "ansible_virtualization_type": "openstack", "cifmw_discovered_hash": "74bbc8589b27428ecda9125f78c03c8944b07a8b2fd431216ed273af7f01a4bd", "cifmw_discovered_hash_algorithm": "sha256", "cifmw_discovered_image_name": "CentOS-Stream-GenericCloud-x86_64-9-latest.x86_64.qcow2", "cifmw_discovered_image_url": "https://cloud.centos.org/centos/9-stream/x86_64/images//CentOS-Stream-GenericCloud-x86_64-9-latest.x86_64.qcow2", "cifmw_install_yamls_defaults": { "ADOPTED_EXTERNAL_NETWORK": "172.21.1.0/24", "ADOPTED_INTERNALAPI_NETWORK": "172.17.1.0/24", "ADOPTED_STORAGEMGMT_NETWORK": "172.20.1.0/24", "ADOPTED_STORAGE_NETWORK": "172.18.1.0/24", "ADOPTED_TENANT_NETWORK": "172.9.1.0/24", "ANSIBLEEE": "config/samples/_v1beta1_ansibleee.yaml", "ANSIBLEEE_BRANCH": "main", "ANSIBLEEE_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml", "ANSIBLEEE_IMG": "quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest", "ANSIBLEEE_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml", "ANSIBLEEE_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests", "ANSIBLEEE_KUTTL_NAMESPACE": "ansibleee-kuttl-tests", "ANSIBLEEE_REPO": "https://github.com/openstack-k8s-operators/openstack-ansibleee-operator", "ANSIBLEE_COMMIT_HASH": "", "BARBICAN": "config/samples/barbican_v1beta1_barbican.yaml", "BARBICAN_BRANCH": "main", "BARBICAN_COMMIT_HASH": "", "BARBICAN_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml", "BARBICAN_DEPL_IMG": "unused", "BARBICAN_IMG": "quay.io/openstack-k8s-operators/barbican-operator-index:latest", "BARBICAN_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml", "BARBICAN_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests", "BARBICAN_KUTTL_NAMESPACE": "barbican-kuttl-tests", "BARBICAN_REPO": "https://github.com/openstack-k8s-operators/barbican-operator.git", "BARBICAN_SERVICE_ENABLED": "true", "BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY": "sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU=", "BAREMETAL_BRANCH": "main", "BAREMETAL_COMMIT_HASH": "", "BAREMETAL_IMG": "quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest", "BAREMETAL_OS_CONTAINER_IMG": "", "BAREMETAL_OS_IMG": "", "BAREMETAL_REPO": "https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git", "BAREMETAL_TIMEOUT": "20m", "BASH_IMG": "quay.io/openstack-k8s-operators/bash:latest", "BGP_ASN": "64999", "BGP_LEAF_1": "100.65.4.1", "BGP_LEAF_2": "100.64.4.1", "BGP_OVN_ROUTING": "false", "BGP_PEER_ASN": "64999", "BGP_SOURCE_IP": "172.30.4.2", "BGP_SOURCE_IP6": "f00d:f00d:f00d:f00d:f00d:f00d:f00d:42", "BMAAS_BRIDGE_IPV4_PREFIX": "172.20.1.2/24", "BMAAS_BRIDGE_IPV6_PREFIX": "fd00:bbbb::2/64", "BMAAS_INSTANCE_DISK_SIZE": "20", "BMAAS_INSTANCE_MEMORY": "4096", "BMAAS_INSTANCE_NAME_PREFIX": "crc-bmaas", "BMAAS_INSTANCE_NET_MODEL": "virtio", "BMAAS_INSTANCE_OS_VARIANT": "centos-stream9", "BMAAS_INSTANCE_VCPUS": "2", "BMAAS_INSTANCE_VIRT_TYPE": "kvm", "BMAAS_IPV4": "true", "BMAAS_IPV6": "false", 
"BMAAS_LIBVIRT_USER": "sushyemu", "BMAAS_METALLB_ADDRESS_POOL": "172.20.1.64/26", "BMAAS_METALLB_POOL_NAME": "baremetal", "BMAAS_NETWORK_IPV4_PREFIX": "172.20.1.1/24", "BMAAS_NETWORK_IPV6_PREFIX": "fd00:bbbb::1/64", "BMAAS_NETWORK_NAME": "crc-bmaas", "BMAAS_NODE_COUNT": "1", "BMAAS_OCP_INSTANCE_NAME": "crc", "BMAAS_REDFISH_PASSWORD": "password", "BMAAS_REDFISH_USERNAME": "admin", "BMAAS_ROUTE_LIBVIRT_NETWORKS": "crc-bmaas,crc,default", "BMAAS_SUSHY_EMULATOR_DRIVER": "libvirt", "BMAAS_SUSHY_EMULATOR_IMAGE": "quay.io/metal3-io/sushy-tools:latest", "BMAAS_SUSHY_EMULATOR_NAMESPACE": "sushy-emulator", "BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE": "/etc/openstack/clouds.yaml", "BMAAS_SUSHY_EMULATOR_OS_CLOUD": "openstack", "BMH_NAMESPACE": "openstack", "BMO_BRANCH": "release-0.9", "BMO_CLEANUP": "true", "BMO_COMMIT_HASH": "", "BMO_IPA_BRANCH": "stable/2024.1", "BMO_IRONIC_HOST": "192.168.122.10", "BMO_PROVISIONING_INTERFACE": "", "BMO_REPO": "https://github.com/metal3-io/baremetal-operator", "BMO_SETUP": "", "BMO_SETUP_ROUTE_REPLACE": "true", "BM_CTLPLANE_INTERFACE": "enp1s0", "BM_INSTANCE_MEMORY": "8192", "BM_INSTANCE_NAME_PREFIX": "edpm-compute-baremetal", "BM_INSTANCE_NAME_SUFFIX": "0", "BM_NETWORK_NAME": "default", "BM_NODE_COUNT": "1", "BM_ROOT_PASSWORD": "", "BM_ROOT_PASSWORD_SECRET": "", "CEILOMETER_CENTRAL_DEPL_IMG": "unused", "CEILOMETER_NOTIFICATION_DEPL_IMG": "unused", "CEPH_BRANCH": "release-1.15", "CEPH_CLIENT": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml", "CEPH_COMMON": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml", "CEPH_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml", "CEPH_CRDS": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml", "CEPH_IMG": "quay.io/ceph/demo:latest-squid", "CEPH_OP": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml", "CEPH_REPO": "https://github.com/rook/rook.git", "CERTMANAGER_TIMEOUT": "300s", "CHECKOUT_FROM_OPENSTACK_REF": "true", "CINDER": "config/samples/cinder_v1beta1_cinder.yaml", "CINDERAPI_DEPL_IMG": "unused", "CINDERBKP_DEPL_IMG": "unused", "CINDERSCH_DEPL_IMG": "unused", "CINDERVOL_DEPL_IMG": "unused", "CINDER_BRANCH": "main", "CINDER_COMMIT_HASH": "", "CINDER_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml", "CINDER_IMG": "quay.io/openstack-k8s-operators/cinder-operator-index:latest", "CINDER_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml", "CINDER_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests", "CINDER_KUTTL_NAMESPACE": "cinder-kuttl-tests", "CINDER_REPO": "https://github.com/openstack-k8s-operators/cinder-operator.git", "CLEANUP_DIR_CMD": "rm -Rf", "CRC_BGP_NIC_1_MAC": "52:54:00:11:11:11", "CRC_BGP_NIC_2_MAC": "52:54:00:11:11:12", "CRC_HTTPS_PROXY": "", "CRC_HTTP_PROXY": "", "CRC_STORAGE_NAMESPACE": "crc-storage", "CRC_STORAGE_RETRIES": "3", "CRC_URL": "'https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz'", "CRC_VERSION": "latest", "DATAPLANE_ANSIBLE_SECRET": "dataplane-ansible-ssh-private-key-secret", "DATAPLANE_ANSIBLE_USER": "", "DATAPLANE_COMPUTE_IP": "192.168.122.100", "DATAPLANE_CONTAINER_PREFIX": "openstack", "DATAPLANE_CONTAINER_TAG": 
"current-podified", "DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG": "quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest", "DATAPLANE_DEFAULT_GW": "192.168.122.1", "DATAPLANE_EXTRA_NOVA_CONFIG_FILE": "/dev/null", "DATAPLANE_GROWVOLS_ARGS": "/=8GB /tmp=1GB /home=1GB /var=100%", "DATAPLANE_KUSTOMIZE_SCENARIO": "preprovisioned", "DATAPLANE_NETWORKER_IP": "192.168.122.200", "DATAPLANE_NETWORK_INTERFACE_NAME": "eth0", "DATAPLANE_NOVA_NFS_PATH": "", "DATAPLANE_NTP_SERVER": "pool.ntp.org", "DATAPLANE_PLAYBOOK": "osp.edpm.download_cache", "DATAPLANE_REGISTRY_URL": "quay.io/podified-antelope-centos9", "DATAPLANE_RUNNER_IMG": "", "DATAPLANE_SERVER_ROLE": "compute", "DATAPLANE_SSHD_ALLOWED_RANGES": "['192.168.122.0/24']", "DATAPLANE_TIMEOUT": "30m", "DATAPLANE_TLS_ENABLED": "true", "DATAPLANE_TOTAL_NETWORKER_NODES": "1", "DATAPLANE_TOTAL_NODES": "1", "DBSERVICE": "galera", "DESIGNATE": "config/samples/designate_v1beta1_designate.yaml", "DESIGNATE_BRANCH": "main", "DESIGNATE_COMMIT_HASH": "", "DESIGNATE_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml", "DESIGNATE_IMG": "quay.io/openstack-k8s-operators/designate-operator-index:latest", "DESIGNATE_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml", "DESIGNATE_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests", "DESIGNATE_KUTTL_NAMESPACE": "designate-kuttl-tests", "DESIGNATE_REPO": "https://github.com/openstack-k8s-operators/designate-operator.git", "DNSDATA": "config/samples/network_v1beta1_dnsdata.yaml", "DNSDATA_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml", "DNSMASQ": "config/samples/network_v1beta1_dnsmasq.yaml", "DNSMASQ_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml", "DNS_DEPL_IMG": "unused", "DNS_DOMAIN": "localdomain", "DOWNLOAD_TOOLS_SELECTION": "all", "EDPM_ATTACH_EXTNET": "true", "EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES": "'[]'", "EDPM_COMPUTE_ADDITIONAL_NETWORKS": "'[]'", "EDPM_COMPUTE_CELLS": "1", "EDPM_COMPUTE_CEPH_ENABLED": "true", "EDPM_COMPUTE_CEPH_NOVA": "true", "EDPM_COMPUTE_DHCP_AGENT_ENABLED": "true", "EDPM_COMPUTE_SRIOV_ENABLED": "true", "EDPM_COMPUTE_SUFFIX": "0", "EDPM_CONFIGURE_DEFAULT_ROUTE": "true", "EDPM_CONFIGURE_HUGEPAGES": "false", "EDPM_CONFIGURE_NETWORKING": "true", "EDPM_FIRSTBOOT_EXTRA": "/tmp/edpm-firstboot-extra", "EDPM_NETWORKER_SUFFIX": "0", "EDPM_TOTAL_NETWORKERS": "1", "EDPM_TOTAL_NODES": "1", "GALERA_REPLICAS": "", "GENERATE_SSH_KEYS": "true", "GIT_CLONE_OPTS": "", "GLANCE": "config/samples/glance_v1beta1_glance.yaml", "GLANCEAPI_DEPL_IMG": "unused", "GLANCE_BRANCH": "main", "GLANCE_COMMIT_HASH": "", "GLANCE_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml", "GLANCE_IMG": "quay.io/openstack-k8s-operators/glance-operator-index:latest", "GLANCE_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml", "GLANCE_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests", "GLANCE_KUTTL_NAMESPACE": "glance-kuttl-tests", "GLANCE_REPO": "https://github.com/openstack-k8s-operators/glance-operator.git", "HEAT": "config/samples/heat_v1beta1_heat.yaml", "HEATAPI_DEPL_IMG": "unused", "HEATCFNAPI_DEPL_IMG": "unused", 
"HEATENGINE_DEPL_IMG": "unused", "HEAT_AUTH_ENCRYPTION_KEY": "767c3ed056cbaa3b9dfedb8c6f825bf0", "HEAT_BRANCH": "main", "HEAT_COMMIT_HASH": "", "HEAT_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml", "HEAT_IMG": "quay.io/openstack-k8s-operators/heat-operator-index:latest", "HEAT_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml", "HEAT_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests", "HEAT_KUTTL_NAMESPACE": "heat-kuttl-tests", "HEAT_REPO": "https://github.com/openstack-k8s-operators/heat-operator.git", "HEAT_SERVICE_ENABLED": "true", "HORIZON": "config/samples/horizon_v1beta1_horizon.yaml", "HORIZON_BRANCH": "main", "HORIZON_COMMIT_HASH": "", "HORIZON_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml", "HORIZON_DEPL_IMG": "unused", "HORIZON_IMG": "quay.io/openstack-k8s-operators/horizon-operator-index:latest", "HORIZON_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml", "HORIZON_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests", "HORIZON_KUTTL_NAMESPACE": "horizon-kuttl-tests", "HORIZON_REPO": "https://github.com/openstack-k8s-operators/horizon-operator.git", "INFRA_BRANCH": "main", "INFRA_COMMIT_HASH": "", "INFRA_IMG": "quay.io/openstack-k8s-operators/infra-operator-index:latest", "INFRA_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml", "INFRA_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests", "INFRA_KUTTL_NAMESPACE": "infra-kuttl-tests", "INFRA_REPO": "https://github.com/openstack-k8s-operators/infra-operator.git", "INSTALL_CERT_MANAGER": "true", "INSTALL_NMSTATE": "true || false", "INSTALL_NNCP": "true || false", "INTERNALAPI_HOST_ROUTES": "", "IPV6_LAB_IPV4_NETWORK_IPADDRESS": "172.30.0.1/24", "IPV6_LAB_IPV6_NETWORK_IPADDRESS": "fd00:abcd:abcd:fc00::1/64", "IPV6_LAB_LIBVIRT_STORAGE_POOL": "default", "IPV6_LAB_MANAGE_FIREWALLD": "true", "IPV6_LAB_NAT64_HOST_IPV4": "172.30.0.2/24", "IPV6_LAB_NAT64_HOST_IPV6": "fd00:abcd:abcd:fc00::2/64", "IPV6_LAB_NAT64_INSTANCE_NAME": "nat64-router", "IPV6_LAB_NAT64_IPV6_NETWORK": "fd00:abcd:abcd:fc00::/64", "IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL": "192.168.255.0/24", "IPV6_LAB_NAT64_TAYGA_IPV4": "192.168.255.1", "IPV6_LAB_NAT64_TAYGA_IPV6": "fd00:abcd:abcd:fc00::3", "IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX": "fd00:abcd:abcd:fcff::/96", "IPV6_LAB_NAT64_UPDATE_PACKAGES": "false", "IPV6_LAB_NETWORK_NAME": "nat64", "IPV6_LAB_SNO_CLUSTER_NETWORK": "fd00:abcd:0::/48", "IPV6_LAB_SNO_HOST_IP": "fd00:abcd:abcd:fc00::11", "IPV6_LAB_SNO_HOST_PREFIX": "64", "IPV6_LAB_SNO_INSTANCE_NAME": "sno", "IPV6_LAB_SNO_MACHINE_NETWORK": "fd00:abcd:abcd:fc00::/64", "IPV6_LAB_SNO_OCP_MIRROR_URL": "https://mirror.openshift.com/pub/openshift-v4/clients/ocp", "IPV6_LAB_SNO_OCP_VERSION": "latest-4.14", "IPV6_LAB_SNO_SERVICE_NETWORK": "fd00:abcd:abcd:fc03::/112", "IPV6_LAB_SSH_PUB_KEY": "/home/zuul/.ssh/id_rsa.pub", "IPV6_LAB_WORK_DIR": "/home/zuul/.ipv6lab", "IRONIC": "config/samples/ironic_v1beta1_ironic.yaml", "IRONICAPI_DEPL_IMG": "unused", "IRONICCON_DEPL_IMG": "unused", "IRONICINS_DEPL_IMG": "unused", "IRONICNAG_DEPL_IMG": "unused", "IRONICPXE_DEPL_IMG": "unused", "IRONIC_BRANCH": "main", "IRONIC_COMMIT_HASH": "", "IRONIC_CR": 
"/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml", "IRONIC_IMAGE": "quay.io/metal3-io/ironic", "IRONIC_IMAGE_TAG": "release-24.1", "IRONIC_IMG": "quay.io/openstack-k8s-operators/ironic-operator-index:latest", "IRONIC_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml", "IRONIC_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests", "IRONIC_KUTTL_NAMESPACE": "ironic-kuttl-tests", "IRONIC_REPO": "https://github.com/openstack-k8s-operators/ironic-operator.git", "KEYSTONEAPI": "config/samples/keystone_v1beta1_keystoneapi.yaml", "KEYSTONEAPI_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml", "KEYSTONEAPI_DEPL_IMG": "unused", "KEYSTONE_BRANCH": "main", "KEYSTONE_COMMIT_HASH": "", "KEYSTONE_FEDERATION_CLIENT_SECRET": "COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f", "KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE": "openstack", "KEYSTONE_IMG": "quay.io/openstack-k8s-operators/keystone-operator-index:latest", "KEYSTONE_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml", "KEYSTONE_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests", "KEYSTONE_KUTTL_NAMESPACE": "keystone-kuttl-tests", "KEYSTONE_REPO": "https://github.com/openstack-k8s-operators/keystone-operator.git", "KUBEADMIN_PWD": "12345678", "LIBVIRT_SECRET": "libvirt-secret", "LOKI_DEPLOY_MODE": "openshift-network", "LOKI_DEPLOY_NAMESPACE": "netobserv", "LOKI_DEPLOY_SIZE": "1x.demo", "LOKI_NAMESPACE": "openshift-operators-redhat", "LOKI_OPERATOR_GROUP": "openshift-operators-redhat-loki", "LOKI_SUBSCRIPTION": "loki-operator", "LVMS_CR": "1", "MANILA": "config/samples/manila_v1beta1_manila.yaml", "MANILAAPI_DEPL_IMG": "unused", "MANILASCH_DEPL_IMG": "unused", "MANILASHARE_DEPL_IMG": "unused", "MANILA_BRANCH": "main", "MANILA_COMMIT_HASH": "", "MANILA_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml", "MANILA_IMG": "quay.io/openstack-k8s-operators/manila-operator-index:latest", "MANILA_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml", "MANILA_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests", "MANILA_KUTTL_NAMESPACE": "manila-kuttl-tests", "MANILA_REPO": "https://github.com/openstack-k8s-operators/manila-operator.git", "MANILA_SERVICE_ENABLED": "true", "MARIADB": "config/samples/mariadb_v1beta1_galera.yaml", "MARIADB_BRANCH": "main", "MARIADB_CHAINSAW_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml", "MARIADB_CHAINSAW_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests", "MARIADB_CHAINSAW_NAMESPACE": "mariadb-chainsaw-tests", "MARIADB_COMMIT_HASH": "", "MARIADB_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml", "MARIADB_DEPL_IMG": "unused", "MARIADB_IMG": "quay.io/openstack-k8s-operators/mariadb-operator-index:latest", "MARIADB_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml", "MARIADB_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests", 
"MARIADB_KUTTL_NAMESPACE": "mariadb-kuttl-tests", "MARIADB_REPO": "https://github.com/openstack-k8s-operators/mariadb-operator.git", "MEMCACHED": "config/samples/memcached_v1beta1_memcached.yaml", "MEMCACHED_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml", "MEMCACHED_DEPL_IMG": "unused", "METADATA_SHARED_SECRET": "1234567842", "METALLB_IPV6_POOL": "fd00:aaaa::80-fd00:aaaa::90", "METALLB_POOL": "192.168.122.80-192.168.122.90", "MICROSHIFT": "0", "NAMESPACE": "openstack", "NETCONFIG": "config/samples/network_v1beta1_netconfig.yaml", "NETCONFIG_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml", "NETCONFIG_DEPL_IMG": "unused", "NETOBSERV_DEPLOY_NAMESPACE": "netobserv", "NETOBSERV_NAMESPACE": "openshift-netobserv-operator", "NETOBSERV_OPERATOR_GROUP": "openshift-netobserv-operator-net", "NETOBSERV_SUBSCRIPTION": "netobserv-operator", "NETWORK_BGP": "false", "NETWORK_DESIGNATE_ADDRESS_PREFIX": "172.28.0", "NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX": "172.50.0", "NETWORK_INTERNALAPI_ADDRESS_PREFIX": "172.17.0", "NETWORK_ISOLATION": "true", "NETWORK_ISOLATION_INSTANCE_NAME": "crc", "NETWORK_ISOLATION_IPV4": "true", "NETWORK_ISOLATION_IPV4_ADDRESS": "172.16.1.1/24", "NETWORK_ISOLATION_IPV4_NAT": "true", "NETWORK_ISOLATION_IPV6": "false", "NETWORK_ISOLATION_IPV6_ADDRESS": "fd00:aaaa::1/64", "NETWORK_ISOLATION_IP_ADDRESS": "192.168.122.10", "NETWORK_ISOLATION_MAC": "52:54:00:11:11:10", "NETWORK_ISOLATION_NETWORK_NAME": "net-iso", "NETWORK_ISOLATION_NET_NAME": "default", "NETWORK_ISOLATION_USE_DEFAULT_NETWORK": "true", "NETWORK_MTU": "1500", "NETWORK_STORAGEMGMT_ADDRESS_PREFIX": "172.20.0", "NETWORK_STORAGE_ADDRESS_PREFIX": "172.18.0", "NETWORK_STORAGE_MACVLAN": "", "NETWORK_TENANT_ADDRESS_PREFIX": "172.19.0", "NETWORK_VLAN_START": "20", "NETWORK_VLAN_STEP": "1", "NEUTRONAPI": "config/samples/neutron_v1beta1_neutronapi.yaml", "NEUTRONAPI_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml", "NEUTRONAPI_DEPL_IMG": "unused", "NEUTRON_BRANCH": "main", "NEUTRON_COMMIT_HASH": "", "NEUTRON_IMG": "quay.io/openstack-k8s-operators/neutron-operator-index:latest", "NEUTRON_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml", "NEUTRON_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests", "NEUTRON_KUTTL_NAMESPACE": "neutron-kuttl-tests", "NEUTRON_REPO": "https://github.com/openstack-k8s-operators/neutron-operator.git", "NFS_HOME": "/home/nfs", "NMSTATE_NAMESPACE": "openshift-nmstate", "NMSTATE_OPERATOR_GROUP": "openshift-nmstate-tn6k8", "NMSTATE_SUBSCRIPTION": "kubernetes-nmstate-operator", "NNCP_ADDITIONAL_HOST_ROUTES": "", "NNCP_BGP_1_INTERFACE": "enp7s0", "NNCP_BGP_1_IP_ADDRESS": "100.65.4.2", "NNCP_BGP_2_INTERFACE": "enp8s0", "NNCP_BGP_2_IP_ADDRESS": "100.64.4.2", "NNCP_BRIDGE": "ospbr", "NNCP_CLEANUP_TIMEOUT": "120s", "NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX": "fd00:aaaa::", "NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX": "10", "NNCP_CTLPLANE_IP_ADDRESS_PREFIX": "192.168.122", "NNCP_CTLPLANE_IP_ADDRESS_SUFFIX": "10", "NNCP_DNS_SERVER": "192.168.122.1", "NNCP_DNS_SERVER_IPV6": "fd00:aaaa::1", "NNCP_GATEWAY": "192.168.122.1", "NNCP_GATEWAY_IPV6": "fd00:aaaa::1", "NNCP_INTERFACE": "enp6s0", "NNCP_NODES": "", "NNCP_TIMEOUT": "240s", "NOVA": "config/samples/nova_v1beta1_nova_collapsed_cell.yaml", 
"NOVA_BRANCH": "main", "NOVA_COMMIT_HASH": "", "NOVA_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml", "NOVA_IMG": "quay.io/openstack-k8s-operators/nova-operator-index:latest", "NOVA_REPO": "https://github.com/openstack-k8s-operators/nova-operator.git", "NUMBER_OF_INSTANCES": "1", "OCP_NETWORK_NAME": "crc", "OCTAVIA": "config/samples/octavia_v1beta1_octavia.yaml", "OCTAVIA_BRANCH": "main", "OCTAVIA_COMMIT_HASH": "", "OCTAVIA_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml", "OCTAVIA_IMG": "quay.io/openstack-k8s-operators/octavia-operator-index:latest", "OCTAVIA_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml", "OCTAVIA_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests", "OCTAVIA_KUTTL_NAMESPACE": "octavia-kuttl-tests", "OCTAVIA_REPO": "https://github.com/openstack-k8s-operators/octavia-operator.git", "OKD": "false", "OPENSTACK_BRANCH": "main", "OPENSTACK_BUNDLE_IMG": "quay.io/openstack-k8s-operators/openstack-operator-bundle:latest", "OPENSTACK_COMMIT_HASH": "", "OPENSTACK_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml", "OPENSTACK_CRDS_DIR": "openstack_crds", "OPENSTACK_CTLPLANE": "config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml", "OPENSTACK_IMG": "quay.io/openstack-k8s-operators/openstack-operator-index:latest", "OPENSTACK_K8S_BRANCH": "main", "OPENSTACK_K8S_TAG": "latest", "OPENSTACK_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml", "OPENSTACK_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests", "OPENSTACK_KUTTL_NAMESPACE": "openstack-kuttl-tests", "OPENSTACK_NEUTRON_CUSTOM_CONF": "", "OPENSTACK_REPO": "https://github.com/openstack-k8s-operators/openstack-operator.git", "OPENSTACK_STORAGE_BUNDLE_IMG": "quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest", "OPERATOR_BASE_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator", "OPERATOR_CHANNEL": "", "OPERATOR_NAMESPACE": "openstack-operators", "OPERATOR_SOURCE": "", "OPERATOR_SOURCE_NAMESPACE": "", "OUT": "/home/zuul/ci-framework-data/artifacts/manifests", "OUTPUT_DIR": "/home/zuul/ci-framework-data/artifacts/edpm", "OVNCONTROLLER": "config/samples/ovn_v1beta1_ovncontroller.yaml", "OVNCONTROLLER_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml", "OVNCONTROLLER_NMAP": "true", "OVNDBS": "config/samples/ovn_v1beta1_ovndbcluster.yaml", "OVNDBS_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml", "OVNNORTHD": "config/samples/ovn_v1beta1_ovnnorthd.yaml", "OVNNORTHD_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml", "OVN_BRANCH": "main", "OVN_COMMIT_HASH": "", "OVN_IMG": "quay.io/openstack-k8s-operators/ovn-operator-index:latest", "OVN_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml", "OVN_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests", "OVN_KUTTL_NAMESPACE": 
"ovn-kuttl-tests", "OVN_REPO": "https://github.com/openstack-k8s-operators/ovn-operator.git", "PASSWORD": "12345678", "PLACEMENTAPI": "config/samples/placement_v1beta1_placementapi.yaml", "PLACEMENTAPI_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml", "PLACEMENTAPI_DEPL_IMG": "unused", "PLACEMENT_BRANCH": "main", "PLACEMENT_COMMIT_HASH": "", "PLACEMENT_IMG": "quay.io/openstack-k8s-operators/placement-operator-index:latest", "PLACEMENT_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml", "PLACEMENT_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests", "PLACEMENT_KUTTL_NAMESPACE": "placement-kuttl-tests", "PLACEMENT_REPO": "https://github.com/openstack-k8s-operators/placement-operator.git", "PULL_SECRET": "/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/playbooks/pull-secret.txt", "RABBITMQ": "docs/examples/default-security-context/rabbitmq.yaml", "RABBITMQ_BRANCH": "patches", "RABBITMQ_COMMIT_HASH": "", "RABBITMQ_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml", "RABBITMQ_DEPL_IMG": "unused", "RABBITMQ_IMG": "quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest", "RABBITMQ_REPO": "https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git", "REDHAT_OPERATORS": "false", "REDIS": "config/samples/redis_v1beta1_redis.yaml", "REDIS_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml", "REDIS_DEPL_IMG": "unused", "RH_REGISTRY_PWD": "", "RH_REGISTRY_USER": "", "SECRET": "osp-secret", "SG_CORE_DEPL_IMG": "unused", "STANDALONE_COMPUTE_DRIVER": "libvirt", "STANDALONE_EXTERNAL_NET_PREFFIX": "172.21.0", "STANDALONE_INTERNALAPI_NET_PREFIX": "172.17.0", "STANDALONE_STORAGEMGMT_NET_PREFIX": "172.20.0", "STANDALONE_STORAGE_NET_PREFIX": "172.18.0", "STANDALONE_TENANT_NET_PREFIX": "172.19.0", "STORAGEMGMT_HOST_ROUTES": "", "STORAGE_CLASS": "local-storage", "STORAGE_HOST_ROUTES": "", "SWIFT": "config/samples/swift_v1beta1_swift.yaml", "SWIFT_BRANCH": "main", "SWIFT_COMMIT_HASH": "", "SWIFT_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml", "SWIFT_IMG": "quay.io/openstack-k8s-operators/swift-operator-index:latest", "SWIFT_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml", "SWIFT_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests", "SWIFT_KUTTL_NAMESPACE": "swift-kuttl-tests", "SWIFT_REPO": "https://github.com/openstack-k8s-operators/swift-operator.git", "TELEMETRY": "config/samples/telemetry_v1beta1_telemetry.yaml", "TELEMETRY_BRANCH": "main", "TELEMETRY_COMMIT_HASH": "", "TELEMETRY_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml", "TELEMETRY_IMG": "quay.io/openstack-k8s-operators/telemetry-operator-index:latest", "TELEMETRY_KUTTL_BASEDIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator", "TELEMETRY_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml", "TELEMETRY_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites", 
"TELEMETRY_KUTTL_NAMESPACE": "telemetry-kuttl-tests", "TELEMETRY_KUTTL_RELPATH": "test/kuttl/suites", "TELEMETRY_REPO": "https://github.com/openstack-k8s-operators/telemetry-operator.git", "TENANT_HOST_ROUTES": "", "TIMEOUT": "300s", "TLS_ENABLED": "false", "tripleo_deploy": "export REGISTRY_USER:" }, "cifmw_install_yamls_environment": { "CHECKOUT_FROM_OPENSTACK_REF": "true", "KUBECONFIG": "/home/zuul/.crc/machines/crc/kubeconfig", "OPENSTACK_K8S_BRANCH": "main", "OUT": "/home/zuul/ci-framework-data/artifacts/manifests", "OUTPUT_DIR": "/home/zuul/ci-framework-data/artifacts/edpm" }, "cifmw_openshift_api": "https://api.crc.testing:6443", "cifmw_openshift_context": "default/api-crc-testing:6443/kubeadmin", "cifmw_openshift_kubeconfig": "/home/zuul/.crc/machines/crc/kubeconfig", "cifmw_openshift_login_api": "https://api.crc.testing:6443", "cifmw_openshift_login_cert_login": false, "cifmw_openshift_login_context": "default/api-crc-testing:6443/kubeadmin", "cifmw_openshift_login_kubeconfig": "/home/zuul/.crc/machines/crc/kubeconfig", "cifmw_openshift_login_password": 123456789, "cifmw_openshift_login_token": "sha256~EG5pZmEvMtlKbD96xrF9Jk3JoC7wIhyhXBBmcOOPvXo", "cifmw_openshift_login_user": "kubeadmin", "cifmw_openshift_token": "sha256~EG5pZmEvMtlKbD96xrF9Jk3JoC7wIhyhXBBmcOOPvXo", "cifmw_openshift_user": "kubeadmin", "cifmw_path": "/home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:~/.crc/bin:~/.crc/bin/oc:~/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin", "cifmw_repo_setup_commit_hash": null, "cifmw_repo_setup_distro_hash": null, "cifmw_repo_setup_dlrn_api_url": "https://trunk.rdoproject.org/api-centos9-antelope", "cifmw_repo_setup_dlrn_url": "https://trunk.rdoproject.org/centos9-antelope/current-podified/delorean.repo.md5", "cifmw_repo_setup_extended_hash": null, "cifmw_repo_setup_full_hash": "c3923531bcda0b0811b2d5053f189beb", "cifmw_repo_setup_release": "antelope", "discovered_interpreter_python": "/usr/bin/python3", "gather_subset": [ "all" ], "module_setup": true }home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible-facts.yml0000644000175000017500000004657315117042565025626 0ustar zuulzuul_ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.97 all_ipv6_addresses: - fe80::f816:3eff:feaa:3cf0 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266 crc_ci_bootstrap_instance_default_net_config: mtu: 1500 range: 192.168.122.0/24 crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: ip: 172.17.0.5 - key: storage value: ip: 172.18.0.5 - key: tenant value: ip: 172.19.0.5 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2025-12-12T16:12:20Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-10.openstacklocal. 
hostname: host-192-168-122-10 ip_address: 192.168.122.10 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.10 subnet_id: c980ef95-579b-4bba-a2cf-aaa697eb56ed hardware_offload_type: null hints: '' id: 5c6b1861-0ebb-4b1f-b057-c128a69c2f2b ip_allocation: immediate mac_address: fa:16:3e:a8:13:c5 name: crc-6868605f-6684-4979-9a48-308ed352f6d0 network_id: fe296b7d-f858-415e-b30f-04968989d58d numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2025-12-12T16:12:20Z' crc_ci_bootstrap_network_name: zuul-ci-net-9e06bd9e crc_ci_bootstrap_networks_out: controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:e6:21:77 mtu: 1500 crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:a8:13:c5 mtu: 1500 internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:b2:c5:bf mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:c0:69:a0 mtu: '1496' parent_iface: ens7 vlan: 21 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:e1:5c:e6 mtu: '1496' parent_iface: ens7 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-12-12T16:11:33Z' description: '' dns_domain: '' id: fe296b7d-f858-415e-b30f-04968989d58d ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: false l2_adjacency: true mtu: 1500 name: zuul-ci-net-9e06bd9e port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2025-12-12T16:11:34Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-12-12T16:11:42Z' description: '' enable_ndp_proxy: null external_gateway_info: enable_snat: true external_fixed_ips: - ip_address: 38.102.83.158 subnet_id: 3169b11b-94b1-4bc9-9727-4fdbbe15e56e network_id: 7abff1a9-a103-46d0-979a-1f1e599f4f41 flavor_id: null id: 00620b58-8061-48ff-affe-fc214a7e9cb5 name: zuul-ci-subnet-router-9e06bd9e project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 3 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2025-12-12T16:11:44Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2025-12-12T16:11:38Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: c980ef95-579b-4bba-a2cf-aaa697eb56ed ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-9e06bd9e network_id: fe296b7d-f858-415e-b30f-04968989d58d project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2025-12-12T16:11:38Z' 
crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-9e06bd9e crc_ci_bootstrap_subnet_name: zuul-ci-subnet-9e06bd9e date_time: date: '2025-12-12' day: '12' epoch: '1765557618' epoch_int: '1765557618' hour: '16' iso8601: '2025-12-12T16:40:18Z' iso8601_basic: 20251212T164018962300 iso8601_basic_short: 20251212T164018 iso8601_micro: '2025-12-12T16:40:18.962300Z' minute: '40' month: '12' second: '18' time: '16:40:18' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Friday weekday_number: '5' weeknumber: '49' year: '2025' default_ipv4: address: 38.102.83.97 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:aa:3c:f0 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-12-12-16-08-48-00 vda1: - cbdedf45-ed1d-4952-82a8-33a12c0ba266 devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-12-12-16-08-48-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - cbdedf45-ed1d-4952-82a8-33a12c0ba266 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: cbdedf45-ed1d-4952-82a8-33a12c0ba266 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: CentOS distribution_file_parsed: true distribution_file_path: /etc/centos-release distribution_file_variety: CentOS distribution_major_version: '9' distribution_release: Stream distribution_version: '9' dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 domain: '' effective_group_id: 1000 effective_user_id: 1000 env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 40548 22 SSH_CONNECTION: 38.102.83.114 40548 38.102.83.97 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '16' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] 
hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.97 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:feaa:3cf0 prefix: '64' scope: link macaddress: fa:16:3e:aa:3c:f0 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: controller gather_subset: - min hostname: controller hostnqn: nqn.2014-08.org.nvmexpress:uuid:e61ebeb9-32de-4b3b-b463-d59237136be4 interfaces: - eth0 - lo is_chroot: false iscsi_iqn: '' kernel: 5.14.0-648.el9.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Dec 5 11:18:23 UTC 2025' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] 
tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.06 1m: 0.24 5m: 0.16 locally_reachable_ips: ipv4: - 38.102.83.97 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:feaa:3cf0 lsb: {} lvm: N/A machine: x86_64 machine_id: 64f1d6692049d8be5e8b216cc203502c memfree_mb: 7120 memory_mb: nocache: free: 7331 used: 348 real: free: 7120 total: 7679 used: 559 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 7679 module_setup: true mounts: - block_available: 20336140 block_size: 4096 block_total: 20954875 block_used: 618735 device: /dev/vda1 fstype: xfs inode_available: 41888385 inode_total: 41942512 inode_used: 54127 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83296829440 size_total: 85831168000 uuid: cbdedf45-ed1d-4952-82a8-33a12c0ba266 nodename: controller os_family: RedHat pkg_mgr: dnf proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266 processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 8 processor_nproc: 8 processor_threads_per_core: 1 processor_vcpus: 8 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.3.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 25 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 25 - final - 0 python_version: 3.9.25 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPIOW+VtnF/Fu/RuR24zXRchiOz7hR0QYB5AEr3y+Rog8EefZdE7lrEycvzxm3PNImSfgERgMwSA4vjDok0fKEQ= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIBB8eIzEKga2gBA89cbnEvfEvFoGpchZEHh+uW/JueDO ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: 
AAAAB3NzaC1yc2EAAAADAQABAAABgQC+ncO1XqWbLlBF6ck1nTNn2UrTbaFBcVm+kh6XtCVAHkrjiGUF0j+0iuO0/3XBmR2RP+HhbYoMqcST7u0uMw8z6z7q2k7USnS6L3SauEdgu6yT4a3OSKeFnHjLxLwEqnQ2+aEfy+ApcIlyKiTGgSSZ89yni637VO2jMD1mU91RfBmVRzmxakK0OQOAZwte7UTK0PtXcjC8ws/x/iaGeAEJOKRDEBmJSXZkI9c/u9fDOuM7I36+syNIdmBUhk9kpvfJaeVPyCPHeyEBbIhCNdO8m1vo4n8/JYLvzIzg+3sIBVWYtTLYCVyEsb7Ecq7+dGmOR9ShqlxeA9bMM19/nChXHNky1WO4qPpgAO4yY6jG+4cYaUtiwsbS6K2wtgLhibqgQp8w3Md31vdcnhVmxEUtfM0vM1ynuRKDZ3jTwBa6ap8HnZ1GIgyhyAT/XHp4agpBbuP3/DbozPGEUDXmIMGRVLca0sLcjOL3w/PUx8oD7i//dbhZ6ymrCfTGuOmikuk= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 108 user_dir: /home/zuul user_gecos: '' user_gid: 1000 user_id: zuul user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack zuul_change_list: - service-telemetry-operator home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/0000755000175000017500000000000015117040723024334 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/0000755000175000017500000000000015117040723026323 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/cr/0000755000175000017500000000000015117040723026727 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/0000755000175000017500000000000015117043064025270 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ens3.nmconnection0000644000175000017500000000026215117042566030562 0ustar zuulzuul[connection] id=ens3 uuid=80e201bc-bccd-49eb-9325-1e09b23a9293 type=ethernet interface-name=ens3 [ethernet] [ipv4] method=auto [ipv6] addr-gen-mode=eui64 method=auto [proxy] ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network.nmconnectionhome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network.nmconnectio0000644000175000017500000000051315117042566033265 0ustar zuulzuul[connection] id=ci-private-network uuid=28a21f5b-6875-55a2-a3cf-bd30934907b9 type=ethernet autoconnect=true interface-name=eth1 [ethernet] mac-address=fa:16:3e:e6:21:77 mtu=1500 [ipv4] method=manual addresses=192.168.122.11/24 never-default=true gateway=192.168.122.1 [ipv6] addr-gen-mode=stable-privacy method=disabled [proxy] home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/0000755000175000017500000000000015117043064024366 5ustar zuulzuul././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-highavailability.repohome/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-highavailability.0000644000175000017500000000034215117042566033300 0ustar zuulzuul [repo-setup-centos-highavailability] name=repo-setup-centos-highavailability baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/HighAvailability/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-powertools.repo0000644000175000017500000000031115117042566033065 0ustar zuulzuul [repo-setup-centos-powertools] 
name=repo-setup-centos-powertools baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/CRB/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/delorean.repo.md50000644000175000017500000000004115117042566027533 0ustar zuulzuulc3923531bcda0b0811b2d5053f189beb home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-appstream.repo0000644000175000017500000000031615117042566032651 0ustar zuulzuul [repo-setup-centos-appstream] name=repo-setup-centos-appstream baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/AppStream/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/delorean-antelope-testing.repo0000644000175000017500000000317215117042566032337 0ustar zuulzuul[delorean-antelope-testing] name=dlrn-antelope-testing baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/deps/latest/ enabled=1 gpgcheck=0 module_hotfixes=1 [delorean-antelope-build-deps] name=dlrn-antelope-build-deps baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/build-deps/latest/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-rabbitmq] name=centos9-rabbitmq baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/messaging/$basearch/rabbitmq-38/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-storage] name=centos9-storage baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/storage/$basearch/ceph-reef/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-opstools] name=centos9-opstools baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/opstools/$basearch/collectd-5/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-nfv-ovs] name=NFV SIG OpenvSwitch baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/nfv/$basearch/openvswitch-2/ gpgcheck=0 enabled=1 module_hotfixes=1 # epel is required for Ceph Reef [epel-low-priority] name=Extra Packages for Enterprise Linux $releasever - $basearch metalink=https://mirrors.fedoraproject.org/metalink?repo=epel-$releasever&arch=$basearch&infra=$infra&content=$contentdir enabled=1 gpgcheck=0 countme=1 priority=100 includepkgs=libarrow*,parquet*,python3-asyncssh,re2,python3-grpcio,grpc*,abseil*,thrift*,blake3 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/delorean.repo0000644000175000017500000001341515117042566027060 0ustar zuulzuul[delorean-component-barbican] name=delorean-openstack-barbican-42b4c41831408a8e323fec3c8983b5c793b64874 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/barbican/42/b4/42b4c41831408a8e323fec3c8983b5c793b64874_08052e9d enabled=1 gpgcheck=0 priority=1 [delorean-component-baremetal] name=delorean-python-glean-10df0bd91b9bc5c9fd9cc02d75c0084cd4da29a7 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/baremetal/10/df/10df0bd91b9bc5c9fd9cc02d75c0084cd4da29a7_36137eb3 enabled=1 gpgcheck=0 priority=1 [delorean-component-cinder] name=delorean-openstack-cinder-1c00d6490d88e436f26efb71f2ac96e75252e97c 
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/cinder/1c/00/1c00d6490d88e436f26efb71f2ac96e75252e97c_f716f000 enabled=1 gpgcheck=0 priority=1 [delorean-component-clients] name=delorean-python-stevedore-c4acc5639fd2329372142e39464fcca0209b0018 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/clients/c4/ac/c4acc5639fd2329372142e39464fcca0209b0018_d3ef8337 enabled=1 gpgcheck=0 priority=1 [delorean-component-cloudops] name=delorean-python-cloudkitty-tests-tempest-2c80f80e02c5accd099187ea762c8f8389bd7905 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/cloudops/2c/80/2c80f80e02c5accd099187ea762c8f8389bd7905_33e4dd93 enabled=1 gpgcheck=0 priority=1 [delorean-component-common] name=delorean-os-refresh-config-9bfc52b5049be2d8de6134d662fdde9dfa48960f baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/common/9b/fc/9bfc52b5049be2d8de6134d662fdde9dfa48960f_b85780e6 enabled=1 gpgcheck=0 priority=1 [delorean-component-compute] name=delorean-openstack-nova-6f8decf0b4f1aa2e96292b6a2ffc28249fe4af5e baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/compute/6f/8d/6f8decf0b4f1aa2e96292b6a2ffc28249fe4af5e_dc05b899 enabled=1 gpgcheck=0 priority=1 [delorean-component-designate] name=delorean-python-designate-tests-tempest-347fdbc9b4595a10b726526b3c0b5928e5b7fcf2 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/designate/34/7f/347fdbc9b4595a10b726526b3c0b5928e5b7fcf2_3fd39337 enabled=1 gpgcheck=0 priority=1 [delorean-component-glance] name=delorean-openstack-glance-1fd12c29b339f30fe823e2b5beba14b5f241e52a baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/glance/1f/d1/1fd12c29b339f30fe823e2b5beba14b5f241e52a_0d693729 enabled=1 gpgcheck=0 priority=1 [delorean-component-keystone] name=delorean-openstack-keystone-e4b40af0ae3698fbbbbfb8c22468b33aae80e6d7 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/keystone/e4/b4/e4b40af0ae3698fbbbbfb8c22468b33aae80e6d7_264c03cc enabled=1 gpgcheck=0 priority=1 [delorean-component-manila] name=delorean-openstack-manila-3c01b7181572c95dac462eb19c3121e36cb0fe95 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/manila/3c/01/3c01b7181572c95dac462eb19c3121e36cb0fe95_912dfd18 enabled=1 gpgcheck=0 priority=1 [delorean-component-network] name=delorean-python-whitebox-neutron-tests-tempest-12cf06ce36a79a584fc757f4c25ff96845573c93 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/network/12/cf/12cf06ce36a79a584fc757f4c25ff96845573c93_3ed3aba3 enabled=1 gpgcheck=0 priority=1 [delorean-component-octavia] name=delorean-openstack-octavia-ba397f07a7331190208c93368ee23826ac4e2707 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/octavia/ba/39/ba397f07a7331190208c93368ee23826ac4e2707_9d6e596a enabled=1 gpgcheck=0 priority=1 [delorean-component-optimize] name=delorean-openstack-watcher-c014f81a8647287f6dcc339321c1256f5a2e82d5 
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/optimize/c0/14/c014f81a8647287f6dcc339321c1256f5a2e82d5_bcbfdccc enabled=1 gpgcheck=0 priority=1 [delorean-component-podified] name=delorean-ansible-config_template-5ccaa22121a7ff05620975540d81f6efb077d8db baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/podified/5c/ca/5ccaa22121a7ff05620975540d81f6efb077d8db_83eb7cc2 enabled=1 gpgcheck=0 priority=1 [delorean-component-puppet] name=delorean-puppet-ceph-7352068d7b8c84ded636ab3158dafa6f3851951e baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/puppet/73/52/7352068d7b8c84ded636ab3158dafa6f3851951e_7cde1ad1 enabled=1 gpgcheck=0 priority=1 [delorean-component-swift] name=delorean-openstack-swift-dc98a8463506ac520c469adb0ef47d0f7753905a baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/swift/dc/98/dc98a8463506ac520c469adb0ef47d0f7753905a_9d02f069 enabled=1 gpgcheck=0 priority=1 [delorean-component-tempest] name=delorean-python-tempestconf-8515371b7cceebd4282e09f1d8f0cc842df82855 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/tempest/85/15/8515371b7cceebd4282e09f1d8f0cc842df82855_a1e336c7 enabled=1 gpgcheck=0 priority=1 [delorean-component-ui] name=delorean-openstack-heat-ui-013accbfd179753bc3f0d1f4e5bed07a4fd9f771 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/ui/01/3a/013accbfd179753bc3f0d1f4e5bed07a4fd9f771_0c88e467 enabled=1 gpgcheck=0 priority=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-baseos.repo0000644000175000017500000000030415117042566032126 0ustar zuulzuul [repo-setup-centos-baseos] name=repo-setup-centos-baseos baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/BaseOS/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/selinux-listing.log0000644000175000017500000037176715117043113020750 0ustar zuulzuul/home/zuul/ci-framework-data: total 8 drwxr-xr-x. 10 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Dec 12 16:43 artifacts drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Dec 12 16:43 logs drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 24 Dec 12 16:23 tmp drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Dec 12 16:24 volumes /home/zuul/ci-framework-data/artifacts: total 516 drwxrwxrwx. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 33 Dec 12 16:43 ansible_facts.2025-12-12_16-43 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 19835 Dec 12 16:40 ansible-facts.yml -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 350685 Dec 12 16:43 ansible-vars.yml drwxr-xr-x. 2 root root unconfined_u:object_r:user_home_t:s0 33 Dec 12 16:43 ci-env -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 135 Dec 12 16:40 ci_script_000_check_for_oc.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 239 Dec 12 16:43 ci_script_000_copy_logs_from_crc.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 213 Dec 12 16:24 ci_script_000_fetch_openshift.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 659 Dec 12 16:43 ci_script_000_prepare_root_ssh.sh -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 722 Dec 12 16:40 ci_script_000_run_openstack_must_gather.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 293 Dec 12 16:25 ci_script_001_login_into_openshift_internal.sh -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 159 Dec 12 16:40 hosts -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 77119 Dec 12 16:40 installed-packages.yml -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 1644 Dec 12 16:40 ip-network.txt drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 23 Dec 12 16:24 manifests drwxr-xr-x. 2 root root unconfined_u:object_r:user_home_t:s0 70 Dec 12 16:43 NetworkManager drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 120 Dec 12 16:43 parameters drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Dec 12 16:43 repositories -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 106 Dec 12 16:40 resolv.conf drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 33 Dec 12 16:24 roles drwxr-xr-x. 2 root root unconfined_u:object_r:user_home_t:s0 4096 Dec 12 16:43 yum_repos -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 30396 Dec 12 16:43 zuul_inventory.yml /home/zuul/ci-framework-data/artifacts/ansible_facts.2025-12-12_16-43: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 23 Dec 12 16:43 ansible_facts_cache /home/zuul/ci-framework-data/artifacts/ansible_facts.2025-12-12_16-43/ansible_facts_cache: total 60 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 57728 Dec 12 16:43 localhost /home/zuul/ci-framework-data/artifacts/ci-env: total 4 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 1226 Dec 12 16:40 networking-info.yml /home/zuul/ci-framework-data/artifacts/manifests: total 0 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Dec 12 16:24 openstack /home/zuul/ci-framework-data/artifacts/manifests/openstack: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Dec 12 16:24 cr /home/zuul/ci-framework-data/artifacts/manifests/openstack/cr: total 0 /home/zuul/ci-framework-data/artifacts/NetworkManager: total 8 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 331 Dec 12 16:40 ci-private-network.nmconnection -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 178 Dec 12 16:40 ens3.nmconnection /home/zuul/ci-framework-data/artifacts/parameters: total 56 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 1131 Dec 12 16:43 custom-params.yml -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 28013 Dec 12 16:43 install-yamls-params.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 288 Dec 12 16:25 openshift-login-params.yml -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 20288 Dec 12 16:43 zuul-params.yml /home/zuul/ci-framework-data/artifacts/repositories: total 32 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1658 Dec 12 16:24 delorean-antelope-testing.repo -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 5901 Dec 12 16:24 delorean.repo -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 33 Dec 12 16:24 delorean.repo.md5 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 206 Dec 12 16:24 repo-setup-centos-appstream.repo -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 196 Dec 12 16:24 repo-setup-centos-baseos.repo -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 226 Dec 12 16:24 repo-setup-centos-highavailability.repo -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 201 Dec 12 16:24 repo-setup-centos-powertools.repo /home/zuul/ci-framework-data/artifacts/roles: total 0 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:24 install_yamls_makes /home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes: total 20 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 16384 Dec 12 16:43 tasks /home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes/tasks: total 1256 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 790 Dec 12 16:24 make_all.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_ansibleee_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1090 Dec 12 16:24 make_ansibleee_kuttl_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Dec 12 16:24 make_ansibleee_kuttl_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_ansibleee_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_ansibleee_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_ansibleee_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_ansibleee.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1234 Dec 12 16:24 make_attach_default_interface_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1114 Dec 12 16:24 make_attach_default_interface.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_barbican_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1090 Dec 12 16:24 make_barbican_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Dec 12 16:24 make_barbican_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Dec 12 16:24 make_barbican_deploy_validate.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_barbican_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Dec 12 16:24 make_barbican_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_barbican_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_barbican_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 865 Dec 12 16:24 make_barbican.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_baremetal_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_baremetal_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_baremetal.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1219 Dec 12 16:24 make_bmaas_baremetal_net_nad_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1099 Dec 12 16:24 make_bmaas_baremetal_net_nad.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 949 Dec 12 16:24 make_bmaas_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1234 Dec 12 16:24 make_bmaas_crc_attach_network_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1114 Dec 12 16:24 make_bmaas_crc_attach_network.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 1264 Dec 12 16:24 make_bmaas_crc_baremetal_bridge_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1144 Dec 12 16:24 make_bmaas_crc_baremetal_bridge.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1129 Dec 12 16:24 make_bmaas_generate_nodes_yaml.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1069 Dec 12 16:24 make_bmaas_metallb_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 949 Dec 12 16:24 make_bmaas_metallb.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1069 Dec 12 16:24 make_bmaas_network_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 949 Dec 12 16:24 make_bmaas_network.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1444 Dec 12 16:24 make_bmaas_route_crc_and_crc_bmaas_networks_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1324 Dec 12 16:24 make_bmaas_route_crc_and_crc_bmaas_networks.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1174 Dec 12 16:24 make_bmaas_sushy_emulator_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1129 Dec 12 16:24 make_bmaas_sushy_emulator_wait.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1054 Dec 12 16:24 make_bmaas_sushy_emulator.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1129 Dec 12 16:24 make_bmaas_virtual_bms_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1009 Dec 12 16:24 make_bmaas_virtual_bms.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 829 Dec 12 16:24 make_bmaas.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_ceph_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_ceph_help.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Dec 12 16:24 make_ceph.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_certmanager_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_certmanager.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 949 Dec 12 16:24 make_cifmw_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 949 Dec 12 16:24 make_cifmw_prepare.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_cinder_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_cinder_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Dec 12 16:24 make_cinder_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_cinder_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_cinder_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_cinder_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_cinder_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 835 Dec 12 16:24 make_cinder.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Dec 12 16:24 make_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1294 Dec 12 16:24 make_crc_attach_default_interface_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1174 Dec 12 16:24 make_crc_attach_default_interface.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_crc_bmo_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_crc_bmo_setup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 919 Dec 12 16:24 make_crc_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 889 Dec 12 16:24 make_crc_scrub.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1225 Dec 12 16:24 make_crc_storage_cleanup_with_retries.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_crc_storage_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_crc_storage_release.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Dec 12 16:24 make_crc_storage_with_retries.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_crc_storage.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 799 Dec 12 16:24 make_crc.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_designate_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Dec 12 16:24 make_designate_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_designate_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_designate_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_designate_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_designate_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_designate_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_designate.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Dec 12 16:24 make_dns_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_dns_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 895 Dec 12 16:24 make_dns_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 964 Dec 12 16:24 make_download_tools.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1039 Dec 12 16:24 make_edpm_ansible_runner.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1084 Dec 12 16:24 make_edpm_baremetal_compute.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1024 Dec 12 16:24 make_edpm_compute_bootc.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1054 Dec 12 16:24 make_edpm_compute_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1024 Dec 12 16:24 make_edpm_compute_repos.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1009 Dec 12 16:24 make_edpm_computes_bgp.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 934 Dec 12 16:24 make_edpm_compute.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1135 Dec 12 16:24 make_edpm_deploy_baremetal_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_edpm_deploy_baremetal.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_edpm_deploy_cleanup.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 1120 Dec 12 16:24 make_edpm_deploy_generate_keys.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1054 Dec 12 16:24 make_edpm_deploy_instance.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1180 Dec 12 16:24 make_edpm_deploy_networker_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1135 Dec 12 16:24 make_edpm_deploy_networker_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_edpm_deploy_networker.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_edpm_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_edpm_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1084 Dec 12 16:24 make_edpm_networker_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 964 Dec 12 16:24 make_edpm_networker.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Dec 12 16:24 make_edpm_nova_discover_hosts.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1210 Dec 12 16:24 make_edpm_patch_ansible_runner_image.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_edpm_register_dns.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1135 Dec 12 16:24 make_edpm_wait_deploy_baremetal.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_edpm_wait_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_glance_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_glance_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Dec 12 16:24 make_glance_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_glance_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_glance_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_glance_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_glance_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 835 Dec 12 16:24 make_glance.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_heat_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_heat_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_heat_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_heat_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_heat_kuttl_crc.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_heat_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 895 Dec 12 16:24 make_heat_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_heat_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Dec 12 16:24 make_heat.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 814 Dec 12 16:24 make_help.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_horizon_cleanup.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 1075 Dec 12 16:24 make_horizon_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_horizon_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_horizon_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_horizon_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_horizon_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_horizon_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Dec 12 16:24 make_horizon.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_infra_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_infra_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_infra_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 895 Dec 12 16:24 make_infra_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 820 Dec 12 16:24 make_infra.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_input_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 820 Dec 12 16:24 make_input.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 994 Dec 12 16:24 make_ipv6_lab_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1189 Dec 12 16:24 make_ipv6_lab_nat64_router_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1069 Dec 12 16:24 make_ipv6_lab_nat64_router.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1114 Dec 12 16:24 make_ipv6_lab_network_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 994 Dec 12 16:24 make_ipv6_lab_network.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1054 Dec 12 16:24 make_ipv6_lab_sno_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 934 Dec 12 16:24 make_ipv6_lab_sno.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 874 Dec 12 16:24 make_ipv6_lab.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_ironic_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_ironic_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Dec 12 16:24 make_ironic_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_ironic_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_ironic_kuttl_crc.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_ironic_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_ironic_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_ironic_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 835 Dec 12 16:24 make_ironic.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_keystone_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1090 Dec 12 16:24 make_keystone_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Dec 12 16:24 make_keystone_deploy_prep.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_keystone_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Dec 12 16:24 make_keystone_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_keystone_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_keystone_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 865 Dec 12 16:24 make_keystone.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Dec 12 16:24 make_kuttl_common_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_kuttl_common_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_kuttl_db_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_kuttl_db_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_loki_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_loki_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_loki_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Dec 12 16:24 make_loki.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Dec 12 16:24 make_lvms.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_manila_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_manila_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Dec 12 16:24 make_manila_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_manila_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_manila_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_manila_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_manila_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 835 Dec 12 16:24 make_manila.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Dec 12 16:24 make_mariadb_chainsaw_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_mariadb_chainsaw.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_mariadb_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1075 Dec 12 16:24 make_mariadb_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_mariadb_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_mariadb_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_mariadb_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_mariadb_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Dec 12 16:24 make_mariadb.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Dec 12 16:24 make_memcached_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_memcached_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_memcached_deploy.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_metallb_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1075 Dec 12 16:24 make_metallb_config_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_metallb_config.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Dec 12 16:24 make_metallb.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_namespace_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_namespace.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_netattach_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_netattach.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Dec 12 16:24 make_netconfig_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_netconfig_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_netconfig_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_netobserv_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Dec 12 16:24 make_netobserv_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_netobserv_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_netobserv.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1234 Dec 12 16:24 make_network_isolation_bridge_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1114 Dec 12 16:24 make_network_isolation_bridge.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_neutron_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1075 Dec 12 16:24 make_neutron_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_neutron_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_neutron_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_neutron_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_neutron_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_neutron_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Dec 12 16:24 make_neutron.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 919 Dec 12 16:24 make_nfs_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 799 Dec 12 16:24 make_nfs.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Dec 12 16:24 make_nmstate.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_nncp_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Dec 12 16:24 make_nncp.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_nova_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_nova_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_nova_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_nova_deploy.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_nova_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Dec 12 16:24 make_nova.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_octavia_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1075 Dec 12 16:24 make_octavia_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_octavia_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_octavia_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_octavia_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_octavia_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_octavia_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Dec 12 16:24 make_octavia.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_openstack_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1075 Dec 12 16:24 make_openstack_crds_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_openstack_crds.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Dec 12 16:24 make_openstack_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_openstack_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_openstack_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_openstack_init.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_openstack_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_openstack_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1090 Dec 12 16:24 make_openstack_patch_version.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_openstack_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_openstack_repo.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Dec 12 16:24 make_openstack_update_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_openstack_wait_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_openstack_wait.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_openstack.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Dec 12 16:24 make_operator_namespace.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_ovn_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Dec 12 16:24 make_ovn_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_ovn_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 895 Dec 12 16:24 make_ovn_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_ovn_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_ovn_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 865 Dec 12 16:24 make_ovn_prep.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 790 Dec 12 16:24 make_ovn.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_placement_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Dec 12 16:24 make_placement_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_placement_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_placement_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_placement_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_placement_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_placement_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_placement.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_rabbitmq_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1090 Dec 12 16:24 make_rabbitmq_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Dec 12 16:24 make_rabbitmq_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_rabbitmq_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_rabbitmq_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 865 Dec 12 16:24 make_rabbitmq.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Dec 12 16:24 make_redis_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_redis_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_redis_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_rook_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_rook_crc_disk.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_rook_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_rook_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_rook_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Dec 12 16:24 make_rook.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1090 Dec 12 16:24 make_set_slower_etcd_profile.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1024 Dec 12 16:24 make_standalone_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1009 Dec 12 16:24 make_standalone_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1009 Dec 12 16:24 make_standalone_revert.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1039 Dec 12 16:24 make_standalone_snapshot.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 979 Dec 12 16:24 make_standalone_sync.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 904 Dec 12 16:24 make_standalone.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_swift_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Dec 12 16:24 make_swift_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_swift_deploy_prep.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Dec 12 16:24 make_swift_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_swift_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Dec 12 16:24 make_swift_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 895 Dec 12 16:24 make_swift_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 820 Dec 12 16:24 make_swift.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Dec 12 16:24 make_telemetry_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Dec 12 16:24 make_telemetry_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:24 make_telemetry_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Dec 12 16:24 make_telemetry_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Dec 12 16:24 make_telemetry_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_telemetry_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Dec 12 16:24 make_telemetry_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Dec 12 16:24 make_telemetry.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 964 Dec 12 16:24 make_tripleo_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Dec 12 16:24 make_update_services.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Dec 12 16:24 make_update_system.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Dec 12 16:24 make_validate_marketplace.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Dec 12 16:24 make_wait.yml /home/zuul/ci-framework-data/artifacts/yum_repos: total 32 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 1658 Dec 12 16:40 delorean-antelope-testing.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 5901 Dec 12 16:40 delorean.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 33 Dec 12 16:40 delorean.repo.md5 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 206 Dec 12 16:40 repo-setup-centos-appstream.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 196 Dec 12 16:40 repo-setup-centos-baseos.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 226 Dec 12 16:40 repo-setup-centos-highavailability.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 201 Dec 12 16:40 repo-setup-centos-powertools.repo /home/zuul/ci-framework-data/logs: total 300 drwxrwxr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 25 Dec 12 16:43 2025-12-12_16-40 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 156181 Dec 12 16:43 ansible.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 10487 Dec 12 16:43 ci_script_000_copy_logs_from_crc.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 234 Dec 12 16:25 ci_script_000_fetch_openshift.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 117048 Dec 12 16:43 ci_script_000_prepare_root_ssh.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 3676 Dec 12 16:42 ci_script_000_run_openstack_must_gather.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 17 Dec 12 16:25 ci_script_001_login_into_openshift_internal.log drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 32 Dec 12 16:43 crc drwxr-xr-x. 
3 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Dec 12 16:43 openstack-must-gather /home/zuul/ci-framework-data/logs/2025-12-12_16-40: total 156 -rw-rw-rw-. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 156223 Dec 12 16:25 ansible.log /home/zuul/ci-framework-data/logs/crc: total 0 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 18 Dec 12 16:43 crc-logs-artifacts /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts: total 16 drwxr-xr-x. 88 zuul zuul unconfined_u:object_r:user_home_t:s0 12288 Dec 12 16:43 pods /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods: total 12 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 37 Dec 12 16:43 cert-manager_cert-manager-858d87f86b-r7f8q_7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 37 Dec 12 16:43 cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl_7f3690b6-63d7-48cc-9508-e016e3476a99 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 34 Dec 12 16:43 cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt_c184b148-4467-4bd5-8204-6369360370ee drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 35 Dec 12 16:43 cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt_50e025ff-2065-4156-844d-68d8587d7b6c drwxr-xr-x. 6 zuul zuul unconfined_u:object_r:user_home_t:s0 108 Dec 12 16:43 hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87 drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 105 Dec 12 16:43 openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 42 Dec 12 16:43 openshift-apiserver-operator_openshift-apiserver-operator-846cbfc458-zf8cv_e0a1decf-4248-4f48-ba06-e9ec8fdbbea8 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 29 Dec 12 16:43 openshift-authentication_oauth-openshift-6567f5ffdb-jrpfr_5b0a332f-52bd-409b-b5c0-f2723c617bed drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 37 Dec 12 16:43 openshift-authentication-operator_authentication-operator-7f5c659b84-6t92c_d55f43e2-46df-4460-b17f-0daa75b89154 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 64 Dec 12 16:43 openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 152 Dec 12 16:43 openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 38 Dec 12 16:43 openshift-cluster-version_cluster-version-operator-7c9b9cfd6-d85ps_3be77ab3-0638-4ffa-960a-34823c8e08a1 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 60 Dec 12 16:43 openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 21 Dec 12 16:43 openshift-console_console-64d44f6ddf-zhgm9_4651322b-9aec-4667-afa3-1602ad5176fe drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 29 Dec 12 16:43 openshift-console_downloads-747b44746d-sm46g_f967d508-b683-4df4-9be0-3a7fb5afa7bb drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Dec 12 16:43 openshift-console-operator_console-operator-67c89758df-5tw72_65efae24-6623-454c-b665-e5e407e86269 drwxr-xr-x. 
3 zuul zuul unconfined_u:object_r:user_home_t:s0 32 Dec 12 16:43 openshift-controller-manager_controller-manager-7b9f779b68-rhrzf_7313ab95-a89a-4df9-a791-1d048a6beba9 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 51 Dec 12 16:43 openshift-controller-manager-operator_openshift-controller-manager-operator-686468bdd5-xknw6_9cc5b0f4-dc96-4a65-8404-f3d36ad70787 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 40 Dec 12 16:43 openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 31 Dec 12 16:43 openshift-dns_node-resolver-tddhh_72dbaca9-d010-46f5-a645-d2713a98f846 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 49 Dec 12 16:43 openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e drwxr-xr-x. 10 zuul zuul unconfined_u:object_r:user_home_t:s0 156 Dec 12 16:43 openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 27 Dec 12 16:43 openshift-etcd-operator_etcd-operator-69b85846b6-mrrt5_a6c070b2-83ee-4c73-9201-3ab5dcc9aeca drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 45 Dec 12 16:43 openshift-image-registry_cluster-image-registry-operator-86c45576b9-sfm9v_5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 22 Dec 12 16:43 openshift-image-registry_image-registry-5d9d95bf5b-6md9w_b75bc011-274b-4fb1-8311-15ffa1b33366 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 21 Dec 12 16:43 openshift-image-registry_node-ca-2xpcq_ab3d3198-2798-4180-aa5a-a0e495348125 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 38 Dec 12 16:43 openshift-ingress-canary_ingress-canary-tqcqf_47102097-389c-44ce-a25f-6b8d25a70e1d drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 53 Dec 12 16:43 openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 20 Dec 12 16:43 openshift-ingress_router-default-68cf44c8b8-bqttx_1a9ac0b2-cad1-44fa-993c-0ae63193f086 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 23 Dec 12 16:43 openshift-kube-apiserver_installer-12-crc_214aeed8-f6a2-4251-b4d0-c81fd217c7c2 drwxr-xr-x. 8 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Dec 12 16:43 openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 37 Dec 12 16:43 openshift-kube-apiserver-operator_kube-apiserver-operator-575994946d-wff8v_22a6a238-12c9-43ae-afbc-f9595d46e727 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 20 Dec 12 16:43 openshift-kube-apiserver_revision-pruner-11-crc_0ad9be1e-b38d-4280-8a67-505c4461c55d drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 20 Dec 12 16:43 openshift-kube-apiserver_revision-pruner-12-crc_24732491-f54a-410e-a29e-c8fb26fd9cde drwxr-xr-x. 6 zuul zuul unconfined_u:object_r:user_home_t:s0 164 Dec 12 16:43 openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 46 Dec 12 16:43 openshift-kube-controller-manager-operator_kube-controller-manager-operator-69d5f845f8-nsdgk_4c111429-5512-4d9c-898b-d3ec0bdb5d08 drwxr-xr-x. 6 zuul zuul unconfined_u:object_r:user_home_t:s0 130 Dec 12 16:43 openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2 drwxr-xr-x. 
3 zuul zuul unconfined_u:object_r:user_home_t:s0 47 Dec 12 16:43 openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-54f497555d-dcs9d_60d98f7f-99e4-4bb4-a7b6-48de2ff6071c drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 20 Dec 12 16:43 openshift-kube-scheduler_revision-pruner-6-crc_6e33370d-b952-4a48-a6cb-73e765546903 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 50 Dec 12 16:43 openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 52 Dec 12 16:43 openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-565b79b866-krgxf_dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 48 Dec 12 16:43 openshift-machine-api_control-plane-machine-set-operator-75ffdb6fcd-m8gw7_9c49153e-af72-4d2f-8184-fa7ba43a5a3e drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 57 Dec 12 16:43 openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 47 Dec 12 16:43 openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 62 Dec 12 16:43 openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 58 Dec 12 16:43 openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 60 Dec 12 16:43 openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 35 Dec 12 16:43 openshift-machine-config-operator_machine-config-server-nwxp2_62e07220-a49a-4989-8f0a-7eb7daf6fc61 drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 45 Dec 12 16:43 openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728 drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 45 Dec 12 16:43 openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 45 Dec 12 16:43 openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336 drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 77 Dec 12 16:43 openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1 drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 77 Dec 12 16:43 openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 34 Dec 12 16:43 openshift-marketplace_marketplace-operator-547dbd544d-4vhrb_9a0e237f-ebef-42b0-ad96-926e15307914 drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 77 Dec 12 16:43 openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3 drwxr-xr-x. 9 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Dec 12 16:43 openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75 drwxr-xr-x. 
4 zuul zuul unconfined_u:object_r:user_home_t:s0 64 Dec 12 16:43 openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 25 Dec 12 16:43 openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 59 Dec 12 16:43 openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 39 Dec 12 16:43 openshift-network-console_networking-console-plugin-5ff7774fd9-nljh6_6a9ae5f6-97bd-46ac-bafa-ca1b4452a141 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 29 Dec 12 16:43 openshift-network-diagnostics_network-check-source-5bb8f5cd97-xdvz5_f863fff9-286a-45fa-b8f0-8a86994b8440 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 44 Dec 12 16:43 openshift-network-diagnostics_network-check-target-fhkjl_17b87002-b798-480a-8e17-83053d698239 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 37 Dec 12 16:43 openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Dec 12 16:43 openshift-network-operator_iptables-alerter-5jnd7_428b39f5-eb1c-4f65-b7a4-eeb6e84860cc drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Dec 12 16:43 openshift-network-operator_network-operator-7bdcf4f5bd-7fjxv_34177974-8d82-49d2-a763-391d0df3bbd8 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 58 Dec 12 16:43 openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Dec 12 16:43 openshift-operator-lifecycle-manager_catalog-operator-75ff9f647d-4v9cj_5a94df8d-2607-41a1-b1f9-21016895dcd6 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Dec 12 16:43 openshift-operator-lifecycle-manager_collect-profiles-29425935-7hkrm_19e81fea-065e-43b5-8e56-49bfcfa342f7 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Dec 12 16:43 openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh_ab30f5e0-5097-4413-bb3e-fe8ca350378f drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 26 Dec 12 16:43 openshift-operator-lifecycle-manager_olm-operator-5cdf44d969-kcw92_124ec2f9-0e23-47da-b25f-66a13947465e drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 27 Dec 12 16:43 openshift-operator-lifecycle-manager_packageserver-7d4fc7d867-lfwgk_6e354e82-d648-4680-b0c8-e901bfcfbd5f drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 59 Dec 12 16:43 openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 33 Dec 12 16:43 openshift-operators_obo-prometheus-operator-86648f486b-wbj29_18744739-d26e-4056-a036-656151fcc824 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 51 Dec 12 16:43 openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g_c6b5aa8b-142f-4f74-a328-f0937a20672f drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 51 Dec 12 16:43 openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr_bc636fbb-cf50-4a1f-82f5-81db89bb0f5b drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 22 Dec 12 16:43 openshift-operators_observability-operator-78c97476f4-qxqmn_9425bd1f-c734-4ec0-9e2e-80b2d5ece709 drwxr-xr-x. 
3 zuul zuul unconfined_u:object_r:user_home_t:s0 29 Dec 12 16:43 openshift-operators_perses-operator-68bdb49cbf-nqtp8_f38bca5c-15f3-4d63-9c03-a33ec7a5f22b drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 60 Dec 12 16:43 openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9 drwxr-xr-x. 11 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Dec 12 16:43 openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 38 Dec 12 16:43 openshift-route-controller-manager_route-controller-manager-8fdcdbb66-mzfqh_952b1cf6-a983-4b00-bca6-24b95d6bff57 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 33 Dec 12 16:43 openshift-service-ca-operator_service-ca-operator-5b9c976747-9wbcx_8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 35 Dec 12 16:43 openshift-service-ca_service-ca-74545575db-gsm6t_6baa2db5-b688-47dd-8d81-7dadbbbd3759 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 21 Dec 12 16:43 service-telemetry_elastic-operator-6c994c654b-42tmw_1aa11df6-5c2b-4018-8146-09c5d79b9311 drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 99 Dec 12 16:43 service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Dec 12 16:43 service-telemetry_infrawatch-operators-6bs58_6510d065-e486-4274-a8ca-4c2cdb8dd1ae drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Dec 12 16:43 service-telemetry_infrawatch-operators-cdpts_eeed1a9b-f386-4d11-b730-03bcb44f9a55 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-858d87f86b-r7f8q_7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 cert-manager-controller /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-858d87f86b-r7f8q_7b3ac2d2-e3da-4934-b2d6-6e7b3be9afdc/cert-manager-controller: total 12 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 9393 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl_7f3690b6-63d7-48cc-9508-e016e3476a99: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 cert-manager-cainjector /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-7dbf76d5c8-lv2hl_7f3690b6-63d7-48cc-9508-e016e3476a99/cert-manager-cainjector: total 16 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 13419 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt_c184b148-4467-4bd5-8204-6369360370ee: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 cert-manager-webhook /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7894b5b9b4-2kmrt_c184b148-4467-4bd5-8204-6369360370ee/cert-manager-webhook: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 4017 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt_50e025ff-2065-4156-844d-68d8587d7b6c: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 cert-manager-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager-operator_cert-manager-operator-controller-manager-64c74584c4-djdmt_50e025ff-2065-4156-844d-68d8587d7b6c/cert-manager-operator: total 68 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 68096 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 csi-provisioner drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 hostpath-provisioner drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 liveness-probe drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 node-driver-registrar /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/csi-provisioner: total 108 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 108623 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/hostpath-provisioner: total 44 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 45038 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/liveness-probe: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 396 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-59hhc_e0adb788-edae-4099-900e-8af998a81f87/node-driver-registrar: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1504 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 fix-audit-permissions drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 openshift-apiserver drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 openshift-apiserver-check-endpoints /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/fix-audit-permissions: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/openshift-apiserver: total 120 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 121266 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-9ddfb9f55-sg8rq_693e66ed-f826-4819-a47d-f32faf9dab96/openshift-apiserver-check-endpoints: total 32 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 29932 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-846cbfc458-zf8cv_e0a1decf-4248-4f48-ba06-e9ec8fdbbea8: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 openshift-apiserver-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-846cbfc458-zf8cv_e0a1decf-4248-4f48-ba06-e9ec8fdbbea8/openshift-apiserver-operator: total 96 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 95225 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-6567f5ffdb-jrpfr_5b0a332f-52bd-409b-b5c0-f2723c617bed: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 oauth-openshift /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-6567f5ffdb-jrpfr_5b0a332f-52bd-409b-b5c0-f2723c617bed/oauth-openshift: total 16 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 16184 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7f5c659b84-6t92c_d55f43e2-46df-4460-b17f-0daa75b89154: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 authentication-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7f5c659b84-6t92c_d55f43e2-46df-4460-b17f-0daa75b89154/authentication-operator: total 436 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 445913 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 machine-approver-controller /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097/kube-rbac-proxy: total 12 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 8366 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-54c688565-62rws_6f56ef95-299c-4bae-bc46-92e9d8358097/machine-approver-controller: total 68 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 67429 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 cluster-samples-operator drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 cluster-samples-operator-watch -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 d25a5167e83c106fd6aae82bd4f1881d7b1012c90d8673c0eb50d806ecfe8a9d.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator: total 84 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 83322 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-6b564684c8-fzlkp_2a282672-c872-405b-9325-f8f48865334c/cluster-samples-operator-watch: total 4 -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 664 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-7c9b9cfd6-d85ps_3be77ab3-0638-4ffa-960a-34823c8e08a1: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 cluster-version-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-7c9b9cfd6-d85ps_3be77ab3-0638-4ffa-960a-34823c8e08a1/cluster-version-operator: total 3992 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 4085436 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 openshift-api drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 openshift-config-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-api: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-5777786469-49zmj_eb351b5c-811a-4e79-ace2-5d78737aef4c/openshift-config-operator: total 36 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 35930 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_console-64d44f6ddf-zhgm9_4651322b-9aec-4667-afa3-1602ad5176fe: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 console /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_console-64d44f6ddf-zhgm9_4651322b-9aec-4667-afa3-1602ad5176fe/console: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 2901 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_downloads-747b44746d-sm46g_f967d508-b683-4df4-9be0-3a7fb5afa7bb: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 download-server /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_downloads-747b44746d-sm46g_f967d508-b683-4df4-9be0-3a7fb5afa7bb/download-server: total 36 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 35708 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console-operator_console-operator-67c89758df-5tw72_65efae24-6623-454c-b665-e5e407e86269: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 console-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console-operator_console-operator-67c89758df-5tw72_65efae24-6623-454c-b665-e5e407e86269/console-operator: total 120 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 122131 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-7b9f779b68-rhrzf_7313ab95-a89a-4df9-a791-1d048a6beba9: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 controller-manager /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-7b9f779b68-rhrzf_7313ab95-a89a-4df9-a791-1d048a6beba9/controller-manager: total 64 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 65458 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-686468bdd5-xknw6_9cc5b0f4-dc96-4a65-8404-f3d36ad70787: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 openshift-controller-manager-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-686468bdd5-xknw6_9cc5b0f4-dc96-4a65-8404-f3d36ad70787/openshift-controller-manager-operator: total 304 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 309969 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 dns drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/dns: total 32 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 32441 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-rl44g_9dc06dad-6486-4dd5-9456-40ce964abc7f/kube-rbac-proxy: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1040 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_node-resolver-tddhh_72dbaca9-d010-46f5-a645-d2713a98f846: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 dns-node-resolver /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_node-resolver-tddhh_72dbaca9-d010-46f5-a645-d2713a98f846/dns-node-resolver: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 96 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 dns-operator drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/dns-operator: total 16 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 14141 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-799b87ffcd-2w9hn_e1875478-2fa5-47f4-9c0a-13afc9166e8e/kube-rbac-proxy: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1040 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 etcd drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 etcdctl drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 etcd-ensure-env-vars drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 etcd-metrics drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 etcd-readyz drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 etcd-resources-copy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 etcd-rev drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 setup /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd: total 32 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 29005 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcdctl: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-ensure-env-vars: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-metrics: total 20 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 17964 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-readyz: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 240 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-resources-copy: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/etcd-rev: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 124 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_20c5c5b4bed930554494851fe3cb2b2a/setup: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 156 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-69b85846b6-mrrt5_a6c070b2-83ee-4c73-9201-3ab5dcc9aeca: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 etcd-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-69b85846b6-mrrt5_a6c070b2-83ee-4c73-9201-3ab5dcc9aeca/etcd-operator: total 124 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 123379 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-86c45576b9-sfm9v_5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 cluster-image-registry-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-86c45576b9-sfm9v_5274eff7-dc1d-4efb-aee0-4ab77a1dd3d9/cluster-image-registry-operator: total 44 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 44575 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_image-registry-5d9d95bf5b-6md9w_b75bc011-274b-4fb1-8311-15ffa1b33366: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 registry /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_image-registry-5d9d95bf5b-6md9w_b75bc011-274b-4fb1-8311-15ffa1b33366/registry: total 208 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 210780 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_node-ca-2xpcq_ab3d3198-2798-4180-aa5a-a0e495348125: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 node-ca /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_node-ca-2xpcq_ab3d3198-2798-4180-aa5a-a0e495348125/node-ca: total 20 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 16416 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-tqcqf_47102097-389c-44ce-a25f-6b8d25a70e1d: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 serve-healthcheck-canary /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-tqcqf_47102097-389c-44ce-a25f-6b8d25a70e1d/serve-healthcheck-canary: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 2210 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 ingress-operator drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/ingress-operator: total 48 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 48631 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-6b9cb4dbcf-5twrv_338f89a1-1c2f-4e37-9572-c5b13d682ca9/kube-rbac-proxy: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1040 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_router-default-68cf44c8b8-bqttx_1a9ac0b2-cad1-44fa-993c-0ae63193f086: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 router /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_router-default-68cf44c8b8-bqttx_1a9ac0b2-cad1-44fa-993c-0ae63193f086/router: total 20 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 17092 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_214aeed8-f6a2-4251-b4d0-c81fd217c7c2: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 installer /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_214aeed8-f6a2-4251-b4d0-c81fd217c7c2/installer: total 64 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 62917 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-apiserver drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-apiserver-cert-regeneration-controller drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-apiserver-cert-syncer drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-apiserver-check-endpoints drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-apiserver-insecure-readyz drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 setup /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver: total 628 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 641292 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-regeneration-controller: total 20 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 19611 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-cert-syncer: total 8 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 8140 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-check-endpoints: total 20 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 17986 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/kube-apiserver-insecure-readyz: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 116 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_57755cc5f99000cc11e193051474d4e2/setup: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 265 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-575994946d-wff8v_22a6a238-12c9-43ae-afbc-f9595d46e727: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-apiserver-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-575994946d-wff8v_22a6a238-12c9-43ae-afbc-f9595d46e727/kube-apiserver-operator: total 260 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 265784 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-11-crc_0ad9be1e-b38d-4280-8a67-505c4461c55d: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 pruner /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-11-crc_0ad9be1e-b38d-4280-8a67-505c4461c55d/pruner: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1902 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-12-crc_24732491-f54a-410e-a29e-c8fb26fd9cde: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 pruner /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_revision-pruner-12-crc_24732491-f54a-410e-a29e-c8fb26fd9cde/pruner: total 4 -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 1959 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 cluster-policy-controller drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 32 Dec 12 16:43 kube-controller-manager drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-controller-manager-cert-syncer drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-controller-manager-recovery-controller /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/cluster-policy-controller: total 268 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 274009 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager: total 2020 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1091373 Dec 12 16:43 0.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 973677 Dec 12 16:43 1.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-cert-syncer: total 16 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 15126 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_9f0bc7fcb0822a2c13eb2d22cd8c0641/kube-controller-manager-recovery-controller: total 20 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 16776 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-69d5f845f8-nsdgk_4c111429-5512-4d9c-898b-d3ec0bdb5d08: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-controller-manager-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-69d5f845f8-nsdgk_4c111429-5512-4d9c-898b-d3ec0bdb5d08/kube-controller-manager-operator: total 144 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 146104 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-scheduler drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-scheduler-cert-syncer drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-scheduler-recovery-controller drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 wait-for-host-port /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler: total 148 -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 150323 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-cert-syncer: total 12 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 9556 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/kube-scheduler-recovery-controller: total 8 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 7803 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_0b638b8f4bb0070e40528db779baf6a2/wait-for-host-port: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 85 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-54f497555d-dcs9d_60d98f7f-99e4-4bb4-a7b6-48de2ff6071c: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-scheduler-operator-container /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-54f497555d-dcs9d_60d98f7f-99e4-4bb4-a7b6-48de2ff6071c/kube-scheduler-operator-container: total 132 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 133690 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_revision-pruner-6-crc_6e33370d-b952-4a48-a6cb-73e765546903: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 pruner /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_revision-pruner-6-crc_6e33370d-b952-4a48-a6cb-73e765546903/pruner: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1900 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 graceful-termination drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 migrator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/graceful-termination: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 72 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-866fcbc849-6mhsj_2403b973-68b3-4a15-a444-7e271aea91c1/migrator: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1875 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-565b79b866-krgxf_dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-storage-version-migrator-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-565b79b866-krgxf_dd1275f2-1d38-4b18-acdd-8f4f8e6cedf7/kube-storage-version-migrator-operator: total 48 -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 48279 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-75ffdb6fcd-m8gw7_9c49153e-af72-4d2f-8184-fa7ba43a5a3e: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 control-plane-machine-set-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-75ffdb6fcd-m8gw7_9c49153e-af72-4d2f-8184-fa7ba43a5a3e/control-plane-machine-set-operator: total 20 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 17470 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 machine-api-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/kube-rbac-proxy: total 12 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 8366 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-755bb95488-dmjfw_0abafdd2-351e-4f65-9dea-5578d313b760/machine-api-operator: total 20 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 18275 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy-crio drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 setup /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4/kube-rbac-proxy-crio: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1376 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_4e08c320b1e9e2405e6e0107bdf7eeb4/setup: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 101 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 machine-config-controller /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9/kube-rbac-proxy: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1212 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-f9cdd68f7-ndnxt_097ff9f3-52cb-4063-a6a1-0c8178adccc9/machine-config-controller: total 72 -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 70432 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 32 Dec 12 16:43 machine-config-daemon /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/kube-rbac-proxy: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1212 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-qwg8p_5eed03e3-b46f-4ae0-a063-d9a0d64c3a7e/machine-config-daemon: total 144 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 75448 Dec 12 16:43 6.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 67331 Dec 12 16:43 7.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 machine-config-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/kube-rbac-proxy: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1212 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-67c9d58cbb-bg744_1999cfc6-e5a0-4ddb-883d-71f861b286a8/machine-config-operator: total 692 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 707189 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-nwxp2_62e07220-a49a-4989-8f0a-7eb7daf6fc61: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 machine-config-server /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-nwxp2_62e07220-a49a-4989-8f0a-7eb7daf6fc61/machine-config-server: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1040 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 extract drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 pull drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 util /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/extract: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 2146 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/pull: total 72 -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 71478 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aplxm5_86d29eb0-7bf6-47c0-bd9a-c7ae45a7b728/util: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 71 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 extract drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 pull drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 util /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/extract: total 8 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 5285 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/pull: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 2578 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105bc85_475bdfbd-4d7a-4f0b-9483-7ad3811012cf/util: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 71 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 extract drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 pull drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 util /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/extract: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 2197 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/pull: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1956 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep8glx_fd6585e4-c189-4aaf-98f6-4081874d4336/util: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 71 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 extract-content drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 extract-utilities drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 registry-server /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/extract-content: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/extract-utilities: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-psnw2_2d107578-4c5d-4271-a1a7-660aadfab0d1/registry-server: total 2296 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 2349472 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 extract-content drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 extract-utilities drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 registry-server /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/extract-content: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/extract-utilities: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-6jgv5_0b3a2ae2-26c9-4d3a-8ea3-af2fc0de40d8/registry-server: total 4260 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 4358794 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-547dbd544d-4vhrb_9a0e237f-ebef-42b0-ad96-926e15307914: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 marketplace-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-547dbd544d-4vhrb_9a0e237f-ebef-42b0-ad96-926e15307914/marketplace-operator: total 16 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 14467 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 extract-content drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 extract-utilities drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 registry-server /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/extract-content: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/extract-utilities: total 0 -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-wqdb8_c82ddae8-4dc3-4d48-96b1-cd9613cc32c3/registry-server: total 3832 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 3920400 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 bond-cni-plugin drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 cni-plugins drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 egress-router-binary-copy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-multus-additional-cni-plugins drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 routeoverride-cni drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 whereabouts-cni drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 whereabouts-cni-bincopy /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/bond-cni-plugin: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 392 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/cni-plugins: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 404 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/egress-router-binary-copy: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 414 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/kube-multus-additional-cni-plugins: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/routeoverride-cni: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 411 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 80 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-mqfd8_fbe9d4b4-6ed6-4516-a3b9-5aaa9f447f75/whereabouts-cni-bincopy: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 408 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 multus-admission-controller /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/kube-rbac-proxy: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1040 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-69db94689b-xks9x_be106c32-9849-49fd-9e4a-4b5b9c16920a/multus-admission-controller: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1276 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 32 Dec 12 16:43 kube-multus /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-rzhgf_6625166c-6688-498a-81c5-89ec476edef2/kube-multus: total 392 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 271271 Dec 12 16:43 0.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 123064 Dec 12 16:43 1.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 network-metrics-daemon /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/kube-rbac-proxy: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1040 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-jhhcn_4e8bbb2d-9d91-4541-a2d2-891ab81dd883/network-metrics-daemon: total 40 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 39907 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-console_networking-console-plugin-5ff7774fd9-nljh6_6a9ae5f6-97bd-46ac-bafa-ca1b4452a141: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 networking-console-plugin /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-console_networking-console-plugin-5ff7774fd9-nljh6_6a9ae5f6-97bd-46ac-bafa-ca1b4452a141/networking-console-plugin: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 779 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5bb8f5cd97-xdvz5_f863fff9-286a-45fa-b8f0-8a86994b8440: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 check-endpoints /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5bb8f5cd97-xdvz5_f863fff9-286a-45fa-b8f0-8a86994b8440/check-endpoints: total 8 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 5847 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-fhkjl_17b87002-b798-480a-8e17-83053d698239: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 network-check-target-container /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-fhkjl_17b87002-b798-480a-8e17-83053d698239/network-check-target-container: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 61 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 approver drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 webhook /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0/approver: total 16 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 12433 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-dgvkt_fc4541ce-7789-4670-bc75-5c2868e52ce0/webhook: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 3698 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-5jnd7_428b39f5-eb1c-4f65-b7a4-eeb6e84860cc: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 iptables-alerter /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-5jnd7_428b39f5-eb1c-4f65-b7a4-eeb6e84860cc/iptables-alerter: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 120 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_network-operator-7bdcf4f5bd-7fjxv_34177974-8d82-49d2-a763-391d0df3bbd8: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 network-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_network-operator-7bdcf4f5bd-7fjxv_34177974-8d82-49d2-a763-391d0df3bbd8/network-operator: total 848 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 865650 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 fix-audit-permissions drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 oauth-apiserver /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/fix-audit-permissions: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-8596bd845d-njgb5_1bfafc57-4718-4d71-9f69-52b321379a27/oauth-apiserver: total 56 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 53910 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-75ff9f647d-4v9cj_5a94df8d-2607-41a1-b1f9-21016895dcd6: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 catalog-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-75ff9f647d-4v9cj_5a94df8d-2607-41a1-b1f9-21016895dcd6/catalog-operator: total 1040 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1061942 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425935-7hkrm_19e81fea-065e-43b5-8e56-49bfcfa342f7: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 collect-profiles /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425935-7hkrm_19e81fea-065e-43b5-8e56-49bfcfa342f7/collect-profiles: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 273 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh_ab30f5e0-5097-4413-bb3e-fe8ca350378f: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 collect-profiles /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29425950-g52jh_ab30f5e0-5097-4413-bb3e-fe8ca350378f/collect-profiles: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 736 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-5cdf44d969-kcw92_124ec2f9-0e23-47da-b25f-66a13947465e: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 olm-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-5cdf44d969-kcw92_124ec2f9-0e23-47da-b25f-66a13947465e/olm-operator: total 292 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 295194 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-7d4fc7d867-lfwgk_6e354e82-d648-4680-b0c8-e901bfcfbd5f: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 packageserver /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-7d4fc7d867-lfwgk_6e354e82-d648-4680-b0c8-e901bfcfbd5f/packageserver: total 64 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 64362 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 package-server-manager /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/kube-rbac-proxy: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1054 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-77f986bd66-mjzlp_00c7f3b3-f4dd-4d19-9739-512a35f436f5/package-server-manager: total 8 -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 7997 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-86648f486b-wbj29_18744739-d26e-4056-a036-656151fcc824: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 prometheus-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-86648f486b-wbj29_18744739-d26e-4056-a036-656151fcc824/prometheus-operator: total 20 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 19178 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g_c6b5aa8b-142f-4f74-a328-f0937a20672f: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 prometheus-operator-admission-webhook /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-52l2g_c6b5aa8b-142f-4f74-a328-f0937a20672f/prometheus-operator-admission-webhook: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1061 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr_bc636fbb-cf50-4a1f-82f5-81db89bb0f5b: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 prometheus-operator-admission-webhook /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5cd6b88c95-7vbzr_bc636fbb-cf50-4a1f-82f5-81db89bb0f5b/prometheus-operator-admission-webhook: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_observability-operator-78c97476f4-qxqmn_9425bd1f-c734-4ec0-9e2e-80b2d5ece709: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_observability-operator-78c97476f4-qxqmn_9425bd1f-c734-4ec0-9e2e-80b2d5ece709/operator: total 24 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 22494 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_perses-operator-68bdb49cbf-nqtp8_f38bca5c-15f3-4d63-9c03-a33ec7a5f22b: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 perses-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operators_perses-operator-68bdb49cbf-nqtp8_f38bca5c-15f3-4d63-9c03-a33ec7a5f22b/perses-operator: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 3422 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 ovnkube-cluster-manager /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/kube-rbac-proxy: total 4 -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 1183 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-97c9b6c48-w5wsh_9dfc6a17-c67e-4928-96ac-f36d2ba8aac9/ovnkube-cluster-manager: total 40 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 40429 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kubecfg-setup drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy-node drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 kube-rbac-proxy-ovn-metrics drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 nbdb drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 northd drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 ovn-acl-logging drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 ovn-controller drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 ovnkube-controller drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 sbdb /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kubecfg-setup: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-node: total 8 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 4680 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/kube-rbac-proxy-ovn-metrics: total 8 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 4640 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/nbdb: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 2425 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/northd: total 8 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 4519 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-acl-logging: total 32 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 30662 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovn-controller: total 40 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 38714 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/ovnkube-controller: total 2260 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 2311284 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-4pkx2_79c3e1a9-4077-41cc-8987-8284d900106c/sbdb: total 4 -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 2357 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-8fdcdbb66-mzfqh_952b1cf6-a983-4b00-bca6-24b95d6bff57: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 route-controller-manager /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-8fdcdbb66-mzfqh_952b1cf6-a983-4b00-bca6-24b95d6bff57/route-controller-manager: total 24 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 22758 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-5b9c976747-9wbcx_8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 service-ca-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-5b9c976747-9wbcx_8b00dfbb-ff49-4fb2-bf80-0ad5f48198f7/service-ca-operator: total 48 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 48830 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca_service-ca-74545575db-gsm6t_6baa2db5-b688-47dd-8d81-7dadbbbd3759: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 service-ca-controller /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca_service-ca-74545575db-gsm6t_6baa2db5-b688-47dd-8d81-7dadbbbd3759/service-ca-controller: total 44 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 42871 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elastic-operator-6c994c654b-42tmw_1aa11df6-5c2b-4018-8146-09c5d79b9311: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 manager /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elastic-operator-6c994c654b-42tmw_1aa11df6-5c2b-4018-8146-09c5d79b9311/manager: total 148 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 151481 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 elastic-internal-init-filesystem drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 elastic-internal-suspend drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Dec 12 16:43 elasticsearch /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-init-filesystem: total 12 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 10539 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elastic-internal-suspend: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_elasticsearch-es-default-0_8b73b1a4-74b4-4b36-9c02-328f2cc9b99a/elasticsearch: total 52 -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 52753 Dec 12 16:43 0.log /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_infrawatch-operators-6bs58_6510d065-e486-4274-a8ca-4c2cdb8dd1ae: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/service-telemetry_infrawatch-operators-cdpts_eeed1a9b-f386-4d11-b730-03bcb44f9a55: total 0 /home/zuul/ci-framework-data/logs/openstack-must-gather: total 12 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 3336 Dec 12 16:42 event-filter.html lrwxrwxrwx. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 125 Dec 12 16:42 latest -> quay-io-openstack-k8s-operators-openstack-must-gather-sha256-854a802357b4f565a366fce3bf29b20c1b768ec4ab7e822ef52dfc2fef000d2c -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 3194 Dec 12 16:42 must-gather.logs drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 25 Dec 12 16:43 quay-io-openstack-k8s-operators-openstack-must-gather-sha256-854a802357b4f565a366fce3bf29b20c1b768ec4ab7e822ef52dfc2fef000d2c -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 111 Dec 12 16:42 timestamp /home/zuul/ci-framework-data/logs/openstack-must-gather/quay-io-openstack-k8s-operators-openstack-must-gather-sha256-854a802357b4f565a366fce3bf29b20c1b768ec4ab7e822ef52dfc2fef000d2c: total 0 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 0 Dec 12 16:42 gather.logs /home/zuul/ci-framework-data/tmp: total 0 /home/zuul/ci-framework-data/volumes: total 0 home/zuul/zuul-output/logs/README.html0000644000175000017500000000306615117043113016712 0ustar zuulzuul README for CIFMW Logs

Logs of interest

Generated content of interest

home/zuul/zuul-output/logs/installed-pkgs.log0000644000175000017500000004723615117043114020523 0ustar zuulzuulaardvark-dns-1.17.0-1.el9.x86_64 abattis-cantarell-fonts-0.301-4.el9.noarch acl-2.3.1-4.el9.x86_64 adobe-source-code-pro-fonts-2.030.1.050-12.el9.1.noarch alternatives-1.24-2.el9.x86_64 annobin-12.98-1.el9.x86_64 ansible-core-2.14.18-2.el9.x86_64 attr-2.5.1-3.el9.x86_64 audit-3.1.5-7.el9.x86_64 audit-libs-3.1.5-7.el9.x86_64 authselect-1.2.6-3.el9.x86_64 authselect-compat-1.2.6-3.el9.x86_64 authselect-libs-1.2.6-3.el9.x86_64 avahi-libs-0.8-23.el9.x86_64 basesystem-11-13.el9.noarch bash-5.1.8-9.el9.x86_64 bash-completion-2.11-5.el9.noarch binutils-2.35.2-69.el9.x86_64 binutils-gold-2.35.2-69.el9.x86_64 buildah-1.41.3-1.el9.x86_64 bzip2-1.0.8-10.el9.x86_64 bzip2-libs-1.0.8-10.el9.x86_64 ca-certificates-2025.2.80_v9.0.305-91.el9.noarch c-ares-1.19.1-2.el9.x86_64 centos-gpg-keys-9.0-30.el9.noarch centos-logos-90.8-3.el9.x86_64 centos-stream-release-9.0-30.el9.noarch centos-stream-repos-9.0-30.el9.noarch checkpolicy-3.6-1.el9.x86_64 chrony-4.8-1.el9.x86_64 cloud-init-24.4-7.el9.noarch cloud-utils-growpart-0.33-1.el9.x86_64 cmake-filesystem-3.31.8-3.el9.x86_64 cockpit-bridge-348-1.el9.noarch cockpit-system-348-1.el9.noarch cockpit-ws-348-1.el9.x86_64 cockpit-ws-selinux-348-1.el9.x86_64 conmon-2.1.13-1.el9.x86_64 containers-common-1-134.el9.x86_64 containers-common-extra-1-134.el9.x86_64 container-selinux-2.242.0-1.el9.noarch coreutils-8.32-39.el9.x86_64 coreutils-common-8.32-39.el9.x86_64 cpio-2.13-16.el9.x86_64 cpp-11.5.0-14.el9.x86_64 cracklib-2.9.6-27.el9.x86_64 cracklib-dicts-2.9.6-27.el9.x86_64 createrepo_c-0.20.1-4.el9.x86_64 createrepo_c-libs-0.20.1-4.el9.x86_64 criu-3.19-3.el9.x86_64 criu-libs-3.19-3.el9.x86_64 cronie-1.5.7-14.el9.x86_64 cronie-anacron-1.5.7-14.el9.x86_64 crontabs-1.11-26.20190603git.el9.noarch crun-1.24-1.el9.x86_64 crypto-policies-20251126-1.gite9c4db2.el9.noarch crypto-policies-scripts-20251126-1.gite9c4db2.el9.noarch cryptsetup-libs-2.8.1-2.el9.x86_64 curl-7.76.1-38.el9.x86_64 cyrus-sasl-2.1.27-21.el9.x86_64 cyrus-sasl-devel-2.1.27-21.el9.x86_64 cyrus-sasl-gssapi-2.1.27-21.el9.x86_64 cyrus-sasl-lib-2.1.27-21.el9.x86_64 dbus-1.12.20-8.el9.x86_64 dbus-broker-28-7.el9.x86_64 dbus-common-1.12.20-8.el9.noarch dbus-libs-1.12.20-8.el9.x86_64 dbus-tools-1.12.20-8.el9.x86_64 debugedit-5.0-11.el9.x86_64 dejavu-sans-fonts-2.37-18.el9.noarch desktop-file-utils-0.26-6.el9.x86_64 device-mapper-1.02.206-2.el9.x86_64 device-mapper-libs-1.02.206-2.el9.x86_64 dhcp-client-4.4.2-19.b1.el9.x86_64 dhcp-common-4.4.2-19.b1.el9.noarch diffutils-3.7-12.el9.x86_64 dnf-4.14.0-31.el9.noarch dnf-data-4.14.0-31.el9.noarch dnf-plugins-core-4.3.0-24.el9.noarch dracut-057-102.git20250818.el9.x86_64 dracut-config-generic-057-102.git20250818.el9.x86_64 dracut-network-057-102.git20250818.el9.x86_64 dracut-squash-057-102.git20250818.el9.x86_64 dwz-0.16-1.el9.x86_64 e2fsprogs-1.46.5-8.el9.x86_64 e2fsprogs-libs-1.46.5-8.el9.x86_64 ed-1.14.2-12.el9.x86_64 efi-srpm-macros-6-4.el9.noarch elfutils-0.194-1.el9.x86_64 elfutils-debuginfod-client-0.194-1.el9.x86_64 elfutils-default-yama-scope-0.194-1.el9.noarch elfutils-libelf-0.194-1.el9.x86_64 elfutils-libs-0.194-1.el9.x86_64 emacs-filesystem-27.2-18.el9.noarch enchant-1.6.0-30.el9.x86_64 ethtool-6.15-2.el9.x86_64 expat-2.5.0-5.el9.x86_64 expect-5.45.4-16.el9.x86_64 file-5.39-16.el9.x86_64 file-libs-5.39-16.el9.x86_64 filesystem-3.16-5.el9.x86_64 findutils-4.8.0-7.el9.x86_64 fonts-filesystem-2.0.5-7.el9.1.noarch fonts-srpm-macros-2.0.5-7.el9.1.noarch 
fuse3-3.10.2-9.el9.x86_64 fuse3-libs-3.10.2-9.el9.x86_64 fuse-common-3.10.2-9.el9.x86_64 fuse-libs-2.9.9-17.el9.x86_64 fuse-overlayfs-1.16-1.el9.x86_64 gawk-5.1.0-6.el9.x86_64 gawk-all-langpacks-5.1.0-6.el9.x86_64 gcc-11.5.0-14.el9.x86_64 gcc-c++-11.5.0-14.el9.x86_64 gcc-plugin-annobin-11.5.0-14.el9.x86_64 gdb-minimal-16.3-2.el9.x86_64 gdbm-libs-1.23-1.el9.x86_64 gdisk-1.0.7-5.el9.x86_64 gdk-pixbuf2-2.42.6-6.el9.x86_64 geolite2-city-20191217-6.el9.noarch geolite2-country-20191217-6.el9.noarch gettext-0.21-8.el9.x86_64 gettext-libs-0.21-8.el9.x86_64 ghc-srpm-macros-1.5.0-6.el9.noarch git-2.47.3-1.el9.x86_64 git-core-2.47.3-1.el9.x86_64 git-core-doc-2.47.3-1.el9.noarch glib2-2.68.4-18.el9.x86_64 glibc-2.34-244.el9.x86_64 glibc-common-2.34-244.el9.x86_64 glibc-devel-2.34-244.el9.x86_64 glibc-gconv-extra-2.34-244.el9.x86_64 glibc-headers-2.34-244.el9.x86_64 glibc-langpack-en-2.34-244.el9.x86_64 glib-networking-2.68.3-3.el9.x86_64 gmp-6.2.0-13.el9.x86_64 gnupg2-2.3.3-4.el9.x86_64 gnutls-3.8.10-1.el9.x86_64 gobject-introspection-1.68.0-11.el9.x86_64 go-srpm-macros-3.8.1-1.el9.noarch gpgme-1.15.1-6.el9.x86_64 gpg-pubkey-8483c65d-5ccc5b19 grep-3.6-5.el9.x86_64 groff-base-1.22.4-10.el9.x86_64 grub2-common-2.06-120.el9.noarch grub2-pc-2.06-120.el9.x86_64 grub2-pc-modules-2.06-120.el9.noarch grub2-tools-2.06-120.el9.x86_64 grub2-tools-minimal-2.06-120.el9.x86_64 grubby-8.40-69.el9.x86_64 gsettings-desktop-schemas-40.0-8.el9.x86_64 gssproxy-0.8.4-7.el9.x86_64 gzip-1.12-1.el9.x86_64 hostname-3.23-6.el9.x86_64 hunspell-1.7.0-11.el9.x86_64 hunspell-en-GB-0.20140811.1-20.el9.noarch hunspell-en-US-0.20140811.1-20.el9.noarch hunspell-filesystem-1.7.0-11.el9.x86_64 hwdata-0.348-9.20.el9.noarch ima-evm-utils-1.6.2-2.el9.x86_64 info-6.7-15.el9.x86_64 inih-49-6.el9.x86_64 initscripts-rename-device-10.11.8-4.el9.x86_64 initscripts-service-10.11.8-4.el9.noarch ipcalc-1.0.0-5.el9.x86_64 iproute-6.17.0-1.el9.x86_64 iproute-tc-6.17.0-1.el9.x86_64 iptables-libs-1.8.10-11.el9.x86_64 iptables-nft-1.8.10-11.el9.x86_64 iptables-nft-services-1.8.10-11.el9.noarch iputils-20210202-15.el9.x86_64 irqbalance-1.9.4-5.el9.x86_64 jansson-2.14-1.el9.x86_64 jq-1.6-19.el9.x86_64 json-c-0.14-11.el9.x86_64 json-glib-1.6.6-1.el9.x86_64 kbd-2.4.0-11.el9.x86_64 kbd-legacy-2.4.0-11.el9.noarch kbd-misc-2.4.0-11.el9.noarch kernel-5.14.0-648.el9.x86_64 kernel-core-5.14.0-648.el9.x86_64 kernel-headers-5.14.0-648.el9.x86_64 kernel-modules-5.14.0-648.el9.x86_64 kernel-modules-core-5.14.0-648.el9.x86_64 kernel-srpm-macros-1.0-14.el9.noarch kernel-tools-5.14.0-648.el9.x86_64 kernel-tools-libs-5.14.0-648.el9.x86_64 kexec-tools-2.0.29-12.el9.x86_64 keyutils-1.6.3-1.el9.x86_64 keyutils-libs-1.6.3-1.el9.x86_64 kmod-28-11.el9.x86_64 kmod-libs-28-11.el9.x86_64 kpartx-0.8.7-39.el9.x86_64 krb5-libs-1.21.1-8.el9.x86_64 langpacks-core-en_GB-3.0-16.el9.noarch langpacks-core-font-en-3.0-16.el9.noarch langpacks-en_GB-3.0-16.el9.noarch less-590-6.el9.x86_64 libacl-2.3.1-4.el9.x86_64 libappstream-glib-0.7.18-5.el9.x86_64 libarchive-3.5.3-6.el9.x86_64 libassuan-2.5.5-3.el9.x86_64 libattr-2.5.1-3.el9.x86_64 libbasicobjects-0.1.1-53.el9.x86_64 libblkid-2.37.4-21.el9.x86_64 libbpf-1.5.0-2.el9.x86_64 libbrotli-1.0.9-7.el9.x86_64 libcap-2.48-10.el9.x86_64 libcap-ng-0.8.2-7.el9.x86_64 libcbor-0.7.0-5.el9.x86_64 libcollection-0.7.0-53.el9.x86_64 libcom_err-1.46.5-8.el9.x86_64 libcomps-0.1.18-1.el9.x86_64 libcurl-7.76.1-38.el9.x86_64 libdaemon-0.14-23.el9.x86_64 libdb-5.3.28-57.el9.x86_64 libdhash-0.5.0-53.el9.x86_64 libdnf-0.69.0-16.el9.x86_64 
libeconf-0.4.1-4.el9.x86_64 libedit-3.1-38.20210216cvs.el9.x86_64 libestr-0.1.11-4.el9.x86_64 libev-4.33-6.el9.x86_64 libevent-2.1.12-8.el9.x86_64 libfastjson-0.99.9-5.el9.x86_64 libfdisk-2.37.4-21.el9.x86_64 libffi-3.4.2-8.el9.x86_64 libffi-devel-3.4.2-8.el9.x86_64 libfido2-1.13.0-2.el9.x86_64 libgcc-11.5.0-14.el9.x86_64 libgcrypt-1.10.0-11.el9.x86_64 libgomp-11.5.0-14.el9.x86_64 libgpg-error-1.42-5.el9.x86_64 libgpg-error-devel-1.42-5.el9.x86_64 libibverbs-57.0-2.el9.x86_64 libicu-67.1-10.el9.x86_64 libidn2-2.3.0-7.el9.x86_64 libini_config-1.3.1-53.el9.x86_64 libjpeg-turbo-2.0.90-7.el9.x86_64 libkcapi-1.4.0-2.el9.x86_64 libkcapi-hmaccalc-1.4.0-2.el9.x86_64 libksba-1.5.1-7.el9.x86_64 libldb-4.23.3-1.el9.x86_64 libmaxminddb-1.5.2-4.el9.x86_64 libmnl-1.0.4-16.el9.x86_64 libmodulemd-2.13.0-2.el9.x86_64 libmount-2.37.4-21.el9.x86_64 libmpc-1.2.1-4.el9.x86_64 libndp-1.9-1.el9.x86_64 libnet-1.2-7.el9.x86_64 libnetfilter_conntrack-1.0.9-1.el9.x86_64 libnfnetlink-1.0.1-23.el9.x86_64 libnfsidmap-2.5.4-39.el9.x86_64 libnftnl-1.2.6-4.el9.x86_64 libnghttp2-1.43.0-6.el9.x86_64 libnl3-3.11.0-1.el9.x86_64 libnl3-cli-3.11.0-1.el9.x86_64 libnsl2-2.0.0-1.el9.x86_64 libpath_utils-0.2.1-53.el9.x86_64 libpcap-1.10.0-4.el9.x86_64 libpipeline-1.5.3-4.el9.x86_64 libpkgconf-1.7.3-10.el9.x86_64 libpng-1.6.37-12.el9.x86_64 libproxy-0.4.15-35.el9.x86_64 libproxy-webkitgtk4-0.4.15-35.el9.x86_64 libpsl-0.21.1-5.el9.x86_64 libpwquality-1.4.4-8.el9.x86_64 libref_array-0.1.5-53.el9.x86_64 librepo-1.14.5-3.el9.x86_64 libreport-filesystem-2.15.2-6.el9.noarch libseccomp-2.5.2-2.el9.x86_64 libselinux-3.6-3.el9.x86_64 libselinux-utils-3.6-3.el9.x86_64 libsemanage-3.6-5.el9.x86_64 libsepol-3.6-3.el9.x86_64 libsigsegv-2.13-4.el9.x86_64 libslirp-4.4.0-8.el9.x86_64 libsmartcols-2.37.4-21.el9.x86_64 libsolv-0.7.24-3.el9.x86_64 libsoup-2.72.0-10.el9.x86_64 libss-1.46.5-8.el9.x86_64 libssh-0.10.4-15.el9.x86_64 libssh-config-0.10.4-15.el9.noarch libsss_certmap-2.9.7-5.el9.x86_64 libsss_idmap-2.9.7-5.el9.x86_64 libsss_nss_idmap-2.9.7-5.el9.x86_64 libsss_sudo-2.9.7-5.el9.x86_64 libstdc++-11.5.0-14.el9.x86_64 libstdc++-devel-11.5.0-14.el9.x86_64 libstemmer-0-18.585svn.el9.x86_64 libsysfs-2.1.1-11.el9.x86_64 libtalloc-2.4.3-1.el9.x86_64 libtasn1-4.16.0-9.el9.x86_64 libtdb-1.4.14-1.el9.x86_64 libteam-1.31-16.el9.x86_64 libtevent-0.17.1-1.el9.x86_64 libtirpc-1.3.3-9.el9.x86_64 libtool-ltdl-2.4.6-46.el9.x86_64 libunistring-0.9.10-15.el9.x86_64 liburing-2.12-1.el9.x86_64 libuser-0.63-17.el9.x86_64 libutempter-1.2.1-6.el9.x86_64 libuuid-2.37.4-21.el9.x86_64 libverto-0.3.2-3.el9.x86_64 libverto-libev-0.3.2-3.el9.x86_64 libvirt-libs-11.9.0-1.el9.x86_64 libwbclient-4.23.3-1.el9.x86_64 libxcrypt-4.4.18-3.el9.x86_64 libxcrypt-compat-4.4.18-3.el9.x86_64 libxcrypt-devel-4.4.18-3.el9.x86_64 libxml2-2.9.13-14.el9.x86_64 libxml2-devel-2.9.13-14.el9.x86_64 libxslt-1.1.34-12.el9.x86_64 libxslt-devel-1.1.34-12.el9.x86_64 libyaml-0.2.5-7.el9.x86_64 libzstd-1.5.5-1.el9.x86_64 llvm-filesystem-21.1.3-1.el9.x86_64 llvm-libs-21.1.3-1.el9.x86_64 lmdb-libs-0.9.29-3.el9.x86_64 logrotate-3.18.0-12.el9.x86_64 lshw-B.02.20-3.el9.x86_64 lsscsi-0.32-6.el9.x86_64 lua-libs-5.4.4-4.el9.x86_64 lua-srpm-macros-1-6.el9.noarch lz4-libs-1.9.3-5.el9.x86_64 lzo-2.10-7.el9.x86_64 make-4.3-8.el9.x86_64 man-db-2.9.3-9.el9.x86_64 microcode_ctl-20251111-1.el9.noarch mpdecimal-2.5.1-3.el9.x86_64 mpfr-4.1.0-7.el9.x86_64 ncurses-6.2-12.20210508.el9.x86_64 ncurses-base-6.2-12.20210508.el9.noarch ncurses-c++-libs-6.2-12.20210508.el9.x86_64 ncurses-devel-6.2-12.20210508.el9.x86_64 
ncurses-libs-6.2-12.20210508.el9.x86_64 netavark-1.16.0-1.el9.x86_64 nettle-3.10.1-1.el9.x86_64 NetworkManager-1.54.2-1.el9.x86_64 NetworkManager-libnm-1.54.2-1.el9.x86_64 NetworkManager-team-1.54.2-1.el9.x86_64 NetworkManager-tui-1.54.2-1.el9.x86_64 newt-0.52.21-11.el9.x86_64 nfs-utils-2.5.4-39.el9.x86_64 nftables-1.0.9-5.el9.x86_64 npth-1.6-8.el9.x86_64 numactl-libs-2.0.19-3.el9.x86_64 ocaml-srpm-macros-6-6.el9.noarch oddjob-0.34.7-7.el9.x86_64 oddjob-mkhomedir-0.34.7-7.el9.x86_64 oniguruma-6.9.6-1.el9.6.x86_64 openblas-srpm-macros-2-11.el9.noarch openldap-2.6.8-4.el9.x86_64 openldap-devel-2.6.8-4.el9.x86_64 openssh-9.9p1-2.el9.x86_64 openssh-clients-9.9p1-2.el9.x86_64 openssh-server-9.9p1-2.el9.x86_64 openssl-3.5.1-6.el9.x86_64 openssl-devel-3.5.1-6.el9.x86_64 openssl-fips-provider-3.5.1-6.el9.x86_64 openssl-libs-3.5.1-6.el9.x86_64 os-prober-1.77-12.el9.x86_64 p11-kit-0.25.10-1.el9.x86_64 p11-kit-trust-0.25.10-1.el9.x86_64 pam-1.5.1-26.el9.x86_64 parted-3.5-3.el9.x86_64 passt-0^20250512.g8ec1341-2.el9.x86_64 passt-selinux-0^20250512.g8ec1341-2.el9.noarch passwd-0.80-12.el9.x86_64 patch-2.7.6-16.el9.x86_64 pciutils-libs-3.7.0-7.el9.x86_64 pcre2-10.40-6.el9.x86_64 pcre2-syntax-10.40-6.el9.noarch pcre-8.44-4.el9.x86_64 perl-AutoLoader-5.74-483.el9.noarch perl-B-1.80-483.el9.x86_64 perl-base-2.27-483.el9.noarch perl-Carp-1.50-460.el9.noarch perl-Class-Struct-0.66-483.el9.noarch perl-constant-1.33-461.el9.noarch perl-Data-Dumper-2.174-462.el9.x86_64 perl-Digest-1.19-4.el9.noarch perl-Digest-MD5-2.58-4.el9.x86_64 perl-DynaLoader-1.47-483.el9.x86_64 perl-Encode-3.08-462.el9.x86_64 perl-Errno-1.30-483.el9.x86_64 perl-Error-0.17029-7.el9.noarch perl-Exporter-5.74-461.el9.noarch perl-Fcntl-1.13-483.el9.x86_64 perl-File-Basename-2.85-483.el9.noarch perl-File-Find-1.37-483.el9.noarch perl-FileHandle-2.03-483.el9.noarch perl-File-Path-2.18-4.el9.noarch perl-File-stat-1.09-483.el9.noarch perl-File-Temp-0.231.100-4.el9.noarch perl-Getopt-Long-2.52-4.el9.noarch perl-Getopt-Std-1.12-483.el9.noarch perl-Git-2.47.3-1.el9.noarch perl-HTTP-Tiny-0.076-462.el9.noarch perl-if-0.60.800-483.el9.noarch perl-interpreter-5.32.1-483.el9.x86_64 perl-IO-1.43-483.el9.x86_64 perl-IO-Socket-IP-0.41-5.el9.noarch perl-IO-Socket-SSL-2.073-2.el9.noarch perl-IPC-Open3-1.21-483.el9.noarch perl-lib-0.65-483.el9.x86_64 perl-libnet-3.13-4.el9.noarch perl-libs-5.32.1-483.el9.x86_64 perl-MIME-Base64-3.16-4.el9.x86_64 perl-Mozilla-CA-20200520-6.el9.noarch perl-mro-1.23-483.el9.x86_64 perl-NDBM_File-1.15-483.el9.x86_64 perl-Net-SSLeay-1.94-3.el9.x86_64 perl-overload-1.31-483.el9.noarch perl-overloading-0.02-483.el9.noarch perl-parent-0.238-460.el9.noarch perl-PathTools-3.78-461.el9.x86_64 perl-Pod-Escapes-1.07-460.el9.noarch perl-podlators-4.14-460.el9.noarch perl-Pod-Perldoc-3.28.01-461.el9.noarch perl-Pod-Simple-3.42-4.el9.noarch perl-Pod-Usage-2.01-4.el9.noarch perl-POSIX-1.94-483.el9.x86_64 perl-Scalar-List-Utils-1.56-462.el9.x86_64 perl-SelectSaver-1.02-483.el9.noarch perl-Socket-2.031-4.el9.x86_64 perl-srpm-macros-1-41.el9.noarch perl-Storable-3.21-460.el9.x86_64 perl-subs-1.03-483.el9.noarch perl-Symbol-1.08-483.el9.noarch perl-Term-ANSIColor-5.01-461.el9.noarch perl-Term-Cap-1.17-460.el9.noarch perl-TermReadKey-2.38-11.el9.x86_64 perl-Text-ParseWords-3.30-460.el9.noarch perl-Text-Tabs+Wrap-2013.0523-460.el9.noarch perl-Time-Local-1.300-7.el9.noarch perl-URI-5.09-3.el9.noarch perl-vars-1.05-483.el9.noarch pigz-2.5-4.el9.x86_64 pkgconf-1.7.3-10.el9.x86_64 pkgconf-m4-1.7.3-10.el9.noarch pkgconf-pkg-config-1.7.3-10.el9.x86_64 
podman-5.6.0-2.el9.x86_64 policycoreutils-3.6-3.el9.x86_64 policycoreutils-python-utils-3.6-3.el9.noarch polkit-0.117-14.el9.x86_64 polkit-libs-0.117-14.el9.x86_64 polkit-pkla-compat-0.1-21.el9.x86_64 popt-1.18-8.el9.x86_64 prefixdevname-0.1.0-8.el9.x86_64 procps-ng-3.3.17-14.el9.x86_64 protobuf-c-1.3.3-13.el9.x86_64 psmisc-23.4-3.el9.x86_64 publicsuffix-list-dafsa-20210518-3.el9.noarch pyproject-srpm-macros-1.16.2-1.el9.noarch python3.12-3.12.12-1.el9.x86_64 python3.12-libs-3.12.12-1.el9.x86_64 python3.12-pip-23.2.1-5.el9.noarch python3.12-pip-wheel-23.2.1-5.el9.noarch python3.12-setuptools-68.2.2-5.el9.noarch python3-3.9.25-2.el9.x86_64 python3-attrs-20.3.0-7.el9.noarch python3-audit-3.1.5-7.el9.x86_64 python3-babel-2.9.1-2.el9.noarch python3-cffi-1.14.5-5.el9.x86_64 python3-chardet-4.0.0-5.el9.noarch python3-configobj-5.0.6-25.el9.noarch python3-cryptography-36.0.1-5.el9.x86_64 python3-dasbus-1.7-1.el9.noarch python3-dateutil-2.9.0.post0-1.el9.noarch python3-dbus-1.2.18-2.el9.x86_64 python3-devel-3.9.25-2.el9.x86_64 python3-distro-1.5.0-7.el9.noarch python3-dnf-4.14.0-31.el9.noarch python3-dnf-plugins-core-4.3.0-24.el9.noarch python3-enchant-3.2.0-5.el9.noarch python3-file-magic-5.39-16.el9.noarch python3-gobject-base-3.40.1-6.el9.x86_64 python3-gobject-base-noarch-3.40.1-6.el9.noarch python3-gpg-1.15.1-6.el9.x86_64 python3-hawkey-0.69.0-16.el9.x86_64 python3-idna-2.10-7.el9.1.noarch python3-jinja2-2.11.3-8.el9.noarch python3-jmespath-1.0.1-1.el9.noarch python3-jsonpatch-1.21-16.el9.noarch python3-jsonpointer-2.0-4.el9.noarch python3-jsonschema-3.2.0-13.el9.noarch python3-libcomps-0.1.18-1.el9.x86_64 python3-libdnf-0.69.0-16.el9.x86_64 python3-libs-3.9.25-2.el9.x86_64 python3-libselinux-3.6-3.el9.x86_64 python3-libsemanage-3.6-5.el9.x86_64 python3-libvirt-11.9.0-1.el9.x86_64 python3-libxml2-2.9.13-14.el9.x86_64 python3-lxml-4.6.5-3.el9.x86_64 python3-markupsafe-1.1.1-12.el9.x86_64 python3-netaddr-0.10.1-3.el9.noarch python3-netifaces-0.10.6-15.el9.x86_64 python3-oauthlib-3.1.1-5.el9.noarch python3-packaging-20.9-5.el9.noarch python3-pexpect-4.8.0-7.el9.noarch python3-pip-21.3.1-1.el9.noarch python3-pip-wheel-21.3.1-1.el9.noarch python3-ply-3.11-14.el9.noarch python3-policycoreutils-3.6-3.el9.noarch python3-prettytable-0.7.2-27.el9.noarch python3-ptyprocess-0.6.0-12.el9.noarch python3-pycparser-2.20-6.el9.noarch python3-pyparsing-2.4.7-9.el9.noarch python3-pyrsistent-0.17.3-8.el9.x86_64 python3-pyserial-3.4-12.el9.noarch python3-pysocks-1.7.1-12.el9.noarch python3-pytz-2021.1-5.el9.noarch python3-pyyaml-5.4.1-6.el9.x86_64 python3-requests-2.25.1-10.el9.noarch python3-resolvelib-0.5.4-5.el9.noarch python3-rpm-4.16.1.3-40.el9.x86_64 python3-rpm-generators-12-9.el9.noarch python3-rpm-macros-3.9-54.el9.noarch python3-setools-4.4.4-1.el9.x86_64 python3-setuptools-53.0.0-15.el9.noarch python3-setuptools-wheel-53.0.0-15.el9.noarch python3-six-1.15.0-9.el9.noarch python3-systemd-234-19.el9.x86_64 python3-urllib3-1.26.5-6.el9.noarch python-rpm-macros-3.9-54.el9.noarch python-srpm-macros-3.9-54.el9.noarch python-unversioned-command-3.9.25-2.el9.noarch qemu-guest-agent-10.1.0-7.el9.x86_64 qt5-srpm-macros-5.15.9-1.el9.noarch quota-4.09-4.el9.x86_64 quota-nls-4.09-4.el9.noarch readline-8.1-4.el9.x86_64 readline-devel-8.1-4.el9.x86_64 redhat-rpm-config-210-1.el9.noarch rootfiles-8.1-35.el9.noarch rpcbind-1.2.6-7.el9.x86_64 rpm-4.16.1.3-40.el9.x86_64 rpm-build-4.16.1.3-40.el9.x86_64 rpm-build-libs-4.16.1.3-40.el9.x86_64 rpm-libs-4.16.1.3-40.el9.x86_64 rpmlint-1.11-19.el9.noarch 
rpm-plugin-audit-4.16.1.3-40.el9.x86_64 rpm-plugin-selinux-4.16.1.3-40.el9.x86_64 rpm-plugin-systemd-inhibit-4.16.1.3-40.el9.x86_64 rpm-sign-4.16.1.3-40.el9.x86_64 rpm-sign-libs-4.16.1.3-40.el9.x86_64 rsync-3.2.5-4.el9.x86_64 rsyslog-8.2510.0-2.el9.x86_64 rsyslog-logrotate-8.2510.0-2.el9.x86_64 ruby-3.0.7-165.el9.x86_64 ruby-default-gems-3.0.7-165.el9.noarch ruby-devel-3.0.7-165.el9.x86_64 rubygem-bigdecimal-3.0.0-165.el9.x86_64 rubygem-bundler-2.2.33-165.el9.noarch rubygem-io-console-0.5.7-165.el9.x86_64 rubygem-json-2.5.1-165.el9.x86_64 rubygem-psych-3.3.2-165.el9.x86_64 rubygem-rdoc-6.3.4.1-165.el9.noarch rubygems-3.2.33-165.el9.noarch ruby-libs-3.0.7-165.el9.x86_64 rust-srpm-macros-17-4.el9.noarch samba-client-libs-4.23.3-1.el9.x86_64 samba-common-4.23.3-1.el9.noarch samba-common-libs-4.23.3-1.el9.x86_64 sed-4.8-9.el9.x86_64 selinux-policy-38.1.69-1.el9.noarch selinux-policy-targeted-38.1.69-1.el9.noarch setroubleshoot-plugins-3.3.14-4.el9.noarch setroubleshoot-server-3.3.35-2.el9.x86_64 setup-2.13.7-10.el9.noarch sg3_utils-1.47-10.el9.x86_64 sg3_utils-libs-1.47-10.el9.x86_64 shadow-utils-4.9-15.el9.x86_64 shadow-utils-subid-4.9-15.el9.x86_64 shared-mime-info-2.1-5.el9.x86_64 slang-2.3.2-11.el9.x86_64 slirp4netns-1.3.3-1.el9.x86_64 snappy-1.1.8-8.el9.x86_64 sos-4.10.1-1.el9.noarch sqlite-libs-3.34.1-9.el9.x86_64 squashfs-tools-4.4-10.git1.el9.x86_64 sscg-4.0.3-2.el9.x86_64 sshpass-1.09-4.el9.x86_64 sssd-client-2.9.7-5.el9.x86_64 sssd-common-2.9.7-5.el9.x86_64 sssd-kcm-2.9.7-5.el9.x86_64 sssd-nfs-idmap-2.9.7-5.el9.x86_64 sudo-1.9.5p2-13.el9.x86_64 systemd-252-59.el9.x86_64 systemd-devel-252-59.el9.x86_64 systemd-libs-252-59.el9.x86_64 systemd-pam-252-59.el9.x86_64 systemd-rpm-macros-252-59.el9.noarch systemd-udev-252-59.el9.x86_64 tar-1.34-7.el9.x86_64 tcl-8.6.10-7.el9.x86_64 tcpdump-4.99.0-9.el9.x86_64 teamd-1.31-16.el9.x86_64 time-1.9-18.el9.x86_64 tmux-3.2a-5.el9.x86_64 tpm2-tss-3.2.3-1.el9.x86_64 traceroute-2.1.1-1.el9.x86_64 tzdata-2025b-2.el9.noarch unzip-6.0-59.el9.x86_64 userspace-rcu-0.12.1-6.el9.x86_64 util-linux-2.37.4-21.el9.x86_64 util-linux-core-2.37.4-21.el9.x86_64 vim-minimal-8.2.2637-23.el9.x86_64 webkit2gtk3-jsc-2.50.3-1.el9.x86_64 wget-1.21.1-8.el9.x86_64 which-2.21-30.el9.x86_64 xfsprogs-6.4.0-7.el9.x86_64 xz-5.2.5-8.el9.x86_64 xz-devel-5.2.5-8.el9.x86_64 xz-libs-5.2.5-8.el9.x86_64 yajl-2.1.0-25.el9.x86_64 yum-4.14.0-31.el9.noarch yum-utils-4.3.0-24.el9.noarch zip-3.0-35.el9.x86_64 zlib-1.2.11-41.el9.x86_64 zlib-devel-1.2.11-41.el9.x86_64 zstd-1.5.5-1.el9.x86_64 home/zuul/zuul-output/logs/python.log0000644000175000017500000000223615117043115017113 0ustar zuulzuulPython 3.9.25 pip 25.3 from /home/zuul/.local/lib/python3.12/site-packages/pip (python 3.12) ansible [core 2.17.8] config file = /etc/ansible/ansible.cfg configured module search path = ['/home/zuul/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /home/zuul/.local/lib/python3.12/site-packages/ansible ansible collection location = /home/zuul/.ansible/collections:/usr/share/ansible/collections executable location = /home/zuul/.local/bin/ansible python version = 3.12.12 (main, Nov 14 2025, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-14)] (/usr/bin/python3.12) jinja version = 3.1.6 libyaml = True ansible-core==2.17.8 cachetools==6.2.2 certifi==2025.11.12 cffi==2.0.0 charset-normalizer==3.4.4 cryptography==46.0.3 google-auth==2.43.0 idna==3.11 Jinja2==3.1.6 kubernetes==24.2.0 MarkupSafe==3.0.3 oauthlib==3.2.2 openshift==0.13.1 packaging==25.0 pyasn1==0.6.1 
pyasn1_modules==0.4.2 pycparser==2.23 python-dateutil==2.9.0.post0 python-string-utils==1.0.0 PyYAML==6.0.3 requests==2.32.4 requests-oauthlib==1.3.0 resolvelib==1.0.1 rsa==4.9.1 setuptools==68.2.2 six==1.17.0 urllib3==2.6.2 websocket-client==1.9.0 home/zuul/zuul-output/logs/dmesg.log0000644000175000017500000015204515117043115016675 0ustar zuulzuul[Fri Dec 12 16:08:57 2025] Linux version 5.14.0-648.el9.x86_64 (mockbuild@x86-05.stream.rdu2.redhat.com) (gcc (GCC) 11.5.0 20240719 (Red Hat 11.5.0-14), GNU ld version 2.35.2-69.el9) #1 SMP PREEMPT_DYNAMIC Fri Dec 5 11:18:23 UTC 2025 [Fri Dec 12 16:08:57 2025] The list of certified hardware and cloud instances for Red Hat Enterprise Linux 9 can be viewed at the Red Hat Ecosystem Catalog, https://catalog.redhat.com. [Fri Dec 12 16:08:57 2025] Command line: BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 root=UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266 ro console=ttyS0,115200n8 no_timer_check net.ifnames=0 crashkernel=1G-2G:192M,2G-64G:256M,64G-:512M [Fri Dec 12 16:08:57 2025] BIOS-provided physical RAM map: [Fri Dec 12 16:08:57 2025] BIOS-e820: [mem 0x0000000000000000-0x000000000009fbff] usable [Fri Dec 12 16:08:57 2025] BIOS-e820: [mem 0x000000000009fc00-0x000000000009ffff] reserved [Fri Dec 12 16:08:57 2025] BIOS-e820: [mem 0x00000000000f0000-0x00000000000fffff] reserved [Fri Dec 12 16:08:57 2025] BIOS-e820: [mem 0x0000000000100000-0x00000000bffdafff] usable [Fri Dec 12 16:08:57 2025] BIOS-e820: [mem 0x00000000bffdb000-0x00000000bfffffff] reserved [Fri Dec 12 16:08:57 2025] BIOS-e820: [mem 0x00000000feffc000-0x00000000feffffff] reserved [Fri Dec 12 16:08:57 2025] BIOS-e820: [mem 0x00000000fffc0000-0x00000000ffffffff] reserved [Fri Dec 12 16:08:57 2025] BIOS-e820: [mem 0x0000000100000000-0x000000023fffffff] usable [Fri Dec 12 16:08:57 2025] NX (Execute Disable) protection: active [Fri Dec 12 16:08:57 2025] APIC: Static calls initialized [Fri Dec 12 16:08:57 2025] SMBIOS 2.8 present. 
[Fri Dec 12 16:08:57 2025] DMI: OpenStack Foundation OpenStack Nova, BIOS 1.15.0-1 04/01/2014 [Fri Dec 12 16:08:57 2025] Hypervisor detected: KVM [Fri Dec 12 16:08:57 2025] kvm-clock: Using msrs 4b564d01 and 4b564d00 [Fri Dec 12 16:08:57 2025] kvm-clock: using sched offset of 3255165391 cycles [Fri Dec 12 16:08:57 2025] clocksource: kvm-clock: mask: 0xffffffffffffffff max_cycles: 0x1cd42e4dffb, max_idle_ns: 881590591483 ns [Fri Dec 12 16:08:57 2025] tsc: Detected 2799.998 MHz processor [Fri Dec 12 16:08:57 2025] e820: update [mem 0x00000000-0x00000fff] usable ==> reserved [Fri Dec 12 16:08:57 2025] e820: remove [mem 0x000a0000-0x000fffff] usable [Fri Dec 12 16:08:57 2025] last_pfn = 0x240000 max_arch_pfn = 0x400000000 [Fri Dec 12 16:08:57 2025] MTRR map: 4 entries (3 fixed + 1 variable; max 19), built from 8 variable MTRRs [Fri Dec 12 16:08:57 2025] x86/PAT: Configuration [0-7]: WB WC UC- UC WB WP UC- WT [Fri Dec 12 16:08:57 2025] last_pfn = 0xbffdb max_arch_pfn = 0x400000000 [Fri Dec 12 16:08:57 2025] found SMP MP-table at [mem 0x000f5ae0-0x000f5aef] [Fri Dec 12 16:08:57 2025] Using GB pages for direct mapping [Fri Dec 12 16:08:57 2025] RAMDISK: [mem 0x2d46a000-0x32a2cfff] [Fri Dec 12 16:08:57 2025] ACPI: Early table checksum verification disabled [Fri Dec 12 16:08:57 2025] ACPI: RSDP 0x00000000000F5AA0 000014 (v00 BOCHS ) [Fri Dec 12 16:08:57 2025] ACPI: RSDT 0x00000000BFFE16BD 000030 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Fri Dec 12 16:08:57 2025] ACPI: FACP 0x00000000BFFE1571 000074 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Fri Dec 12 16:08:57 2025] ACPI: DSDT 0x00000000BFFDFC80 0018F1 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Fri Dec 12 16:08:57 2025] ACPI: FACS 0x00000000BFFDFC40 000040 [Fri Dec 12 16:08:57 2025] ACPI: APIC 0x00000000BFFE15E5 0000B0 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Fri Dec 12 16:08:57 2025] ACPI: WAET 0x00000000BFFE1695 000028 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Fri Dec 12 16:08:57 2025] ACPI: Reserving FACP table memory at [mem 0xbffe1571-0xbffe15e4] [Fri Dec 12 16:08:57 2025] ACPI: Reserving DSDT table memory at [mem 0xbffdfc80-0xbffe1570] [Fri Dec 12 16:08:57 2025] ACPI: Reserving FACS table memory at [mem 0xbffdfc40-0xbffdfc7f] [Fri Dec 12 16:08:57 2025] ACPI: Reserving APIC table memory at [mem 0xbffe15e5-0xbffe1694] [Fri Dec 12 16:08:57 2025] ACPI: Reserving WAET table memory at [mem 0xbffe1695-0xbffe16bc] [Fri Dec 12 16:08:57 2025] No NUMA configuration found [Fri Dec 12 16:08:57 2025] Faking a node at [mem 0x0000000000000000-0x000000023fffffff] [Fri Dec 12 16:08:57 2025] NODE_DATA(0) allocated [mem 0x23ffd5000-0x23fffffff] [Fri Dec 12 16:08:57 2025] crashkernel reserved: 0x00000000af000000 - 0x00000000bf000000 (256 MB) [Fri Dec 12 16:08:57 2025] Zone ranges: [Fri Dec 12 16:08:57 2025] DMA [mem 0x0000000000001000-0x0000000000ffffff] [Fri Dec 12 16:08:57 2025] DMA32 [mem 0x0000000001000000-0x00000000ffffffff] [Fri Dec 12 16:08:57 2025] Normal [mem 0x0000000100000000-0x000000023fffffff] [Fri Dec 12 16:08:57 2025] Device empty [Fri Dec 12 16:08:57 2025] Movable zone start for each node [Fri Dec 12 16:08:57 2025] Early memory node ranges [Fri Dec 12 16:08:57 2025] node 0: [mem 0x0000000000001000-0x000000000009efff] [Fri Dec 12 16:08:57 2025] node 0: [mem 0x0000000000100000-0x00000000bffdafff] [Fri Dec 12 16:08:57 2025] node 0: [mem 0x0000000100000000-0x000000023fffffff] [Fri Dec 12 16:08:57 2025] Initmem setup node 0 [mem 0x0000000000001000-0x000000023fffffff] [Fri Dec 12 16:08:57 2025] On node 0, zone DMA: 1 pages in unavailable ranges [Fri Dec 12 
16:08:57 2025] On node 0, zone DMA: 97 pages in unavailable ranges [Fri Dec 12 16:08:57 2025] On node 0, zone Normal: 37 pages in unavailable ranges [Fri Dec 12 16:08:57 2025] ACPI: PM-Timer IO Port: 0x608 [Fri Dec 12 16:08:57 2025] ACPI: LAPIC_NMI (acpi_id[0xff] dfl dfl lint[0x1]) [Fri Dec 12 16:08:57 2025] IOAPIC[0]: apic_id 0, version 17, address 0xfec00000, GSI 0-23 [Fri Dec 12 16:08:57 2025] ACPI: INT_SRC_OVR (bus 0 bus_irq 0 global_irq 2 dfl dfl) [Fri Dec 12 16:08:57 2025] ACPI: INT_SRC_OVR (bus 0 bus_irq 5 global_irq 5 high level) [Fri Dec 12 16:08:57 2025] ACPI: INT_SRC_OVR (bus 0 bus_irq 9 global_irq 9 high level) [Fri Dec 12 16:08:57 2025] ACPI: INT_SRC_OVR (bus 0 bus_irq 10 global_irq 10 high level) [Fri Dec 12 16:08:57 2025] ACPI: INT_SRC_OVR (bus 0 bus_irq 11 global_irq 11 high level) [Fri Dec 12 16:08:57 2025] ACPI: Using ACPI (MADT) for SMP configuration information [Fri Dec 12 16:08:57 2025] TSC deadline timer available [Fri Dec 12 16:08:57 2025] CPU topo: Max. logical packages: 8 [Fri Dec 12 16:08:57 2025] CPU topo: Max. logical dies: 8 [Fri Dec 12 16:08:57 2025] CPU topo: Max. dies per package: 1 [Fri Dec 12 16:08:57 2025] CPU topo: Max. threads per core: 1 [Fri Dec 12 16:08:57 2025] CPU topo: Num. cores per package: 1 [Fri Dec 12 16:08:57 2025] CPU topo: Num. threads per package: 1 [Fri Dec 12 16:08:57 2025] CPU topo: Allowing 8 present CPUs plus 0 hotplug CPUs [Fri Dec 12 16:08:57 2025] kvm-guest: APIC: eoi() replaced with kvm_guest_apic_eoi_write() [Fri Dec 12 16:08:57 2025] PM: hibernation: Registered nosave memory: [mem 0x00000000-0x00000fff] [Fri Dec 12 16:08:57 2025] PM: hibernation: Registered nosave memory: [mem 0x0009f000-0x0009ffff] [Fri Dec 12 16:08:57 2025] PM: hibernation: Registered nosave memory: [mem 0x000a0000-0x000effff] [Fri Dec 12 16:08:57 2025] PM: hibernation: Registered nosave memory: [mem 0x000f0000-0x000fffff] [Fri Dec 12 16:08:57 2025] PM: hibernation: Registered nosave memory: [mem 0xbffdb000-0xbfffffff] [Fri Dec 12 16:08:57 2025] PM: hibernation: Registered nosave memory: [mem 0xc0000000-0xfeffbfff] [Fri Dec 12 16:08:57 2025] PM: hibernation: Registered nosave memory: [mem 0xfeffc000-0xfeffffff] [Fri Dec 12 16:08:57 2025] PM: hibernation: Registered nosave memory: [mem 0xff000000-0xfffbffff] [Fri Dec 12 16:08:57 2025] PM: hibernation: Registered nosave memory: [mem 0xfffc0000-0xffffffff] [Fri Dec 12 16:08:57 2025] [mem 0xc0000000-0xfeffbfff] available for PCI devices [Fri Dec 12 16:08:57 2025] Booting paravirtualized kernel on KVM [Fri Dec 12 16:08:57 2025] clocksource: refined-jiffies: mask: 0xffffffff max_cycles: 0xffffffff, max_idle_ns: 1910969940391419 ns [Fri Dec 12 16:08:57 2025] setup_percpu: NR_CPUS:8192 nr_cpumask_bits:8 nr_cpu_ids:8 nr_node_ids:1 [Fri Dec 12 16:08:57 2025] percpu: Embedded 64 pages/cpu s225280 r8192 d28672 u262144 [Fri Dec 12 16:08:57 2025] pcpu-alloc: s225280 r8192 d28672 u262144 alloc=1*2097152 [Fri Dec 12 16:08:57 2025] pcpu-alloc: [0] 0 1 2 3 4 5 6 7 [Fri Dec 12 16:08:57 2025] kvm-guest: PV spinlocks disabled, no host support [Fri Dec 12 16:08:57 2025] Kernel command line: BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 root=UUID=cbdedf45-ed1d-4952-82a8-33a12c0ba266 ro console=ttyS0,115200n8 no_timer_check net.ifnames=0 crashkernel=1G-2G:192M,2G-64G:256M,64G-:512M [Fri Dec 12 16:08:57 2025] Unknown kernel command line parameters "BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64", will be passed to user space. 
[Fri Dec 12 16:08:57 2025] random: crng init done [Fri Dec 12 16:08:57 2025] Dentry cache hash table entries: 1048576 (order: 11, 8388608 bytes, linear) [Fri Dec 12 16:08:57 2025] Inode-cache hash table entries: 524288 (order: 10, 4194304 bytes, linear) [Fri Dec 12 16:08:57 2025] Fallback order for Node 0: 0 [Fri Dec 12 16:08:57 2025] Built 1 zonelists, mobility grouping on. Total pages: 2064091 [Fri Dec 12 16:08:57 2025] Policy zone: Normal [Fri Dec 12 16:08:57 2025] mem auto-init: stack:off, heap alloc:off, heap free:off [Fri Dec 12 16:08:57 2025] software IO TLB: area num 8. [Fri Dec 12 16:08:57 2025] SLUB: HWalign=64, Order=0-3, MinObjects=0, CPUs=8, Nodes=1 [Fri Dec 12 16:08:57 2025] ftrace: allocating 49357 entries in 193 pages [Fri Dec 12 16:08:57 2025] ftrace: allocated 193 pages with 3 groups [Fri Dec 12 16:08:57 2025] Dynamic Preempt: voluntary [Fri Dec 12 16:08:57 2025] rcu: Preemptible hierarchical RCU implementation. [Fri Dec 12 16:08:57 2025] rcu: RCU event tracing is enabled. [Fri Dec 12 16:08:57 2025] rcu: RCU restricting CPUs from NR_CPUS=8192 to nr_cpu_ids=8. [Fri Dec 12 16:08:57 2025] Trampoline variant of Tasks RCU enabled. [Fri Dec 12 16:08:57 2025] Rude variant of Tasks RCU enabled. [Fri Dec 12 16:08:57 2025] Tracing variant of Tasks RCU enabled. [Fri Dec 12 16:08:57 2025] rcu: RCU calculated value of scheduler-enlistment delay is 100 jiffies. [Fri Dec 12 16:08:57 2025] rcu: Adjusting geometry for rcu_fanout_leaf=16, nr_cpu_ids=8 [Fri Dec 12 16:08:57 2025] RCU Tasks: Setting shift to 3 and lim to 1 rcu_task_cb_adjust=1 rcu_task_cpu_ids=8. [Fri Dec 12 16:08:57 2025] RCU Tasks Rude: Setting shift to 3 and lim to 1 rcu_task_cb_adjust=1 rcu_task_cpu_ids=8. [Fri Dec 12 16:08:57 2025] RCU Tasks Trace: Setting shift to 3 and lim to 1 rcu_task_cb_adjust=1 rcu_task_cpu_ids=8. [Fri Dec 12 16:08:57 2025] NR_IRQS: 524544, nr_irqs: 488, preallocated irqs: 16 [Fri Dec 12 16:08:57 2025] rcu: srcu_init: Setting srcu_struct sizes based on contention. [Fri Dec 12 16:08:57 2025] kfence: initialized - using 2097152 bytes for 255 objects at 0x(____ptrval____)-0x(____ptrval____) [Fri Dec 12 16:08:57 2025] Console: colour VGA+ 80x25 [Fri Dec 12 16:08:57 2025] printk: console [ttyS0] enabled [Fri Dec 12 16:08:57 2025] ACPI: Core revision 20230331 [Fri Dec 12 16:08:57 2025] APIC: Switch to symmetric I/O mode setup [Fri Dec 12 16:08:57 2025] x2apic enabled [Fri Dec 12 16:08:57 2025] APIC: Switched APIC routing to: physical x2apic [Fri Dec 12 16:08:57 2025] tsc: Marking TSC unstable due to TSCs unsynchronized [Fri Dec 12 16:08:57 2025] Calibrating delay loop (skipped) preset value.. 
5599.99 BogoMIPS (lpj=2799998) [Fri Dec 12 16:08:57 2025] x86/cpu: User Mode Instruction Prevention (UMIP) activated [Fri Dec 12 16:08:57 2025] Last level iTLB entries: 4KB 512, 2MB 255, 4MB 127 [Fri Dec 12 16:08:57 2025] Last level dTLB entries: 4KB 512, 2MB 255, 4MB 127, 1GB 0 [Fri Dec 12 16:08:57 2025] Spectre V1 : Mitigation: usercopy/swapgs barriers and __user pointer sanitization [Fri Dec 12 16:08:57 2025] Spectre V2 : Mitigation: Retpolines [Fri Dec 12 16:08:57 2025] Spectre V2 : Spectre v2 / SpectreRSB: Filling RSB on context switch and VMEXIT [Fri Dec 12 16:08:57 2025] Spectre V2 : Enabling Speculation Barrier for firmware calls [Fri Dec 12 16:08:57 2025] RETBleed: Mitigation: untrained return thunk [Fri Dec 12 16:08:57 2025] Spectre V2 : mitigation: Enabling conditional Indirect Branch Prediction Barrier [Fri Dec 12 16:08:57 2025] Speculative Store Bypass: Mitigation: Speculative Store Bypass disabled via prctl [Fri Dec 12 16:08:57 2025] Speculative Return Stack Overflow: IBPB-extending microcode not applied! [Fri Dec 12 16:08:57 2025] Speculative Return Stack Overflow: WARNING: See https://kernel.org/doc/html/latest/admin-guide/hw-vuln/srso.html for mitigation options. [Fri Dec 12 16:08:57 2025] x86/bugs: return thunk changed [Fri Dec 12 16:08:57 2025] Speculative Return Stack Overflow: Vulnerable: Safe RET, no microcode [Fri Dec 12 16:08:57 2025] x86/fpu: Supporting XSAVE feature 0x001: 'x87 floating point registers' [Fri Dec 12 16:08:57 2025] x86/fpu: Supporting XSAVE feature 0x002: 'SSE registers' [Fri Dec 12 16:08:57 2025] x86/fpu: Supporting XSAVE feature 0x004: 'AVX registers' [Fri Dec 12 16:08:57 2025] x86/fpu: xstate_offset[2]: 576, xstate_sizes[2]: 256 [Fri Dec 12 16:08:57 2025] x86/fpu: Enabled xstate features 0x7, context size is 832 bytes, using 'compacted' format. [Fri Dec 12 16:08:57 2025] Freeing SMP alternatives memory: 40K [Fri Dec 12 16:08:57 2025] pid_max: default: 32768 minimum: 301 [Fri Dec 12 16:08:57 2025] LSM: initializing lsm=lockdown,capability,landlock,yama,integrity,selinux,bpf [Fri Dec 12 16:08:57 2025] landlock: Up and running. [Fri Dec 12 16:08:57 2025] Yama: becoming mindful. [Fri Dec 12 16:08:57 2025] SELinux: Initializing. [Fri Dec 12 16:08:57 2025] LSM support for eBPF active [Fri Dec 12 16:08:57 2025] Mount-cache hash table entries: 16384 (order: 5, 131072 bytes, linear) [Fri Dec 12 16:08:57 2025] Mountpoint-cache hash table entries: 16384 (order: 5, 131072 bytes, linear) [Fri Dec 12 16:08:57 2025] smpboot: CPU0: AMD EPYC-Rome Processor (family: 0x17, model: 0x31, stepping: 0x0) [Fri Dec 12 16:08:57 2025] Performance Events: Fam17h+ core perfctr, AMD PMU driver. [Fri Dec 12 16:08:57 2025] ... version: 0 [Fri Dec 12 16:08:57 2025] ... bit width: 48 [Fri Dec 12 16:08:57 2025] ... generic registers: 6 [Fri Dec 12 16:08:57 2025] ... value mask: 0000ffffffffffff [Fri Dec 12 16:08:57 2025] ... max period: 00007fffffffffff [Fri Dec 12 16:08:57 2025] ... fixed-purpose events: 0 [Fri Dec 12 16:08:57 2025] ... event mask: 000000000000003f [Fri Dec 12 16:08:57 2025] signal: max sigframe size: 1776 [Fri Dec 12 16:08:57 2025] rcu: Hierarchical SRCU implementation. [Fri Dec 12 16:08:57 2025] rcu: Max phase no-delay instances is 400. [Fri Dec 12 16:08:57 2025] smp: Bringing up secondary CPUs ... [Fri Dec 12 16:08:57 2025] smpboot: x86: Booting SMP configuration: [Fri Dec 12 16:08:57 2025] .... 
node #0, CPUs: #1 #2 #3 #4 #5 #6 #7 [Fri Dec 12 16:08:57 2025] smp: Brought up 1 node, 8 CPUs [Fri Dec 12 16:08:57 2025] smpboot: Total of 8 processors activated (44799.96 BogoMIPS) [Fri Dec 12 16:08:57 2025] node 0 deferred pages initialised in 9ms [Fri Dec 12 16:08:57 2025] Memory: 7763972K/8388068K available (16384K kernel code, 5795K rwdata, 13916K rodata, 4192K init, 7164K bss, 618220K reserved, 0K cma-reserved) [Fri Dec 12 16:08:57 2025] devtmpfs: initialized [Fri Dec 12 16:08:57 2025] x86/mm: Memory block size: 128MB [Fri Dec 12 16:08:57 2025] clocksource: jiffies: mask: 0xffffffff max_cycles: 0xffffffff, max_idle_ns: 1911260446275000 ns [Fri Dec 12 16:08:57 2025] futex hash table entries: 2048 (131072 bytes on 1 NUMA nodes, total 128 KiB, linear). [Fri Dec 12 16:08:57 2025] pinctrl core: initialized pinctrl subsystem [Fri Dec 12 16:08:57 2025] NET: Registered PF_NETLINK/PF_ROUTE protocol family [Fri Dec 12 16:08:57 2025] DMA: preallocated 1024 KiB GFP_KERNEL pool for atomic allocations [Fri Dec 12 16:08:57 2025] DMA: preallocated 1024 KiB GFP_KERNEL|GFP_DMA pool for atomic allocations [Fri Dec 12 16:08:57 2025] DMA: preallocated 1024 KiB GFP_KERNEL|GFP_DMA32 pool for atomic allocations [Fri Dec 12 16:08:57 2025] audit: initializing netlink subsys (disabled) [Fri Dec 12 16:08:57 2025] audit: type=2000 audit(1765555737.418:1): state=initialized audit_enabled=0 res=1 [Fri Dec 12 16:08:57 2025] thermal_sys: Registered thermal governor 'fair_share' [Fri Dec 12 16:08:57 2025] thermal_sys: Registered thermal governor 'step_wise' [Fri Dec 12 16:08:57 2025] thermal_sys: Registered thermal governor 'user_space' [Fri Dec 12 16:08:57 2025] cpuidle: using governor menu [Fri Dec 12 16:08:57 2025] acpiphp: ACPI Hot Plug PCI Controller Driver version: 0.5 [Fri Dec 12 16:08:57 2025] PCI: Using configuration type 1 for base access [Fri Dec 12 16:08:57 2025] PCI: Using configuration type 1 for extended access [Fri Dec 12 16:08:57 2025] kprobes: kprobe jump-optimization is enabled. All kprobes are optimized if possible. 
[Fri Dec 12 16:08:57 2025] HugeTLB: registered 1.00 GiB page size, pre-allocated 0 pages [Fri Dec 12 16:08:57 2025] HugeTLB: 16380 KiB vmemmap can be freed for a 1.00 GiB page [Fri Dec 12 16:08:57 2025] HugeTLB: registered 2.00 MiB page size, pre-allocated 0 pages [Fri Dec 12 16:08:57 2025] HugeTLB: 28 KiB vmemmap can be freed for a 2.00 MiB page [Fri Dec 12 16:08:57 2025] Demotion targets for Node 0: null [Fri Dec 12 16:08:57 2025] cryptd: max_cpu_qlen set to 1000 [Fri Dec 12 16:08:57 2025] ACPI: Added _OSI(Module Device) [Fri Dec 12 16:08:57 2025] ACPI: Added _OSI(Processor Device) [Fri Dec 12 16:08:57 2025] ACPI: Added _OSI(3.0 _SCP Extensions) [Fri Dec 12 16:08:57 2025] ACPI: Added _OSI(Processor Aggregator Device) [Fri Dec 12 16:08:57 2025] ACPI: 1 ACPI AML tables successfully acquired and loaded [Fri Dec 12 16:08:57 2025] ACPI: _OSC evaluation for CPUs failed, trying _PDC [Fri Dec 12 16:08:57 2025] ACPI: Interpreter enabled [Fri Dec 12 16:08:57 2025] ACPI: PM: (supports S0 S3 S4 S5) [Fri Dec 12 16:08:57 2025] ACPI: Using IOAPIC for interrupt routing [Fri Dec 12 16:08:57 2025] PCI: Using host bridge windows from ACPI; if necessary, use "pci=nocrs" and report a bug [Fri Dec 12 16:08:57 2025] PCI: Using E820 reservations for host bridge windows [Fri Dec 12 16:08:57 2025] ACPI: Enabled 2 GPEs in block 00 to 0F [Fri Dec 12 16:08:57 2025] ACPI: PCI Root Bridge [PCI0] (domain 0000 [bus 00-ff]) [Fri Dec 12 16:08:57 2025] acpi PNP0A03:00: _OSC: OS supports [ExtendedConfig ASPM ClockPM Segments MSI EDR HPX-Type3] [Fri Dec 12 16:08:57 2025] acpiphp: Slot [3] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [4] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [5] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [6] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [7] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [8] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [9] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [10] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [11] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [12] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [13] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [14] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [15] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [16] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [17] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [18] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [19] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [20] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [21] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [22] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [23] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [24] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [25] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [26] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [27] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [28] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [29] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [30] registered [Fri Dec 12 16:08:57 2025] acpiphp: Slot [31] registered [Fri Dec 12 16:08:57 2025] PCI host bridge to bus 0000:00 [Fri Dec 12 16:08:57 2025] pci_bus 0000:00: root bus resource [io 0x0000-0x0cf7 window] [Fri Dec 12 16:08:57 2025] pci_bus 0000:00: root bus resource [io 0x0d00-0xffff window] [Fri Dec 12 16:08:57 2025] pci_bus 0000:00: root bus resource [mem 0x000a0000-0x000bffff window] [Fri Dec 12 16:08:57 2025] pci_bus 
0000:00: root bus resource [mem 0xc0000000-0xfebfffff window] [Fri Dec 12 16:08:57 2025] pci_bus 0000:00: root bus resource [mem 0x240000000-0x2bfffffff window] [Fri Dec 12 16:08:57 2025] pci_bus 0000:00: root bus resource [bus 00-ff] [Fri Dec 12 16:08:57 2025] pci 0000:00:00.0: [8086:1237] type 00 class 0x060000 conventional PCI endpoint [Fri Dec 12 16:08:57 2025] pci 0000:00:01.0: [8086:7000] type 00 class 0x060100 conventional PCI endpoint [Fri Dec 12 16:08:57 2025] pci 0000:00:01.1: [8086:7010] type 00 class 0x010180 conventional PCI endpoint [Fri Dec 12 16:08:57 2025] pci 0000:00:01.1: BAR 4 [io 0xc140-0xc14f] [Fri Dec 12 16:08:57 2025] pci 0000:00:01.1: BAR 0 [io 0x01f0-0x01f7]: legacy IDE quirk [Fri Dec 12 16:08:57 2025] pci 0000:00:01.1: BAR 1 [io 0x03f6]: legacy IDE quirk [Fri Dec 12 16:08:57 2025] pci 0000:00:01.1: BAR 2 [io 0x0170-0x0177]: legacy IDE quirk [Fri Dec 12 16:08:57 2025] pci 0000:00:01.1: BAR 3 [io 0x0376]: legacy IDE quirk [Fri Dec 12 16:08:57 2025] pci 0000:00:01.2: [8086:7020] type 00 class 0x0c0300 conventional PCI endpoint [Fri Dec 12 16:08:57 2025] pci 0000:00:01.2: BAR 4 [io 0xc100-0xc11f] [Fri Dec 12 16:08:57 2025] pci 0000:00:01.3: [8086:7113] type 00 class 0x068000 conventional PCI endpoint [Fri Dec 12 16:08:57 2025] pci 0000:00:01.3: quirk: [io 0x0600-0x063f] claimed by PIIX4 ACPI [Fri Dec 12 16:08:57 2025] pci 0000:00:01.3: quirk: [io 0x0700-0x070f] claimed by PIIX4 SMB [Fri Dec 12 16:08:57 2025] pci 0000:00:02.0: [1af4:1050] type 00 class 0x030000 conventional PCI endpoint [Fri Dec 12 16:08:57 2025] pci 0000:00:02.0: BAR 0 [mem 0xfe000000-0xfe7fffff pref] [Fri Dec 12 16:08:57 2025] pci 0000:00:02.0: BAR 2 [mem 0xfe800000-0xfe803fff 64bit pref] [Fri Dec 12 16:08:57 2025] pci 0000:00:02.0: BAR 4 [mem 0xfeb90000-0xfeb90fff] [Fri Dec 12 16:08:57 2025] pci 0000:00:02.0: ROM [mem 0xfeb80000-0xfeb8ffff pref] [Fri Dec 12 16:08:57 2025] pci 0000:00:02.0: Video device with shadowed ROM at [mem 0x000c0000-0x000dffff] [Fri Dec 12 16:08:57 2025] pci 0000:00:03.0: [1af4:1000] type 00 class 0x020000 conventional PCI endpoint [Fri Dec 12 16:08:57 2025] pci 0000:00:03.0: BAR 0 [io 0xc080-0xc0bf] [Fri Dec 12 16:08:57 2025] pci 0000:00:03.0: BAR 1 [mem 0xfeb91000-0xfeb91fff] [Fri Dec 12 16:08:57 2025] pci 0000:00:03.0: BAR 4 [mem 0xfe804000-0xfe807fff 64bit pref] [Fri Dec 12 16:08:57 2025] pci 0000:00:03.0: ROM [mem 0xfeb00000-0xfeb7ffff pref] [Fri Dec 12 16:08:57 2025] pci 0000:00:04.0: [1af4:1001] type 00 class 0x010000 conventional PCI endpoint [Fri Dec 12 16:08:57 2025] pci 0000:00:04.0: BAR 0 [io 0xc000-0xc07f] [Fri Dec 12 16:08:57 2025] pci 0000:00:04.0: BAR 1 [mem 0xfeb92000-0xfeb92fff] [Fri Dec 12 16:08:57 2025] pci 0000:00:04.0: BAR 4 [mem 0xfe808000-0xfe80bfff 64bit pref] [Fri Dec 12 16:08:57 2025] pci 0000:00:05.0: [1af4:1002] type 00 class 0x00ff00 conventional PCI endpoint [Fri Dec 12 16:08:57 2025] pci 0000:00:05.0: BAR 0 [io 0xc0c0-0xc0ff] [Fri Dec 12 16:08:57 2025] pci 0000:00:05.0: BAR 4 [mem 0xfe80c000-0xfe80ffff 64bit pref] [Fri Dec 12 16:08:57 2025] pci 0000:00:06.0: [1af4:1005] type 00 class 0x00ff00 conventional PCI endpoint [Fri Dec 12 16:08:57 2025] pci 0000:00:06.0: BAR 0 [io 0xc120-0xc13f] [Fri Dec 12 16:08:57 2025] pci 0000:00:06.0: BAR 4 [mem 0xfe810000-0xfe813fff 64bit pref] [Fri Dec 12 16:08:57 2025] ACPI: PCI: Interrupt link LNKA configured for IRQ 10 [Fri Dec 12 16:08:57 2025] ACPI: PCI: Interrupt link LNKB configured for IRQ 10 [Fri Dec 12 16:08:57 2025] ACPI: PCI: Interrupt link LNKC configured for IRQ 11 [Fri Dec 12 16:08:57 2025] ACPI: 
PCI: Interrupt link LNKD configured for IRQ 11 [Fri Dec 12 16:08:57 2025] ACPI: PCI: Interrupt link LNKS configured for IRQ 9 [Fri Dec 12 16:08:57 2025] iommu: Default domain type: Translated [Fri Dec 12 16:08:57 2025] iommu: DMA domain TLB invalidation policy: lazy mode [Fri Dec 12 16:08:57 2025] SCSI subsystem initialized [Fri Dec 12 16:08:57 2025] ACPI: bus type USB registered [Fri Dec 12 16:08:57 2025] usbcore: registered new interface driver usbfs [Fri Dec 12 16:08:57 2025] usbcore: registered new interface driver hub [Fri Dec 12 16:08:57 2025] usbcore: registered new device driver usb [Fri Dec 12 16:08:57 2025] pps_core: LinuxPPS API ver. 1 registered [Fri Dec 12 16:08:57 2025] pps_core: Software ver. 5.3.6 - Copyright 2005-2007 Rodolfo Giometti [Fri Dec 12 16:08:57 2025] PTP clock support registered [Fri Dec 12 16:08:57 2025] EDAC MC: Ver: 3.0.0 [Fri Dec 12 16:08:57 2025] NetLabel: Initializing [Fri Dec 12 16:08:57 2025] NetLabel: domain hash size = 128 [Fri Dec 12 16:08:57 2025] NetLabel: protocols = UNLABELED CIPSOv4 CALIPSO [Fri Dec 12 16:08:57 2025] NetLabel: unlabeled traffic allowed by default [Fri Dec 12 16:08:57 2025] PCI: Using ACPI for IRQ routing [Fri Dec 12 16:08:57 2025] PCI: pci_cache_line_size set to 64 bytes [Fri Dec 12 16:08:57 2025] e820: reserve RAM buffer [mem 0x0009fc00-0x0009ffff] [Fri Dec 12 16:08:57 2025] e820: reserve RAM buffer [mem 0xbffdb000-0xbfffffff] [Fri Dec 12 16:08:57 2025] pci 0000:00:02.0: vgaarb: setting as boot VGA device [Fri Dec 12 16:08:57 2025] pci 0000:00:02.0: vgaarb: bridge control possible [Fri Dec 12 16:08:57 2025] pci 0000:00:02.0: vgaarb: VGA device added: decodes=io+mem,owns=io+mem,locks=none [Fri Dec 12 16:08:57 2025] vgaarb: loaded [Fri Dec 12 16:08:57 2025] clocksource: Switched to clocksource kvm-clock [Fri Dec 12 16:08:57 2025] VFS: Disk quotas dquot_6.6.0 [Fri Dec 12 16:08:57 2025] VFS: Dquot-cache hash table entries: 512 (order 0, 4096 bytes) [Fri Dec 12 16:08:57 2025] pnp: PnP ACPI init [Fri Dec 12 16:08:57 2025] pnp 00:03: [dma 2] [Fri Dec 12 16:08:57 2025] pnp: PnP ACPI: found 5 devices [Fri Dec 12 16:08:57 2025] clocksource: acpi_pm: mask: 0xffffff max_cycles: 0xffffff, max_idle_ns: 2085701024 ns [Fri Dec 12 16:08:57 2025] NET: Registered PF_INET protocol family [Fri Dec 12 16:08:57 2025] IP idents hash table entries: 131072 (order: 8, 1048576 bytes, linear) [Fri Dec 12 16:08:57 2025] tcp_listen_portaddr_hash hash table entries: 4096 (order: 4, 65536 bytes, linear) [Fri Dec 12 16:08:57 2025] Table-perturb hash table entries: 65536 (order: 6, 262144 bytes, linear) [Fri Dec 12 16:08:57 2025] TCP established hash table entries: 65536 (order: 7, 524288 bytes, linear) [Fri Dec 12 16:08:57 2025] TCP bind hash table entries: 65536 (order: 8, 1048576 bytes, linear) [Fri Dec 12 16:08:57 2025] TCP: Hash tables configured (established 65536 bind 65536) [Fri Dec 12 16:08:57 2025] MPTCP token hash table entries: 8192 (order: 5, 196608 bytes, linear) [Fri Dec 12 16:08:57 2025] UDP hash table entries: 4096 (order: 5, 131072 bytes, linear) [Fri Dec 12 16:08:57 2025] UDP-Lite hash table entries: 4096 (order: 5, 131072 bytes, linear) [Fri Dec 12 16:08:57 2025] NET: Registered PF_UNIX/PF_LOCAL protocol family [Fri Dec 12 16:08:57 2025] NET: Registered PF_XDP protocol family [Fri Dec 12 16:08:57 2025] pci_bus 0000:00: resource 4 [io 0x0000-0x0cf7 window] [Fri Dec 12 16:08:57 2025] pci_bus 0000:00: resource 5 [io 0x0d00-0xffff window] [Fri Dec 12 16:08:57 2025] pci_bus 0000:00: resource 6 [mem 0x000a0000-0x000bffff window] [Fri Dec 12 16:08:57 
2025] pci_bus 0000:00: resource 7 [mem 0xc0000000-0xfebfffff window] [Fri Dec 12 16:08:57 2025] pci_bus 0000:00: resource 8 [mem 0x240000000-0x2bfffffff window] [Fri Dec 12 16:08:57 2025] pci 0000:00:01.0: PIIX3: Enabling Passive Release [Fri Dec 12 16:08:57 2025] pci 0000:00:00.0: Limiting direct PCI/PCI transfers [Fri Dec 12 16:08:57 2025] ACPI: \_SB_.LNKD: Enabled at IRQ 11 [Fri Dec 12 16:08:57 2025] pci 0000:00:01.2: quirk_usb_early_handoff+0x0/0x160 took 76818 usecs [Fri Dec 12 16:08:57 2025] PCI: CLS 0 bytes, default 64 [Fri Dec 12 16:08:57 2025] PCI-DMA: Using software bounce buffering for IO (SWIOTLB) [Fri Dec 12 16:08:57 2025] software IO TLB: mapped [mem 0x00000000ab000000-0x00000000af000000] (64MB) [Fri Dec 12 16:08:57 2025] Trying to unpack rootfs image as initramfs... [Fri Dec 12 16:08:57 2025] ACPI: bus type thunderbolt registered [Fri Dec 12 16:08:57 2025] Initialise system trusted keyrings [Fri Dec 12 16:08:57 2025] Key type blacklist registered [Fri Dec 12 16:08:57 2025] workingset: timestamp_bits=36 max_order=21 bucket_order=0 [Fri Dec 12 16:08:57 2025] zbud: loaded [Fri Dec 12 16:08:57 2025] integrity: Platform Keyring initialized [Fri Dec 12 16:08:57 2025] integrity: Machine keyring initialized [Fri Dec 12 16:08:58 2025] Freeing initrd memory: 87820K [Fri Dec 12 16:08:58 2025] NET: Registered PF_ALG protocol family [Fri Dec 12 16:08:58 2025] xor: automatically using best checksumming function avx [Fri Dec 12 16:08:58 2025] Key type asymmetric registered [Fri Dec 12 16:08:58 2025] Asymmetric key parser 'x509' registered [Fri Dec 12 16:08:58 2025] Block layer SCSI generic (bsg) driver version 0.4 loaded (major 246) [Fri Dec 12 16:08:58 2025] io scheduler mq-deadline registered [Fri Dec 12 16:08:58 2025] io scheduler kyber registered [Fri Dec 12 16:08:58 2025] io scheduler bfq registered [Fri Dec 12 16:08:58 2025] atomic64_test: passed for x86-64 platform with CX8 and with SSE [Fri Dec 12 16:08:58 2025] shpchp: Standard Hot Plug PCI Controller Driver version: 0.4 [Fri Dec 12 16:08:58 2025] input: Power Button as /devices/LNXSYSTM:00/LNXPWRBN:00/input/input0 [Fri Dec 12 16:08:58 2025] ACPI: button: Power Button [PWRF] [Fri Dec 12 16:08:58 2025] ACPI: \_SB_.LNKB: Enabled at IRQ 10 [Fri Dec 12 16:08:58 2025] ACPI: \_SB_.LNKC: Enabled at IRQ 11 [Fri Dec 12 16:08:58 2025] ACPI: \_SB_.LNKA: Enabled at IRQ 10 [Fri Dec 12 16:08:58 2025] Serial: 8250/16550 driver, 4 ports, IRQ sharing enabled [Fri Dec 12 16:08:58 2025] 00:00: ttyS0 at I/O 0x3f8 (irq = 4, base_baud = 115200) is a 16550A [Fri Dec 12 16:08:58 2025] Non-volatile memory driver v1.3 [Fri Dec 12 16:08:58 2025] rdac: device handler registered [Fri Dec 12 16:08:58 2025] hp_sw: device handler registered [Fri Dec 12 16:08:58 2025] emc: device handler registered [Fri Dec 12 16:08:58 2025] alua: device handler registered [Fri Dec 12 16:08:58 2025] uhci_hcd 0000:00:01.2: UHCI Host Controller [Fri Dec 12 16:08:58 2025] uhci_hcd 0000:00:01.2: new USB bus registered, assigned bus number 1 [Fri Dec 12 16:08:58 2025] uhci_hcd 0000:00:01.2: detected 2 ports [Fri Dec 12 16:08:58 2025] uhci_hcd 0000:00:01.2: irq 11, io port 0x0000c100 [Fri Dec 12 16:08:58 2025] usb usb1: New USB device found, idVendor=1d6b, idProduct=0001, bcdDevice= 5.14 [Fri Dec 12 16:08:58 2025] usb usb1: New USB device strings: Mfr=3, Product=2, SerialNumber=1 [Fri Dec 12 16:08:58 2025] usb usb1: Product: UHCI Host Controller [Fri Dec 12 16:08:58 2025] usb usb1: Manufacturer: Linux 5.14.0-648.el9.x86_64 uhci_hcd [Fri Dec 12 16:08:58 2025] usb usb1: SerialNumber: 
0000:00:01.2 [Fri Dec 12 16:08:58 2025] hub 1-0:1.0: USB hub found [Fri Dec 12 16:08:58 2025] hub 1-0:1.0: 2 ports detected [Fri Dec 12 16:08:58 2025] usbcore: registered new interface driver usbserial_generic [Fri Dec 12 16:08:58 2025] usbserial: USB Serial support registered for generic [Fri Dec 12 16:08:58 2025] i8042: PNP: PS/2 Controller [PNP0303:KBD,PNP0f13:MOU] at 0x60,0x64 irq 1,12 [Fri Dec 12 16:08:58 2025] serio: i8042 KBD port at 0x60,0x64 irq 1 [Fri Dec 12 16:08:58 2025] serio: i8042 AUX port at 0x60,0x64 irq 12 [Fri Dec 12 16:08:58 2025] mousedev: PS/2 mouse device common for all mice [Fri Dec 12 16:08:58 2025] rtc_cmos 00:04: RTC can wake from S4 [Fri Dec 12 16:08:58 2025] input: AT Translated Set 2 keyboard as /devices/platform/i8042/serio0/input/input1 [Fri Dec 12 16:08:58 2025] rtc_cmos 00:04: registered as rtc0 [Fri Dec 12 16:08:58 2025] rtc_cmos 00:04: setting system clock to 2025-12-12T16:08:58 UTC (1765555738) [Fri Dec 12 16:08:58 2025] rtc_cmos 00:04: alarms up to one day, y3k, 242 bytes nvram [Fri Dec 12 16:08:58 2025] amd_pstate: the _CPC object is not present in SBIOS or ACPI disabled [Fri Dec 12 16:08:58 2025] input: VirtualPS/2 VMware VMMouse as /devices/platform/i8042/serio1/input/input4 [Fri Dec 12 16:08:58 2025] input: VirtualPS/2 VMware VMMouse as /devices/platform/i8042/serio1/input/input3 [Fri Dec 12 16:08:58 2025] hid: raw HID events driver (C) Jiri Kosina [Fri Dec 12 16:08:58 2025] usbcore: registered new interface driver usbhid [Fri Dec 12 16:08:58 2025] usbhid: USB HID core driver [Fri Dec 12 16:08:58 2025] drop_monitor: Initializing network drop monitor service [Fri Dec 12 16:08:58 2025] Initializing XFRM netlink socket [Fri Dec 12 16:08:58 2025] NET: Registered PF_INET6 protocol family [Fri Dec 12 16:08:58 2025] Segment Routing with IPv6 [Fri Dec 12 16:08:58 2025] NET: Registered PF_PACKET protocol family [Fri Dec 12 16:08:58 2025] mpls_gso: MPLS GSO support [Fri Dec 12 16:08:58 2025] IPI shorthand broadcast: enabled [Fri Dec 12 16:08:58 2025] AVX2 version of gcm_enc/dec engaged. [Fri Dec 12 16:08:58 2025] AES CTR mode by8 optimization enabled [Fri Dec 12 16:08:58 2025] sched_clock: Marking stable (1215002917, 164893128)->(1499743261, -119847216) [Fri Dec 12 16:08:58 2025] registered taskstats version 1 [Fri Dec 12 16:08:58 2025] Loading compiled-in X.509 certificates [Fri Dec 12 16:08:58 2025] Loaded X.509 cert 'The CentOS Project: CentOS Stream kernel signing key: bcc7fcdcfd9be61e8634554e9f7a1c01f32489d8' [Fri Dec 12 16:08:58 2025] Loaded X.509 cert 'Red Hat Enterprise Linux Driver Update Program (key 3): bf57f3e87362bc7229d9f465321773dfd1f77a80' [Fri Dec 12 16:08:58 2025] Loaded X.509 cert 'Red Hat Enterprise Linux kpatch signing key: 4d38fd864ebe18c5f0b72e3852e2014c3a676fc8' [Fri Dec 12 16:08:58 2025] Loaded X.509 cert 'RH-IMA-CA: Red Hat IMA CA: fb31825dd0e073685b264e3038963673f753959a' [Fri Dec 12 16:08:58 2025] Loaded X.509 cert 'Nvidia GPU OOT signing 001: 55e1cef88193e60419f0b0ec379c49f77545acf0' [Fri Dec 12 16:08:58 2025] Demotion targets for Node 0: null [Fri Dec 12 16:08:58 2025] page_owner is disabled [Fri Dec 12 16:08:58 2025] Key type .fscrypt registered [Fri Dec 12 16:08:58 2025] Key type fscrypt-provisioning registered [Fri Dec 12 16:08:58 2025] Key type big_key registered [Fri Dec 12 16:08:58 2025] Key type encrypted registered [Fri Dec 12 16:08:58 2025] ima: No TPM chip found, activating TPM-bypass! 
[Fri Dec 12 16:08:58 2025] Loading compiled-in module X.509 certificates [Fri Dec 12 16:08:58 2025] Loaded X.509 cert 'The CentOS Project: CentOS Stream kernel signing key: bcc7fcdcfd9be61e8634554e9f7a1c01f32489d8' [Fri Dec 12 16:08:58 2025] ima: Allocated hash algorithm: sha256 [Fri Dec 12 16:08:58 2025] ima: No architecture policies found [Fri Dec 12 16:08:58 2025] evm: Initialising EVM extended attributes: [Fri Dec 12 16:08:58 2025] evm: security.selinux [Fri Dec 12 16:08:58 2025] evm: security.SMACK64 (disabled) [Fri Dec 12 16:08:58 2025] evm: security.SMACK64EXEC (disabled) [Fri Dec 12 16:08:58 2025] evm: security.SMACK64TRANSMUTE (disabled) [Fri Dec 12 16:08:58 2025] evm: security.SMACK64MMAP (disabled) [Fri Dec 12 16:08:58 2025] evm: security.apparmor (disabled) [Fri Dec 12 16:08:58 2025] evm: security.ima [Fri Dec 12 16:08:58 2025] evm: security.capability [Fri Dec 12 16:08:58 2025] evm: HMAC attrs: 0x1 [Fri Dec 12 16:08:58 2025] usb 1-1: new full-speed USB device number 2 using uhci_hcd [Fri Dec 12 16:08:58 2025] Running certificate verification RSA selftest [Fri Dec 12 16:08:58 2025] Loaded X.509 cert 'Certificate verification self-testing key: f58703bb33ce1b73ee02eccdee5b8817518fe3db' [Fri Dec 12 16:08:58 2025] Running certificate verification ECDSA selftest [Fri Dec 12 16:08:58 2025] Loaded X.509 cert 'Certificate verification ECDSA self-testing key: 2900bcea1deb7bc8479a84a23d758efdfdd2b2d3' [Fri Dec 12 16:08:58 2025] clk: Disabling unused clocks [Fri Dec 12 16:08:58 2025] Freeing unused decrypted memory: 2028K [Fri Dec 12 16:08:58 2025] Freeing unused kernel image (initmem) memory: 4192K [Fri Dec 12 16:08:58 2025] Write protecting the kernel read-only data: 30720k [Fri Dec 12 16:08:58 2025] Freeing unused kernel image (rodata/data gap) memory: 420K [Fri Dec 12 16:08:58 2025] x86/mm: Checked W+X mappings: passed, no W+X pages found. [Fri Dec 12 16:08:58 2025] Run /init as init process [Fri Dec 12 16:08:58 2025] with arguments: [Fri Dec 12 16:08:58 2025] /init [Fri Dec 12 16:08:58 2025] with environment: [Fri Dec 12 16:08:58 2025] HOME=/ [Fri Dec 12 16:08:58 2025] TERM=linux [Fri Dec 12 16:08:58 2025] BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-648.el9.x86_64 [Fri Dec 12 16:08:58 2025] systemd[1]: systemd 252-59.el9 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified) [Fri Dec 12 16:08:58 2025] systemd[1]: Detected virtualization kvm. [Fri Dec 12 16:08:58 2025] systemd[1]: Detected architecture x86-64. [Fri Dec 12 16:08:58 2025] systemd[1]: Running in initrd. [Fri Dec 12 16:08:58 2025] systemd[1]: No hostname configured, using default hostname. [Fri Dec 12 16:08:58 2025] systemd[1]: Hostname set to . [Fri Dec 12 16:08:58 2025] systemd[1]: Initializing machine ID from VM UUID. 
[Fri Dec 12 16:08:58 2025] usb 1-1: New USB device found, idVendor=0627, idProduct=0001, bcdDevice= 0.00 [Fri Dec 12 16:08:58 2025] usb 1-1: New USB device strings: Mfr=1, Product=3, SerialNumber=10 [Fri Dec 12 16:08:58 2025] usb 1-1: Product: QEMU USB Tablet [Fri Dec 12 16:08:58 2025] usb 1-1: Manufacturer: QEMU [Fri Dec 12 16:08:58 2025] usb 1-1: SerialNumber: 28754-0000:00:01.2-1 [Fri Dec 12 16:08:58 2025] input: QEMU QEMU USB Tablet as /devices/pci0000:00/0000:00:01.2/usb1/1-1/1-1:1.0/0003:0627:0001.0001/input/input5 [Fri Dec 12 16:08:58 2025] hid-generic 0003:0627:0001.0001: input,hidraw0: USB HID v0.01 Mouse [QEMU QEMU USB Tablet] on usb-0000:00:01.2-1/input0 [Fri Dec 12 16:08:58 2025] systemd[1]: Queued start job for default target Initrd Default Target. [Fri Dec 12 16:08:58 2025] systemd[1]: Started Dispatch Password Requests to Console Directory Watch. [Fri Dec 12 16:08:58 2025] systemd[1]: Reached target Local Encrypted Volumes. [Fri Dec 12 16:08:58 2025] systemd[1]: Reached target Initrd /usr File System. [Fri Dec 12 16:08:58 2025] systemd[1]: Reached target Local File Systems. [Fri Dec 12 16:08:58 2025] systemd[1]: Reached target Path Units. [Fri Dec 12 16:08:58 2025] systemd[1]: Reached target Slice Units. [Fri Dec 12 16:08:58 2025] systemd[1]: Reached target Swaps. [Fri Dec 12 16:08:58 2025] systemd[1]: Reached target Timer Units. [Fri Dec 12 16:08:58 2025] systemd[1]: Listening on D-Bus System Message Bus Socket. [Fri Dec 12 16:08:58 2025] systemd[1]: Listening on Journal Socket (/dev/log). [Fri Dec 12 16:08:58 2025] systemd[1]: Listening on Journal Socket. [Fri Dec 12 16:08:58 2025] systemd[1]: Listening on udev Control Socket. [Fri Dec 12 16:08:58 2025] systemd[1]: Listening on udev Kernel Socket. [Fri Dec 12 16:08:58 2025] systemd[1]: Reached target Socket Units. [Fri Dec 12 16:08:58 2025] systemd[1]: Starting Create List of Static Device Nodes... [Fri Dec 12 16:08:58 2025] systemd[1]: Starting Journal Service... [Fri Dec 12 16:08:58 2025] systemd[1]: Load Kernel Modules was skipped because no trigger condition checks were met. [Fri Dec 12 16:08:58 2025] systemd[1]: Starting Apply Kernel Variables... [Fri Dec 12 16:08:58 2025] systemd[1]: Starting Create System Users... [Fri Dec 12 16:08:58 2025] systemd[1]: Starting Setup Virtual Console... [Fri Dec 12 16:08:58 2025] systemd[1]: Finished Create List of Static Device Nodes. [Fri Dec 12 16:08:58 2025] systemd[1]: Finished Apply Kernel Variables. [Fri Dec 12 16:08:58 2025] systemd[1]: Finished Create System Users. [Fri Dec 12 16:08:58 2025] systemd[1]: Started Journal Service. [Fri Dec 12 16:08:59 2025] device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log. [Fri Dec 12 16:08:59 2025] device-mapper: uevent: version 1.0.3 [Fri Dec 12 16:08:59 2025] device-mapper: ioctl: 4.50.0-ioctl (2025-04-28) initialised: dm-devel@lists.linux.dev [Fri Dec 12 16:08:59 2025] RPC: Registered named UNIX socket transport module. [Fri Dec 12 16:08:59 2025] RPC: Registered udp transport module. [Fri Dec 12 16:08:59 2025] RPC: Registered tcp transport module. [Fri Dec 12 16:08:59 2025] RPC: Registered tcp-with-tls transport module. [Fri Dec 12 16:08:59 2025] RPC: Registered tcp NFSv4.1 backchannel transport module. [Fri Dec 12 16:08:59 2025] virtio_blk virtio2: 8/0/0 default/read/poll queues [Fri Dec 12 16:08:59 2025] virtio_blk virtio2: [vda] 167772160 512-byte logical blocks (85.9 GB/80.0 GiB) [Fri Dec 12 16:08:59 2025] libata version 3.00 loaded. 
[Fri Dec 12 16:08:59 2025] vda: vda1 [Fri Dec 12 16:08:59 2025] ata_piix 0000:00:01.1: version 2.13 [Fri Dec 12 16:08:59 2025] scsi host0: ata_piix [Fri Dec 12 16:08:59 2025] scsi host1: ata_piix [Fri Dec 12 16:08:59 2025] ata1: PATA max MWDMA2 cmd 0x1f0 ctl 0x3f6 bmdma 0xc140 irq 14 lpm-pol 0 [Fri Dec 12 16:08:59 2025] ata2: PATA max MWDMA2 cmd 0x170 ctl 0x376 bmdma 0xc148 irq 15 lpm-pol 0 [Fri Dec 12 16:08:59 2025] ata1: found unknown device (class 0) [Fri Dec 12 16:08:59 2025] ata1.00: ATAPI: QEMU DVD-ROM, 2.5+, max UDMA/100 [Fri Dec 12 16:08:59 2025] scsi 0:0:0:0: CD-ROM QEMU QEMU DVD-ROM 2.5+ PQ: 0 ANSI: 5 [Fri Dec 12 16:08:59 2025] scsi 0:0:0:0: Attached scsi generic sg0 type 5 [Fri Dec 12 16:08:59 2025] sr 0:0:0:0: [sr0] scsi3-mmc drive: 4x/4x cd/rw xa/form2 tray [Fri Dec 12 16:08:59 2025] cdrom: Uniform CD-ROM driver Revision: 3.20 [Fri Dec 12 16:09:00 2025] sr 0:0:0:0: Attached scsi CD-ROM sr0 [Fri Dec 12 16:09:00 2025] SGI XFS with ACLs, security attributes, scrub, quota, no debug enabled [Fri Dec 12 16:09:00 2025] XFS (vda1): Mounting V5 Filesystem cbdedf45-ed1d-4952-82a8-33a12c0ba266 [Fri Dec 12 16:09:00 2025] XFS (vda1): Ending clean mount [Fri Dec 12 16:09:01 2025] systemd-journald[307]: Received SIGTERM from PID 1 (systemd). [Fri Dec 12 16:09:01 2025] audit: type=1404 audit(1765555741.362:2): enforcing=1 old_enforcing=0 auid=4294967295 ses=4294967295 enabled=1 old-enabled=1 lsm=selinux res=1 [Fri Dec 12 16:09:01 2025] SELinux: policy capability network_peer_controls=1 [Fri Dec 12 16:09:01 2025] SELinux: policy capability open_perms=1 [Fri Dec 12 16:09:01 2025] SELinux: policy capability extended_socket_class=1 [Fri Dec 12 16:09:01 2025] SELinux: policy capability always_check_network=0 [Fri Dec 12 16:09:01 2025] SELinux: policy capability cgroup_seclabel=1 [Fri Dec 12 16:09:01 2025] SELinux: policy capability nnp_nosuid_transition=1 [Fri Dec 12 16:09:01 2025] SELinux: policy capability genfs_seclabel_symlinks=1 [Fri Dec 12 16:09:01 2025] audit: type=1403 audit(1765555741.495:3): auid=4294967295 ses=4294967295 lsm=selinux res=1 [Fri Dec 12 16:09:01 2025] systemd[1]: Successfully loaded SELinux policy in 135.267ms. [Fri Dec 12 16:09:01 2025] systemd[1]: Relabelled /dev, /dev/shm, /run, /sys/fs/cgroup in 26.859ms. [Fri Dec 12 16:09:01 2025] systemd[1]: systemd 252-59.el9 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified) [Fri Dec 12 16:09:01 2025] systemd[1]: Detected virtualization kvm. [Fri Dec 12 16:09:01 2025] systemd[1]: Detected architecture x86-64. [Fri Dec 12 16:09:01 2025] systemd-rc-local-generator[637]: /etc/rc.d/rc.local is not marked executable, skipping. [Fri Dec 12 16:09:01 2025] systemd[1]: initrd-switch-root.service: Deactivated successfully. [Fri Dec 12 16:09:01 2025] systemd[1]: Stopped Switch Root. [Fri Dec 12 16:09:01 2025] systemd[1]: systemd-journald.service: Scheduled restart job, restart counter is at 1. [Fri Dec 12 16:09:01 2025] systemd[1]: Created slice Slice /system/getty. [Fri Dec 12 16:09:01 2025] systemd[1]: Created slice Slice /system/serial-getty. [Fri Dec 12 16:09:01 2025] systemd[1]: Created slice Slice /system/sshd-keygen. [Fri Dec 12 16:09:01 2025] systemd[1]: Created slice User and Session Slice. 
[Fri Dec 12 16:09:01 2025] systemd[1]: Started Dispatch Password Requests to Console Directory Watch. [Fri Dec 12 16:09:01 2025] systemd[1]: Started Forward Password Requests to Wall Directory Watch. [Fri Dec 12 16:09:01 2025] systemd[1]: Set up automount Arbitrary Executable File Formats File System Automount Point. [Fri Dec 12 16:09:01 2025] systemd[1]: Reached target Local Encrypted Volumes. [Fri Dec 12 16:09:01 2025] systemd[1]: Stopped target Switch Root. [Fri Dec 12 16:09:01 2025] systemd[1]: Stopped target Initrd File Systems. [Fri Dec 12 16:09:01 2025] systemd[1]: Stopped target Initrd Root File System. [Fri Dec 12 16:09:01 2025] systemd[1]: Reached target Local Integrity Protected Volumes. [Fri Dec 12 16:09:01 2025] systemd[1]: Reached target Path Units. [Fri Dec 12 16:09:01 2025] systemd[1]: Reached target rpc_pipefs.target. [Fri Dec 12 16:09:01 2025] systemd[1]: Reached target Slice Units. [Fri Dec 12 16:09:01 2025] systemd[1]: Reached target Swaps. [Fri Dec 12 16:09:01 2025] systemd[1]: Reached target Local Verity Protected Volumes. [Fri Dec 12 16:09:01 2025] systemd[1]: Listening on RPCbind Server Activation Socket. [Fri Dec 12 16:09:01 2025] systemd[1]: Reached target RPC Port Mapper. [Fri Dec 12 16:09:01 2025] systemd[1]: Listening on Process Core Dump Socket. [Fri Dec 12 16:09:01 2025] systemd[1]: Listening on initctl Compatibility Named Pipe. [Fri Dec 12 16:09:01 2025] systemd[1]: Listening on udev Control Socket. [Fri Dec 12 16:09:01 2025] systemd[1]: Listening on udev Kernel Socket. [Fri Dec 12 16:09:01 2025] systemd[1]: Mounting Huge Pages File System... [Fri Dec 12 16:09:01 2025] systemd[1]: Mounting POSIX Message Queue File System... [Fri Dec 12 16:09:01 2025] systemd[1]: Mounting Kernel Debug File System... [Fri Dec 12 16:09:01 2025] systemd[1]: Mounting Kernel Trace File System... [Fri Dec 12 16:09:01 2025] systemd[1]: Kernel Module supporting RPCSEC_GSS was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). [Fri Dec 12 16:09:01 2025] systemd[1]: Starting Create List of Static Device Nodes... [Fri Dec 12 16:09:01 2025] systemd[1]: Starting Load Kernel Module configfs... [Fri Dec 12 16:09:01 2025] systemd[1]: Starting Load Kernel Module drm... [Fri Dec 12 16:09:01 2025] systemd[1]: Starting Load Kernel Module efi_pstore... [Fri Dec 12 16:09:01 2025] systemd[1]: Starting Load Kernel Module fuse... [Fri Dec 12 16:09:01 2025] systemd[1]: Starting Read and set NIS domainname from /etc/sysconfig/network... [Fri Dec 12 16:09:01 2025] systemd[1]: systemd-fsck-root.service: Deactivated successfully. [Fri Dec 12 16:09:01 2025] systemd[1]: Stopped File System Check on Root Device. [Fri Dec 12 16:09:01 2025] systemd[1]: Stopped Journal Service. [Fri Dec 12 16:09:01 2025] systemd[1]: Starting Journal Service... [Fri Dec 12 16:09:01 2025] systemd[1]: Load Kernel Modules was skipped because no trigger condition checks were met. [Fri Dec 12 16:09:01 2025] systemd[1]: Starting Generate network units from Kernel command line... [Fri Dec 12 16:09:01 2025] systemd[1]: TPM2 PCR Machine ID Measurement was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). [Fri Dec 12 16:09:01 2025] systemd[1]: Starting Remount Root and Kernel File Systems... [Fri Dec 12 16:09:01 2025] systemd[1]: Repartition Root Disk was skipped because no trigger condition checks were met. [Fri Dec 12 16:09:01 2025] systemd[1]: Starting Apply Kernel Variables... 
[Fri Dec 12 16:09:01 2025] fuse: init (API version 7.37) [Fri Dec 12 16:09:01 2025] systemd[1]: Starting Coldplug All udev Devices... [Fri Dec 12 16:09:01 2025] xfs filesystem being remounted at / supports timestamps until 2038 (0x7fffffff) [Fri Dec 12 16:09:01 2025] systemd[1]: Mounted Huge Pages File System. [Fri Dec 12 16:09:01 2025] systemd[1]: Mounted POSIX Message Queue File System. [Fri Dec 12 16:09:01 2025] systemd[1]: Started Journal Service. [Fri Dec 12 16:09:01 2025] ACPI: bus type drm_connector registered [Fri Dec 12 16:09:01 2025] systemd-journald[679]: Received client request to flush runtime journal. [Fri Dec 12 16:09:02 2025] input: PC Speaker as /devices/platform/pcspkr/input/input6 [Fri Dec 12 16:09:02 2025] piix4_smbus 0000:00:01.3: SMBus Host Controller at 0x700, revision 0 [Fri Dec 12 16:09:02 2025] i2c i2c-0: 1/1 memory slots populated (from DMI) [Fri Dec 12 16:09:02 2025] i2c i2c-0: Memory type 0x07 not supported yet, not instantiating SPD [Fri Dec 12 16:09:02 2025] Warning: Deprecated Driver is detected: nft_compat will not be maintained in a future major release and may be disabled [Fri Dec 12 16:09:02 2025] Warning: Deprecated Driver is detected: nft_compat_module_init will not be maintained in a future major release and may be disabled [Fri Dec 12 16:09:02 2025] [drm] pci: virtio-vga detected at 0000:00:02.0 [Fri Dec 12 16:09:02 2025] virtio-pci 0000:00:02.0: vgaarb: deactivate vga console [Fri Dec 12 16:09:02 2025] Console: switching to colour dummy device 80x25 [Fri Dec 12 16:09:02 2025] [drm] features: -virgl +edid -resource_blob -host_visible [Fri Dec 12 16:09:02 2025] [drm] features: -context_init [Fri Dec 12 16:09:02 2025] [drm] number of scanouts: 1 [Fri Dec 12 16:09:02 2025] [drm] number of cap sets: 0 [Fri Dec 12 16:09:02 2025] [drm] Initialized virtio_gpu 0.1.0 for 0000:00:02.0 on minor 0 [Fri Dec 12 16:09:03 2025] kvm_amd: TSC scaling supported [Fri Dec 12 16:09:03 2025] kvm_amd: Nested Virtualization enabled [Fri Dec 12 16:09:03 2025] kvm_amd: Nested Paging enabled [Fri Dec 12 16:09:03 2025] kvm_amd: LBR virtualization supported [Fri Dec 12 16:09:03 2025] fbcon: virtio_gpudrmfb (fb0) is primary device [Fri Dec 12 16:09:03 2025] Console: switching to colour frame buffer device 128x48 [Fri Dec 12 16:09:03 2025] virtio-pci 0000:00:02.0: [drm] fb0: virtio_gpudrmfb frame buffer device [Fri Dec 12 16:09:03 2025] ISO 9660 Extensions: Microsoft Joliet Level 3 [Fri Dec 12 16:09:03 2025] ISO 9660 Extensions: RRIP_1991A [Fri Dec 12 16:09:10 2025] block vda: the capability attribute has been deprecated. 
[Fri Dec 12 16:12:13 2025] pci 0000:00:07.0: [1af4:1000] type 00 class 0x020000 conventional PCI endpoint [Fri Dec 12 16:12:13 2025] pci 0000:00:07.0: BAR 0 [io 0x0000-0x003f] [Fri Dec 12 16:12:13 2025] pci 0000:00:07.0: BAR 1 [mem 0x00000000-0x00000fff] [Fri Dec 12 16:12:13 2025] pci 0000:00:07.0: BAR 4 [mem 0x00000000-0x00003fff 64bit pref] [Fri Dec 12 16:12:13 2025] pci 0000:00:07.0: ROM [mem 0x00000000-0x0007ffff pref] [Fri Dec 12 16:12:13 2025] pci 0000:00:07.0: ROM [mem 0xc0000000-0xc007ffff pref]: assigned [Fri Dec 12 16:12:13 2025] pci 0000:00:07.0: BAR 4 [mem 0x240000000-0x240003fff 64bit pref]: assigned [Fri Dec 12 16:12:13 2025] pci 0000:00:07.0: BAR 1 [mem 0xc0080000-0xc0080fff]: assigned [Fri Dec 12 16:12:13 2025] pci 0000:00:07.0: BAR 0 [io 0x1000-0x103f]: assigned [Fri Dec 12 16:12:13 2025] virtio-pci 0000:00:07.0: enabling device (0000 -> 0003) [Fri Dec 12 16:20:45 2025] systemd-rc-local-generator[8640]: /etc/rc.d/rc.local is not marked executable, skipping. [Fri Dec 12 16:21:07 2025] SELinux: Converting 387 SID table entries... [Fri Dec 12 16:21:07 2025] SELinux: policy capability network_peer_controls=1 [Fri Dec 12 16:21:07 2025] SELinux: policy capability open_perms=1 [Fri Dec 12 16:21:07 2025] SELinux: policy capability extended_socket_class=1 [Fri Dec 12 16:21:07 2025] SELinux: policy capability always_check_network=0 [Fri Dec 12 16:21:07 2025] SELinux: policy capability cgroup_seclabel=1 [Fri Dec 12 16:21:07 2025] SELinux: policy capability nnp_nosuid_transition=1 [Fri Dec 12 16:21:07 2025] SELinux: policy capability genfs_seclabel_symlinks=1 [Fri Dec 12 16:21:16 2025] SELinux: Converting 387 SID table entries... [Fri Dec 12 16:21:16 2025] SELinux: policy capability network_peer_controls=1 [Fri Dec 12 16:21:16 2025] SELinux: policy capability open_perms=1 [Fri Dec 12 16:21:16 2025] SELinux: policy capability extended_socket_class=1 [Fri Dec 12 16:21:16 2025] SELinux: policy capability always_check_network=0 [Fri Dec 12 16:21:16 2025] SELinux: policy capability cgroup_seclabel=1 [Fri Dec 12 16:21:16 2025] SELinux: policy capability nnp_nosuid_transition=1 [Fri Dec 12 16:21:16 2025] SELinux: policy capability genfs_seclabel_symlinks=1 [Fri Dec 12 16:21:25 2025] SELinux: Converting 387 SID table entries... [Fri Dec 12 16:21:25 2025] SELinux: policy capability network_peer_controls=1 [Fri Dec 12 16:21:25 2025] SELinux: policy capability open_perms=1 [Fri Dec 12 16:21:25 2025] SELinux: policy capability extended_socket_class=1 [Fri Dec 12 16:21:25 2025] SELinux: policy capability always_check_network=0 [Fri Dec 12 16:21:25 2025] SELinux: policy capability cgroup_seclabel=1 [Fri Dec 12 16:21:25 2025] SELinux: policy capability nnp_nosuid_transition=1 [Fri Dec 12 16:21:25 2025] SELinux: policy capability genfs_seclabel_symlinks=1 [Fri Dec 12 16:21:37 2025] SELinux: Converting 390 SID table entries... [Fri Dec 12 16:21:37 2025] SELinux: policy capability network_peer_controls=1 [Fri Dec 12 16:21:37 2025] SELinux: policy capability open_perms=1 [Fri Dec 12 16:21:37 2025] SELinux: policy capability extended_socket_class=1 [Fri Dec 12 16:21:37 2025] SELinux: policy capability always_check_network=0 [Fri Dec 12 16:21:37 2025] SELinux: policy capability cgroup_seclabel=1 [Fri Dec 12 16:21:37 2025] SELinux: policy capability nnp_nosuid_transition=1 [Fri Dec 12 16:21:37 2025] SELinux: policy capability genfs_seclabel_symlinks=1 [Fri Dec 12 16:21:59 2025] systemd-rc-local-generator[9690]: /etc/rc.d/rc.local is not marked executable, skipping. 
[Fri Dec 12 16:22:02 2025] evm: overlay not supported
home/zuul/zuul-output/logs/selinux-denials.log0000644000000000000000000000000015117043117020601 0ustar rootroot
home/zuul/zuul-output/logs/system-config/0000755000175000017500000000000015117043117017655 5ustar zuulzuul
home/zuul/zuul-output/logs/system-config/libvirt/0000755000175000017500000000000015117043120021322 5ustar zuulzuul
home/zuul/zuul-output/logs/system-config/libvirt/libvirt-admin.conf0000644000175000000000000000070215117043120024700 0ustar zuulroot
#
# This can be used to setup URI aliases for frequently
# used connection URIs. Aliases may contain only the
# characters a-Z, 0-9, _, -.
#
# Following the '=' may be any valid libvirt admin connection
# URI, including arbitrary parameters
#uri_aliases = [
#  "admin=libvirtd:///system",
#]
# This specifies the default location the client tries to connect to if no other
# URI is provided by the application
#uri_default = "libvirtd:///system"
home/zuul/zuul-output/logs/system-config/libvirt/libvirt.conf0000644000175000000000000000104315117043120023611 0ustar zuulroot
#
# This can be used to setup URI aliases for frequently
# used connection URIs. Aliases may contain only the
# characters a-Z, 0-9, _, -.
#
# Following the '=' may be any valid libvirt connection
# URI, including arbitrary parameters
#uri_aliases = [
#  "hail=qemu+ssh://root@hail.cloud.example.com/system",
#  "sleet=qemu+ssh://root@sleet.cloud.example.com/system",
#]
#
# These can be used in cases when no URI is supplied by the application
# (@uri_default also prevents probing of the hypervisor driver).
#
#uri_default = "qemu:///system"
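Both libvirt client configuration files above are captured with every option commented out. As a minimal illustrative sketch only (the "ci" alias name and ci-host.example.com are hypothetical and not part of this job's configuration), an uncommented libvirt.conf that defines one URI alias and a default connection could look like this:

# Hypothetical example, not captured from this job: defines a short alias "ci"
# for a remote hypervisor and uses the local system socket when no URI is given.
uri_aliases = [
  "ci=qemu+ssh://root@ci-host.example.com/system",
]
uri_default = "qemu:///system"

As the comment in the captured file notes, setting uri_default also prevents probing of the hypervisor driver when no URI is supplied.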
home/zuul/zuul-output/logs/registries.conf0000644000000000000000000000744715117043120020041 0ustar rootroot
# For more information on this configuration file, see containers-registries.conf(5).
#
# NOTE: RISK OF USING UNQUALIFIED IMAGE NAMES
# We recommend always using fully qualified image names including the registry
# server (full dns name), namespace, image name, and tag
# (e.g., registry.redhat.io/ubi8/ubi:latest). Pulling by digest (i.e.,
# quay.io/repository/name@digest) further eliminates the ambiguity of tags.
# When using short names, there is always an inherent risk that the image being
# pulled could be spoofed. For example, a user wants to pull an image named
# `foobar` from a registry and expects it to come from myregistry.com. If
# myregistry.com is not first in the search list, an attacker could place a
# different `foobar` image at a registry earlier in the search list. The user
# would accidentally pull and run the attacker's image and code rather than the
# intended content. We recommend only adding registries which are completely
# trusted (i.e., registries which don't allow unknown or anonymous users to
# create accounts with arbitrary names). This will prevent an image from being
# spoofed, squatted or otherwise made insecure. If it is necessary to use one
# of these registries, it should be added at the end of the list.
#
#
# An array of host[:port] registries to try when pulling an unqualified image, in order.
unqualified-search-registries = ["registry.access.redhat.com", "registry.redhat.io", "docker.io"]
# [[registry]]
# # The "prefix" field is used to choose the relevant [[registry]] TOML table;
# # (only) the TOML table with the longest match for the input image name
# # (taking into account namespace/repo/tag/digest separators) is used.
# #
# # The prefix can also be of the form: *.example.com for wildcard subdomain
# # matching.
# #
# # If the prefix field is missing, it defaults to be the same as the "location" field.
# prefix = "example.com/foo"
#
# # If true, unencrypted HTTP as well as TLS connections with untrusted
# # certificates are allowed.
# insecure = false
#
# # If true, pulling images with matching names is forbidden.
# blocked = false
#
# # The physical location of the "prefix"-rooted namespace.
# #
# # By default, this is equal to "prefix" (in which case "prefix" can be omitted
# # and the [[registry]] TOML table can only specify "location").
# #
# # Example: Given
# #   prefix = "example.com/foo"
# #   location = "internal-registry-for-example.net/bar"
# # requests for the image example.com/foo/myimage:latest will actually work with the
# # internal-registry-for-example.net/bar/myimage:latest image.
# #
# # The location can be empty iff prefix is in a
# # wildcarded format: "*.example.com". In this case, the input reference will
# # be used as-is without any rewrite.
# location = "internal-registry-for-example.net/bar"
#
# # (Possibly-partial) mirrors for the "prefix"-rooted namespace.
# #
# # The mirrors are attempted in the specified order; the first one that can be
# # contacted and contains the image will be used (and if none of the mirrors contains the image,
# # the primary location specified by the "registry.location" field, or using the unmodified
# # user-specified reference, is tried last).
# #
# # Each TOML table in the "mirror" array can contain the following fields, with the same semantics
# # as if specified in the [[registry]] TOML table directly:
# # - location
# # - insecure
# [[registry.mirror]]
# location = "example-mirror-0.local/mirror-for-foo"
# [[registry.mirror]]
# location = "example-mirror-1.local/mirrors/foo"
# insecure = true
# # Given the above, a pull of example.com/foo/image:latest will try:
# # 1. example-mirror-0.local/mirror-for-foo/image:latest
# # 2. example-mirror-1.local/mirrors/foo/image:latest
# # 3. internal-registry-for-example.net/bar/image:latest
# # in order, and use the first one that exists.
short-name-mode = "enforcing"
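The commented [[registry]] walk-through above only sketches the syntax. As an illustrative, uncommented sketch that reuses the hypothetical hosts from those comments (none of these registries are configured for this job), a prefix remap with two mirrors would be written as:

# Hypothetical sketch based on the commented example above; not part of the
# captured configuration. Pulls for example.com/foo/* try the two mirrors in
# order and fall back to the internal registry location.
[[registry]]
prefix = "example.com/foo"
location = "internal-registry-for-example.net/bar"

[[registry.mirror]]
location = "example-mirror-0.local/mirror-for-foo"

[[registry.mirror]]
location = "example-mirror-1.local/mirrors/foo"
insecure = true

With this stanza, a pull of example.com/foo/image:latest tries example-mirror-0.local, then example-mirror-1.local, and finally internal-registry-for-example.net/bar/image:latest, exactly as described in the numbered comment above.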
short-name-mode = "enforcing" home/zuul/zuul-output/logs/registries.conf.d/0000755000175000000000000000000015117043120020355 5ustar zuulroothome/zuul/zuul-output/logs/registries.conf.d/000-shortnames.conf0000644000175000000000000001735515117043120023717 0ustar zuulroot[aliases] # almalinux "almalinux" = "docker.io/library/almalinux" "almalinux-minimal" = "docker.io/library/almalinux-minimal" # Amazon Linux "amazonlinux" = "public.ecr.aws/amazonlinux/amazonlinux" # Arch Linux "archlinux" = "docker.io/library/archlinux" # centos "centos" = "quay.io/centos/centos" # containers "skopeo" = "quay.io/skopeo/stable" "buildah" = "quay.io/buildah/stable" "podman" = "quay.io/podman/stable" "hello" = "quay.io/podman/hello" "hello-world" = "quay.io/podman/hello" # docker "alpine" = "docker.io/library/alpine" "docker" = "docker.io/library/docker" "registry" = "docker.io/library/registry" "swarm" = "docker.io/library/swarm" # Fedora "fedora-bootc" = "registry.fedoraproject.org/fedora-bootc" "fedora-minimal" = "registry.fedoraproject.org/fedora-minimal" "fedora" = "registry.fedoraproject.org/fedora" # Gentoo "gentoo" = "docker.io/gentoo/stage3" # openSUSE "opensuse/tumbleweed" = "registry.opensuse.org/opensuse/tumbleweed" "opensuse/tumbleweed-dnf" = "registry.opensuse.org/opensuse/tumbleweed-dnf" "opensuse/tumbleweed-microdnf" = "registry.opensuse.org/opensuse/tumbleweed-microdnf" "opensuse/leap" = "registry.opensuse.org/opensuse/leap" "opensuse/busybox" = "registry.opensuse.org/opensuse/busybox" "tumbleweed" = "registry.opensuse.org/opensuse/tumbleweed" "tumbleweed-dnf" = "registry.opensuse.org/opensuse/tumbleweed-dnf" "tumbleweed-microdnf" = "registry.opensuse.org/opensuse/tumbleweed-microdnf" "leap" = "registry.opensuse.org/opensuse/leap" "leap-dnf" = "registry.opensuse.org/opensuse/leap-dnf" "leap-microdnf" = "registry.opensuse.org/opensuse/leap-microdnf" "tw-busybox" = "registry.opensuse.org/opensuse/busybox" # OTel (Open Telemetry) - opentelemetry.io "otel/autoinstrumentation-go" = "docker.io/otel/autoinstrumentation-go" "otel/autoinstrumentation-nodejs" = "docker.io/otel/autoinstrumentation-nodejs" "otel/autoinstrumentation-python" = "docker.io/otel/autoinstrumentation-python" "otel/autoinstrumentation-java" = "docker.io/otel/autoinstrumentation-java" "otel/autoinstrumentation-dotnet" = "docker.io/otel/autoinstrumentation-dotnet" "otel/opentelemetry-collector" = "docker.io/otel/opentelemetry-collector" "otel/opentelemetry-collector-contrib" = "docker.io/otel/opentelemetry-collector-contrib" "otel/opentelemetry-collector-contrib-dev" = "docker.io/otel/opentelemetry-collector-contrib-dev" "otel/opentelemetry-collector-k8s" = "docker.io/otel/opentelemetry-collector-k8s" "otel/opentelemetry-operator" = "docker.io/otel/opentelemetry-operator" "otel/opentelemetry-operator-bundle" = "docker.io/otel/opentelemetry-operator-bundle" "otel/operator-opamp-bridge" = "docker.io/otel/operator-opamp-bridge" "otel/semconvgen" = "docker.io/otel/semconvgen" "otel/weaver" = "docker.io/otel/weaver" # SUSE "suse/sle15" = "registry.suse.com/suse/sle15" "suse/sles12sp5" = "registry.suse.com/suse/sles12sp5" "suse/sles12sp4" = "registry.suse.com/suse/sles12sp4" "suse/sles12sp3" = "registry.suse.com/suse/sles12sp3" "sle15" = "registry.suse.com/suse/sle15" "sles12sp5" = "registry.suse.com/suse/sles12sp5" "sles12sp4" = "registry.suse.com/suse/sles12sp4" "sles12sp3" = "registry.suse.com/suse/sles12sp3" "bci-base" = "registry.suse.com/bci/bci-base" "bci/bci-base" = "registry.suse.com/bci/bci-base" "bci-micro" = 
"registry.suse.com/bci/bci-micro" "bci/bci-micro" = "registry.suse.com/bci/bci-micro" "bci-minimal" = "registry.suse.com/bci/bci-minimal" "bci/bci-minimal" = "registry.suse.com/bci/bci-minimal" "bci-busybox" = "registry.suse.com/bci/bci-busybox" "bci/bci-busybox" = "registry.suse.com/bci/bci-busybox" # Red Hat Enterprise Linux "rhel" = "registry.access.redhat.com/rhel" "rhel6" = "registry.access.redhat.com/rhel6" "rhel7" = "registry.access.redhat.com/rhel7" "rhel7.9" = "registry.access.redhat.com/rhel7.9" "rhel-atomic" = "registry.access.redhat.com/rhel-atomic" "rhel9-bootc" = "registry.redhat.io/rhel9/rhel-bootc" "rhel-minimal" = "registry.access.redhat.com/rhel-minimal" "rhel-init" = "registry.access.redhat.com/rhel-init" "rhel7-atomic" = "registry.access.redhat.com/rhel7-atomic" "rhel7-minimal" = "registry.access.redhat.com/rhel7-minimal" "rhel7-init" = "registry.access.redhat.com/rhel7-init" "rhel7/rhel" = "registry.access.redhat.com/rhel7/rhel" "rhel7/rhel-atomic" = "registry.access.redhat.com/rhel7/rhel7/rhel-atomic" "ubi7/ubi" = "registry.access.redhat.com/ubi7/ubi" "ubi7/ubi-minimal" = "registry.access.redhat.com/ubi7-minimal" "ubi7/ubi-init" = "registry.access.redhat.com/ubi7-init" "ubi7" = "registry.access.redhat.com/ubi7" "ubi7-init" = "registry.access.redhat.com/ubi7-init" "ubi7-minimal" = "registry.access.redhat.com/ubi7-minimal" "rhel8" = "registry.access.redhat.com/ubi8" "rhel8-init" = "registry.access.redhat.com/ubi8-init" "rhel8-minimal" = "registry.access.redhat.com/ubi8-minimal" "rhel8-micro" = "registry.access.redhat.com/ubi8-micro" "ubi8" = "registry.access.redhat.com/ubi8" "ubi8-minimal" = "registry.access.redhat.com/ubi8-minimal" "ubi8-init" = "registry.access.redhat.com/ubi8-init" "ubi8-micro" = "registry.access.redhat.com/ubi8-micro" "ubi8/ubi" = "registry.access.redhat.com/ubi8/ubi" "ubi8/ubi-minimal" = "registry.access.redhat.com/ubi8-minimal" "ubi8/ubi-init" = "registry.access.redhat.com/ubi8-init" "ubi8/ubi-micro" = "registry.access.redhat.com/ubi8-micro" "ubi8/podman" = "registry.access.redhat.com/ubi8/podman" "ubi8/buildah" = "registry.access.redhat.com/ubi8/buildah" "ubi8/skopeo" = "registry.access.redhat.com/ubi8/skopeo" "rhel9" = "registry.access.redhat.com/ubi9" "rhel9-init" = "registry.access.redhat.com/ubi9-init" "rhel9-minimal" = "registry.access.redhat.com/ubi9-minimal" "rhel9-micro" = "registry.access.redhat.com/ubi9-micro" "ubi9" = "registry.access.redhat.com/ubi9" "ubi9-minimal" = "registry.access.redhat.com/ubi9-minimal" "ubi9-init" = "registry.access.redhat.com/ubi9-init" "ubi9-micro" = "registry.access.redhat.com/ubi9-micro" "ubi9/ubi" = "registry.access.redhat.com/ubi9/ubi" "ubi9/ubi-minimal" = "registry.access.redhat.com/ubi9-minimal" "ubi9/ubi-init" = "registry.access.redhat.com/ubi9-init" "ubi9/ubi-micro" = "registry.access.redhat.com/ubi9-micro" "ubi9/podman" = "registry.access.redhat.com/ubi9/podman" "ubi9/buildah" = "registry.access.redhat.com/ubi9/buildah" "ubi9/skopeo" = "registry.access.redhat.com/ubi9/skopeo" # Rocky Linux "rockylinux" = "quay.io/rockylinux/rockylinux" # Debian "debian" = "docker.io/library/debian" # Kali Linux "kali-bleeding-edge" = "docker.io/kalilinux/kali-bleeding-edge" "kali-dev" = "docker.io/kalilinux/kali-dev" "kali-experimental" = "docker.io/kalilinux/kali-experimental" "kali-last-release" = "docker.io/kalilinux/kali-last-release" "kali-rolling" = "docker.io/kalilinux/kali-rolling" # Ubuntu "ubuntu" = "docker.io/library/ubuntu" # Oracle Linux "oraclelinux" = "container-registry.oracle.com/os/oraclelinux" # 
busybox "busybox" = "docker.io/library/busybox" # golang "golang" = "docker.io/library/golang" # php "php" = "docker.io/library/php" # python "python" = "docker.io/library/python" # rust "rust" = "docker.io/library/rust" # node "node" = "docker.io/library/node" # Grafana Labs "grafana/agent" = "docker.io/grafana/agent" "grafana/grafana" = "docker.io/grafana/grafana" "grafana/k6" = "docker.io/grafana/k6" "grafana/loki" = "docker.io/grafana/loki" "grafana/mimir" = "docker.io/grafana/mimir" "grafana/oncall" = "docker.io/grafana/oncall" "grafana/pyroscope" = "docker.io/grafana/pyroscope" "grafana/tempo" = "docker.io/grafana/tempo" # curl "curl" = "quay.io/curl/curl" # nginx "nginx" = "docker.io/library/nginx" # QUBIP "qubip/pq-container" = "quay.io/qubip/pq-container" home/zuul/zuul-output/artifacts/0000755000175000017500000000000015117037211016100 5ustar zuulzuulhome/zuul/zuul-output/docs/0000755000175000017500000000000015117037211015050 5ustar zuulzuul